HydrogenCodeStub::GenerateLightweightMissCode(
    ExternalReference miss) {
  Factory* factory = isolate()->factory();

  // Generate the new code.
  MacroAssembler masm(isolate(), NULL, 256, CodeObjectRequired::kYes);

  {
    // Update the static counter each time a new code stub is generated.
    isolate()->counters()->code_stubs()->Increment();

    // Generate the code for the stub.
    masm.set_generating_stub(true);
    // TODO(yangguo): remove this once we can serialize IC stubs.
    masm.enable_serializer();
    NoCurrentFrameScope scope(&masm);
    GenerateLightweightMiss(&masm, miss);
  }

  // Create the code object.
  CodeDesc desc;
  masm.GetCode(&desc);

  // Copy the generated code into a heap object.
  Code::Flags flags = Code::ComputeFlags(
      GetCodeKind(), GetICState(), GetExtraICState(), GetStubType());
  Handle<Code> new_object = factory->NewCode(
      desc, flags, masm.CodeObject(), NeedsImmovableCode());
  return new_object;
}


// Compiles a hydrogen code stub: uninitialized stubs take the light-weight
// miss path above; otherwise the Hydrogen graph is built and optimized.
// Stub type is deduced from the argument, so callers write DoGenerateCode(this).
template <class Stub>
static Handle<Code> DoGenerateCode(Stub* stub) {
  Isolate* isolate = stub->isolate();
  CodeStubDescriptor descriptor(stub);

  // If we are uninitialized we can use a light-weight stub to enter
  // the runtime that is significantly faster than using the standard
  // stub-failure deopt mechanism.
  if (stub->IsUninitialized() && descriptor.has_miss_handler()) {
    DCHECK(!descriptor.stack_parameter_count().is_valid());
    return stub->GenerateLightweightMissCode(descriptor.miss_handler());
  }
  base::ElapsedTimer timer;
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    timer.Start();
  }
  Zone zone;
  CompilationInfo info(stub, isolate, &zone);
  CodeStubGraphBuilder<Stub> builder(&info);
  LChunk* chunk = OptimizeGraph(builder.CreateGraph());
  Handle<Code> code = chunk->Codegen();
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    OFStream os(stdout);
    os << "[Lazy compilation of " << stub << " took "
       << timer.Elapsed().InMillisecondsF() << " ms]" << std::endl;
  }
  return code;
}


template <>
HValue* CodeStubGraphBuilder<NumberToStringStub>::BuildCodeStub() {
  info()->MarkAsSavesCallerDoubles();
  HValue* number = GetParameter(NumberToStringStub::kNumber);
  return BuildNumberToString(number, Type::Number(zone()));
}


Handle<Code> NumberToStringStub::GenerateCode() {
  return DoGenerateCode(this);
}


// Returns the type string of a value; see ECMA-262, 11.4.3 (p 47).
template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { Factory* factory = isolate()->factory(); HConstant* number_string = Add(factory->number_string()); HValue* object = GetParameter(TypeofStub::kObject); IfBuilder is_smi(this); HValue* smi_check = is_smi.If(object); is_smi.Then(); { Push(number_string); } is_smi.Else(); { IfBuilder is_number(this); is_number.If(object, isolate()->factory()->heap_number_map()); is_number.Then(); { Push(number_string); } is_number.Else(); { HValue* map = AddLoadMap(object, smi_check); HValue* instance_type = Add( map, nullptr, HObjectAccess::ForMapInstanceType()); IfBuilder is_string(this); is_string.If( instance_type, Add(FIRST_NONSTRING_TYPE), Token::LT); is_string.Then(); { Push(Add(factory->string_string())); } is_string.Else(); { HConstant* object_string = Add(factory->object_string()); IfBuilder is_oddball(this); is_oddball.If( instance_type, Add(ODDBALL_TYPE), Token::EQ); is_oddball.Then(); { Push(Add(object, nullptr, HObjectAccess::ForOddballTypeOf())); } is_oddball.Else(); { IfBuilder is_symbol(this); is_symbol.If( instance_type, Add(SYMBOL_TYPE), Token::EQ); is_symbol.Then(); { Push(Add(factory->symbol_string())); } is_symbol.Else(); { HValue* bit_field = Add( map, nullptr, HObjectAccess::ForMapBitField()); HValue* bit_field_masked = AddUncasted( Token::BIT_AND, bit_field, Add((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable))); IfBuilder is_function(this); is_function.If( bit_field_masked, Add(1 << Map::kIsCallable), Token::EQ); is_function.Then(); { Push(Add(factory->function_string())); } is_function.Else(); { #define SIMD128_BUILDER_OPEN(TYPE, Type, type, lane_count, lane_type) \ IfBuilder is_##type(this); \ is_##type.If( \ map, Add(factory->type##_map())); \ is_##type.Then(); \ { Push(Add(factory->type##_string())); } \ is_##type.Else(); { SIMD128_TYPES(SIMD128_BUILDER_OPEN) #undef SIMD128_BUILDER_OPEN // Is it an undetectable object? 
IfBuilder is_undetectable(this); is_undetectable.If( bit_field_masked, graph()->GetConstant0(), Token::NE); is_undetectable.Then(); { // typeof an undetectable object is 'undefined'. Push(Add(factory->undefined_string())); } is_undetectable.Else(); { // For any kind of object not handled above, the spec rule for // host objects gives that it is okay to return "object". Push(object_string); } #define SIMD128_BUILDER_CLOSE(TYPE, Type, type, lane_count, lane_type) } SIMD128_TYPES(SIMD128_BUILDER_CLOSE) #undef SIMD128_BUILDER_CLOSE } is_function.End(); } is_symbol.End(); } is_oddball.End(); } is_string.End(); } is_number.End(); } is_smi.End(); return environment()->Pop(); } Handle TypeofStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* closure = GetParameter(0); HValue* literal_index = GetParameter(1); // This stub is very performance sensitive, the generated code must be tuned // so that it doesn't build and eager frame. 
info()->MarkMustNotHaveEagerFrame(); HValue* literals_array = Add( closure, nullptr, HObjectAccess::ForLiteralsPointer()); HInstruction* boilerplate = Add( literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS, NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag); IfBuilder if_notundefined(this); if_notundefined.IfNot( boilerplate, graph()->GetConstantUndefined()); if_notundefined.Then(); { int result_size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; HValue* result = Add(Add(result_size), HType::JSObject(), NOT_TENURED, JS_REGEXP_TYPE); Add( result, HObjectAccess::ForMap(), Add(boilerplate, nullptr, HObjectAccess::ForMap())); Add( result, HObjectAccess::ForPropertiesPointer(), Add(boilerplate, nullptr, HObjectAccess::ForPropertiesPointer())); Add( result, HObjectAccess::ForElementsPointer(), Add(boilerplate, nullptr, HObjectAccess::ForElementsPointer())); for (int offset = JSObject::kHeaderSize; offset < result_size; offset += kPointerSize) { HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(offset); Add(result, access, Add(boilerplate, nullptr, access)); } Push(result); } if_notundefined.ElseDeopt(Deoptimizer::kUninitializedBoilerplateInFastClone); if_notundefined.End(); return Pop(); } Handle FastCloneRegExpStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { Factory* factory = isolate()->factory(); HValue* undefined = graph()->GetConstantUndefined(); AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode(); HValue* closure = GetParameter(0); HValue* literal_index = GetParameter(1); // This stub is very performance sensitive, the generated code must be tuned // so that it doesn't build and eager frame. 
info()->MarkMustNotHaveEagerFrame(); HValue* literals_array = Add( closure, nullptr, HObjectAccess::ForLiteralsPointer()); HInstruction* allocation_site = Add( literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS, NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag); IfBuilder checker(this); checker.IfNot(allocation_site, undefined); checker.Then(); HObjectAccess access = HObjectAccess::ForAllocationSiteOffset( AllocationSite::kTransitionInfoOffset); HInstruction* boilerplate = Add(allocation_site, nullptr, access); HValue* elements = AddLoadElements(boilerplate); HValue* capacity = AddLoadFixedArrayLength(elements); IfBuilder zero_capacity(this); zero_capacity.If(capacity, graph()->GetConstant0(), Token::EQ); zero_capacity.Then(); Push(BuildCloneShallowArrayEmpty(boilerplate, allocation_site, alloc_site_mode)); zero_capacity.Else(); IfBuilder if_fixed_cow(this); if_fixed_cow.If(elements, factory->fixed_cow_array_map()); if_fixed_cow.Then(); Push(BuildCloneShallowArrayCow(boilerplate, allocation_site, alloc_site_mode, FAST_ELEMENTS)); if_fixed_cow.Else(); IfBuilder if_fixed(this); if_fixed.If(elements, factory->fixed_array_map()); if_fixed.Then(); Push(BuildCloneShallowArrayNonEmpty(boilerplate, allocation_site, alloc_site_mode, FAST_ELEMENTS)); if_fixed.Else(); Push(BuildCloneShallowArrayNonEmpty(boilerplate, allocation_site, alloc_site_mode, FAST_DOUBLE_ELEMENTS)); if_fixed.End(); if_fixed_cow.End(); zero_capacity.End(); checker.ElseDeopt(Deoptimizer::kUninitializedBoilerplateLiterals); checker.End(); return environment()->Pop(); } Handle FastCloneShallowArrayStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* undefined = graph()->GetConstantUndefined(); HValue* closure = GetParameter(0); HValue* literal_index = GetParameter(1); HValue* literals_array = Add( closure, nullptr, HObjectAccess::ForLiteralsPointer()); HInstruction* allocation_site = Add( 
literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS, NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag); IfBuilder checker(this); checker.IfNot(allocation_site, undefined); checker.And(); HObjectAccess access = HObjectAccess::ForAllocationSiteOffset( AllocationSite::kTransitionInfoOffset); HInstruction* boilerplate = Add(allocation_site, nullptr, access); int length = casted_stub()->length(); if (length == 0) { // Empty objects have some slack added to them. length = JSObject::kInitialGlobalObjectUnusedPropertiesCount; } int size = JSObject::kHeaderSize + length * kPointerSize; int object_size = size; if (FLAG_allocation_site_pretenuring) { size += AllocationMemento::kSize; } HValue* boilerplate_map = Add(boilerplate, nullptr, HObjectAccess::ForMap()); HValue* boilerplate_size = Add( boilerplate_map, nullptr, HObjectAccess::ForMapInstanceSize()); HValue* size_in_words = Add(object_size >> kPointerSizeLog2); checker.If(boilerplate_size, size_in_words, Token::EQ); checker.Then(); HValue* size_in_bytes = Add(size); HInstruction* object = Add(size_in_bytes, HType::JSObject(), NOT_TENURED, JS_OBJECT_TYPE); for (int i = 0; i < object_size; i += kPointerSize) { HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(i); Add(object, access, Add(boilerplate, nullptr, access)); } DCHECK(FLAG_allocation_site_pretenuring || (size == object_size)); if (FLAG_allocation_site_pretenuring) { BuildCreateAllocationMemento( object, Add(object_size), allocation_site); } environment()->Push(object); checker.ElseDeopt(Deoptimizer::kUninitializedBoilerplateInFastClone); checker.End(); return environment()->Pop(); } Handle FastCloneShallowObjectStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { // This stub is performance sensitive, the generated code must be tuned // so that it doesn't build an eager frame. 
info()->MarkMustNotHaveEagerFrame(); HValue* size = Add(AllocationSite::kSize); HInstruction* object = Add(size, HType::JSObject(), TENURED, JS_OBJECT_TYPE); // Store the map Handle allocation_site_map = isolate()->factory()->allocation_site_map(); AddStoreMapConstant(object, allocation_site_map); // Store the payload (smi elements kind) HValue* initial_elements_kind = Add(GetInitialFastElementsKind()); Add(object, HObjectAccess::ForAllocationSiteOffset( AllocationSite::kTransitionInfoOffset), initial_elements_kind); // Unlike literals, constructed arrays don't have nested sites Add(object, HObjectAccess::ForAllocationSiteOffset( AllocationSite::kNestedSiteOffset), graph()->GetConstant0()); // Pretenuring calculation field. Add(object, HObjectAccess::ForAllocationSiteOffset( AllocationSite::kPretenureDataOffset), graph()->GetConstant0()); // Pretenuring memento creation count field. Add(object, HObjectAccess::ForAllocationSiteOffset( AllocationSite::kPretenureCreateCountOffset), graph()->GetConstant0()); // Store an empty fixed array for the code dependency. HConstant* empty_fixed_array = Add(isolate()->factory()->empty_fixed_array()); Add( object, HObjectAccess::ForAllocationSiteOffset( AllocationSite::kDependentCodeOffset), empty_fixed_array); // Link the object to the allocation site list HValue* site_list = Add( ExternalReference::allocation_sites_list_address(isolate())); HValue* site = Add(site_list, nullptr, HObjectAccess::ForAllocationSiteList()); // TODO(mvstanton): This is a store to a weak pointer, which we may want to // mark as such in order to skip the write barrier, once we have a unified // system for weakness. For now we decided to keep it like this because having // an initial write barrier backed store makes this pointer strong until the // next GC, and allocation sites are designed to survive several GCs anyway. 
Add( object, HObjectAccess::ForAllocationSiteOffset(AllocationSite::kWeakNextOffset), site); Add(site_list, HObjectAccess::ForAllocationSiteList(), object); HInstruction* feedback_vector = GetParameter(0); HInstruction* slot = GetParameter(1); Add(feedback_vector, slot, object, nullptr, FAST_ELEMENTS, INITIALIZING_STORE); return feedback_vector; } Handle CreateAllocationSiteStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { // This stub is performance sensitive, the generated code must be tuned // so that it doesn't build an eager frame. info()->MarkMustNotHaveEagerFrame(); HValue* size = Add(WeakCell::kSize); HInstruction* object = Add(size, HType::JSObject(), TENURED, JS_OBJECT_TYPE); Handle weak_cell_map = isolate()->factory()->weak_cell_map(); AddStoreMapConstant(object, weak_cell_map); HInstruction* value = GetParameter(CreateWeakCellDescriptor::kValueIndex); Add(object, HObjectAccess::ForWeakCellValue(), value); Add(object, HObjectAccess::ForWeakCellNext(), graph()->GetConstantHole()); HInstruction* feedback_vector = GetParameter(CreateWeakCellDescriptor::kVectorIndex); HInstruction* slot = GetParameter(CreateWeakCellDescriptor::kSlotIndex); Add(feedback_vector, slot, object, nullptr, FAST_ELEMENTS, INITIALIZING_STORE); return graph()->GetConstant0(); } Handle CreateWeakCellStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { int context_index = casted_stub()->context_index(); int slot_index = casted_stub()->slot_index(); HValue* script_context = BuildGetScriptContext(context_index); return Add(script_context, nullptr, HObjectAccess::ForContextSlot(slot_index)); } Handle LoadScriptContextFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { int context_index = casted_stub()->context_index(); int slot_index = casted_stub()->slot_index(); HValue* script_context = 
BuildGetScriptContext(context_index); Add(script_context, HObjectAccess::ForContextSlot(slot_index), GetParameter(2), STORE_TO_INITIALIZED_ENTRY); return GetParameter(2); } Handle StoreScriptContextFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); if (IsFastDoubleElementsKind(kind)) { info()->MarkAsSavesCallerDoubles(); } HValue* object = GetParameter(GrowArrayElementsDescriptor::kObjectIndex); HValue* key = GetParameter(GrowArrayElementsDescriptor::kKeyIndex); HValue* elements = AddLoadElements(object); HValue* current_capacity = Add( elements, nullptr, HObjectAccess::ForFixedArrayLength()); HValue* length = casted_stub()->is_js_array() ? Add(object, static_cast(NULL), HObjectAccess::ForArrayLength(kind)) : current_capacity; return BuildCheckAndGrowElementsCapacity(object, elements, kind, length, current_capacity, key); } Handle GrowArrayElementsStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { LoadKeyedHoleMode hole_mode = casted_stub()->convert_hole_to_undefined() ? CONVERT_HOLE_TO_UNDEFINED : NEVER_RETURN_HOLE; HInstruction* load = BuildUncheckedMonomorphicElementAccess( GetParameter(LoadDescriptor::kReceiverIndex), GetParameter(LoadDescriptor::kNameIndex), NULL, casted_stub()->is_js_array(), casted_stub()->elements_kind(), LOAD, hole_mode, STANDARD_STORE); return load; } Handle LoadFastElementStub::GenerateCode() { return DoGenerateCode(this); } HLoadNamedField* CodeStubGraphBuilderBase::BuildLoadNamedField( HValue* object, FieldIndex index) { Representation representation = index.is_double() ? Representation::Double() : Representation::Tagged(); int offset = index.offset(); HObjectAccess access = index.is_inobject() ? 
HObjectAccess::ForObservableJSObjectOffset(offset, representation) : HObjectAccess::ForBackingStoreOffset(offset, representation); if (index.is_double() && (!FLAG_unbox_double_fields || !index.is_inobject())) { // Load the heap number. object = Add( object, nullptr, access.WithRepresentation(Representation::Tagged())); // Load the double value from it. access = HObjectAccess::ForHeapNumberValue(); } return Add(object, nullptr, access); } template<> HValue* CodeStubGraphBuilder::BuildCodeStub() { return BuildLoadNamedField(GetParameter(0), casted_stub()->index()); } Handle LoadFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { return BuildArrayBufferViewFieldAccessor(GetParameter(0), nullptr, casted_stub()->index()); } Handle ArrayBufferViewLoadFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* map = AddLoadMap(GetParameter(0), NULL); HObjectAccess descriptors_access = HObjectAccess::ForObservableJSObjectOffset( Map::kDescriptorsOffset, Representation::Tagged()); HValue* descriptors = Add(map, nullptr, descriptors_access); HObjectAccess value_access = HObjectAccess::ForObservableJSObjectOffset( DescriptorArray::GetValueOffset(casted_stub()->constant_index())); return Add(descriptors, nullptr, value_access); } Handle LoadConstantStub::GenerateCode() { return DoGenerateCode(this); } HValue* CodeStubGraphBuilderBase::UnmappedCase(HValue* elements, HValue* key, HValue* value) { HValue* result = NULL; HInstruction* backing_store = Add(elements, graph()->GetConstant1(), nullptr, nullptr, FAST_ELEMENTS, ALLOW_RETURN_HOLE); Add(backing_store, isolate()->factory()->fixed_array_map()); HValue* backing_store_length = Add( backing_store, nullptr, HObjectAccess::ForFixedArrayLength()); IfBuilder in_unmapped_range(this); in_unmapped_range.If(key, backing_store_length, Token::LT); in_unmapped_range.Then(); { if (value == NULL) { result = 
Add(backing_store, key, nullptr, nullptr, FAST_HOLEY_ELEMENTS, NEVER_RETURN_HOLE); } else { Add(backing_store, key, value, nullptr, FAST_HOLEY_ELEMENTS); } } in_unmapped_range.ElseDeopt(Deoptimizer::kOutsideOfRange); in_unmapped_range.End(); return result; } HValue* CodeStubGraphBuilderBase::EmitKeyedSloppyArguments(HValue* receiver, HValue* key, HValue* value) { // Mapped arguments are actual arguments. Unmapped arguments are values added // to the arguments object after it was created for the call. Mapped arguments // are stored in the context at indexes given by elements[key + 2]. Unmapped // arguments are stored as regular indexed properties in the arguments array, // held at elements[1]. See NewSloppyArguments() in runtime.cc for a detailed // look at argument object construction. // // The sloppy arguments elements array has a special format: // // 0: context // 1: unmapped arguments array // 2: mapped_index0, // 3: mapped_index1, // ... // // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments). // If key + 2 >= elements.length then attempt to look in the unmapped // arguments array (given by elements[1]) and return the value at key, missing // to the runtime if the unmapped arguments array is not a fixed array or if // key >= unmapped_arguments_array.length. // // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value // in the unmapped arguments array, as described above. Otherwise, t is a Smi // index into the context array given at elements[0]. Return the value at // context[t]. 
bool is_load = value == NULL; key = AddUncasted(key, Representation::Smi()); IfBuilder positive_smi(this); positive_smi.If(key, graph()->GetConstant0(), Token::LT); positive_smi.ThenDeopt(Deoptimizer::kKeyIsNegative); positive_smi.End(); HValue* constant_two = Add(2); HValue* elements = AddLoadElements(receiver, nullptr); HValue* elements_length = Add( elements, nullptr, HObjectAccess::ForFixedArrayLength()); HValue* adjusted_length = AddUncasted(elements_length, constant_two); IfBuilder in_range(this); in_range.If(key, adjusted_length, Token::LT); in_range.Then(); { HValue* index = AddUncasted(key, constant_two); HInstruction* mapped_index = Add(elements, index, nullptr, nullptr, FAST_HOLEY_ELEMENTS, ALLOW_RETURN_HOLE); IfBuilder is_valid(this); is_valid.IfNot(mapped_index, graph()->GetConstantHole()); is_valid.Then(); { // TODO(mvstanton): I'd like to assert from this point, that if the // mapped_index is not the hole that it is indeed, a smi. An unnecessary // smi check is being emitted. HValue* the_context = Add(elements, graph()->GetConstant0(), nullptr, nullptr, FAST_ELEMENTS); STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize); if (is_load) { HValue* result = Add(the_context, mapped_index, nullptr, nullptr, FAST_ELEMENTS, ALLOW_RETURN_HOLE); environment()->Push(result); } else { DCHECK(value != NULL); Add(the_context, mapped_index, value, nullptr, FAST_ELEMENTS); environment()->Push(value); } } is_valid.Else(); { HValue* result = UnmappedCase(elements, key, value); environment()->Push(is_load ? result : value); } is_valid.End(); } in_range.Else(); { HValue* result = UnmappedCase(elements, key, value); environment()->Push(is_load ? 
result : value); } in_range.End(); return environment()->Pop(); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex); HValue* key = GetParameter(LoadDescriptor::kNameIndex); return EmitKeyedSloppyArguments(receiver, key, NULL); } Handle KeyedLoadSloppyArgumentsStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* receiver = GetParameter(StoreDescriptor::kReceiverIndex); HValue* key = GetParameter(StoreDescriptor::kNameIndex); HValue* value = GetParameter(StoreDescriptor::kValueIndex); return EmitKeyedSloppyArguments(receiver, key, value); } Handle KeyedStoreSloppyArgumentsStub::GenerateCode() { return DoGenerateCode(this); } void CodeStubGraphBuilderBase::BuildStoreNamedField( HValue* object, HValue* value, FieldIndex index, Representation representation, bool transition_to_field) { DCHECK(!index.is_double() || representation.IsDouble()); int offset = index.offset(); HObjectAccess access = index.is_inobject() ? HObjectAccess::ForObservableJSObjectOffset(offset, representation) : HObjectAccess::ForBackingStoreOffset(offset, representation); if (representation.IsDouble()) { if (!FLAG_unbox_double_fields || !index.is_inobject()) { HObjectAccess heap_number_access = access.WithRepresentation(Representation::Tagged()); if (transition_to_field) { // The store requires a mutable HeapNumber to be allocated. NoObservableSideEffectsScope no_side_effects(this); HInstruction* heap_number_size = Add(HeapNumber::kSize); // TODO(hpayer): Allocation site pretenuring support. HInstruction* heap_number = Add(heap_number_size, HType::HeapObject(), NOT_TENURED, MUTABLE_HEAP_NUMBER_TYPE); AddStoreMapConstant(heap_number, isolate()->factory()->mutable_heap_number_map()); Add(heap_number, HObjectAccess::ForHeapNumberValue(), value); // Store the new mutable heap number into the object. 
access = heap_number_access; value = heap_number; } else { // Load the heap number. object = Add(object, nullptr, heap_number_access); // Store the double value into it. access = HObjectAccess::ForHeapNumberValue(); } } } else if (representation.IsHeapObject()) { BuildCheckHeapObject(value); } Add(object, access, value, INITIALIZING_STORE); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BuildStoreNamedField(GetParameter(0), GetParameter(2), casted_stub()->index(), casted_stub()->representation(), false); return GetParameter(2); } Handle StoreFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* object = GetParameter(StoreTransitionHelper::ReceiverIndex()); switch (casted_stub()->store_mode()) { case StoreTransitionStub::ExtendStorageAndStoreMapAndValue: { HValue* properties = Add( object, nullptr, HObjectAccess::ForPropertiesPointer()); HValue* length = AddLoadFixedArrayLength(properties); HValue* delta = Add(static_cast(JSObject::kFieldsAdded)); HValue* new_capacity = AddUncasted(length, delta); // Grow properties array. ElementsKind kind = FAST_ELEMENTS; Add(new_capacity, Add((Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) >> ElementsKindToShiftSize(kind))); // Reuse this code for properties backing store allocation. HValue* new_properties = BuildAllocateAndInitializeArray(kind, new_capacity); BuildCopyProperties(properties, new_properties, length, new_capacity); Add(object, HObjectAccess::ForPropertiesPointer(), new_properties); } // Fall through. case StoreTransitionStub::StoreMapAndValue: // Store the new value into the "extended" object. BuildStoreNamedField( object, GetParameter(StoreTransitionHelper::ValueIndex()), casted_stub()->index(), casted_stub()->representation(), true); // Fall through. case StoreTransitionStub::StoreMapOnly: // And finally update the map. 
Add(object, HObjectAccess::ForMap(), GetParameter(StoreTransitionHelper::MapIndex())); break; } return GetParameter(StoreTransitionHelper::ValueIndex()); } Handle StoreTransitionStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BuildUncheckedMonomorphicElementAccess( GetParameter(StoreDescriptor::kReceiverIndex), GetParameter(StoreDescriptor::kNameIndex), GetParameter(StoreDescriptor::kValueIndex), casted_stub()->is_js_array(), casted_stub()->elements_kind(), STORE, NEVER_RETURN_HOLE, casted_stub()->store_mode()); return GetParameter(2); } Handle StoreFastElementStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { info()->MarkAsSavesCallerDoubles(); BuildTransitionElementsKind(GetParameter(0), GetParameter(1), casted_stub()->from_kind(), casted_stub()->to_kind(), casted_stub()->is_js_array()); return GetParameter(0); } Handle TransitionElementsKindStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(Add(HeapNumber::kSize), HType::HeapNumber(), NOT_TENURED, HEAP_NUMBER_TYPE); AddStoreMapConstant(result, isolate()->factory()->heap_number_map()); return result; } Handle AllocateHeapNumberStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(Add(HeapNumber::kSize), HType::HeapObject(), NOT_TENURED, MUTABLE_HEAP_NUMBER_TYPE); AddStoreMapConstant(result, isolate()->factory()->mutable_heap_number_map()); return result; } Handle AllocateMutableHeapNumberStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(GetParameter(0), HType::Tagged(), NOT_TENURED, JS_OBJECT_TYPE); return result; } Handle AllocateInNewSpaceStub::GenerateCode() { return DoGenerateCode(this); } HValue* 
CodeStubGraphBuilderBase::BuildArrayConstructor( ElementsKind kind, AllocationSiteOverrideMode override_mode, ArgumentClass argument_class) { HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor); HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite); JSArrayBuilder array_builder(this, kind, alloc_site, constructor, override_mode); HValue* result = NULL; switch (argument_class) { case NONE: // This stub is very performance sensitive, the generated code must be // tuned so that it doesn't build and eager frame. info()->MarkMustNotHaveEagerFrame(); result = array_builder.AllocateEmptyArray(); break; case SINGLE: result = BuildArraySingleArgumentConstructor(&array_builder); break; case MULTIPLE: result = BuildArrayNArgumentsConstructor(&array_builder, kind); break; } return result; } HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor( ElementsKind kind, ArgumentClass argument_class) { HValue* constructor = GetParameter( InternalArrayConstructorStubBase::kConstructor); JSArrayBuilder array_builder(this, kind, constructor); HValue* result = NULL; switch (argument_class) { case NONE: // This stub is very performance sensitive, the generated code must be // tuned so that it doesn't build and eager frame. info()->MarkMustNotHaveEagerFrame(); result = array_builder.AllocateEmptyArray(); break; case SINGLE: result = BuildArraySingleArgumentConstructor(&array_builder); break; case MULTIPLE: result = BuildArrayNArgumentsConstructor(&array_builder, kind); break; } return result; } HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor( JSArrayBuilder* array_builder) { // Smi check and range check on the input arg. 
// NOTE(review): This chunk appears to be V8's code-stubs-hydrogen.cc after an
// HTML/text extraction step that stripped every template argument list
// ('<...>') -- e.g. 'Add(false)' was presumably 'Add<HArgumentsElements>(false)',
// 'Handle' was 'Handle<Code>', and each 'CodeStubGraphBuilder' carried a stub
// type parameter -- TODO confirm against upstream V8 before compiling. The
// tokens below are preserved verbatim; only line breaks were restored (the
// collapse had left '//' comments swallowing the code that followed them) and
// review comments added. The fragment below the first '}' is the tail of an
// array-constructor builder whose head lies before this chunk.
HValue* constant_one = graph()->GetConstant1();
HValue* constant_zero = graph()->GetConstant0();
HInstruction* elements = Add(false);
HInstruction* argument = Add( elements, constant_one, constant_zero);
return BuildAllocateArrayFromLength(array_builder, argument);
}

// Builds the graph for the N-argument Array constructor: bounds-checks the
// argument count, allocates the backing store, then copies the incoming
// arguments into the new array's elements.
HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor( JSArrayBuilder* array_builder, ElementsKind kind) {
  // Insert a bounds check because the number of arguments might exceed
  // the kInitialMaxFastElementArray limit. This cannot happen for code
  // that was parsed, but calling via Array.apply(thisArg, [...]) might
  // trigger it.
  HValue* length = GetArgumentsLength();
  HConstant* max_alloc_length = Add(JSArray::kInitialMaxFastElementArray);
  HValue* checked_length = Add(length, max_alloc_length);
  // We need to fill with the hole if it's a smi array in the multi-argument
  // case because we might have to bail out while copying arguments into
  // the array because they aren't compatible with a smi array.
  // If it's a double array, no problem, and if it's fast then no
  // problem either because doubles are boxed.
  //
  // TODO(mvstanton): consider an instruction to memset fill the array
  // with zero in this case instead.
  JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind) ? JSArrayBuilder::FILL_WITH_HOLE : JSArrayBuilder::DONT_FILL_WITH_HOLE;
  HValue* new_object = array_builder->AllocateArray(checked_length, max_alloc_length, checked_length, fill_mode);
  HValue* elements = array_builder->GetElementsLocation();
  DCHECK(elements != NULL);
  // Now populate the elements correctly.
  LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
  HValue* start = graph()->GetConstant0();
  HValue* key = builder.BeginBody(start, checked_length, Token::LT);
  HInstruction* argument_elements = Add(false);
  HInstruction* argument = Add( argument_elements, checked_length, key);
  Add(elements, key, argument, nullptr, kind);
  builder.EndBody();
  return new_object;
}

// The six Array/InternalArray constructor stubs below all delegate to
// BuildArrayConstructor / BuildInternalArrayConstructor with an argument-count
// mode (NONE / SINGLE / MULTIPLE). Their distinguishing stub types were lost
// with the stripped template arguments -- TODO confirm against upstream.
template <> HValue* CodeStubGraphBuilder::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, NONE);
}

Handle ArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); }

template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, SINGLE);
}

Handle ArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); }

template <> HValue* CodeStubGraphBuilder::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, MULTIPLE);
}

Handle ArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); }

template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, NONE);
}

Handle InternalArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); }

template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, SINGLE);
}

Handle InternalArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); }

template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, MULTIPLE);
}

Handle InternalArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); }

// CompareNilIC: compares the single parameter against nil-like values using
// the type feedback recorded in the stub's state.
template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() {
  Isolate* isolate = graph()->isolate();
  CompareNilICStub* stub = casted_stub();
  HIfContinuation continuation;
  Handle sentinel_map(isolate->heap()->meta_map());
  Type* type = stub->GetType(zone(), sentinel_map);
  BuildCompareNil(GetParameter(0), type, &continuation, kEmbedMapsViaWeakCells);
  IfBuilder if_nil(this, &continuation);
  if_nil.Then();
  if (continuation.IsFalseReachable()) {
    if_nil.Else();
    if_nil.Return(graph()->GetConstantFalse());
  }
  if_nil.End();
  return continuation.IsTrueReachable() ? graph()->GetConstantTrue() : graph()->GetConstantUndefined();
}

Handle CompareNilICStub::GenerateCode() { return DoGenerateCode(this); }

// BinaryOpIC: builds the binary operation using the left/right/result types
// from the IC state, with a special fast path for generic ADD where one
// operand might (but is not known to) be a string.
template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() {
  BinaryOpICState state = casted_stub()->state();
  HValue* left = GetParameter(BinaryOpICStub::kLeft);
  HValue* right = GetParameter(BinaryOpICStub::kRight);
  Type* left_type = state.GetLeftType();
  Type* right_type = state.GetRightType();
  Type* result_type = state.GetResultType();
  DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) && (state.HasSideEffects() || !result_type->Is(Type::None())));
  HValue* result = NULL;
  HAllocationMode allocation_mode(NOT_TENURED);
  if (state.op() == Token::ADD && (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) && !left_type->Is(Type::String()) && !right_type->Is(Type::String())) {
    // For the generic add stub a fast case for string addition is performance
    // critical.
    if (left_type->Maybe(Type::String())) {
      IfBuilder if_leftisstring(this);
      if_leftisstring.If(left);
      if_leftisstring.Then();
      {
        Push(BuildBinaryOperation(state.op(), left, right, Type::String(zone()), right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()));
      }
      if_leftisstring.Else();
      {
        Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()));
      }
      if_leftisstring.End();
      result = Pop();
    } else {
      IfBuilder if_rightisstring(this);
      if_rightisstring.If(right);
      if_rightisstring.Then();
      {
        Push(BuildBinaryOperation(state.op(), left, right, left_type, Type::String(zone()), result_type, state.fixed_right_arg(), allocation_mode, state.strength()));
      }
      if_rightisstring.Else();
      {
        Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()));
      }
      if_rightisstring.End();
      result = Pop();
    }
  } else {
    result = BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength());
  }
  // If we encounter a generic argument, the number conversion is
  // observable, thus we cannot afford to bail out after the fact.
  if (!state.HasSideEffects()) {
    result = EnforceNumberType(result, result_type);
  }
  return result;
}

Handle BinaryOpICStub::GenerateCode() { return DoGenerateCode(this); }

// Variant of BinaryOpIC that threads an allocation site through to the
// allocation mode.
template <> HValue* CodeStubGraphBuilder::BuildCodeStub() {
  BinaryOpICState state = casted_stub()->state();
  HValue* allocation_site = GetParameter( BinaryOpWithAllocationSiteStub::kAllocationSite);
  HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft);
  HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight);
  Type* left_type = state.GetLeftType();
  Type* right_type = state.GetRightType();
  Type* result_type = state.GetResultType();
  HAllocationMode allocation_mode(allocation_site);
  return BuildBinaryOperation(state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength());
}

Handle BinaryOpWithAllocationSiteStub::GenerateCode() { return DoGenerateCode(this); }

// Converts {input} to a string. With convert == false it only checks that the
// input already is a string; with convert == true it handles smis via
// NumberToString, passes strings through, and routes other primitives (after
// ToPrimitive for non-primitives) through the ToString stub.
HValue* CodeStubGraphBuilderBase::BuildToString(HValue* input, bool convert) {
  if (!convert) return BuildCheckString(input);
  IfBuilder if_inputissmi(this);
  HValue* inputissmi = if_inputissmi.If(input);
  if_inputissmi.Then();
  {
    // Convert the input smi to a string.
    Push(BuildNumberToString(input, Type::SignedSmall()));
  }
  if_inputissmi.Else();
  {
    HValue* input_map = Add(input, inputissmi, HObjectAccess::ForMap());
    HValue* input_instance_type = Add( input_map, inputissmi, HObjectAccess::ForMapInstanceType());
    IfBuilder if_inputisstring(this);
    if_inputisstring.If( input_instance_type, Add(FIRST_NONSTRING_TYPE), Token::LT);
    if_inputisstring.Then();
    {
      // The input is already a string.
      Push(input);
    }
    if_inputisstring.Else();
    {
      // Convert to primitive first (if necessary), see
      // ES6 section 12.7.3 The Addition operator.
      IfBuilder if_inputisprimitive(this);
      STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE);
      if_inputisprimitive.If( input_instance_type, Add(LAST_PRIMITIVE_TYPE), Token::LTE);
      if_inputisprimitive.Then();
      {
        // The input is already a primitive.
        Push(input);
      }
      if_inputisprimitive.Else();
      {
        // Convert the input to a primitive.
        Push(BuildToPrimitive(input, input_map));
      }
      if_inputisprimitive.End();
      // Convert the primitive to a string value.
      ToStringDescriptor descriptor(isolate());
      ToStringStub stub(isolate());
      HValue* values[] = {context(), Pop()};
      Push(AddUncasted( Add(stub.GetCode()), 0, descriptor, Vector(values, arraysize(values))));
    }
    if_inputisstring.End();
  }
  if_inputissmi.End();
  return Pop();
}

// ToPrimitive with a fast path for unmodified String wrapper objects; all
// other inputs fall back to the %ToPrimitive runtime function.
HValue* CodeStubGraphBuilderBase::BuildToPrimitive(HValue* input, HValue* input_map) {
  // Get the native context of the caller.
  HValue* native_context = BuildGetNativeContext();
  // Determine the initial map of the %ObjectPrototype%.
  HValue* object_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::OBJECT_FUNCTION_PROTOTYPE_MAP_INDEX));
  // Determine the initial map of the %StringPrototype%.
  HValue* string_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
  // Determine the initial map of the String function.
  HValue* string_function = Add( native_context, nullptr, HObjectAccess::ForContextSlot(Context::STRING_FUNCTION_INDEX));
  HValue* string_function_initial_map = Add( string_function, nullptr, HObjectAccess::ForPrototypeOrInitialMap());
  // Determine the map of the [[Prototype]] of {input}.
  HValue* input_prototype = Add(input_map, nullptr, HObjectAccess::ForPrototype());
  HValue* input_prototype_map = Add(input_prototype, nullptr, HObjectAccess::ForMap());
  // For string wrappers (JSValue instances with [[StringData]] internal
  // fields), we can shortcirciut the ToPrimitive if
  //
  // (a) the {input} map matches the initial map of the String function,
  // (b) the {input} [[Prototype]] is the unmodified %StringPrototype% (i.e.
  //     no one monkey-patched toString, @@toPrimitive or valueOf), and
  // (c) the %ObjectPrototype% (i.e. the [[Prototype]] of the
  //     %StringPrototype%) is also unmodified, that is no one sneaked a
  //     @@toPrimitive into the %ObjectPrototype%.
  //
  // If all these assumptions hold, we can just take the [[StringData]] value
  // and return it.
  // TODO(bmeurer): This just repairs a regression introduced by removing the
  // weird (and broken) intrinsic %_IsStringWrapperSafeForDefaultValue, which
  // was intendend to something similar to this, although less efficient and
  // wrong in the presence of @@toPrimitive. Long-term we might want to move
  // into the direction of having a ToPrimitiveStub that can do common cases
  // while staying in JavaScript land (i.e. not going to C++).
  IfBuilder if_inputisstringwrapper(this);
  if_inputisstringwrapper.If( input_map, string_function_initial_map);
  if_inputisstringwrapper.And();
  if_inputisstringwrapper.If( input_prototype_map, string_function_prototype_map);
  if_inputisstringwrapper.And();
  if_inputisstringwrapper.If( Add(Add(input_prototype_map, nullptr, HObjectAccess::ForPrototype()), nullptr, HObjectAccess::ForMap()), object_function_prototype_map);
  if_inputisstringwrapper.Then();
  {
    Push(BuildLoadNamedField( input, FieldIndex::ForInObjectOffset(JSValue::kValueOffset)));
  }
  if_inputisstringwrapper.Else();
  {
    // TODO(bmeurer): Add support for fast ToPrimitive conversion using
    // a dedicated ToPrimitiveStub.
    Add(input);
    Push(Add(Runtime::FunctionForId(Runtime::kToPrimitive), 1));
  }
  if_inputisstringwrapper.End();
  return Pop();
}

// StringAdd: coerces either operand to a string if the stub flags request it,
// then performs the concatenation with the stub's pretenure policy.
template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() {
  StringAddStub* stub = casted_stub();
  StringAddFlags flags = stub->flags();
  PretenureFlag pretenure_flag = stub->pretenure_flag();
  HValue* left = GetParameter(StringAddStub::kLeft);
  HValue* right = GetParameter(StringAddStub::kRight);
  // Make sure that both arguments are strings if not known in advance.
  if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) {
    left = BuildToString(left, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT);
  }
  if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) {
    right = BuildToString(right, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT);
  }
  return BuildStringAdd(left, right, HAllocationMode(pretenure_flag));
}

Handle StringAddStub::GenerateCode() { return DoGenerateCode(this); }

// ToBoolean: returns true iff the parameter matches the stub's recorded types.
template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() {
  ToBooleanStub* stub = casted_stub();
  IfBuilder if_true(this);
  if_true.If(GetParameter(0), stub->types());
  if_true.Then();
  if_true.Return(graph()->GetConstantTrue());
  if_true.Else();
  if_true.End();
  return graph()->GetConstantFalse();
}

Handle ToBooleanStub::GenerateCode() { return DoGenerateCode(this); }

// StoreGlobal: optionally guards on the global object's map (via a weak-cell
// placeholder patched later), then stores through the property cell.
template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() {
  StoreGlobalStub* stub = casted_stub();
  HParameter* value = GetParameter(StoreDescriptor::kValueIndex);
  if (stub->check_global()) {
    // Check that the map of the global has not changed: use a placeholder map
    // that will be replaced later with the global object's map.
    HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex);
    HValue* proxy_map = Add(proxy, nullptr, HObjectAccess::ForMap());
    HValue* global = Add(proxy_map, nullptr, HObjectAccess::ForPrototype());
    HValue* map_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::global_map_placeholder(isolate())));
    HValue* expected_map = Add( map_cell, nullptr, HObjectAccess::ForWeakCellValue());
    HValue* map = Add(global, nullptr, HObjectAccess::ForMap());
    IfBuilder map_check(this);
    map_check.IfNot(expected_map, map);
    map_check.ThenDeopt(Deoptimizer::kUnknownMap);
    map_check.End();
  }
  HValue* weak_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::property_cell_placeholder(isolate())));
  HValue* cell = Add(weak_cell, nullptr, HObjectAccess::ForWeakCellValue());
  Add
// NOTE(review): the extraction is discontinuous here -- the remainder of the
// StoreGlobalStub builder is missing, and everything from here to the end of
// this span duplicates the GenerateLightweightMissCode tail, DoGenerateCode,
// and NumberToStringStub block already present at the top of the file.
// Deduplicate against upstream V8 before building.
  new_object = factory->NewCode( desc, flags, masm.CodeObject(), NeedsImmovableCode());
  return new_object;
}

// Generic driver: either emits a light-weight miss stub (uninitialized case)
// or builds and optimizes a Hydrogen graph for the stub, timing the
// compilation when the profiling flag is set.
template static Handle DoGenerateCode(Stub* stub) {
  Isolate* isolate = stub->isolate();
  CodeStubDescriptor descriptor(stub);
  // If we are uninitialized we can use a light-weight stub to enter
  // the runtime that is significantly faster than using the standard
  // stub-failure deopt mechanism.
  if (stub->IsUninitialized() && descriptor.has_miss_handler()) {
    DCHECK(!descriptor.stack_parameter_count().is_valid());
    return stub->GenerateLightweightMissCode(descriptor.miss_handler());
  }
  base::ElapsedTimer timer;
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    timer.Start();
  }
  Zone zone;
  CompilationInfo info(stub, isolate, &zone);
  CodeStubGraphBuilder builder(&info);
  LChunk* chunk = OptimizeGraph(builder.CreateGraph());
  Handle code = chunk->Codegen();
  if (FLAG_profile_hydrogen_code_stub_compilation) {
    OFStream os(stdout);
    os << "[Lazy compilation of " << stub << " took " << timer.Elapsed().InMillisecondsF() << " ms]" << std::endl;
  }
  return code;
}

template <> HValue* CodeStubGraphBuilder::BuildCodeStub() {
  info()->MarkAsSavesCallerDoubles();
  HValue* number = GetParameter(NumberToStringStub::kNumber);
  return BuildNumberToString(number, Type::Number(zone()));
}

Handle NumberToStringStub::GenerateCode() { return DoGenerateCode(this); }

// Returns the type string of a value; see ECMA-262, 11.4.3 (p 47).
// NOTE(review): as in the rest of this chunk, all template argument lists were
// stripped by the extraction step ('Add(...)', 'Handle', 'GetParameter(...)',
// 'CodeStubGraphBuilder', 'static_cast(NULL)' all originally carried '<...>'
// parameters) -- restore from upstream V8 before compiling. Tokens are
// preserved verbatim; only line breaks were restored so that inline '//'
// comments no longer swallow the code after them, and comments were added.
//
// Typeof: a decision tree over the operand -- smi/heap-number -> "number",
// then string, oddball (typeof cached on the oddball), symbol, callable
// ("function"), the SIMD128 types (via the macros below), undetectable
// ("undefined"), and finally "object".
template <> HValue* CodeStubGraphBuilder::BuildCodeStub() {
  Factory* factory = isolate()->factory();
  HConstant* number_string = Add(factory->number_string());
  HValue* object = GetParameter(TypeofStub::kObject);
  IfBuilder is_smi(this);
  HValue* smi_check = is_smi.If(object);
  is_smi.Then();
  { Push(number_string); }
  is_smi.Else();
  {
    IfBuilder is_number(this);
    is_number.If(object, isolate()->factory()->heap_number_map());
    is_number.Then();
    { Push(number_string); }
    is_number.Else();
    {
      HValue* map = AddLoadMap(object, smi_check);
      HValue* instance_type = Add( map, nullptr, HObjectAccess::ForMapInstanceType());
      IfBuilder is_string(this);
      is_string.If( instance_type, Add(FIRST_NONSTRING_TYPE), Token::LT);
      is_string.Then();
      { Push(Add(factory->string_string())); }
      is_string.Else();
      {
        HConstant* object_string = Add(factory->object_string());
        IfBuilder is_oddball(this);
        is_oddball.If( instance_type, Add(ODDBALL_TYPE), Token::EQ);
        is_oddball.Then();
        { Push(Add(object, nullptr, HObjectAccess::ForOddballTypeOf())); }
        is_oddball.Else();
        {
          IfBuilder is_symbol(this);
          is_symbol.If( instance_type, Add(SYMBOL_TYPE), Token::EQ);
          is_symbol.Then();
          { Push(Add(factory->symbol_string())); }
          is_symbol.Else();
          {
            HValue* bit_field = Add( map, nullptr, HObjectAccess::ForMapBitField());
            HValue* bit_field_masked = AddUncasted( Token::BIT_AND, bit_field, Add((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
            IfBuilder is_function(this);
            is_function.If( bit_field_masked, Add(1 << Map::kIsCallable), Token::EQ);
            is_function.Then();
            { Push(Add(factory->function_string())); }
            is_function.Else();
            {
// Opens one nested IfBuilder per SIMD128 type; the matching closing braces
// are emitted by SIMD128_BUILDER_CLOSE below.
#define SIMD128_BUILDER_OPEN(TYPE, Type, type, lane_count, lane_type) \
  IfBuilder is_##type(this);                                          \
  is_##type.If(                                                       \
      map, Add(factory->type##_map()));                               \
  is_##type.Then();                                                   \
  { Push(Add(factory->type##_string())); }                            \
  is_##type.Else(); {
              SIMD128_TYPES(SIMD128_BUILDER_OPEN)
#undef SIMD128_BUILDER_OPEN
              // Is it an undetectable object?
              IfBuilder is_undetectable(this);
              is_undetectable.If( bit_field_masked, graph()->GetConstant0(), Token::NE);
              is_undetectable.Then();
              {
                // typeof an undetectable object is 'undefined'.
                Push(Add(factory->undefined_string()));
              }
              is_undetectable.Else();
              {
                // For any kind of object not handled above, the spec rule for
                // host objects gives that it is okay to return "object".
                Push(object_string);
              }
#define SIMD128_BUILDER_CLOSE(TYPE, Type, type, lane_count, lane_type) }
              SIMD128_TYPES(SIMD128_BUILDER_CLOSE)
#undef SIMD128_BUILDER_CLOSE
            }
            is_function.End();
          }
          is_symbol.End();
        }
        is_oddball.End();
      }
      is_string.End();
    }
    is_number.End();
  }
  is_smi.End();
  return environment()->Pop();
}

Handle TypeofStub::GenerateCode() { return DoGenerateCode(this); }

// FastCloneRegExp: clones the boilerplate JSRegExp from the literals array
// field-by-field, deopting if the boilerplate is still undefined.
template <> HValue* CodeStubGraphBuilder::BuildCodeStub() {
  HValue* closure = GetParameter(0);
  HValue* literal_index = GetParameter(1);
  // This stub is very performance sensitive, the generated code must be tuned
  // so that it doesn't build and eager frame.
  info()->MarkMustNotHaveEagerFrame();
  HValue* literals_array = Add( closure, nullptr, HObjectAccess::ForLiteralsPointer());
  HInstruction* boilerplate = Add( literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS, NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag);
  IfBuilder if_notundefined(this);
  if_notundefined.IfNot( boilerplate, graph()->GetConstantUndefined());
  if_notundefined.Then();
  {
    int result_size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
    HValue* result = Add(Add(result_size), HType::JSObject(), NOT_TENURED, JS_REGEXP_TYPE);
    Add( result, HObjectAccess::ForMap(), Add(boilerplate, nullptr, HObjectAccess::ForMap()));
    Add( result, HObjectAccess::ForPropertiesPointer(), Add(boilerplate, nullptr, HObjectAccess::ForPropertiesPointer()));
    Add( result, HObjectAccess::ForElementsPointer(), Add(boilerplate, nullptr, HObjectAccess::ForElementsPointer()));
    for (int offset = JSObject::kHeaderSize; offset < result_size; offset += kPointerSize) {
      HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(offset);
      Add(result, access, Add(boilerplate, nullptr, access));
    }
    Push(result);
  }
  if_notundefined.ElseDeopt(Deoptimizer::kUninitializedBoilerplateInFastClone);
  if_notundefined.End();
  return Pop();
}

Handle FastCloneRegExpStub::GenerateCode() { return DoGenerateCode(this); }

// FastCloneShallowArray: dispatches on the boilerplate's elements (empty /
// copy-on-write / fixed / double) to the matching shallow-clone helper.
template <> HValue* CodeStubGraphBuilder::BuildCodeStub() {
  Factory* factory = isolate()->factory();
  HValue* undefined = graph()->GetConstantUndefined();
  AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode();
  HValue* closure = GetParameter(0);
  HValue* literal_index = GetParameter(1);
  // This stub is very performance sensitive, the generated code must be tuned
  // so that it doesn't build and eager frame.
  info()->MarkMustNotHaveEagerFrame();
  HValue* literals_array = Add( closure, nullptr, HObjectAccess::ForLiteralsPointer());
  HInstruction* allocation_site = Add( literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS, NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag);
  IfBuilder checker(this);
  checker.IfNot(allocation_site, undefined);
  checker.Then();
  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset( AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate = Add(allocation_site, nullptr, access);
  HValue* elements = AddLoadElements(boilerplate);
  HValue* capacity = AddLoadFixedArrayLength(elements);
  IfBuilder zero_capacity(this);
  zero_capacity.If(capacity, graph()->GetConstant0(), Token::EQ);
  zero_capacity.Then();
  Push(BuildCloneShallowArrayEmpty(boilerplate, allocation_site, alloc_site_mode));
  zero_capacity.Else();
  IfBuilder if_fixed_cow(this);
  if_fixed_cow.If(elements, factory->fixed_cow_array_map());
  if_fixed_cow.Then();
  Push(BuildCloneShallowArrayCow(boilerplate, allocation_site, alloc_site_mode, FAST_ELEMENTS));
  if_fixed_cow.Else();
  IfBuilder if_fixed(this);
  if_fixed.If(elements, factory->fixed_array_map());
  if_fixed.Then();
  Push(BuildCloneShallowArrayNonEmpty(boilerplate, allocation_site, alloc_site_mode, FAST_ELEMENTS));
  if_fixed.Else();
  Push(BuildCloneShallowArrayNonEmpty(boilerplate, allocation_site, alloc_site_mode, FAST_DOUBLE_ELEMENTS));
  if_fixed.End();
  if_fixed_cow.End();
  zero_capacity.End();
  checker.ElseDeopt(Deoptimizer::kUninitializedBoilerplateLiterals);
  checker.End();
  return environment()->Pop();
}

Handle FastCloneShallowArrayStub::GenerateCode() { return DoGenerateCode(this); }

// FastCloneShallowObject: word-by-word copy of the boilerplate object, guarded
// on the boilerplate's instance size, with optional allocation memento.
template <> HValue* CodeStubGraphBuilder::BuildCodeStub() {
  HValue* undefined = graph()->GetConstantUndefined();
  HValue* closure = GetParameter(0);
  HValue* literal_index = GetParameter(1);
  HValue* literals_array = Add( closure, nullptr, HObjectAccess::ForLiteralsPointer());
  HInstruction* allocation_site = Add( literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS, NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag);
  IfBuilder checker(this);
  checker.IfNot(allocation_site, undefined);
  checker.And();
  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset( AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate = Add(allocation_site, nullptr, access);
  int length = casted_stub()->length();
  if (length == 0) {
    // Empty objects have some slack added to them.
    length = JSObject::kInitialGlobalObjectUnusedPropertiesCount;
  }
  int size = JSObject::kHeaderSize + length * kPointerSize;
  int object_size = size;
  if (FLAG_allocation_site_pretenuring) {
    size += AllocationMemento::kSize;
  }
  HValue* boilerplate_map = Add(boilerplate, nullptr, HObjectAccess::ForMap());
  HValue* boilerplate_size = Add( boilerplate_map, nullptr, HObjectAccess::ForMapInstanceSize());
  HValue* size_in_words = Add(object_size >> kPointerSizeLog2);
  checker.If(boilerplate_size, size_in_words, Token::EQ);
  checker.Then();
  HValue* size_in_bytes = Add(size);
  HInstruction* object = Add(size_in_bytes, HType::JSObject(), NOT_TENURED, JS_OBJECT_TYPE);
  for (int i = 0; i < object_size; i += kPointerSize) {
    HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(i);
    Add(object, access, Add(boilerplate, nullptr, access));
  }
  DCHECK(FLAG_allocation_site_pretenuring || (size == object_size));
  if (FLAG_allocation_site_pretenuring) {
    BuildCreateAllocationMemento( object, Add(object_size), allocation_site);
  }
  environment()->Push(object);
  checker.ElseDeopt(Deoptimizer::kUninitializedBoilerplateInFastClone);
  checker.End();
  return environment()->Pop();
}

Handle FastCloneShallowObjectStub::GenerateCode() { return DoGenerateCode(this); }

// CreateAllocationSite: allocates and initializes a tenured AllocationSite,
// links it into the isolate's allocation-site list, and records it in the
// feedback vector slot.
template <> HValue* CodeStubGraphBuilder::BuildCodeStub() {
  // This stub is performance sensitive, the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();
  HValue* size = Add(AllocationSite::kSize);
  HInstruction* object = Add(size, HType::JSObject(), TENURED, JS_OBJECT_TYPE);
  // Store the map
  Handle allocation_site_map = isolate()->factory()->allocation_site_map();
  AddStoreMapConstant(object, allocation_site_map);
  // Store the payload (smi elements kind)
  HValue* initial_elements_kind = Add(GetInitialFastElementsKind());
  Add(object, HObjectAccess::ForAllocationSiteOffset( AllocationSite::kTransitionInfoOffset), initial_elements_kind);
  // Unlike literals, constructed arrays don't have nested sites
  Add(object, HObjectAccess::ForAllocationSiteOffset( AllocationSite::kNestedSiteOffset), graph()->GetConstant0());
  // Pretenuring calculation field.
  Add(object, HObjectAccess::ForAllocationSiteOffset( AllocationSite::kPretenureDataOffset), graph()->GetConstant0());
  // Pretenuring memento creation count field.
  Add(object, HObjectAccess::ForAllocationSiteOffset( AllocationSite::kPretenureCreateCountOffset), graph()->GetConstant0());
  // Store an empty fixed array for the code dependency.
  HConstant* empty_fixed_array = Add(isolate()->factory()->empty_fixed_array());
  Add( object, HObjectAccess::ForAllocationSiteOffset( AllocationSite::kDependentCodeOffset), empty_fixed_array);
  // Link the object to the allocation site list
  HValue* site_list = Add( ExternalReference::allocation_sites_list_address(isolate()));
  HValue* site = Add(site_list, nullptr, HObjectAccess::ForAllocationSiteList());
  // TODO(mvstanton): This is a store to a weak pointer, which we may want to
  // mark as such in order to skip the write barrier, once we have a unified
  // system for weakness. For now we decided to keep it like this because having
  // an initial write barrier backed store makes this pointer strong until the
  // next GC, and allocation sites are designed to survive several GCs anyway.
  Add( object, HObjectAccess::ForAllocationSiteOffset(AllocationSite::kWeakNextOffset), site);
  Add(site_list, HObjectAccess::ForAllocationSiteList(), object);
  HInstruction* feedback_vector = GetParameter(0);
  HInstruction* slot = GetParameter(1);
  Add(feedback_vector, slot, object, nullptr, FAST_ELEMENTS, INITIALIZING_STORE);
  return feedback_vector;
}

Handle CreateAllocationSiteStub::GenerateCode() { return DoGenerateCode(this); }

// CreateWeakCell: allocates a tenured WeakCell holding {value} and stores it
// into the feedback vector slot.
template <> HValue* CodeStubGraphBuilder::BuildCodeStub() {
  // This stub is performance sensitive, the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();
  HValue* size = Add(WeakCell::kSize);
  HInstruction* object = Add(size, HType::JSObject(), TENURED, JS_OBJECT_TYPE);
  Handle weak_cell_map = isolate()->factory()->weak_cell_map();
  AddStoreMapConstant(object, weak_cell_map);
  HInstruction* value = GetParameter(CreateWeakCellDescriptor::kValueIndex);
  Add(object, HObjectAccess::ForWeakCellValue(), value);
  Add(object, HObjectAccess::ForWeakCellNext(), graph()->GetConstantHole());
  HInstruction* feedback_vector = GetParameter(CreateWeakCellDescriptor::kVectorIndex);
  HInstruction* slot = GetParameter(CreateWeakCellDescriptor::kSlotIndex);
  Add(feedback_vector, slot, object, nullptr, FAST_ELEMENTS, INITIALIZING_STORE);
  return graph()->GetConstant0();
}

Handle CreateWeakCellStub::GenerateCode() { return DoGenerateCode(this); }

// LoadScriptContextField: reads a fixed slot out of a script context.
template <> HValue* CodeStubGraphBuilder::BuildCodeStub() {
  int context_index = casted_stub()->context_index();
  int slot_index = casted_stub()->slot_index();
  HValue* script_context = BuildGetScriptContext(context_index);
  return Add(script_context, nullptr, HObjectAccess::ForContextSlot(slot_index));
}

Handle LoadScriptContextFieldStub::GenerateCode() { return DoGenerateCode(this); }

// StoreScriptContextField: writes parameter 2 into a fixed script-context
// slot and returns the stored value.
template <> HValue* CodeStubGraphBuilder::BuildCodeStub() {
  int context_index = casted_stub()->context_index();
  int slot_index = casted_stub()->slot_index();
  HValue* script_context = BuildGetScriptContext(context_index);
  Add(script_context, HObjectAccess::ForContextSlot(slot_index), GetParameter(2), STORE_TO_INITIALIZED_ENTRY);
  return GetParameter(2);
}

Handle StoreScriptContextFieldStub::GenerateCode() { return DoGenerateCode(this); }

// GrowArrayElements: grows the elements backing store to cover {key},
// checking against the JSArray length where applicable.
template <> HValue* CodeStubGraphBuilder::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  if (IsFastDoubleElementsKind(kind)) {
    info()->MarkAsSavesCallerDoubles();
  }
  HValue* object = GetParameter(GrowArrayElementsDescriptor::kObjectIndex);
  HValue* key = GetParameter(GrowArrayElementsDescriptor::kKeyIndex);
  HValue* elements = AddLoadElements(object);
  HValue* current_capacity = Add( elements, nullptr, HObjectAccess::ForFixedArrayLength());
  HValue* length = casted_stub()->is_js_array() ? Add(object, static_cast(NULL), HObjectAccess::ForArrayLength(kind)) : current_capacity;
  return BuildCheckAndGrowElementsCapacity(object, elements, kind, length, current_capacity, key);
}

Handle GrowArrayElementsStub::GenerateCode() { return DoGenerateCode(this); }

// LoadFastElement: monomorphic keyed load with the stub's hole policy.
template <> HValue* CodeStubGraphBuilder::BuildCodeStub() {
  LoadKeyedHoleMode hole_mode = casted_stub()->convert_hole_to_undefined() ? CONVERT_HOLE_TO_UNDEFINED : NEVER_RETURN_HOLE;
  HInstruction* load = BuildUncheckedMonomorphicElementAccess( GetParameter(LoadDescriptor::kReceiverIndex), GetParameter(LoadDescriptor::kNameIndex), NULL, casted_stub()->is_js_array(), casted_stub()->elements_kind(), LOAD, hole_mode, STANDARD_STORE);
  return load;
}

Handle LoadFastElementStub::GenerateCode() { return DoGenerateCode(this); }

// Loads a named field at {index}, unwrapping a boxed HeapNumber when the
// field is a double that is not stored unboxed in-object.
HLoadNamedField* CodeStubGraphBuilderBase::BuildLoadNamedField( HValue* object, FieldIndex index) {
  Representation representation = index.is_double() ? Representation::Double() : Representation::Tagged();
  int offset = index.offset();
  HObjectAccess access = index.is_inobject() ? HObjectAccess::ForObservableJSObjectOffset(offset, representation) : HObjectAccess::ForBackingStoreOffset(offset, representation);
  if (index.is_double() && (!FLAG_unbox_double_fields || !index.is_inobject())) {
    // Load the heap number.
    object = Add( object, nullptr, access.WithRepresentation(Representation::Tagged()));
    // Load the double value from it.
    access = HObjectAccess::ForHeapNumberValue();
  }
  return Add(object, nullptr, access);
}

template<> HValue* CodeStubGraphBuilder::BuildCodeStub() {
  return BuildLoadNamedField(GetParameter(0), casted_stub()->index());
}

Handle LoadFieldStub::GenerateCode() { return DoGenerateCode(this); }

template <> HValue* CodeStubGraphBuilder::BuildCodeStub() {
  return BuildArrayBufferViewFieldAccessor(GetParameter(0), nullptr, casted_stub()->index());
}

Handle ArrayBufferViewLoadFieldStub::GenerateCode() { return DoGenerateCode(this); }

// LoadConstant: reads a constant out of the receiver map's descriptor array.
template <> HValue* CodeStubGraphBuilder::BuildCodeStub() {
  HValue* map = AddLoadMap(GetParameter(0), NULL);
  HObjectAccess descriptors_access = HObjectAccess::ForObservableJSObjectOffset( Map::kDescriptorsOffset, Representation::Tagged());
  HValue* descriptors = Add(map, nullptr, descriptors_access);
  HObjectAccess value_access = HObjectAccess::ForObservableJSObjectOffset( DescriptorArray::GetValueOffset(casted_stub()->constant_index()));
  return Add(descriptors, nullptr, value_access);
}

Handle LoadConstantStub::GenerateCode() { return DoGenerateCode(this); }

// Handles the unmapped-arguments slow path of sloppy-arguments access: loads
// from (value == NULL) or stores to the backing fixed array at elements[1],
// deopting when {key} is outside the backing store's length.
HValue* CodeStubGraphBuilderBase::UnmappedCase(HValue* elements, HValue* key, HValue* value) {
  HValue* result = NULL;
  HInstruction* backing_store = Add(elements, graph()->GetConstant1(), nullptr, nullptr, FAST_ELEMENTS, ALLOW_RETURN_HOLE);
  Add(backing_store, isolate()->factory()->fixed_array_map());
  HValue* backing_store_length = Add( backing_store, nullptr, HObjectAccess::ForFixedArrayLength());
  IfBuilder in_unmapped_range(this);
  in_unmapped_range.If(key, backing_store_length, Token::LT);
  in_unmapped_range.Then();
  {
    if (value == NULL) {
      result = Add(backing_store, key, nullptr, nullptr, FAST_HOLEY_ELEMENTS, NEVER_RETURN_HOLE);
    } else {
      Add(backing_store, key, value, nullptr, FAST_HOLEY_ELEMENTS);
    }
  }
  in_unmapped_range.ElseDeopt(Deoptimizer::kOutsideOfRange);
  in_unmapped_range.End();
  return result;
}

HValue* CodeStubGraphBuilderBase::EmitKeyedSloppyArguments(HValue* receiver, HValue* key, HValue* value) {
  // Mapped arguments are actual arguments. Unmapped arguments are values added
  // to the arguments object after it was created for the call. Mapped arguments
  // are stored in the context at indexes given by elements[key + 2]. Unmapped
  // arguments are stored as regular indexed properties in the arguments array,
  // held at elements[1]. See NewSloppyArguments() in runtime.cc for a detailed
  // look at argument object construction.
  //
  // The sloppy arguments elements array has a special format:
  //
  // 0: context
  // 1: unmapped arguments array
  // 2: mapped_index0,
  // 3: mapped_index1,
  // ...
  //
  // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments).
  // If key + 2 >= elements.length then attempt to look in the unmapped
  // arguments array (given by elements[1]) and return the value at key, missing
  // to the runtime if the unmapped arguments array is not a fixed array or if
  // key >= unmapped_arguments_array.length.
  //
  // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value
  // in the unmapped arguments array, as described above. Otherwise, t is a Smi
  // index into the context array given at elements[0]. Return the value at
  // context[t].
  bool is_load = value == NULL;
  key = AddUncasted(key, Representation::Smi());
  IfBuilder positive_smi(this);
  positive_smi.If(key, graph()->GetConstant0(), Token::LT);
  positive_smi.ThenDeopt(Deoptimizer::kKeyIsNegative);
  positive_smi.End();
  HValue* constant_two = Add(2);
  HValue* elements = AddLoadElements(receiver, nullptr);
  HValue* elements_length = Add( elements, nullptr, HObjectAccess::ForFixedArrayLength());
  HValue* adjusted_length = AddUncasted(elements_length, constant_two);
  IfBuilder in_range(this);
  in_range.If(key, adjusted_length, Token::LT);
  in_range.Then();
  {
    HValue* index = AddUncasted(key, constant_two);
    HInstruction* mapped_index = Add(elements, index, nullptr, nullptr, FAST_HOLEY_ELEMENTS, ALLOW_RETURN_HOLE);
    IfBuilder is_valid(this);
    is_valid.IfNot(mapped_index, graph()->GetConstantHole());
    is_valid.Then();
    {
      // TODO(mvstanton): I'd like to assert from this point, that if the
      // mapped_index is not the hole that it is indeed, a smi. An unnecessary
      // smi check is being emitted.
      HValue* the_context = Add(elements, graph()->GetConstant0(), nullptr, nullptr, FAST_ELEMENTS);
      STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize);
      if (is_load) {
        HValue* result = Add(the_context, mapped_index, nullptr, nullptr, FAST_ELEMENTS, ALLOW_RETURN_HOLE);
        environment()->Push(result);
      } else {
        DCHECK(value != NULL);
        Add(the_context, mapped_index, value, nullptr, FAST_ELEMENTS);
        environment()->Push(value);
      }
    }
    is_valid.Else();
    {
      HValue* result = UnmappedCase(elements, key, value);
      environment()->Push(is_load ? result : value);
    }
    is_valid.End();
  }
  in_range.Else();
  {
    HValue* result = UnmappedCase(elements, key, value);
    environment()->Push(is_load ? result : value);
  }
  in_range.End();
  return environment()->Pop();
}

template <> HValue* CodeStubGraphBuilder::BuildCodeStub() {
  HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
  HValue* key = GetParameter(LoadDescriptor::kNameIndex);
  return EmitKeyedSloppyArguments(receiver, key, NULL);
}

Handle KeyedLoadSloppyArgumentsStub::GenerateCode() { return DoGenerateCode(this); }

template <> HValue* CodeStubGraphBuilder::BuildCodeStub() {
  HValue* receiver = GetParameter(StoreDescriptor::kReceiverIndex);
  HValue* key = GetParameter(StoreDescriptor::kNameIndex);
  HValue* value = GetParameter(StoreDescriptor::kValueIndex);
  return EmitKeyedSloppyArguments(receiver, key, value);
}

Handle KeyedStoreSloppyArgumentsStub::GenerateCode() { return DoGenerateCode(this); }

// Stores {value} into a named field, boxing doubles in a (mutable) HeapNumber
// where they are not stored unboxed in-object.
// NOTE(review): this function's body is truncated at the end of this chunk.
void CodeStubGraphBuilderBase::BuildStoreNamedField( HValue* object, HValue* value, FieldIndex index, Representation representation, bool transition_to_field) {
  DCHECK(!index.is_double() || representation.IsDouble());
  int offset = index.offset();
  HObjectAccess access = index.is_inobject() ? HObjectAccess::ForObservableJSObjectOffset(offset, representation) : HObjectAccess::ForBackingStoreOffset(offset, representation);
  if (representation.IsDouble()) {
    if (!FLAG_unbox_double_fields || !index.is_inobject()) {
      HObjectAccess heap_number_access = access.WithRepresentation(Representation::Tagged());
      if (transition_to_field) {
        // The store requires a mutable HeapNumber to be allocated.
        NoObservableSideEffectsScope no_side_effects(this);
        HInstruction* heap_number_size = Add(HeapNumber::kSize);
        // TODO(hpayer): Allocation site pretenuring support.
        HInstruction* heap_number = Add(heap_number_size, HType::HeapObject(), NOT_TENURED, MUTABLE_HEAP_NUMBER_TYPE);
        AddStoreMapConstant(heap_number, isolate()->factory()->mutable_heap_number_map());
        Add(heap_number, HObjectAccess::ForHeapNumberValue(), value);
        // Store the new mutable heap number into the object.
access = heap_number_access; value = heap_number; } else { // Load the heap number. object = Add(object, nullptr, heap_number_access); // Store the double value into it. access = HObjectAccess::ForHeapNumberValue(); } } } else if (representation.IsHeapObject()) { BuildCheckHeapObject(value); } Add(object, access, value, INITIALIZING_STORE); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BuildStoreNamedField(GetParameter(0), GetParameter(2), casted_stub()->index(), casted_stub()->representation(), false); return GetParameter(2); } Handle StoreFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* object = GetParameter(StoreTransitionHelper::ReceiverIndex()); switch (casted_stub()->store_mode()) { case StoreTransitionStub::ExtendStorageAndStoreMapAndValue: { HValue* properties = Add( object, nullptr, HObjectAccess::ForPropertiesPointer()); HValue* length = AddLoadFixedArrayLength(properties); HValue* delta = Add(static_cast(JSObject::kFieldsAdded)); HValue* new_capacity = AddUncasted(length, delta); // Grow properties array. ElementsKind kind = FAST_ELEMENTS; Add(new_capacity, Add((Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) >> ElementsKindToShiftSize(kind))); // Reuse this code for properties backing store allocation. HValue* new_properties = BuildAllocateAndInitializeArray(kind, new_capacity); BuildCopyProperties(properties, new_properties, length, new_capacity); Add(object, HObjectAccess::ForPropertiesPointer(), new_properties); } // Fall through. case StoreTransitionStub::StoreMapAndValue: // Store the new value into the "extended" object. BuildStoreNamedField( object, GetParameter(StoreTransitionHelper::ValueIndex()), casted_stub()->index(), casted_stub()->representation(), true); // Fall through. case StoreTransitionStub::StoreMapOnly: // And finally update the map. 
Add(object, HObjectAccess::ForMap(), GetParameter(StoreTransitionHelper::MapIndex())); break; } return GetParameter(StoreTransitionHelper::ValueIndex()); } Handle StoreTransitionStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BuildUncheckedMonomorphicElementAccess( GetParameter(StoreDescriptor::kReceiverIndex), GetParameter(StoreDescriptor::kNameIndex), GetParameter(StoreDescriptor::kValueIndex), casted_stub()->is_js_array(), casted_stub()->elements_kind(), STORE, NEVER_RETURN_HOLE, casted_stub()->store_mode()); return GetParameter(2); } Handle StoreFastElementStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { info()->MarkAsSavesCallerDoubles(); BuildTransitionElementsKind(GetParameter(0), GetParameter(1), casted_stub()->from_kind(), casted_stub()->to_kind(), casted_stub()->is_js_array()); return GetParameter(0); } Handle TransitionElementsKindStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(Add(HeapNumber::kSize), HType::HeapNumber(), NOT_TENURED, HEAP_NUMBER_TYPE); AddStoreMapConstant(result, isolate()->factory()->heap_number_map()); return result; } Handle AllocateHeapNumberStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(Add(HeapNumber::kSize), HType::HeapObject(), NOT_TENURED, MUTABLE_HEAP_NUMBER_TYPE); AddStoreMapConstant(result, isolate()->factory()->mutable_heap_number_map()); return result; } Handle AllocateMutableHeapNumberStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(GetParameter(0), HType::Tagged(), NOT_TENURED, JS_OBJECT_TYPE); return result; } Handle AllocateInNewSpaceStub::GenerateCode() { return DoGenerateCode(this); } HValue* 
CodeStubGraphBuilderBase::BuildArrayConstructor( ElementsKind kind, AllocationSiteOverrideMode override_mode, ArgumentClass argument_class) { HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor); HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite); JSArrayBuilder array_builder(this, kind, alloc_site, constructor, override_mode); HValue* result = NULL; switch (argument_class) { case NONE: // This stub is very performance sensitive, the generated code must be // tuned so that it doesn't build and eager frame. info()->MarkMustNotHaveEagerFrame(); result = array_builder.AllocateEmptyArray(); break; case SINGLE: result = BuildArraySingleArgumentConstructor(&array_builder); break; case MULTIPLE: result = BuildArrayNArgumentsConstructor(&array_builder, kind); break; } return result; } HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor( ElementsKind kind, ArgumentClass argument_class) { HValue* constructor = GetParameter( InternalArrayConstructorStubBase::kConstructor); JSArrayBuilder array_builder(this, kind, constructor); HValue* result = NULL; switch (argument_class) { case NONE: // This stub is very performance sensitive, the generated code must be // tuned so that it doesn't build and eager frame. info()->MarkMustNotHaveEagerFrame(); result = array_builder.AllocateEmptyArray(); break; case SINGLE: result = BuildArraySingleArgumentConstructor(&array_builder); break; case MULTIPLE: result = BuildArrayNArgumentsConstructor(&array_builder, kind); break; } return result; } HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor( JSArrayBuilder* array_builder) { // Smi check and range check on the input arg. 
HValue* constant_one = graph()->GetConstant1(); HValue* constant_zero = graph()->GetConstant0(); HInstruction* elements = Add(false); HInstruction* argument = Add( elements, constant_one, constant_zero); return BuildAllocateArrayFromLength(array_builder, argument); } HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor( JSArrayBuilder* array_builder, ElementsKind kind) { // Insert a bounds check because the number of arguments might exceed // the kInitialMaxFastElementArray limit. This cannot happen for code // that was parsed, but calling via Array.apply(thisArg, [...]) might // trigger it. HValue* length = GetArgumentsLength(); HConstant* max_alloc_length = Add(JSArray::kInitialMaxFastElementArray); HValue* checked_length = Add(length, max_alloc_length); // We need to fill with the hole if it's a smi array in the multi-argument // case because we might have to bail out while copying arguments into // the array because they aren't compatible with a smi array. // If it's a double array, no problem, and if it's fast then no // problem either because doubles are boxed. // // TODO(mvstanton): consider an instruction to memset fill the array // with zero in this case instead. JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind) ? JSArrayBuilder::FILL_WITH_HOLE : JSArrayBuilder::DONT_FILL_WITH_HOLE; HValue* new_object = array_builder->AllocateArray(checked_length, max_alloc_length, checked_length, fill_mode); HValue* elements = array_builder->GetElementsLocation(); DCHECK(elements != NULL); // Now populate the elements correctly. 
LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement); HValue* start = graph()->GetConstant0(); HValue* key = builder.BeginBody(start, checked_length, Token::LT); HInstruction* argument_elements = Add(false); HInstruction* argument = Add( argument_elements, checked_length, key); Add(elements, key, argument, nullptr, kind); builder.EndBody(); return new_object; } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, NONE); } Handle ArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, SINGLE); } Handle ArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, MULTIPLE); } Handle ArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, NONE); } Handle InternalArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, SINGLE); } Handle InternalArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = 
casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, MULTIPLE); } Handle InternalArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { Isolate* isolate = graph()->isolate(); CompareNilICStub* stub = casted_stub(); HIfContinuation continuation; Handle sentinel_map(isolate->heap()->meta_map()); Type* type = stub->GetType(zone(), sentinel_map); BuildCompareNil(GetParameter(0), type, &continuation, kEmbedMapsViaWeakCells); IfBuilder if_nil(this, &continuation); if_nil.Then(); if (continuation.IsFalseReachable()) { if_nil.Else(); if_nil.Return(graph()->GetConstantFalse()); } if_nil.End(); return continuation.IsTrueReachable() ? graph()->GetConstantTrue() : graph()->GetConstantUndefined(); } Handle CompareNilICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { BinaryOpICState state = casted_stub()->state(); HValue* left = GetParameter(BinaryOpICStub::kLeft); HValue* right = GetParameter(BinaryOpICStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) && (state.HasSideEffects() || !result_type->Is(Type::None()))); HValue* result = NULL; HAllocationMode allocation_mode(NOT_TENURED); if (state.op() == Token::ADD && (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) && !left_type->Is(Type::String()) && !right_type->Is(Type::String())) { // For the generic add stub a fast case for string addition is performance // critical. 
if (left_type->Maybe(Type::String())) { IfBuilder if_leftisstring(this); if_leftisstring.If(left); if_leftisstring.Then(); { Push(BuildBinaryOperation(state.op(), left, right, Type::String(zone()), right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_leftisstring.Else(); { Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_leftisstring.End(); result = Pop(); } else { IfBuilder if_rightisstring(this); if_rightisstring.If(right); if_rightisstring.Then(); { Push(BuildBinaryOperation(state.op(), left, right, left_type, Type::String(zone()), result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_rightisstring.Else(); { Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_rightisstring.End(); result = Pop(); } } else { result = BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } // If we encounter a generic argument, the number conversion is // observable, thus we cannot afford to bail out after the fact. 
if (!state.HasSideEffects()) { result = EnforceNumberType(result, result_type); } return result; } Handle BinaryOpICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BinaryOpICState state = casted_stub()->state(); HValue* allocation_site = GetParameter( BinaryOpWithAllocationSiteStub::kAllocationSite); HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft); HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); HAllocationMode allocation_mode(allocation_site); return BuildBinaryOperation(state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } Handle BinaryOpWithAllocationSiteStub::GenerateCode() { return DoGenerateCode(this); } HValue* CodeStubGraphBuilderBase::BuildToString(HValue* input, bool convert) { if (!convert) return BuildCheckString(input); IfBuilder if_inputissmi(this); HValue* inputissmi = if_inputissmi.If(input); if_inputissmi.Then(); { // Convert the input smi to a string. Push(BuildNumberToString(input, Type::SignedSmall())); } if_inputissmi.Else(); { HValue* input_map = Add(input, inputissmi, HObjectAccess::ForMap()); HValue* input_instance_type = Add( input_map, inputissmi, HObjectAccess::ForMapInstanceType()); IfBuilder if_inputisstring(this); if_inputisstring.If( input_instance_type, Add(FIRST_NONSTRING_TYPE), Token::LT); if_inputisstring.Then(); { // The input is already a string. Push(input); } if_inputisstring.Else(); { // Convert to primitive first (if necessary), see // ES6 section 12.7.3 The Addition operator. IfBuilder if_inputisprimitive(this); STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE); if_inputisprimitive.If( input_instance_type, Add(LAST_PRIMITIVE_TYPE), Token::LTE); if_inputisprimitive.Then(); { // The input is already a primitive. 
Push(input); } if_inputisprimitive.Else(); { // Convert the input to a primitive. Push(BuildToPrimitive(input, input_map)); } if_inputisprimitive.End(); // Convert the primitive to a string value. ToStringDescriptor descriptor(isolate()); ToStringStub stub(isolate()); HValue* values[] = {context(), Pop()}; Push(AddUncasted( Add(stub.GetCode()), 0, descriptor, Vector(values, arraysize(values)))); } if_inputisstring.End(); } if_inputissmi.End(); return Pop(); } HValue* CodeStubGraphBuilderBase::BuildToPrimitive(HValue* input, HValue* input_map) { // Get the native context of the caller. HValue* native_context = BuildGetNativeContext(); // Determine the initial map of the %ObjectPrototype%. HValue* object_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::OBJECT_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the %StringPrototype%. HValue* string_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the String function. HValue* string_function = Add( native_context, nullptr, HObjectAccess::ForContextSlot(Context::STRING_FUNCTION_INDEX)); HValue* string_function_initial_map = Add( string_function, nullptr, HObjectAccess::ForPrototypeOrInitialMap()); // Determine the map of the [[Prototype]] of {input}. HValue* input_prototype = Add(input_map, nullptr, HObjectAccess::ForPrototype()); HValue* input_prototype_map = Add(input_prototype, nullptr, HObjectAccess::ForMap()); // For string wrappers (JSValue instances with [[StringData]] internal // fields), we can shortcirciut the ToPrimitive if // // (a) the {input} map matches the initial map of the String function, // (b) the {input} [[Prototype]] is the unmodified %StringPrototype% (i.e. // no one monkey-patched toString, @@toPrimitive or valueOf), and // (c) the %ObjectPrototype% (i.e. 
the [[Prototype]] of the // %StringPrototype%) is also unmodified, that is no one sneaked a // @@toPrimitive into the %ObjectPrototype%. // // If all these assumptions hold, we can just take the [[StringData]] value // and return it. // TODO(bmeurer): This just repairs a regression introduced by removing the // weird (and broken) intrinsic %_IsStringWrapperSafeForDefaultValue, which // was intendend to something similar to this, although less efficient and // wrong in the presence of @@toPrimitive. Long-term we might want to move // into the direction of having a ToPrimitiveStub that can do common cases // while staying in JavaScript land (i.e. not going to C++). IfBuilder if_inputisstringwrapper(this); if_inputisstringwrapper.If( input_map, string_function_initial_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( input_prototype_map, string_function_prototype_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( Add(Add(input_prototype_map, nullptr, HObjectAccess::ForPrototype()), nullptr, HObjectAccess::ForMap()), object_function_prototype_map); if_inputisstringwrapper.Then(); { Push(BuildLoadNamedField( input, FieldIndex::ForInObjectOffset(JSValue::kValueOffset))); } if_inputisstringwrapper.Else(); { // TODO(bmeurer): Add support for fast ToPrimitive conversion using // a dedicated ToPrimitiveStub. Add(input); Push(Add(Runtime::FunctionForId(Runtime::kToPrimitive), 1)); } if_inputisstringwrapper.End(); return Pop(); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StringAddStub* stub = casted_stub(); StringAddFlags flags = stub->flags(); PretenureFlag pretenure_flag = stub->pretenure_flag(); HValue* left = GetParameter(StringAddStub::kLeft); HValue* right = GetParameter(StringAddStub::kRight); // Make sure that both arguments are strings if not known in advance. 
if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) { left = BuildToString(left, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) { right = BuildToString(right, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } return BuildStringAdd(left, right, HAllocationMode(pretenure_flag)); } Handle StringAddStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { ToBooleanStub* stub = casted_stub(); IfBuilder if_true(this); if_true.If(GetParameter(0), stub->types()); if_true.Then(); if_true.Return(graph()->GetConstantTrue()); if_true.Else(); if_true.End(); return graph()->GetConstantFalse(); } Handle ToBooleanStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StoreGlobalStub* stub = casted_stub(); HParameter* value = GetParameter(StoreDescriptor::kValueIndex); if (stub->check_global()) { // Check that the map of the global has not changed: use a placeholder map // that will be replaced later with the global object's map. HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex); HValue* proxy_map = Add(proxy, nullptr, HObjectAccess::ForMap()); HValue* global = Add(proxy_map, nullptr, HObjectAccess::ForPrototype()); HValue* map_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::global_map_placeholder(isolate()))); HValue* expected_map = Add( map_cell, nullptr, HObjectAccess::ForWeakCellValue()); HValue* map = Add(global, nullptr, HObjectAccess::ForMap()); IfBuilder map_check(this); map_check.IfNot(expected_map, map); map_check.ThenDeopt(Deoptimizer::kUnknownMap); map_check.End(); } HValue* weak_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::property_cell_placeholder(isolate()))); HValue* cell = Add(weak_cell, nullptr, HObjectAccess::ForWeakCellValue()); Add
DoGenerateCode(Stub* stub) { Isolate* isolate = stub->isolate(); CodeStubDescriptor descriptor(stub); // If we are uninitialized we can use a light-weight stub to enter // the runtime that is significantly faster than using the standard // stub-failure deopt mechanism. if (stub->IsUninitialized() && descriptor.has_miss_handler()) { DCHECK(!descriptor.stack_parameter_count().is_valid()); return stub->GenerateLightweightMissCode(descriptor.miss_handler()); } base::ElapsedTimer timer; if (FLAG_profile_hydrogen_code_stub_compilation) { timer.Start(); } Zone zone; CompilationInfo info(stub, isolate, &zone); CodeStubGraphBuilder builder(&info); LChunk* chunk = OptimizeGraph(builder.CreateGraph()); Handle code = chunk->Codegen(); if (FLAG_profile_hydrogen_code_stub_compilation) { OFStream os(stdout); os << "[Lazy compilation of " << stub << " took " << timer.Elapsed().InMillisecondsF() << " ms]" << std::endl; } return code; } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { info()->MarkAsSavesCallerDoubles(); HValue* number = GetParameter(NumberToStringStub::kNumber); return BuildNumberToString(number, Type::Number(zone())); } Handle NumberToStringStub::GenerateCode() { return DoGenerateCode(this); } // Returns the type string of a value; see ECMA-262, 11.4.3 (p 47). 
template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { Factory* factory = isolate()->factory(); HConstant* number_string = Add(factory->number_string()); HValue* object = GetParameter(TypeofStub::kObject); IfBuilder is_smi(this); HValue* smi_check = is_smi.If(object); is_smi.Then(); { Push(number_string); } is_smi.Else(); { IfBuilder is_number(this); is_number.If(object, isolate()->factory()->heap_number_map()); is_number.Then(); { Push(number_string); } is_number.Else(); { HValue* map = AddLoadMap(object, smi_check); HValue* instance_type = Add( map, nullptr, HObjectAccess::ForMapInstanceType()); IfBuilder is_string(this); is_string.If( instance_type, Add(FIRST_NONSTRING_TYPE), Token::LT); is_string.Then(); { Push(Add(factory->string_string())); } is_string.Else(); { HConstant* object_string = Add(factory->object_string()); IfBuilder is_oddball(this); is_oddball.If( instance_type, Add(ODDBALL_TYPE), Token::EQ); is_oddball.Then(); { Push(Add(object, nullptr, HObjectAccess::ForOddballTypeOf())); } is_oddball.Else(); { IfBuilder is_symbol(this); is_symbol.If( instance_type, Add(SYMBOL_TYPE), Token::EQ); is_symbol.Then(); { Push(Add(factory->symbol_string())); } is_symbol.Else(); { HValue* bit_field = Add( map, nullptr, HObjectAccess::ForMapBitField()); HValue* bit_field_masked = AddUncasted( Token::BIT_AND, bit_field, Add((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable))); IfBuilder is_function(this); is_function.If( bit_field_masked, Add(1 << Map::kIsCallable), Token::EQ); is_function.Then(); { Push(Add(factory->function_string())); } is_function.Else(); { #define SIMD128_BUILDER_OPEN(TYPE, Type, type, lane_count, lane_type) \ IfBuilder is_##type(this); \ is_##type.If( \ map, Add(factory->type##_map())); \ is_##type.Then(); \ { Push(Add(factory->type##_string())); } \ is_##type.Else(); { SIMD128_TYPES(SIMD128_BUILDER_OPEN) #undef SIMD128_BUILDER_OPEN // Is it an undetectable object? 
IfBuilder is_undetectable(this); is_undetectable.If( bit_field_masked, graph()->GetConstant0(), Token::NE); is_undetectable.Then(); { // typeof an undetectable object is 'undefined'. Push(Add(factory->undefined_string())); } is_undetectable.Else(); { // For any kind of object not handled above, the spec rule for // host objects gives that it is okay to return "object". Push(object_string); } #define SIMD128_BUILDER_CLOSE(TYPE, Type, type, lane_count, lane_type) } SIMD128_TYPES(SIMD128_BUILDER_CLOSE) #undef SIMD128_BUILDER_CLOSE } is_function.End(); } is_symbol.End(); } is_oddball.End(); } is_string.End(); } is_number.End(); } is_smi.End(); return environment()->Pop(); } Handle TypeofStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* closure = GetParameter(0); HValue* literal_index = GetParameter(1); // This stub is very performance sensitive, the generated code must be tuned // so that it doesn't build and eager frame. 
info()->MarkMustNotHaveEagerFrame(); HValue* literals_array = Add( closure, nullptr, HObjectAccess::ForLiteralsPointer()); HInstruction* boilerplate = Add( literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS, NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag); IfBuilder if_notundefined(this); if_notundefined.IfNot( boilerplate, graph()->GetConstantUndefined()); if_notundefined.Then(); { int result_size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; HValue* result = Add(Add(result_size), HType::JSObject(), NOT_TENURED, JS_REGEXP_TYPE); Add( result, HObjectAccess::ForMap(), Add(boilerplate, nullptr, HObjectAccess::ForMap())); Add( result, HObjectAccess::ForPropertiesPointer(), Add(boilerplate, nullptr, HObjectAccess::ForPropertiesPointer())); Add( result, HObjectAccess::ForElementsPointer(), Add(boilerplate, nullptr, HObjectAccess::ForElementsPointer())); for (int offset = JSObject::kHeaderSize; offset < result_size; offset += kPointerSize) { HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(offset); Add(result, access, Add(boilerplate, nullptr, access)); } Push(result); } if_notundefined.ElseDeopt(Deoptimizer::kUninitializedBoilerplateInFastClone); if_notundefined.End(); return Pop(); } Handle FastCloneRegExpStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { Factory* factory = isolate()->factory(); HValue* undefined = graph()->GetConstantUndefined(); AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode(); HValue* closure = GetParameter(0); HValue* literal_index = GetParameter(1); // This stub is very performance sensitive, the generated code must be tuned // so that it doesn't build and eager frame. 
info()->MarkMustNotHaveEagerFrame(); HValue* literals_array = Add( closure, nullptr, HObjectAccess::ForLiteralsPointer()); HInstruction* allocation_site = Add( literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS, NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag); IfBuilder checker(this); checker.IfNot(allocation_site, undefined); checker.Then(); HObjectAccess access = HObjectAccess::ForAllocationSiteOffset( AllocationSite::kTransitionInfoOffset); HInstruction* boilerplate = Add(allocation_site, nullptr, access); HValue* elements = AddLoadElements(boilerplate); HValue* capacity = AddLoadFixedArrayLength(elements); IfBuilder zero_capacity(this); zero_capacity.If(capacity, graph()->GetConstant0(), Token::EQ); zero_capacity.Then(); Push(BuildCloneShallowArrayEmpty(boilerplate, allocation_site, alloc_site_mode)); zero_capacity.Else(); IfBuilder if_fixed_cow(this); if_fixed_cow.If(elements, factory->fixed_cow_array_map()); if_fixed_cow.Then(); Push(BuildCloneShallowArrayCow(boilerplate, allocation_site, alloc_site_mode, FAST_ELEMENTS)); if_fixed_cow.Else(); IfBuilder if_fixed(this); if_fixed.If(elements, factory->fixed_array_map()); if_fixed.Then(); Push(BuildCloneShallowArrayNonEmpty(boilerplate, allocation_site, alloc_site_mode, FAST_ELEMENTS)); if_fixed.Else(); Push(BuildCloneShallowArrayNonEmpty(boilerplate, allocation_site, alloc_site_mode, FAST_DOUBLE_ELEMENTS)); if_fixed.End(); if_fixed_cow.End(); zero_capacity.End(); checker.ElseDeopt(Deoptimizer::kUninitializedBoilerplateLiterals); checker.End(); return environment()->Pop(); } Handle FastCloneShallowArrayStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* undefined = graph()->GetConstantUndefined(); HValue* closure = GetParameter(0); HValue* literal_index = GetParameter(1); HValue* literals_array = Add( closure, nullptr, HObjectAccess::ForLiteralsPointer()); HInstruction* allocation_site = Add( 
literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS, NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag); IfBuilder checker(this); checker.IfNot(allocation_site, undefined); checker.And(); HObjectAccess access = HObjectAccess::ForAllocationSiteOffset( AllocationSite::kTransitionInfoOffset); HInstruction* boilerplate = Add(allocation_site, nullptr, access); int length = casted_stub()->length(); if (length == 0) { // Empty objects have some slack added to them. length = JSObject::kInitialGlobalObjectUnusedPropertiesCount; } int size = JSObject::kHeaderSize + length * kPointerSize; int object_size = size; if (FLAG_allocation_site_pretenuring) { size += AllocationMemento::kSize; } HValue* boilerplate_map = Add(boilerplate, nullptr, HObjectAccess::ForMap()); HValue* boilerplate_size = Add( boilerplate_map, nullptr, HObjectAccess::ForMapInstanceSize()); HValue* size_in_words = Add(object_size >> kPointerSizeLog2); checker.If(boilerplate_size, size_in_words, Token::EQ); checker.Then(); HValue* size_in_bytes = Add(size); HInstruction* object = Add(size_in_bytes, HType::JSObject(), NOT_TENURED, JS_OBJECT_TYPE); for (int i = 0; i < object_size; i += kPointerSize) { HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(i); Add(object, access, Add(boilerplate, nullptr, access)); } DCHECK(FLAG_allocation_site_pretenuring || (size == object_size)); if (FLAG_allocation_site_pretenuring) { BuildCreateAllocationMemento( object, Add(object_size), allocation_site); } environment()->Push(object); checker.ElseDeopt(Deoptimizer::kUninitializedBoilerplateInFastClone); checker.End(); return environment()->Pop(); } Handle FastCloneShallowObjectStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { // This stub is performance sensitive, the generated code must be tuned // so that it doesn't build an eager frame. 
info()->MarkMustNotHaveEagerFrame(); HValue* size = Add(AllocationSite::kSize); HInstruction* object = Add(size, HType::JSObject(), TENURED, JS_OBJECT_TYPE); // Store the map Handle allocation_site_map = isolate()->factory()->allocation_site_map(); AddStoreMapConstant(object, allocation_site_map); // Store the payload (smi elements kind) HValue* initial_elements_kind = Add(GetInitialFastElementsKind()); Add(object, HObjectAccess::ForAllocationSiteOffset( AllocationSite::kTransitionInfoOffset), initial_elements_kind); // Unlike literals, constructed arrays don't have nested sites Add(object, HObjectAccess::ForAllocationSiteOffset( AllocationSite::kNestedSiteOffset), graph()->GetConstant0()); // Pretenuring calculation field. Add(object, HObjectAccess::ForAllocationSiteOffset( AllocationSite::kPretenureDataOffset), graph()->GetConstant0()); // Pretenuring memento creation count field. Add(object, HObjectAccess::ForAllocationSiteOffset( AllocationSite::kPretenureCreateCountOffset), graph()->GetConstant0()); // Store an empty fixed array for the code dependency. HConstant* empty_fixed_array = Add(isolate()->factory()->empty_fixed_array()); Add( object, HObjectAccess::ForAllocationSiteOffset( AllocationSite::kDependentCodeOffset), empty_fixed_array); // Link the object to the allocation site list HValue* site_list = Add( ExternalReference::allocation_sites_list_address(isolate())); HValue* site = Add(site_list, nullptr, HObjectAccess::ForAllocationSiteList()); // TODO(mvstanton): This is a store to a weak pointer, which we may want to // mark as such in order to skip the write barrier, once we have a unified // system for weakness. For now we decided to keep it like this because having // an initial write barrier backed store makes this pointer strong until the // next GC, and allocation sites are designed to survive several GCs anyway. 
Add( object, HObjectAccess::ForAllocationSiteOffset(AllocationSite::kWeakNextOffset), site); Add(site_list, HObjectAccess::ForAllocationSiteList(), object); HInstruction* feedback_vector = GetParameter(0); HInstruction* slot = GetParameter(1); Add(feedback_vector, slot, object, nullptr, FAST_ELEMENTS, INITIALIZING_STORE); return feedback_vector; } Handle CreateAllocationSiteStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { // This stub is performance sensitive, the generated code must be tuned // so that it doesn't build an eager frame. info()->MarkMustNotHaveEagerFrame(); HValue* size = Add(WeakCell::kSize); HInstruction* object = Add(size, HType::JSObject(), TENURED, JS_OBJECT_TYPE); Handle weak_cell_map = isolate()->factory()->weak_cell_map(); AddStoreMapConstant(object, weak_cell_map); HInstruction* value = GetParameter(CreateWeakCellDescriptor::kValueIndex); Add(object, HObjectAccess::ForWeakCellValue(), value); Add(object, HObjectAccess::ForWeakCellNext(), graph()->GetConstantHole()); HInstruction* feedback_vector = GetParameter(CreateWeakCellDescriptor::kVectorIndex); HInstruction* slot = GetParameter(CreateWeakCellDescriptor::kSlotIndex); Add(feedback_vector, slot, object, nullptr, FAST_ELEMENTS, INITIALIZING_STORE); return graph()->GetConstant0(); } Handle CreateWeakCellStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { int context_index = casted_stub()->context_index(); int slot_index = casted_stub()->slot_index(); HValue* script_context = BuildGetScriptContext(context_index); return Add(script_context, nullptr, HObjectAccess::ForContextSlot(slot_index)); } Handle LoadScriptContextFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { int context_index = casted_stub()->context_index(); int slot_index = casted_stub()->slot_index(); HValue* script_context = 
BuildGetScriptContext(context_index); Add(script_context, HObjectAccess::ForContextSlot(slot_index), GetParameter(2), STORE_TO_INITIALIZED_ENTRY); return GetParameter(2); } Handle StoreScriptContextFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); if (IsFastDoubleElementsKind(kind)) { info()->MarkAsSavesCallerDoubles(); } HValue* object = GetParameter(GrowArrayElementsDescriptor::kObjectIndex); HValue* key = GetParameter(GrowArrayElementsDescriptor::kKeyIndex); HValue* elements = AddLoadElements(object); HValue* current_capacity = Add( elements, nullptr, HObjectAccess::ForFixedArrayLength()); HValue* length = casted_stub()->is_js_array() ? Add(object, static_cast(NULL), HObjectAccess::ForArrayLength(kind)) : current_capacity; return BuildCheckAndGrowElementsCapacity(object, elements, kind, length, current_capacity, key); } Handle GrowArrayElementsStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { LoadKeyedHoleMode hole_mode = casted_stub()->convert_hole_to_undefined() ? CONVERT_HOLE_TO_UNDEFINED : NEVER_RETURN_HOLE; HInstruction* load = BuildUncheckedMonomorphicElementAccess( GetParameter(LoadDescriptor::kReceiverIndex), GetParameter(LoadDescriptor::kNameIndex), NULL, casted_stub()->is_js_array(), casted_stub()->elements_kind(), LOAD, hole_mode, STANDARD_STORE); return load; } Handle LoadFastElementStub::GenerateCode() { return DoGenerateCode(this); } HLoadNamedField* CodeStubGraphBuilderBase::BuildLoadNamedField( HValue* object, FieldIndex index) { Representation representation = index.is_double() ? Representation::Double() : Representation::Tagged(); int offset = index.offset(); HObjectAccess access = index.is_inobject() ? 
HObjectAccess::ForObservableJSObjectOffset(offset, representation) : HObjectAccess::ForBackingStoreOffset(offset, representation); if (index.is_double() && (!FLAG_unbox_double_fields || !index.is_inobject())) { // Load the heap number. object = Add( object, nullptr, access.WithRepresentation(Representation::Tagged())); // Load the double value from it. access = HObjectAccess::ForHeapNumberValue(); } return Add(object, nullptr, access); } template<> HValue* CodeStubGraphBuilder::BuildCodeStub() { return BuildLoadNamedField(GetParameter(0), casted_stub()->index()); } Handle LoadFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { return BuildArrayBufferViewFieldAccessor(GetParameter(0), nullptr, casted_stub()->index()); } Handle ArrayBufferViewLoadFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* map = AddLoadMap(GetParameter(0), NULL); HObjectAccess descriptors_access = HObjectAccess::ForObservableJSObjectOffset( Map::kDescriptorsOffset, Representation::Tagged()); HValue* descriptors = Add(map, nullptr, descriptors_access); HObjectAccess value_access = HObjectAccess::ForObservableJSObjectOffset( DescriptorArray::GetValueOffset(casted_stub()->constant_index())); return Add(descriptors, nullptr, value_access); } Handle LoadConstantStub::GenerateCode() { return DoGenerateCode(this); } HValue* CodeStubGraphBuilderBase::UnmappedCase(HValue* elements, HValue* key, HValue* value) { HValue* result = NULL; HInstruction* backing_store = Add(elements, graph()->GetConstant1(), nullptr, nullptr, FAST_ELEMENTS, ALLOW_RETURN_HOLE); Add(backing_store, isolate()->factory()->fixed_array_map()); HValue* backing_store_length = Add( backing_store, nullptr, HObjectAccess::ForFixedArrayLength()); IfBuilder in_unmapped_range(this); in_unmapped_range.If(key, backing_store_length, Token::LT); in_unmapped_range.Then(); { if (value == NULL) { result = 
Add(backing_store, key, nullptr, nullptr, FAST_HOLEY_ELEMENTS, NEVER_RETURN_HOLE); } else { Add(backing_store, key, value, nullptr, FAST_HOLEY_ELEMENTS); } } in_unmapped_range.ElseDeopt(Deoptimizer::kOutsideOfRange); in_unmapped_range.End(); return result; } HValue* CodeStubGraphBuilderBase::EmitKeyedSloppyArguments(HValue* receiver, HValue* key, HValue* value) { // Mapped arguments are actual arguments. Unmapped arguments are values added // to the arguments object after it was created for the call. Mapped arguments // are stored in the context at indexes given by elements[key + 2]. Unmapped // arguments are stored as regular indexed properties in the arguments array, // held at elements[1]. See NewSloppyArguments() in runtime.cc for a detailed // look at argument object construction. // // The sloppy arguments elements array has a special format: // // 0: context // 1: unmapped arguments array // 2: mapped_index0, // 3: mapped_index1, // ... // // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments). // If key + 2 >= elements.length then attempt to look in the unmapped // arguments array (given by elements[1]) and return the value at key, missing // to the runtime if the unmapped arguments array is not a fixed array or if // key >= unmapped_arguments_array.length. // // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value // in the unmapped arguments array, as described above. Otherwise, t is a Smi // index into the context array given at elements[0]. Return the value at // context[t]. 
bool is_load = value == NULL; key = AddUncasted(key, Representation::Smi()); IfBuilder positive_smi(this); positive_smi.If(key, graph()->GetConstant0(), Token::LT); positive_smi.ThenDeopt(Deoptimizer::kKeyIsNegative); positive_smi.End(); HValue* constant_two = Add(2); HValue* elements = AddLoadElements(receiver, nullptr); HValue* elements_length = Add( elements, nullptr, HObjectAccess::ForFixedArrayLength()); HValue* adjusted_length = AddUncasted(elements_length, constant_two); IfBuilder in_range(this); in_range.If(key, adjusted_length, Token::LT); in_range.Then(); { HValue* index = AddUncasted(key, constant_two); HInstruction* mapped_index = Add(elements, index, nullptr, nullptr, FAST_HOLEY_ELEMENTS, ALLOW_RETURN_HOLE); IfBuilder is_valid(this); is_valid.IfNot(mapped_index, graph()->GetConstantHole()); is_valid.Then(); { // TODO(mvstanton): I'd like to assert from this point, that if the // mapped_index is not the hole that it is indeed, a smi. An unnecessary // smi check is being emitted. HValue* the_context = Add(elements, graph()->GetConstant0(), nullptr, nullptr, FAST_ELEMENTS); STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize); if (is_load) { HValue* result = Add(the_context, mapped_index, nullptr, nullptr, FAST_ELEMENTS, ALLOW_RETURN_HOLE); environment()->Push(result); } else { DCHECK(value != NULL); Add(the_context, mapped_index, value, nullptr, FAST_ELEMENTS); environment()->Push(value); } } is_valid.Else(); { HValue* result = UnmappedCase(elements, key, value); environment()->Push(is_load ? result : value); } is_valid.End(); } in_range.Else(); { HValue* result = UnmappedCase(elements, key, value); environment()->Push(is_load ? 
result : value); } in_range.End(); return environment()->Pop(); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex); HValue* key = GetParameter(LoadDescriptor::kNameIndex); return EmitKeyedSloppyArguments(receiver, key, NULL); } Handle KeyedLoadSloppyArgumentsStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* receiver = GetParameter(StoreDescriptor::kReceiverIndex); HValue* key = GetParameter(StoreDescriptor::kNameIndex); HValue* value = GetParameter(StoreDescriptor::kValueIndex); return EmitKeyedSloppyArguments(receiver, key, value); } Handle KeyedStoreSloppyArgumentsStub::GenerateCode() { return DoGenerateCode(this); } void CodeStubGraphBuilderBase::BuildStoreNamedField( HValue* object, HValue* value, FieldIndex index, Representation representation, bool transition_to_field) { DCHECK(!index.is_double() || representation.IsDouble()); int offset = index.offset(); HObjectAccess access = index.is_inobject() ? HObjectAccess::ForObservableJSObjectOffset(offset, representation) : HObjectAccess::ForBackingStoreOffset(offset, representation); if (representation.IsDouble()) { if (!FLAG_unbox_double_fields || !index.is_inobject()) { HObjectAccess heap_number_access = access.WithRepresentation(Representation::Tagged()); if (transition_to_field) { // The store requires a mutable HeapNumber to be allocated. NoObservableSideEffectsScope no_side_effects(this); HInstruction* heap_number_size = Add(HeapNumber::kSize); // TODO(hpayer): Allocation site pretenuring support. HInstruction* heap_number = Add(heap_number_size, HType::HeapObject(), NOT_TENURED, MUTABLE_HEAP_NUMBER_TYPE); AddStoreMapConstant(heap_number, isolate()->factory()->mutable_heap_number_map()); Add(heap_number, HObjectAccess::ForHeapNumberValue(), value); // Store the new mutable heap number into the object. 
access = heap_number_access; value = heap_number; } else { // Load the heap number. object = Add(object, nullptr, heap_number_access); // Store the double value into it. access = HObjectAccess::ForHeapNumberValue(); } } } else if (representation.IsHeapObject()) { BuildCheckHeapObject(value); } Add(object, access, value, INITIALIZING_STORE); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BuildStoreNamedField(GetParameter(0), GetParameter(2), casted_stub()->index(), casted_stub()->representation(), false); return GetParameter(2); } Handle StoreFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* object = GetParameter(StoreTransitionHelper::ReceiverIndex()); switch (casted_stub()->store_mode()) { case StoreTransitionStub::ExtendStorageAndStoreMapAndValue: { HValue* properties = Add( object, nullptr, HObjectAccess::ForPropertiesPointer()); HValue* length = AddLoadFixedArrayLength(properties); HValue* delta = Add(static_cast(JSObject::kFieldsAdded)); HValue* new_capacity = AddUncasted(length, delta); // Grow properties array. ElementsKind kind = FAST_ELEMENTS; Add(new_capacity, Add((Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) >> ElementsKindToShiftSize(kind))); // Reuse this code for properties backing store allocation. HValue* new_properties = BuildAllocateAndInitializeArray(kind, new_capacity); BuildCopyProperties(properties, new_properties, length, new_capacity); Add(object, HObjectAccess::ForPropertiesPointer(), new_properties); } // Fall through. case StoreTransitionStub::StoreMapAndValue: // Store the new value into the "extended" object. BuildStoreNamedField( object, GetParameter(StoreTransitionHelper::ValueIndex()), casted_stub()->index(), casted_stub()->representation(), true); // Fall through. case StoreTransitionStub::StoreMapOnly: // And finally update the map. 
Add(object, HObjectAccess::ForMap(), GetParameter(StoreTransitionHelper::MapIndex())); break; } return GetParameter(StoreTransitionHelper::ValueIndex()); } Handle StoreTransitionStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BuildUncheckedMonomorphicElementAccess( GetParameter(StoreDescriptor::kReceiverIndex), GetParameter(StoreDescriptor::kNameIndex), GetParameter(StoreDescriptor::kValueIndex), casted_stub()->is_js_array(), casted_stub()->elements_kind(), STORE, NEVER_RETURN_HOLE, casted_stub()->store_mode()); return GetParameter(2); } Handle StoreFastElementStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { info()->MarkAsSavesCallerDoubles(); BuildTransitionElementsKind(GetParameter(0), GetParameter(1), casted_stub()->from_kind(), casted_stub()->to_kind(), casted_stub()->is_js_array()); return GetParameter(0); } Handle TransitionElementsKindStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(Add(HeapNumber::kSize), HType::HeapNumber(), NOT_TENURED, HEAP_NUMBER_TYPE); AddStoreMapConstant(result, isolate()->factory()->heap_number_map()); return result; } Handle AllocateHeapNumberStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(Add(HeapNumber::kSize), HType::HeapObject(), NOT_TENURED, MUTABLE_HEAP_NUMBER_TYPE); AddStoreMapConstant(result, isolate()->factory()->mutable_heap_number_map()); return result; } Handle AllocateMutableHeapNumberStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(GetParameter(0), HType::Tagged(), NOT_TENURED, JS_OBJECT_TYPE); return result; } Handle AllocateInNewSpaceStub::GenerateCode() { return DoGenerateCode(this); } HValue* 
CodeStubGraphBuilderBase::BuildArrayConstructor( ElementsKind kind, AllocationSiteOverrideMode override_mode, ArgumentClass argument_class) { HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor); HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite); JSArrayBuilder array_builder(this, kind, alloc_site, constructor, override_mode); HValue* result = NULL; switch (argument_class) { case NONE: // This stub is very performance sensitive, the generated code must be // tuned so that it doesn't build and eager frame. info()->MarkMustNotHaveEagerFrame(); result = array_builder.AllocateEmptyArray(); break; case SINGLE: result = BuildArraySingleArgumentConstructor(&array_builder); break; case MULTIPLE: result = BuildArrayNArgumentsConstructor(&array_builder, kind); break; } return result; } HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor( ElementsKind kind, ArgumentClass argument_class) { HValue* constructor = GetParameter( InternalArrayConstructorStubBase::kConstructor); JSArrayBuilder array_builder(this, kind, constructor); HValue* result = NULL; switch (argument_class) { case NONE: // This stub is very performance sensitive, the generated code must be // tuned so that it doesn't build and eager frame. info()->MarkMustNotHaveEagerFrame(); result = array_builder.AllocateEmptyArray(); break; case SINGLE: result = BuildArraySingleArgumentConstructor(&array_builder); break; case MULTIPLE: result = BuildArrayNArgumentsConstructor(&array_builder, kind); break; } return result; } HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor( JSArrayBuilder* array_builder) { // Smi check and range check on the input arg. 
HValue* constant_one = graph()->GetConstant1(); HValue* constant_zero = graph()->GetConstant0(); HInstruction* elements = Add(false); HInstruction* argument = Add( elements, constant_one, constant_zero); return BuildAllocateArrayFromLength(array_builder, argument); } HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor( JSArrayBuilder* array_builder, ElementsKind kind) { // Insert a bounds check because the number of arguments might exceed // the kInitialMaxFastElementArray limit. This cannot happen for code // that was parsed, but calling via Array.apply(thisArg, [...]) might // trigger it. HValue* length = GetArgumentsLength(); HConstant* max_alloc_length = Add(JSArray::kInitialMaxFastElementArray); HValue* checked_length = Add(length, max_alloc_length); // We need to fill with the hole if it's a smi array in the multi-argument // case because we might have to bail out while copying arguments into // the array because they aren't compatible with a smi array. // If it's a double array, no problem, and if it's fast then no // problem either because doubles are boxed. // // TODO(mvstanton): consider an instruction to memset fill the array // with zero in this case instead. JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind) ? JSArrayBuilder::FILL_WITH_HOLE : JSArrayBuilder::DONT_FILL_WITH_HOLE; HValue* new_object = array_builder->AllocateArray(checked_length, max_alloc_length, checked_length, fill_mode); HValue* elements = array_builder->GetElementsLocation(); DCHECK(elements != NULL); // Now populate the elements correctly. 
LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement); HValue* start = graph()->GetConstant0(); HValue* key = builder.BeginBody(start, checked_length, Token::LT); HInstruction* argument_elements = Add(false); HInstruction* argument = Add( argument_elements, checked_length, key); Add(elements, key, argument, nullptr, kind); builder.EndBody(); return new_object; } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, NONE); } Handle ArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, SINGLE); } Handle ArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, MULTIPLE); } Handle ArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, NONE); } Handle InternalArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, SINGLE); } Handle InternalArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = 
casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, MULTIPLE); } Handle InternalArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { Isolate* isolate = graph()->isolate(); CompareNilICStub* stub = casted_stub(); HIfContinuation continuation; Handle sentinel_map(isolate->heap()->meta_map()); Type* type = stub->GetType(zone(), sentinel_map); BuildCompareNil(GetParameter(0), type, &continuation, kEmbedMapsViaWeakCells); IfBuilder if_nil(this, &continuation); if_nil.Then(); if (continuation.IsFalseReachable()) { if_nil.Else(); if_nil.Return(graph()->GetConstantFalse()); } if_nil.End(); return continuation.IsTrueReachable() ? graph()->GetConstantTrue() : graph()->GetConstantUndefined(); } Handle CompareNilICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { BinaryOpICState state = casted_stub()->state(); HValue* left = GetParameter(BinaryOpICStub::kLeft); HValue* right = GetParameter(BinaryOpICStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) && (state.HasSideEffects() || !result_type->Is(Type::None()))); HValue* result = NULL; HAllocationMode allocation_mode(NOT_TENURED); if (state.op() == Token::ADD && (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) && !left_type->Is(Type::String()) && !right_type->Is(Type::String())) { // For the generic add stub a fast case for string addition is performance // critical. 
if (left_type->Maybe(Type::String())) { IfBuilder if_leftisstring(this); if_leftisstring.If(left); if_leftisstring.Then(); { Push(BuildBinaryOperation(state.op(), left, right, Type::String(zone()), right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_leftisstring.Else(); { Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_leftisstring.End(); result = Pop(); } else { IfBuilder if_rightisstring(this); if_rightisstring.If(right); if_rightisstring.Then(); { Push(BuildBinaryOperation(state.op(), left, right, left_type, Type::String(zone()), result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_rightisstring.Else(); { Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_rightisstring.End(); result = Pop(); } } else { result = BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } // If we encounter a generic argument, the number conversion is // observable, thus we cannot afford to bail out after the fact. 
if (!state.HasSideEffects()) { result = EnforceNumberType(result, result_type); } return result; } Handle BinaryOpICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BinaryOpICState state = casted_stub()->state(); HValue* allocation_site = GetParameter( BinaryOpWithAllocationSiteStub::kAllocationSite); HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft); HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); HAllocationMode allocation_mode(allocation_site); return BuildBinaryOperation(state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } Handle BinaryOpWithAllocationSiteStub::GenerateCode() { return DoGenerateCode(this); } HValue* CodeStubGraphBuilderBase::BuildToString(HValue* input, bool convert) { if (!convert) return BuildCheckString(input); IfBuilder if_inputissmi(this); HValue* inputissmi = if_inputissmi.If(input); if_inputissmi.Then(); { // Convert the input smi to a string. Push(BuildNumberToString(input, Type::SignedSmall())); } if_inputissmi.Else(); { HValue* input_map = Add(input, inputissmi, HObjectAccess::ForMap()); HValue* input_instance_type = Add( input_map, inputissmi, HObjectAccess::ForMapInstanceType()); IfBuilder if_inputisstring(this); if_inputisstring.If( input_instance_type, Add(FIRST_NONSTRING_TYPE), Token::LT); if_inputisstring.Then(); { // The input is already a string. Push(input); } if_inputisstring.Else(); { // Convert to primitive first (if necessary), see // ES6 section 12.7.3 The Addition operator. IfBuilder if_inputisprimitive(this); STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE); if_inputisprimitive.If( input_instance_type, Add(LAST_PRIMITIVE_TYPE), Token::LTE); if_inputisprimitive.Then(); { // The input is already a primitive. 
Push(input); } if_inputisprimitive.Else(); { // Convert the input to a primitive. Push(BuildToPrimitive(input, input_map)); } if_inputisprimitive.End(); // Convert the primitive to a string value. ToStringDescriptor descriptor(isolate()); ToStringStub stub(isolate()); HValue* values[] = {context(), Pop()}; Push(AddUncasted( Add(stub.GetCode()), 0, descriptor, Vector(values, arraysize(values)))); } if_inputisstring.End(); } if_inputissmi.End(); return Pop(); } HValue* CodeStubGraphBuilderBase::BuildToPrimitive(HValue* input, HValue* input_map) { // Get the native context of the caller. HValue* native_context = BuildGetNativeContext(); // Determine the initial map of the %ObjectPrototype%. HValue* object_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::OBJECT_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the %StringPrototype%. HValue* string_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the String function. HValue* string_function = Add( native_context, nullptr, HObjectAccess::ForContextSlot(Context::STRING_FUNCTION_INDEX)); HValue* string_function_initial_map = Add( string_function, nullptr, HObjectAccess::ForPrototypeOrInitialMap()); // Determine the map of the [[Prototype]] of {input}. HValue* input_prototype = Add(input_map, nullptr, HObjectAccess::ForPrototype()); HValue* input_prototype_map = Add(input_prototype, nullptr, HObjectAccess::ForMap()); // For string wrappers (JSValue instances with [[StringData]] internal // fields), we can shortcirciut the ToPrimitive if // // (a) the {input} map matches the initial map of the String function, // (b) the {input} [[Prototype]] is the unmodified %StringPrototype% (i.e. // no one monkey-patched toString, @@toPrimitive or valueOf), and // (c) the %ObjectPrototype% (i.e. 
the [[Prototype]] of the // %StringPrototype%) is also unmodified, that is no one sneaked a // @@toPrimitive into the %ObjectPrototype%. // // If all these assumptions hold, we can just take the [[StringData]] value // and return it. // TODO(bmeurer): This just repairs a regression introduced by removing the // weird (and broken) intrinsic %_IsStringWrapperSafeForDefaultValue, which // was intendend to something similar to this, although less efficient and // wrong in the presence of @@toPrimitive. Long-term we might want to move // into the direction of having a ToPrimitiveStub that can do common cases // while staying in JavaScript land (i.e. not going to C++). IfBuilder if_inputisstringwrapper(this); if_inputisstringwrapper.If( input_map, string_function_initial_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( input_prototype_map, string_function_prototype_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( Add(Add(input_prototype_map, nullptr, HObjectAccess::ForPrototype()), nullptr, HObjectAccess::ForMap()), object_function_prototype_map); if_inputisstringwrapper.Then(); { Push(BuildLoadNamedField( input, FieldIndex::ForInObjectOffset(JSValue::kValueOffset))); } if_inputisstringwrapper.Else(); { // TODO(bmeurer): Add support for fast ToPrimitive conversion using // a dedicated ToPrimitiveStub. Add(input); Push(Add(Runtime::FunctionForId(Runtime::kToPrimitive), 1)); } if_inputisstringwrapper.End(); return Pop(); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StringAddStub* stub = casted_stub(); StringAddFlags flags = stub->flags(); PretenureFlag pretenure_flag = stub->pretenure_flag(); HValue* left = GetParameter(StringAddStub::kLeft); HValue* right = GetParameter(StringAddStub::kRight); // Make sure that both arguments are strings if not known in advance. 
if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) { left = BuildToString(left, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) { right = BuildToString(right, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } return BuildStringAdd(left, right, HAllocationMode(pretenure_flag)); } Handle StringAddStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { ToBooleanStub* stub = casted_stub(); IfBuilder if_true(this); if_true.If(GetParameter(0), stub->types()); if_true.Then(); if_true.Return(graph()->GetConstantTrue()); if_true.Else(); if_true.End(); return graph()->GetConstantFalse(); } Handle ToBooleanStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StoreGlobalStub* stub = casted_stub(); HParameter* value = GetParameter(StoreDescriptor::kValueIndex); if (stub->check_global()) { // Check that the map of the global has not changed: use a placeholder map // that will be replaced later with the global object's map. HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex); HValue* proxy_map = Add(proxy, nullptr, HObjectAccess::ForMap()); HValue* global = Add(proxy_map, nullptr, HObjectAccess::ForPrototype()); HValue* map_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::global_map_placeholder(isolate()))); HValue* expected_map = Add( map_cell, nullptr, HObjectAccess::ForWeakCellValue()); HValue* map = Add(global, nullptr, HObjectAccess::ForMap()); IfBuilder map_check(this); map_check.IfNot(expected_map, map); map_check.ThenDeopt(Deoptimizer::kUnknownMap); map_check.End(); } HValue* weak_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::property_cell_placeholder(isolate()))); HValue* cell = Add(weak_cell, nullptr, HObjectAccess::ForWeakCellValue()); Add
code = chunk->Codegen(); if (FLAG_profile_hydrogen_code_stub_compilation) { OFStream os(stdout); os << "[Lazy compilation of " << stub << " took " << timer.Elapsed().InMillisecondsF() << " ms]" << std::endl; } return code; } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { info()->MarkAsSavesCallerDoubles(); HValue* number = GetParameter(NumberToStringStub::kNumber); return BuildNumberToString(number, Type::Number(zone())); } Handle NumberToStringStub::GenerateCode() { return DoGenerateCode(this); } // Returns the type string of a value; see ECMA-262, 11.4.3 (p 47). template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { Factory* factory = isolate()->factory(); HConstant* number_string = Add(factory->number_string()); HValue* object = GetParameter(TypeofStub::kObject); IfBuilder is_smi(this); HValue* smi_check = is_smi.If(object); is_smi.Then(); { Push(number_string); } is_smi.Else(); { IfBuilder is_number(this); is_number.If(object, isolate()->factory()->heap_number_map()); is_number.Then(); { Push(number_string); } is_number.Else(); { HValue* map = AddLoadMap(object, smi_check); HValue* instance_type = Add( map, nullptr, HObjectAccess::ForMapInstanceType()); IfBuilder is_string(this); is_string.If( instance_type, Add(FIRST_NONSTRING_TYPE), Token::LT); is_string.Then(); { Push(Add(factory->string_string())); } is_string.Else(); { HConstant* object_string = Add(factory->object_string()); IfBuilder is_oddball(this); is_oddball.If( instance_type, Add(ODDBALL_TYPE), Token::EQ); is_oddball.Then(); { Push(Add(object, nullptr, HObjectAccess::ForOddballTypeOf())); } is_oddball.Else(); { IfBuilder is_symbol(this); is_symbol.If( instance_type, Add(SYMBOL_TYPE), Token::EQ); is_symbol.Then(); { Push(Add(factory->symbol_string())); } is_symbol.Else(); { HValue* bit_field = Add( map, nullptr, HObjectAccess::ForMapBitField()); HValue* bit_field_masked = AddUncasted( Token::BIT_AND, bit_field, Add((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable))); IfBuilder 
is_function(this); is_function.If( bit_field_masked, Add(1 << Map::kIsCallable), Token::EQ); is_function.Then(); { Push(Add(factory->function_string())); } is_function.Else(); { #define SIMD128_BUILDER_OPEN(TYPE, Type, type, lane_count, lane_type) \ IfBuilder is_##type(this); \ is_##type.If( \ map, Add(factory->type##_map())); \ is_##type.Then(); \ { Push(Add(factory->type##_string())); } \ is_##type.Else(); { SIMD128_TYPES(SIMD128_BUILDER_OPEN) #undef SIMD128_BUILDER_OPEN // Is it an undetectable object? IfBuilder is_undetectable(this); is_undetectable.If( bit_field_masked, graph()->GetConstant0(), Token::NE); is_undetectable.Then(); { // typeof an undetectable object is 'undefined'. Push(Add(factory->undefined_string())); } is_undetectable.Else(); { // For any kind of object not handled above, the spec rule for // host objects gives that it is okay to return "object". Push(object_string); } #define SIMD128_BUILDER_CLOSE(TYPE, Type, type, lane_count, lane_type) } SIMD128_TYPES(SIMD128_BUILDER_CLOSE) #undef SIMD128_BUILDER_CLOSE } is_function.End(); } is_symbol.End(); } is_oddball.End(); } is_string.End(); } is_number.End(); } is_smi.End(); return environment()->Pop(); } Handle TypeofStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* closure = GetParameter(0); HValue* literal_index = GetParameter(1); // This stub is very performance sensitive, the generated code must be tuned // so that it doesn't build and eager frame. 
info()->MarkMustNotHaveEagerFrame(); HValue* literals_array = Add( closure, nullptr, HObjectAccess::ForLiteralsPointer()); HInstruction* boilerplate = Add( literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS, NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag); IfBuilder if_notundefined(this); if_notundefined.IfNot( boilerplate, graph()->GetConstantUndefined()); if_notundefined.Then(); { int result_size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; HValue* result = Add(Add(result_size), HType::JSObject(), NOT_TENURED, JS_REGEXP_TYPE); Add( result, HObjectAccess::ForMap(), Add(boilerplate, nullptr, HObjectAccess::ForMap())); Add( result, HObjectAccess::ForPropertiesPointer(), Add(boilerplate, nullptr, HObjectAccess::ForPropertiesPointer())); Add( result, HObjectAccess::ForElementsPointer(), Add(boilerplate, nullptr, HObjectAccess::ForElementsPointer())); for (int offset = JSObject::kHeaderSize; offset < result_size; offset += kPointerSize) { HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(offset); Add(result, access, Add(boilerplate, nullptr, access)); } Push(result); } if_notundefined.ElseDeopt(Deoptimizer::kUninitializedBoilerplateInFastClone); if_notundefined.End(); return Pop(); } Handle FastCloneRegExpStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { Factory* factory = isolate()->factory(); HValue* undefined = graph()->GetConstantUndefined(); AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode(); HValue* closure = GetParameter(0); HValue* literal_index = GetParameter(1); // This stub is very performance sensitive, the generated code must be tuned // so that it doesn't build and eager frame. 
info()->MarkMustNotHaveEagerFrame(); HValue* literals_array = Add( closure, nullptr, HObjectAccess::ForLiteralsPointer()); HInstruction* allocation_site = Add( literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS, NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag); IfBuilder checker(this); checker.IfNot(allocation_site, undefined); checker.Then(); HObjectAccess access = HObjectAccess::ForAllocationSiteOffset( AllocationSite::kTransitionInfoOffset); HInstruction* boilerplate = Add(allocation_site, nullptr, access); HValue* elements = AddLoadElements(boilerplate); HValue* capacity = AddLoadFixedArrayLength(elements); IfBuilder zero_capacity(this); zero_capacity.If(capacity, graph()->GetConstant0(), Token::EQ); zero_capacity.Then(); Push(BuildCloneShallowArrayEmpty(boilerplate, allocation_site, alloc_site_mode)); zero_capacity.Else(); IfBuilder if_fixed_cow(this); if_fixed_cow.If(elements, factory->fixed_cow_array_map()); if_fixed_cow.Then(); Push(BuildCloneShallowArrayCow(boilerplate, allocation_site, alloc_site_mode, FAST_ELEMENTS)); if_fixed_cow.Else(); IfBuilder if_fixed(this); if_fixed.If(elements, factory->fixed_array_map()); if_fixed.Then(); Push(BuildCloneShallowArrayNonEmpty(boilerplate, allocation_site, alloc_site_mode, FAST_ELEMENTS)); if_fixed.Else(); Push(BuildCloneShallowArrayNonEmpty(boilerplate, allocation_site, alloc_site_mode, FAST_DOUBLE_ELEMENTS)); if_fixed.End(); if_fixed_cow.End(); zero_capacity.End(); checker.ElseDeopt(Deoptimizer::kUninitializedBoilerplateLiterals); checker.End(); return environment()->Pop(); } Handle FastCloneShallowArrayStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* undefined = graph()->GetConstantUndefined(); HValue* closure = GetParameter(0); HValue* literal_index = GetParameter(1); HValue* literals_array = Add( closure, nullptr, HObjectAccess::ForLiteralsPointer()); HInstruction* allocation_site = Add( 
literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS, NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag); IfBuilder checker(this); checker.IfNot(allocation_site, undefined); checker.And(); HObjectAccess access = HObjectAccess::ForAllocationSiteOffset( AllocationSite::kTransitionInfoOffset); HInstruction* boilerplate = Add(allocation_site, nullptr, access); int length = casted_stub()->length(); if (length == 0) { // Empty objects have some slack added to them. length = JSObject::kInitialGlobalObjectUnusedPropertiesCount; } int size = JSObject::kHeaderSize + length * kPointerSize; int object_size = size; if (FLAG_allocation_site_pretenuring) { size += AllocationMemento::kSize; } HValue* boilerplate_map = Add(boilerplate, nullptr, HObjectAccess::ForMap()); HValue* boilerplate_size = Add( boilerplate_map, nullptr, HObjectAccess::ForMapInstanceSize()); HValue* size_in_words = Add(object_size >> kPointerSizeLog2); checker.If(boilerplate_size, size_in_words, Token::EQ); checker.Then(); HValue* size_in_bytes = Add(size); HInstruction* object = Add(size_in_bytes, HType::JSObject(), NOT_TENURED, JS_OBJECT_TYPE); for (int i = 0; i < object_size; i += kPointerSize) { HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(i); Add(object, access, Add(boilerplate, nullptr, access)); } DCHECK(FLAG_allocation_site_pretenuring || (size == object_size)); if (FLAG_allocation_site_pretenuring) { BuildCreateAllocationMemento( object, Add(object_size), allocation_site); } environment()->Push(object); checker.ElseDeopt(Deoptimizer::kUninitializedBoilerplateInFastClone); checker.End(); return environment()->Pop(); } Handle FastCloneShallowObjectStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { // This stub is performance sensitive, the generated code must be tuned // so that it doesn't build an eager frame. 
info()->MarkMustNotHaveEagerFrame(); HValue* size = Add(AllocationSite::kSize); HInstruction* object = Add(size, HType::JSObject(), TENURED, JS_OBJECT_TYPE); // Store the map Handle allocation_site_map = isolate()->factory()->allocation_site_map(); AddStoreMapConstant(object, allocation_site_map); // Store the payload (smi elements kind) HValue* initial_elements_kind = Add(GetInitialFastElementsKind()); Add(object, HObjectAccess::ForAllocationSiteOffset( AllocationSite::kTransitionInfoOffset), initial_elements_kind); // Unlike literals, constructed arrays don't have nested sites Add(object, HObjectAccess::ForAllocationSiteOffset( AllocationSite::kNestedSiteOffset), graph()->GetConstant0()); // Pretenuring calculation field. Add(object, HObjectAccess::ForAllocationSiteOffset( AllocationSite::kPretenureDataOffset), graph()->GetConstant0()); // Pretenuring memento creation count field. Add(object, HObjectAccess::ForAllocationSiteOffset( AllocationSite::kPretenureCreateCountOffset), graph()->GetConstant0()); // Store an empty fixed array for the code dependency. HConstant* empty_fixed_array = Add(isolate()->factory()->empty_fixed_array()); Add( object, HObjectAccess::ForAllocationSiteOffset( AllocationSite::kDependentCodeOffset), empty_fixed_array); // Link the object to the allocation site list HValue* site_list = Add( ExternalReference::allocation_sites_list_address(isolate())); HValue* site = Add(site_list, nullptr, HObjectAccess::ForAllocationSiteList()); // TODO(mvstanton): This is a store to a weak pointer, which we may want to // mark as such in order to skip the write barrier, once we have a unified // system for weakness. For now we decided to keep it like this because having // an initial write barrier backed store makes this pointer strong until the // next GC, and allocation sites are designed to survive several GCs anyway. 
Add( object, HObjectAccess::ForAllocationSiteOffset(AllocationSite::kWeakNextOffset), site); Add(site_list, HObjectAccess::ForAllocationSiteList(), object); HInstruction* feedback_vector = GetParameter(0); HInstruction* slot = GetParameter(1); Add(feedback_vector, slot, object, nullptr, FAST_ELEMENTS, INITIALIZING_STORE); return feedback_vector; } Handle CreateAllocationSiteStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { // This stub is performance sensitive, the generated code must be tuned // so that it doesn't build an eager frame. info()->MarkMustNotHaveEagerFrame(); HValue* size = Add(WeakCell::kSize); HInstruction* object = Add(size, HType::JSObject(), TENURED, JS_OBJECT_TYPE); Handle weak_cell_map = isolate()->factory()->weak_cell_map(); AddStoreMapConstant(object, weak_cell_map); HInstruction* value = GetParameter(CreateWeakCellDescriptor::kValueIndex); Add(object, HObjectAccess::ForWeakCellValue(), value); Add(object, HObjectAccess::ForWeakCellNext(), graph()->GetConstantHole()); HInstruction* feedback_vector = GetParameter(CreateWeakCellDescriptor::kVectorIndex); HInstruction* slot = GetParameter(CreateWeakCellDescriptor::kSlotIndex); Add(feedback_vector, slot, object, nullptr, FAST_ELEMENTS, INITIALIZING_STORE); return graph()->GetConstant0(); } Handle CreateWeakCellStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { int context_index = casted_stub()->context_index(); int slot_index = casted_stub()->slot_index(); HValue* script_context = BuildGetScriptContext(context_index); return Add(script_context, nullptr, HObjectAccess::ForContextSlot(slot_index)); } Handle LoadScriptContextFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { int context_index = casted_stub()->context_index(); int slot_index = casted_stub()->slot_index(); HValue* script_context = 
BuildGetScriptContext(context_index); Add(script_context, HObjectAccess::ForContextSlot(slot_index), GetParameter(2), STORE_TO_INITIALIZED_ENTRY); return GetParameter(2); } Handle StoreScriptContextFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); if (IsFastDoubleElementsKind(kind)) { info()->MarkAsSavesCallerDoubles(); } HValue* object = GetParameter(GrowArrayElementsDescriptor::kObjectIndex); HValue* key = GetParameter(GrowArrayElementsDescriptor::kKeyIndex); HValue* elements = AddLoadElements(object); HValue* current_capacity = Add( elements, nullptr, HObjectAccess::ForFixedArrayLength()); HValue* length = casted_stub()->is_js_array() ? Add(object, static_cast(NULL), HObjectAccess::ForArrayLength(kind)) : current_capacity; return BuildCheckAndGrowElementsCapacity(object, elements, kind, length, current_capacity, key); } Handle GrowArrayElementsStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { LoadKeyedHoleMode hole_mode = casted_stub()->convert_hole_to_undefined() ? CONVERT_HOLE_TO_UNDEFINED : NEVER_RETURN_HOLE; HInstruction* load = BuildUncheckedMonomorphicElementAccess( GetParameter(LoadDescriptor::kReceiverIndex), GetParameter(LoadDescriptor::kNameIndex), NULL, casted_stub()->is_js_array(), casted_stub()->elements_kind(), LOAD, hole_mode, STANDARD_STORE); return load; } Handle LoadFastElementStub::GenerateCode() { return DoGenerateCode(this); } HLoadNamedField* CodeStubGraphBuilderBase::BuildLoadNamedField( HValue* object, FieldIndex index) { Representation representation = index.is_double() ? Representation::Double() : Representation::Tagged(); int offset = index.offset(); HObjectAccess access = index.is_inobject() ? 
HObjectAccess::ForObservableJSObjectOffset(offset, representation) : HObjectAccess::ForBackingStoreOffset(offset, representation); if (index.is_double() && (!FLAG_unbox_double_fields || !index.is_inobject())) { // Load the heap number. object = Add( object, nullptr, access.WithRepresentation(Representation::Tagged())); // Load the double value from it. access = HObjectAccess::ForHeapNumberValue(); } return Add(object, nullptr, access); } template<> HValue* CodeStubGraphBuilder::BuildCodeStub() { return BuildLoadNamedField(GetParameter(0), casted_stub()->index()); } Handle LoadFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { return BuildArrayBufferViewFieldAccessor(GetParameter(0), nullptr, casted_stub()->index()); } Handle ArrayBufferViewLoadFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* map = AddLoadMap(GetParameter(0), NULL); HObjectAccess descriptors_access = HObjectAccess::ForObservableJSObjectOffset( Map::kDescriptorsOffset, Representation::Tagged()); HValue* descriptors = Add(map, nullptr, descriptors_access); HObjectAccess value_access = HObjectAccess::ForObservableJSObjectOffset( DescriptorArray::GetValueOffset(casted_stub()->constant_index())); return Add(descriptors, nullptr, value_access); } Handle LoadConstantStub::GenerateCode() { return DoGenerateCode(this); } HValue* CodeStubGraphBuilderBase::UnmappedCase(HValue* elements, HValue* key, HValue* value) { HValue* result = NULL; HInstruction* backing_store = Add(elements, graph()->GetConstant1(), nullptr, nullptr, FAST_ELEMENTS, ALLOW_RETURN_HOLE); Add(backing_store, isolate()->factory()->fixed_array_map()); HValue* backing_store_length = Add( backing_store, nullptr, HObjectAccess::ForFixedArrayLength()); IfBuilder in_unmapped_range(this); in_unmapped_range.If(key, backing_store_length, Token::LT); in_unmapped_range.Then(); { if (value == NULL) { result = 
Add(backing_store, key, nullptr, nullptr, FAST_HOLEY_ELEMENTS, NEVER_RETURN_HOLE); } else { Add(backing_store, key, value, nullptr, FAST_HOLEY_ELEMENTS); } } in_unmapped_range.ElseDeopt(Deoptimizer::kOutsideOfRange); in_unmapped_range.End(); return result; } HValue* CodeStubGraphBuilderBase::EmitKeyedSloppyArguments(HValue* receiver, HValue* key, HValue* value) { // Mapped arguments are actual arguments. Unmapped arguments are values added // to the arguments object after it was created for the call. Mapped arguments // are stored in the context at indexes given by elements[key + 2]. Unmapped // arguments are stored as regular indexed properties in the arguments array, // held at elements[1]. See NewSloppyArguments() in runtime.cc for a detailed // look at argument object construction. // // The sloppy arguments elements array has a special format: // // 0: context // 1: unmapped arguments array // 2: mapped_index0, // 3: mapped_index1, // ... // // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments). // If key + 2 >= elements.length then attempt to look in the unmapped // arguments array (given by elements[1]) and return the value at key, missing // to the runtime if the unmapped arguments array is not a fixed array or if // key >= unmapped_arguments_array.length. // // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value // in the unmapped arguments array, as described above. Otherwise, t is a Smi // index into the context array given at elements[0]. Return the value at // context[t]. 
bool is_load = value == NULL; key = AddUncasted(key, Representation::Smi()); IfBuilder positive_smi(this); positive_smi.If(key, graph()->GetConstant0(), Token::LT); positive_smi.ThenDeopt(Deoptimizer::kKeyIsNegative); positive_smi.End(); HValue* constant_two = Add(2); HValue* elements = AddLoadElements(receiver, nullptr); HValue* elements_length = Add( elements, nullptr, HObjectAccess::ForFixedArrayLength()); HValue* adjusted_length = AddUncasted(elements_length, constant_two); IfBuilder in_range(this); in_range.If(key, adjusted_length, Token::LT); in_range.Then(); { HValue* index = AddUncasted(key, constant_two); HInstruction* mapped_index = Add(elements, index, nullptr, nullptr, FAST_HOLEY_ELEMENTS, ALLOW_RETURN_HOLE); IfBuilder is_valid(this); is_valid.IfNot(mapped_index, graph()->GetConstantHole()); is_valid.Then(); { // TODO(mvstanton): I'd like to assert from this point, that if the // mapped_index is not the hole that it is indeed, a smi. An unnecessary // smi check is being emitted. HValue* the_context = Add(elements, graph()->GetConstant0(), nullptr, nullptr, FAST_ELEMENTS); STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize); if (is_load) { HValue* result = Add(the_context, mapped_index, nullptr, nullptr, FAST_ELEMENTS, ALLOW_RETURN_HOLE); environment()->Push(result); } else { DCHECK(value != NULL); Add(the_context, mapped_index, value, nullptr, FAST_ELEMENTS); environment()->Push(value); } } is_valid.Else(); { HValue* result = UnmappedCase(elements, key, value); environment()->Push(is_load ? result : value); } is_valid.End(); } in_range.Else(); { HValue* result = UnmappedCase(elements, key, value); environment()->Push(is_load ? 
result : value); } in_range.End(); return environment()->Pop(); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex); HValue* key = GetParameter(LoadDescriptor::kNameIndex); return EmitKeyedSloppyArguments(receiver, key, NULL); } Handle KeyedLoadSloppyArgumentsStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* receiver = GetParameter(StoreDescriptor::kReceiverIndex); HValue* key = GetParameter(StoreDescriptor::kNameIndex); HValue* value = GetParameter(StoreDescriptor::kValueIndex); return EmitKeyedSloppyArguments(receiver, key, value); } Handle KeyedStoreSloppyArgumentsStub::GenerateCode() { return DoGenerateCode(this); } void CodeStubGraphBuilderBase::BuildStoreNamedField( HValue* object, HValue* value, FieldIndex index, Representation representation, bool transition_to_field) { DCHECK(!index.is_double() || representation.IsDouble()); int offset = index.offset(); HObjectAccess access = index.is_inobject() ? HObjectAccess::ForObservableJSObjectOffset(offset, representation) : HObjectAccess::ForBackingStoreOffset(offset, representation); if (representation.IsDouble()) { if (!FLAG_unbox_double_fields || !index.is_inobject()) { HObjectAccess heap_number_access = access.WithRepresentation(Representation::Tagged()); if (transition_to_field) { // The store requires a mutable HeapNumber to be allocated. NoObservableSideEffectsScope no_side_effects(this); HInstruction* heap_number_size = Add(HeapNumber::kSize); // TODO(hpayer): Allocation site pretenuring support. HInstruction* heap_number = Add(heap_number_size, HType::HeapObject(), NOT_TENURED, MUTABLE_HEAP_NUMBER_TYPE); AddStoreMapConstant(heap_number, isolate()->factory()->mutable_heap_number_map()); Add(heap_number, HObjectAccess::ForHeapNumberValue(), value); // Store the new mutable heap number into the object. 
access = heap_number_access; value = heap_number; } else { // Load the heap number. object = Add(object, nullptr, heap_number_access); // Store the double value into it. access = HObjectAccess::ForHeapNumberValue(); } } } else if (representation.IsHeapObject()) { BuildCheckHeapObject(value); } Add(object, access, value, INITIALIZING_STORE); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BuildStoreNamedField(GetParameter(0), GetParameter(2), casted_stub()->index(), casted_stub()->representation(), false); return GetParameter(2); } Handle StoreFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* object = GetParameter(StoreTransitionHelper::ReceiverIndex()); switch (casted_stub()->store_mode()) { case StoreTransitionStub::ExtendStorageAndStoreMapAndValue: { HValue* properties = Add( object, nullptr, HObjectAccess::ForPropertiesPointer()); HValue* length = AddLoadFixedArrayLength(properties); HValue* delta = Add(static_cast(JSObject::kFieldsAdded)); HValue* new_capacity = AddUncasted(length, delta); // Grow properties array. ElementsKind kind = FAST_ELEMENTS; Add(new_capacity, Add((Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) >> ElementsKindToShiftSize(kind))); // Reuse this code for properties backing store allocation. HValue* new_properties = BuildAllocateAndInitializeArray(kind, new_capacity); BuildCopyProperties(properties, new_properties, length, new_capacity); Add(object, HObjectAccess::ForPropertiesPointer(), new_properties); } // Fall through. case StoreTransitionStub::StoreMapAndValue: // Store the new value into the "extended" object. BuildStoreNamedField( object, GetParameter(StoreTransitionHelper::ValueIndex()), casted_stub()->index(), casted_stub()->representation(), true); // Fall through. case StoreTransitionStub::StoreMapOnly: // And finally update the map. 
Add(object, HObjectAccess::ForMap(), GetParameter(StoreTransitionHelper::MapIndex())); break; } return GetParameter(StoreTransitionHelper::ValueIndex()); } Handle StoreTransitionStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BuildUncheckedMonomorphicElementAccess( GetParameter(StoreDescriptor::kReceiverIndex), GetParameter(StoreDescriptor::kNameIndex), GetParameter(StoreDescriptor::kValueIndex), casted_stub()->is_js_array(), casted_stub()->elements_kind(), STORE, NEVER_RETURN_HOLE, casted_stub()->store_mode()); return GetParameter(2); } Handle StoreFastElementStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { info()->MarkAsSavesCallerDoubles(); BuildTransitionElementsKind(GetParameter(0), GetParameter(1), casted_stub()->from_kind(), casted_stub()->to_kind(), casted_stub()->is_js_array()); return GetParameter(0); } Handle TransitionElementsKindStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(Add(HeapNumber::kSize), HType::HeapNumber(), NOT_TENURED, HEAP_NUMBER_TYPE); AddStoreMapConstant(result, isolate()->factory()->heap_number_map()); return result; } Handle AllocateHeapNumberStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(Add(HeapNumber::kSize), HType::HeapObject(), NOT_TENURED, MUTABLE_HEAP_NUMBER_TYPE); AddStoreMapConstant(result, isolate()->factory()->mutable_heap_number_map()); return result; } Handle AllocateMutableHeapNumberStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(GetParameter(0), HType::Tagged(), NOT_TENURED, JS_OBJECT_TYPE); return result; } Handle AllocateInNewSpaceStub::GenerateCode() { return DoGenerateCode(this); } HValue* 
CodeStubGraphBuilderBase::BuildArrayConstructor( ElementsKind kind, AllocationSiteOverrideMode override_mode, ArgumentClass argument_class) { HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor); HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite); JSArrayBuilder array_builder(this, kind, alloc_site, constructor, override_mode); HValue* result = NULL; switch (argument_class) { case NONE: // This stub is very performance sensitive, the generated code must be // tuned so that it doesn't build and eager frame. info()->MarkMustNotHaveEagerFrame(); result = array_builder.AllocateEmptyArray(); break; case SINGLE: result = BuildArraySingleArgumentConstructor(&array_builder); break; case MULTIPLE: result = BuildArrayNArgumentsConstructor(&array_builder, kind); break; } return result; } HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor( ElementsKind kind, ArgumentClass argument_class) { HValue* constructor = GetParameter( InternalArrayConstructorStubBase::kConstructor); JSArrayBuilder array_builder(this, kind, constructor); HValue* result = NULL; switch (argument_class) { case NONE: // This stub is very performance sensitive, the generated code must be // tuned so that it doesn't build and eager frame. info()->MarkMustNotHaveEagerFrame(); result = array_builder.AllocateEmptyArray(); break; case SINGLE: result = BuildArraySingleArgumentConstructor(&array_builder); break; case MULTIPLE: result = BuildArrayNArgumentsConstructor(&array_builder, kind); break; } return result; } HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor( JSArrayBuilder* array_builder) { // Smi check and range check on the input arg. 
HValue* constant_one = graph()->GetConstant1(); HValue* constant_zero = graph()->GetConstant0(); HInstruction* elements = Add(false); HInstruction* argument = Add( elements, constant_one, constant_zero); return BuildAllocateArrayFromLength(array_builder, argument); } HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor( JSArrayBuilder* array_builder, ElementsKind kind) { // Insert a bounds check because the number of arguments might exceed // the kInitialMaxFastElementArray limit. This cannot happen for code // that was parsed, but calling via Array.apply(thisArg, [...]) might // trigger it. HValue* length = GetArgumentsLength(); HConstant* max_alloc_length = Add(JSArray::kInitialMaxFastElementArray); HValue* checked_length = Add(length, max_alloc_length); // We need to fill with the hole if it's a smi array in the multi-argument // case because we might have to bail out while copying arguments into // the array because they aren't compatible with a smi array. // If it's a double array, no problem, and if it's fast then no // problem either because doubles are boxed. // // TODO(mvstanton): consider an instruction to memset fill the array // with zero in this case instead. JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind) ? JSArrayBuilder::FILL_WITH_HOLE : JSArrayBuilder::DONT_FILL_WITH_HOLE; HValue* new_object = array_builder->AllocateArray(checked_length, max_alloc_length, checked_length, fill_mode); HValue* elements = array_builder->GetElementsLocation(); DCHECK(elements != NULL); // Now populate the elements correctly. 
// NOTE(review): this chunk has been mangled — every template-argument list
// ("<...>") was stripped (e.g. "Add(false)" for what reads like
// "Add<HArgumentsElements>(false)", bare "Handle" for "Handle<Code>", and
// "CodeStubGraphBuilder::BuildCodeStub()" missing its stub specialization).
// The code below cannot compile as written; restore the stripped template
// arguments from the upstream revision before making functional edits. The
// comments inserted between the original lines annotate visible structure only.
//
// The first statements continue a builder body that begins before this chunk:
// a post-increment LoopBuilder copies `checked_length` caller arguments (via
// HArgumentsElements/keyed loads — template args stripped) into `elements`,
// then returns `new_object`. After it, each (Internal)Array constructor stub
// builder reduces to BuildArrayConstructor / BuildInternalArrayConstructor
// with an argument-count disposition of NONE, SINGLE or MULTIPLE.
LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement); HValue* start = graph()->GetConstant0(); HValue* key = builder.BeginBody(start, checked_length, Token::LT); HInstruction* argument_elements = Add(false); HInstruction* argument = Add( argument_elements, checked_length, key); Add(elements, key, argument, nullptr, kind); builder.EndBody(); return new_object; } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, NONE); } Handle ArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, SINGLE); } Handle ArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, MULTIPLE); } Handle ArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, NONE); } Handle InternalArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, SINGLE); } Handle InternalArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = 
// CompareNilIC builder: BuildCompareNil branches on the stub's recorded nil
// type (maps embedded via weak cells); true-reachable arm returns true,
// false-reachable arm returns false, otherwise undefined. The BinaryOpIC
// builder that follows asserts that neither operand type is Type::None unless
// the op has side effects, then special-cases Token::ADD where exactly one
// operand *may* be a string (performance-critical string addition, per the
// in-line comment).
casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, MULTIPLE); } Handle InternalArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { Isolate* isolate = graph()->isolate(); CompareNilICStub* stub = casted_stub(); HIfContinuation continuation; Handle sentinel_map(isolate->heap()->meta_map()); Type* type = stub->GetType(zone(), sentinel_map); BuildCompareNil(GetParameter(0), type, &continuation, kEmbedMapsViaWeakCells); IfBuilder if_nil(this, &continuation); if_nil.Then(); if (continuation.IsFalseReachable()) { if_nil.Else(); if_nil.Return(graph()->GetConstantFalse()); } if_nil.End(); return continuation.IsTrueReachable() ? graph()->GetConstantTrue() : graph()->GetConstantUndefined(); } Handle CompareNilICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { BinaryOpICState state = casted_stub()->state(); HValue* left = GetParameter(BinaryOpICStub::kLeft); HValue* right = GetParameter(BinaryOpICStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) && (state.HasSideEffects() || !result_type->Is(Type::None()))); HValue* result = NULL; HAllocationMode allocation_mode(NOT_TENURED); if (state.op() == Token::ADD && (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) && !left_type->Is(Type::String()) && !right_type->Is(Type::String())) { // For the generic add stub a fast case for string addition is performance // critical. 
// Branch on the string-ness of whichever operand may be a string (the
// "if_leftisstring" / "if_rightisstring" IfBuilders — their branch condition
// template arguments were stripped); every arm funnels into
// BuildBinaryOperation with Type::String substituted for the proven-string
// side, so both paths produce the same operation shape.
if (left_type->Maybe(Type::String())) { IfBuilder if_leftisstring(this); if_leftisstring.If(left); if_leftisstring.Then(); { Push(BuildBinaryOperation(state.op(), left, right, Type::String(zone()), right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_leftisstring.Else(); { Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_leftisstring.End(); result = Pop(); } else { IfBuilder if_rightisstring(this); if_rightisstring.If(right); if_rightisstring.Then(); { Push(BuildBinaryOperation(state.op(), left, right, left_type, Type::String(zone()), result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_rightisstring.Else(); { Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_rightisstring.End(); result = Pop(); } } else { result = BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } // If we encounter a generic argument, the number conversion is // observable, thus we cannot afford to bail out after the fact. 
// Side-effect-free ops clamp the result to the recorded result type (see the
// carried comment above). BinaryOpWithAllocationSite is the same operation but
// with a tenuring decision driven by the passed allocation site. BuildToString
// implements: smi -> NumberToString; already-a-string -> unchanged; other
// primitives (FIRST_PRIMITIVE_TYPE..LAST_PRIMITIVE_TYPE) -> ToStringStub call;
// non-primitives go through BuildToPrimitive first (ES6 12.7.3, per the
// in-line comment).
if (!state.HasSideEffects()) { result = EnforceNumberType(result, result_type); } return result; } Handle BinaryOpICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BinaryOpICState state = casted_stub()->state(); HValue* allocation_site = GetParameter( BinaryOpWithAllocationSiteStub::kAllocationSite); HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft); HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); HAllocationMode allocation_mode(allocation_site); return BuildBinaryOperation(state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } Handle BinaryOpWithAllocationSiteStub::GenerateCode() { return DoGenerateCode(this); } HValue* CodeStubGraphBuilderBase::BuildToString(HValue* input, bool convert) { if (!convert) return BuildCheckString(input); IfBuilder if_inputissmi(this); HValue* inputissmi = if_inputissmi.If(input); if_inputissmi.Then(); { // Convert the input smi to a string. Push(BuildNumberToString(input, Type::SignedSmall())); } if_inputissmi.Else(); { HValue* input_map = Add(input, inputissmi, HObjectAccess::ForMap()); HValue* input_instance_type = Add( input_map, inputissmi, HObjectAccess::ForMapInstanceType()); IfBuilder if_inputisstring(this); if_inputisstring.If( input_instance_type, Add(FIRST_NONSTRING_TYPE), Token::LT); if_inputisstring.Then(); { // The input is already a string. Push(input); } if_inputisstring.Else(); { // Convert to primitive first (if necessary), see // ES6 section 12.7.3 The Addition operator. IfBuilder if_inputisprimitive(this); STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE); if_inputisprimitive.If( input_instance_type, Add(LAST_PRIMITIVE_TYPE), Token::LTE); if_inputisprimitive.Then(); { // The input is already a primitive. 
// BuildToPrimitive below loads four pieces of the native context up front
// (%ObjectPrototype% map, %StringPrototype% map, the String function and its
// initial map) plus the [[Prototype]] chain maps of {input}, to decide whether
// the string-wrapper shortcut described in the carried comment applies.
Push(input); } if_inputisprimitive.Else(); { // Convert the input to a primitive. Push(BuildToPrimitive(input, input_map)); } if_inputisprimitive.End(); // Convert the primitive to a string value. ToStringDescriptor descriptor(isolate()); ToStringStub stub(isolate()); HValue* values[] = {context(), Pop()}; Push(AddUncasted( Add(stub.GetCode()), 0, descriptor, Vector(values, arraysize(values)))); } if_inputisstring.End(); } if_inputissmi.End(); return Pop(); } HValue* CodeStubGraphBuilderBase::BuildToPrimitive(HValue* input, HValue* input_map) { // Get the native context of the caller. HValue* native_context = BuildGetNativeContext(); // Determine the initial map of the %ObjectPrototype%. HValue* object_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::OBJECT_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the %StringPrototype%. HValue* string_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the String function. HValue* string_function = Add( native_context, nullptr, HObjectAccess::ForContextSlot(Context::STRING_FUNCTION_INDEX)); HValue* string_function_initial_map = Add( string_function, nullptr, HObjectAccess::ForPrototypeOrInitialMap()); // Determine the map of the [[Prototype]] of {input}. HValue* input_prototype = Add(input_map, nullptr, HObjectAccess::ForPrototype()); HValue* input_prototype_map = Add(input_prototype, nullptr, HObjectAccess::ForMap()); // For string wrappers (JSValue instances with [[StringData]] internal // fields), we can shortcirciut the ToPrimitive if // // (a) the {input} map matches the initial map of the String function, // (b) the {input} [[Prototype]] is the unmodified %StringPrototype% (i.e. // no one monkey-patched toString, @@toPrimitive or valueOf), and // (c) the %ObjectPrototype% (i.e. 
// Three-way map comparison (conditions (a)-(c) of the carried comment): if all
// hold, the JSValue's [[StringData]] field is returned directly; otherwise the
// slow path calls Runtime::kToPrimitive. The StringAdd builder that follows
// applies BuildToString coercions gated by the STRING_ADD_CHECK_LEFT/RIGHT
// flags before BuildStringAdd.
the [[Prototype]] of the // %StringPrototype%) is also unmodified, that is no one sneaked a // @@toPrimitive into the %ObjectPrototype%. // // If all these assumptions hold, we can just take the [[StringData]] value // and return it. // TODO(bmeurer): This just repairs a regression introduced by removing the // weird (and broken) intrinsic %_IsStringWrapperSafeForDefaultValue, which // was intendend to something similar to this, although less efficient and // wrong in the presence of @@toPrimitive. Long-term we might want to move // into the direction of having a ToPrimitiveStub that can do common cases // while staying in JavaScript land (i.e. not going to C++). IfBuilder if_inputisstringwrapper(this); if_inputisstringwrapper.If( input_map, string_function_initial_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( input_prototype_map, string_function_prototype_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( Add(Add(input_prototype_map, nullptr, HObjectAccess::ForPrototype()), nullptr, HObjectAccess::ForMap()), object_function_prototype_map); if_inputisstringwrapper.Then(); { Push(BuildLoadNamedField( input, FieldIndex::ForInObjectOffset(JSValue::kValueOffset))); } if_inputisstringwrapper.Else(); { // TODO(bmeurer): Add support for fast ToPrimitive conversion using // a dedicated ToPrimitiveStub. Add(input); Push(Add(Runtime::FunctionForId(Runtime::kToPrimitive), 1)); } if_inputisstringwrapper.End(); return Pop(); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StringAddStub* stub = casted_stub(); StringAddFlags flags = stub->flags(); PretenureFlag pretenure_flag = stub->pretenure_flag(); HValue* left = GetParameter(StringAddStub::kLeft); HValue* right = GetParameter(StringAddStub::kRight); // Make sure that both arguments are strings if not known in advance. 
// ToBoolean builder: a single branch over the stub's recorded type set returns
// true/false constants. The StoreGlobal builder after it checks the global
// proxy's map against a placeholder weak cell (to be patched with the real
// global map) and deopts on mismatch.
// NOTE(review): this physical line ends mid-expression ("Add") — the rest of
// the StoreGlobal builder was cut off by the chunking; do not edit past here
// without the missing continuation.
if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) { left = BuildToString(left, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) { right = BuildToString(right, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } return BuildStringAdd(left, right, HAllocationMode(pretenure_flag)); } Handle StringAddStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { ToBooleanStub* stub = casted_stub(); IfBuilder if_true(this); if_true.If(GetParameter(0), stub->types()); if_true.Then(); if_true.Return(graph()->GetConstantTrue()); if_true.Else(); if_true.End(); return graph()->GetConstantFalse(); } Handle ToBooleanStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StoreGlobalStub* stub = casted_stub(); HParameter* value = GetParameter(StoreDescriptor::kValueIndex); if (stub->check_global()) { // Check that the map of the global has not changed: use a placeholder map // that will be replaced later with the global object's map. HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex); HValue* proxy_map = Add(proxy, nullptr, HObjectAccess::ForMap()); HValue* global = Add(proxy_map, nullptr, HObjectAccess::ForPrototype()); HValue* map_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::global_map_placeholder(isolate()))); HValue* expected_map = Add( map_cell, nullptr, HObjectAccess::ForWeakCellValue()); HValue* map = Add(global, nullptr, HObjectAccess::ForMap()); IfBuilder map_check(this); map_check.IfNot(expected_map, map); map_check.ThenDeopt(Deoptimizer::kUnknownMap); map_check.End(); } HValue* weak_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::property_cell_placeholder(isolate()))); HValue* cell = Add(weak_cell, nullptr, HObjectAccess::ForWeakCellValue()); Add
// NOTE(review): the chunk re-starts here with a duplicated fragment of
// NumberToStringStub::GenerateCode (the same text appears in the file header),
// so this line does NOT continue the truncated StoreGlobal builder above. The
// same template-stripping caveat applies to everything below: all "<...>"
// template-argument lists were removed and must be restored from upstream
// before functional edits.
//
// Typeof builder: dispatches per ECMA-262 11.4.3 — smi/heap-number ->
// "number", string instance types (< FIRST_NONSTRING_TYPE) -> "string",
// oddballs -> their cached typeof string, symbols -> "symbol", callable
// non-undetectable maps -> "function"; the SIMD128_TYPES macro pair expands
// one map-compare IfBuilder per SIMD type.
NumberToStringStub::GenerateCode() { return DoGenerateCode(this); } // Returns the type string of a value; see ECMA-262, 11.4.3 (p 47). template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { Factory* factory = isolate()->factory(); HConstant* number_string = Add(factory->number_string()); HValue* object = GetParameter(TypeofStub::kObject); IfBuilder is_smi(this); HValue* smi_check = is_smi.If(object); is_smi.Then(); { Push(number_string); } is_smi.Else(); { IfBuilder is_number(this); is_number.If(object, isolate()->factory()->heap_number_map()); is_number.Then(); { Push(number_string); } is_number.Else(); { HValue* map = AddLoadMap(object, smi_check); HValue* instance_type = Add( map, nullptr, HObjectAccess::ForMapInstanceType()); IfBuilder is_string(this); is_string.If( instance_type, Add(FIRST_NONSTRING_TYPE), Token::LT); is_string.Then(); { Push(Add(factory->string_string())); } is_string.Else(); { HConstant* object_string = Add(factory->object_string()); IfBuilder is_oddball(this); is_oddball.If( instance_type, Add(ODDBALL_TYPE), Token::EQ); is_oddball.Then(); { Push(Add(object, nullptr, HObjectAccess::ForOddballTypeOf())); } is_oddball.Else(); { IfBuilder is_symbol(this); is_symbol.If( instance_type, Add(SYMBOL_TYPE), Token::EQ); is_symbol.Then(); { Push(Add(factory->symbol_string())); } is_symbol.Else(); { HValue* bit_field = Add( map, nullptr, HObjectAccess::ForMapBitField()); HValue* bit_field_masked = AddUncasted( Token::BIT_AND, bit_field, Add((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable))); IfBuilder is_function(this); is_function.If( bit_field_masked, Add(1 << Map::kIsCallable), Token::EQ); is_function.Then(); { Push(Add(factory->function_string())); } is_function.Else(); { #define SIMD128_BUILDER_OPEN(TYPE, Type, type, lane_count, lane_type) \ IfBuilder is_##type(this); \ is_##type.If( \ map, Add(factory->type##_map())); \ is_##type.Then(); \ { Push(Add(factory->type##_string())); } \ is_##type.Else(); { SIMD128_TYPES(SIMD128_BUILDER_OPEN) 
// Final typeof arms: undetectable objects (masked bit field != 0) report
// "undefined"; anything else falls back to "object" (the host-object rule in
// the carried comment). SIMD128_BUILDER_CLOSE emits the matching closing brace
// for each IfBuilder opened above. The FastCloneRegExp builder that follows is
// marked MarkMustNotHaveEagerFrame (performance-sensitive, per its comment).
#undef SIMD128_BUILDER_OPEN // Is it an undetectable object? IfBuilder is_undetectable(this); is_undetectable.If( bit_field_masked, graph()->GetConstant0(), Token::NE); is_undetectable.Then(); { // typeof an undetectable object is 'undefined'. Push(Add(factory->undefined_string())); } is_undetectable.Else(); { // For any kind of object not handled above, the spec rule for // host objects gives that it is okay to return "object". Push(object_string); } #define SIMD128_BUILDER_CLOSE(TYPE, Type, type, lane_count, lane_type) } SIMD128_TYPES(SIMD128_BUILDER_CLOSE) #undef SIMD128_BUILDER_CLOSE } is_function.End(); } is_symbol.End(); } is_oddball.End(); } is_string.End(); } is_number.End(); } is_smi.End(); return environment()->Pop(); } Handle TypeofStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* closure = GetParameter(0); HValue* literal_index = GetParameter(1); // This stub is very performance sensitive, the generated code must be tuned // so that it doesn't build and eager frame. 
// FastCloneRegExp: loads the boilerplate from the closure's literals array at
// `literal_index`; when defined, allocates a JSRegExp-sized object and copies
// map, properties, elements and every in-object field word-by-word, else
// deopts (kUninitializedBoilerplateInFastClone).
info()->MarkMustNotHaveEagerFrame(); HValue* literals_array = Add( closure, nullptr, HObjectAccess::ForLiteralsPointer()); HInstruction* boilerplate = Add( literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS, NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag); IfBuilder if_notundefined(this); if_notundefined.IfNot( boilerplate, graph()->GetConstantUndefined()); if_notundefined.Then(); { int result_size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; HValue* result = Add(Add(result_size), HType::JSObject(), NOT_TENURED, JS_REGEXP_TYPE); Add( result, HObjectAccess::ForMap(), Add(boilerplate, nullptr, HObjectAccess::ForMap())); Add( result, HObjectAccess::ForPropertiesPointer(), Add(boilerplate, nullptr, HObjectAccess::ForPropertiesPointer())); Add( result, HObjectAccess::ForElementsPointer(), Add(boilerplate, nullptr, HObjectAccess::ForElementsPointer())); for (int offset = JSObject::kHeaderSize; offset < result_size; offset += kPointerSize) { HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(offset); Add(result, access, Add(boilerplate, nullptr, access)); } Push(result); } if_notundefined.ElseDeopt(Deoptimizer::kUninitializedBoilerplateInFastClone); if_notundefined.End(); return Pop(); } Handle FastCloneRegExpStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { Factory* factory = isolate()->factory(); HValue* undefined = graph()->GetConstantUndefined(); AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode(); HValue* closure = GetParameter(0); HValue* literal_index = GetParameter(1); // This stub is very performance sensitive, the generated code must be tuned // so that it doesn't build and eager frame. 
// FastCloneShallowArray: reads the AllocationSite's transition-info
// boilerplate and selects a clone path by the boilerplate's elements —
// zero capacity -> BuildCloneShallowArrayEmpty; COW map ->
// BuildCloneShallowArrayCow; fixed-array map -> non-empty FAST_ELEMENTS clone;
// otherwise FAST_DOUBLE_ELEMENTS. Undefined site slot deopts.
info()->MarkMustNotHaveEagerFrame(); HValue* literals_array = Add( closure, nullptr, HObjectAccess::ForLiteralsPointer()); HInstruction* allocation_site = Add( literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS, NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag); IfBuilder checker(this); checker.IfNot(allocation_site, undefined); checker.Then(); HObjectAccess access = HObjectAccess::ForAllocationSiteOffset( AllocationSite::kTransitionInfoOffset); HInstruction* boilerplate = Add(allocation_site, nullptr, access); HValue* elements = AddLoadElements(boilerplate); HValue* capacity = AddLoadFixedArrayLength(elements); IfBuilder zero_capacity(this); zero_capacity.If(capacity, graph()->GetConstant0(), Token::EQ); zero_capacity.Then(); Push(BuildCloneShallowArrayEmpty(boilerplate, allocation_site, alloc_site_mode)); zero_capacity.Else(); IfBuilder if_fixed_cow(this); if_fixed_cow.If(elements, factory->fixed_cow_array_map()); if_fixed_cow.Then(); Push(BuildCloneShallowArrayCow(boilerplate, allocation_site, alloc_site_mode, FAST_ELEMENTS)); if_fixed_cow.Else(); IfBuilder if_fixed(this); if_fixed.If(elements, factory->fixed_array_map()); if_fixed.Then(); Push(BuildCloneShallowArrayNonEmpty(boilerplate, allocation_site, alloc_site_mode, FAST_ELEMENTS)); if_fixed.Else(); Push(BuildCloneShallowArrayNonEmpty(boilerplate, allocation_site, alloc_site_mode, FAST_DOUBLE_ELEMENTS)); if_fixed.End(); if_fixed_cow.End(); zero_capacity.End(); checker.ElseDeopt(Deoptimizer::kUninitializedBoilerplateLiterals); checker.End(); return environment()->Pop(); } Handle FastCloneShallowArrayStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* undefined = graph()->GetConstantUndefined(); HValue* closure = GetParameter(0); HValue* literal_index = GetParameter(1); HValue* literals_array = Add( closure, nullptr, HObjectAccess::ForLiteralsPointer()); HInstruction* allocation_site = Add( 
// FastCloneShallowObject: verifies the boilerplate's instance size matches the
// stub's expected size (else deopt), copies the object word-by-word, and —
// when FLAG_allocation_site_pretenuring — appends an AllocationMemento after
// the object proper (hence size vs object_size).
literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS, NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag); IfBuilder checker(this); checker.IfNot(allocation_site, undefined); checker.And(); HObjectAccess access = HObjectAccess::ForAllocationSiteOffset( AllocationSite::kTransitionInfoOffset); HInstruction* boilerplate = Add(allocation_site, nullptr, access); int length = casted_stub()->length(); if (length == 0) { // Empty objects have some slack added to them. length = JSObject::kInitialGlobalObjectUnusedPropertiesCount; } int size = JSObject::kHeaderSize + length * kPointerSize; int object_size = size; if (FLAG_allocation_site_pretenuring) { size += AllocationMemento::kSize; } HValue* boilerplate_map = Add(boilerplate, nullptr, HObjectAccess::ForMap()); HValue* boilerplate_size = Add( boilerplate_map, nullptr, HObjectAccess::ForMapInstanceSize()); HValue* size_in_words = Add(object_size >> kPointerSizeLog2); checker.If(boilerplate_size, size_in_words, Token::EQ); checker.Then(); HValue* size_in_bytes = Add(size); HInstruction* object = Add(size_in_bytes, HType::JSObject(), NOT_TENURED, JS_OBJECT_TYPE); for (int i = 0; i < object_size; i += kPointerSize) { HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(i); Add(object, access, Add(boilerplate, nullptr, access)); } DCHECK(FLAG_allocation_site_pretenuring || (size == object_size)); if (FLAG_allocation_site_pretenuring) { BuildCreateAllocationMemento( object, Add(object_size), allocation_site); } environment()->Push(object); checker.ElseDeopt(Deoptimizer::kUninitializedBoilerplateInFastClone); checker.End(); return environment()->Pop(); } Handle FastCloneShallowObjectStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { // This stub is performance sensitive, the generated code must be tuned // so that it doesn't build an eager frame. 
// CreateAllocationSite: tenured allocation initialized field-by-field
// (transition info = initial fast elements kind, nested site / pretenure data
// / create count = 0, dependent code = empty fixed array), then linked at the
// head of the isolate's allocation-site list (write-barriered on purpose, per
// the carried TODO(mvstanton) comment).
info()->MarkMustNotHaveEagerFrame(); HValue* size = Add(AllocationSite::kSize); HInstruction* object = Add(size, HType::JSObject(), TENURED, JS_OBJECT_TYPE); // Store the map Handle allocation_site_map = isolate()->factory()->allocation_site_map(); AddStoreMapConstant(object, allocation_site_map); // Store the payload (smi elements kind) HValue* initial_elements_kind = Add(GetInitialFastElementsKind()); Add(object, HObjectAccess::ForAllocationSiteOffset( AllocationSite::kTransitionInfoOffset), initial_elements_kind); // Unlike literals, constructed arrays don't have nested sites Add(object, HObjectAccess::ForAllocationSiteOffset( AllocationSite::kNestedSiteOffset), graph()->GetConstant0()); // Pretenuring calculation field. Add(object, HObjectAccess::ForAllocationSiteOffset( AllocationSite::kPretenureDataOffset), graph()->GetConstant0()); // Pretenuring memento creation count field. Add(object, HObjectAccess::ForAllocationSiteOffset( AllocationSite::kPretenureCreateCountOffset), graph()->GetConstant0()); // Store an empty fixed array for the code dependency. HConstant* empty_fixed_array = Add(isolate()->factory()->empty_fixed_array()); Add( object, HObjectAccess::ForAllocationSiteOffset( AllocationSite::kDependentCodeOffset), empty_fixed_array); // Link the object to the allocation site list HValue* site_list = Add( ExternalReference::allocation_sites_list_address(isolate())); HValue* site = Add(site_list, nullptr, HObjectAccess::ForAllocationSiteList()); // TODO(mvstanton): This is a store to a weak pointer, which we may want to // mark as such in order to skip the write barrier, once we have a unified // system for weakness. For now we decided to keep it like this because having // an initial write barrier backed store makes this pointer strong until the // next GC, and allocation sites are designed to survive several GCs anyway. 
// The new site is stored into the feedback vector at `slot`.
// CreateWeakCell: tenured WeakCell holding `value` (next = the-hole), also
// stored into the feedback vector. Load/StoreScriptContextField read/write a
// fixed slot of the script context at the stub's recorded context/slot index.
Add( object, HObjectAccess::ForAllocationSiteOffset(AllocationSite::kWeakNextOffset), site); Add(site_list, HObjectAccess::ForAllocationSiteList(), object); HInstruction* feedback_vector = GetParameter(0); HInstruction* slot = GetParameter(1); Add(feedback_vector, slot, object, nullptr, FAST_ELEMENTS, INITIALIZING_STORE); return feedback_vector; } Handle CreateAllocationSiteStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { // This stub is performance sensitive, the generated code must be tuned // so that it doesn't build an eager frame. info()->MarkMustNotHaveEagerFrame(); HValue* size = Add(WeakCell::kSize); HInstruction* object = Add(size, HType::JSObject(), TENURED, JS_OBJECT_TYPE); Handle weak_cell_map = isolate()->factory()->weak_cell_map(); AddStoreMapConstant(object, weak_cell_map); HInstruction* value = GetParameter(CreateWeakCellDescriptor::kValueIndex); Add(object, HObjectAccess::ForWeakCellValue(), value); Add(object, HObjectAccess::ForWeakCellNext(), graph()->GetConstantHole()); HInstruction* feedback_vector = GetParameter(CreateWeakCellDescriptor::kVectorIndex); HInstruction* slot = GetParameter(CreateWeakCellDescriptor::kSlotIndex); Add(feedback_vector, slot, object, nullptr, FAST_ELEMENTS, INITIALIZING_STORE); return graph()->GetConstant0(); } Handle CreateWeakCellStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { int context_index = casted_stub()->context_index(); int slot_index = casted_stub()->slot_index(); HValue* script_context = BuildGetScriptContext(context_index); return Add(script_context, nullptr, HObjectAccess::ForContextSlot(slot_index)); } Handle LoadScriptContextFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { int context_index = casted_stub()->context_index(); int slot_index = casted_stub()->slot_index(); HValue* script_context = 
// GrowArrayElements: marks SavesCallerDoubles for double kinds, then grows the
// backing store via BuildCheckAndGrowElementsCapacity (length is the JSArray
// length for js-arrays, else the current capacity). LoadFastElement is a
// monomorphic keyed load with the stub's hole-conversion mode.
// NOTE(review): "static_cast(NULL)" here has lost its target type to the same
// template-stripping as the rest of the chunk.
BuildGetScriptContext(context_index); Add(script_context, HObjectAccess::ForContextSlot(slot_index), GetParameter(2), STORE_TO_INITIALIZED_ENTRY); return GetParameter(2); } Handle StoreScriptContextFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); if (IsFastDoubleElementsKind(kind)) { info()->MarkAsSavesCallerDoubles(); } HValue* object = GetParameter(GrowArrayElementsDescriptor::kObjectIndex); HValue* key = GetParameter(GrowArrayElementsDescriptor::kKeyIndex); HValue* elements = AddLoadElements(object); HValue* current_capacity = Add( elements, nullptr, HObjectAccess::ForFixedArrayLength()); HValue* length = casted_stub()->is_js_array() ? Add(object, static_cast(NULL), HObjectAccess::ForArrayLength(kind)) : current_capacity; return BuildCheckAndGrowElementsCapacity(object, elements, kind, length, current_capacity, key); } Handle GrowArrayElementsStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { LoadKeyedHoleMode hole_mode = casted_stub()->convert_hole_to_undefined() ? CONVERT_HOLE_TO_UNDEFINED : NEVER_RETURN_HOLE; HInstruction* load = BuildUncheckedMonomorphicElementAccess( GetParameter(LoadDescriptor::kReceiverIndex), GetParameter(LoadDescriptor::kNameIndex), NULL, casted_stub()->is_js_array(), casted_stub()->elements_kind(), LOAD, hole_mode, STANDARD_STORE); return load; } Handle LoadFastElementStub::GenerateCode() { return DoGenerateCode(this); } HLoadNamedField* CodeStubGraphBuilderBase::BuildLoadNamedField( HValue* object, FieldIndex index) { Representation representation = index.is_double() ? Representation::Double() : Representation::Tagged(); int offset = index.offset(); HObjectAccess access = index.is_inobject() ? 
// BuildLoadNamedField: double fields are loaded through their HeapNumber box
// unless unboxed in-object doubles are enabled and the field is in-object.
// LoadField / ArrayBufferViewLoadField / LoadConstant follow; UnmappedCase
// handles the unmapped-arguments backing store at elements[1] (key bounds
// checked against its length, else deopt kOutsideOfRange).
HObjectAccess::ForObservableJSObjectOffset(offset, representation) : HObjectAccess::ForBackingStoreOffset(offset, representation); if (index.is_double() && (!FLAG_unbox_double_fields || !index.is_inobject())) { // Load the heap number. object = Add( object, nullptr, access.WithRepresentation(Representation::Tagged())); // Load the double value from it. access = HObjectAccess::ForHeapNumberValue(); } return Add(object, nullptr, access); } template<> HValue* CodeStubGraphBuilder::BuildCodeStub() { return BuildLoadNamedField(GetParameter(0), casted_stub()->index()); } Handle LoadFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { return BuildArrayBufferViewFieldAccessor(GetParameter(0), nullptr, casted_stub()->index()); } Handle ArrayBufferViewLoadFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* map = AddLoadMap(GetParameter(0), NULL); HObjectAccess descriptors_access = HObjectAccess::ForObservableJSObjectOffset( Map::kDescriptorsOffset, Representation::Tagged()); HValue* descriptors = Add(map, nullptr, descriptors_access); HObjectAccess value_access = HObjectAccess::ForObservableJSObjectOffset( DescriptorArray::GetValueOffset(casted_stub()->constant_index())); return Add(descriptors, nullptr, value_access); } Handle LoadConstantStub::GenerateCode() { return DoGenerateCode(this); } HValue* CodeStubGraphBuilderBase::UnmappedCase(HValue* elements, HValue* key, HValue* value) { HValue* result = NULL; HInstruction* backing_store = Add(elements, graph()->GetConstant1(), nullptr, nullptr, FAST_ELEMENTS, ALLOW_RETURN_HOLE); Add(backing_store, isolate()->factory()->fixed_array_map()); HValue* backing_store_length = Add( backing_store, nullptr, HObjectAccess::ForFixedArrayLength()); IfBuilder in_unmapped_range(this); in_unmapped_range.If(key, backing_store_length, Token::LT); in_unmapped_range.Then(); { if (value == NULL) { result = 
// EmitKeyedSloppyArguments (format documented in the carried block comment):
// `value == NULL` selects load vs store mode throughout.
Add(backing_store, key, nullptr, nullptr, FAST_HOLEY_ELEMENTS, NEVER_RETURN_HOLE); } else { Add(backing_store, key, value, nullptr, FAST_HOLEY_ELEMENTS); } } in_unmapped_range.ElseDeopt(Deoptimizer::kOutsideOfRange); in_unmapped_range.End(); return result; } HValue* CodeStubGraphBuilderBase::EmitKeyedSloppyArguments(HValue* receiver, HValue* key, HValue* value) { // Mapped arguments are actual arguments. Unmapped arguments are values added // to the arguments object after it was created for the call. Mapped arguments // are stored in the context at indexes given by elements[key + 2]. Unmapped // arguments are stored as regular indexed properties in the arguments array, // held at elements[1]. See NewSloppyArguments() in runtime.cc for a detailed // look at argument object construction. // // The sloppy arguments elements array has a special format: // // 0: context // 1: unmapped arguments array // 2: mapped_index0, // 3: mapped_index1, // ... // // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments). // If key + 2 >= elements.length then attempt to look in the unmapped // arguments array (given by elements[1]) and return the value at key, missing // to the runtime if the unmapped arguments array is not a fixed array or if // key >= unmapped_arguments_array.length. // // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value // in the unmapped arguments array, as described above. Otherwise, t is a Smi // index into the context array given at elements[0]. Return the value at // context[t]. 
// Body: negative keys deopt (kKeyIsNegative); in-range keys read
// elements[key + 2] — a non-hole entry is a context index at elements[0]
// (load or STORE_TO context slot), a hole or out-of-range key falls through to
// UnmappedCase above.
bool is_load = value == NULL; key = AddUncasted(key, Representation::Smi()); IfBuilder positive_smi(this); positive_smi.If(key, graph()->GetConstant0(), Token::LT); positive_smi.ThenDeopt(Deoptimizer::kKeyIsNegative); positive_smi.End(); HValue* constant_two = Add(2); HValue* elements = AddLoadElements(receiver, nullptr); HValue* elements_length = Add( elements, nullptr, HObjectAccess::ForFixedArrayLength()); HValue* adjusted_length = AddUncasted(elements_length, constant_two); IfBuilder in_range(this); in_range.If(key, adjusted_length, Token::LT); in_range.Then(); { HValue* index = AddUncasted(key, constant_two); HInstruction* mapped_index = Add(elements, index, nullptr, nullptr, FAST_HOLEY_ELEMENTS, ALLOW_RETURN_HOLE); IfBuilder is_valid(this); is_valid.IfNot(mapped_index, graph()->GetConstantHole()); is_valid.Then(); { // TODO(mvstanton): I'd like to assert from this point, that if the // mapped_index is not the hole that it is indeed, a smi. An unnecessary // smi check is being emitted. HValue* the_context = Add(elements, graph()->GetConstant0(), nullptr, nullptr, FAST_ELEMENTS); STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize); if (is_load) { HValue* result = Add(the_context, mapped_index, nullptr, nullptr, FAST_ELEMENTS, ALLOW_RETURN_HOLE); environment()->Push(result); } else { DCHECK(value != NULL); Add(the_context, mapped_index, value, nullptr, FAST_ELEMENTS); environment()->Push(value); } } is_valid.Else(); { HValue* result = UnmappedCase(elements, key, value); environment()->Push(is_load ? result : value); } is_valid.End(); } in_range.Else(); { HValue* result = UnmappedCase(elements, key, value); environment()->Push(is_load ? 
// KeyedLoad/KeyedStoreSloppyArguments both delegate to
// EmitKeyedSloppyArguments (NULL value selects load mode).
// NOTE(review): BuildStoreNamedField begins at the end of this line and is cut
// off by the chunking (its body continues past this chunk) — do not edit its
// tail without the missing continuation.
result : value); } in_range.End(); return environment()->Pop(); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex); HValue* key = GetParameter(LoadDescriptor::kNameIndex); return EmitKeyedSloppyArguments(receiver, key, NULL); } Handle KeyedLoadSloppyArgumentsStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* receiver = GetParameter(StoreDescriptor::kReceiverIndex); HValue* key = GetParameter(StoreDescriptor::kNameIndex); HValue* value = GetParameter(StoreDescriptor::kValueIndex); return EmitKeyedSloppyArguments(receiver, key, value); } Handle KeyedStoreSloppyArgumentsStub::GenerateCode() { return DoGenerateCode(this); } void CodeStubGraphBuilderBase::BuildStoreNamedField( HValue* object, HValue* value, FieldIndex index, Representation representation, bool transition_to_field) { DCHECK(!index.is_double() || representation.IsDouble()); int offset = index.offset(); HObjectAccess access = index.is_inobject() ? HObjectAccess::ForObservableJSObjectOffset(offset, representation) : HObjectAccess::ForBackingStoreOffset(offset, representation); if (representation.IsDouble()) { if (!FLAG_unbox_double_fields || !index.is_inobject()) { HObjectAccess heap_number_access = access.WithRepresentation(Representation::Tagged()); if (transition_to_field) { // The store requires a mutable HeapNumber to be allocated. NoObservableSideEffectsScope no_side_effects(this); HInstruction* heap_number_size = Add(HeapNumber::kSize); // TODO(hpayer): Allocation site pretenuring support. HInstruction* heap_number = Add(heap_number_size, HType::HeapObject(), NOT_TENURED, MUTABLE_HEAP_NUMBER_TYPE); AddStoreMapConstant(heap_number, isolate()->factory()->mutable_heap_number_map()); Add(heap_number, HObjectAccess::ForHeapNumberValue(), value); // Store the new mutable heap number into the object. 
access = heap_number_access; value = heap_number; } else { // Load the heap number. object = Add(object, nullptr, heap_number_access); // Store the double value into it. access = HObjectAccess::ForHeapNumberValue(); } } } else if (representation.IsHeapObject()) { BuildCheckHeapObject(value); } Add(object, access, value, INITIALIZING_STORE); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BuildStoreNamedField(GetParameter(0), GetParameter(2), casted_stub()->index(), casted_stub()->representation(), false); return GetParameter(2); } Handle StoreFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* object = GetParameter(StoreTransitionHelper::ReceiverIndex()); switch (casted_stub()->store_mode()) { case StoreTransitionStub::ExtendStorageAndStoreMapAndValue: { HValue* properties = Add( object, nullptr, HObjectAccess::ForPropertiesPointer()); HValue* length = AddLoadFixedArrayLength(properties); HValue* delta = Add(static_cast(JSObject::kFieldsAdded)); HValue* new_capacity = AddUncasted(length, delta); // Grow properties array. ElementsKind kind = FAST_ELEMENTS; Add(new_capacity, Add((Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) >> ElementsKindToShiftSize(kind))); // Reuse this code for properties backing store allocation. HValue* new_properties = BuildAllocateAndInitializeArray(kind, new_capacity); BuildCopyProperties(properties, new_properties, length, new_capacity); Add(object, HObjectAccess::ForPropertiesPointer(), new_properties); } // Fall through. case StoreTransitionStub::StoreMapAndValue: // Store the new value into the "extended" object. BuildStoreNamedField( object, GetParameter(StoreTransitionHelper::ValueIndex()), casted_stub()->index(), casted_stub()->representation(), true); // Fall through. case StoreTransitionStub::StoreMapOnly: // And finally update the map. 
Add(object, HObjectAccess::ForMap(), GetParameter(StoreTransitionHelper::MapIndex())); break; } return GetParameter(StoreTransitionHelper::ValueIndex()); } Handle StoreTransitionStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BuildUncheckedMonomorphicElementAccess( GetParameter(StoreDescriptor::kReceiverIndex), GetParameter(StoreDescriptor::kNameIndex), GetParameter(StoreDescriptor::kValueIndex), casted_stub()->is_js_array(), casted_stub()->elements_kind(), STORE, NEVER_RETURN_HOLE, casted_stub()->store_mode()); return GetParameter(2); } Handle StoreFastElementStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { info()->MarkAsSavesCallerDoubles(); BuildTransitionElementsKind(GetParameter(0), GetParameter(1), casted_stub()->from_kind(), casted_stub()->to_kind(), casted_stub()->is_js_array()); return GetParameter(0); } Handle TransitionElementsKindStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(Add(HeapNumber::kSize), HType::HeapNumber(), NOT_TENURED, HEAP_NUMBER_TYPE); AddStoreMapConstant(result, isolate()->factory()->heap_number_map()); return result; } Handle AllocateHeapNumberStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(Add(HeapNumber::kSize), HType::HeapObject(), NOT_TENURED, MUTABLE_HEAP_NUMBER_TYPE); AddStoreMapConstant(result, isolate()->factory()->mutable_heap_number_map()); return result; } Handle AllocateMutableHeapNumberStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(GetParameter(0), HType::Tagged(), NOT_TENURED, JS_OBJECT_TYPE); return result; } Handle AllocateInNewSpaceStub::GenerateCode() { return DoGenerateCode(this); } HValue* 
// NOTE(review): This region appears to be a lossily-extracted copy of V8's
// Hydrogen code-stub builders: template-argument lists (e.g. Add<...>,
// GetParameter<...>) have been stripped and line breaks collapsed, so it will
// not compile as-is. Restore from upstream before building; only comment
// typos are corrected here ("and eager frame" -> "an eager frame", matching
// the correctly-spelled copy of the same sentence later in this file).
CodeStubGraphBuilderBase::BuildArrayConstructor( ElementsKind kind, AllocationSiteOverrideMode override_mode, ArgumentClass argument_class) { HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor); HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite); JSArrayBuilder array_builder(this, kind, alloc_site, constructor, override_mode); HValue* result = NULL; switch (argument_class) { case NONE: // This stub is very performance sensitive, the generated code must be // tuned so that it doesn't build an eager frame. info()->MarkMustNotHaveEagerFrame(); result = array_builder.AllocateEmptyArray(); break; case SINGLE: result = BuildArraySingleArgumentConstructor(&array_builder); break; case MULTIPLE: result = BuildArrayNArgumentsConstructor(&array_builder, kind); break; } return result; } HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor( ElementsKind kind, ArgumentClass argument_class) { HValue* constructor = GetParameter( InternalArrayConstructorStubBase::kConstructor); JSArrayBuilder array_builder(this, kind, constructor); HValue* result = NULL; switch (argument_class) { case NONE: // This stub is very performance sensitive, the generated code must be // tuned so that it doesn't build an eager frame. info()->MarkMustNotHaveEagerFrame(); result = array_builder.AllocateEmptyArray(); break; case SINGLE: result = BuildArraySingleArgumentConstructor(&array_builder); break; case MULTIPLE: result = BuildArrayNArgumentsConstructor(&array_builder, kind); break; } return result; } HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor( JSArrayBuilder* array_builder) { // Smi check and range check on the input arg. 
HValue* constant_one = graph()->GetConstant1(); HValue* constant_zero = graph()->GetConstant0(); HInstruction* elements = Add(false); HInstruction* argument = Add( elements, constant_one, constant_zero); return BuildAllocateArrayFromLength(array_builder, argument); } HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor( JSArrayBuilder* array_builder, ElementsKind kind) { // Insert a bounds check because the number of arguments might exceed // the kInitialMaxFastElementArray limit. This cannot happen for code // that was parsed, but calling via Array.apply(thisArg, [...]) might // trigger it. HValue* length = GetArgumentsLength(); HConstant* max_alloc_length = Add(JSArray::kInitialMaxFastElementArray); HValue* checked_length = Add(length, max_alloc_length); // We need to fill with the hole if it's a smi array in the multi-argument // case because we might have to bail out while copying arguments into // the array because they aren't compatible with a smi array. // If it's a double array, no problem, and if it's fast then no // problem either because doubles are boxed. // // TODO(mvstanton): consider an instruction to memset fill the array // with zero in this case instead. JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind) ? JSArrayBuilder::FILL_WITH_HOLE : JSArrayBuilder::DONT_FILL_WITH_HOLE; HValue* new_object = array_builder->AllocateArray(checked_length, max_alloc_length, checked_length, fill_mode); HValue* elements = array_builder->GetElementsLocation(); DCHECK(elements != NULL); // Now populate the elements correctly. 
LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement); HValue* start = graph()->GetConstant0(); HValue* key = builder.BeginBody(start, checked_length, Token::LT); HInstruction* argument_elements = Add(false); HInstruction* argument = Add( argument_elements, checked_length, key); Add(elements, key, argument, nullptr, kind); builder.EndBody(); return new_object; } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, NONE); } Handle ArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, SINGLE); } Handle ArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, MULTIPLE); } Handle ArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, NONE); } Handle InternalArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, SINGLE); } Handle InternalArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = 
casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, MULTIPLE); } Handle InternalArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { Isolate* isolate = graph()->isolate(); CompareNilICStub* stub = casted_stub(); HIfContinuation continuation; Handle sentinel_map(isolate->heap()->meta_map()); Type* type = stub->GetType(zone(), sentinel_map); BuildCompareNil(GetParameter(0), type, &continuation, kEmbedMapsViaWeakCells); IfBuilder if_nil(this, &continuation); if_nil.Then(); if (continuation.IsFalseReachable()) { if_nil.Else(); if_nil.Return(graph()->GetConstantFalse()); } if_nil.End(); return continuation.IsTrueReachable() ? graph()->GetConstantTrue() : graph()->GetConstantUndefined(); } Handle CompareNilICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { BinaryOpICState state = casted_stub()->state(); HValue* left = GetParameter(BinaryOpICStub::kLeft); HValue* right = GetParameter(BinaryOpICStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) && (state.HasSideEffects() || !result_type->Is(Type::None()))); HValue* result = NULL; HAllocationMode allocation_mode(NOT_TENURED); if (state.op() == Token::ADD && (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) && !left_type->Is(Type::String()) && !right_type->Is(Type::String())) { // For the generic add stub a fast case for string addition is performance // critical. 
if (left_type->Maybe(Type::String())) { IfBuilder if_leftisstring(this); if_leftisstring.If(left); if_leftisstring.Then(); { Push(BuildBinaryOperation(state.op(), left, right, Type::String(zone()), right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_leftisstring.Else(); { Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_leftisstring.End(); result = Pop(); } else { IfBuilder if_rightisstring(this); if_rightisstring.If(right); if_rightisstring.Then(); { Push(BuildBinaryOperation(state.op(), left, right, left_type, Type::String(zone()), result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_rightisstring.Else(); { Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_rightisstring.End(); result = Pop(); } } else { result = BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } // If we encounter a generic argument, the number conversion is // observable, thus we cannot afford to bail out after the fact. 
if (!state.HasSideEffects()) { result = EnforceNumberType(result, result_type); } return result; } Handle BinaryOpICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BinaryOpICState state = casted_stub()->state(); HValue* allocation_site = GetParameter( BinaryOpWithAllocationSiteStub::kAllocationSite); HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft); HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); HAllocationMode allocation_mode(allocation_site); return BuildBinaryOperation(state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } Handle BinaryOpWithAllocationSiteStub::GenerateCode() { return DoGenerateCode(this); } HValue* CodeStubGraphBuilderBase::BuildToString(HValue* input, bool convert) { if (!convert) return BuildCheckString(input); IfBuilder if_inputissmi(this); HValue* inputissmi = if_inputissmi.If(input); if_inputissmi.Then(); { // Convert the input smi to a string. Push(BuildNumberToString(input, Type::SignedSmall())); } if_inputissmi.Else(); { HValue* input_map = Add(input, inputissmi, HObjectAccess::ForMap()); HValue* input_instance_type = Add( input_map, inputissmi, HObjectAccess::ForMapInstanceType()); IfBuilder if_inputisstring(this); if_inputisstring.If( input_instance_type, Add(FIRST_NONSTRING_TYPE), Token::LT); if_inputisstring.Then(); { // The input is already a string. Push(input); } if_inputisstring.Else(); { // Convert to primitive first (if necessary), see // ES6 section 12.7.3 The Addition operator. IfBuilder if_inputisprimitive(this); STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE); if_inputisprimitive.If( input_instance_type, Add(LAST_PRIMITIVE_TYPE), Token::LTE); if_inputisprimitive.Then(); { // The input is already a primitive. 
// NOTE(review): comment-only fixes below ("shortcirciut" -> "short-circuit",
// "intendend to" -> "intended to do"); code tokens left untouched because the
// extraction stripped template-argument lists, so any code rewrite would be
// guesswork against upstream.
Push(input); } if_inputisprimitive.Else(); { // Convert the input to a primitive. Push(BuildToPrimitive(input, input_map)); } if_inputisprimitive.End(); // Convert the primitive to a string value. ToStringDescriptor descriptor(isolate()); ToStringStub stub(isolate()); HValue* values[] = {context(), Pop()}; Push(AddUncasted( Add(stub.GetCode()), 0, descriptor, Vector(values, arraysize(values)))); } if_inputisstring.End(); } if_inputissmi.End(); return Pop(); } HValue* CodeStubGraphBuilderBase::BuildToPrimitive(HValue* input, HValue* input_map) { // Get the native context of the caller. HValue* native_context = BuildGetNativeContext(); // Determine the initial map of the %ObjectPrototype%. HValue* object_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::OBJECT_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the %StringPrototype%. HValue* string_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the String function. HValue* string_function = Add( native_context, nullptr, HObjectAccess::ForContextSlot(Context::STRING_FUNCTION_INDEX)); HValue* string_function_initial_map = Add( string_function, nullptr, HObjectAccess::ForPrototypeOrInitialMap()); // Determine the map of the [[Prototype]] of {input}. HValue* input_prototype = Add(input_map, nullptr, HObjectAccess::ForPrototype()); HValue* input_prototype_map = Add(input_prototype, nullptr, HObjectAccess::ForMap()); // For string wrappers (JSValue instances with [[StringData]] internal // fields), we can short-circuit the ToPrimitive if // // (a) the {input} map matches the initial map of the String function, // (b) the {input} [[Prototype]] is the unmodified %StringPrototype% (i.e. // no one monkey-patched toString, @@toPrimitive or valueOf), and // (c) the %ObjectPrototype% (i.e. 
the [[Prototype]] of the // %StringPrototype%) is also unmodified, that is no one sneaked a // @@toPrimitive into the %ObjectPrototype%. // // If all these assumptions hold, we can just take the [[StringData]] value // and return it. // TODO(bmeurer): This just repairs a regression introduced by removing the // weird (and broken) intrinsic %_IsStringWrapperSafeForDefaultValue, which // was intended to do something similar to this, although less efficient and // wrong in the presence of @@toPrimitive. Long-term we might want to move // into the direction of having a ToPrimitiveStub that can do common cases // while staying in JavaScript land (i.e. not going to C++). IfBuilder if_inputisstringwrapper(this); if_inputisstringwrapper.If( input_map, string_function_initial_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( input_prototype_map, string_function_prototype_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( Add(Add(input_prototype_map, nullptr, HObjectAccess::ForPrototype()), nullptr, HObjectAccess::ForMap()), object_function_prototype_map); if_inputisstringwrapper.Then(); { Push(BuildLoadNamedField( input, FieldIndex::ForInObjectOffset(JSValue::kValueOffset))); } if_inputisstringwrapper.Else(); { // TODO(bmeurer): Add support for fast ToPrimitive conversion using // a dedicated ToPrimitiveStub. Add(input); Push(Add(Runtime::FunctionForId(Runtime::kToPrimitive), 1)); } if_inputisstringwrapper.End(); return Pop(); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StringAddStub* stub = casted_stub(); StringAddFlags flags = stub->flags(); PretenureFlag pretenure_flag = stub->pretenure_flag(); HValue* left = GetParameter(StringAddStub::kLeft); HValue* right = GetParameter(StringAddStub::kRight); // Make sure that both arguments are strings if not known in advance. 
if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) { left = BuildToString(left, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) { right = BuildToString(right, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } return BuildStringAdd(left, right, HAllocationMode(pretenure_flag)); } Handle StringAddStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { ToBooleanStub* stub = casted_stub(); IfBuilder if_true(this); if_true.If(GetParameter(0), stub->types()); if_true.Then(); if_true.Return(graph()->GetConstantTrue()); if_true.Else(); if_true.End(); return graph()->GetConstantFalse(); } Handle ToBooleanStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StoreGlobalStub* stub = casted_stub(); HParameter* value = GetParameter(StoreDescriptor::kValueIndex); if (stub->check_global()) { // Check that the map of the global has not changed: use a placeholder map // that will be replaced later with the global object's map. HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex); HValue* proxy_map = Add(proxy, nullptr, HObjectAccess::ForMap()); HValue* global = Add(proxy_map, nullptr, HObjectAccess::ForPrototype()); HValue* map_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::global_map_placeholder(isolate()))); HValue* expected_map = Add( map_cell, nullptr, HObjectAccess::ForWeakCellValue()); HValue* map = Add(global, nullptr, HObjectAccess::ForMap()); IfBuilder map_check(this); map_check.IfNot(expected_map, map); map_check.ThenDeopt(Deoptimizer::kUnknownMap); map_check.End(); } HValue* weak_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::property_cell_placeholder(isolate()))); HValue* cell = Add(weak_cell, nullptr, HObjectAccess::ForWeakCellValue()); Add
// NOTE(review): comment-only fix below ("and eager frame" -> "an eager
// frame"). Code tokens untouched: the extraction stripped template-argument
// lists and apparently dropped a span just before this line (the tail of the
// StoreGlobalStub builder / Typeof builder), so restore from upstream rather
// than editing code here.
TypeofStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* closure = GetParameter(0); HValue* literal_index = GetParameter(1); // This stub is very performance sensitive, the generated code must be tuned // so that it doesn't build an eager frame. info()->MarkMustNotHaveEagerFrame(); HValue* literals_array = Add( closure, nullptr, HObjectAccess::ForLiteralsPointer()); HInstruction* boilerplate = Add( literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS, NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag); IfBuilder if_notundefined(this); if_notundefined.IfNot( boilerplate, graph()->GetConstantUndefined()); if_notundefined.Then(); { int result_size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize; HValue* result = Add(Add(result_size), HType::JSObject(), NOT_TENURED, JS_REGEXP_TYPE); Add( result, HObjectAccess::ForMap(), Add(boilerplate, nullptr, HObjectAccess::ForMap())); Add( result, HObjectAccess::ForPropertiesPointer(), Add(boilerplate, nullptr, HObjectAccess::ForPropertiesPointer())); Add( result, HObjectAccess::ForElementsPointer(), Add(boilerplate, nullptr, HObjectAccess::ForElementsPointer())); for (int offset = JSObject::kHeaderSize; offset < result_size; offset += kPointerSize) { HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(offset); Add(result, access, Add(boilerplate, nullptr, access)); } Push(result); } if_notundefined.ElseDeopt(Deoptimizer::kUninitializedBoilerplateInFastClone); if_notundefined.End(); return Pop(); } Handle FastCloneRegExpStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { Factory* factory = isolate()->factory(); HValue* undefined = graph()->GetConstantUndefined(); AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode(); HValue* closure = GetParameter(0); HValue* literal_index = GetParameter(1); // This stub is very 
// NOTE(review): comment-only fix below ("and eager frame" -> "an eager
// frame"); code tokens left byte-identical (template arguments were stripped
// by extraction -- restore from upstream before compiling).
performance sensitive, the generated code must be tuned // so that it doesn't build an eager frame. info()->MarkMustNotHaveEagerFrame(); HValue* literals_array = Add( closure, nullptr, HObjectAccess::ForLiteralsPointer()); HInstruction* allocation_site = Add( literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS, NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag); IfBuilder checker(this); checker.IfNot(allocation_site, undefined); checker.Then(); HObjectAccess access = HObjectAccess::ForAllocationSiteOffset( AllocationSite::kTransitionInfoOffset); HInstruction* boilerplate = Add(allocation_site, nullptr, access); HValue* elements = AddLoadElements(boilerplate); HValue* capacity = AddLoadFixedArrayLength(elements); IfBuilder zero_capacity(this); zero_capacity.If(capacity, graph()->GetConstant0(), Token::EQ); zero_capacity.Then(); Push(BuildCloneShallowArrayEmpty(boilerplate, allocation_site, alloc_site_mode)); zero_capacity.Else(); IfBuilder if_fixed_cow(this); if_fixed_cow.If(elements, factory->fixed_cow_array_map()); if_fixed_cow.Then(); Push(BuildCloneShallowArrayCow(boilerplate, allocation_site, alloc_site_mode, FAST_ELEMENTS)); if_fixed_cow.Else(); IfBuilder if_fixed(this); if_fixed.If(elements, factory->fixed_array_map()); if_fixed.Then(); Push(BuildCloneShallowArrayNonEmpty(boilerplate, allocation_site, alloc_site_mode, FAST_ELEMENTS)); if_fixed.Else(); Push(BuildCloneShallowArrayNonEmpty(boilerplate, allocation_site, alloc_site_mode, FAST_DOUBLE_ELEMENTS)); if_fixed.End(); if_fixed_cow.End(); zero_capacity.End(); checker.ElseDeopt(Deoptimizer::kUninitializedBoilerplateLiterals); checker.End(); return environment()->Pop(); } Handle FastCloneShallowArrayStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* undefined = graph()->GetConstantUndefined(); HValue* closure = GetParameter(0); HValue* literal_index = GetParameter(1); HValue* literals_array = Add( 
closure, nullptr, HObjectAccess::ForLiteralsPointer()); HInstruction* allocation_site = Add( literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS, NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag); IfBuilder checker(this); checker.IfNot(allocation_site, undefined); checker.And(); HObjectAccess access = HObjectAccess::ForAllocationSiteOffset( AllocationSite::kTransitionInfoOffset); HInstruction* boilerplate = Add(allocation_site, nullptr, access); int length = casted_stub()->length(); if (length == 0) { // Empty objects have some slack added to them. length = JSObject::kInitialGlobalObjectUnusedPropertiesCount; } int size = JSObject::kHeaderSize + length * kPointerSize; int object_size = size; if (FLAG_allocation_site_pretenuring) { size += AllocationMemento::kSize; } HValue* boilerplate_map = Add(boilerplate, nullptr, HObjectAccess::ForMap()); HValue* boilerplate_size = Add( boilerplate_map, nullptr, HObjectAccess::ForMapInstanceSize()); HValue* size_in_words = Add(object_size >> kPointerSizeLog2); checker.If(boilerplate_size, size_in_words, Token::EQ); checker.Then(); HValue* size_in_bytes = Add(size); HInstruction* object = Add(size_in_bytes, HType::JSObject(), NOT_TENURED, JS_OBJECT_TYPE); for (int i = 0; i < object_size; i += kPointerSize) { HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(i); Add(object, access, Add(boilerplate, nullptr, access)); } DCHECK(FLAG_allocation_site_pretenuring || (size == object_size)); if (FLAG_allocation_site_pretenuring) { BuildCreateAllocationMemento( object, Add(object_size), allocation_site); } environment()->Push(object); checker.ElseDeopt(Deoptimizer::kUninitializedBoilerplateInFastClone); checker.End(); return environment()->Pop(); } Handle FastCloneShallowObjectStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { // This stub is performance sensitive, the generated code must be tuned // so that it doesn't build 
// NOTE(review): this chunk opens mid-function — the header of this
// CodeStubGraphBuilder<CreateAllocationSiteStub>::BuildCodeStub() (and all
// template argument lists, apparently stripped by text extraction) lies
// before this view. Confirm against the original source tree.
// ...an eager frame.
info()->MarkMustNotHaveEagerFrame();

// Allocate a tenured, AllocationSite-sized JSObject.
HValue* size = Add(AllocationSite::kSize);
HInstruction* object =
    Add(size, HType::JSObject(), TENURED, JS_OBJECT_TYPE);

// Store the map
Handle allocation_site_map = isolate()->factory()->allocation_site_map();
AddStoreMapConstant(object, allocation_site_map);

// Store the payload (smi elements kind)
HValue* initial_elements_kind = Add(GetInitialFastElementsKind());
Add(object, HObjectAccess::ForAllocationSiteOffset(
                AllocationSite::kTransitionInfoOffset),
    initial_elements_kind);

// Unlike literals, constructed arrays don't have nested sites
Add(object, HObjectAccess::ForAllocationSiteOffset(
                AllocationSite::kNestedSiteOffset),
    graph()->GetConstant0());

// Pretenuring calculation field.
Add(object, HObjectAccess::ForAllocationSiteOffset(
                AllocationSite::kPretenureDataOffset),
    graph()->GetConstant0());

// Pretenuring memento creation count field.
Add(object, HObjectAccess::ForAllocationSiteOffset(
                AllocationSite::kPretenureCreateCountOffset),
    graph()->GetConstant0());

// Store an empty fixed array for the code dependency.
HConstant* empty_fixed_array =
    Add(isolate()->factory()->empty_fixed_array());
Add(object,
    HObjectAccess::ForAllocationSiteOffset(
        AllocationSite::kDependentCodeOffset),
    empty_fixed_array);

// Link the object to the allocation site list
HValue* site_list = Add(
    ExternalReference::allocation_sites_list_address(isolate()));
HValue* site = Add(site_list, nullptr,
                   HObjectAccess::ForAllocationSiteList());
// TODO(mvstanton): This is a store to a weak pointer, which we may want to
// mark as such in order to skip the write barrier, once we have a unified
// system for weakness. For now we decided to keep it like this because having
// an initial write barrier backed store makes this pointer strong until the
// next GC, and allocation sites are designed to survive several GCs anyway.
Add(object,
    HObjectAccess::ForAllocationSiteOffset(AllocationSite::kWeakNextOffset),
    site);
Add(site_list, HObjectAccess::ForAllocationSiteList(), object);

// Publish the new site into the feedback vector slot that requested it.
HInstruction* feedback_vector = GetParameter(0);
HInstruction* slot = GetParameter(1);
Add(feedback_vector, slot, object, nullptr, FAST_ELEMENTS,
    INITIALIZING_STORE);
return feedback_vector;
}


Handle CreateAllocationSiteStub::GenerateCode() {
  return DoGenerateCode(this);
}


// Allocates a fresh WeakCell, initializes value/next, stores it into the
// given feedback-vector slot, and returns the Smi zero.
template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  // This stub is performance sensitive, the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();

  HValue* size = Add(WeakCell::kSize);
  HInstruction* object =
      Add(size, HType::JSObject(), TENURED, JS_OBJECT_TYPE);

  Handle weak_cell_map = isolate()->factory()->weak_cell_map();
  AddStoreMapConstant(object, weak_cell_map);

  HInstruction* value = GetParameter(CreateWeakCellDescriptor::kValueIndex);
  Add(object, HObjectAccess::ForWeakCellValue(), value);
  // "next" starts out as the hole sentinel.
  Add(object, HObjectAccess::ForWeakCellNext(), graph()->GetConstantHole());

  HInstruction* feedback_vector =
      GetParameter(CreateWeakCellDescriptor::kVectorIndex);
  HInstruction* slot = GetParameter(CreateWeakCellDescriptor::kSlotIndex);
  Add(feedback_vector, slot, object, nullptr, FAST_ELEMENTS,
      INITIALIZING_STORE);
  return graph()->GetConstant0();
}


Handle CreateWeakCellStub::GenerateCode() { return DoGenerateCode(this); }


// Loads the slot (context_index, slot_index) — both baked into the stub —
// from a script context.
template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  int context_index = casted_stub()->context_index();
  int slot_index = casted_stub()->slot_index();

  HValue* script_context = BuildGetScriptContext(context_index);
  return Add(script_context, nullptr,
             HObjectAccess::ForContextSlot(slot_index));
}


Handle LoadScriptContextFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}


// Stores parameter 2 into a stub-baked script-context slot and returns the
// stored value.
template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  int context_index = casted_stub()->context_index();
  int slot_index = casted_stub()->slot_index();

  HValue* script_context = BuildGetScriptContext(context_index);
  Add(script_context, HObjectAccess::ForContextSlot(slot_index),
      GetParameter(2), STORE_TO_INITIALIZED_ENTRY);
  return GetParameter(2);
}


Handle StoreScriptContextFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}


// Grows an object's elements backing store so that `key` fits.
template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  if (IsFastDoubleElementsKind(kind)) {
    info()->MarkAsSavesCallerDoubles();
  }

  HValue* object = GetParameter(GrowArrayElementsDescriptor::kObjectIndex);
  HValue* key = GetParameter(GrowArrayElementsDescriptor::kKeyIndex);

  HValue* elements = AddLoadElements(object);
  HValue* current_capacity = Add(
      elements, nullptr, HObjectAccess::ForFixedArrayLength());

  // JS arrays use the array length; other receivers use the capacity itself.
  HValue* length =
      casted_stub()->is_js_array()
          ? Add(object, static_cast(NULL),
                HObjectAccess::ForArrayLength(kind))
          : current_capacity;

  return BuildCheckAndGrowElementsCapacity(object, elements, kind, length,
                                           current_capacity, key);
}


Handle GrowArrayElementsStub::GenerateCode() {
  return DoGenerateCode(this);
}


// Monomorphic fast-elements keyed load; optionally converts the hole to
// undefined per the stub's convert_hole_to_undefined() flag.
template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  LoadKeyedHoleMode hole_mode = casted_stub()->convert_hole_to_undefined()
                                    ? CONVERT_HOLE_TO_UNDEFINED
                                    : NEVER_RETURN_HOLE;

  HInstruction* load = BuildUncheckedMonomorphicElementAccess(
      GetParameter(LoadDescriptor::kReceiverIndex),
      GetParameter(LoadDescriptor::kNameIndex), NULL,
      casted_stub()->is_js_array(), casted_stub()->elements_kind(), LOAD,
      hole_mode, STANDARD_STORE);
  return load;
}


Handle LoadFastElementStub::GenerateCode() { return DoGenerateCode(this); }


// Loads the named field described by `index`; body continues on the file's
// next line.
HLoadNamedField* CodeStubGraphBuilderBase::BuildLoadNamedField(
    HValue* object, FieldIndex index) {
  Representation representation = index.is_double()
      ? Representation::Double()
      : Representation::Tagged();
  int offset = index.offset();
  HObjectAccess access = index.is_inobject() ?
      // (continuation of BuildLoadNamedField from the previous line)
      HObjectAccess::ForObservableJSObjectOffset(offset, representation) :
      HObjectAccess::ForBackingStoreOffset(offset, representation);

  if (index.is_double() &&
      (!FLAG_unbox_double_fields || !index.is_inobject())) {
    // Load the heap number.
    object = Add(
        object, nullptr, access.WithRepresentation(Representation::Tagged()));
    // Load the double value from it.
    access = HObjectAccess::ForHeapNumberValue();
  }
  return Add(object, nullptr, access);
}


template<>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  return BuildLoadNamedField(GetParameter(0), casted_stub()->index());
}


Handle LoadFieldStub::GenerateCode() { return DoGenerateCode(this); }


template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  return BuildArrayBufferViewFieldAccessor(GetParameter(0), nullptr,
                                           casted_stub()->index());
}


Handle ArrayBufferViewLoadFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}


// Loads a constant value out of the receiver map's descriptor array, at the
// descriptor index baked into the stub.
template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  HValue* map = AddLoadMap(GetParameter(0), NULL);
  HObjectAccess descriptors_access = HObjectAccess::ForObservableJSObjectOffset(
      Map::kDescriptorsOffset, Representation::Tagged());
  HValue* descriptors = Add(map, nullptr, descriptors_access);
  HObjectAccess value_access = HObjectAccess::ForObservableJSObjectOffset(
      DescriptorArray::GetValueOffset(casted_stub()->constant_index()));
  return Add(descriptors, nullptr, value_access);
}


Handle LoadConstantStub::GenerateCode() { return DoGenerateCode(this); }


// Unmapped sloppy-arguments access: load (value == NULL) or store at `key`
// in the backing store held at elements[1], deopting when out of range.
// Returns the loaded value, or NULL in the store case.
HValue* CodeStubGraphBuilderBase::UnmappedCase(HValue* elements, HValue* key,
                                               HValue* value) {
  HValue* result = NULL;
  HInstruction* backing_store =
      Add(elements, graph()->GetConstant1(), nullptr, nullptr,
          FAST_ELEMENTS, ALLOW_RETURN_HOLE);
  Add(backing_store, isolate()->factory()->fixed_array_map());
  HValue* backing_store_length = Add(
      backing_store, nullptr, HObjectAccess::ForFixedArrayLength());
  IfBuilder in_unmapped_range(this);
  in_unmapped_range.If(key, backing_store_length, Token::LT);
  in_unmapped_range.Then();
  {
    if (value == NULL) {
      result = Add(backing_store, key, nullptr, nullptr,
                   FAST_HOLEY_ELEMENTS, NEVER_RETURN_HOLE);
    } else {
      Add(backing_store, key, value, nullptr, FAST_HOLEY_ELEMENTS);
    }
  }
  in_unmapped_range.ElseDeopt(Deoptimizer::kOutsideOfRange);
  in_unmapped_range.End();
  return result;
}


HValue* CodeStubGraphBuilderBase::EmitKeyedSloppyArguments(HValue* receiver,
                                                           HValue* key,
                                                           HValue* value) {
  // Mapped arguments are actual arguments. Unmapped arguments are values added
  // to the arguments object after it was created for the call. Mapped arguments
  // are stored in the context at indexes given by elements[key + 2]. Unmapped
  // arguments are stored as regular indexed properties in the arguments array,
  // held at elements[1]. See NewSloppyArguments() in runtime.cc for a detailed
  // look at argument object construction.
  //
  // The sloppy arguments elements array has a special format:
  //
  // 0: context
  // 1: unmapped arguments array
  // 2: mapped_index0,
  // 3: mapped_index1,
  // ...
  //
  // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments).
  // If key + 2 >= elements.length then attempt to look in the unmapped
  // arguments array (given by elements[1]) and return the value at key, missing
  // to the runtime if the unmapped arguments array is not a fixed array or if
  // key >= unmapped_arguments_array.length.
  //
  // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value
  // in the unmapped arguments array, as described above. Otherwise, t is a Smi
  // index into the context array given at elements[0]. Return the value at
  // context[t].

  // value == NULL distinguishes a keyed load from a keyed store.
  bool is_load = value == NULL;

  key = AddUncasted(key, Representation::Smi());
  IfBuilder positive_smi(this);
  positive_smi.If(key, graph()->GetConstant0(), Token::LT);
  positive_smi.ThenDeopt(Deoptimizer::kKeyIsNegative);
  positive_smi.End();

  HValue* constant_two = Add(2);
  HValue* elements = AddLoadElements(receiver, nullptr);
  HValue* elements_length = Add(
      elements, nullptr, HObjectAccess::ForFixedArrayLength());
  HValue* adjusted_length = AddUncasted(elements_length, constant_two);
  IfBuilder in_range(this);
  in_range.If(key, adjusted_length, Token::LT);
  in_range.Then();
  {
    // Mapped case: elements[key + 2] is either the hole or a context index.
    HValue* index = AddUncasted(key, constant_two);
    HInstruction* mapped_index =
        Add(elements, index, nullptr, nullptr,
            FAST_HOLEY_ELEMENTS, ALLOW_RETURN_HOLE);

    IfBuilder is_valid(this);
    is_valid.IfNot(mapped_index, graph()->GetConstantHole());
    is_valid.Then();
    {
      // TODO(mvstanton): I'd like to assert from this point, that if the
      // mapped_index is not the hole that it is indeed, a smi. An unnecessary
      // smi check is being emitted.
      HValue* the_context = Add(elements, graph()->GetConstant0(),
                                nullptr, nullptr, FAST_ELEMENTS);
      STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize);
      if (is_load) {
        HValue* result = Add(the_context, mapped_index, nullptr, nullptr,
                             FAST_ELEMENTS, ALLOW_RETURN_HOLE);
        environment()->Push(result);
      } else {
        DCHECK(value != NULL);
        Add(the_context, mapped_index, value, nullptr, FAST_ELEMENTS);
        environment()->Push(value);
      }
    }
    is_valid.Else();
    {
      HValue* result = UnmappedCase(elements, key, value);
      environment()->Push(is_load ? result : value);
    }
    is_valid.End();
  }
  in_range.Else();
  {
    HValue* result = UnmappedCase(elements, key, value);
    environment()->Push(is_load ?
    // (continuation of EmitKeyedSloppyArguments from the previous line)
    result : value);
  }
  in_range.End();

  return environment()->Pop();
}


template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
  HValue* key = GetParameter(LoadDescriptor::kNameIndex);

  // NULL value selects the load path in EmitKeyedSloppyArguments.
  return EmitKeyedSloppyArguments(receiver, key, NULL);
}


Handle KeyedLoadSloppyArgumentsStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  HValue* receiver = GetParameter(StoreDescriptor::kReceiverIndex);
  HValue* key = GetParameter(StoreDescriptor::kNameIndex);
  HValue* value = GetParameter(StoreDescriptor::kValueIndex);

  return EmitKeyedSloppyArguments(receiver, key, value);
}


Handle KeyedStoreSloppyArgumentsStub::GenerateCode() {
  return DoGenerateCode(this);
}


// Stores `value` into the field described by `index`, boxing double values
// in a mutable HeapNumber (freshly allocated when transitioning to a field).
void CodeStubGraphBuilderBase::BuildStoreNamedField(
    HValue* object, HValue* value, FieldIndex index,
    Representation representation, bool transition_to_field) {
  DCHECK(!index.is_double() || representation.IsDouble());
  int offset = index.offset();
  HObjectAccess access =
      index.is_inobject()
          ? HObjectAccess::ForObservableJSObjectOffset(offset, representation)
          : HObjectAccess::ForBackingStoreOffset(offset, representation);

  if (representation.IsDouble()) {
    if (!FLAG_unbox_double_fields || !index.is_inobject()) {
      HObjectAccess heap_number_access =
          access.WithRepresentation(Representation::Tagged());
      if (transition_to_field) {
        // The store requires a mutable HeapNumber to be allocated.
        NoObservableSideEffectsScope no_side_effects(this);
        HInstruction* heap_number_size = Add(HeapNumber::kSize);

        // TODO(hpayer): Allocation site pretenuring support.
        HInstruction* heap_number =
            Add(heap_number_size, HType::HeapObject(), NOT_TENURED,
                MUTABLE_HEAP_NUMBER_TYPE);
        AddStoreMapConstant(heap_number,
                            isolate()->factory()->mutable_heap_number_map());
        Add(heap_number, HObjectAccess::ForHeapNumberValue(), value);
        // Store the new mutable heap number into the object.
        access = heap_number_access;
        value = heap_number;
      } else {
        // Load the heap number.
        object = Add(object, nullptr, heap_number_access);
        // Store the double value into it.
        access = HObjectAccess::ForHeapNumberValue();
      }
    }
  } else if (representation.IsHeapObject()) {
    BuildCheckHeapObject(value);
  }

  Add(object, access, value, INITIALIZING_STORE);
}


template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  BuildStoreNamedField(GetParameter(0), GetParameter(2), casted_stub()->index(),
                       casted_stub()->representation(), false);
  return GetParameter(2);
}


Handle StoreFieldStub::GenerateCode() { return DoGenerateCode(this); }


// Transitioning store. Depending on the stub's store_mode this extends the
// properties backing store, stores the value, and/or installs the new map;
// the switch cases deliberately fall through in that order.
template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  HValue* object = GetParameter(StoreTransitionHelper::ReceiverIndex());

  switch (casted_stub()->store_mode()) {
    case StoreTransitionStub::ExtendStorageAndStoreMapAndValue: {
      HValue* properties = Add(
          object, nullptr, HObjectAccess::ForPropertiesPointer());
      HValue* length = AddLoadFixedArrayLength(properties);
      HValue* delta = Add(static_cast(JSObject::kFieldsAdded));
      HValue* new_capacity = AddUncasted(length, delta);

      // Grow properties array.
      ElementsKind kind = FAST_ELEMENTS;
      Add(new_capacity,
          Add((Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) >>
              ElementsKindToShiftSize(kind)));

      // Reuse this code for properties backing store allocation.
      HValue* new_properties =
          BuildAllocateAndInitializeArray(kind, new_capacity);

      BuildCopyProperties(properties, new_properties, length, new_capacity);

      Add(object, HObjectAccess::ForPropertiesPointer(), new_properties);
    }
    // Fall through.
    case StoreTransitionStub::StoreMapAndValue:
      // Store the new value into the "extended" object.
      BuildStoreNamedField(
          object, GetParameter(StoreTransitionHelper::ValueIndex()),
          casted_stub()->index(), casted_stub()->representation(), true);
    // Fall through.

    case StoreTransitionStub::StoreMapOnly:
      // And finally update the map.
      Add(object, HObjectAccess::ForMap(),
          GetParameter(StoreTransitionHelper::MapIndex()));
      break;
  }
  return GetParameter(StoreTransitionHelper::ValueIndex());
}


Handle StoreTransitionStub::GenerateCode() { return DoGenerateCode(this); }


template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  BuildUncheckedMonomorphicElementAccess(
      GetParameter(StoreDescriptor::kReceiverIndex),
      GetParameter(StoreDescriptor::kNameIndex),
      GetParameter(StoreDescriptor::kValueIndex), casted_stub()->is_js_array(),
      casted_stub()->elements_kind(), STORE, NEVER_RETURN_HOLE,
      casted_stub()->store_mode());

  return GetParameter(2);
}


Handle StoreFastElementStub::GenerateCode() { return DoGenerateCode(this); }


template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  info()->MarkAsSavesCallerDoubles();

  BuildTransitionElementsKind(GetParameter(0), GetParameter(1),
                              casted_stub()->from_kind(),
                              casted_stub()->to_kind(),
                              casted_stub()->is_js_array());

  return GetParameter(0);
}


Handle TransitionElementsKindStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  HValue* result =
      Add(Add(HeapNumber::kSize), HType::HeapNumber(),
          NOT_TENURED, HEAP_NUMBER_TYPE);
  AddStoreMapConstant(result, isolate()->factory()->heap_number_map());
  return result;
}


Handle AllocateHeapNumberStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  HValue* result =
      Add(Add(HeapNumber::kSize), HType::HeapObject(),
          NOT_TENURED, MUTABLE_HEAP_NUMBER_TYPE);
  AddStoreMapConstant(result, isolate()->factory()->mutable_heap_number_map());
  return result;
}


Handle AllocateMutableHeapNumberStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  // Size comes in as parameter 0; the allocation is untyped ("Tagged").
  HValue* result = Add(GetParameter(0), HType::Tagged(),
                       NOT_TENURED, JS_OBJECT_TYPE);
  return result;
}


Handle AllocateInNewSpaceStub::GenerateCode() { return DoGenerateCode(this); }


HValue*
// (the return type "HValue*" of this definition is on the previous line)
// Shared body for the Array constructor stubs; dispatches on the number of
// call arguments (NONE/SINGLE/MULTIPLE).
CodeStubGraphBuilderBase::BuildArrayConstructor(
    ElementsKind kind, AllocationSiteOverrideMode override_mode,
    ArgumentClass argument_class) {
  HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor);
  HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite);
  JSArrayBuilder array_builder(this, kind, alloc_site, constructor,
                               override_mode);
  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      // This stub is very performance sensitive, the generated code must be
      // tuned so that it doesn't build and eager frame.
      info()->MarkMustNotHaveEagerFrame();
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }

  return result;
}


// Same dispatch for the InternalArray constructor stubs (no allocation site).
HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor(
    ElementsKind kind, ArgumentClass argument_class) {
  HValue* constructor = GetParameter(
      InternalArrayConstructorStubBase::kConstructor);
  JSArrayBuilder array_builder(this, kind, constructor);

  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      // This stub is very performance sensitive, the generated code must be
      // tuned so that it doesn't build and eager frame.
      info()->MarkMustNotHaveEagerFrame();
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }
  return result;
}


HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor(
    JSArrayBuilder* array_builder) {
  // Smi check and range check on the input arg.
  HValue* constant_one = graph()->GetConstant1();
  HValue* constant_zero = graph()->GetConstant0();

  HInstruction* elements = Add(false);
  HInstruction* argument = Add(elements, constant_one, constant_zero);

  return BuildAllocateArrayFromLength(array_builder, argument);
}


HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor(
    JSArrayBuilder* array_builder, ElementsKind kind) {
  // Insert a bounds check because the number of arguments might exceed
  // the kInitialMaxFastElementArray limit. This cannot happen for code
  // that was parsed, but calling via Array.apply(thisArg, [...]) might
  // trigger it.
  HValue* length = GetArgumentsLength();
  HConstant* max_alloc_length = Add(JSArray::kInitialMaxFastElementArray);
  HValue* checked_length = Add(length, max_alloc_length);

  // We need to fill with the hole if it's a smi array in the multi-argument
  // case because we might have to bail out while copying arguments into
  // the array because they aren't compatible with a smi array.
  // If it's a double array, no problem, and if it's fast then no
  // problem either because doubles are boxed.
  //
  // TODO(mvstanton): consider an instruction to memset fill the array
  // with zero in this case instead.
  JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind)
      ? JSArrayBuilder::FILL_WITH_HOLE
      : JSArrayBuilder::DONT_FILL_WITH_HOLE;
  HValue* new_object = array_builder->AllocateArray(checked_length,
                                                    max_alloc_length,
                                                    checked_length,
                                                    fill_mode);
  HValue* elements = array_builder->GetElementsLocation();
  DCHECK(elements != NULL);

  // Now populate the elements correctly.
  LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
  HValue* start = graph()->GetConstant0();
  HValue* key = builder.BeginBody(start, checked_length, Token::LT);
  HInstruction* argument_elements = Add(false);
  HInstruction* argument = Add(argument_elements, checked_length, key);

  Add(elements, key, argument, nullptr, kind);
  builder.EndBody();
  return new_object;
}


template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, NONE);
}


Handle ArrayNoArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, SINGLE);
}


Handle ArraySingleArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, MULTIPLE);
}


Handle ArrayNArgumentsConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, NONE);
}


Handle InternalArrayNoArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, SINGLE);
}


Handle InternalArraySingleArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  ElementsKind kind =
      // (continuation of the InternalArray N-arguments builder)
      casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, MULTIPLE);
}


Handle InternalArrayNArgumentsConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


// CompareNil IC: branches on BuildCompareNil's continuation and returns
// true/false/undefined constants depending on which branches are reachable.
template <>
HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() {
  Isolate* isolate = graph()->isolate();
  CompareNilICStub* stub = casted_stub();
  HIfContinuation continuation;
  Handle sentinel_map(isolate->heap()->meta_map());
  Type* type = stub->GetType(zone(), sentinel_map);
  BuildCompareNil(GetParameter(0), type, &continuation, kEmbedMapsViaWeakCells);
  IfBuilder if_nil(this, &continuation);
  if_nil.Then();
  if (continuation.IsFalseReachable()) {
    if_nil.Else();
    if_nil.Return(graph()->GetConstantFalse());
  }
  if_nil.End();
  return continuation.IsTrueReachable() ? graph()->GetConstantTrue()
                                        : graph()->GetConstantUndefined();
}


Handle CompareNilICStub::GenerateCode() { return DoGenerateCode(this); }


// Generic binary-op IC: special-cases Token::ADD where one side may (but is
// not known to) be a string, emitting a runtime string-type check.
template <>
HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() {
  BinaryOpICState state = casted_stub()->state();

  HValue* left = GetParameter(BinaryOpICStub::kLeft);
  HValue* right = GetParameter(BinaryOpICStub::kRight);

  Type* left_type = state.GetLeftType();
  Type* right_type = state.GetRightType();
  Type* result_type = state.GetResultType();

  DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) &&
         (state.HasSideEffects() || !result_type->Is(Type::None())));

  HValue* result = NULL;
  HAllocationMode allocation_mode(NOT_TENURED);
  if (state.op() == Token::ADD &&
      (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) &&
      !left_type->Is(Type::String()) && !right_type->Is(Type::String())) {
    // For the generic add stub a fast case for string addition is performance
    // critical.
    if (left_type->Maybe(Type::String())) {
      IfBuilder if_leftisstring(this);
      if_leftisstring.If(left);
      if_leftisstring.Then();
      {
        Push(BuildBinaryOperation(state.op(), left, right, Type::String(zone()),
                                  right_type, result_type,
                                  state.fixed_right_arg(), allocation_mode,
                                  state.strength()));
      }
      if_leftisstring.Else();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right, left_type, right_type, result_type,
            state.fixed_right_arg(), allocation_mode, state.strength()));
      }
      if_leftisstring.End();
      result = Pop();
    } else {
      IfBuilder if_rightisstring(this);
      if_rightisstring.If(right);
      if_rightisstring.Then();
      {
        Push(BuildBinaryOperation(state.op(), left, right, left_type,
                                  Type::String(zone()), result_type,
                                  state.fixed_right_arg(), allocation_mode,
                                  state.strength()));
      }
      if_rightisstring.Else();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right, left_type, right_type, result_type,
            state.fixed_right_arg(), allocation_mode, state.strength()));
      }
      if_rightisstring.End();
      result = Pop();
    }
  } else {
    result = BuildBinaryOperation(
        state.op(), left, right, left_type, right_type, result_type,
        state.fixed_right_arg(), allocation_mode, state.strength());
  }

  // If we encounter a generic argument, the number conversion is
  // observable, thus we cannot afford to bail out after the fact.
  if (!state.HasSideEffects()) {
    result = EnforceNumberType(result, result_type);
  }

  return result;
}


Handle BinaryOpICStub::GenerateCode() { return DoGenerateCode(this); }


template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  BinaryOpICState state = casted_stub()->state();

  HValue* allocation_site = GetParameter(
      BinaryOpWithAllocationSiteStub::kAllocationSite);
  HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft);
  HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight);

  Type* left_type = state.GetLeftType();
  Type* right_type = state.GetRightType();
  Type* result_type = state.GetResultType();
  HAllocationMode allocation_mode(allocation_site);

  return BuildBinaryOperation(state.op(), left, right, left_type, right_type,
                              result_type, state.fixed_right_arg(),
                              allocation_mode, state.strength());
}


Handle BinaryOpWithAllocationSiteStub::GenerateCode() {
  return DoGenerateCode(this);
}


// ToString helper: checks the input is a string, or (when `convert`)
// converts smis, other primitives and — via ToPrimitive — objects.
HValue* CodeStubGraphBuilderBase::BuildToString(HValue* input, bool convert) {
  if (!convert) return BuildCheckString(input);
  IfBuilder if_inputissmi(this);
  HValue* inputissmi = if_inputissmi.If(input);
  if_inputissmi.Then();
  {
    // Convert the input smi to a string.
    Push(BuildNumberToString(input, Type::SignedSmall()));
  }
  if_inputissmi.Else();
  {
    HValue* input_map = Add(input, inputissmi, HObjectAccess::ForMap());
    HValue* input_instance_type = Add(
        input_map, inputissmi, HObjectAccess::ForMapInstanceType());
    IfBuilder if_inputisstring(this);
    if_inputisstring.If(
        input_instance_type, Add(FIRST_NONSTRING_TYPE), Token::LT);
    if_inputisstring.Then();
    {
      // The input is already a string.
      Push(input);
    }
    if_inputisstring.Else();
    {
      // Convert to primitive first (if necessary), see
      // ES6 section 12.7.3 The Addition operator.
      IfBuilder if_inputisprimitive(this);
      STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE);
      if_inputisprimitive.If(
          input_instance_type, Add(LAST_PRIMITIVE_TYPE), Token::LTE);
      if_inputisprimitive.Then();
      {
        // The input is already a primitive.
        // (continuation of BuildToString from the previous line)
        Push(input);
      }
      if_inputisprimitive.Else();
      {
        // Convert the input to a primitive.
        Push(BuildToPrimitive(input, input_map));
      }
      if_inputisprimitive.End();
      // Convert the primitive to a string value.
      ToStringDescriptor descriptor(isolate());
      ToStringStub stub(isolate());
      HValue* values[] = {context(), Pop()};
      Push(AddUncasted(
          Add(stub.GetCode()), 0, descriptor,
          Vector(values, arraysize(values))));
    }
    if_inputisstring.End();
  }
  if_inputissmi.End();
  return Pop();
}


// Fast ToPrimitive: shortcut for unmodified string wrappers, otherwise a
// %ToPrimitive runtime call.
HValue* CodeStubGraphBuilderBase::BuildToPrimitive(HValue* input,
                                                   HValue* input_map) {
  // Get the native context of the caller.
  HValue* native_context = BuildGetNativeContext();

  // Determine the initial map of the %ObjectPrototype%.
  HValue* object_function_prototype_map =
      Add(native_context, nullptr,
          HObjectAccess::ForContextSlot(
              Context::OBJECT_FUNCTION_PROTOTYPE_MAP_INDEX));

  // Determine the initial map of the %StringPrototype%.
  HValue* string_function_prototype_map =
      Add(native_context, nullptr,
          HObjectAccess::ForContextSlot(
              Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));

  // Determine the initial map of the String function.
  HValue* string_function = Add(
      native_context, nullptr,
      HObjectAccess::ForContextSlot(Context::STRING_FUNCTION_INDEX));
  HValue* string_function_initial_map = Add(
      string_function, nullptr, HObjectAccess::ForPrototypeOrInitialMap());

  // Determine the map of the [[Prototype]] of {input}.
  HValue* input_prototype =
      Add(input_map, nullptr, HObjectAccess::ForPrototype());
  HValue* input_prototype_map =
      Add(input_prototype, nullptr, HObjectAccess::ForMap());

  // For string wrappers (JSValue instances with [[StringData]] internal
  // fields), we can shortcirciut the ToPrimitive if
  //
  // (a) the {input} map matches the initial map of the String function,
  // (b) the {input} [[Prototype]] is the unmodified %StringPrototype% (i.e.
  // no one monkey-patched toString, @@toPrimitive or valueOf), and
  // (c) the %ObjectPrototype% (i.e. the [[Prototype]] of the
  // %StringPrototype%) is also unmodified, that is no one sneaked a
  // @@toPrimitive into the %ObjectPrototype%.
  //
  // If all these assumptions hold, we can just take the [[StringData]] value
  // and return it.
  // TODO(bmeurer): This just repairs a regression introduced by removing the
  // weird (and broken) intrinsic %_IsStringWrapperSafeForDefaultValue, which
  // was intendend to something similar to this, although less efficient and
  // wrong in the presence of @@toPrimitive. Long-term we might want to move
  // into the direction of having a ToPrimitiveStub that can do common cases
  // while staying in JavaScript land (i.e. not going to C++).
  IfBuilder if_inputisstringwrapper(this);
  if_inputisstringwrapper.If(input_map, string_function_initial_map);
  if_inputisstringwrapper.And();
  if_inputisstringwrapper.If(input_prototype_map,
                             string_function_prototype_map);
  if_inputisstringwrapper.And();
  if_inputisstringwrapper.If(
      Add(Add(input_prototype_map, nullptr, HObjectAccess::ForPrototype()),
          nullptr, HObjectAccess::ForMap()),
      object_function_prototype_map);
  if_inputisstringwrapper.Then();
  {
    Push(BuildLoadNamedField(
        input, FieldIndex::ForInObjectOffset(JSValue::kValueOffset)));
  }
  if_inputisstringwrapper.Else();
  {
    // TODO(bmeurer): Add support for fast ToPrimitive conversion using
    // a dedicated ToPrimitiveStub.
    Add(input);
    Push(Add(Runtime::FunctionForId(Runtime::kToPrimitive), 1));
  }
  if_inputisstringwrapper.End();
  return Pop();
}


template <>
HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() {
  StringAddStub* stub = casted_stub();
  StringAddFlags flags = stub->flags();
  PretenureFlag pretenure_flag = stub->pretenure_flag();

  HValue* left = GetParameter(StringAddStub::kLeft);
  HValue* right = GetParameter(StringAddStub::kRight);

  // Make sure that both arguments are strings if not known in advance.
  if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) {
    left =
        BuildToString(left, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT);
  }
  if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) {
    right = BuildToString(right,
                          (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT);
  }

  return BuildStringAdd(left, right, HAllocationMode(pretenure_flag));
}


Handle StringAddStub::GenerateCode() { return DoGenerateCode(this); }


template <>
HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() {
  ToBooleanStub* stub = casted_stub();
  IfBuilder if_true(this);
  if_true.If(GetParameter(0), stub->types());
  if_true.Then();
  if_true.Return(graph()->GetConstantTrue());
  if_true.Else();
  if_true.End();
  return graph()->GetConstantFalse();
}


Handle ToBooleanStub::GenerateCode() { return DoGenerateCode(this); }


// StoreGlobal IC: optionally checks the (placeholder) global map, then loads
// the property cell from a placeholder WeakCell.
// NOTE(review): truncated — the remainder of this builder is missing from
// this chunk; the trailing bare "Add" below is the start of a lost statement.
template <>
HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() {
  StoreGlobalStub* stub = casted_stub();
  HParameter* value = GetParameter(StoreDescriptor::kValueIndex);
  if (stub->check_global()) {
    // Check that the map of the global has not changed: use a placeholder map
    // that will be replaced later with the global object's map.
    HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex);
    HValue* proxy_map = Add(proxy, nullptr, HObjectAccess::ForMap());
    HValue* global = Add(proxy_map, nullptr, HObjectAccess::ForPrototype());
    HValue* map_cell = Add(isolate()->factory()->NewWeakCell(
        StoreGlobalStub::global_map_placeholder(isolate())));
    HValue* expected_map = Add(
        map_cell, nullptr, HObjectAccess::ForWeakCellValue());
    HValue* map = Add(global, nullptr, HObjectAccess::ForMap());
    IfBuilder map_check(this);
    map_check.IfNot(expected_map, map);
    map_check.ThenDeopt(Deoptimizer::kUnknownMap);
    map_check.End();
  }

  HValue* weak_cell = Add(isolate()->factory()->NewWeakCell(
      StoreGlobalStub::property_cell_placeholder(isolate())));
  HValue* cell = Add(weak_cell, nullptr, HObjectAccess::ForWeakCellValue());
  Add
// NOTE(review): the return type ("Handle<Code>", stripped by extraction) of
// this GenerateCode definition was lost before this line, along with the
// FastCloneRegExp builder body that preceded it.
FastCloneRegExpStub::GenerateCode() { return DoGenerateCode(this); }


// Clones a shallow array boilerplate found via the closure's literals array,
// choosing the empty/COW/fixed/double copy strategy; deopts when the
// allocation site is uninitialized.
template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  Factory* factory = isolate()->factory();
  HValue* undefined = graph()->GetConstantUndefined();
  AllocationSiteMode alloc_site_mode = casted_stub()->allocation_site_mode();

  HValue* closure = GetParameter(0);
  HValue* literal_index = GetParameter(1);

  // This stub is very performance sensitive, the generated code must be tuned
  // so that it doesn't build and eager frame.
  info()->MarkMustNotHaveEagerFrame();

  HValue* literals_array = Add(
      closure, nullptr, HObjectAccess::ForLiteralsPointer());

  HInstruction* allocation_site = Add(
      literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS,
      NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag);
  IfBuilder checker(this);
  checker.IfNot(allocation_site, undefined);
  checker.Then();

  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
      AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate = Add(allocation_site, nullptr, access);
  HValue* elements = AddLoadElements(boilerplate);
  HValue* capacity = AddLoadFixedArrayLength(elements);
  IfBuilder zero_capacity(this);
  zero_capacity.If(capacity, graph()->GetConstant0(), Token::EQ);
  zero_capacity.Then();
  Push(BuildCloneShallowArrayEmpty(boilerplate,
                                   allocation_site,
                                   alloc_site_mode));
  zero_capacity.Else();
  IfBuilder if_fixed_cow(this);
  if_fixed_cow.If(elements, factory->fixed_cow_array_map());
  if_fixed_cow.Then();
  Push(BuildCloneShallowArrayCow(boilerplate,
                                 allocation_site,
                                 alloc_site_mode,
                                 FAST_ELEMENTS));
  if_fixed_cow.Else();
  IfBuilder if_fixed(this);
  if_fixed.If(elements, factory->fixed_array_map());
  if_fixed.Then();
  Push(BuildCloneShallowArrayNonEmpty(boilerplate,
                                      allocation_site,
                                      alloc_site_mode,
                                      FAST_ELEMENTS));

  if_fixed.Else();
  Push(BuildCloneShallowArrayNonEmpty(boilerplate,
                                      allocation_site,
                                      alloc_site_mode,
                                      FAST_DOUBLE_ELEMENTS));
  if_fixed.End();
  if_fixed_cow.End();
  zero_capacity.End();

  checker.ElseDeopt(Deoptimizer::kUninitializedBoilerplateLiterals);
  checker.End();

  return environment()->Pop();
}


Handle FastCloneShallowArrayStub::GenerateCode() {
  return DoGenerateCode(this);
}


// Clones a shallow object boilerplate word-by-word, optionally appending an
// AllocationMemento; deopts when the boilerplate is uninitialized.
template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  HValue* undefined = graph()->GetConstantUndefined();
  HValue* closure = GetParameter(0);
  HValue* literal_index = GetParameter(1);

  HValue* literals_array = Add(
      closure, nullptr, HObjectAccess::ForLiteralsPointer());

  HInstruction* allocation_site = Add(
      literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS,
      NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag);

  IfBuilder checker(this);
  checker.IfNot(allocation_site, undefined);
  checker.And();

  HObjectAccess access = HObjectAccess::ForAllocationSiteOffset(
      AllocationSite::kTransitionInfoOffset);
  HInstruction* boilerplate = Add(allocation_site, nullptr, access);

  int length = casted_stub()->length();
  if (length == 0) {
    // Empty objects have some slack added to them.
    length = JSObject::kInitialGlobalObjectUnusedPropertiesCount;
  }
  int size = JSObject::kHeaderSize + length * kPointerSize;
  int object_size = size;
  if (FLAG_allocation_site_pretenuring) {
    size += AllocationMemento::kSize;
  }

  // Guard: only clone when the boilerplate's instance size matches the size
  // this stub was specialized for.
  HValue* boilerplate_map = Add(boilerplate, nullptr, HObjectAccess::ForMap());
  HValue* boilerplate_size = Add(
      boilerplate_map, nullptr, HObjectAccess::ForMapInstanceSize());
  HValue* size_in_words = Add(object_size >> kPointerSizeLog2);
  checker.If(boilerplate_size, size_in_words, Token::EQ);
  checker.Then();

  HValue* size_in_bytes = Add(size);

  HInstruction* object =
      Add(size_in_bytes, HType::JSObject(), NOT_TENURED, JS_OBJECT_TYPE);

  for (int i = 0; i < object_size; i += kPointerSize) {
    HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(i);
    Add(object, access, Add(boilerplate, nullptr, access));
  }

  DCHECK(FLAG_allocation_site_pretenuring || (size == object_size));
  if (FLAG_allocation_site_pretenuring) {
    BuildCreateAllocationMemento(
        object, Add(object_size), allocation_site);
  }

  environment()->Push(object);
  checker.ElseDeopt(Deoptimizer::kUninitializedBoilerplateInFastClone);
  checker.End();

  return environment()->Pop();
}


Handle FastCloneShallowObjectStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  // This stub is performance sensitive, the generated code must be tuned
  // so that it doesn't build an eager frame.
info()->MarkMustNotHaveEagerFrame(); HValue* size = Add(AllocationSite::kSize); HInstruction* object = Add(size, HType::JSObject(), TENURED, JS_OBJECT_TYPE); // Store the map Handle allocation_site_map = isolate()->factory()->allocation_site_map(); AddStoreMapConstant(object, allocation_site_map); // Store the payload (smi elements kind) HValue* initial_elements_kind = Add(GetInitialFastElementsKind()); Add(object, HObjectAccess::ForAllocationSiteOffset( AllocationSite::kTransitionInfoOffset), initial_elements_kind); // Unlike literals, constructed arrays don't have nested sites Add(object, HObjectAccess::ForAllocationSiteOffset( AllocationSite::kNestedSiteOffset), graph()->GetConstant0()); // Pretenuring calculation field. Add(object, HObjectAccess::ForAllocationSiteOffset( AllocationSite::kPretenureDataOffset), graph()->GetConstant0()); // Pretenuring memento creation count field. Add(object, HObjectAccess::ForAllocationSiteOffset( AllocationSite::kPretenureCreateCountOffset), graph()->GetConstant0()); // Store an empty fixed array for the code dependency. HConstant* empty_fixed_array = Add(isolate()->factory()->empty_fixed_array()); Add( object, HObjectAccess::ForAllocationSiteOffset( AllocationSite::kDependentCodeOffset), empty_fixed_array); // Link the object to the allocation site list HValue* site_list = Add( ExternalReference::allocation_sites_list_address(isolate())); HValue* site = Add(site_list, nullptr, HObjectAccess::ForAllocationSiteList()); // TODO(mvstanton): This is a store to a weak pointer, which we may want to // mark as such in order to skip the write barrier, once we have a unified // system for weakness. For now we decided to keep it like this because having // an initial write barrier backed store makes this pointer strong until the // next GC, and allocation sites are designed to survive several GCs anyway. 
Add( object, HObjectAccess::ForAllocationSiteOffset(AllocationSite::kWeakNextOffset), site); Add(site_list, HObjectAccess::ForAllocationSiteList(), object); HInstruction* feedback_vector = GetParameter(0); HInstruction* slot = GetParameter(1); Add(feedback_vector, slot, object, nullptr, FAST_ELEMENTS, INITIALIZING_STORE); return feedback_vector; } Handle CreateAllocationSiteStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { // This stub is performance sensitive, the generated code must be tuned // so that it doesn't build an eager frame. info()->MarkMustNotHaveEagerFrame(); HValue* size = Add(WeakCell::kSize); HInstruction* object = Add(size, HType::JSObject(), TENURED, JS_OBJECT_TYPE); Handle weak_cell_map = isolate()->factory()->weak_cell_map(); AddStoreMapConstant(object, weak_cell_map); HInstruction* value = GetParameter(CreateWeakCellDescriptor::kValueIndex); Add(object, HObjectAccess::ForWeakCellValue(), value); Add(object, HObjectAccess::ForWeakCellNext(), graph()->GetConstantHole()); HInstruction* feedback_vector = GetParameter(CreateWeakCellDescriptor::kVectorIndex); HInstruction* slot = GetParameter(CreateWeakCellDescriptor::kSlotIndex); Add(feedback_vector, slot, object, nullptr, FAST_ELEMENTS, INITIALIZING_STORE); return graph()->GetConstant0(); } Handle CreateWeakCellStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { int context_index = casted_stub()->context_index(); int slot_index = casted_stub()->slot_index(); HValue* script_context = BuildGetScriptContext(context_index); return Add(script_context, nullptr, HObjectAccess::ForContextSlot(slot_index)); } Handle LoadScriptContextFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { int context_index = casted_stub()->context_index(); int slot_index = casted_stub()->slot_index(); HValue* script_context = 
BuildGetScriptContext(context_index); Add(script_context, HObjectAccess::ForContextSlot(slot_index), GetParameter(2), STORE_TO_INITIALIZED_ENTRY); return GetParameter(2); } Handle StoreScriptContextFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); if (IsFastDoubleElementsKind(kind)) { info()->MarkAsSavesCallerDoubles(); } HValue* object = GetParameter(GrowArrayElementsDescriptor::kObjectIndex); HValue* key = GetParameter(GrowArrayElementsDescriptor::kKeyIndex); HValue* elements = AddLoadElements(object); HValue* current_capacity = Add( elements, nullptr, HObjectAccess::ForFixedArrayLength()); HValue* length = casted_stub()->is_js_array() ? Add(object, static_cast(NULL), HObjectAccess::ForArrayLength(kind)) : current_capacity; return BuildCheckAndGrowElementsCapacity(object, elements, kind, length, current_capacity, key); } Handle GrowArrayElementsStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { LoadKeyedHoleMode hole_mode = casted_stub()->convert_hole_to_undefined() ? CONVERT_HOLE_TO_UNDEFINED : NEVER_RETURN_HOLE; HInstruction* load = BuildUncheckedMonomorphicElementAccess( GetParameter(LoadDescriptor::kReceiverIndex), GetParameter(LoadDescriptor::kNameIndex), NULL, casted_stub()->is_js_array(), casted_stub()->elements_kind(), LOAD, hole_mode, STANDARD_STORE); return load; } Handle LoadFastElementStub::GenerateCode() { return DoGenerateCode(this); } HLoadNamedField* CodeStubGraphBuilderBase::BuildLoadNamedField( HValue* object, FieldIndex index) { Representation representation = index.is_double() ? Representation::Double() : Representation::Tagged(); int offset = index.offset(); HObjectAccess access = index.is_inobject() ? 
HObjectAccess::ForObservableJSObjectOffset(offset, representation) : HObjectAccess::ForBackingStoreOffset(offset, representation); if (index.is_double() && (!FLAG_unbox_double_fields || !index.is_inobject())) { // Load the heap number. object = Add( object, nullptr, access.WithRepresentation(Representation::Tagged())); // Load the double value from it. access = HObjectAccess::ForHeapNumberValue(); } return Add(object, nullptr, access); } template<> HValue* CodeStubGraphBuilder::BuildCodeStub() { return BuildLoadNamedField(GetParameter(0), casted_stub()->index()); } Handle LoadFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { return BuildArrayBufferViewFieldAccessor(GetParameter(0), nullptr, casted_stub()->index()); } Handle ArrayBufferViewLoadFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* map = AddLoadMap(GetParameter(0), NULL); HObjectAccess descriptors_access = HObjectAccess::ForObservableJSObjectOffset( Map::kDescriptorsOffset, Representation::Tagged()); HValue* descriptors = Add(map, nullptr, descriptors_access); HObjectAccess value_access = HObjectAccess::ForObservableJSObjectOffset( DescriptorArray::GetValueOffset(casted_stub()->constant_index())); return Add(descriptors, nullptr, value_access); } Handle LoadConstantStub::GenerateCode() { return DoGenerateCode(this); } HValue* CodeStubGraphBuilderBase::UnmappedCase(HValue* elements, HValue* key, HValue* value) { HValue* result = NULL; HInstruction* backing_store = Add(elements, graph()->GetConstant1(), nullptr, nullptr, FAST_ELEMENTS, ALLOW_RETURN_HOLE); Add(backing_store, isolate()->factory()->fixed_array_map()); HValue* backing_store_length = Add( backing_store, nullptr, HObjectAccess::ForFixedArrayLength()); IfBuilder in_unmapped_range(this); in_unmapped_range.If(key, backing_store_length, Token::LT); in_unmapped_range.Then(); { if (value == NULL) { result = 
Add(backing_store, key, nullptr, nullptr, FAST_HOLEY_ELEMENTS, NEVER_RETURN_HOLE); } else { Add(backing_store, key, value, nullptr, FAST_HOLEY_ELEMENTS); } } in_unmapped_range.ElseDeopt(Deoptimizer::kOutsideOfRange); in_unmapped_range.End(); return result; } HValue* CodeStubGraphBuilderBase::EmitKeyedSloppyArguments(HValue* receiver, HValue* key, HValue* value) { // Mapped arguments are actual arguments. Unmapped arguments are values added // to the arguments object after it was created for the call. Mapped arguments // are stored in the context at indexes given by elements[key + 2]. Unmapped // arguments are stored as regular indexed properties in the arguments array, // held at elements[1]. See NewSloppyArguments() in runtime.cc for a detailed // look at argument object construction. // // The sloppy arguments elements array has a special format: // // 0: context // 1: unmapped arguments array // 2: mapped_index0, // 3: mapped_index1, // ... // // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments). // If key + 2 >= elements.length then attempt to look in the unmapped // arguments array (given by elements[1]) and return the value at key, missing // to the runtime if the unmapped arguments array is not a fixed array or if // key >= unmapped_arguments_array.length. // // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value // in the unmapped arguments array, as described above. Otherwise, t is a Smi // index into the context array given at elements[0]. Return the value at // context[t]. 
bool is_load = value == NULL; key = AddUncasted(key, Representation::Smi()); IfBuilder positive_smi(this); positive_smi.If(key, graph()->GetConstant0(), Token::LT); positive_smi.ThenDeopt(Deoptimizer::kKeyIsNegative); positive_smi.End(); HValue* constant_two = Add(2); HValue* elements = AddLoadElements(receiver, nullptr); HValue* elements_length = Add( elements, nullptr, HObjectAccess::ForFixedArrayLength()); HValue* adjusted_length = AddUncasted(elements_length, constant_two); IfBuilder in_range(this); in_range.If(key, adjusted_length, Token::LT); in_range.Then(); { HValue* index = AddUncasted(key, constant_two); HInstruction* mapped_index = Add(elements, index, nullptr, nullptr, FAST_HOLEY_ELEMENTS, ALLOW_RETURN_HOLE); IfBuilder is_valid(this); is_valid.IfNot(mapped_index, graph()->GetConstantHole()); is_valid.Then(); { // TODO(mvstanton): I'd like to assert from this point, that if the // mapped_index is not the hole that it is indeed, a smi. An unnecessary // smi check is being emitted. HValue* the_context = Add(elements, graph()->GetConstant0(), nullptr, nullptr, FAST_ELEMENTS); STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize); if (is_load) { HValue* result = Add(the_context, mapped_index, nullptr, nullptr, FAST_ELEMENTS, ALLOW_RETURN_HOLE); environment()->Push(result); } else { DCHECK(value != NULL); Add(the_context, mapped_index, value, nullptr, FAST_ELEMENTS); environment()->Push(value); } } is_valid.Else(); { HValue* result = UnmappedCase(elements, key, value); environment()->Push(is_load ? result : value); } is_valid.End(); } in_range.Else(); { HValue* result = UnmappedCase(elements, key, value); environment()->Push(is_load ? 
result : value); } in_range.End(); return environment()->Pop(); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex); HValue* key = GetParameter(LoadDescriptor::kNameIndex); return EmitKeyedSloppyArguments(receiver, key, NULL); } Handle KeyedLoadSloppyArgumentsStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* receiver = GetParameter(StoreDescriptor::kReceiverIndex); HValue* key = GetParameter(StoreDescriptor::kNameIndex); HValue* value = GetParameter(StoreDescriptor::kValueIndex); return EmitKeyedSloppyArguments(receiver, key, value); } Handle KeyedStoreSloppyArgumentsStub::GenerateCode() { return DoGenerateCode(this); } void CodeStubGraphBuilderBase::BuildStoreNamedField( HValue* object, HValue* value, FieldIndex index, Representation representation, bool transition_to_field) { DCHECK(!index.is_double() || representation.IsDouble()); int offset = index.offset(); HObjectAccess access = index.is_inobject() ? HObjectAccess::ForObservableJSObjectOffset(offset, representation) : HObjectAccess::ForBackingStoreOffset(offset, representation); if (representation.IsDouble()) { if (!FLAG_unbox_double_fields || !index.is_inobject()) { HObjectAccess heap_number_access = access.WithRepresentation(Representation::Tagged()); if (transition_to_field) { // The store requires a mutable HeapNumber to be allocated. NoObservableSideEffectsScope no_side_effects(this); HInstruction* heap_number_size = Add(HeapNumber::kSize); // TODO(hpayer): Allocation site pretenuring support. HInstruction* heap_number = Add(heap_number_size, HType::HeapObject(), NOT_TENURED, MUTABLE_HEAP_NUMBER_TYPE); AddStoreMapConstant(heap_number, isolate()->factory()->mutable_heap_number_map()); Add(heap_number, HObjectAccess::ForHeapNumberValue(), value); // Store the new mutable heap number into the object. 
access = heap_number_access; value = heap_number; } else { // Load the heap number. object = Add(object, nullptr, heap_number_access); // Store the double value into it. access = HObjectAccess::ForHeapNumberValue(); } } } else if (representation.IsHeapObject()) { BuildCheckHeapObject(value); } Add(object, access, value, INITIALIZING_STORE); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BuildStoreNamedField(GetParameter(0), GetParameter(2), casted_stub()->index(), casted_stub()->representation(), false); return GetParameter(2); } Handle StoreFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* object = GetParameter(StoreTransitionHelper::ReceiverIndex()); switch (casted_stub()->store_mode()) { case StoreTransitionStub::ExtendStorageAndStoreMapAndValue: { HValue* properties = Add( object, nullptr, HObjectAccess::ForPropertiesPointer()); HValue* length = AddLoadFixedArrayLength(properties); HValue* delta = Add(static_cast(JSObject::kFieldsAdded)); HValue* new_capacity = AddUncasted(length, delta); // Grow properties array. ElementsKind kind = FAST_ELEMENTS; Add(new_capacity, Add((Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) >> ElementsKindToShiftSize(kind))); // Reuse this code for properties backing store allocation. HValue* new_properties = BuildAllocateAndInitializeArray(kind, new_capacity); BuildCopyProperties(properties, new_properties, length, new_capacity); Add(object, HObjectAccess::ForPropertiesPointer(), new_properties); } // Fall through. case StoreTransitionStub::StoreMapAndValue: // Store the new value into the "extended" object. BuildStoreNamedField( object, GetParameter(StoreTransitionHelper::ValueIndex()), casted_stub()->index(), casted_stub()->representation(), true); // Fall through. case StoreTransitionStub::StoreMapOnly: // And finally update the map. 
Add(object, HObjectAccess::ForMap(), GetParameter(StoreTransitionHelper::MapIndex())); break; } return GetParameter(StoreTransitionHelper::ValueIndex()); } Handle StoreTransitionStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BuildUncheckedMonomorphicElementAccess( GetParameter(StoreDescriptor::kReceiverIndex), GetParameter(StoreDescriptor::kNameIndex), GetParameter(StoreDescriptor::kValueIndex), casted_stub()->is_js_array(), casted_stub()->elements_kind(), STORE, NEVER_RETURN_HOLE, casted_stub()->store_mode()); return GetParameter(2); } Handle StoreFastElementStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { info()->MarkAsSavesCallerDoubles(); BuildTransitionElementsKind(GetParameter(0), GetParameter(1), casted_stub()->from_kind(), casted_stub()->to_kind(), casted_stub()->is_js_array()); return GetParameter(0); } Handle TransitionElementsKindStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(Add(HeapNumber::kSize), HType::HeapNumber(), NOT_TENURED, HEAP_NUMBER_TYPE); AddStoreMapConstant(result, isolate()->factory()->heap_number_map()); return result; } Handle AllocateHeapNumberStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(Add(HeapNumber::kSize), HType::HeapObject(), NOT_TENURED, MUTABLE_HEAP_NUMBER_TYPE); AddStoreMapConstant(result, isolate()->factory()->mutable_heap_number_map()); return result; } Handle AllocateMutableHeapNumberStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(GetParameter(0), HType::Tagged(), NOT_TENURED, JS_OBJECT_TYPE); return result; } Handle AllocateInNewSpaceStub::GenerateCode() { return DoGenerateCode(this); } HValue* 
CodeStubGraphBuilderBase::BuildArrayConstructor( ElementsKind kind, AllocationSiteOverrideMode override_mode, ArgumentClass argument_class) { HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor); HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite); JSArrayBuilder array_builder(this, kind, alloc_site, constructor, override_mode); HValue* result = NULL; switch (argument_class) { case NONE: // This stub is very performance sensitive, the generated code must be // tuned so that it doesn't build and eager frame. info()->MarkMustNotHaveEagerFrame(); result = array_builder.AllocateEmptyArray(); break; case SINGLE: result = BuildArraySingleArgumentConstructor(&array_builder); break; case MULTIPLE: result = BuildArrayNArgumentsConstructor(&array_builder, kind); break; } return result; } HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor( ElementsKind kind, ArgumentClass argument_class) { HValue* constructor = GetParameter( InternalArrayConstructorStubBase::kConstructor); JSArrayBuilder array_builder(this, kind, constructor); HValue* result = NULL; switch (argument_class) { case NONE: // This stub is very performance sensitive, the generated code must be // tuned so that it doesn't build and eager frame. info()->MarkMustNotHaveEagerFrame(); result = array_builder.AllocateEmptyArray(); break; case SINGLE: result = BuildArraySingleArgumentConstructor(&array_builder); break; case MULTIPLE: result = BuildArrayNArgumentsConstructor(&array_builder, kind); break; } return result; } HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor( JSArrayBuilder* array_builder) { // Smi check and range check on the input arg. 
HValue* constant_one = graph()->GetConstant1(); HValue* constant_zero = graph()->GetConstant0(); HInstruction* elements = Add(false); HInstruction* argument = Add( elements, constant_one, constant_zero); return BuildAllocateArrayFromLength(array_builder, argument); } HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor( JSArrayBuilder* array_builder, ElementsKind kind) { // Insert a bounds check because the number of arguments might exceed // the kInitialMaxFastElementArray limit. This cannot happen for code // that was parsed, but calling via Array.apply(thisArg, [...]) might // trigger it. HValue* length = GetArgumentsLength(); HConstant* max_alloc_length = Add(JSArray::kInitialMaxFastElementArray); HValue* checked_length = Add(length, max_alloc_length); // We need to fill with the hole if it's a smi array in the multi-argument // case because we might have to bail out while copying arguments into // the array because they aren't compatible with a smi array. // If it's a double array, no problem, and if it's fast then no // problem either because doubles are boxed. // // TODO(mvstanton): consider an instruction to memset fill the array // with zero in this case instead. JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind) ? JSArrayBuilder::FILL_WITH_HOLE : JSArrayBuilder::DONT_FILL_WITH_HOLE; HValue* new_object = array_builder->AllocateArray(checked_length, max_alloc_length, checked_length, fill_mode); HValue* elements = array_builder->GetElementsLocation(); DCHECK(elements != NULL); // Now populate the elements correctly. 
LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement); HValue* start = graph()->GetConstant0(); HValue* key = builder.BeginBody(start, checked_length, Token::LT); HInstruction* argument_elements = Add(false); HInstruction* argument = Add( argument_elements, checked_length, key); Add(elements, key, argument, nullptr, kind); builder.EndBody(); return new_object; } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, NONE); } Handle ArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, SINGLE); } Handle ArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, MULTIPLE); } Handle ArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, NONE); } Handle InternalArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, SINGLE); } Handle InternalArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = 
casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, MULTIPLE); } Handle InternalArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { Isolate* isolate = graph()->isolate(); CompareNilICStub* stub = casted_stub(); HIfContinuation continuation; Handle sentinel_map(isolate->heap()->meta_map()); Type* type = stub->GetType(zone(), sentinel_map); BuildCompareNil(GetParameter(0), type, &continuation, kEmbedMapsViaWeakCells); IfBuilder if_nil(this, &continuation); if_nil.Then(); if (continuation.IsFalseReachable()) { if_nil.Else(); if_nil.Return(graph()->GetConstantFalse()); } if_nil.End(); return continuation.IsTrueReachable() ? graph()->GetConstantTrue() : graph()->GetConstantUndefined(); } Handle CompareNilICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { BinaryOpICState state = casted_stub()->state(); HValue* left = GetParameter(BinaryOpICStub::kLeft); HValue* right = GetParameter(BinaryOpICStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) && (state.HasSideEffects() || !result_type->Is(Type::None()))); HValue* result = NULL; HAllocationMode allocation_mode(NOT_TENURED); if (state.op() == Token::ADD && (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) && !left_type->Is(Type::String()) && !right_type->Is(Type::String())) { // For the generic add stub a fast case for string addition is performance // critical. 
if (left_type->Maybe(Type::String())) { IfBuilder if_leftisstring(this); if_leftisstring.If(left); if_leftisstring.Then(); { Push(BuildBinaryOperation(state.op(), left, right, Type::String(zone()), right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_leftisstring.Else(); { Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_leftisstring.End(); result = Pop(); } else { IfBuilder if_rightisstring(this); if_rightisstring.If(right); if_rightisstring.Then(); { Push(BuildBinaryOperation(state.op(), left, right, left_type, Type::String(zone()), result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_rightisstring.Else(); { Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_rightisstring.End(); result = Pop(); } } else { result = BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } // If we encounter a generic argument, the number conversion is // observable, thus we cannot afford to bail out after the fact. 
if (!state.HasSideEffects()) { result = EnforceNumberType(result, result_type); } return result; } Handle BinaryOpICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BinaryOpICState state = casted_stub()->state(); HValue* allocation_site = GetParameter( BinaryOpWithAllocationSiteStub::kAllocationSite); HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft); HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); HAllocationMode allocation_mode(allocation_site); return BuildBinaryOperation(state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } Handle BinaryOpWithAllocationSiteStub::GenerateCode() { return DoGenerateCode(this); } HValue* CodeStubGraphBuilderBase::BuildToString(HValue* input, bool convert) { if (!convert) return BuildCheckString(input); IfBuilder if_inputissmi(this); HValue* inputissmi = if_inputissmi.If(input); if_inputissmi.Then(); { // Convert the input smi to a string. Push(BuildNumberToString(input, Type::SignedSmall())); } if_inputissmi.Else(); { HValue* input_map = Add(input, inputissmi, HObjectAccess::ForMap()); HValue* input_instance_type = Add( input_map, inputissmi, HObjectAccess::ForMapInstanceType()); IfBuilder if_inputisstring(this); if_inputisstring.If( input_instance_type, Add(FIRST_NONSTRING_TYPE), Token::LT); if_inputisstring.Then(); { // The input is already a string. Push(input); } if_inputisstring.Else(); { // Convert to primitive first (if necessary), see // ES6 section 12.7.3 The Addition operator. IfBuilder if_inputisprimitive(this); STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE); if_inputisprimitive.If( input_instance_type, Add(LAST_PRIMITIVE_TYPE), Token::LTE); if_inputisprimitive.Then(); { // The input is already a primitive. 
Push(input); } if_inputisprimitive.Else(); { // Convert the input to a primitive. Push(BuildToPrimitive(input, input_map)); } if_inputisprimitive.End(); // Convert the primitive to a string value. ToStringDescriptor descriptor(isolate()); ToStringStub stub(isolate()); HValue* values[] = {context(), Pop()}; Push(AddUncasted( Add(stub.GetCode()), 0, descriptor, Vector(values, arraysize(values)))); } if_inputisstring.End(); } if_inputissmi.End(); return Pop(); } HValue* CodeStubGraphBuilderBase::BuildToPrimitive(HValue* input, HValue* input_map) { // Get the native context of the caller. HValue* native_context = BuildGetNativeContext(); // Determine the initial map of the %ObjectPrototype%. HValue* object_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::OBJECT_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the %StringPrototype%. HValue* string_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the String function. HValue* string_function = Add( native_context, nullptr, HObjectAccess::ForContextSlot(Context::STRING_FUNCTION_INDEX)); HValue* string_function_initial_map = Add( string_function, nullptr, HObjectAccess::ForPrototypeOrInitialMap()); // Determine the map of the [[Prototype]] of {input}. HValue* input_prototype = Add(input_map, nullptr, HObjectAccess::ForPrototype()); HValue* input_prototype_map = Add(input_prototype, nullptr, HObjectAccess::ForMap()); // For string wrappers (JSValue instances with [[StringData]] internal // fields), we can shortcirciut the ToPrimitive if // // (a) the {input} map matches the initial map of the String function, // (b) the {input} [[Prototype]] is the unmodified %StringPrototype% (i.e. // no one monkey-patched toString, @@toPrimitive or valueOf), and // (c) the %ObjectPrototype% (i.e. 
the [[Prototype]] of the // %StringPrototype%) is also unmodified, that is no one sneaked a // @@toPrimitive into the %ObjectPrototype%. // // If all these assumptions hold, we can just take the [[StringData]] value // and return it. // TODO(bmeurer): This just repairs a regression introduced by removing the // weird (and broken) intrinsic %_IsStringWrapperSafeForDefaultValue, which // was intendend to something similar to this, although less efficient and // wrong in the presence of @@toPrimitive. Long-term we might want to move // into the direction of having a ToPrimitiveStub that can do common cases // while staying in JavaScript land (i.e. not going to C++). IfBuilder if_inputisstringwrapper(this); if_inputisstringwrapper.If( input_map, string_function_initial_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( input_prototype_map, string_function_prototype_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( Add(Add(input_prototype_map, nullptr, HObjectAccess::ForPrototype()), nullptr, HObjectAccess::ForMap()), object_function_prototype_map); if_inputisstringwrapper.Then(); { Push(BuildLoadNamedField( input, FieldIndex::ForInObjectOffset(JSValue::kValueOffset))); } if_inputisstringwrapper.Else(); { // TODO(bmeurer): Add support for fast ToPrimitive conversion using // a dedicated ToPrimitiveStub. Add(input); Push(Add(Runtime::FunctionForId(Runtime::kToPrimitive), 1)); } if_inputisstringwrapper.End(); return Pop(); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StringAddStub* stub = casted_stub(); StringAddFlags flags = stub->flags(); PretenureFlag pretenure_flag = stub->pretenure_flag(); HValue* left = GetParameter(StringAddStub::kLeft); HValue* right = GetParameter(StringAddStub::kRight); // Make sure that both arguments are strings if not known in advance. 
if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) { left = BuildToString(left, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) { right = BuildToString(right, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } return BuildStringAdd(left, right, HAllocationMode(pretenure_flag)); } Handle StringAddStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { ToBooleanStub* stub = casted_stub(); IfBuilder if_true(this); if_true.If(GetParameter(0), stub->types()); if_true.Then(); if_true.Return(graph()->GetConstantTrue()); if_true.Else(); if_true.End(); return graph()->GetConstantFalse(); } Handle ToBooleanStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StoreGlobalStub* stub = casted_stub(); HParameter* value = GetParameter(StoreDescriptor::kValueIndex); if (stub->check_global()) { // Check that the map of the global has not changed: use a placeholder map // that will be replaced later with the global object's map. HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex); HValue* proxy_map = Add(proxy, nullptr, HObjectAccess::ForMap()); HValue* global = Add(proxy_map, nullptr, HObjectAccess::ForPrototype()); HValue* map_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::global_map_placeholder(isolate()))); HValue* expected_map = Add( map_cell, nullptr, HObjectAccess::ForWeakCellValue()); HValue* map = Add(global, nullptr, HObjectAccess::ForMap()); IfBuilder map_check(this); map_check.IfNot(expected_map, map); map_check.ThenDeopt(Deoptimizer::kUnknownMap); map_check.End(); } HValue* weak_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::property_cell_placeholder(isolate()))); HValue* cell = Add(weak_cell, nullptr, HObjectAccess::ForWeakCellValue()); Add
FastCloneShallowArrayStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* undefined = graph()->GetConstantUndefined(); HValue* closure = GetParameter(0); HValue* literal_index = GetParameter(1); HValue* literals_array = Add( closure, nullptr, HObjectAccess::ForLiteralsPointer()); HInstruction* allocation_site = Add( literals_array, literal_index, nullptr, nullptr, FAST_ELEMENTS, NEVER_RETURN_HOLE, LiteralsArray::kOffsetToFirstLiteral - kHeapObjectTag); IfBuilder checker(this); checker.IfNot(allocation_site, undefined); checker.And(); HObjectAccess access = HObjectAccess::ForAllocationSiteOffset( AllocationSite::kTransitionInfoOffset); HInstruction* boilerplate = Add(allocation_site, nullptr, access); int length = casted_stub()->length(); if (length == 0) { // Empty objects have some slack added to them. length = JSObject::kInitialGlobalObjectUnusedPropertiesCount; } int size = JSObject::kHeaderSize + length * kPointerSize; int object_size = size; if (FLAG_allocation_site_pretenuring) { size += AllocationMemento::kSize; } HValue* boilerplate_map = Add(boilerplate, nullptr, HObjectAccess::ForMap()); HValue* boilerplate_size = Add( boilerplate_map, nullptr, HObjectAccess::ForMapInstanceSize()); HValue* size_in_words = Add(object_size >> kPointerSizeLog2); checker.If(boilerplate_size, size_in_words, Token::EQ); checker.Then(); HValue* size_in_bytes = Add(size); HInstruction* object = Add(size_in_bytes, HType::JSObject(), NOT_TENURED, JS_OBJECT_TYPE); for (int i = 0; i < object_size; i += kPointerSize) { HObjectAccess access = HObjectAccess::ForObservableJSObjectOffset(i); Add(object, access, Add(boilerplate, nullptr, access)); } DCHECK(FLAG_allocation_site_pretenuring || (size == object_size)); if (FLAG_allocation_site_pretenuring) { BuildCreateAllocationMemento( object, Add(object_size), allocation_site); } environment()->Push(object); 
checker.ElseDeopt(Deoptimizer::kUninitializedBoilerplateInFastClone); checker.End(); return environment()->Pop(); } Handle FastCloneShallowObjectStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { // This stub is performance sensitive, the generated code must be tuned // so that it doesn't build an eager frame. info()->MarkMustNotHaveEagerFrame(); HValue* size = Add(AllocationSite::kSize); HInstruction* object = Add(size, HType::JSObject(), TENURED, JS_OBJECT_TYPE); // Store the map Handle allocation_site_map = isolate()->factory()->allocation_site_map(); AddStoreMapConstant(object, allocation_site_map); // Store the payload (smi elements kind) HValue* initial_elements_kind = Add(GetInitialFastElementsKind()); Add(object, HObjectAccess::ForAllocationSiteOffset( AllocationSite::kTransitionInfoOffset), initial_elements_kind); // Unlike literals, constructed arrays don't have nested sites Add(object, HObjectAccess::ForAllocationSiteOffset( AllocationSite::kNestedSiteOffset), graph()->GetConstant0()); // Pretenuring calculation field. Add(object, HObjectAccess::ForAllocationSiteOffset( AllocationSite::kPretenureDataOffset), graph()->GetConstant0()); // Pretenuring memento creation count field. Add(object, HObjectAccess::ForAllocationSiteOffset( AllocationSite::kPretenureCreateCountOffset), graph()->GetConstant0()); // Store an empty fixed array for the code dependency. 
HConstant* empty_fixed_array = Add(isolate()->factory()->empty_fixed_array()); Add( object, HObjectAccess::ForAllocationSiteOffset( AllocationSite::kDependentCodeOffset), empty_fixed_array); // Link the object to the allocation site list HValue* site_list = Add( ExternalReference::allocation_sites_list_address(isolate())); HValue* site = Add(site_list, nullptr, HObjectAccess::ForAllocationSiteList()); // TODO(mvstanton): This is a store to a weak pointer, which we may want to // mark as such in order to skip the write barrier, once we have a unified // system for weakness. For now we decided to keep it like this because having // an initial write barrier backed store makes this pointer strong until the // next GC, and allocation sites are designed to survive several GCs anyway. Add( object, HObjectAccess::ForAllocationSiteOffset(AllocationSite::kWeakNextOffset), site); Add(site_list, HObjectAccess::ForAllocationSiteList(), object); HInstruction* feedback_vector = GetParameter(0); HInstruction* slot = GetParameter(1); Add(feedback_vector, slot, object, nullptr, FAST_ELEMENTS, INITIALIZING_STORE); return feedback_vector; } Handle CreateAllocationSiteStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { // This stub is performance sensitive, the generated code must be tuned // so that it doesn't build an eager frame. 
info()->MarkMustNotHaveEagerFrame(); HValue* size = Add(WeakCell::kSize); HInstruction* object = Add(size, HType::JSObject(), TENURED, JS_OBJECT_TYPE); Handle weak_cell_map = isolate()->factory()->weak_cell_map(); AddStoreMapConstant(object, weak_cell_map); HInstruction* value = GetParameter(CreateWeakCellDescriptor::kValueIndex); Add(object, HObjectAccess::ForWeakCellValue(), value); Add(object, HObjectAccess::ForWeakCellNext(), graph()->GetConstantHole()); HInstruction* feedback_vector = GetParameter(CreateWeakCellDescriptor::kVectorIndex); HInstruction* slot = GetParameter(CreateWeakCellDescriptor::kSlotIndex); Add(feedback_vector, slot, object, nullptr, FAST_ELEMENTS, INITIALIZING_STORE); return graph()->GetConstant0(); } Handle CreateWeakCellStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { int context_index = casted_stub()->context_index(); int slot_index = casted_stub()->slot_index(); HValue* script_context = BuildGetScriptContext(context_index); return Add(script_context, nullptr, HObjectAccess::ForContextSlot(slot_index)); } Handle LoadScriptContextFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { int context_index = casted_stub()->context_index(); int slot_index = casted_stub()->slot_index(); HValue* script_context = BuildGetScriptContext(context_index); Add(script_context, HObjectAccess::ForContextSlot(slot_index), GetParameter(2), STORE_TO_INITIALIZED_ENTRY); return GetParameter(2); } Handle StoreScriptContextFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); if (IsFastDoubleElementsKind(kind)) { info()->MarkAsSavesCallerDoubles(); } HValue* object = GetParameter(GrowArrayElementsDescriptor::kObjectIndex); HValue* key = GetParameter(GrowArrayElementsDescriptor::kKeyIndex); HValue* elements = 
AddLoadElements(object); HValue* current_capacity = Add( elements, nullptr, HObjectAccess::ForFixedArrayLength()); HValue* length = casted_stub()->is_js_array() ? Add(object, static_cast(NULL), HObjectAccess::ForArrayLength(kind)) : current_capacity; return BuildCheckAndGrowElementsCapacity(object, elements, kind, length, current_capacity, key); } Handle GrowArrayElementsStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { LoadKeyedHoleMode hole_mode = casted_stub()->convert_hole_to_undefined() ? CONVERT_HOLE_TO_UNDEFINED : NEVER_RETURN_HOLE; HInstruction* load = BuildUncheckedMonomorphicElementAccess( GetParameter(LoadDescriptor::kReceiverIndex), GetParameter(LoadDescriptor::kNameIndex), NULL, casted_stub()->is_js_array(), casted_stub()->elements_kind(), LOAD, hole_mode, STANDARD_STORE); return load; } Handle LoadFastElementStub::GenerateCode() { return DoGenerateCode(this); } HLoadNamedField* CodeStubGraphBuilderBase::BuildLoadNamedField( HValue* object, FieldIndex index) { Representation representation = index.is_double() ? Representation::Double() : Representation::Tagged(); int offset = index.offset(); HObjectAccess access = index.is_inobject() ? HObjectAccess::ForObservableJSObjectOffset(offset, representation) : HObjectAccess::ForBackingStoreOffset(offset, representation); if (index.is_double() && (!FLAG_unbox_double_fields || !index.is_inobject())) { // Load the heap number. object = Add( object, nullptr, access.WithRepresentation(Representation::Tagged())); // Load the double value from it. 
access = HObjectAccess::ForHeapNumberValue(); } return Add(object, nullptr, access); } template<> HValue* CodeStubGraphBuilder::BuildCodeStub() { return BuildLoadNamedField(GetParameter(0), casted_stub()->index()); } Handle LoadFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { return BuildArrayBufferViewFieldAccessor(GetParameter(0), nullptr, casted_stub()->index()); } Handle ArrayBufferViewLoadFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* map = AddLoadMap(GetParameter(0), NULL); HObjectAccess descriptors_access = HObjectAccess::ForObservableJSObjectOffset( Map::kDescriptorsOffset, Representation::Tagged()); HValue* descriptors = Add(map, nullptr, descriptors_access); HObjectAccess value_access = HObjectAccess::ForObservableJSObjectOffset( DescriptorArray::GetValueOffset(casted_stub()->constant_index())); return Add(descriptors, nullptr, value_access); } Handle LoadConstantStub::GenerateCode() { return DoGenerateCode(this); } HValue* CodeStubGraphBuilderBase::UnmappedCase(HValue* elements, HValue* key, HValue* value) { HValue* result = NULL; HInstruction* backing_store = Add(elements, graph()->GetConstant1(), nullptr, nullptr, FAST_ELEMENTS, ALLOW_RETURN_HOLE); Add(backing_store, isolate()->factory()->fixed_array_map()); HValue* backing_store_length = Add( backing_store, nullptr, HObjectAccess::ForFixedArrayLength()); IfBuilder in_unmapped_range(this); in_unmapped_range.If(key, backing_store_length, Token::LT); in_unmapped_range.Then(); { if (value == NULL) { result = Add(backing_store, key, nullptr, nullptr, FAST_HOLEY_ELEMENTS, NEVER_RETURN_HOLE); } else { Add(backing_store, key, value, nullptr, FAST_HOLEY_ELEMENTS); } } in_unmapped_range.ElseDeopt(Deoptimizer::kOutsideOfRange); in_unmapped_range.End(); return result; } HValue* CodeStubGraphBuilderBase::EmitKeyedSloppyArguments(HValue* receiver, HValue* key, HValue* 
value) { // Mapped arguments are actual arguments. Unmapped arguments are values added // to the arguments object after it was created for the call. Mapped arguments // are stored in the context at indexes given by elements[key + 2]. Unmapped // arguments are stored as regular indexed properties in the arguments array, // held at elements[1]. See NewSloppyArguments() in runtime.cc for a detailed // look at argument object construction. // // The sloppy arguments elements array has a special format: // // 0: context // 1: unmapped arguments array // 2: mapped_index0, // 3: mapped_index1, // ... // // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments). // If key + 2 >= elements.length then attempt to look in the unmapped // arguments array (given by elements[1]) and return the value at key, missing // to the runtime if the unmapped arguments array is not a fixed array or if // key >= unmapped_arguments_array.length. // // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value // in the unmapped arguments array, as described above. Otherwise, t is a Smi // index into the context array given at elements[0]. Return the value at // context[t]. 
bool is_load = value == NULL; key = AddUncasted(key, Representation::Smi()); IfBuilder positive_smi(this); positive_smi.If(key, graph()->GetConstant0(), Token::LT); positive_smi.ThenDeopt(Deoptimizer::kKeyIsNegative); positive_smi.End(); HValue* constant_two = Add(2); HValue* elements = AddLoadElements(receiver, nullptr); HValue* elements_length = Add( elements, nullptr, HObjectAccess::ForFixedArrayLength()); HValue* adjusted_length = AddUncasted(elements_length, constant_two); IfBuilder in_range(this); in_range.If(key, adjusted_length, Token::LT); in_range.Then(); { HValue* index = AddUncasted(key, constant_two); HInstruction* mapped_index = Add(elements, index, nullptr, nullptr, FAST_HOLEY_ELEMENTS, ALLOW_RETURN_HOLE); IfBuilder is_valid(this); is_valid.IfNot(mapped_index, graph()->GetConstantHole()); is_valid.Then(); { // TODO(mvstanton): I'd like to assert from this point, that if the // mapped_index is not the hole that it is indeed, a smi. An unnecessary // smi check is being emitted. HValue* the_context = Add(elements, graph()->GetConstant0(), nullptr, nullptr, FAST_ELEMENTS); STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize); if (is_load) { HValue* result = Add(the_context, mapped_index, nullptr, nullptr, FAST_ELEMENTS, ALLOW_RETURN_HOLE); environment()->Push(result); } else { DCHECK(value != NULL); Add(the_context, mapped_index, value, nullptr, FAST_ELEMENTS); environment()->Push(value); } } is_valid.Else(); { HValue* result = UnmappedCase(elements, key, value); environment()->Push(is_load ? result : value); } is_valid.End(); } in_range.Else(); { HValue* result = UnmappedCase(elements, key, value); environment()->Push(is_load ? 
result : value); } in_range.End(); return environment()->Pop(); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex); HValue* key = GetParameter(LoadDescriptor::kNameIndex); return EmitKeyedSloppyArguments(receiver, key, NULL); } Handle KeyedLoadSloppyArgumentsStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* receiver = GetParameter(StoreDescriptor::kReceiverIndex); HValue* key = GetParameter(StoreDescriptor::kNameIndex); HValue* value = GetParameter(StoreDescriptor::kValueIndex); return EmitKeyedSloppyArguments(receiver, key, value); } Handle KeyedStoreSloppyArgumentsStub::GenerateCode() { return DoGenerateCode(this); } void CodeStubGraphBuilderBase::BuildStoreNamedField( HValue* object, HValue* value, FieldIndex index, Representation representation, bool transition_to_field) { DCHECK(!index.is_double() || representation.IsDouble()); int offset = index.offset(); HObjectAccess access = index.is_inobject() ? HObjectAccess::ForObservableJSObjectOffset(offset, representation) : HObjectAccess::ForBackingStoreOffset(offset, representation); if (representation.IsDouble()) { if (!FLAG_unbox_double_fields || !index.is_inobject()) { HObjectAccess heap_number_access = access.WithRepresentation(Representation::Tagged()); if (transition_to_field) { // The store requires a mutable HeapNumber to be allocated. NoObservableSideEffectsScope no_side_effects(this); HInstruction* heap_number_size = Add(HeapNumber::kSize); // TODO(hpayer): Allocation site pretenuring support. HInstruction* heap_number = Add(heap_number_size, HType::HeapObject(), NOT_TENURED, MUTABLE_HEAP_NUMBER_TYPE); AddStoreMapConstant(heap_number, isolate()->factory()->mutable_heap_number_map()); Add(heap_number, HObjectAccess::ForHeapNumberValue(), value); // Store the new mutable heap number into the object. 
access = heap_number_access; value = heap_number; } else { // Load the heap number. object = Add(object, nullptr, heap_number_access); // Store the double value into it. access = HObjectAccess::ForHeapNumberValue(); } } } else if (representation.IsHeapObject()) { BuildCheckHeapObject(value); } Add(object, access, value, INITIALIZING_STORE); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BuildStoreNamedField(GetParameter(0), GetParameter(2), casted_stub()->index(), casted_stub()->representation(), false); return GetParameter(2); } Handle StoreFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* object = GetParameter(StoreTransitionHelper::ReceiverIndex()); switch (casted_stub()->store_mode()) { case StoreTransitionStub::ExtendStorageAndStoreMapAndValue: { HValue* properties = Add( object, nullptr, HObjectAccess::ForPropertiesPointer()); HValue* length = AddLoadFixedArrayLength(properties); HValue* delta = Add(static_cast(JSObject::kFieldsAdded)); HValue* new_capacity = AddUncasted(length, delta); // Grow properties array. ElementsKind kind = FAST_ELEMENTS; Add(new_capacity, Add((Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) >> ElementsKindToShiftSize(kind))); // Reuse this code for properties backing store allocation. HValue* new_properties = BuildAllocateAndInitializeArray(kind, new_capacity); BuildCopyProperties(properties, new_properties, length, new_capacity); Add(object, HObjectAccess::ForPropertiesPointer(), new_properties); } // Fall through. case StoreTransitionStub::StoreMapAndValue: // Store the new value into the "extended" object. BuildStoreNamedField( object, GetParameter(StoreTransitionHelper::ValueIndex()), casted_stub()->index(), casted_stub()->representation(), true); // Fall through. case StoreTransitionStub::StoreMapOnly: // And finally update the map. 
Add(object, HObjectAccess::ForMap(), GetParameter(StoreTransitionHelper::MapIndex())); break; } return GetParameter(StoreTransitionHelper::ValueIndex()); } Handle StoreTransitionStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BuildUncheckedMonomorphicElementAccess( GetParameter(StoreDescriptor::kReceiverIndex), GetParameter(StoreDescriptor::kNameIndex), GetParameter(StoreDescriptor::kValueIndex), casted_stub()->is_js_array(), casted_stub()->elements_kind(), STORE, NEVER_RETURN_HOLE, casted_stub()->store_mode()); return GetParameter(2); } Handle StoreFastElementStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { info()->MarkAsSavesCallerDoubles(); BuildTransitionElementsKind(GetParameter(0), GetParameter(1), casted_stub()->from_kind(), casted_stub()->to_kind(), casted_stub()->is_js_array()); return GetParameter(0); } Handle TransitionElementsKindStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(Add(HeapNumber::kSize), HType::HeapNumber(), NOT_TENURED, HEAP_NUMBER_TYPE); AddStoreMapConstant(result, isolate()->factory()->heap_number_map()); return result; } Handle AllocateHeapNumberStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(Add(HeapNumber::kSize), HType::HeapObject(), NOT_TENURED, MUTABLE_HEAP_NUMBER_TYPE); AddStoreMapConstant(result, isolate()->factory()->mutable_heap_number_map()); return result; } Handle AllocateMutableHeapNumberStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(GetParameter(0), HType::Tagged(), NOT_TENURED, JS_OBJECT_TYPE); return result; } Handle AllocateInNewSpaceStub::GenerateCode() { return DoGenerateCode(this); } HValue* 
CodeStubGraphBuilderBase::BuildArrayConstructor( ElementsKind kind, AllocationSiteOverrideMode override_mode, ArgumentClass argument_class) { HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor); HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite); JSArrayBuilder array_builder(this, kind, alloc_site, constructor, override_mode); HValue* result = NULL; switch (argument_class) { case NONE: // This stub is very performance sensitive, the generated code must be // tuned so that it doesn't build and eager frame. info()->MarkMustNotHaveEagerFrame(); result = array_builder.AllocateEmptyArray(); break; case SINGLE: result = BuildArraySingleArgumentConstructor(&array_builder); break; case MULTIPLE: result = BuildArrayNArgumentsConstructor(&array_builder, kind); break; } return result; } HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor( ElementsKind kind, ArgumentClass argument_class) { HValue* constructor = GetParameter( InternalArrayConstructorStubBase::kConstructor); JSArrayBuilder array_builder(this, kind, constructor); HValue* result = NULL; switch (argument_class) { case NONE: // This stub is very performance sensitive, the generated code must be // tuned so that it doesn't build and eager frame. info()->MarkMustNotHaveEagerFrame(); result = array_builder.AllocateEmptyArray(); break; case SINGLE: result = BuildArraySingleArgumentConstructor(&array_builder); break; case MULTIPLE: result = BuildArrayNArgumentsConstructor(&array_builder, kind); break; } return result; } HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor( JSArrayBuilder* array_builder) { // Smi check and range check on the input arg. 
HValue* constant_one = graph()->GetConstant1(); HValue* constant_zero = graph()->GetConstant0(); HInstruction* elements = Add(false); HInstruction* argument = Add( elements, constant_one, constant_zero); return BuildAllocateArrayFromLength(array_builder, argument); } HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor( JSArrayBuilder* array_builder, ElementsKind kind) { // Insert a bounds check because the number of arguments might exceed // the kInitialMaxFastElementArray limit. This cannot happen for code // that was parsed, but calling via Array.apply(thisArg, [...]) might // trigger it. HValue* length = GetArgumentsLength(); HConstant* max_alloc_length = Add(JSArray::kInitialMaxFastElementArray); HValue* checked_length = Add(length, max_alloc_length); // We need to fill with the hole if it's a smi array in the multi-argument // case because we might have to bail out while copying arguments into // the array because they aren't compatible with a smi array. // If it's a double array, no problem, and if it's fast then no // problem either because doubles are boxed. // // TODO(mvstanton): consider an instruction to memset fill the array // with zero in this case instead. JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind) ? JSArrayBuilder::FILL_WITH_HOLE : JSArrayBuilder::DONT_FILL_WITH_HOLE; HValue* new_object = array_builder->AllocateArray(checked_length, max_alloc_length, checked_length, fill_mode); HValue* elements = array_builder->GetElementsLocation(); DCHECK(elements != NULL); // Now populate the elements correctly. 
LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement); HValue* start = graph()->GetConstant0(); HValue* key = builder.BeginBody(start, checked_length, Token::LT); HInstruction* argument_elements = Add(false); HInstruction* argument = Add( argument_elements, checked_length, key); Add(elements, key, argument, nullptr, kind); builder.EndBody(); return new_object; } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, NONE); } Handle ArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, SINGLE); } Handle ArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, MULTIPLE); } Handle ArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, NONE); } Handle InternalArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, SINGLE); } Handle InternalArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = 
casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, MULTIPLE);
}


Handle<Code> InternalArrayNArgumentsConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<CompareNilICStub>::BuildCodeInitializedStub() {
  Isolate* isolate = graph()->isolate();
  CompareNilICStub* stub = casted_stub();
  HIfContinuation continuation;
  Handle<Map> sentinel_map(isolate->heap()->meta_map());
  Type* type = stub->GetType(zone(), sentinel_map);
  BuildCompareNil(GetParameter(0), type, &continuation, kEmbedMapsViaWeakCells);
  IfBuilder if_nil(this, &continuation);
  if_nil.Then();
  if (continuation.IsFalseReachable()) {
    if_nil.Else();
    if_nil.Return(graph()->GetConstantFalse());
  }
  if_nil.End();
  return continuation.IsTrueReachable() ? graph()->GetConstantTrue()
                                        : graph()->GetConstantUndefined();
}


Handle<Code> CompareNilICStub::GenerateCode() { return DoGenerateCode(this); }


// Generic binary-op IC: the ADD case special-cases a maybe-string operand so
// the fast string-concatenation path stays fast.
template <>
HValue* CodeStubGraphBuilder<BinaryOpICStub>::BuildCodeInitializedStub() {
  BinaryOpICState state = casted_stub()->state();

  HValue* left = GetParameter(BinaryOpICStub::kLeft);
  HValue* right = GetParameter(BinaryOpICStub::kRight);

  Type* left_type = state.GetLeftType();
  Type* right_type = state.GetRightType();
  Type* result_type = state.GetResultType();

  DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) &&
         (state.HasSideEffects() || !result_type->Is(Type::None())));

  HValue* result = NULL;
  HAllocationMode allocation_mode(NOT_TENURED);
  if (state.op() == Token::ADD &&
      (left_type->Maybe(Type::String()) ||
       right_type->Maybe(Type::String())) &&
      !left_type->Is(Type::String()) && !right_type->Is(Type::String())) {
    // For the generic add stub a fast case for string addition is performance
    // critical.
    if (left_type->Maybe(Type::String())) {
      IfBuilder if_leftisstring(this);
      if_leftisstring.If<HIsStringAndBranch>(left);
      if_leftisstring.Then();
      {
        Push(BuildBinaryOperation(state.op(), left, right,
                                  Type::String(zone()), right_type,
                                  result_type, state.fixed_right_arg(),
                                  allocation_mode, state.strength()));
      }
      if_leftisstring.Else();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right, left_type, right_type, result_type,
            state.fixed_right_arg(), allocation_mode, state.strength()));
      }
      if_leftisstring.End();
      result = Pop();
    } else {
      IfBuilder if_rightisstring(this);
      if_rightisstring.If<HIsStringAndBranch>(right);
      if_rightisstring.Then();
      {
        Push(BuildBinaryOperation(state.op(), left, right, left_type,
                                  Type::String(zone()), result_type,
                                  state.fixed_right_arg(), allocation_mode,
                                  state.strength()));
      }
      if_rightisstring.Else();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right, left_type, right_type, result_type,
            state.fixed_right_arg(), allocation_mode, state.strength()));
      }
      if_rightisstring.End();
      result = Pop();
    }
  } else {
    result = BuildBinaryOperation(
        state.op(), left, right, left_type, right_type, result_type,
        state.fixed_right_arg(), allocation_mode, state.strength());
  }

  // If we encounter a generic argument, the number conversion is
  // observable, thus we cannot afford to bail out after the fact.
  if (!state.HasSideEffects()) {
    result = EnforceNumberType(result, result_type);
  }

  return result;
}


Handle<Code> BinaryOpICStub::GenerateCode() { return DoGenerateCode(this); }


template <>
HValue* CodeStubGraphBuilder<BinaryOpWithAllocationSiteStub>::BuildCodeStub() {
  BinaryOpICState state = casted_stub()->state();

  HValue* allocation_site = GetParameter(
      BinaryOpWithAllocationSiteStub::kAllocationSite);
  HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft);
  HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight);

  Type* left_type = state.GetLeftType();
  Type* right_type = state.GetRightType();
  Type* result_type = state.GetResultType();
  HAllocationMode allocation_mode(allocation_site);

  return BuildBinaryOperation(state.op(), left, right, left_type, right_type,
                              result_type, state.fixed_right_arg(),
                              allocation_mode, state.strength());
}


Handle<Code> BinaryOpWithAllocationSiteStub::GenerateCode() {
  return DoGenerateCode(this);
}


// Converts {input} to a string. When {convert} is false the input is merely
// checked to already be a string; otherwise smis, strings, other primitives
// and objects (via ToPrimitive) are all handled.
HValue* CodeStubGraphBuilderBase::BuildToString(HValue* input, bool convert) {
  if (!convert) return BuildCheckString(input);
  IfBuilder if_inputissmi(this);
  HValue* inputissmi = if_inputissmi.If<HIsSmiAndBranch>(input);
  if_inputissmi.Then();
  {
    // Convert the input smi to a string.
    Push(BuildNumberToString(input, Type::SignedSmall()));
  }
  if_inputissmi.Else();
  {
    HValue* input_map =
        Add<HLoadNamedField>(input, inputissmi, HObjectAccess::ForMap());
    HValue* input_instance_type = Add<HLoadNamedField>(
        input_map, inputissmi, HObjectAccess::ForMapInstanceType());
    IfBuilder if_inputisstring(this);
    if_inputisstring.If<HCompareNumericAndBranch>(
        input_instance_type, Add<HConstant>(FIRST_NONSTRING_TYPE), Token::LT);
    if_inputisstring.Then();
    {
      // The input is already a string.
      Push(input);
    }
    if_inputisstring.Else();
    {
      // Convert to primitive first (if necessary), see
      // ES6 section 12.7.3 The Addition operator.
      IfBuilder if_inputisprimitive(this);
      STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE);
      if_inputisprimitive.If<HCompareNumericAndBranch>(
          input_instance_type, Add<HConstant>(LAST_PRIMITIVE_TYPE),
          Token::LTE);
      if_inputisprimitive.Then();
      {
        // The input is already a primitive.
Push(input); } if_inputisprimitive.Else(); { // Convert the input to a primitive. Push(BuildToPrimitive(input, input_map)); } if_inputisprimitive.End(); // Convert the primitive to a string value. ToStringDescriptor descriptor(isolate()); ToStringStub stub(isolate()); HValue* values[] = {context(), Pop()}; Push(AddUncasted( Add(stub.GetCode()), 0, descriptor, Vector(values, arraysize(values)))); } if_inputisstring.End(); } if_inputissmi.End(); return Pop(); } HValue* CodeStubGraphBuilderBase::BuildToPrimitive(HValue* input, HValue* input_map) { // Get the native context of the caller. HValue* native_context = BuildGetNativeContext(); // Determine the initial map of the %ObjectPrototype%. HValue* object_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::OBJECT_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the %StringPrototype%. HValue* string_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the String function. HValue* string_function = Add( native_context, nullptr, HObjectAccess::ForContextSlot(Context::STRING_FUNCTION_INDEX)); HValue* string_function_initial_map = Add( string_function, nullptr, HObjectAccess::ForPrototypeOrInitialMap()); // Determine the map of the [[Prototype]] of {input}. HValue* input_prototype = Add(input_map, nullptr, HObjectAccess::ForPrototype()); HValue* input_prototype_map = Add(input_prototype, nullptr, HObjectAccess::ForMap()); // For string wrappers (JSValue instances with [[StringData]] internal // fields), we can shortcirciut the ToPrimitive if // // (a) the {input} map matches the initial map of the String function, // (b) the {input} [[Prototype]] is the unmodified %StringPrototype% (i.e. // no one monkey-patched toString, @@toPrimitive or valueOf), and // (c) the %ObjectPrototype% (i.e. 
the [[Prototype]] of the // %StringPrototype%) is also unmodified, that is no one sneaked a // @@toPrimitive into the %ObjectPrototype%. // // If all these assumptions hold, we can just take the [[StringData]] value // and return it. // TODO(bmeurer): This just repairs a regression introduced by removing the // weird (and broken) intrinsic %_IsStringWrapperSafeForDefaultValue, which // was intendend to something similar to this, although less efficient and // wrong in the presence of @@toPrimitive. Long-term we might want to move // into the direction of having a ToPrimitiveStub that can do common cases // while staying in JavaScript land (i.e. not going to C++). IfBuilder if_inputisstringwrapper(this); if_inputisstringwrapper.If( input_map, string_function_initial_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( input_prototype_map, string_function_prototype_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( Add(Add(input_prototype_map, nullptr, HObjectAccess::ForPrototype()), nullptr, HObjectAccess::ForMap()), object_function_prototype_map); if_inputisstringwrapper.Then(); { Push(BuildLoadNamedField( input, FieldIndex::ForInObjectOffset(JSValue::kValueOffset))); } if_inputisstringwrapper.Else(); { // TODO(bmeurer): Add support for fast ToPrimitive conversion using // a dedicated ToPrimitiveStub. Add(input); Push(Add(Runtime::FunctionForId(Runtime::kToPrimitive), 1)); } if_inputisstringwrapper.End(); return Pop(); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StringAddStub* stub = casted_stub(); StringAddFlags flags = stub->flags(); PretenureFlag pretenure_flag = stub->pretenure_flag(); HValue* left = GetParameter(StringAddStub::kLeft); HValue* right = GetParameter(StringAddStub::kRight); // Make sure that both arguments are strings if not known in advance. 
if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) { left = BuildToString(left, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) { right = BuildToString(right, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } return BuildStringAdd(left, right, HAllocationMode(pretenure_flag)); } Handle StringAddStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { ToBooleanStub* stub = casted_stub(); IfBuilder if_true(this); if_true.If(GetParameter(0), stub->types()); if_true.Then(); if_true.Return(graph()->GetConstantTrue()); if_true.Else(); if_true.End(); return graph()->GetConstantFalse(); } Handle ToBooleanStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StoreGlobalStub* stub = casted_stub(); HParameter* value = GetParameter(StoreDescriptor::kValueIndex); if (stub->check_global()) { // Check that the map of the global has not changed: use a placeholder map // that will be replaced later with the global object's map. HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex); HValue* proxy_map = Add(proxy, nullptr, HObjectAccess::ForMap()); HValue* global = Add(proxy_map, nullptr, HObjectAccess::ForPrototype()); HValue* map_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::global_map_placeholder(isolate()))); HValue* expected_map = Add( map_cell, nullptr, HObjectAccess::ForWeakCellValue()); HValue* map = Add(global, nullptr, HObjectAccess::ForMap()); IfBuilder map_check(this); map_check.IfNot(expected_map, map); map_check.ThenDeopt(Deoptimizer::kUnknownMap); map_check.End(); } HValue* weak_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::property_cell_placeholder(isolate()))); HValue* cell = Add(weak_cell, nullptr, HObjectAccess::ForWeakCellValue()); Add
FastCloneShallowObjectStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { // This stub is performance sensitive, the generated code must be tuned // so that it doesn't build an eager frame. info()->MarkMustNotHaveEagerFrame(); HValue* size = Add(AllocationSite::kSize); HInstruction* object = Add(size, HType::JSObject(), TENURED, JS_OBJECT_TYPE); // Store the map Handle allocation_site_map = isolate()->factory()->allocation_site_map(); AddStoreMapConstant(object, allocation_site_map); // Store the payload (smi elements kind) HValue* initial_elements_kind = Add(GetInitialFastElementsKind()); Add(object, HObjectAccess::ForAllocationSiteOffset( AllocationSite::kTransitionInfoOffset), initial_elements_kind); // Unlike literals, constructed arrays don't have nested sites Add(object, HObjectAccess::ForAllocationSiteOffset( AllocationSite::kNestedSiteOffset), graph()->GetConstant0()); // Pretenuring calculation field. Add(object, HObjectAccess::ForAllocationSiteOffset( AllocationSite::kPretenureDataOffset), graph()->GetConstant0()); // Pretenuring memento creation count field. Add(object, HObjectAccess::ForAllocationSiteOffset( AllocationSite::kPretenureCreateCountOffset), graph()->GetConstant0()); // Store an empty fixed array for the code dependency. HConstant* empty_fixed_array = Add(isolate()->factory()->empty_fixed_array()); Add( object, HObjectAccess::ForAllocationSiteOffset( AllocationSite::kDependentCodeOffset), empty_fixed_array); // Link the object to the allocation site list HValue* site_list = Add( ExternalReference::allocation_sites_list_address(isolate())); HValue* site = Add(site_list, nullptr, HObjectAccess::ForAllocationSiteList()); // TODO(mvstanton): This is a store to a weak pointer, which we may want to // mark as such in order to skip the write barrier, once we have a unified // system for weakness. 
For now we decided to keep it like this because having // an initial write barrier backed store makes this pointer strong until the // next GC, and allocation sites are designed to survive several GCs anyway. Add( object, HObjectAccess::ForAllocationSiteOffset(AllocationSite::kWeakNextOffset), site); Add(site_list, HObjectAccess::ForAllocationSiteList(), object); HInstruction* feedback_vector = GetParameter(0); HInstruction* slot = GetParameter(1); Add(feedback_vector, slot, object, nullptr, FAST_ELEMENTS, INITIALIZING_STORE); return feedback_vector; } Handle CreateAllocationSiteStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { // This stub is performance sensitive, the generated code must be tuned // so that it doesn't build an eager frame. info()->MarkMustNotHaveEagerFrame(); HValue* size = Add(WeakCell::kSize); HInstruction* object = Add(size, HType::JSObject(), TENURED, JS_OBJECT_TYPE); Handle weak_cell_map = isolate()->factory()->weak_cell_map(); AddStoreMapConstant(object, weak_cell_map); HInstruction* value = GetParameter(CreateWeakCellDescriptor::kValueIndex); Add(object, HObjectAccess::ForWeakCellValue(), value); Add(object, HObjectAccess::ForWeakCellNext(), graph()->GetConstantHole()); HInstruction* feedback_vector = GetParameter(CreateWeakCellDescriptor::kVectorIndex); HInstruction* slot = GetParameter(CreateWeakCellDescriptor::kSlotIndex); Add(feedback_vector, slot, object, nullptr, FAST_ELEMENTS, INITIALIZING_STORE); return graph()->GetConstant0(); } Handle CreateWeakCellStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { int context_index = casted_stub()->context_index(); int slot_index = casted_stub()->slot_index(); HValue* script_context = BuildGetScriptContext(context_index); return Add(script_context, nullptr, HObjectAccess::ForContextSlot(slot_index)); } Handle LoadScriptContextFieldStub::GenerateCode() { return 
DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { int context_index = casted_stub()->context_index(); int slot_index = casted_stub()->slot_index(); HValue* script_context = BuildGetScriptContext(context_index); Add(script_context, HObjectAccess::ForContextSlot(slot_index), GetParameter(2), STORE_TO_INITIALIZED_ENTRY); return GetParameter(2); } Handle StoreScriptContextFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); if (IsFastDoubleElementsKind(kind)) { info()->MarkAsSavesCallerDoubles(); } HValue* object = GetParameter(GrowArrayElementsDescriptor::kObjectIndex); HValue* key = GetParameter(GrowArrayElementsDescriptor::kKeyIndex); HValue* elements = AddLoadElements(object); HValue* current_capacity = Add( elements, nullptr, HObjectAccess::ForFixedArrayLength()); HValue* length = casted_stub()->is_js_array() ? Add(object, static_cast(NULL), HObjectAccess::ForArrayLength(kind)) : current_capacity; return BuildCheckAndGrowElementsCapacity(object, elements, kind, length, current_capacity, key); } Handle GrowArrayElementsStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { LoadKeyedHoleMode hole_mode = casted_stub()->convert_hole_to_undefined() ? CONVERT_HOLE_TO_UNDEFINED : NEVER_RETURN_HOLE; HInstruction* load = BuildUncheckedMonomorphicElementAccess( GetParameter(LoadDescriptor::kReceiverIndex), GetParameter(LoadDescriptor::kNameIndex), NULL, casted_stub()->is_js_array(), casted_stub()->elements_kind(), LOAD, hole_mode, STANDARD_STORE); return load; } Handle LoadFastElementStub::GenerateCode() { return DoGenerateCode(this); } HLoadNamedField* CodeStubGraphBuilderBase::BuildLoadNamedField( HValue* object, FieldIndex index) { Representation representation = index.is_double() ? 
Representation::Double() : Representation::Tagged(); int offset = index.offset(); HObjectAccess access = index.is_inobject() ? HObjectAccess::ForObservableJSObjectOffset(offset, representation) : HObjectAccess::ForBackingStoreOffset(offset, representation); if (index.is_double() && (!FLAG_unbox_double_fields || !index.is_inobject())) { // Load the heap number. object = Add( object, nullptr, access.WithRepresentation(Representation::Tagged())); // Load the double value from it. access = HObjectAccess::ForHeapNumberValue(); } return Add(object, nullptr, access); } template<> HValue* CodeStubGraphBuilder::BuildCodeStub() { return BuildLoadNamedField(GetParameter(0), casted_stub()->index()); } Handle LoadFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { return BuildArrayBufferViewFieldAccessor(GetParameter(0), nullptr, casted_stub()->index()); } Handle ArrayBufferViewLoadFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* map = AddLoadMap(GetParameter(0), NULL); HObjectAccess descriptors_access = HObjectAccess::ForObservableJSObjectOffset( Map::kDescriptorsOffset, Representation::Tagged()); HValue* descriptors = Add(map, nullptr, descriptors_access); HObjectAccess value_access = HObjectAccess::ForObservableJSObjectOffset( DescriptorArray::GetValueOffset(casted_stub()->constant_index())); return Add(descriptors, nullptr, value_access); } Handle LoadConstantStub::GenerateCode() { return DoGenerateCode(this); } HValue* CodeStubGraphBuilderBase::UnmappedCase(HValue* elements, HValue* key, HValue* value) { HValue* result = NULL; HInstruction* backing_store = Add(elements, graph()->GetConstant1(), nullptr, nullptr, FAST_ELEMENTS, ALLOW_RETURN_HOLE); Add(backing_store, isolate()->factory()->fixed_array_map()); HValue* backing_store_length = Add( backing_store, nullptr, HObjectAccess::ForFixedArrayLength()); IfBuilder 
in_unmapped_range(this); in_unmapped_range.If(key, backing_store_length, Token::LT); in_unmapped_range.Then(); { if (value == NULL) { result = Add(backing_store, key, nullptr, nullptr, FAST_HOLEY_ELEMENTS, NEVER_RETURN_HOLE); } else { Add(backing_store, key, value, nullptr, FAST_HOLEY_ELEMENTS); } } in_unmapped_range.ElseDeopt(Deoptimizer::kOutsideOfRange); in_unmapped_range.End(); return result; } HValue* CodeStubGraphBuilderBase::EmitKeyedSloppyArguments(HValue* receiver, HValue* key, HValue* value) { // Mapped arguments are actual arguments. Unmapped arguments are values added // to the arguments object after it was created for the call. Mapped arguments // are stored in the context at indexes given by elements[key + 2]. Unmapped // arguments are stored as regular indexed properties in the arguments array, // held at elements[1]. See NewSloppyArguments() in runtime.cc for a detailed // look at argument object construction. // // The sloppy arguments elements array has a special format: // // 0: context // 1: unmapped arguments array // 2: mapped_index0, // 3: mapped_index1, // ... // // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments). // If key + 2 >= elements.length then attempt to look in the unmapped // arguments array (given by elements[1]) and return the value at key, missing // to the runtime if the unmapped arguments array is not a fixed array or if // key >= unmapped_arguments_array.length. // // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value // in the unmapped arguments array, as described above. Otherwise, t is a Smi // index into the context array given at elements[0]. Return the value at // context[t]. 
bool is_load = value == NULL; key = AddUncasted(key, Representation::Smi()); IfBuilder positive_smi(this); positive_smi.If(key, graph()->GetConstant0(), Token::LT); positive_smi.ThenDeopt(Deoptimizer::kKeyIsNegative); positive_smi.End(); HValue* constant_two = Add(2); HValue* elements = AddLoadElements(receiver, nullptr); HValue* elements_length = Add( elements, nullptr, HObjectAccess::ForFixedArrayLength()); HValue* adjusted_length = AddUncasted(elements_length, constant_two); IfBuilder in_range(this); in_range.If(key, adjusted_length, Token::LT); in_range.Then(); { HValue* index = AddUncasted(key, constant_two); HInstruction* mapped_index = Add(elements, index, nullptr, nullptr, FAST_HOLEY_ELEMENTS, ALLOW_RETURN_HOLE); IfBuilder is_valid(this); is_valid.IfNot(mapped_index, graph()->GetConstantHole()); is_valid.Then(); { // TODO(mvstanton): I'd like to assert from this point, that if the // mapped_index is not the hole that it is indeed, a smi. An unnecessary // smi check is being emitted. HValue* the_context = Add(elements, graph()->GetConstant0(), nullptr, nullptr, FAST_ELEMENTS); STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize); if (is_load) { HValue* result = Add(the_context, mapped_index, nullptr, nullptr, FAST_ELEMENTS, ALLOW_RETURN_HOLE); environment()->Push(result); } else { DCHECK(value != NULL); Add(the_context, mapped_index, value, nullptr, FAST_ELEMENTS); environment()->Push(value); } } is_valid.Else(); { HValue* result = UnmappedCase(elements, key, value); environment()->Push(is_load ? result : value); } is_valid.End(); } in_range.Else(); { HValue* result = UnmappedCase(elements, key, value); environment()->Push(is_load ? 
result : value); } in_range.End(); return environment()->Pop(); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex); HValue* key = GetParameter(LoadDescriptor::kNameIndex); return EmitKeyedSloppyArguments(receiver, key, NULL); } Handle KeyedLoadSloppyArgumentsStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* receiver = GetParameter(StoreDescriptor::kReceiverIndex); HValue* key = GetParameter(StoreDescriptor::kNameIndex); HValue* value = GetParameter(StoreDescriptor::kValueIndex); return EmitKeyedSloppyArguments(receiver, key, value); } Handle KeyedStoreSloppyArgumentsStub::GenerateCode() { return DoGenerateCode(this); } void CodeStubGraphBuilderBase::BuildStoreNamedField( HValue* object, HValue* value, FieldIndex index, Representation representation, bool transition_to_field) { DCHECK(!index.is_double() || representation.IsDouble()); int offset = index.offset(); HObjectAccess access = index.is_inobject() ? HObjectAccess::ForObservableJSObjectOffset(offset, representation) : HObjectAccess::ForBackingStoreOffset(offset, representation); if (representation.IsDouble()) { if (!FLAG_unbox_double_fields || !index.is_inobject()) { HObjectAccess heap_number_access = access.WithRepresentation(Representation::Tagged()); if (transition_to_field) { // The store requires a mutable HeapNumber to be allocated. NoObservableSideEffectsScope no_side_effects(this); HInstruction* heap_number_size = Add(HeapNumber::kSize); // TODO(hpayer): Allocation site pretenuring support. HInstruction* heap_number = Add(heap_number_size, HType::HeapObject(), NOT_TENURED, MUTABLE_HEAP_NUMBER_TYPE); AddStoreMapConstant(heap_number, isolate()->factory()->mutable_heap_number_map()); Add(heap_number, HObjectAccess::ForHeapNumberValue(), value); // Store the new mutable heap number into the object. 
access = heap_number_access; value = heap_number; } else { // Load the heap number. object = Add(object, nullptr, heap_number_access); // Store the double value into it. access = HObjectAccess::ForHeapNumberValue(); } } } else if (representation.IsHeapObject()) { BuildCheckHeapObject(value); } Add(object, access, value, INITIALIZING_STORE); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BuildStoreNamedField(GetParameter(0), GetParameter(2), casted_stub()->index(), casted_stub()->representation(), false); return GetParameter(2); } Handle StoreFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* object = GetParameter(StoreTransitionHelper::ReceiverIndex()); switch (casted_stub()->store_mode()) { case StoreTransitionStub::ExtendStorageAndStoreMapAndValue: { HValue* properties = Add( object, nullptr, HObjectAccess::ForPropertiesPointer()); HValue* length = AddLoadFixedArrayLength(properties); HValue* delta = Add(static_cast(JSObject::kFieldsAdded)); HValue* new_capacity = AddUncasted(length, delta); // Grow properties array. ElementsKind kind = FAST_ELEMENTS; Add(new_capacity, Add((Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) >> ElementsKindToShiftSize(kind))); // Reuse this code for properties backing store allocation. HValue* new_properties = BuildAllocateAndInitializeArray(kind, new_capacity); BuildCopyProperties(properties, new_properties, length, new_capacity); Add(object, HObjectAccess::ForPropertiesPointer(), new_properties); } // Fall through. case StoreTransitionStub::StoreMapAndValue: // Store the new value into the "extended" object. BuildStoreNamedField( object, GetParameter(StoreTransitionHelper::ValueIndex()), casted_stub()->index(), casted_stub()->representation(), true); // Fall through. case StoreTransitionStub::StoreMapOnly: // And finally update the map. 
Add(object, HObjectAccess::ForMap(), GetParameter(StoreTransitionHelper::MapIndex())); break; } return GetParameter(StoreTransitionHelper::ValueIndex()); } Handle StoreTransitionStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BuildUncheckedMonomorphicElementAccess( GetParameter(StoreDescriptor::kReceiverIndex), GetParameter(StoreDescriptor::kNameIndex), GetParameter(StoreDescriptor::kValueIndex), casted_stub()->is_js_array(), casted_stub()->elements_kind(), STORE, NEVER_RETURN_HOLE, casted_stub()->store_mode()); return GetParameter(2); } Handle StoreFastElementStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { info()->MarkAsSavesCallerDoubles(); BuildTransitionElementsKind(GetParameter(0), GetParameter(1), casted_stub()->from_kind(), casted_stub()->to_kind(), casted_stub()->is_js_array()); return GetParameter(0); } Handle TransitionElementsKindStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(Add(HeapNumber::kSize), HType::HeapNumber(), NOT_TENURED, HEAP_NUMBER_TYPE); AddStoreMapConstant(result, isolate()->factory()->heap_number_map()); return result; } Handle AllocateHeapNumberStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(Add(HeapNumber::kSize), HType::HeapObject(), NOT_TENURED, MUTABLE_HEAP_NUMBER_TYPE); AddStoreMapConstant(result, isolate()->factory()->mutable_heap_number_map()); return result; } Handle AllocateMutableHeapNumberStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(GetParameter(0), HType::Tagged(), NOT_TENURED, JS_OBJECT_TYPE); return result; } Handle AllocateInNewSpaceStub::GenerateCode() { return DoGenerateCode(this); } HValue* 
CodeStubGraphBuilderBase::BuildArrayConstructor( ElementsKind kind, AllocationSiteOverrideMode override_mode, ArgumentClass argument_class) { HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor); HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite); JSArrayBuilder array_builder(this, kind, alloc_site, constructor, override_mode); HValue* result = NULL; switch (argument_class) { case NONE: // This stub is very performance sensitive, the generated code must be // tuned so that it doesn't build and eager frame. info()->MarkMustNotHaveEagerFrame(); result = array_builder.AllocateEmptyArray(); break; case SINGLE: result = BuildArraySingleArgumentConstructor(&array_builder); break; case MULTIPLE: result = BuildArrayNArgumentsConstructor(&array_builder, kind); break; } return result; } HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor( ElementsKind kind, ArgumentClass argument_class) { HValue* constructor = GetParameter( InternalArrayConstructorStubBase::kConstructor); JSArrayBuilder array_builder(this, kind, constructor); HValue* result = NULL; switch (argument_class) { case NONE: // This stub is very performance sensitive, the generated code must be // tuned so that it doesn't build and eager frame. info()->MarkMustNotHaveEagerFrame(); result = array_builder.AllocateEmptyArray(); break; case SINGLE: result = BuildArraySingleArgumentConstructor(&array_builder); break; case MULTIPLE: result = BuildArrayNArgumentsConstructor(&array_builder, kind); break; } return result; } HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor( JSArrayBuilder* array_builder) { // Smi check and range check on the input arg. 
HValue* constant_one = graph()->GetConstant1(); HValue* constant_zero = graph()->GetConstant0(); HInstruction* elements = Add(false); HInstruction* argument = Add( elements, constant_one, constant_zero); return BuildAllocateArrayFromLength(array_builder, argument); } HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor( JSArrayBuilder* array_builder, ElementsKind kind) { // Insert a bounds check because the number of arguments might exceed // the kInitialMaxFastElementArray limit. This cannot happen for code // that was parsed, but calling via Array.apply(thisArg, [...]) might // trigger it. HValue* length = GetArgumentsLength(); HConstant* max_alloc_length = Add(JSArray::kInitialMaxFastElementArray); HValue* checked_length = Add(length, max_alloc_length); // We need to fill with the hole if it's a smi array in the multi-argument // case because we might have to bail out while copying arguments into // the array because they aren't compatible with a smi array. // If it's a double array, no problem, and if it's fast then no // problem either because doubles are boxed. // // TODO(mvstanton): consider an instruction to memset fill the array // with zero in this case instead. JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind) ? JSArrayBuilder::FILL_WITH_HOLE : JSArrayBuilder::DONT_FILL_WITH_HOLE; HValue* new_object = array_builder->AllocateArray(checked_length, max_alloc_length, checked_length, fill_mode); HValue* elements = array_builder->GetElementsLocation(); DCHECK(elements != NULL); // Now populate the elements correctly. 
LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement); HValue* start = graph()->GetConstant0(); HValue* key = builder.BeginBody(start, checked_length, Token::LT); HInstruction* argument_elements = Add(false); HInstruction* argument = Add( argument_elements, checked_length, key); Add(elements, key, argument, nullptr, kind); builder.EndBody(); return new_object; } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, NONE); } Handle ArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, SINGLE); } Handle ArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, MULTIPLE); } Handle ArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, NONE); } Handle InternalArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, SINGLE); } Handle InternalArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = 
casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, MULTIPLE);
}


Handle<Code> InternalArrayNArgumentsConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<CompareNilICStub>::BuildCodeInitializedStub() {
  Isolate* isolate = graph()->isolate();
  CompareNilICStub* stub = casted_stub();
  HIfContinuation continuation;
  Handle<Map> sentinel_map(isolate->heap()->meta_map());
  Type* type = stub->GetType(zone(), sentinel_map);
  BuildCompareNil(GetParameter(0), type, &continuation, kEmbedMapsViaWeakCells);
  IfBuilder if_nil(this, &continuation);
  if_nil.Then();
  if (continuation.IsFalseReachable()) {
    if_nil.Else();
    if_nil.Return(graph()->GetConstantFalse());
  }
  if_nil.End();
  return continuation.IsTrueReachable() ? graph()->GetConstantTrue()
                                        : graph()->GetConstantUndefined();
}


Handle<Code> CompareNilICStub::GenerateCode() { return DoGenerateCode(this); }


template <>
HValue* CodeStubGraphBuilder<BinaryOpICStub>::BuildCodeInitializedStub() {
  BinaryOpICState state = casted_stub()->state();

  HValue* left = GetParameter(BinaryOpICStub::kLeft);
  HValue* right = GetParameter(BinaryOpICStub::kRight);

  Type* left_type = state.GetLeftType();
  Type* right_type = state.GetRightType();
  Type* result_type = state.GetResultType();

  DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) &&
         (state.HasSideEffects() || !result_type->Is(Type::None())));

  HValue* result = NULL;
  HAllocationMode allocation_mode(NOT_TENURED);
  if (state.op() == Token::ADD &&
      (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) &&
      !left_type->Is(Type::String()) && !right_type->Is(Type::String())) {
    // For the generic add stub a fast case for string addition is performance
    // critical.
    if (left_type->Maybe(Type::String())) {
      IfBuilder if_leftisstring(this);
      if_leftisstring.If<HIsStringAndBranch>(left);
      if_leftisstring.Then();
      {
        Push(BuildBinaryOperation(state.op(), left, right, Type::String(zone()),
                                  right_type, result_type,
                                  state.fixed_right_arg(), allocation_mode,
                                  state.strength()));
      }
      if_leftisstring.Else();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right, left_type, right_type, result_type,
            state.fixed_right_arg(), allocation_mode, state.strength()));
      }
      if_leftisstring.End();
      result = Pop();
    } else {
      IfBuilder if_rightisstring(this);
      if_rightisstring.If<HIsStringAndBranch>(right);
      if_rightisstring.Then();
      {
        Push(BuildBinaryOperation(state.op(), left, right, left_type,
                                  Type::String(zone()), result_type,
                                  state.fixed_right_arg(), allocation_mode,
                                  state.strength()));
      }
      if_rightisstring.Else();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right, left_type, right_type, result_type,
            state.fixed_right_arg(), allocation_mode, state.strength()));
      }
      if_rightisstring.End();
      result = Pop();
    }
  } else {
    result = BuildBinaryOperation(
        state.op(), left, right, left_type, right_type, result_type,
        state.fixed_right_arg(), allocation_mode, state.strength());
  }

  // If we encounter a generic argument, the number conversion is
  // observable, thus we cannot afford to bail out after the fact.
if (!state.HasSideEffects()) { result = EnforceNumberType(result, result_type); } return result; } Handle BinaryOpICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BinaryOpICState state = casted_stub()->state(); HValue* allocation_site = GetParameter( BinaryOpWithAllocationSiteStub::kAllocationSite); HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft); HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); HAllocationMode allocation_mode(allocation_site); return BuildBinaryOperation(state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } Handle BinaryOpWithAllocationSiteStub::GenerateCode() { return DoGenerateCode(this); } HValue* CodeStubGraphBuilderBase::BuildToString(HValue* input, bool convert) { if (!convert) return BuildCheckString(input); IfBuilder if_inputissmi(this); HValue* inputissmi = if_inputissmi.If(input); if_inputissmi.Then(); { // Convert the input smi to a string. Push(BuildNumberToString(input, Type::SignedSmall())); } if_inputissmi.Else(); { HValue* input_map = Add(input, inputissmi, HObjectAccess::ForMap()); HValue* input_instance_type = Add( input_map, inputissmi, HObjectAccess::ForMapInstanceType()); IfBuilder if_inputisstring(this); if_inputisstring.If( input_instance_type, Add(FIRST_NONSTRING_TYPE), Token::LT); if_inputisstring.Then(); { // The input is already a string. Push(input); } if_inputisstring.Else(); { // Convert to primitive first (if necessary), see // ES6 section 12.7.3 The Addition operator. IfBuilder if_inputisprimitive(this); STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE); if_inputisprimitive.If( input_instance_type, Add(LAST_PRIMITIVE_TYPE), Token::LTE); if_inputisprimitive.Then(); { // The input is already a primitive. 
Push(input); } if_inputisprimitive.Else(); { // Convert the input to a primitive. Push(BuildToPrimitive(input, input_map)); } if_inputisprimitive.End(); // Convert the primitive to a string value. ToStringDescriptor descriptor(isolate()); ToStringStub stub(isolate()); HValue* values[] = {context(), Pop()}; Push(AddUncasted( Add(stub.GetCode()), 0, descriptor, Vector(values, arraysize(values)))); } if_inputisstring.End(); } if_inputissmi.End(); return Pop(); } HValue* CodeStubGraphBuilderBase::BuildToPrimitive(HValue* input, HValue* input_map) { // Get the native context of the caller. HValue* native_context = BuildGetNativeContext(); // Determine the initial map of the %ObjectPrototype%. HValue* object_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::OBJECT_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the %StringPrototype%. HValue* string_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the String function. HValue* string_function = Add( native_context, nullptr, HObjectAccess::ForContextSlot(Context::STRING_FUNCTION_INDEX)); HValue* string_function_initial_map = Add( string_function, nullptr, HObjectAccess::ForPrototypeOrInitialMap()); // Determine the map of the [[Prototype]] of {input}. HValue* input_prototype = Add(input_map, nullptr, HObjectAccess::ForPrototype()); HValue* input_prototype_map = Add(input_prototype, nullptr, HObjectAccess::ForMap()); // For string wrappers (JSValue instances with [[StringData]] internal // fields), we can shortcirciut the ToPrimitive if // // (a) the {input} map matches the initial map of the String function, // (b) the {input} [[Prototype]] is the unmodified %StringPrototype% (i.e. // no one monkey-patched toString, @@toPrimitive or valueOf), and // (c) the %ObjectPrototype% (i.e. 
the [[Prototype]] of the // %StringPrototype%) is also unmodified, that is no one sneaked a // @@toPrimitive into the %ObjectPrototype%. // // If all these assumptions hold, we can just take the [[StringData]] value // and return it. // TODO(bmeurer): This just repairs a regression introduced by removing the // weird (and broken) intrinsic %_IsStringWrapperSafeForDefaultValue, which // was intendend to something similar to this, although less efficient and // wrong in the presence of @@toPrimitive. Long-term we might want to move // into the direction of having a ToPrimitiveStub that can do common cases // while staying in JavaScript land (i.e. not going to C++). IfBuilder if_inputisstringwrapper(this); if_inputisstringwrapper.If( input_map, string_function_initial_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( input_prototype_map, string_function_prototype_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( Add(Add(input_prototype_map, nullptr, HObjectAccess::ForPrototype()), nullptr, HObjectAccess::ForMap()), object_function_prototype_map); if_inputisstringwrapper.Then(); { Push(BuildLoadNamedField( input, FieldIndex::ForInObjectOffset(JSValue::kValueOffset))); } if_inputisstringwrapper.Else(); { // TODO(bmeurer): Add support for fast ToPrimitive conversion using // a dedicated ToPrimitiveStub. Add(input); Push(Add(Runtime::FunctionForId(Runtime::kToPrimitive), 1)); } if_inputisstringwrapper.End(); return Pop(); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StringAddStub* stub = casted_stub(); StringAddFlags flags = stub->flags(); PretenureFlag pretenure_flag = stub->pretenure_flag(); HValue* left = GetParameter(StringAddStub::kLeft); HValue* right = GetParameter(StringAddStub::kRight); // Make sure that both arguments are strings if not known in advance. 
if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) { left = BuildToString(left, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) { right = BuildToString(right, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } return BuildStringAdd(left, right, HAllocationMode(pretenure_flag)); } Handle StringAddStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { ToBooleanStub* stub = casted_stub(); IfBuilder if_true(this); if_true.If(GetParameter(0), stub->types()); if_true.Then(); if_true.Return(graph()->GetConstantTrue()); if_true.Else(); if_true.End(); return graph()->GetConstantFalse(); } Handle ToBooleanStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StoreGlobalStub* stub = casted_stub(); HParameter* value = GetParameter(StoreDescriptor::kValueIndex); if (stub->check_global()) { // Check that the map of the global has not changed: use a placeholder map // that will be replaced later with the global object's map. HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex); HValue* proxy_map = Add(proxy, nullptr, HObjectAccess::ForMap()); HValue* global = Add(proxy_map, nullptr, HObjectAccess::ForPrototype()); HValue* map_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::global_map_placeholder(isolate()))); HValue* expected_map = Add( map_cell, nullptr, HObjectAccess::ForWeakCellValue()); HValue* map = Add(global, nullptr, HObjectAccess::ForMap()); IfBuilder map_check(this); map_check.IfNot(expected_map, map); map_check.ThenDeopt(Deoptimizer::kUnknownMap); map_check.End(); } HValue* weak_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::property_cell_placeholder(isolate()))); HValue* cell = Add(weak_cell, nullptr, HObjectAccess::ForWeakCellValue()); Add
// NOTE(review): template-argument lists were stripped from this rendering;
// the return type of the first GenerateCode below (presumably "Handle<Code>")
// is also missing because the preceding text was cut off.  Only comments were
// added; all other tokens are unchanged.
CreateAllocationSiteStub::GenerateCode() { return DoGenerateCode(this); }

// CreateWeakCell: allocate and initialize a WeakCell for {value} and record
// it in the feedback vector at {slot}.
template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  // This stub is performance sensitive, the generated code must be tuned
  // so that it doesn't build an eager frame.
  info()->MarkMustNotHaveEagerFrame();

  HValue* size = Add(WeakCell::kSize);
  HInstruction* object =
      Add(size, HType::JSObject(), TENURED, JS_OBJECT_TYPE);

  Handle weak_cell_map = isolate()->factory()->weak_cell_map();
  AddStoreMapConstant(object, weak_cell_map);

  HInstruction* value = GetParameter(CreateWeakCellDescriptor::kValueIndex);
  Add(object, HObjectAccess::ForWeakCellValue(), value);
  Add(object, HObjectAccess::ForWeakCellNext(), graph()->GetConstantHole());

  HInstruction* feedback_vector =
      GetParameter(CreateWeakCellDescriptor::kVectorIndex);
  HInstruction* slot = GetParameter(CreateWeakCellDescriptor::kSlotIndex);
  Add(feedback_vector, slot, object, nullptr, FAST_ELEMENTS,
      INITIALIZING_STORE);
  return graph()->GetConstant0();
}

Handle CreateWeakCellStub::GenerateCode() { return DoGenerateCode(this); }

// LoadScriptContextField: load the slot {slot_index} from the script context
// found at {context_index}.
template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  int context_index = casted_stub()->context_index();
  int slot_index = casted_stub()->slot_index();

  HValue* script_context = BuildGetScriptContext(context_index);
  return Add(script_context, nullptr,
             HObjectAccess::ForContextSlot(slot_index));
}

Handle LoadScriptContextFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}

// StoreScriptContextField: store parameter 2 into the script-context slot and
// return the stored value.
template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  int context_index = casted_stub()->context_index();
  int slot_index = casted_stub()->slot_index();

  HValue* script_context = BuildGetScriptContext(context_index);
  Add(script_context, HObjectAccess::ForContextSlot(slot_index),
      GetParameter(2), STORE_TO_INITIALIZED_ENTRY);
  return GetParameter(2);
}

Handle StoreScriptContextFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}

// GrowArrayElements: grow the backing store of {object} so that {key} fits,
// reusing the capacity-growth helper shared with full codegen.
template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  if (IsFastDoubleElementsKind(kind)) {
    // Double loads/stores below may clobber caller double registers.
    info()->MarkAsSavesCallerDoubles();
  }

  HValue* object = GetParameter(GrowArrayElementsDescriptor::kObjectIndex);
  HValue* key = GetParameter(GrowArrayElementsDescriptor::kKeyIndex);

  HValue* elements = AddLoadElements(object);
  HValue* current_capacity = Add(
      elements, nullptr, HObjectAccess::ForFixedArrayLength());

  // For JSArrays use the length field, otherwise capacity == length.
  HValue* length =
      casted_stub()->is_js_array()
          ? Add(object, static_cast(NULL),
                HObjectAccess::ForArrayLength(kind))
          : current_capacity;

  return BuildCheckAndGrowElementsCapacity(object, elements, kind, length,
                                           current_capacity, key);
}

Handle GrowArrayElementsStub::GenerateCode() {
  return DoGenerateCode(this);
}

// LoadFastElement: monomorphic keyed load, optionally converting the hole to
// undefined per the stub's configuration.
template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  LoadKeyedHoleMode hole_mode = casted_stub()->convert_hole_to_undefined()
                                    ? CONVERT_HOLE_TO_UNDEFINED
                                    : NEVER_RETURN_HOLE;

  HInstruction* load = BuildUncheckedMonomorphicElementAccess(
      GetParameter(LoadDescriptor::kReceiverIndex),
      GetParameter(LoadDescriptor::kNameIndex), NULL,
      casted_stub()->is_js_array(), casted_stub()->elements_kind(), LOAD,
      hole_mode, STANDARD_STORE);
  return load;
}

Handle LoadFastElementStub::GenerateCode() { return DoGenerateCode(this); }

// Loads the named field described by {index} from {object}, unwrapping a
// boxed heap number when doubles are not stored unboxed in-object.
HLoadNamedField* CodeStubGraphBuilderBase::BuildLoadNamedField(
    HValue* object, FieldIndex index) {
  Representation representation = index.is_double()
                                      ? Representation::Double()
                                      : Representation::Tagged();
  int offset = index.offset();
  HObjectAccess access =
      index.is_inobject()
          ? HObjectAccess::ForObservableJSObjectOffset(offset, representation)
          : HObjectAccess::ForBackingStoreOffset(offset, representation);
  if (index.is_double() &&
      (!FLAG_unbox_double_fields || !index.is_inobject())) {
    // Load the heap number.
    object = Add(
        object, nullptr, access.WithRepresentation(Representation::Tagged()));
    // Load the double value from it.
// NOTE(review): template-argument lists were stripped from this rendering
// ("Add(" lacks its H-instruction type, "Handle" lacks "<Code>"); restore
// from upstream before compiling.  Only comments were added; all other
// tokens are unchanged.
// --- tail of BuildLoadNamedField: redirect the access to the boxed heap
// number's value field. ---
access = HObjectAccess::ForHeapNumberValue();
}
return Add(object, nullptr, access);
}

// LoadField: plain named-field load at the stub's FieldIndex.
template<>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  return BuildLoadNamedField(GetParameter(0), casted_stub()->index());
}

Handle LoadFieldStub::GenerateCode() { return DoGenerateCode(this); }

// ArrayBufferViewLoadField: load a field of an ArrayBufferView via the
// dedicated accessor helper.
template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  return BuildArrayBufferViewFieldAccessor(GetParameter(0), nullptr,
                                           casted_stub()->index());
}

Handle ArrayBufferViewLoadFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}

// LoadConstant: read a constant out of the receiver map's descriptor array
// at the stub's constant index.
template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  HValue* map = AddLoadMap(GetParameter(0), NULL);
  HObjectAccess descriptors_access = HObjectAccess::ForObservableJSObjectOffset(
      Map::kDescriptorsOffset, Representation::Tagged());
  HValue* descriptors = Add(map, nullptr, descriptors_access);
  HObjectAccess value_access = HObjectAccess::ForObservableJSObjectOffset(
      DescriptorArray::GetValueOffset(casted_stub()->constant_index()));
  return Add(descriptors, nullptr, value_access);
}

Handle LoadConstantStub::GenerateCode() { return DoGenerateCode(this); }

// Handles the unmapped (regular indexed) portion of a sloppy-arguments
// object: the backing FixedArray lives at elements[1].  Loads when {value}
// is NULL, stores otherwise; deopts if {key} is out of range.
HValue* CodeStubGraphBuilderBase::UnmappedCase(HValue* elements, HValue* key,
                                               HValue* value) {
  HValue* result = NULL;
  HInstruction* backing_store =
      Add(elements, graph()->GetConstant1(), nullptr, nullptr, FAST_ELEMENTS,
          ALLOW_RETURN_HOLE);
  Add(backing_store, isolate()->factory()->fixed_array_map());
  HValue* backing_store_length = Add(
      backing_store, nullptr, HObjectAccess::ForFixedArrayLength());
  IfBuilder in_unmapped_range(this);
  in_unmapped_range.If(key, backing_store_length, Token::LT);
  in_unmapped_range.Then();
  {
    if (value == NULL) {
      result = Add(backing_store, key, nullptr, nullptr, FAST_HOLEY_ELEMENTS,
                   NEVER_RETURN_HOLE);
    } else {
      Add(backing_store, key, value, nullptr, FAST_HOLEY_ELEMENTS);
    }
  }
  in_unmapped_range.ElseDeopt(Deoptimizer::kOutsideOfRange);
  in_unmapped_range.End();
  return result;
}

HValue* CodeStubGraphBuilderBase::EmitKeyedSloppyArguments(HValue* receiver,
                                                           HValue* key,
                                                           HValue*
                                                               value) {
  // Mapped arguments are actual arguments. Unmapped arguments are values added
  // to the arguments object after it was created for the call. Mapped arguments
  // are stored in the context at indexes given by elements[key + 2]. Unmapped
  // arguments are stored as regular indexed properties in the arguments array,
  // held at elements[1]. See NewSloppyArguments() in runtime.cc for a detailed
  // look at argument object construction.
  //
  // The sloppy arguments elements array has a special format:
  //
  // 0: context
  // 1: unmapped arguments array
  // 2: mapped_index0,
  // 3: mapped_index1,
  // ...
  //
  // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments).
  // If key + 2 >= elements.length then attempt to look in the unmapped
  // arguments array (given by elements[1]) and return the value at key, missing
  // to the runtime if the unmapped arguments array is not a fixed array or if
  // key >= unmapped_arguments_array.length.
  //
  // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value
  // in the unmapped arguments array, as described above. Otherwise, t is a Smi
  // index into the context array given at elements[0]. Return the value at
  // context[t].
// NOTE(review): template-argument lists were stripped from this rendering;
// restore from upstream before compiling.  Only comments were added; all
// other tokens are unchanged.
// --- body of EmitKeyedSloppyArguments (signature and format comment are
// above): load when value == NULL, store otherwise. ---
bool is_load = value == NULL;

key = AddUncasted(key, Representation::Smi());
IfBuilder positive_smi(this);
positive_smi.If(key, graph()->GetConstant0(), Token::LT);
positive_smi.ThenDeopt(Deoptimizer::kKeyIsNegative);
positive_smi.End();

HValue* constant_two = Add(2);
HValue* elements = AddLoadElements(receiver, nullptr);
HValue* elements_length = Add(
    elements, nullptr, HObjectAccess::ForFixedArrayLength());
// adjusted_length = elements.length - 2, the number of mapped entries.
HValue* adjusted_length = AddUncasted(elements_length, constant_two);
IfBuilder in_range(this);
in_range.If(key, adjusted_length, Token::LT);
in_range.Then();
{
  // index = key + 2, skipping the context and unmapped-array slots.
  HValue* index = AddUncasted(key, constant_two);
  HInstruction* mapped_index =
      Add(elements, index, nullptr, nullptr, FAST_HOLEY_ELEMENTS,
          ALLOW_RETURN_HOLE);

  IfBuilder is_valid(this);
  is_valid.IfNot(mapped_index, graph()->GetConstantHole());
  is_valid.Then();
  {
    // TODO(mvstanton): I'd like to assert from this point, that if the
    // mapped_index is not the hole that it is indeed, a smi. An unnecessary
    // smi check is being emitted.
    HValue* the_context = Add(elements, graph()->GetConstant0(), nullptr,
                              nullptr, FAST_ELEMENTS);
    STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize);
    if (is_load) {
      HValue* result = Add(the_context, mapped_index, nullptr, nullptr,
                           FAST_ELEMENTS, ALLOW_RETURN_HOLE);
      environment()->Push(result);
    } else {
      DCHECK(value != NULL);
      Add(the_context, mapped_index, value, nullptr, FAST_ELEMENTS);
      environment()->Push(value);
    }
  }
  is_valid.Else();
  {
    HValue* result = UnmappedCase(elements, key, value);
    environment()->Push(is_load ? result : value);
  }
  is_valid.End();
}
in_range.Else();
{
  HValue* result = UnmappedCase(elements, key, value);
  environment()->Push(is_load ?
                               result : value);
}
in_range.End();

return environment()->Pop();
}

// KeyedLoadSloppyArguments: load path of the shared sloppy-arguments helper.
template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
  HValue* key = GetParameter(LoadDescriptor::kNameIndex);

  return EmitKeyedSloppyArguments(receiver, key, NULL);
}

Handle KeyedLoadSloppyArgumentsStub::GenerateCode() {
  return DoGenerateCode(this);
}

// KeyedStoreSloppyArguments: store path of the shared sloppy-arguments helper.
template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  HValue* receiver = GetParameter(StoreDescriptor::kReceiverIndex);
  HValue* key = GetParameter(StoreDescriptor::kNameIndex);
  HValue* value = GetParameter(StoreDescriptor::kValueIndex);

  return EmitKeyedSloppyArguments(receiver, key, value);
}

Handle KeyedStoreSloppyArgumentsStub::GenerateCode() {
  return DoGenerateCode(this);
}

// Stores {value} into the field described by {index}/{representation} on
// {object}.  Doubles that are not unboxed in-object go through a (possibly
// freshly allocated, when transitioning) mutable HeapNumber box.
void CodeStubGraphBuilderBase::BuildStoreNamedField(
    HValue* object, HValue* value, FieldIndex index,
    Representation representation, bool transition_to_field) {
  DCHECK(!index.is_double() || representation.IsDouble());
  int offset = index.offset();
  HObjectAccess access =
      index.is_inobject()
          ? HObjectAccess::ForObservableJSObjectOffset(offset, representation)
          : HObjectAccess::ForBackingStoreOffset(offset, representation);

  if (representation.IsDouble()) {
    if (!FLAG_unbox_double_fields || !index.is_inobject()) {
      HObjectAccess heap_number_access =
          access.WithRepresentation(Representation::Tagged());
      if (transition_to_field) {
        // The store requires a mutable HeapNumber to be allocated.
        NoObservableSideEffectsScope no_side_effects(this);
        HInstruction* heap_number_size = Add(HeapNumber::kSize);

        // TODO(hpayer): Allocation site pretenuring support.
        HInstruction* heap_number =
            Add(heap_number_size, HType::HeapObject(), NOT_TENURED,
                MUTABLE_HEAP_NUMBER_TYPE);
        AddStoreMapConstant(heap_number,
                            isolate()->factory()->mutable_heap_number_map());
        Add(heap_number, HObjectAccess::ForHeapNumberValue(),
            value);
        // Store the new mutable heap number into the object.
// NOTE(review): template-argument lists were stripped from this rendering;
// restore from upstream before compiling.  Only comments were added; all
// other tokens are unchanged.
// --- tail of BuildStoreNamedField: pick the final access/value pair. ---
access = heap_number_access;
value = heap_number;
} else {
  // Load the heap number.
  object = Add(object, nullptr, heap_number_access);
  // Store the double value into it.
  access = HObjectAccess::ForHeapNumberValue();
}
}
} else if (representation.IsHeapObject()) {
  BuildCheckHeapObject(value);
}

Add(object, access, value, INITIALIZING_STORE);
}

// StoreField: store parameter 2 into the stub's field and return it.
template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  BuildStoreNamedField(GetParameter(0), GetParameter(2),
                       casted_stub()->index(), casted_stub()->representation(),
                       false);
  return GetParameter(2);
}

Handle StoreFieldStub::GenerateCode() { return DoGenerateCode(this); }

// StoreTransition: optionally extend the properties backing store, store the
// value, and update the map -- the switch cases deliberately fall through so
// each mode performs its own step plus all later ones.
template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  HValue* object = GetParameter(StoreTransitionHelper::ReceiverIndex());

  switch (casted_stub()->store_mode()) {
    case StoreTransitionStub::ExtendStorageAndStoreMapAndValue: {
      HValue* properties = Add(
          object, nullptr, HObjectAccess::ForPropertiesPointer());
      HValue* length = AddLoadFixedArrayLength(properties);
      HValue* delta =
          Add(static_cast(JSObject::kFieldsAdded));
      HValue* new_capacity = AddUncasted(length, delta);

      // Grow properties array.
      ElementsKind kind = FAST_ELEMENTS;
      Add(new_capacity,
          Add((Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) >>
              ElementsKindToShiftSize(kind)));

      // Reuse this code for properties backing store allocation.
      HValue* new_properties =
          BuildAllocateAndInitializeArray(kind, new_capacity);

      BuildCopyProperties(properties, new_properties, length, new_capacity);

      Add(object, HObjectAccess::ForPropertiesPointer(),
          new_properties);
    }
    // Fall through.
    case StoreTransitionStub::StoreMapAndValue:
      // Store the new value into the "extended" object.
      BuildStoreNamedField(
          object, GetParameter(StoreTransitionHelper::ValueIndex()),
          casted_stub()->index(), casted_stub()->representation(), true);
    // Fall through.

    case StoreTransitionStub::StoreMapOnly:
      // And finally update the map.
      Add(object, HObjectAccess::ForMap(),
          GetParameter(StoreTransitionHelper::MapIndex()));
      break;
  }
  return GetParameter(StoreTransitionHelper::ValueIndex());
}

Handle StoreTransitionStub::GenerateCode() { return DoGenerateCode(this); }

// StoreFastElement: monomorphic keyed store per the stub's elements kind.
template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  BuildUncheckedMonomorphicElementAccess(
      GetParameter(StoreDescriptor::kReceiverIndex),
      GetParameter(StoreDescriptor::kNameIndex),
      GetParameter(StoreDescriptor::kValueIndex), casted_stub()->is_js_array(),
      casted_stub()->elements_kind(), STORE, NEVER_RETURN_HOLE,
      casted_stub()->store_mode());

  return GetParameter(2);
}

Handle StoreFastElementStub::GenerateCode() { return DoGenerateCode(this); }

// TransitionElementsKind: transition the receiver's elements between kinds.
template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  info()->MarkAsSavesCallerDoubles();

  BuildTransitionElementsKind(GetParameter(0), GetParameter(1),
                              casted_stub()->from_kind(),
                              casted_stub()->to_kind(),
                              casted_stub()->is_js_array());

  return GetParameter(0);
}

Handle TransitionElementsKindStub::GenerateCode() {
  return DoGenerateCode(this);
}

// AllocateHeapNumber: allocate an (immutable) HeapNumber.
template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  HValue* result =
      Add(Add(HeapNumber::kSize), HType::HeapNumber(), NOT_TENURED,
          HEAP_NUMBER_TYPE);
  AddStoreMapConstant(result, isolate()->factory()->heap_number_map());
  return result;
}

Handle AllocateHeapNumberStub::GenerateCode() {
  return DoGenerateCode(this);
}

// AllocateMutableHeapNumber: allocate a mutable HeapNumber box.
template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  HValue* result =
      Add(Add(HeapNumber::kSize), HType::HeapObject(), NOT_TENURED,
          MUTABLE_HEAP_NUMBER_TYPE);
  AddStoreMapConstant(result, isolate()->factory()->mutable_heap_number_map());
  return result;
}

Handle AllocateMutableHeapNumberStub::GenerateCode() {
  return DoGenerateCode(this);
}

// AllocateInNewSpace: raw new-space allocation of the requested size.
template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  HValue* result = Add(GetParameter(0), HType::Tagged(), NOT_TENURED,
                       JS_OBJECT_TYPE);
  return result;
}

Handle AllocateInNewSpaceStub::GenerateCode() {
  return DoGenerateCode(this);
}

HValue*
// NOTE(review): template-argument lists were stripped from this rendering;
// restore from upstream before compiling.  Only comments were added; all
// other tokens are unchanged.  The "HValue*" return type of this first
// function sits at the end of the previous chunk.
// Shared driver for the Array constructor stubs: dispatches on the argument
// class (none / single / multiple) using a JSArrayBuilder configured with the
// allocation site.
CodeStubGraphBuilderBase::BuildArrayConstructor(
    ElementsKind kind, AllocationSiteOverrideMode override_mode,
    ArgumentClass argument_class) {
  HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor);
  HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite);
  JSArrayBuilder array_builder(this, kind, alloc_site, constructor,
                               override_mode);
  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      // This stub is very performance sensitive, the generated code must be
      // tuned so that it doesn't build and eager frame.
      info()->MarkMustNotHaveEagerFrame();
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }

  return result;
}

// Same dispatch as above, but for the InternalArray constructors (no
// allocation site).
HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor(
    ElementsKind kind, ArgumentClass argument_class) {
  HValue* constructor = GetParameter(
      InternalArrayConstructorStubBase::kConstructor);
  JSArrayBuilder array_builder(this, kind, constructor);

  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      // This stub is very performance sensitive, the generated code must be
      // tuned so that it doesn't build and eager frame.
      info()->MarkMustNotHaveEagerFrame();
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }
  return result;
}

// Single-argument constructor: the argument is the requested length, read
// from the caller's argument area.
HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor(
    JSArrayBuilder* array_builder) {
  // Smi check and range check on the input arg.
  HValue* constant_one = graph()->GetConstant1();
  HValue* constant_zero = graph()->GetConstant0();

  HInstruction* elements = Add(false);
  HInstruction* argument = Add(
      elements, constant_one, constant_zero);

  return BuildAllocateArrayFromLength(array_builder, argument);
}

// N-argument constructor: allocate an array of arguments-length and copy the
// arguments into it.
HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor(
    JSArrayBuilder* array_builder, ElementsKind kind) {
  // Insert a bounds check because the number of arguments might exceed
  // the kInitialMaxFastElementArray limit. This cannot happen for code
  // that was parsed, but calling via Array.apply(thisArg, [...]) might
  // trigger it.
  HValue* length = GetArgumentsLength();
  HConstant* max_alloc_length =
      Add(JSArray::kInitialMaxFastElementArray);
  HValue* checked_length = Add(length, max_alloc_length);

  // We need to fill with the hole if it's a smi array in the multi-argument
  // case because we might have to bail out while copying arguments into
  // the array because they aren't compatible with a smi array.
  // If it's a double array, no problem, and if it's fast then no
  // problem either because doubles are boxed.
  //
  // TODO(mvstanton): consider an instruction to memset fill the array
  // with zero in this case instead.
  JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind)
                                           ? JSArrayBuilder::FILL_WITH_HOLE
                                           : JSArrayBuilder::DONT_FILL_WITH_HOLE;
  HValue* new_object = array_builder->AllocateArray(checked_length,
                                                    max_alloc_length,
                                                    checked_length,
                                                    fill_mode);
  HValue* elements = array_builder->GetElementsLocation();
  DCHECK(elements != NULL);

  // Now populate the elements correctly.
  LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
  HValue* start = graph()->GetConstant0();
  HValue* key = builder.BeginBody(start, checked_length, Token::LT);
  HInstruction* argument_elements = Add(false);
  HInstruction* argument = Add(
      argument_elements, checked_length, key);

  Add(elements, key, argument, nullptr, kind);
  builder.EndBody();
  return new_object;
}

// ArrayNoArgumentConstructor: delegate to the shared driver, NONE class.
template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, NONE);
}

Handle ArrayNoArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}

// ArraySingleArgumentConstructor: delegate to the shared driver, SINGLE class.
template <>
HValue* CodeStubGraphBuilder::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, SINGLE);
}

Handle ArraySingleArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}

// ArrayNArgumentsConstructor: delegate to the shared driver, MULTIPLE class.
template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, MULTIPLE);
}

Handle ArrayNArgumentsConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}

// InternalArrayNoArgumentConstructor: internal-array driver, NONE class.
template <>
HValue* CodeStubGraphBuilder::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, NONE);
}

Handle InternalArrayNoArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}

// InternalArraySingleArgumentConstructor: internal-array driver, SINGLE class.
template <>
HValue* CodeStubGraphBuilder::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, SINGLE);
}

Handle InternalArraySingleArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}

template <>
HValue* CodeStubGraphBuilder::
    BuildCodeStub() {
  ElementsKind kind =
// NOTE(review): template-argument lists were stripped from this rendering;
// restore from upstream before compiling.  Only comments were added; all
// other tokens are unchanged.
// --- tail of the InternalArrayNArgumentsConstructor builder (its template
// header is at the end of the previous chunk). ---
casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, MULTIPLE);
}

Handle InternalArrayNArgumentsConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}

// CompareNilIC: compare against null/undefined using the stub's recorded
// type feedback (maps embedded via weak cells).
template <>
HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() {
  Isolate* isolate = graph()->isolate();
  CompareNilICStub* stub = casted_stub();
  HIfContinuation continuation;
  Handle sentinel_map(isolate->heap()->meta_map());
  Type* type = stub->GetType(zone(), sentinel_map);
  BuildCompareNil(GetParameter(0), type, &continuation, kEmbedMapsViaWeakCells);
  IfBuilder if_nil(this, &continuation);
  if_nil.Then();
  if (continuation.IsFalseReachable()) {
    if_nil.Else();
    if_nil.Return(graph()->GetConstantFalse());
  }
  if_nil.End();
  return continuation.IsTrueReachable() ? graph()->GetConstantTrue()
                                        : graph()->GetConstantUndefined();
}

Handle CompareNilICStub::GenerateCode() { return DoGenerateCode(this); }

// BinaryOpIC: generic binary operation with feedback-driven fast paths; for
// ADD with possible string operands, dispatch on which side is a string.
template <>
HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() {
  BinaryOpICState state = casted_stub()->state();

  HValue* left = GetParameter(BinaryOpICStub::kLeft);
  HValue* right = GetParameter(BinaryOpICStub::kRight);

  Type* left_type = state.GetLeftType();
  Type* right_type = state.GetRightType();
  Type* result_type = state.GetResultType();

  DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) &&
         (state.HasSideEffects() || !result_type->Is(Type::None())));

  HValue* result = NULL;
  HAllocationMode allocation_mode(NOT_TENURED);
  if (state.op() == Token::ADD &&
      (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) &&
      !left_type->Is(Type::String()) && !right_type->Is(Type::String())) {
    // For the generic add stub a fast case for string addition is performance
    // critical.
    if (left_type->Maybe(Type::String())) {
      IfBuilder if_leftisstring(this);
      if_leftisstring.If(left);
      if_leftisstring.Then();
      {
        Push(BuildBinaryOperation(state.op(), left, right, Type::String(zone()),
                                  right_type, result_type,
                                  state.fixed_right_arg(), allocation_mode,
                                  state.strength()));
      }
      if_leftisstring.Else();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right, left_type, right_type, result_type,
            state.fixed_right_arg(), allocation_mode, state.strength()));
      }
      if_leftisstring.End();
      result = Pop();
    } else {
      IfBuilder if_rightisstring(this);
      if_rightisstring.If(right);
      if_rightisstring.Then();
      {
        Push(BuildBinaryOperation(state.op(), left, right, left_type,
                                  Type::String(zone()), result_type,
                                  state.fixed_right_arg(), allocation_mode,
                                  state.strength()));
      }
      if_rightisstring.Else();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right, left_type, right_type, result_type,
            state.fixed_right_arg(), allocation_mode, state.strength()));
      }
      if_rightisstring.End();
      result = Pop();
    }
  } else {
    result = BuildBinaryOperation(
        state.op(), left, right, left_type, right_type, result_type,
        state.fixed_right_arg(), allocation_mode, state.strength());
  }

  // If we encounter a generic argument, the number conversion is
  // observable, thus we cannot afford to bail out after the fact.
  if (!state.HasSideEffects()) {
    result = EnforceNumberType(result, result_type);
  }

  return result;
}

Handle BinaryOpICStub::GenerateCode() { return DoGenerateCode(this); }

// BinaryOpWithAllocationSite: same as BinaryOpIC but threads an allocation
// site through the allocation mode.
template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  BinaryOpICState state = casted_stub()->state();

  HValue* allocation_site = GetParameter(
      BinaryOpWithAllocationSiteStub::kAllocationSite);
  HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft);
  HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight);

  Type* left_type = state.GetLeftType();
  Type* right_type = state.GetRightType();
  Type* result_type = state.GetResultType();
  HAllocationMode allocation_mode(allocation_site);

  return BuildBinaryOperation(state.op(), left, right, left_type, right_type,
                              result_type, state.fixed_right_arg(),
                              allocation_mode, state.strength());
}

Handle BinaryOpWithAllocationSiteStub::GenerateCode() {
  return DoGenerateCode(this);
}

// Converts {input} to a string.  When {convert} is false only a string check
// is emitted; otherwise smis and primitives are converted, via ToPrimitive
// first for non-primitives (ES6 12.7.3).
HValue* CodeStubGraphBuilderBase::BuildToString(HValue* input, bool convert) {
  if (!convert) return BuildCheckString(input);
  IfBuilder if_inputissmi(this);
  HValue* inputissmi = if_inputissmi.If(input);
  if_inputissmi.Then();
  {
    // Convert the input smi to a string.
    Push(BuildNumberToString(input, Type::SignedSmall()));
  }
  if_inputissmi.Else();
  {
    HValue* input_map =
        Add(input, inputissmi, HObjectAccess::ForMap());
    HValue* input_instance_type = Add(
        input_map, inputissmi, HObjectAccess::ForMapInstanceType());
    IfBuilder if_inputisstring(this);
    if_inputisstring.If(
        input_instance_type, Add(FIRST_NONSTRING_TYPE), Token::LT);
    if_inputisstring.Then();
    {
      // The input is already a string.
      Push(input);
    }
    if_inputisstring.Else();
    {
      // Convert to primitive first (if necessary), see
      // ES6 section 12.7.3 The Addition operator.
      IfBuilder if_inputisprimitive(this);
      STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE);
      if_inputisprimitive.If(
          input_instance_type, Add(LAST_PRIMITIVE_TYPE), Token::LTE);
      if_inputisprimitive.Then();
      {
        // The input is already a primitive.
// NOTE(review): this region duplicates earlier content of the same file
// (the BuildToString tail and BuildToPrimitive appear twice in this chunk);
// template-argument lists were stripped from the rendering -- restore from
// upstream before compiling.  Only comments were added; tokens unchanged.
// --- tail of CodeStubGraphBuilderBase::BuildToString (head is above). ---
Push(input);
}
if_inputisprimitive.Else();
{
  // Convert the input to a primitive.
  Push(BuildToPrimitive(input, input_map));
}
if_inputisprimitive.End();
// Convert the primitive to a string value.
ToStringDescriptor descriptor(isolate());
ToStringStub stub(isolate());
HValue* values[] = {context(), Pop()};
Push(AddUncasted(
    Add(stub.GetCode()), 0, descriptor,
    Vector(values, arraysize(values))));
}
if_inputisstring.End();
}
if_inputissmi.End();
return Pop();
}

// Converts {input} (with map {input_map}) to a primitive value, with a fast
// path for unmodified JSValue string wrappers; falls back to the
// %ToPrimitive runtime call otherwise.
HValue* CodeStubGraphBuilderBase::BuildToPrimitive(HValue* input,
                                                   HValue* input_map) {
  // Get the native context of the caller.
  HValue* native_context = BuildGetNativeContext();

  // Determine the initial map of the %ObjectPrototype%.
  HValue* object_function_prototype_map =
      Add(native_context, nullptr,
          HObjectAccess::ForContextSlot(
              Context::OBJECT_FUNCTION_PROTOTYPE_MAP_INDEX));

  // Determine the initial map of the %StringPrototype%.
  HValue* string_function_prototype_map =
      Add(native_context, nullptr,
          HObjectAccess::ForContextSlot(
              Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));

  // Determine the initial map of the String function.
  HValue* string_function = Add(
      native_context, nullptr,
      HObjectAccess::ForContextSlot(Context::STRING_FUNCTION_INDEX));
  HValue* string_function_initial_map = Add(
      string_function, nullptr, HObjectAccess::ForPrototypeOrInitialMap());

  // Determine the map of the [[Prototype]] of {input}.
  HValue* input_prototype =
      Add(input_map, nullptr, HObjectAccess::ForPrototype());
  HValue* input_prototype_map =
      Add(input_prototype, nullptr, HObjectAccess::ForMap());

  // For string wrappers (JSValue instances with [[StringData]] internal
  // fields), we can shortcirciut the ToPrimitive if
  //
  //  (a) the {input} map matches the initial map of the String function,
  //  (b) the {input} [[Prototype]] is the unmodified %StringPrototype% (i.e.
  //      no one monkey-patched toString, @@toPrimitive or valueOf), and
  //  (c) the %ObjectPrototype% (i.e. the [[Prototype]] of the
  //      %StringPrototype%) is also unmodified, that is no one sneaked a
  //      @@toPrimitive into the %ObjectPrototype%.
  //
  // If all these assumptions hold, we can just take the [[StringData]] value
  // and return it.
  // TODO(bmeurer): This just repairs a regression introduced by removing the
  // weird (and broken) intrinsic %_IsStringWrapperSafeForDefaultValue, which
  // was intendend to something similar to this, although less efficient and
  // wrong in the presence of @@toPrimitive. Long-term we might want to move
  // into the direction of having a ToPrimitiveStub that can do common cases
  // while staying in JavaScript land (i.e. not going to C++).
  IfBuilder if_inputisstringwrapper(this);
  if_inputisstringwrapper.If(
      input_map, string_function_initial_map);
  if_inputisstringwrapper.And();
  if_inputisstringwrapper.If(
      input_prototype_map, string_function_prototype_map);
  if_inputisstringwrapper.And();
  if_inputisstringwrapper.If(
      Add(Add(input_prototype_map, nullptr, HObjectAccess::ForPrototype()),
          nullptr, HObjectAccess::ForMap()),
      object_function_prototype_map);
  if_inputisstringwrapper.Then();
  {
    Push(BuildLoadNamedField(
        input, FieldIndex::ForInObjectOffset(JSValue::kValueOffset)));
  }
  if_inputisstringwrapper.Else();
  {
    // TODO(bmeurer): Add support for fast ToPrimitive conversion using
    // a dedicated ToPrimitiveStub.
    Add(input);
    Push(Add(Runtime::FunctionForId(Runtime::kToPrimitive), 1));
  }
  if_inputisstringwrapper.End();
  return Pop();
}

// StringAdd: coerce the operands per the stub's flags and build the add.
template <>
HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() {
  StringAddStub* stub = casted_stub();
  StringAddFlags flags = stub->flags();
  PretenureFlag pretenure_flag = stub->pretenure_flag();

  HValue* left = GetParameter(StringAddStub::kLeft);
  HValue* right = GetParameter(StringAddStub::kRight);

  // Make sure that both arguments are strings if not known in advance.
if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) { left = BuildToString(left, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) { right = BuildToString(right, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } return BuildStringAdd(left, right, HAllocationMode(pretenure_flag)); } Handle StringAddStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { ToBooleanStub* stub = casted_stub(); IfBuilder if_true(this); if_true.If(GetParameter(0), stub->types()); if_true.Then(); if_true.Return(graph()->GetConstantTrue()); if_true.Else(); if_true.End(); return graph()->GetConstantFalse(); } Handle ToBooleanStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StoreGlobalStub* stub = casted_stub(); HParameter* value = GetParameter(StoreDescriptor::kValueIndex); if (stub->check_global()) { // Check that the map of the global has not changed: use a placeholder map // that will be replaced later with the global object's map. HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex); HValue* proxy_map = Add(proxy, nullptr, HObjectAccess::ForMap()); HValue* global = Add(proxy_map, nullptr, HObjectAccess::ForPrototype()); HValue* map_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::global_map_placeholder(isolate()))); HValue* expected_map = Add( map_cell, nullptr, HObjectAccess::ForWeakCellValue()); HValue* map = Add(global, nullptr, HObjectAccess::ForMap()); IfBuilder map_check(this); map_check.IfNot(expected_map, map); map_check.ThenDeopt(Deoptimizer::kUnknownMap); map_check.End(); } HValue* weak_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::property_cell_placeholder(isolate()))); HValue* cell = Add(weak_cell, nullptr, HObjectAccess::ForWeakCellValue()); Add
// ---------------------------------------------------------------------------
// NOTE(review): this region was damaged during extraction — every
// angle-bracketed token (template argument lists, Handle<Code>, Add<H...>)
// was stripped, and the CreateWeakCellStub graph builder that preceded this
// point was lost entirely.  The template arguments below are restored to the
// conventional V8 Hydrogen spellings; confirm against upstream before landing.
// ---------------------------------------------------------------------------

Handle<Code> CreateWeakCellStub::GenerateCode() { return DoGenerateCode(this); }


// Loads a slot (at a compile-time-constant index) out of a script context
// found at a known index in the native context's script-context table.
template <>
HValue* CodeStubGraphBuilder<LoadScriptContextFieldStub>::BuildCodeStub() {
  int context_index = casted_stub()->context_index();
  int slot_index = casted_stub()->slot_index();

  HValue* script_context = BuildGetScriptContext(context_index);
  return Add<HLoadNamedField>(script_context, nullptr,
                              HObjectAccess::ForContextSlot(slot_index));
}


Handle<Code> LoadScriptContextFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}


// Stores parameter 2 into a script-context slot.  The slot is known to be
// already initialized, hence STORE_TO_INITIALIZED_ENTRY.
template <>
HValue* CodeStubGraphBuilder<StoreScriptContextFieldStub>::BuildCodeStub() {
  int context_index = casted_stub()->context_index();
  int slot_index = casted_stub()->slot_index();

  HValue* script_context = BuildGetScriptContext(context_index);
  Add<HStoreNamedField>(script_context,
                        HObjectAccess::ForContextSlot(slot_index),
                        GetParameter(2), STORE_TO_INITIALIZED_ENTRY);
  return GetParameter(2);
}


Handle<Code> StoreScriptContextFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}


// Grows an object's elements backing store so that |key| becomes a valid
// index.  For JS arrays the array length (not the raw capacity) bounds the
// portion that must be copied.
template <>
HValue* CodeStubGraphBuilder<GrowArrayElementsStub>::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  if (IsFastDoubleElementsKind(kind)) {
    info()->MarkAsSavesCallerDoubles();
  }

  HValue* object = GetParameter(GrowArrayElementsDescriptor::kObjectIndex);
  HValue* key = GetParameter(GrowArrayElementsDescriptor::kKeyIndex);

  HValue* elements = AddLoadElements(object);
  HValue* current_capacity = Add<HLoadNamedField>(
      elements, nullptr, HObjectAccess::ForFixedArrayLength());

  HValue* length =
      casted_stub()->is_js_array()
          ? Add<HLoadNamedField>(object, static_cast<HValue*>(NULL),
                                 HObjectAccess::ForArrayLength(kind))
          : current_capacity;

  return BuildCheckAndGrowElementsCapacity(object, elements, kind, length,
                                           current_capacity, key);
}


Handle<Code> GrowArrayElementsStub::GenerateCode() {
  return DoGenerateCode(this);
}


// Monomorphic keyed load from fast elements; optionally converts the hole to
// undefined instead of deoptimizing on a hole load.
template <>
HValue* CodeStubGraphBuilder<LoadFastElementStub>::BuildCodeStub() {
  LoadKeyedHoleMode hole_mode = casted_stub()->convert_hole_to_undefined()
                                    ? CONVERT_HOLE_TO_UNDEFINED
                                    : NEVER_RETURN_HOLE;

  HInstruction* load = BuildUncheckedMonomorphicElementAccess(
      GetParameter(LoadDescriptor::kReceiverIndex),
      GetParameter(LoadDescriptor::kNameIndex), NULL,
      casted_stub()->is_js_array(), casted_stub()->elements_kind(), LOAD,
      hole_mode, STANDARD_STORE);
  return load;
}


Handle<Code> LoadFastElementStub::GenerateCode() {
  return DoGenerateCode(this);
}
access = HObjectAccess::ForHeapNumberValue(); } return Add(object, nullptr, access); } template<> HValue* CodeStubGraphBuilder::BuildCodeStub() { return BuildLoadNamedField(GetParameter(0), casted_stub()->index()); } Handle LoadFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { return BuildArrayBufferViewFieldAccessor(GetParameter(0), nullptr, casted_stub()->index()); } Handle ArrayBufferViewLoadFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* map = AddLoadMap(GetParameter(0), NULL); HObjectAccess descriptors_access = HObjectAccess::ForObservableJSObjectOffset( Map::kDescriptorsOffset, Representation::Tagged()); HValue* descriptors = Add(map, nullptr, descriptors_access); HObjectAccess value_access = HObjectAccess::ForObservableJSObjectOffset( DescriptorArray::GetValueOffset(casted_stub()->constant_index())); return Add(descriptors, nullptr, value_access); } Handle LoadConstantStub::GenerateCode() { return DoGenerateCode(this); } HValue* CodeStubGraphBuilderBase::UnmappedCase(HValue* elements, HValue* key, HValue* value) { HValue* result = NULL; HInstruction* backing_store = Add(elements, graph()->GetConstant1(), nullptr, nullptr, FAST_ELEMENTS, ALLOW_RETURN_HOLE); Add(backing_store, isolate()->factory()->fixed_array_map()); HValue* backing_store_length = Add( backing_store, nullptr, HObjectAccess::ForFixedArrayLength()); IfBuilder in_unmapped_range(this); in_unmapped_range.If(key, backing_store_length, Token::LT); in_unmapped_range.Then(); { if (value == NULL) { result = Add(backing_store, key, nullptr, nullptr, FAST_HOLEY_ELEMENTS, NEVER_RETURN_HOLE); } else { Add(backing_store, key, value, nullptr, FAST_HOLEY_ELEMENTS); } } in_unmapped_range.ElseDeopt(Deoptimizer::kOutsideOfRange); in_unmapped_range.End(); return result; } HValue* CodeStubGraphBuilderBase::EmitKeyedSloppyArguments(HValue* receiver, HValue* key, HValue* 
value) { // Mapped arguments are actual arguments. Unmapped arguments are values added // to the arguments object after it was created for the call. Mapped arguments // are stored in the context at indexes given by elements[key + 2]. Unmapped // arguments are stored as regular indexed properties in the arguments array, // held at elements[1]. See NewSloppyArguments() in runtime.cc for a detailed // look at argument object construction. // // The sloppy arguments elements array has a special format: // // 0: context // 1: unmapped arguments array // 2: mapped_index0, // 3: mapped_index1, // ... // // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments). // If key + 2 >= elements.length then attempt to look in the unmapped // arguments array (given by elements[1]) and return the value at key, missing // to the runtime if the unmapped arguments array is not a fixed array or if // key >= unmapped_arguments_array.length. // // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value // in the unmapped arguments array, as described above. Otherwise, t is a Smi // index into the context array given at elements[0]. Return the value at // context[t]. 
bool is_load = value == NULL; key = AddUncasted(key, Representation::Smi()); IfBuilder positive_smi(this); positive_smi.If(key, graph()->GetConstant0(), Token::LT); positive_smi.ThenDeopt(Deoptimizer::kKeyIsNegative); positive_smi.End(); HValue* constant_two = Add(2); HValue* elements = AddLoadElements(receiver, nullptr); HValue* elements_length = Add( elements, nullptr, HObjectAccess::ForFixedArrayLength()); HValue* adjusted_length = AddUncasted(elements_length, constant_two); IfBuilder in_range(this); in_range.If(key, adjusted_length, Token::LT); in_range.Then(); { HValue* index = AddUncasted(key, constant_two); HInstruction* mapped_index = Add(elements, index, nullptr, nullptr, FAST_HOLEY_ELEMENTS, ALLOW_RETURN_HOLE); IfBuilder is_valid(this); is_valid.IfNot(mapped_index, graph()->GetConstantHole()); is_valid.Then(); { // TODO(mvstanton): I'd like to assert from this point, that if the // mapped_index is not the hole that it is indeed, a smi. An unnecessary // smi check is being emitted. HValue* the_context = Add(elements, graph()->GetConstant0(), nullptr, nullptr, FAST_ELEMENTS); STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize); if (is_load) { HValue* result = Add(the_context, mapped_index, nullptr, nullptr, FAST_ELEMENTS, ALLOW_RETURN_HOLE); environment()->Push(result); } else { DCHECK(value != NULL); Add(the_context, mapped_index, value, nullptr, FAST_ELEMENTS); environment()->Push(value); } } is_valid.Else(); { HValue* result = UnmappedCase(elements, key, value); environment()->Push(is_load ? result : value); } is_valid.End(); } in_range.Else(); { HValue* result = UnmappedCase(elements, key, value); environment()->Push(is_load ? 
result : value); } in_range.End(); return environment()->Pop(); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex); HValue* key = GetParameter(LoadDescriptor::kNameIndex); return EmitKeyedSloppyArguments(receiver, key, NULL); } Handle KeyedLoadSloppyArgumentsStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* receiver = GetParameter(StoreDescriptor::kReceiverIndex); HValue* key = GetParameter(StoreDescriptor::kNameIndex); HValue* value = GetParameter(StoreDescriptor::kValueIndex); return EmitKeyedSloppyArguments(receiver, key, value); } Handle KeyedStoreSloppyArgumentsStub::GenerateCode() { return DoGenerateCode(this); } void CodeStubGraphBuilderBase::BuildStoreNamedField( HValue* object, HValue* value, FieldIndex index, Representation representation, bool transition_to_field) { DCHECK(!index.is_double() || representation.IsDouble()); int offset = index.offset(); HObjectAccess access = index.is_inobject() ? HObjectAccess::ForObservableJSObjectOffset(offset, representation) : HObjectAccess::ForBackingStoreOffset(offset, representation); if (representation.IsDouble()) { if (!FLAG_unbox_double_fields || !index.is_inobject()) { HObjectAccess heap_number_access = access.WithRepresentation(Representation::Tagged()); if (transition_to_field) { // The store requires a mutable HeapNumber to be allocated. NoObservableSideEffectsScope no_side_effects(this); HInstruction* heap_number_size = Add(HeapNumber::kSize); // TODO(hpayer): Allocation site pretenuring support. HInstruction* heap_number = Add(heap_number_size, HType::HeapObject(), NOT_TENURED, MUTABLE_HEAP_NUMBER_TYPE); AddStoreMapConstant(heap_number, isolate()->factory()->mutable_heap_number_map()); Add(heap_number, HObjectAccess::ForHeapNumberValue(), value); // Store the new mutable heap number into the object. 
access = heap_number_access; value = heap_number; } else { // Load the heap number. object = Add(object, nullptr, heap_number_access); // Store the double value into it. access = HObjectAccess::ForHeapNumberValue(); } } } else if (representation.IsHeapObject()) { BuildCheckHeapObject(value); } Add(object, access, value, INITIALIZING_STORE); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BuildStoreNamedField(GetParameter(0), GetParameter(2), casted_stub()->index(), casted_stub()->representation(), false); return GetParameter(2); } Handle StoreFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* object = GetParameter(StoreTransitionHelper::ReceiverIndex()); switch (casted_stub()->store_mode()) { case StoreTransitionStub::ExtendStorageAndStoreMapAndValue: { HValue* properties = Add( object, nullptr, HObjectAccess::ForPropertiesPointer()); HValue* length = AddLoadFixedArrayLength(properties); HValue* delta = Add(static_cast(JSObject::kFieldsAdded)); HValue* new_capacity = AddUncasted(length, delta); // Grow properties array. ElementsKind kind = FAST_ELEMENTS; Add(new_capacity, Add((Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) >> ElementsKindToShiftSize(kind))); // Reuse this code for properties backing store allocation. HValue* new_properties = BuildAllocateAndInitializeArray(kind, new_capacity); BuildCopyProperties(properties, new_properties, length, new_capacity); Add(object, HObjectAccess::ForPropertiesPointer(), new_properties); } // Fall through. case StoreTransitionStub::StoreMapAndValue: // Store the new value into the "extended" object. BuildStoreNamedField( object, GetParameter(StoreTransitionHelper::ValueIndex()), casted_stub()->index(), casted_stub()->representation(), true); // Fall through. case StoreTransitionStub::StoreMapOnly: // And finally update the map. 
Add(object, HObjectAccess::ForMap(), GetParameter(StoreTransitionHelper::MapIndex())); break; } return GetParameter(StoreTransitionHelper::ValueIndex()); } Handle StoreTransitionStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BuildUncheckedMonomorphicElementAccess( GetParameter(StoreDescriptor::kReceiverIndex), GetParameter(StoreDescriptor::kNameIndex), GetParameter(StoreDescriptor::kValueIndex), casted_stub()->is_js_array(), casted_stub()->elements_kind(), STORE, NEVER_RETURN_HOLE, casted_stub()->store_mode()); return GetParameter(2); } Handle StoreFastElementStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { info()->MarkAsSavesCallerDoubles(); BuildTransitionElementsKind(GetParameter(0), GetParameter(1), casted_stub()->from_kind(), casted_stub()->to_kind(), casted_stub()->is_js_array()); return GetParameter(0); } Handle TransitionElementsKindStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(Add(HeapNumber::kSize), HType::HeapNumber(), NOT_TENURED, HEAP_NUMBER_TYPE); AddStoreMapConstant(result, isolate()->factory()->heap_number_map()); return result; } Handle AllocateHeapNumberStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(Add(HeapNumber::kSize), HType::HeapObject(), NOT_TENURED, MUTABLE_HEAP_NUMBER_TYPE); AddStoreMapConstant(result, isolate()->factory()->mutable_heap_number_map()); return result; } Handle AllocateMutableHeapNumberStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(GetParameter(0), HType::Tagged(), NOT_TENURED, JS_OBJECT_TYPE); return result; } Handle AllocateInNewSpaceStub::GenerateCode() { return DoGenerateCode(this); } HValue* 
CodeStubGraphBuilderBase::BuildArrayConstructor( ElementsKind kind, AllocationSiteOverrideMode override_mode, ArgumentClass argument_class) { HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor); HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite); JSArrayBuilder array_builder(this, kind, alloc_site, constructor, override_mode); HValue* result = NULL; switch (argument_class) { case NONE: // This stub is very performance sensitive, the generated code must be // tuned so that it doesn't build and eager frame. info()->MarkMustNotHaveEagerFrame(); result = array_builder.AllocateEmptyArray(); break; case SINGLE: result = BuildArraySingleArgumentConstructor(&array_builder); break; case MULTIPLE: result = BuildArrayNArgumentsConstructor(&array_builder, kind); break; } return result; } HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor( ElementsKind kind, ArgumentClass argument_class) { HValue* constructor = GetParameter( InternalArrayConstructorStubBase::kConstructor); JSArrayBuilder array_builder(this, kind, constructor); HValue* result = NULL; switch (argument_class) { case NONE: // This stub is very performance sensitive, the generated code must be // tuned so that it doesn't build and eager frame. info()->MarkMustNotHaveEagerFrame(); result = array_builder.AllocateEmptyArray(); break; case SINGLE: result = BuildArraySingleArgumentConstructor(&array_builder); break; case MULTIPLE: result = BuildArrayNArgumentsConstructor(&array_builder, kind); break; } return result; } HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor( JSArrayBuilder* array_builder) { // Smi check and range check on the input arg. 
HValue* constant_one = graph()->GetConstant1(); HValue* constant_zero = graph()->GetConstant0(); HInstruction* elements = Add(false); HInstruction* argument = Add( elements, constant_one, constant_zero); return BuildAllocateArrayFromLength(array_builder, argument); } HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor( JSArrayBuilder* array_builder, ElementsKind kind) { // Insert a bounds check because the number of arguments might exceed // the kInitialMaxFastElementArray limit. This cannot happen for code // that was parsed, but calling via Array.apply(thisArg, [...]) might // trigger it. HValue* length = GetArgumentsLength(); HConstant* max_alloc_length = Add(JSArray::kInitialMaxFastElementArray); HValue* checked_length = Add(length, max_alloc_length); // We need to fill with the hole if it's a smi array in the multi-argument // case because we might have to bail out while copying arguments into // the array because they aren't compatible with a smi array. // If it's a double array, no problem, and if it's fast then no // problem either because doubles are boxed. // // TODO(mvstanton): consider an instruction to memset fill the array // with zero in this case instead. JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind) ? JSArrayBuilder::FILL_WITH_HOLE : JSArrayBuilder::DONT_FILL_WITH_HOLE; HValue* new_object = array_builder->AllocateArray(checked_length, max_alloc_length, checked_length, fill_mode); HValue* elements = array_builder->GetElementsLocation(); DCHECK(elements != NULL); // Now populate the elements correctly. 
LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement); HValue* start = graph()->GetConstant0(); HValue* key = builder.BeginBody(start, checked_length, Token::LT); HInstruction* argument_elements = Add(false); HInstruction* argument = Add( argument_elements, checked_length, key); Add(elements, key, argument, nullptr, kind); builder.EndBody(); return new_object; } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, NONE); } Handle ArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, SINGLE); } Handle ArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, MULTIPLE); } Handle ArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, NONE); } Handle InternalArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, SINGLE); } Handle InternalArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = 
// NOTE(review): template argument lists in this region were stripped during
// extraction; the IfBuilder dispatch templates (HIsStringAndBranch,
// HCompareNumericAndBranch, ...) below are restored from the standard V8
// Hydrogen API — confirm against upstream before landing.

template <>
HValue* CodeStubGraphBuilder<InternalArrayNArgumentsConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, MULTIPLE);
}


Handle<Code> InternalArrayNArgumentsConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<CompareNilICStub>::BuildCodeInitializedStub() {
  Isolate* isolate = graph()->isolate();
  CompareNilICStub* stub = casted_stub();
  HIfContinuation continuation;
  Handle<Map> sentinel_map(isolate->heap()->meta_map());
  Type* type = stub->GetType(zone(), sentinel_map);
  BuildCompareNil(GetParameter(0), type, &continuation, kEmbedMapsViaWeakCells);
  IfBuilder if_nil(this, &continuation);
  if_nil.Then();
  if (continuation.IsFalseReachable()) {
    if_nil.Else();
    if_nil.Return(graph()->GetConstantFalse());
  }
  if_nil.End();
  return continuation.IsTrueReachable() ? graph()->GetConstantTrue()
                                        : graph()->GetConstantUndefined();
}


Handle<Code> CompareNilICStub::GenerateCode() { return DoGenerateCode(this); }


template <>
HValue* CodeStubGraphBuilder<BinaryOpICStub>::BuildCodeInitializedStub() {
  BinaryOpICState state = casted_stub()->state();

  HValue* left = GetParameter(BinaryOpICStub::kLeft);
  HValue* right = GetParameter(BinaryOpICStub::kRight);

  Type* left_type = state.GetLeftType();
  Type* right_type = state.GetRightType();
  Type* result_type = state.GetResultType();

  DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) &&
         (state.HasSideEffects() || !result_type->Is(Type::None())));

  HValue* result = NULL;
  HAllocationMode allocation_mode(NOT_TENURED);
  if (state.op() == Token::ADD && (left_type->Maybe(Type::String()) ||
                                   right_type->Maybe(Type::String())) &&
      !left_type->Is(Type::String()) && !right_type->Is(Type::String())) {
    // For the generic add stub a fast case for string addition is performance
    // critical.
    if (left_type->Maybe(Type::String())) {
      IfBuilder if_leftisstring(this);
      if_leftisstring.If<HIsStringAndBranch>(left);
      if_leftisstring.Then();
      {
        Push(BuildBinaryOperation(state.op(), left, right, Type::String(zone()),
                                  right_type, result_type,
                                  state.fixed_right_arg(), allocation_mode,
                                  state.strength()));
      }
      if_leftisstring.Else();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right, left_type, right_type, result_type,
            state.fixed_right_arg(), allocation_mode, state.strength()));
      }
      if_leftisstring.End();
      result = Pop();
    } else {
      IfBuilder if_rightisstring(this);
      if_rightisstring.If<HIsStringAndBranch>(right);
      if_rightisstring.Then();
      {
        Push(BuildBinaryOperation(state.op(), left, right, left_type,
                                  Type::String(zone()), result_type,
                                  state.fixed_right_arg(), allocation_mode,
                                  state.strength()));
      }
      if_rightisstring.Else();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right, left_type, right_type, result_type,
            state.fixed_right_arg(), allocation_mode, state.strength()));
      }
      if_rightisstring.End();
      result = Pop();
    }
  } else {
    result = BuildBinaryOperation(
        state.op(), left, right, left_type, right_type, result_type,
        state.fixed_right_arg(), allocation_mode, state.strength());
  }

  // If we encounter a generic argument, the number conversion is
  // observable, thus we cannot afford to bail out after the fact.
  if (!state.HasSideEffects()) {
    result = EnforceNumberType(result, result_type);
  }

  return result;
}


Handle<Code> BinaryOpICStub::GenerateCode() { return DoGenerateCode(this); }


template <>
HValue* CodeStubGraphBuilder<BinaryOpWithAllocationSiteStub>::BuildCodeStub() {
  BinaryOpICState state = casted_stub()->state();

  HValue* allocation_site = GetParameter(
      BinaryOpWithAllocationSiteStub::kAllocationSite);
  HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft);
  HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight);

  Type* left_type = state.GetLeftType();
  Type* right_type = state.GetRightType();
  Type* result_type = state.GetResultType();
  HAllocationMode allocation_mode(allocation_site);

  return BuildBinaryOperation(state.op(), left, right, left_type, right_type,
                              result_type, state.fixed_right_arg(),
                              allocation_mode, state.strength());
}


Handle<Code> BinaryOpWithAllocationSiteStub::GenerateCode() {
  return DoGenerateCode(this);
}


// Converts |input| to a string; when |convert| is false only a string check
// is emitted.  Smis go through NumberToString, primitives through the
// ToStringStub, and everything else through ToPrimitive first.
HValue* CodeStubGraphBuilderBase::BuildToString(HValue* input, bool convert) {
  if (!convert) return BuildCheckString(input);
  IfBuilder if_inputissmi(this);
  HValue* inputissmi = if_inputissmi.If<HIsSmiAndBranch>(input);
  if_inputissmi.Then();
  {
    // Convert the input smi to a string.
    Push(BuildNumberToString(input, Type::SignedSmall()));
  }
  if_inputissmi.Else();
  {
    HValue* input_map =
        Add<HLoadNamedField>(input, inputissmi, HObjectAccess::ForMap());
    HValue* input_instance_type = Add<HLoadNamedField>(
        input_map, inputissmi, HObjectAccess::ForMapInstanceType());
    IfBuilder if_inputisstring(this);
    if_inputisstring.If<HCompareNumericAndBranch>(
        input_instance_type, Add<HConstant>(FIRST_NONSTRING_TYPE), Token::LT);
    if_inputisstring.Then();
    {
      // The input is already a string.
      Push(input);
    }
    if_inputisstring.Else();
    {
      // Convert to primitive first (if necessary), see
      // ES6 section 12.7.3 The Addition operator.
      IfBuilder if_inputisprimitive(this);
      STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE);
      if_inputisprimitive.If<HCompareNumericAndBranch>(
          input_instance_type, Add<HConstant>(LAST_PRIMITIVE_TYPE),
          Token::LTE);
      if_inputisprimitive.Then();
      {
        // The input is already a primitive.
        Push(input);
      }
      if_inputisprimitive.Else();
      {
        // Convert the input to a primitive.
        Push(BuildToPrimitive(input, input_map));
      }
      if_inputisprimitive.End();
      // Convert the primitive to a string value.
      ToStringDescriptor descriptor(isolate());
      ToStringStub stub(isolate());
      HValue* values[] = {context(), Pop()};
      Push(AddUncasted<HCallWithDescriptor>(
          Add<HConstant>(stub.GetCode()), 0, descriptor,
          Vector<HValue*>(values, arraysize(values))));
    }
    if_inputisstring.End();
  }
  if_inputissmi.End();
  return Pop();
}
the [[Prototype]] of the // %StringPrototype%) is also unmodified, that is no one sneaked a // @@toPrimitive into the %ObjectPrototype%. // // If all these assumptions hold, we can just take the [[StringData]] value // and return it. // TODO(bmeurer): This just repairs a regression introduced by removing the // weird (and broken) intrinsic %_IsStringWrapperSafeForDefaultValue, which // was intendend to something similar to this, although less efficient and // wrong in the presence of @@toPrimitive. Long-term we might want to move // into the direction of having a ToPrimitiveStub that can do common cases // while staying in JavaScript land (i.e. not going to C++). IfBuilder if_inputisstringwrapper(this); if_inputisstringwrapper.If( input_map, string_function_initial_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( input_prototype_map, string_function_prototype_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( Add(Add(input_prototype_map, nullptr, HObjectAccess::ForPrototype()), nullptr, HObjectAccess::ForMap()), object_function_prototype_map); if_inputisstringwrapper.Then(); { Push(BuildLoadNamedField( input, FieldIndex::ForInObjectOffset(JSValue::kValueOffset))); } if_inputisstringwrapper.Else(); { // TODO(bmeurer): Add support for fast ToPrimitive conversion using // a dedicated ToPrimitiveStub. Add(input); Push(Add(Runtime::FunctionForId(Runtime::kToPrimitive), 1)); } if_inputisstringwrapper.End(); return Pop(); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StringAddStub* stub = casted_stub(); StringAddFlags flags = stub->flags(); PretenureFlag pretenure_flag = stub->pretenure_flag(); HValue* left = GetParameter(StringAddStub::kLeft); HValue* right = GetParameter(StringAddStub::kRight); // Make sure that both arguments are strings if not known in advance. 
if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) { left = BuildToString(left, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) { right = BuildToString(right, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } return BuildStringAdd(left, right, HAllocationMode(pretenure_flag)); } Handle StringAddStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { ToBooleanStub* stub = casted_stub(); IfBuilder if_true(this); if_true.If(GetParameter(0), stub->types()); if_true.Then(); if_true.Return(graph()->GetConstantTrue()); if_true.Else(); if_true.End(); return graph()->GetConstantFalse(); } Handle ToBooleanStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StoreGlobalStub* stub = casted_stub(); HParameter* value = GetParameter(StoreDescriptor::kValueIndex); if (stub->check_global()) { // Check that the map of the global has not changed: use a placeholder map // that will be replaced later with the global object's map. HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex); HValue* proxy_map = Add(proxy, nullptr, HObjectAccess::ForMap()); HValue* global = Add(proxy_map, nullptr, HObjectAccess::ForPrototype()); HValue* map_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::global_map_placeholder(isolate()))); HValue* expected_map = Add( map_cell, nullptr, HObjectAccess::ForWeakCellValue()); HValue* map = Add(global, nullptr, HObjectAccess::ForMap()); IfBuilder map_check(this); map_check.IfNot(expected_map, map); map_check.ThenDeopt(Deoptimizer::kUnknownMap); map_check.End(); } HValue* weak_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::property_cell_placeholder(isolate()))); HValue* cell = Add(weak_cell, nullptr, HObjectAccess::ForWeakCellValue()); Add
LoadScriptContextFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { int context_index = casted_stub()->context_index(); int slot_index = casted_stub()->slot_index(); HValue* script_context = BuildGetScriptContext(context_index); Add(script_context, HObjectAccess::ForContextSlot(slot_index), GetParameter(2), STORE_TO_INITIALIZED_ENTRY); return GetParameter(2); } Handle StoreScriptContextFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); if (IsFastDoubleElementsKind(kind)) { info()->MarkAsSavesCallerDoubles(); } HValue* object = GetParameter(GrowArrayElementsDescriptor::kObjectIndex); HValue* key = GetParameter(GrowArrayElementsDescriptor::kKeyIndex); HValue* elements = AddLoadElements(object); HValue* current_capacity = Add( elements, nullptr, HObjectAccess::ForFixedArrayLength()); HValue* length = casted_stub()->is_js_array() ? Add(object, static_cast(NULL), HObjectAccess::ForArrayLength(kind)) : current_capacity; return BuildCheckAndGrowElementsCapacity(object, elements, kind, length, current_capacity, key); } Handle GrowArrayElementsStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { LoadKeyedHoleMode hole_mode = casted_stub()->convert_hole_to_undefined() ? CONVERT_HOLE_TO_UNDEFINED : NEVER_RETURN_HOLE; HInstruction* load = BuildUncheckedMonomorphicElementAccess( GetParameter(LoadDescriptor::kReceiverIndex), GetParameter(LoadDescriptor::kNameIndex), NULL, casted_stub()->is_js_array(), casted_stub()->elements_kind(), LOAD, hole_mode, STANDARD_STORE); return load; } Handle LoadFastElementStub::GenerateCode() { return DoGenerateCode(this); } HLoadNamedField* CodeStubGraphBuilderBase::BuildLoadNamedField( HValue* object, FieldIndex index) { Representation representation = index.is_double() ? 
Representation::Double() : Representation::Tagged(); int offset = index.offset(); HObjectAccess access = index.is_inobject() ? HObjectAccess::ForObservableJSObjectOffset(offset, representation) : HObjectAccess::ForBackingStoreOffset(offset, representation); if (index.is_double() && (!FLAG_unbox_double_fields || !index.is_inobject())) { // Load the heap number. object = Add( object, nullptr, access.WithRepresentation(Representation::Tagged())); // Load the double value from it. access = HObjectAccess::ForHeapNumberValue(); } return Add(object, nullptr, access); } template<> HValue* CodeStubGraphBuilder::BuildCodeStub() { return BuildLoadNamedField(GetParameter(0), casted_stub()->index()); } Handle LoadFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { return BuildArrayBufferViewFieldAccessor(GetParameter(0), nullptr, casted_stub()->index()); } Handle ArrayBufferViewLoadFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* map = AddLoadMap(GetParameter(0), NULL); HObjectAccess descriptors_access = HObjectAccess::ForObservableJSObjectOffset( Map::kDescriptorsOffset, Representation::Tagged()); HValue* descriptors = Add(map, nullptr, descriptors_access); HObjectAccess value_access = HObjectAccess::ForObservableJSObjectOffset( DescriptorArray::GetValueOffset(casted_stub()->constant_index())); return Add(descriptors, nullptr, value_access); } Handle LoadConstantStub::GenerateCode() { return DoGenerateCode(this); } HValue* CodeStubGraphBuilderBase::UnmappedCase(HValue* elements, HValue* key, HValue* value) { HValue* result = NULL; HInstruction* backing_store = Add(elements, graph()->GetConstant1(), nullptr, nullptr, FAST_ELEMENTS, ALLOW_RETURN_HOLE); Add(backing_store, isolate()->factory()->fixed_array_map()); HValue* backing_store_length = Add( backing_store, nullptr, HObjectAccess::ForFixedArrayLength()); IfBuilder 
// NOTE(review): angle-bracket template arguments (e.g. on Add/If/Handle)
// appear to have been stripped from this chunk during extraction; restore
// them before compiling.
// (continuation of UnmappedCase; the IfBuilder declarator ends the previous
// line)
in_unmapped_range(this);
// Deopt unless the key lies inside the unmapped backing store.
in_unmapped_range.If(key, backing_store_length, Token::LT);
in_unmapped_range.Then();
{
  if (value == NULL) {
    // Load case: read the element; holes must not escape to the caller.
    result = Add(backing_store, key, nullptr, nullptr, FAST_HOLEY_ELEMENTS,
                 NEVER_RETURN_HOLE);
  } else {
    // Store case: write the element in place.
    Add(backing_store, key, value, nullptr, FAST_HOLEY_ELEMENTS);
  }
}
in_unmapped_range.ElseDeopt(Deoptimizer::kOutsideOfRange);
in_unmapped_range.End();
// For stores |result| stays NULL; callers push the stored value instead.
return result;
}


// Emits the element access for a sloppy-arguments object: a load when
// |value| == NULL, a store otherwise. Returns the loaded value (or |value|
// for stores) via the environment stack.
HValue* CodeStubGraphBuilderBase::EmitKeyedSloppyArguments(HValue* receiver,
                                                           HValue* key,
                                                           HValue* value) {
  // Mapped arguments are actual arguments. Unmapped arguments are values added
  // to the arguments object after it was created for the call. Mapped arguments
  // are stored in the context at indexes given by elements[key + 2]. Unmapped
  // arguments are stored as regular indexed properties in the arguments array,
  // held at elements[1]. See NewSloppyArguments() in runtime.cc for a detailed
  // look at argument object construction.
  //
  // The sloppy arguments elements array has a special format:
  //
  // 0: context
  // 1: unmapped arguments array
  // 2: mapped_index0,
  // 3: mapped_index1,
  // ...
  //
  // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments).
  // If key + 2 >= elements.length then attempt to look in the unmapped
  // arguments array (given by elements[1]) and return the value at key, missing
  // to the runtime if the unmapped arguments array is not a fixed array or if
  // key >= unmapped_arguments_array.length.
  //
  // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value
  // in the unmapped arguments array, as described above. Otherwise, t is a Smi
  // index into the context array given at elements[0]. Return the value at
  // context[t].
  bool is_load = value == NULL;

  key = AddUncasted(key, Representation::Smi());
  // Negative keys can never be mapped arguments; deopt immediately.
  IfBuilder positive_smi(this);
  positive_smi.If(key, graph()->GetConstant0(), Token::LT);
  positive_smi.ThenDeopt(Deoptimizer::kKeyIsNegative);
  positive_smi.End();

  HValue* constant_two = Add(2);
  HValue* elements = AddLoadElements(receiver, nullptr);
  HValue* elements_length = Add(
      elements, nullptr, HObjectAccess::ForFixedArrayLength());
  // Mapped entries start at elements[2], so "key + 2 < elements.length" is
  // tested as "key < elements.length - 2".
  HValue* adjusted_length = AddUncasted(elements_length, constant_two);
  IfBuilder in_range(this);
  in_range.If(key, adjusted_length, Token::LT);
  in_range.Then();
  {
    HValue* index = AddUncasted(key, constant_two);
    HInstruction* mapped_index =
        Add(elements, index, nullptr, nullptr, FAST_HOLEY_ELEMENTS,
            ALLOW_RETURN_HOLE);

    IfBuilder is_valid(this);
    is_valid.IfNot(mapped_index, graph()->GetConstantHole());
    is_valid.Then();
    {
      // TODO(mvstanton): I'd like to assert from this point, that if the
      // mapped_index is not the hole that it is indeed, a smi. An unnecessary
      // smi check is being emitted.
      HValue* the_context = Add(elements, graph()->GetConstant0(), nullptr,
                                nullptr, FAST_ELEMENTS);
      STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize);
      if (is_load) {
        HValue* result = Add(the_context, mapped_index, nullptr, nullptr,
                             FAST_ELEMENTS, ALLOW_RETURN_HOLE);
        environment()->Push(result);
      } else {
        DCHECK(value != NULL);
        Add(the_context, mapped_index, value, nullptr, FAST_ELEMENTS);
        environment()->Push(value);
      }
    }
    is_valid.Else();
    {
      // A hole in the mapped area means the argument lives in the unmapped
      // arguments array.
      HValue* result = UnmappedCase(elements, key, value);
      environment()->Push(is_load ? result : value);
    }
    is_valid.End();
  }
  in_range.Else();
  {
    // key + 2 >= elements.length: only the unmapped arguments array can hold
    // the value.
    HValue* result = UnmappedCase(elements, key, value);
    environment()->Push(is_load ? result : value);
  }
  in_range.End();

  return environment()->Pop();
}


// Keyed load from a sloppy-arguments object (value == NULL selects the load
// path of EmitKeyedSloppyArguments).
template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
  HValue* key = GetParameter(LoadDescriptor::kNameIndex);

  return EmitKeyedSloppyArguments(receiver, key, NULL);
}


Handle KeyedLoadSloppyArgumentsStub::GenerateCode() {
  return DoGenerateCode(this);
}


// Keyed store to a sloppy-arguments object (store path of the helper above).
template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  HValue* receiver = GetParameter(StoreDescriptor::kReceiverIndex);
  HValue* key = GetParameter(StoreDescriptor::kNameIndex);
  HValue* value = GetParameter(StoreDescriptor::kValueIndex);

  return EmitKeyedSloppyArguments(receiver, key, value);
}


Handle KeyedStoreSloppyArgumentsStub::GenerateCode() {
  return DoGenerateCode(this);
}


// Stores |value| into the field described by |index| / |representation| on
// |object|. Double fields are boxed in (mutable) HeapNumbers unless the
// unbox-double-fields mode applies to in-object fields; when
// |transition_to_field| is set a fresh mutable HeapNumber is allocated,
// otherwise the existing box is updated in place.
void CodeStubGraphBuilderBase::BuildStoreNamedField(
    HValue* object, HValue* value, FieldIndex index,
    Representation representation, bool transition_to_field) {
  DCHECK(!index.is_double() || representation.IsDouble());
  int offset = index.offset();
  HObjectAccess access =
      index.is_inobject()
          ? HObjectAccess::ForObservableJSObjectOffset(offset, representation)
          : HObjectAccess::ForBackingStoreOffset(offset, representation);

  if (representation.IsDouble()) {
    if (!FLAG_unbox_double_fields || !index.is_inobject()) {
      HObjectAccess heap_number_access =
          access.WithRepresentation(Representation::Tagged());
      if (transition_to_field) {
        // The store requires a mutable HeapNumber to be allocated.
        NoObservableSideEffectsScope no_side_effects(this);
        HInstruction* heap_number_size = Add(HeapNumber::kSize);

        // TODO(hpayer): Allocation site pretenuring support.
        HInstruction* heap_number =
            Add(heap_number_size, HType::HeapObject(), NOT_TENURED,
                MUTABLE_HEAP_NUMBER_TYPE);
        AddStoreMapConstant(heap_number,
                            isolate()->factory()->mutable_heap_number_map());
        Add(heap_number, HObjectAccess::ForHeapNumberValue(), value);
        // Store the new mutable heap number into the object.
// NOTE(review): angle-bracket template arguments appear to have been stripped
// from this chunk during extraction; restore them before compiling.
// (continuation of BuildStoreNamedField: finish the double-field store paths)
        access = heap_number_access;
        value = heap_number;
      } else {
        // Load the heap number.
        object = Add(object, nullptr, heap_number_access);
        // Store the double value into it.
        access = HObjectAccess::ForHeapNumberValue();
      }
    }
  } else if (representation.IsHeapObject()) {
    BuildCheckHeapObject(value);
  }

  Add(object, access, value, INITIALIZING_STORE);
}


// Plain (non-transitioning) named-field store; the stored value (parameter 2)
// is also the stub's result.
template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  BuildStoreNamedField(GetParameter(0), GetParameter(2), casted_stub()->index(),
                       casted_stub()->representation(), false);
  return GetParameter(2);
}


Handle StoreFieldStub::GenerateCode() { return DoGenerateCode(this); }


// Transitioning store. The switch cases deliberately fall through from the
// most work (grow the out-of-object properties backing store, store the
// value, install the map) down to the least (install the map only).
template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  HValue* object = GetParameter(StoreTransitionHelper::ReceiverIndex());

  switch (casted_stub()->store_mode()) {
    case StoreTransitionStub::ExtendStorageAndStoreMapAndValue: {
      HValue* properties = Add(
          object, nullptr, HObjectAccess::ForPropertiesPointer());
      HValue* length = AddLoadFixedArrayLength(properties);
      HValue* delta = Add(static_cast(JSObject::kFieldsAdded));
      HValue* new_capacity = AddUncasted(length, delta);

      // Grow properties array.
      ElementsKind kind = FAST_ELEMENTS;
      // Bounds check: the grown backing store must still fit in a regular
      // heap page.
      Add(new_capacity,
          Add((Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) >>
              ElementsKindToShiftSize(kind)));

      // Reuse this code for properties backing store allocation.
      HValue* new_properties =
          BuildAllocateAndInitializeArray(kind, new_capacity);

      BuildCopyProperties(properties, new_properties, length, new_capacity);

      Add(object, HObjectAccess::ForPropertiesPointer(), new_properties);
    }
    // Fall through.
    case StoreTransitionStub::StoreMapAndValue:
      // Store the new value into the "extended" object.
      BuildStoreNamedField(
          object, GetParameter(StoreTransitionHelper::ValueIndex()),
          casted_stub()->index(), casted_stub()->representation(), true);
    // Fall through.
    case StoreTransitionStub::StoreMapOnly:
      // And finally update the map.
      Add(object, HObjectAccess::ForMap(),
          GetParameter(StoreTransitionHelper::MapIndex()));
      break;
  }
  return GetParameter(StoreTransitionHelper::ValueIndex());
}


Handle StoreTransitionStub::GenerateCode() { return DoGenerateCode(this); }


// Monomorphic fast-elements keyed store; the stored value (parameter 2) is
// the result.
template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  BuildUncheckedMonomorphicElementAccess(
      GetParameter(StoreDescriptor::kReceiverIndex),
      GetParameter(StoreDescriptor::kNameIndex),
      GetParameter(StoreDescriptor::kValueIndex), casted_stub()->is_js_array(),
      casted_stub()->elements_kind(), STORE, NEVER_RETURN_HOLE,
      casted_stub()->store_mode());
  return GetParameter(2);
}


Handle StoreFastElementStub::GenerateCode() { return DoGenerateCode(this); }


// Transitions the elements kind of the receiver (parameter 0) to the stub's
// target kind; caller doubles are saved across this stub.
template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  info()->MarkAsSavesCallerDoubles();

  BuildTransitionElementsKind(GetParameter(0), GetParameter(1),
                              casted_stub()->from_kind(),
                              casted_stub()->to_kind(),
                              casted_stub()->is_js_array());

  return GetParameter(0);
}


Handle TransitionElementsKindStub::GenerateCode() {
  return DoGenerateCode(this);
}


// Allocates an (immutable) HeapNumber in new space and installs its map.
template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  HValue* result =
      Add(Add(HeapNumber::kSize), HType::HeapNumber(), NOT_TENURED,
          HEAP_NUMBER_TYPE);
  AddStoreMapConstant(result, isolate()->factory()->heap_number_map());
  return result;
}


Handle AllocateHeapNumberStub::GenerateCode() {
  return DoGenerateCode(this);
}


// Allocates a mutable HeapNumber (MUTABLE_HEAP_NUMBER_TYPE) in new space.
template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  HValue* result =
      Add(Add(HeapNumber::kSize), HType::HeapObject(), NOT_TENURED,
          MUTABLE_HEAP_NUMBER_TYPE);
  AddStoreMapConstant(result, isolate()->factory()->mutable_heap_number_map());
  return result;
}


Handle AllocateMutableHeapNumberStub::GenerateCode() {
  return DoGenerateCode(this);
}


// Raw new-space allocation of the size given by parameter 0.
template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  HValue* result =
      Add(GetParameter(0), HType::Tagged(), NOT_TENURED, JS_OBJECT_TYPE);
  return result;
}


Handle AllocateInNewSpaceStub::GenerateCode() { return DoGenerateCode(this); }


HValue*
// NOTE(review): angle-bracket template arguments appear to have been stripped
// from this chunk during extraction; restore them before compiling.
// (the "HValue*" return type of this definition ends the previous line)
CodeStubGraphBuilderBase::BuildArrayConstructor(
    ElementsKind kind, AllocationSiteOverrideMode override_mode,
    ArgumentClass argument_class) {
  // Dispatches Array-constructor code generation on the argument-count class
  // (no args / one arg / N args), threading the allocation site through.
  HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor);
  HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite);
  JSArrayBuilder array_builder(this, kind, alloc_site, constructor,
                               override_mode);
  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      // This stub is very performance sensitive, the generated code must be
      // tuned so that it doesn't build and eager frame.
      info()->MarkMustNotHaveEagerFrame();
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }

  return result;
}


// Same dispatch as BuildArrayConstructor, but for the internal Array
// constructor, which carries no allocation site.
HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor(
    ElementsKind kind, ArgumentClass argument_class) {
  HValue* constructor = GetParameter(
      InternalArrayConstructorStubBase::kConstructor);
  JSArrayBuilder array_builder(this, kind, constructor);

  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      // This stub is very performance sensitive, the generated code must be
      // tuned so that it doesn't build and eager frame.
      info()->MarkMustNotHaveEagerFrame();
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }
  return result;
}


// Builds "new Array(len)": reads the single argument and allocates an array
// of that length.
HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor(
    JSArrayBuilder* array_builder) {
  // Smi check and range check on the input arg.
  HValue* constant_one = graph()->GetConstant1();
  HValue* constant_zero = graph()->GetConstant0();

  HInstruction* elements = Add(false);
  HInstruction* argument = Add(
      elements, constant_one, constant_zero);

  return BuildAllocateArrayFromLength(array_builder, argument);
}


// Builds "new Array(a, b, ...)": allocates an array sized to the argument
// count and copies the arguments into its elements.
HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor(
    JSArrayBuilder* array_builder, ElementsKind kind) {
  // Insert a bounds check because the number of arguments might exceed
  // the kInitialMaxFastElementArray limit. This cannot happen for code
  // that was parsed, but calling via Array.apply(thisArg, [...]) might
  // trigger it.
  HValue* length = GetArgumentsLength();
  HConstant* max_alloc_length =
      Add(JSArray::kInitialMaxFastElementArray);
  HValue* checked_length = Add(length, max_alloc_length);

  // We need to fill with the hole if it's a smi array in the multi-argument
  // case because we might have to bail out while copying arguments into
  // the array because they aren't compatible with a smi array.
  // If it's a double array, no problem, and if it's fast then no
  // problem either because doubles are boxed.
  //
  // TODO(mvstanton): consider an instruction to memset fill the array
  // with zero in this case instead.
  JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind)
      ? JSArrayBuilder::FILL_WITH_HOLE
      : JSArrayBuilder::DONT_FILL_WITH_HOLE;
  HValue* new_object = array_builder->AllocateArray(checked_length,
                                                    max_alloc_length,
                                                    checked_length,
                                                    fill_mode);
  HValue* elements = array_builder->GetElementsLocation();
  DCHECK(elements != NULL);

  // Now populate the elements correctly.
  LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
  HValue* start = graph()->GetConstant0();
  HValue* key = builder.BeginBody(start, checked_length, Token::LT);
  HInstruction* argument_elements = Add(false);
  HInstruction* argument = Add(
      argument_elements, checked_length, key);

  Add(elements, key, argument, nullptr, kind);
  builder.EndBody();
  return new_object;
}


// The six stubs below pair each (Array / InternalArray) constructor with an
// argument-count class and delegate to the shared builders above.
template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, NONE);
}


Handle ArrayNoArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, SINGLE);
}


Handle ArraySingleArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, MULTIPLE);
}


Handle ArrayNArgumentsConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, NONE);
}


Handle InternalArrayNoArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, SINGLE);
}


Handle InternalArraySingleArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder::
    BuildCodeStub() {
  ElementsKind kind =
// NOTE(review): angle-bracket template arguments appear to have been stripped
// from this chunk during extraction; restore them before compiling.
// (continuation: right-hand side of the assignment begun on the previous line)
casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, MULTIPLE);
}


Handle InternalArrayNArgumentsConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


// CompareNil IC: compares parameter 0 against the nil-related type the IC
// recorded (maps are embedded via weak cells) and yields true / false /
// undefined constants depending on which outcomes are reachable.
template <>
HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() {
  Isolate* isolate = graph()->isolate();
  CompareNilICStub* stub = casted_stub();
  HIfContinuation continuation;
  Handle sentinel_map(isolate->heap()->meta_map());
  Type* type = stub->GetType(zone(), sentinel_map);
  BuildCompareNil(GetParameter(0), type, &continuation, kEmbedMapsViaWeakCells);
  IfBuilder if_nil(this, &continuation);
  if_nil.Then();
  if (continuation.IsFalseReachable()) {
    if_nil.Else();
    if_nil.Return(graph()->GetConstantFalse());
  }
  if_nil.End();
  return continuation.IsTrueReachable() ? graph()->GetConstantTrue()
                                        : graph()->GetConstantUndefined();
}


Handle CompareNilICStub::GenerateCode() { return DoGenerateCode(this); }


// BinaryOp IC: generates the binary operation specialized to the left /
// right / result types recorded in the IC state. Token::ADD with a
// maybe-string operand gets a dedicated branch so string addition stays fast.
template <>
HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() {
  BinaryOpICState state = casted_stub()->state();

  HValue* left = GetParameter(BinaryOpICStub::kLeft);
  HValue* right = GetParameter(BinaryOpICStub::kRight);

  Type* left_type = state.GetLeftType();
  Type* right_type = state.GetRightType();
  Type* result_type = state.GetResultType();

  DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) &&
         (state.HasSideEffects() || !result_type->Is(Type::None())));

  HValue* result = NULL;
  HAllocationMode allocation_mode(NOT_TENURED);
  if (state.op() == Token::ADD &&
      (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) &&
      !left_type->Is(Type::String()) && !right_type->Is(Type::String())) {
    // For the generic add stub a fast case for string addition is performance
    // critical.
    if (left_type->Maybe(Type::String())) {
      IfBuilder if_leftisstring(this);
      if_leftisstring.If(left);
      if_leftisstring.Then();
      {
        // Left is known to be a string here: narrow its type accordingly.
        Push(BuildBinaryOperation(state.op(), left, right, Type::String(zone()),
                                  right_type, result_type,
                                  state.fixed_right_arg(), allocation_mode,
                                  state.strength()));
      }
      if_leftisstring.Else();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right, left_type, right_type, result_type,
            state.fixed_right_arg(), allocation_mode, state.strength()));
      }
      if_leftisstring.End();
      result = Pop();
    } else {
      IfBuilder if_rightisstring(this);
      if_rightisstring.If(right);
      if_rightisstring.Then();
      {
        // Right is known to be a string here: narrow its type accordingly.
        Push(BuildBinaryOperation(state.op(), left, right, left_type,
                                  Type::String(zone()), result_type,
                                  state.fixed_right_arg(), allocation_mode,
                                  state.strength()));
      }
      if_rightisstring.Else();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right, left_type, right_type, result_type,
            state.fixed_right_arg(), allocation_mode, state.strength()));
      }
      if_rightisstring.End();
      result = Pop();
    }
  } else {
    result = BuildBinaryOperation(
        state.op(), left, right, left_type, right_type, result_type,
        state.fixed_right_arg(), allocation_mode, state.strength());
  }

  // If we encounter a generic argument, the number conversion is
  // observable, thus we cannot afford to bail out after the fact.
  if (!state.HasSideEffects()) {
    result = EnforceNumberType(result, result_type);
  }

  return result;
}


Handle BinaryOpICStub::GenerateCode() { return DoGenerateCode(this); }


// BinaryOp IC variant that receives an allocation site parameter and feeds
// it into the allocation mode used for the result.
template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  BinaryOpICState state = casted_stub()->state();

  HValue* allocation_site = GetParameter(
      BinaryOpWithAllocationSiteStub::kAllocationSite);
  HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft);
  HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight);

  Type* left_type = state.GetLeftType();
  Type* right_type = state.GetRightType();
  Type* result_type = state.GetResultType();
  HAllocationMode allocation_mode(allocation_site);

  return BuildBinaryOperation(state.op(), left, right, left_type, right_type,
                              result_type, state.fixed_right_arg(),
                              allocation_mode, state.strength());
}


Handle BinaryOpWithAllocationSiteStub::GenerateCode() {
  return DoGenerateCode(this);
}


// ToString helper. When |convert| is false this only checks that |input| is
// a string; otherwise smis, strings and other primitives are converted
// inline, and non-primitives go through ToPrimitive first.
HValue* CodeStubGraphBuilderBase::BuildToString(HValue* input, bool convert) {
  if (!convert) return BuildCheckString(input);
  IfBuilder if_inputissmi(this);
  HValue* inputissmi = if_inputissmi.If(input);
  if_inputissmi.Then();
  {
    // Convert the input smi to a string.
    Push(BuildNumberToString(input, Type::SignedSmall()));
  }
  if_inputissmi.Else();
  {
    HValue* input_map =
        Add(input, inputissmi, HObjectAccess::ForMap());
    HValue* input_instance_type = Add(
        input_map, inputissmi, HObjectAccess::ForMapInstanceType());
    IfBuilder if_inputisstring(this);
    if_inputisstring.If(
        input_instance_type, Add(FIRST_NONSTRING_TYPE), Token::LT);
    if_inputisstring.Then();
    {
      // The input is already a string.
      Push(input);
    }
    if_inputisstring.Else();
    {
      // Convert to primitive first (if necessary), see
      // ES6 section 12.7.3 The Addition operator.
      IfBuilder if_inputisprimitive(this);
      STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE);
      if_inputisprimitive.If(
          input_instance_type, Add(LAST_PRIMITIVE_TYPE), Token::LTE);
      if_inputisprimitive.Then();
      {
        // The input is already a primitive.
Push(input); } if_inputisprimitive.Else(); { // Convert the input to a primitive. Push(BuildToPrimitive(input, input_map)); } if_inputisprimitive.End(); // Convert the primitive to a string value. ToStringDescriptor descriptor(isolate()); ToStringStub stub(isolate()); HValue* values[] = {context(), Pop()}; Push(AddUncasted( Add(stub.GetCode()), 0, descriptor, Vector(values, arraysize(values)))); } if_inputisstring.End(); } if_inputissmi.End(); return Pop(); } HValue* CodeStubGraphBuilderBase::BuildToPrimitive(HValue* input, HValue* input_map) { // Get the native context of the caller. HValue* native_context = BuildGetNativeContext(); // Determine the initial map of the %ObjectPrototype%. HValue* object_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::OBJECT_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the %StringPrototype%. HValue* string_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the String function. HValue* string_function = Add( native_context, nullptr, HObjectAccess::ForContextSlot(Context::STRING_FUNCTION_INDEX)); HValue* string_function_initial_map = Add( string_function, nullptr, HObjectAccess::ForPrototypeOrInitialMap()); // Determine the map of the [[Prototype]] of {input}. HValue* input_prototype = Add(input_map, nullptr, HObjectAccess::ForPrototype()); HValue* input_prototype_map = Add(input_prototype, nullptr, HObjectAccess::ForMap()); // For string wrappers (JSValue instances with [[StringData]] internal // fields), we can shortcirciut the ToPrimitive if // // (a) the {input} map matches the initial map of the String function, // (b) the {input} [[Prototype]] is the unmodified %StringPrototype% (i.e. // no one monkey-patched toString, @@toPrimitive or valueOf), and // (c) the %ObjectPrototype% (i.e. 
the [[Prototype]] of the // %StringPrototype%) is also unmodified, that is no one sneaked a // @@toPrimitive into the %ObjectPrototype%. // // If all these assumptions hold, we can just take the [[StringData]] value // and return it. // TODO(bmeurer): This just repairs a regression introduced by removing the // weird (and broken) intrinsic %_IsStringWrapperSafeForDefaultValue, which // was intendend to something similar to this, although less efficient and // wrong in the presence of @@toPrimitive. Long-term we might want to move // into the direction of having a ToPrimitiveStub that can do common cases // while staying in JavaScript land (i.e. not going to C++). IfBuilder if_inputisstringwrapper(this); if_inputisstringwrapper.If( input_map, string_function_initial_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( input_prototype_map, string_function_prototype_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( Add(Add(input_prototype_map, nullptr, HObjectAccess::ForPrototype()), nullptr, HObjectAccess::ForMap()), object_function_prototype_map); if_inputisstringwrapper.Then(); { Push(BuildLoadNamedField( input, FieldIndex::ForInObjectOffset(JSValue::kValueOffset))); } if_inputisstringwrapper.Else(); { // TODO(bmeurer): Add support for fast ToPrimitive conversion using // a dedicated ToPrimitiveStub. Add(input); Push(Add(Runtime::FunctionForId(Runtime::kToPrimitive), 1)); } if_inputisstringwrapper.End(); return Pop(); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StringAddStub* stub = casted_stub(); StringAddFlags flags = stub->flags(); PretenureFlag pretenure_flag = stub->pretenure_flag(); HValue* left = GetParameter(StringAddStub::kLeft); HValue* right = GetParameter(StringAddStub::kRight); // Make sure that both arguments are strings if not known in advance. 
if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) { left = BuildToString(left, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) { right = BuildToString(right, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } return BuildStringAdd(left, right, HAllocationMode(pretenure_flag)); } Handle StringAddStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { ToBooleanStub* stub = casted_stub(); IfBuilder if_true(this); if_true.If(GetParameter(0), stub->types()); if_true.Then(); if_true.Return(graph()->GetConstantTrue()); if_true.Else(); if_true.End(); return graph()->GetConstantFalse(); } Handle ToBooleanStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StoreGlobalStub* stub = casted_stub(); HParameter* value = GetParameter(StoreDescriptor::kValueIndex); if (stub->check_global()) { // Check that the map of the global has not changed: use a placeholder map // that will be replaced later with the global object's map. HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex); HValue* proxy_map = Add(proxy, nullptr, HObjectAccess::ForMap()); HValue* global = Add(proxy_map, nullptr, HObjectAccess::ForPrototype()); HValue* map_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::global_map_placeholder(isolate()))); HValue* expected_map = Add( map_cell, nullptr, HObjectAccess::ForWeakCellValue()); HValue* map = Add(global, nullptr, HObjectAccess::ForMap()); IfBuilder map_check(this); map_check.IfNot(expected_map, map); map_check.ThenDeopt(Deoptimizer::kUnknownMap); map_check.End(); } HValue* weak_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::property_cell_placeholder(isolate()))); HValue* cell = Add(weak_cell, nullptr, HObjectAccess::ForWeakCellValue()); Add
StoreScriptContextFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); if (IsFastDoubleElementsKind(kind)) { info()->MarkAsSavesCallerDoubles(); } HValue* object = GetParameter(GrowArrayElementsDescriptor::kObjectIndex); HValue* key = GetParameter(GrowArrayElementsDescriptor::kKeyIndex); HValue* elements = AddLoadElements(object); HValue* current_capacity = Add( elements, nullptr, HObjectAccess::ForFixedArrayLength()); HValue* length = casted_stub()->is_js_array() ? Add(object, static_cast(NULL), HObjectAccess::ForArrayLength(kind)) : current_capacity; return BuildCheckAndGrowElementsCapacity(object, elements, kind, length, current_capacity, key); } Handle GrowArrayElementsStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { LoadKeyedHoleMode hole_mode = casted_stub()->convert_hole_to_undefined() ? CONVERT_HOLE_TO_UNDEFINED : NEVER_RETURN_HOLE; HInstruction* load = BuildUncheckedMonomorphicElementAccess( GetParameter(LoadDescriptor::kReceiverIndex), GetParameter(LoadDescriptor::kNameIndex), NULL, casted_stub()->is_js_array(), casted_stub()->elements_kind(), LOAD, hole_mode, STANDARD_STORE); return load; } Handle LoadFastElementStub::GenerateCode() { return DoGenerateCode(this); } HLoadNamedField* CodeStubGraphBuilderBase::BuildLoadNamedField( HValue* object, FieldIndex index) { Representation representation = index.is_double() ? Representation::Double() : Representation::Tagged(); int offset = index.offset(); HObjectAccess access = index.is_inobject() ? HObjectAccess::ForObservableJSObjectOffset(offset, representation) : HObjectAccess::ForBackingStoreOffset(offset, representation); if (index.is_double() && (!FLAG_unbox_double_fields || !index.is_inobject())) { // Load the heap number. 
object = Add( object, nullptr, access.WithRepresentation(Representation::Tagged())); // Load the double value from it. access = HObjectAccess::ForHeapNumberValue(); } return Add(object, nullptr, access); } template<> HValue* CodeStubGraphBuilder::BuildCodeStub() { return BuildLoadNamedField(GetParameter(0), casted_stub()->index()); } Handle LoadFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { return BuildArrayBufferViewFieldAccessor(GetParameter(0), nullptr, casted_stub()->index()); } Handle ArrayBufferViewLoadFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* map = AddLoadMap(GetParameter(0), NULL); HObjectAccess descriptors_access = HObjectAccess::ForObservableJSObjectOffset( Map::kDescriptorsOffset, Representation::Tagged()); HValue* descriptors = Add(map, nullptr, descriptors_access); HObjectAccess value_access = HObjectAccess::ForObservableJSObjectOffset( DescriptorArray::GetValueOffset(casted_stub()->constant_index())); return Add(descriptors, nullptr, value_access); } Handle LoadConstantStub::GenerateCode() { return DoGenerateCode(this); } HValue* CodeStubGraphBuilderBase::UnmappedCase(HValue* elements, HValue* key, HValue* value) { HValue* result = NULL; HInstruction* backing_store = Add(elements, graph()->GetConstant1(), nullptr, nullptr, FAST_ELEMENTS, ALLOW_RETURN_HOLE); Add(backing_store, isolate()->factory()->fixed_array_map()); HValue* backing_store_length = Add( backing_store, nullptr, HObjectAccess::ForFixedArrayLength()); IfBuilder in_unmapped_range(this); in_unmapped_range.If(key, backing_store_length, Token::LT); in_unmapped_range.Then(); { if (value == NULL) { result = Add(backing_store, key, nullptr, nullptr, FAST_HOLEY_ELEMENTS, NEVER_RETURN_HOLE); } else { Add(backing_store, key, value, nullptr, FAST_HOLEY_ELEMENTS); } } in_unmapped_range.ElseDeopt(Deoptimizer::kOutsideOfRange); 
in_unmapped_range.End(); return result; } HValue* CodeStubGraphBuilderBase::EmitKeyedSloppyArguments(HValue* receiver, HValue* key, HValue* value) { // Mapped arguments are actual arguments. Unmapped arguments are values added // to the arguments object after it was created for the call. Mapped arguments // are stored in the context at indexes given by elements[key + 2]. Unmapped // arguments are stored as regular indexed properties in the arguments array, // held at elements[1]. See NewSloppyArguments() in runtime.cc for a detailed // look at argument object construction. // // The sloppy arguments elements array has a special format: // // 0: context // 1: unmapped arguments array // 2: mapped_index0, // 3: mapped_index1, // ... // // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments). // If key + 2 >= elements.length then attempt to look in the unmapped // arguments array (given by elements[1]) and return the value at key, missing // to the runtime if the unmapped arguments array is not a fixed array or if // key >= unmapped_arguments_array.length. // // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value // in the unmapped arguments array, as described above. Otherwise, t is a Smi // index into the context array given at elements[0]. Return the value at // context[t]. 
bool is_load = value == NULL; key = AddUncasted(key, Representation::Smi()); IfBuilder positive_smi(this); positive_smi.If(key, graph()->GetConstant0(), Token::LT); positive_smi.ThenDeopt(Deoptimizer::kKeyIsNegative); positive_smi.End(); HValue* constant_two = Add(2); HValue* elements = AddLoadElements(receiver, nullptr); HValue* elements_length = Add( elements, nullptr, HObjectAccess::ForFixedArrayLength()); HValue* adjusted_length = AddUncasted(elements_length, constant_two); IfBuilder in_range(this); in_range.If(key, adjusted_length, Token::LT); in_range.Then(); { HValue* index = AddUncasted(key, constant_two); HInstruction* mapped_index = Add(elements, index, nullptr, nullptr, FAST_HOLEY_ELEMENTS, ALLOW_RETURN_HOLE); IfBuilder is_valid(this); is_valid.IfNot(mapped_index, graph()->GetConstantHole()); is_valid.Then(); { // TODO(mvstanton): I'd like to assert from this point, that if the // mapped_index is not the hole that it is indeed, a smi. An unnecessary // smi check is being emitted. HValue* the_context = Add(elements, graph()->GetConstant0(), nullptr, nullptr, FAST_ELEMENTS); STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize); if (is_load) { HValue* result = Add(the_context, mapped_index, nullptr, nullptr, FAST_ELEMENTS, ALLOW_RETURN_HOLE); environment()->Push(result); } else { DCHECK(value != NULL); Add(the_context, mapped_index, value, nullptr, FAST_ELEMENTS); environment()->Push(value); } } is_valid.Else(); { HValue* result = UnmappedCase(elements, key, value); environment()->Push(is_load ? result : value); } is_valid.End(); } in_range.Else(); { HValue* result = UnmappedCase(elements, key, value); environment()->Push(is_load ? 
result : value); } in_range.End(); return environment()->Pop(); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex); HValue* key = GetParameter(LoadDescriptor::kNameIndex); return EmitKeyedSloppyArguments(receiver, key, NULL); } Handle KeyedLoadSloppyArgumentsStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* receiver = GetParameter(StoreDescriptor::kReceiverIndex); HValue* key = GetParameter(StoreDescriptor::kNameIndex); HValue* value = GetParameter(StoreDescriptor::kValueIndex); return EmitKeyedSloppyArguments(receiver, key, value); } Handle KeyedStoreSloppyArgumentsStub::GenerateCode() { return DoGenerateCode(this); } void CodeStubGraphBuilderBase::BuildStoreNamedField( HValue* object, HValue* value, FieldIndex index, Representation representation, bool transition_to_field) { DCHECK(!index.is_double() || representation.IsDouble()); int offset = index.offset(); HObjectAccess access = index.is_inobject() ? HObjectAccess::ForObservableJSObjectOffset(offset, representation) : HObjectAccess::ForBackingStoreOffset(offset, representation); if (representation.IsDouble()) { if (!FLAG_unbox_double_fields || !index.is_inobject()) { HObjectAccess heap_number_access = access.WithRepresentation(Representation::Tagged()); if (transition_to_field) { // The store requires a mutable HeapNumber to be allocated. NoObservableSideEffectsScope no_side_effects(this); HInstruction* heap_number_size = Add(HeapNumber::kSize); // TODO(hpayer): Allocation site pretenuring support. HInstruction* heap_number = Add(heap_number_size, HType::HeapObject(), NOT_TENURED, MUTABLE_HEAP_NUMBER_TYPE); AddStoreMapConstant(heap_number, isolate()->factory()->mutable_heap_number_map()); Add(heap_number, HObjectAccess::ForHeapNumberValue(), value); // Store the new mutable heap number into the object. 
access = heap_number_access; value = heap_number; } else { // Load the heap number. object = Add(object, nullptr, heap_number_access); // Store the double value into it. access = HObjectAccess::ForHeapNumberValue(); } } } else if (representation.IsHeapObject()) { BuildCheckHeapObject(value); } Add(object, access, value, INITIALIZING_STORE); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BuildStoreNamedField(GetParameter(0), GetParameter(2), casted_stub()->index(), casted_stub()->representation(), false); return GetParameter(2); } Handle StoreFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* object = GetParameter(StoreTransitionHelper::ReceiverIndex()); switch (casted_stub()->store_mode()) { case StoreTransitionStub::ExtendStorageAndStoreMapAndValue: { HValue* properties = Add( object, nullptr, HObjectAccess::ForPropertiesPointer()); HValue* length = AddLoadFixedArrayLength(properties); HValue* delta = Add(static_cast(JSObject::kFieldsAdded)); HValue* new_capacity = AddUncasted(length, delta); // Grow properties array. ElementsKind kind = FAST_ELEMENTS; Add(new_capacity, Add((Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) >> ElementsKindToShiftSize(kind))); // Reuse this code for properties backing store allocation. HValue* new_properties = BuildAllocateAndInitializeArray(kind, new_capacity); BuildCopyProperties(properties, new_properties, length, new_capacity); Add(object, HObjectAccess::ForPropertiesPointer(), new_properties); } // Fall through. case StoreTransitionStub::StoreMapAndValue: // Store the new value into the "extended" object. BuildStoreNamedField( object, GetParameter(StoreTransitionHelper::ValueIndex()), casted_stub()->index(), casted_stub()->representation(), true); // Fall through. case StoreTransitionStub::StoreMapOnly: // And finally update the map. 
Add(object, HObjectAccess::ForMap(), GetParameter(StoreTransitionHelper::MapIndex())); break; } return GetParameter(StoreTransitionHelper::ValueIndex()); } Handle StoreTransitionStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BuildUncheckedMonomorphicElementAccess( GetParameter(StoreDescriptor::kReceiverIndex), GetParameter(StoreDescriptor::kNameIndex), GetParameter(StoreDescriptor::kValueIndex), casted_stub()->is_js_array(), casted_stub()->elements_kind(), STORE, NEVER_RETURN_HOLE, casted_stub()->store_mode()); return GetParameter(2); } Handle StoreFastElementStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { info()->MarkAsSavesCallerDoubles(); BuildTransitionElementsKind(GetParameter(0), GetParameter(1), casted_stub()->from_kind(), casted_stub()->to_kind(), casted_stub()->is_js_array()); return GetParameter(0); } Handle TransitionElementsKindStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(Add(HeapNumber::kSize), HType::HeapNumber(), NOT_TENURED, HEAP_NUMBER_TYPE); AddStoreMapConstant(result, isolate()->factory()->heap_number_map()); return result; } Handle AllocateHeapNumberStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(Add(HeapNumber::kSize), HType::HeapObject(), NOT_TENURED, MUTABLE_HEAP_NUMBER_TYPE); AddStoreMapConstant(result, isolate()->factory()->mutable_heap_number_map()); return result; } Handle AllocateMutableHeapNumberStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(GetParameter(0), HType::Tagged(), NOT_TENURED, JS_OBJECT_TYPE); return result; } Handle AllocateInNewSpaceStub::GenerateCode() { return DoGenerateCode(this); } HValue* 
CodeStubGraphBuilderBase::BuildArrayConstructor( ElementsKind kind, AllocationSiteOverrideMode override_mode, ArgumentClass argument_class) { HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor); HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite); JSArrayBuilder array_builder(this, kind, alloc_site, constructor, override_mode); HValue* result = NULL; switch (argument_class) { case NONE: // This stub is very performance sensitive, the generated code must be // tuned so that it doesn't build and eager frame. info()->MarkMustNotHaveEagerFrame(); result = array_builder.AllocateEmptyArray(); break; case SINGLE: result = BuildArraySingleArgumentConstructor(&array_builder); break; case MULTIPLE: result = BuildArrayNArgumentsConstructor(&array_builder, kind); break; } return result; } HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor( ElementsKind kind, ArgumentClass argument_class) { HValue* constructor = GetParameter( InternalArrayConstructorStubBase::kConstructor); JSArrayBuilder array_builder(this, kind, constructor); HValue* result = NULL; switch (argument_class) { case NONE: // This stub is very performance sensitive, the generated code must be // tuned so that it doesn't build and eager frame. info()->MarkMustNotHaveEagerFrame(); result = array_builder.AllocateEmptyArray(); break; case SINGLE: result = BuildArraySingleArgumentConstructor(&array_builder); break; case MULTIPLE: result = BuildArrayNArgumentsConstructor(&array_builder, kind); break; } return result; } HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor( JSArrayBuilder* array_builder) { // Smi check and range check on the input arg. 
HValue* constant_one = graph()->GetConstant1(); HValue* constant_zero = graph()->GetConstant0(); HInstruction* elements = Add(false); HInstruction* argument = Add( elements, constant_one, constant_zero); return BuildAllocateArrayFromLength(array_builder, argument); } HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor( JSArrayBuilder* array_builder, ElementsKind kind) { // Insert a bounds check because the number of arguments might exceed // the kInitialMaxFastElementArray limit. This cannot happen for code // that was parsed, but calling via Array.apply(thisArg, [...]) might // trigger it. HValue* length = GetArgumentsLength(); HConstant* max_alloc_length = Add(JSArray::kInitialMaxFastElementArray); HValue* checked_length = Add(length, max_alloc_length); // We need to fill with the hole if it's a smi array in the multi-argument // case because we might have to bail out while copying arguments into // the array because they aren't compatible with a smi array. // If it's a double array, no problem, and if it's fast then no // problem either because doubles are boxed. // // TODO(mvstanton): consider an instruction to memset fill the array // with zero in this case instead. JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind) ? JSArrayBuilder::FILL_WITH_HOLE : JSArrayBuilder::DONT_FILL_WITH_HOLE; HValue* new_object = array_builder->AllocateArray(checked_length, max_alloc_length, checked_length, fill_mode); HValue* elements = array_builder->GetElementsLocation(); DCHECK(elements != NULL); // Now populate the elements correctly. 
LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement); HValue* start = graph()->GetConstant0(); HValue* key = builder.BeginBody(start, checked_length, Token::LT); HInstruction* argument_elements = Add(false); HInstruction* argument = Add( argument_elements, checked_length, key); Add(elements, key, argument, nullptr, kind); builder.EndBody(); return new_object; } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, NONE); } Handle ArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, SINGLE); } Handle ArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, MULTIPLE); } Handle ArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, NONE); } Handle InternalArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, SINGLE); } Handle InternalArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = 
casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, MULTIPLE); } Handle InternalArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { Isolate* isolate = graph()->isolate(); CompareNilICStub* stub = casted_stub(); HIfContinuation continuation; Handle sentinel_map(isolate->heap()->meta_map()); Type* type = stub->GetType(zone(), sentinel_map); BuildCompareNil(GetParameter(0), type, &continuation, kEmbedMapsViaWeakCells); IfBuilder if_nil(this, &continuation); if_nil.Then(); if (continuation.IsFalseReachable()) { if_nil.Else(); if_nil.Return(graph()->GetConstantFalse()); } if_nil.End(); return continuation.IsTrueReachable() ? graph()->GetConstantTrue() : graph()->GetConstantUndefined(); } Handle CompareNilICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { BinaryOpICState state = casted_stub()->state(); HValue* left = GetParameter(BinaryOpICStub::kLeft); HValue* right = GetParameter(BinaryOpICStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) && (state.HasSideEffects() || !result_type->Is(Type::None()))); HValue* result = NULL; HAllocationMode allocation_mode(NOT_TENURED); if (state.op() == Token::ADD && (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) && !left_type->Is(Type::String()) && !right_type->Is(Type::String())) { // For the generic add stub a fast case for string addition is performance // critical. 
if (left_type->Maybe(Type::String())) { IfBuilder if_leftisstring(this); if_leftisstring.If(left); if_leftisstring.Then(); { Push(BuildBinaryOperation(state.op(), left, right, Type::String(zone()), right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_leftisstring.Else(); { Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_leftisstring.End(); result = Pop(); } else { IfBuilder if_rightisstring(this); if_rightisstring.If(right); if_rightisstring.Then(); { Push(BuildBinaryOperation(state.op(), left, right, left_type, Type::String(zone()), result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_rightisstring.Else(); { Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_rightisstring.End(); result = Pop(); } } else { result = BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } // If we encounter a generic argument, the number conversion is // observable, thus we cannot afford to bail out after the fact. 
if (!state.HasSideEffects()) { result = EnforceNumberType(result, result_type); } return result; } Handle BinaryOpICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BinaryOpICState state = casted_stub()->state(); HValue* allocation_site = GetParameter( BinaryOpWithAllocationSiteStub::kAllocationSite); HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft); HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); HAllocationMode allocation_mode(allocation_site); return BuildBinaryOperation(state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } Handle BinaryOpWithAllocationSiteStub::GenerateCode() { return DoGenerateCode(this); } HValue* CodeStubGraphBuilderBase::BuildToString(HValue* input, bool convert) { if (!convert) return BuildCheckString(input); IfBuilder if_inputissmi(this); HValue* inputissmi = if_inputissmi.If(input); if_inputissmi.Then(); { // Convert the input smi to a string. Push(BuildNumberToString(input, Type::SignedSmall())); } if_inputissmi.Else(); { HValue* input_map = Add(input, inputissmi, HObjectAccess::ForMap()); HValue* input_instance_type = Add( input_map, inputissmi, HObjectAccess::ForMapInstanceType()); IfBuilder if_inputisstring(this); if_inputisstring.If( input_instance_type, Add(FIRST_NONSTRING_TYPE), Token::LT); if_inputisstring.Then(); { // The input is already a string. Push(input); } if_inputisstring.Else(); { // Convert to primitive first (if necessary), see // ES6 section 12.7.3 The Addition operator. IfBuilder if_inputisprimitive(this); STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE); if_inputisprimitive.If( input_instance_type, Add(LAST_PRIMITIVE_TYPE), Token::LTE); if_inputisprimitive.Then(); { // The input is already a primitive. 
Push(input); } if_inputisprimitive.Else(); { // Convert the input to a primitive. Push(BuildToPrimitive(input, input_map)); } if_inputisprimitive.End(); // Convert the primitive to a string value. ToStringDescriptor descriptor(isolate()); ToStringStub stub(isolate()); HValue* values[] = {context(), Pop()}; Push(AddUncasted( Add(stub.GetCode()), 0, descriptor, Vector(values, arraysize(values)))); } if_inputisstring.End(); } if_inputissmi.End(); return Pop(); } HValue* CodeStubGraphBuilderBase::BuildToPrimitive(HValue* input, HValue* input_map) { // Get the native context of the caller. HValue* native_context = BuildGetNativeContext(); // Determine the initial map of the %ObjectPrototype%. HValue* object_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::OBJECT_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the %StringPrototype%. HValue* string_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the String function. HValue* string_function = Add( native_context, nullptr, HObjectAccess::ForContextSlot(Context::STRING_FUNCTION_INDEX)); HValue* string_function_initial_map = Add( string_function, nullptr, HObjectAccess::ForPrototypeOrInitialMap()); // Determine the map of the [[Prototype]] of {input}. HValue* input_prototype = Add(input_map, nullptr, HObjectAccess::ForPrototype()); HValue* input_prototype_map = Add(input_prototype, nullptr, HObjectAccess::ForMap()); // For string wrappers (JSValue instances with [[StringData]] internal // fields), we can shortcirciut the ToPrimitive if // // (a) the {input} map matches the initial map of the String function, // (b) the {input} [[Prototype]] is the unmodified %StringPrototype% (i.e. // no one monkey-patched toString, @@toPrimitive or valueOf), and // (c) the %ObjectPrototype% (i.e. 
the [[Prototype]] of the // %StringPrototype%) is also unmodified, that is no one sneaked a // @@toPrimitive into the %ObjectPrototype%. // // If all these assumptions hold, we can just take the [[StringData]] value // and return it. // TODO(bmeurer): This just repairs a regression introduced by removing the // weird (and broken) intrinsic %_IsStringWrapperSafeForDefaultValue, which // was intendend to something similar to this, although less efficient and // wrong in the presence of @@toPrimitive. Long-term we might want to move // into the direction of having a ToPrimitiveStub that can do common cases // while staying in JavaScript land (i.e. not going to C++). IfBuilder if_inputisstringwrapper(this); if_inputisstringwrapper.If( input_map, string_function_initial_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( input_prototype_map, string_function_prototype_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( Add(Add(input_prototype_map, nullptr, HObjectAccess::ForPrototype()), nullptr, HObjectAccess::ForMap()), object_function_prototype_map); if_inputisstringwrapper.Then(); { Push(BuildLoadNamedField( input, FieldIndex::ForInObjectOffset(JSValue::kValueOffset))); } if_inputisstringwrapper.Else(); { // TODO(bmeurer): Add support for fast ToPrimitive conversion using // a dedicated ToPrimitiveStub. Add(input); Push(Add(Runtime::FunctionForId(Runtime::kToPrimitive), 1)); } if_inputisstringwrapper.End(); return Pop(); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StringAddStub* stub = casted_stub(); StringAddFlags flags = stub->flags(); PretenureFlag pretenure_flag = stub->pretenure_flag(); HValue* left = GetParameter(StringAddStub::kLeft); HValue* right = GetParameter(StringAddStub::kRight); // Make sure that both arguments are strings if not known in advance. 
if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) { left = BuildToString(left, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) { right = BuildToString(right, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } return BuildStringAdd(left, right, HAllocationMode(pretenure_flag)); } Handle StringAddStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { ToBooleanStub* stub = casted_stub(); IfBuilder if_true(this); if_true.If(GetParameter(0), stub->types()); if_true.Then(); if_true.Return(graph()->GetConstantTrue()); if_true.Else(); if_true.End(); return graph()->GetConstantFalse(); } Handle ToBooleanStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StoreGlobalStub* stub = casted_stub(); HParameter* value = GetParameter(StoreDescriptor::kValueIndex); if (stub->check_global()) { // Check that the map of the global has not changed: use a placeholder map // that will be replaced later with the global object's map. HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex); HValue* proxy_map = Add(proxy, nullptr, HObjectAccess::ForMap()); HValue* global = Add(proxy_map, nullptr, HObjectAccess::ForPrototype()); HValue* map_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::global_map_placeholder(isolate()))); HValue* expected_map = Add( map_cell, nullptr, HObjectAccess::ForWeakCellValue()); HValue* map = Add(global, nullptr, HObjectAccess::ForMap()); IfBuilder map_check(this); map_check.IfNot(expected_map, map); map_check.ThenDeopt(Deoptimizer::kUnknownMap); map_check.End(); } HValue* weak_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::property_cell_placeholder(isolate()))); HValue* cell = Add(weak_cell, nullptr, HObjectAccess::ForWeakCellValue()); Add
GrowArrayElementsStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<LoadFastElementStub>::BuildCodeStub() {
  LoadKeyedHoleMode hole_mode = casted_stub()->convert_hole_to_undefined()
                                    ? CONVERT_HOLE_TO_UNDEFINED
                                    : NEVER_RETURN_HOLE;

  HInstruction* load = BuildUncheckedMonomorphicElementAccess(
      GetParameter(LoadDescriptor::kReceiverIndex),
      GetParameter(LoadDescriptor::kNameIndex), NULL,
      casted_stub()->is_js_array(), casted_stub()->elements_kind(), LOAD,
      hole_mode, STANDARD_STORE);
  return load;
}


Handle<Code> LoadFastElementStub::GenerateCode() {
  return DoGenerateCode(this);
}


HLoadNamedField* CodeStubGraphBuilderBase::BuildLoadNamedField(
    HValue* object, FieldIndex index) {
  Representation representation = index.is_double()
      ? Representation::Double()
      : Representation::Tagged();
  int offset = index.offset();
  HObjectAccess access = index.is_inobject()
      ? HObjectAccess::ForObservableJSObjectOffset(offset, representation)
      : HObjectAccess::ForBackingStoreOffset(offset, representation);
  if (index.is_double() &&
      (!FLAG_unbox_double_fields || !index.is_inobject())) {
    // Load the heap number.
    object = Add<HLoadNamedField>(
        object, nullptr, access.WithRepresentation(Representation::Tagged()));
    // Load the double value from it.
    access = HObjectAccess::ForHeapNumberValue();
  }
  return Add<HLoadNamedField>(object, nullptr, access);
}


template<>
HValue* CodeStubGraphBuilder<LoadFieldStub>::BuildCodeStub() {
  return BuildLoadNamedField(GetParameter(0), casted_stub()->index());
}


Handle<Code> LoadFieldStub::GenerateCode() { return DoGenerateCode(this); }


template <>
HValue* CodeStubGraphBuilder<ArrayBufferViewLoadFieldStub>::BuildCodeStub() {
  return BuildArrayBufferViewFieldAccessor(GetParameter(0), nullptr,
                                           casted_stub()->index());
}


Handle<Code> ArrayBufferViewLoadFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<LoadConstantStub>::BuildCodeStub() {
  HValue* map = AddLoadMap(GetParameter(0), NULL);
  HObjectAccess descriptors_access = HObjectAccess::ForObservableJSObjectOffset(
      Map::kDescriptorsOffset, Representation::Tagged());
  HValue* descriptors = Add<HLoadNamedField>(map, nullptr, descriptors_access);
  HObjectAccess value_access = HObjectAccess::ForObservableJSObjectOffset(
      DescriptorArray::GetValueOffset(casted_stub()->constant_index()));
  return Add<HLoadNamedField>(descriptors, nullptr, value_access);
}


Handle<Code> LoadConstantStub::GenerateCode() { return DoGenerateCode(this); }


HValue* CodeStubGraphBuilderBase::UnmappedCase(HValue* elements, HValue* key,
                                               HValue* value) {
  HValue* result = NULL;
  HInstruction* backing_store =
      Add<HLoadKeyed>(elements, graph()->GetConstant1(), nullptr, nullptr,
                      FAST_ELEMENTS, ALLOW_RETURN_HOLE);
  Add<HCheckMaps>(backing_store, isolate()->factory()->fixed_array_map());
  HValue* backing_store_length = Add<HLoadNamedField>(
      backing_store, nullptr, HObjectAccess::ForFixedArrayLength());
  IfBuilder in_unmapped_range(this);
  in_unmapped_range.If<HCompareNumericAndBranch>(key, backing_store_length,
                                                 Token::LT);
  in_unmapped_range.Then();
  {
    if (value == NULL) {
      result = Add<HLoadKeyed>(backing_store, key, nullptr, nullptr,
                               FAST_HOLEY_ELEMENTS, NEVER_RETURN_HOLE);
    } else {
      Add<HStoreKeyed>(backing_store, key, value, nullptr,
                       FAST_HOLEY_ELEMENTS);
    }
  }
  in_unmapped_range.ElseDeopt(Deoptimizer::kOutsideOfRange);
  in_unmapped_range.End();
  return result;
}


HValue* CodeStubGraphBuilderBase::EmitKeyedSloppyArguments(HValue* receiver,
                                                           HValue* key,
                                                           HValue*
value) {
  // Mapped arguments are actual arguments. Unmapped arguments are values added
  // to the arguments object after it was created for the call. Mapped
  // arguments are stored in the context at indexes given by elements[key + 2].
  // Unmapped arguments are stored as regular indexed properties in the
  // arguments array, held at elements[1]. See NewSloppyArguments() in
  // runtime.cc for a detailed look at argument object construction.
  //
  // The sloppy arguments elements array has a special format:
  //
  // 0: context
  // 1: unmapped arguments array
  // 2: mapped_index0,
  // 3: mapped_index1,
  // ...
  //
  // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments).
  // If key + 2 >= elements.length then attempt to look in the unmapped
  // arguments array (given by elements[1]) and return the value at key,
  // missing to the runtime if the unmapped arguments array is not a fixed
  // array or if key >= unmapped_arguments_array.length.
  //
  // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value
  // in the unmapped arguments array, as described above. Otherwise, t is a Smi
  // index into the context array given at elements[0]. Return the value at
  // context[t].

  // NULL {value} selects load mode; non-NULL is a keyed store.
  bool is_load = value == NULL;

  key = AddUncasted(key, Representation::Smi());
  // Negative keys are rejected up front by deopting.
  IfBuilder positive_smi(this);
  positive_smi.If(key, graph()->GetConstant0(), Token::LT);
  positive_smi.ThenDeopt(Deoptimizer::kKeyIsNegative);
  positive_smi.End();

  HValue* constant_two = Add(2);
  HValue* elements = AddLoadElements(receiver, nullptr);
  HValue* elements_length = Add(
      elements, nullptr, HObjectAccess::ForFixedArrayLength());
  // adjusted_length = elements.length - 2, the number of mapped slots.
  HValue* adjusted_length = AddUncasted(elements_length, constant_two);
  IfBuilder in_range(this);
  in_range.If(key, adjusted_length, Token::LT);
  in_range.Then();
  {
    // Mapped case: read elements[key + 2].
    HValue* index = AddUncasted(key, constant_two);
    HInstruction* mapped_index = Add(elements, index, nullptr, nullptr,
                                     FAST_HOLEY_ELEMENTS, ALLOW_RETURN_HOLE);

    IfBuilder is_valid(this);
    is_valid.IfNot(mapped_index, graph()->GetConstantHole());
    is_valid.Then();
    {
      // TODO(mvstanton): I'd like to assert from this point, that if the
      // mapped_index is not the hole that it is indeed, a smi. An unnecessary
      // smi check is being emitted.
      HValue* the_context = Add(elements, graph()->GetConstant0(), nullptr,
                                nullptr, FAST_ELEMENTS);
      STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize);
      if (is_load) {
        HValue* result = Add(the_context, mapped_index, nullptr, nullptr,
                             FAST_ELEMENTS, ALLOW_RETURN_HOLE);
        environment()->Push(result);
      } else {
        DCHECK(value != NULL);
        Add(the_context, mapped_index, value, nullptr, FAST_ELEMENTS);
        environment()->Push(value);
      }
    }
    is_valid.Else();
    {
      // Hole in the mapped slot: fall back to the unmapped arguments array.
      HValue* result = UnmappedCase(elements, key, value);
      environment()->Push(is_load ? result : value);
    }
    is_valid.End();
  }
  in_range.Else();
  {
    // Key beyond the mapped range: use the unmapped arguments array.
    HValue* result = UnmappedCase(elements, key, value);
    environment()->Push(is_load ?
result : value);
  }
  in_range.End();
  return environment()->Pop();
}


template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  // Keyed load from a sloppy-arguments object; NULL value selects load mode.
  HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex);
  HValue* key = GetParameter(LoadDescriptor::kNameIndex);
  return EmitKeyedSloppyArguments(receiver, key, NULL);
}


Handle KeyedLoadSloppyArgumentsStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  // Keyed store into a sloppy-arguments object.
  HValue* receiver = GetParameter(StoreDescriptor::kReceiverIndex);
  HValue* key = GetParameter(StoreDescriptor::kNameIndex);
  HValue* value = GetParameter(StoreDescriptor::kValueIndex);
  return EmitKeyedSloppyArguments(receiver, key, value);
}


Handle KeyedStoreSloppyArgumentsStub::GenerateCode() {
  return DoGenerateCode(this);
}


// Stores {value} into the field described by {index}/{representation} on
// {object}. When {transition_to_field} is set and the field is a double, a
// fresh mutable HeapNumber is allocated to box the value.
void CodeStubGraphBuilderBase::BuildStoreNamedField(
    HValue* object, HValue* value, FieldIndex index,
    Representation representation, bool transition_to_field) {
  DCHECK(!index.is_double() || representation.IsDouble());
  int offset = index.offset();
  HObjectAccess access =
      index.is_inobject()
          ? HObjectAccess::ForObservableJSObjectOffset(offset, representation)
          : HObjectAccess::ForBackingStoreOffset(offset, representation);

  if (representation.IsDouble()) {
    if (!FLAG_unbox_double_fields || !index.is_inobject()) {
      HObjectAccess heap_number_access =
          access.WithRepresentation(Representation::Tagged());
      if (transition_to_field) {
        // The store requires a mutable HeapNumber to be allocated.
        NoObservableSideEffectsScope no_side_effects(this);
        HInstruction* heap_number_size = Add(HeapNumber::kSize);

        // TODO(hpayer): Allocation site pretenuring support.
        HInstruction* heap_number =
            Add(heap_number_size, HType::HeapObject(), NOT_TENURED,
                MUTABLE_HEAP_NUMBER_TYPE);
        AddStoreMapConstant(heap_number,
                            isolate()->factory()->mutable_heap_number_map());
        Add(heap_number, HObjectAccess::ForHeapNumberValue(), value);
        // Store the new mutable heap number into the object.
access = heap_number_access;
        value = heap_number;
      } else {
        // Load the heap number.
        object = Add(object, nullptr, heap_number_access);
        // Store the double value into it.
        access = HObjectAccess::ForHeapNumberValue();
      }
    }
  } else if (representation.IsHeapObject()) {
    BuildCheckHeapObject(value);
  }

  Add(object, access, value, INITIALIZING_STORE);
}


template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  // Parameters: 0 = receiver, 2 = value (no transition for a plain store).
  BuildStoreNamedField(GetParameter(0), GetParameter(2), casted_stub()->index(),
                       casted_stub()->representation(), false);
  return GetParameter(2);
}


Handle StoreFieldStub::GenerateCode() { return DoGenerateCode(this); }


template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  HValue* object = GetParameter(StoreTransitionHelper::ReceiverIndex());

  // The cases below deliberately cascade: extending storage implies storing
  // map and value, which implies updating the map.
  switch (casted_stub()->store_mode()) {
    case StoreTransitionStub::ExtendStorageAndStoreMapAndValue: {
      HValue* properties = Add(
          object, nullptr, HObjectAccess::ForPropertiesPointer());
      HValue* length = AddLoadFixedArrayLength(properties);
      HValue* delta = Add(static_cast(JSObject::kFieldsAdded));
      HValue* new_capacity = AddUncasted(length, delta);

      // Grow properties array.
      ElementsKind kind = FAST_ELEMENTS;
      Add(new_capacity,
          Add((Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) >>
              ElementsKindToShiftSize(kind)));

      // Reuse this code for properties backing store allocation.
      HValue* new_properties =
          BuildAllocateAndInitializeArray(kind, new_capacity);

      BuildCopyProperties(properties, new_properties, length, new_capacity);

      Add(object, HObjectAccess::ForPropertiesPointer(), new_properties);
    }
    // Fall through.
    case StoreTransitionStub::StoreMapAndValue:
      // Store the new value into the "extended" object.
      BuildStoreNamedField(
          object, GetParameter(StoreTransitionHelper::ValueIndex()),
          casted_stub()->index(), casted_stub()->representation(), true);
    // Fall through.

    case StoreTransitionStub::StoreMapOnly:
      // And finally update the map.
Add(object, HObjectAccess::ForMap(),
          GetParameter(StoreTransitionHelper::MapIndex()));
      break;
  }

  return GetParameter(StoreTransitionHelper::ValueIndex());
}


Handle StoreTransitionStub::GenerateCode() { return DoGenerateCode(this); }


template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  // Monomorphic fast-elements store; the helper emits the checked access.
  BuildUncheckedMonomorphicElementAccess(
      GetParameter(StoreDescriptor::kReceiverIndex),
      GetParameter(StoreDescriptor::kNameIndex),
      GetParameter(StoreDescriptor::kValueIndex), casted_stub()->is_js_array(),
      casted_stub()->elements_kind(), STORE, NEVER_RETURN_HOLE,
      casted_stub()->store_mode());

  return GetParameter(2);
}


Handle StoreFastElementStub::GenerateCode() { return DoGenerateCode(this); }


template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  info()->MarkAsSavesCallerDoubles();

  BuildTransitionElementsKind(GetParameter(0), GetParameter(1),
                              casted_stub()->from_kind(),
                              casted_stub()->to_kind(),
                              casted_stub()->is_js_array());

  return GetParameter(0);
}


Handle TransitionElementsKindStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  // Allocate a fresh HeapNumber in new space and install its map.
  HValue* result =
      Add(Add(HeapNumber::kSize), HType::HeapNumber(), NOT_TENURED,
          HEAP_NUMBER_TYPE);
  AddStoreMapConstant(result, isolate()->factory()->heap_number_map());
  return result;
}


Handle AllocateHeapNumberStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  // Same shape as above, but allocates a *mutable* heap number (used to box
  // double fields).
  HValue* result =
      Add(Add(HeapNumber::kSize), HType::HeapObject(), NOT_TENURED,
          MUTABLE_HEAP_NUMBER_TYPE);
  AddStoreMapConstant(result, isolate()->factory()->mutable_heap_number_map());
  return result;
}


Handle AllocateMutableHeapNumberStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  // Raw new-space allocation of the size given by parameter 0.
  HValue* result =
      Add(GetParameter(0), HType::Tagged(), NOT_TENURED, JS_OBJECT_TYPE);
  return result;
}


Handle AllocateInNewSpaceStub::GenerateCode() { return DoGenerateCode(this); }


HValue*
CodeStubGraphBuilderBase::BuildArrayConstructor(
    ElementsKind kind, AllocationSiteOverrideMode override_mode,
    ArgumentClass argument_class) {
  // Dispatch on the argument-count class (0 / 1 / N) of the Array call.
  HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor);
  HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite);
  JSArrayBuilder array_builder(this, kind, alloc_site, constructor,
                               override_mode);
  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      // This stub is very performance sensitive, the generated code must be
      // tuned so that it doesn't build and eager frame.
      info()->MarkMustNotHaveEagerFrame();
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }

  return result;
}


// Same dispatch as BuildArrayConstructor, but for the internal Array
// constructor, which carries no allocation site.
HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor(
    ElementsKind kind, ArgumentClass argument_class) {
  HValue* constructor = GetParameter(
      InternalArrayConstructorStubBase::kConstructor);
  JSArrayBuilder array_builder(this, kind, constructor);

  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      // This stub is very performance sensitive, the generated code must be
      // tuned so that it doesn't build and eager frame.
      info()->MarkMustNotHaveEagerFrame();
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }
  return result;
}


HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor(
    JSArrayBuilder* array_builder) {
  // Smi check and range check on the input arg.
HValue* constant_one = graph()->GetConstant1();
  HValue* constant_zero = graph()->GetConstant0();

  // The single argument (the requested length) is read off the caller's
  // arguments area.
  HInstruction* elements = Add(false);
  HInstruction* argument = Add(
      elements, constant_one, constant_zero);

  return BuildAllocateArrayFromLength(array_builder, argument);
}


HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor(
    JSArrayBuilder* array_builder, ElementsKind kind) {
  // Insert a bounds check because the number of arguments might exceed
  // the kInitialMaxFastElementArray limit. This cannot happen for code
  // that was parsed, but calling via Array.apply(thisArg, [...]) might
  // trigger it.
  HValue* length = GetArgumentsLength();
  HConstant* max_alloc_length =
      Add(JSArray::kInitialMaxFastElementArray);
  HValue* checked_length = Add(length, max_alloc_length);

  // We need to fill with the hole if it's a smi array in the multi-argument
  // case because we might have to bail out while copying arguments into
  // the array because they aren't compatible with a smi array.
  // If it's a double array, no problem, and if it's fast then no
  // problem either because doubles are boxed.
  //
  // TODO(mvstanton): consider an instruction to memset fill the array
  // with zero in this case instead.
  JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind)
      ? JSArrayBuilder::FILL_WITH_HOLE
      : JSArrayBuilder::DONT_FILL_WITH_HOLE;
  HValue* new_object = array_builder->AllocateArray(checked_length,
                                                    max_alloc_length,
                                                    checked_length,
                                                    fill_mode);
  HValue* elements = array_builder->GetElementsLocation();
  DCHECK(elements != NULL);

  // Now populate the elements correctly.
LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
  HValue* start = graph()->GetConstant0();
  HValue* key = builder.BeginBody(start, checked_length, Token::LT);
  // Copy each incoming argument into the freshly allocated elements store.
  HInstruction* argument_elements = Add(false);
  HInstruction* argument = Add(
      argument_elements, checked_length, key);

  Add(elements, key, argument, nullptr, kind);
  builder.EndBody();
  return new_object;
}


template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, NONE);
}


Handle ArrayNoArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, SINGLE);
}


Handle ArraySingleArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, MULTIPLE);
}


Handle ArrayNArgumentsConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, NONE);
}


Handle InternalArrayNoArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, SINGLE);
}


Handle InternalArraySingleArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder::
    BuildCodeStub() {
  ElementsKind kind =
casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, MULTIPLE);
}


Handle InternalArrayNArgumentsConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() {
  // Compare-against-nil IC: builds the nil comparison and materializes a
  // boolean (or undefined when only the true branch is reachable... see the
  // ternary below, which reflects the continuation's reachability).
  Isolate* isolate = graph()->isolate();
  CompareNilICStub* stub = casted_stub();
  HIfContinuation continuation;
  Handle sentinel_map(isolate->heap()->meta_map());
  Type* type = stub->GetType(zone(), sentinel_map);
  BuildCompareNil(GetParameter(0), type, &continuation, kEmbedMapsViaWeakCells);
  IfBuilder if_nil(this, &continuation);
  if_nil.Then();
  if (continuation.IsFalseReachable()) {
    if_nil.Else();
    if_nil.Return(graph()->GetConstantFalse());
  }
  if_nil.End();
  return continuation.IsTrueReachable() ? graph()->GetConstantTrue()
                                        : graph()->GetConstantUndefined();
}


Handle CompareNilICStub::GenerateCode() { return DoGenerateCode(this); }


template <>
HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() {
  BinaryOpICState state = casted_stub()->state();

  HValue* left = GetParameter(BinaryOpICStub::kLeft);
  HValue* right = GetParameter(BinaryOpICStub::kRight);

  Type* left_type = state.GetLeftType();
  Type* right_type = state.GetRightType();
  Type* result_type = state.GetResultType();

  DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) &&
         (state.HasSideEffects() || !result_type->Is(Type::None())));

  HValue* result = NULL;
  HAllocationMode allocation_mode(NOT_TENURED);
  // Special-case ADD where one side *might* be a string but neither side is
  // known to be one.
  if (state.op() == Token::ADD &&
      (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) &&
      !left_type->Is(Type::String()) && !right_type->Is(Type::String())) {
    // For the generic add stub a fast case for string addition is performance
    // critical.
if (left_type->Maybe(Type::String())) {
      // Left side might be a string: branch on its actual type and build the
      // operation with the refined left type on the string path.
      IfBuilder if_leftisstring(this);
      if_leftisstring.If(left);
      if_leftisstring.Then();
      {
        Push(BuildBinaryOperation(state.op(), left, right, Type::String(zone()),
                                  right_type, result_type,
                                  state.fixed_right_arg(), allocation_mode,
                                  state.strength()));
      }
      if_leftisstring.Else();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right, left_type, right_type, result_type,
            state.fixed_right_arg(), allocation_mode, state.strength()));
      }
      if_leftisstring.End();
      result = Pop();
    } else {
      // Mirror image: only the right side might be a string.
      IfBuilder if_rightisstring(this);
      if_rightisstring.If(right);
      if_rightisstring.Then();
      {
        Push(BuildBinaryOperation(state.op(), left, right, left_type,
                                  Type::String(zone()), result_type,
                                  state.fixed_right_arg(), allocation_mode,
                                  state.strength()));
      }
      if_rightisstring.Else();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right, left_type, right_type, result_type,
            state.fixed_right_arg(), allocation_mode, state.strength()));
      }
      if_rightisstring.End();
      result = Pop();
    }
  } else {
    result = BuildBinaryOperation(
        state.op(), left, right, left_type, right_type, result_type,
        state.fixed_right_arg(), allocation_mode, state.strength());
  }

  // If we encounter a generic argument, the number conversion is
  // observable, thus we cannot afford to bail out after the fact.
if (!state.HasSideEffects()) {
    result = EnforceNumberType(result, result_type);
  }

  return result;
}


Handle BinaryOpICStub::GenerateCode() { return DoGenerateCode(this); }


template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  // Like the plain BinaryOpIC stub, but allocation is steered by the
  // provided allocation site instead of NOT_TENURED.
  BinaryOpICState state = casted_stub()->state();

  HValue* allocation_site = GetParameter(
      BinaryOpWithAllocationSiteStub::kAllocationSite);
  HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft);
  HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight);

  Type* left_type = state.GetLeftType();
  Type* right_type = state.GetRightType();
  Type* result_type = state.GetResultType();
  HAllocationMode allocation_mode(allocation_site);

  return BuildBinaryOperation(state.op(), left, right, left_type, right_type,
                              result_type, state.fixed_right_arg(),
                              allocation_mode, state.strength());
}


Handle BinaryOpWithAllocationSiteStub::GenerateCode() {
  return DoGenerateCode(this);
}


// Converts {input} to a string. With {convert} false it only asserts
// string-ness; otherwise smis, strings, primitives and objects are each
// handled on their own branch.
HValue* CodeStubGraphBuilderBase::BuildToString(HValue* input, bool convert) {
  if (!convert) return BuildCheckString(input);
  IfBuilder if_inputissmi(this);
  HValue* inputissmi = if_inputissmi.If(input);
  if_inputissmi.Then();
  {
    // Convert the input smi to a string.
    Push(BuildNumberToString(input, Type::SignedSmall()));
  }
  if_inputissmi.Else();
  {
    HValue* input_map =
        Add(input, inputissmi, HObjectAccess::ForMap());
    HValue* input_instance_type = Add(
        input_map, inputissmi, HObjectAccess::ForMapInstanceType());
    IfBuilder if_inputisstring(this);
    if_inputisstring.If(
        input_instance_type, Add(FIRST_NONSTRING_TYPE), Token::LT);
    if_inputisstring.Then();
    {
      // The input is already a string.
      Push(input);
    }
    if_inputisstring.Else();
    {
      // Convert to primitive first (if necessary), see
      // ES6 section 12.7.3 The Addition operator.
      IfBuilder if_inputisprimitive(this);
      STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE);
      if_inputisprimitive.If(
          input_instance_type, Add(LAST_PRIMITIVE_TYPE), Token::LTE);
      if_inputisprimitive.Then();
      {
        // The input is already a primitive.
Push(input);
      }
      if_inputisprimitive.Else();
      {
        // Convert the input to a primitive.
        Push(BuildToPrimitive(input, input_map));
      }
      if_inputisprimitive.End();
      // Convert the primitive to a string value.
      ToStringDescriptor descriptor(isolate());
      ToStringStub stub(isolate());
      HValue* values[] = {context(), Pop()};
      Push(AddUncasted(
          Add(stub.GetCode()), 0, descriptor,
          Vector(values, arraysize(values))));
    }
    if_inputisstring.End();
  }
  if_inputissmi.End();
  return Pop();
}


HValue* CodeStubGraphBuilderBase::BuildToPrimitive(HValue* input,
                                                   HValue* input_map) {
  // Get the native context of the caller.
  HValue* native_context = BuildGetNativeContext();

  // Determine the initial map of the %ObjectPrototype%.
  HValue* object_function_prototype_map =
      Add(native_context, nullptr,
          HObjectAccess::ForContextSlot(
              Context::OBJECT_FUNCTION_PROTOTYPE_MAP_INDEX));

  // Determine the initial map of the %StringPrototype%.
  HValue* string_function_prototype_map =
      Add(native_context, nullptr,
          HObjectAccess::ForContextSlot(
              Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));

  // Determine the initial map of the String function.
  HValue* string_function = Add(
      native_context, nullptr,
      HObjectAccess::ForContextSlot(Context::STRING_FUNCTION_INDEX));
  HValue* string_function_initial_map = Add(
      string_function, nullptr, HObjectAccess::ForPrototypeOrInitialMap());

  // Determine the map of the [[Prototype]] of {input}.
  HValue* input_prototype =
      Add(input_map, nullptr, HObjectAccess::ForPrototype());
  HValue* input_prototype_map =
      Add(input_prototype, nullptr, HObjectAccess::ForMap());

  // For string wrappers (JSValue instances with [[StringData]] internal
  // fields), we can shortcirciut the ToPrimitive if
  //
  // (a) the {input} map matches the initial map of the String function,
  // (b) the {input} [[Prototype]] is the unmodified %StringPrototype% (i.e.
  //     no one monkey-patched toString, @@toPrimitive or valueOf), and
  // (c) the %ObjectPrototype% (i.e.
// the [[Prototype]] of the
  //     %StringPrototype%) is also unmodified, that is no one sneaked a
  //     @@toPrimitive into the %ObjectPrototype%.
  //
  // If all these assumptions hold, we can just take the [[StringData]] value
  // and return it.
  // TODO(bmeurer): This just repairs a regression introduced by removing the
  // weird (and broken) intrinsic %_IsStringWrapperSafeForDefaultValue, which
  // was intendend to something similar to this, although less efficient and
  // wrong in the presence of @@toPrimitive. Long-term we might want to move
  // into the direction of having a ToPrimitiveStub that can do common cases
  // while staying in JavaScript land (i.e. not going to C++).
  IfBuilder if_inputisstringwrapper(this);
  if_inputisstringwrapper.If(
      input_map, string_function_initial_map);
  if_inputisstringwrapper.And();
  if_inputisstringwrapper.If(
      input_prototype_map, string_function_prototype_map);
  if_inputisstringwrapper.And();
  if_inputisstringwrapper.If(
      Add(Add(input_prototype_map, nullptr, HObjectAccess::ForPrototype()),
          nullptr, HObjectAccess::ForMap()),
      object_function_prototype_map);
  if_inputisstringwrapper.Then();
  {
    Push(BuildLoadNamedField(
        input, FieldIndex::ForInObjectOffset(JSValue::kValueOffset)));
  }
  if_inputisstringwrapper.Else();
  {
    // TODO(bmeurer): Add support for fast ToPrimitive conversion using
    // a dedicated ToPrimitiveStub.
    Add(input);
    Push(Add(Runtime::FunctionForId(Runtime::kToPrimitive), 1));
  }
  if_inputisstringwrapper.End();
  return Pop();
}


template <>
HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() {
  StringAddStub* stub = casted_stub();
  StringAddFlags flags = stub->flags();
  PretenureFlag pretenure_flag = stub->pretenure_flag();

  HValue* left = GetParameter(StringAddStub::kLeft);
  HValue* right = GetParameter(StringAddStub::kRight);

  // Make sure that both arguments are strings if not known in advance.
if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) {
    left = BuildToString(left,
                         (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT);
  }
  if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) {
    right = BuildToString(right,
                          (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT);
  }

  return BuildStringAdd(left, right, HAllocationMode(pretenure_flag));
}


Handle StringAddStub::GenerateCode() { return DoGenerateCode(this); }


template <>
HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() {
  // ToBoolean: returns true on the matching-types branch, false otherwise.
  ToBooleanStub* stub = casted_stub();
  IfBuilder if_true(this);
  if_true.If(GetParameter(0), stub->types());
  if_true.Then();
  if_true.Return(graph()->GetConstantTrue());
  if_true.Else();
  if_true.End();
  return graph()->GetConstantFalse();
}


Handle ToBooleanStub::GenerateCode() { return DoGenerateCode(this); }


template <>
HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() {
  StoreGlobalStub* stub = casted_stub();
  HParameter* value = GetParameter(StoreDescriptor::kValueIndex);
  if (stub->check_global()) {
    // Check that the map of the global has not changed: use a placeholder map
    // that will be replaced later with the global object's map.
    HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex);
    HValue* proxy_map = Add(proxy, nullptr, HObjectAccess::ForMap());
    HValue* global = Add(proxy_map, nullptr, HObjectAccess::ForPrototype());
    HValue* map_cell = Add(isolate()->factory()->NewWeakCell(
        StoreGlobalStub::global_map_placeholder(isolate())));
    HValue* expected_map = Add(
        map_cell, nullptr, HObjectAccess::ForWeakCellValue());
    HValue* map = Add(global, nullptr, HObjectAccess::ForMap());
    IfBuilder map_check(this);
    map_check.IfNot(expected_map, map);
    map_check.ThenDeopt(Deoptimizer::kUnknownMap);
    map_check.End();
  }

  // The cell holding the property is embedded via a (placeholder) weak cell.
  HValue* weak_cell = Add(isolate()->factory()->NewWeakCell(
      StoreGlobalStub::property_cell_placeholder(isolate())));
  HValue* cell = Add(weak_cell, nullptr, HObjectAccess::ForWeakCellValue());
  Add
LoadFastElementStub::GenerateCode() { return DoGenerateCode(this); }


// NOTE(review): from here on the file repeats definitions that already appear
// earlier (BuildLoadNamedField, UnmappedCase, EmitKeyedSloppyArguments, ...).
// This looks like an extraction/concatenation artifact — verify against the
// upstream source before building.
HLoadNamedField* CodeStubGraphBuilderBase::BuildLoadNamedField(
    HValue* object, FieldIndex index) {
  // Double fields may be stored boxed in a HeapNumber; unbox when needed.
  Representation representation = index.is_double()
      ? Representation::Double()
      : Representation::Tagged();
  int offset = index.offset();
  HObjectAccess access =
      index.is_inobject()
          ? HObjectAccess::ForObservableJSObjectOffset(offset, representation)
          : HObjectAccess::ForBackingStoreOffset(offset, representation);
  if (index.is_double() &&
      (!FLAG_unbox_double_fields || !index.is_inobject())) {
    // Load the heap number.
    object = Add(
        object, nullptr, access.WithRepresentation(Representation::Tagged()));
    // Load the double value from it.
    access = HObjectAccess::ForHeapNumberValue();
  }
  return Add(object, nullptr, access);
}


template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  return BuildLoadNamedField(GetParameter(0), casted_stub()->index());
}


Handle LoadFieldStub::GenerateCode() { return DoGenerateCode(this); }


template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  return BuildArrayBufferViewFieldAccessor(GetParameter(0), nullptr,
                                           casted_stub()->index());
}


Handle ArrayBufferViewLoadFieldStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder::BuildCodeStub() {
  HValue* map = AddLoadMap(GetParameter(0), NULL);
  HObjectAccess descriptors_access = HObjectAccess::ForObservableJSObjectOffset(
      Map::kDescriptorsOffset, Representation::Tagged());
  HValue* descriptors = Add(map, nullptr, descriptors_access);
  HObjectAccess value_access = HObjectAccess::ForObservableJSObjectOffset(
      DescriptorArray::GetValueOffset(casted_stub()->constant_index()));
  return Add(descriptors, nullptr, value_access);
}


Handle LoadConstantStub::GenerateCode() { return DoGenerateCode(this); }


HValue* CodeStubGraphBuilderBase::UnmappedCase(HValue* elements, HValue* key,
                                               HValue* value) {
  HValue* result = NULL;
  HInstruction* backing_store =
      Add(elements, graph()->GetConstant1(), nullptr, nullptr,
FAST_ELEMENTS, ALLOW_RETURN_HOLE);
  Add(backing_store, isolate()->factory()->fixed_array_map());
  HValue* backing_store_length = Add(
      backing_store, nullptr, HObjectAccess::ForFixedArrayLength());
  IfBuilder in_unmapped_range(this);
  in_unmapped_range.If(key, backing_store_length, Token::LT);
  in_unmapped_range.Then();
  {
    if (value == NULL) {
      result = Add(backing_store, key, nullptr, nullptr, FAST_HOLEY_ELEMENTS,
                   NEVER_RETURN_HOLE);
    } else {
      Add(backing_store, key, value, nullptr, FAST_HOLEY_ELEMENTS);
    }
  }
  in_unmapped_range.ElseDeopt(Deoptimizer::kOutsideOfRange);
  in_unmapped_range.End();
  return result;
}


HValue* CodeStubGraphBuilderBase::EmitKeyedSloppyArguments(HValue* receiver,
                                                           HValue* key,
                                                           HValue* value) {
  // Mapped arguments are actual arguments. Unmapped arguments are values added
  // to the arguments object after it was created for the call. Mapped
  // arguments are stored in the context at indexes given by elements[key + 2].
  // Unmapped arguments are stored as regular indexed properties in the
  // arguments array, held at elements[1]. See NewSloppyArguments() in
  // runtime.cc for a detailed look at argument object construction.
  //
  // The sloppy arguments elements array has a special format:
  //
  // 0: context
  // 1: unmapped arguments array
  // 2: mapped_index0,
  // 3: mapped_index1,
  // ...
  //
  // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments).
  // If key + 2 >= elements.length then attempt to look in the unmapped
  // arguments array (given by elements[1]) and return the value at key,
  // missing to the runtime if the unmapped arguments array is not a fixed
  // array or if key >= unmapped_arguments_array.length.
  //
  // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value
  // in the unmapped arguments array, as described above. Otherwise, t is a Smi
  // index into the context array given at elements[0]. Return the value at
  // context[t].

  // NULL {value} selects load mode; non-NULL is a keyed store.
  bool is_load = value == NULL;

  key = AddUncasted(key, Representation::Smi());
  IfBuilder positive_smi(this);
  positive_smi.If(key, graph()->GetConstant0(), Token::LT);
  positive_smi.ThenDeopt(Deoptimizer::kKeyIsNegative);
  positive_smi.End();

  HValue* constant_two = Add(2);
  HValue* elements = AddLoadElements(receiver, nullptr);
  HValue* elements_length = Add(
      elements, nullptr, HObjectAccess::ForFixedArrayLength());
  HValue* adjusted_length = AddUncasted(elements_length, constant_two);
  IfBuilder in_range(this);
  in_range.If(key, adjusted_length, Token::LT);
  in_range.Then();
  {
    HValue* index = AddUncasted(key, constant_two);
    HInstruction* mapped_index = Add(elements, index, nullptr, nullptr,
                                     FAST_HOLEY_ELEMENTS, ALLOW_RETURN_HOLE);

    IfBuilder is_valid(this);
    is_valid.IfNot(mapped_index, graph()->GetConstantHole());
    is_valid.Then();
    {
      // TODO(mvstanton): I'd like to assert from this point, that if the
      // mapped_index is not the hole that it is indeed, a smi. An unnecessary
      // smi check is being emitted.
      HValue* the_context = Add(elements, graph()->GetConstant0(), nullptr,
                                nullptr, FAST_ELEMENTS);
      STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize);
      if (is_load) {
        HValue* result = Add(the_context, mapped_index, nullptr, nullptr,
                             FAST_ELEMENTS, ALLOW_RETURN_HOLE);
        environment()->Push(result);
      } else {
        DCHECK(value != NULL);
        Add(the_context, mapped_index, value, nullptr, FAST_ELEMENTS);
        environment()->Push(value);
      }
    }
    is_valid.Else();
    {
      HValue* result = UnmappedCase(elements, key, value);
      environment()->Push(is_load ? result : value);
    }
    is_valid.End();
  }
  in_range.Else();
  {
    HValue* result = UnmappedCase(elements, key, value);
    environment()->Push(is_load ?
result : value); } in_range.End(); return environment()->Pop(); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex); HValue* key = GetParameter(LoadDescriptor::kNameIndex); return EmitKeyedSloppyArguments(receiver, key, NULL); } Handle KeyedLoadSloppyArgumentsStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* receiver = GetParameter(StoreDescriptor::kReceiverIndex); HValue* key = GetParameter(StoreDescriptor::kNameIndex); HValue* value = GetParameter(StoreDescriptor::kValueIndex); return EmitKeyedSloppyArguments(receiver, key, value); } Handle KeyedStoreSloppyArgumentsStub::GenerateCode() { return DoGenerateCode(this); } void CodeStubGraphBuilderBase::BuildStoreNamedField( HValue* object, HValue* value, FieldIndex index, Representation representation, bool transition_to_field) { DCHECK(!index.is_double() || representation.IsDouble()); int offset = index.offset(); HObjectAccess access = index.is_inobject() ? HObjectAccess::ForObservableJSObjectOffset(offset, representation) : HObjectAccess::ForBackingStoreOffset(offset, representation); if (representation.IsDouble()) { if (!FLAG_unbox_double_fields || !index.is_inobject()) { HObjectAccess heap_number_access = access.WithRepresentation(Representation::Tagged()); if (transition_to_field) { // The store requires a mutable HeapNumber to be allocated. NoObservableSideEffectsScope no_side_effects(this); HInstruction* heap_number_size = Add(HeapNumber::kSize); // TODO(hpayer): Allocation site pretenuring support. HInstruction* heap_number = Add(heap_number_size, HType::HeapObject(), NOT_TENURED, MUTABLE_HEAP_NUMBER_TYPE); AddStoreMapConstant(heap_number, isolate()->factory()->mutable_heap_number_map()); Add(heap_number, HObjectAccess::ForHeapNumberValue(), value); // Store the new mutable heap number into the object. 
access = heap_number_access; value = heap_number; } else { // Load the heap number. object = Add(object, nullptr, heap_number_access); // Store the double value into it. access = HObjectAccess::ForHeapNumberValue(); } } } else if (representation.IsHeapObject()) { BuildCheckHeapObject(value); } Add(object, access, value, INITIALIZING_STORE); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BuildStoreNamedField(GetParameter(0), GetParameter(2), casted_stub()->index(), casted_stub()->representation(), false); return GetParameter(2); } Handle StoreFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* object = GetParameter(StoreTransitionHelper::ReceiverIndex()); switch (casted_stub()->store_mode()) { case StoreTransitionStub::ExtendStorageAndStoreMapAndValue: { HValue* properties = Add( object, nullptr, HObjectAccess::ForPropertiesPointer()); HValue* length = AddLoadFixedArrayLength(properties); HValue* delta = Add(static_cast(JSObject::kFieldsAdded)); HValue* new_capacity = AddUncasted(length, delta); // Grow properties array. ElementsKind kind = FAST_ELEMENTS; Add(new_capacity, Add((Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) >> ElementsKindToShiftSize(kind))); // Reuse this code for properties backing store allocation. HValue* new_properties = BuildAllocateAndInitializeArray(kind, new_capacity); BuildCopyProperties(properties, new_properties, length, new_capacity); Add(object, HObjectAccess::ForPropertiesPointer(), new_properties); } // Fall through. case StoreTransitionStub::StoreMapAndValue: // Store the new value into the "extended" object. BuildStoreNamedField( object, GetParameter(StoreTransitionHelper::ValueIndex()), casted_stub()->index(), casted_stub()->representation(), true); // Fall through. case StoreTransitionStub::StoreMapOnly: // And finally update the map. 
Add(object, HObjectAccess::ForMap(), GetParameter(StoreTransitionHelper::MapIndex())); break; } return GetParameter(StoreTransitionHelper::ValueIndex()); } Handle StoreTransitionStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BuildUncheckedMonomorphicElementAccess( GetParameter(StoreDescriptor::kReceiverIndex), GetParameter(StoreDescriptor::kNameIndex), GetParameter(StoreDescriptor::kValueIndex), casted_stub()->is_js_array(), casted_stub()->elements_kind(), STORE, NEVER_RETURN_HOLE, casted_stub()->store_mode()); return GetParameter(2); } Handle StoreFastElementStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { info()->MarkAsSavesCallerDoubles(); BuildTransitionElementsKind(GetParameter(0), GetParameter(1), casted_stub()->from_kind(), casted_stub()->to_kind(), casted_stub()->is_js_array()); return GetParameter(0); } Handle TransitionElementsKindStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(Add(HeapNumber::kSize), HType::HeapNumber(), NOT_TENURED, HEAP_NUMBER_TYPE); AddStoreMapConstant(result, isolate()->factory()->heap_number_map()); return result; } Handle AllocateHeapNumberStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(Add(HeapNumber::kSize), HType::HeapObject(), NOT_TENURED, MUTABLE_HEAP_NUMBER_TYPE); AddStoreMapConstant(result, isolate()->factory()->mutable_heap_number_map()); return result; } Handle AllocateMutableHeapNumberStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(GetParameter(0), HType::Tagged(), NOT_TENURED, JS_OBJECT_TYPE); return result; } Handle AllocateInNewSpaceStub::GenerateCode() { return DoGenerateCode(this); } HValue* 
CodeStubGraphBuilderBase::BuildArrayConstructor( ElementsKind kind, AllocationSiteOverrideMode override_mode, ArgumentClass argument_class) { HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor); HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite); JSArrayBuilder array_builder(this, kind, alloc_site, constructor, override_mode); HValue* result = NULL; switch (argument_class) { case NONE: // This stub is very performance sensitive, the generated code must be // tuned so that it doesn't build and eager frame. info()->MarkMustNotHaveEagerFrame(); result = array_builder.AllocateEmptyArray(); break; case SINGLE: result = BuildArraySingleArgumentConstructor(&array_builder); break; case MULTIPLE: result = BuildArrayNArgumentsConstructor(&array_builder, kind); break; } return result; } HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor( ElementsKind kind, ArgumentClass argument_class) { HValue* constructor = GetParameter( InternalArrayConstructorStubBase::kConstructor); JSArrayBuilder array_builder(this, kind, constructor); HValue* result = NULL; switch (argument_class) { case NONE: // This stub is very performance sensitive, the generated code must be // tuned so that it doesn't build and eager frame. info()->MarkMustNotHaveEagerFrame(); result = array_builder.AllocateEmptyArray(); break; case SINGLE: result = BuildArraySingleArgumentConstructor(&array_builder); break; case MULTIPLE: result = BuildArrayNArgumentsConstructor(&array_builder, kind); break; } return result; } HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor( JSArrayBuilder* array_builder) { // Smi check and range check on the input arg. 
HValue* constant_one = graph()->GetConstant1(); HValue* constant_zero = graph()->GetConstant0(); HInstruction* elements = Add(false); HInstruction* argument = Add( elements, constant_one, constant_zero); return BuildAllocateArrayFromLength(array_builder, argument); } HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor( JSArrayBuilder* array_builder, ElementsKind kind) { // Insert a bounds check because the number of arguments might exceed // the kInitialMaxFastElementArray limit. This cannot happen for code // that was parsed, but calling via Array.apply(thisArg, [...]) might // trigger it. HValue* length = GetArgumentsLength(); HConstant* max_alloc_length = Add(JSArray::kInitialMaxFastElementArray); HValue* checked_length = Add(length, max_alloc_length); // We need to fill with the hole if it's a smi array in the multi-argument // case because we might have to bail out while copying arguments into // the array because they aren't compatible with a smi array. // If it's a double array, no problem, and if it's fast then no // problem either because doubles are boxed. // // TODO(mvstanton): consider an instruction to memset fill the array // with zero in this case instead. JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind) ? JSArrayBuilder::FILL_WITH_HOLE : JSArrayBuilder::DONT_FILL_WITH_HOLE; HValue* new_object = array_builder->AllocateArray(checked_length, max_alloc_length, checked_length, fill_mode); HValue* elements = array_builder->GetElementsLocation(); DCHECK(elements != NULL); // Now populate the elements correctly. 
LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement); HValue* start = graph()->GetConstant0(); HValue* key = builder.BeginBody(start, checked_length, Token::LT); HInstruction* argument_elements = Add(false); HInstruction* argument = Add( argument_elements, checked_length, key); Add(elements, key, argument, nullptr, kind); builder.EndBody(); return new_object; } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, NONE); } Handle ArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, SINGLE); } Handle ArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, MULTIPLE); } Handle ArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, NONE); } Handle InternalArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, SINGLE); } Handle InternalArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = 
casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, MULTIPLE); } Handle InternalArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { Isolate* isolate = graph()->isolate(); CompareNilICStub* stub = casted_stub(); HIfContinuation continuation; Handle sentinel_map(isolate->heap()->meta_map()); Type* type = stub->GetType(zone(), sentinel_map); BuildCompareNil(GetParameter(0), type, &continuation, kEmbedMapsViaWeakCells); IfBuilder if_nil(this, &continuation); if_nil.Then(); if (continuation.IsFalseReachable()) { if_nil.Else(); if_nil.Return(graph()->GetConstantFalse()); } if_nil.End(); return continuation.IsTrueReachable() ? graph()->GetConstantTrue() : graph()->GetConstantUndefined(); } Handle CompareNilICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { BinaryOpICState state = casted_stub()->state(); HValue* left = GetParameter(BinaryOpICStub::kLeft); HValue* right = GetParameter(BinaryOpICStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) && (state.HasSideEffects() || !result_type->Is(Type::None()))); HValue* result = NULL; HAllocationMode allocation_mode(NOT_TENURED); if (state.op() == Token::ADD && (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) && !left_type->Is(Type::String()) && !right_type->Is(Type::String())) { // For the generic add stub a fast case for string addition is performance // critical. 
if (left_type->Maybe(Type::String())) { IfBuilder if_leftisstring(this); if_leftisstring.If(left); if_leftisstring.Then(); { Push(BuildBinaryOperation(state.op(), left, right, Type::String(zone()), right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_leftisstring.Else(); { Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_leftisstring.End(); result = Pop(); } else { IfBuilder if_rightisstring(this); if_rightisstring.If(right); if_rightisstring.Then(); { Push(BuildBinaryOperation(state.op(), left, right, left_type, Type::String(zone()), result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_rightisstring.Else(); { Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_rightisstring.End(); result = Pop(); } } else { result = BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } // If we encounter a generic argument, the number conversion is // observable, thus we cannot afford to bail out after the fact. 
if (!state.HasSideEffects()) { result = EnforceNumberType(result, result_type); } return result; } Handle BinaryOpICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BinaryOpICState state = casted_stub()->state(); HValue* allocation_site = GetParameter( BinaryOpWithAllocationSiteStub::kAllocationSite); HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft); HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); HAllocationMode allocation_mode(allocation_site); return BuildBinaryOperation(state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } Handle BinaryOpWithAllocationSiteStub::GenerateCode() { return DoGenerateCode(this); } HValue* CodeStubGraphBuilderBase::BuildToString(HValue* input, bool convert) { if (!convert) return BuildCheckString(input); IfBuilder if_inputissmi(this); HValue* inputissmi = if_inputissmi.If(input); if_inputissmi.Then(); { // Convert the input smi to a string. Push(BuildNumberToString(input, Type::SignedSmall())); } if_inputissmi.Else(); { HValue* input_map = Add(input, inputissmi, HObjectAccess::ForMap()); HValue* input_instance_type = Add( input_map, inputissmi, HObjectAccess::ForMapInstanceType()); IfBuilder if_inputisstring(this); if_inputisstring.If( input_instance_type, Add(FIRST_NONSTRING_TYPE), Token::LT); if_inputisstring.Then(); { // The input is already a string. Push(input); } if_inputisstring.Else(); { // Convert to primitive first (if necessary), see // ES6 section 12.7.3 The Addition operator. IfBuilder if_inputisprimitive(this); STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE); if_inputisprimitive.If( input_instance_type, Add(LAST_PRIMITIVE_TYPE), Token::LTE); if_inputisprimitive.Then(); { // The input is already a primitive. 
Push(input); } if_inputisprimitive.Else(); { // Convert the input to a primitive. Push(BuildToPrimitive(input, input_map)); } if_inputisprimitive.End(); // Convert the primitive to a string value. ToStringDescriptor descriptor(isolate()); ToStringStub stub(isolate()); HValue* values[] = {context(), Pop()}; Push(AddUncasted( Add(stub.GetCode()), 0, descriptor, Vector(values, arraysize(values)))); } if_inputisstring.End(); } if_inputissmi.End(); return Pop(); } HValue* CodeStubGraphBuilderBase::BuildToPrimitive(HValue* input, HValue* input_map) { // Get the native context of the caller. HValue* native_context = BuildGetNativeContext(); // Determine the initial map of the %ObjectPrototype%. HValue* object_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::OBJECT_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the %StringPrototype%. HValue* string_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the String function. HValue* string_function = Add( native_context, nullptr, HObjectAccess::ForContextSlot(Context::STRING_FUNCTION_INDEX)); HValue* string_function_initial_map = Add( string_function, nullptr, HObjectAccess::ForPrototypeOrInitialMap()); // Determine the map of the [[Prototype]] of {input}. HValue* input_prototype = Add(input_map, nullptr, HObjectAccess::ForPrototype()); HValue* input_prototype_map = Add(input_prototype, nullptr, HObjectAccess::ForMap()); // For string wrappers (JSValue instances with [[StringData]] internal // fields), we can shortcirciut the ToPrimitive if // // (a) the {input} map matches the initial map of the String function, // (b) the {input} [[Prototype]] is the unmodified %StringPrototype% (i.e. // no one monkey-patched toString, @@toPrimitive or valueOf), and // (c) the %ObjectPrototype% (i.e. 
the [[Prototype]] of the // %StringPrototype%) is also unmodified, that is no one sneaked a // @@toPrimitive into the %ObjectPrototype%. // // If all these assumptions hold, we can just take the [[StringData]] value // and return it. // TODO(bmeurer): This just repairs a regression introduced by removing the // weird (and broken) intrinsic %_IsStringWrapperSafeForDefaultValue, which // was intendend to something similar to this, although less efficient and // wrong in the presence of @@toPrimitive. Long-term we might want to move // into the direction of having a ToPrimitiveStub that can do common cases // while staying in JavaScript land (i.e. not going to C++). IfBuilder if_inputisstringwrapper(this); if_inputisstringwrapper.If( input_map, string_function_initial_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( input_prototype_map, string_function_prototype_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( Add(Add(input_prototype_map, nullptr, HObjectAccess::ForPrototype()), nullptr, HObjectAccess::ForMap()), object_function_prototype_map); if_inputisstringwrapper.Then(); { Push(BuildLoadNamedField( input, FieldIndex::ForInObjectOffset(JSValue::kValueOffset))); } if_inputisstringwrapper.Else(); { // TODO(bmeurer): Add support for fast ToPrimitive conversion using // a dedicated ToPrimitiveStub. Add(input); Push(Add(Runtime::FunctionForId(Runtime::kToPrimitive), 1)); } if_inputisstringwrapper.End(); return Pop(); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StringAddStub* stub = casted_stub(); StringAddFlags flags = stub->flags(); PretenureFlag pretenure_flag = stub->pretenure_flag(); HValue* left = GetParameter(StringAddStub::kLeft); HValue* right = GetParameter(StringAddStub::kRight); // Make sure that both arguments are strings if not known in advance. 
if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) { left = BuildToString(left, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) { right = BuildToString(right, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } return BuildStringAdd(left, right, HAllocationMode(pretenure_flag)); } Handle StringAddStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { ToBooleanStub* stub = casted_stub(); IfBuilder if_true(this); if_true.If(GetParameter(0), stub->types()); if_true.Then(); if_true.Return(graph()->GetConstantTrue()); if_true.Else(); if_true.End(); return graph()->GetConstantFalse(); } Handle ToBooleanStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StoreGlobalStub* stub = casted_stub(); HParameter* value = GetParameter(StoreDescriptor::kValueIndex); if (stub->check_global()) { // Check that the map of the global has not changed: use a placeholder map // that will be replaced later with the global object's map. HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex); HValue* proxy_map = Add(proxy, nullptr, HObjectAccess::ForMap()); HValue* global = Add(proxy_map, nullptr, HObjectAccess::ForPrototype()); HValue* map_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::global_map_placeholder(isolate()))); HValue* expected_map = Add( map_cell, nullptr, HObjectAccess::ForWeakCellValue()); HValue* map = Add(global, nullptr, HObjectAccess::ForMap()); IfBuilder map_check(this); map_check.IfNot(expected_map, map); map_check.ThenDeopt(Deoptimizer::kUnknownMap); map_check.End(); } HValue* weak_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::property_cell_placeholder(isolate()))); HValue* cell = Add(weak_cell, nullptr, HObjectAccess::ForWeakCellValue()); Add
LoadFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { return BuildArrayBufferViewFieldAccessor(GetParameter(0), nullptr, casted_stub()->index()); } Handle ArrayBufferViewLoadFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* map = AddLoadMap(GetParameter(0), NULL); HObjectAccess descriptors_access = HObjectAccess::ForObservableJSObjectOffset( Map::kDescriptorsOffset, Representation::Tagged()); HValue* descriptors = Add(map, nullptr, descriptors_access); HObjectAccess value_access = HObjectAccess::ForObservableJSObjectOffset( DescriptorArray::GetValueOffset(casted_stub()->constant_index())); return Add(descriptors, nullptr, value_access); } Handle LoadConstantStub::GenerateCode() { return DoGenerateCode(this); } HValue* CodeStubGraphBuilderBase::UnmappedCase(HValue* elements, HValue* key, HValue* value) { HValue* result = NULL; HInstruction* backing_store = Add(elements, graph()->GetConstant1(), nullptr, nullptr, FAST_ELEMENTS, ALLOW_RETURN_HOLE); Add(backing_store, isolate()->factory()->fixed_array_map()); HValue* backing_store_length = Add( backing_store, nullptr, HObjectAccess::ForFixedArrayLength()); IfBuilder in_unmapped_range(this); in_unmapped_range.If(key, backing_store_length, Token::LT); in_unmapped_range.Then(); { if (value == NULL) { result = Add(backing_store, key, nullptr, nullptr, FAST_HOLEY_ELEMENTS, NEVER_RETURN_HOLE); } else { Add(backing_store, key, value, nullptr, FAST_HOLEY_ELEMENTS); } } in_unmapped_range.ElseDeopt(Deoptimizer::kOutsideOfRange); in_unmapped_range.End(); return result; } HValue* CodeStubGraphBuilderBase::EmitKeyedSloppyArguments(HValue* receiver, HValue* key, HValue* value) { // Mapped arguments are actual arguments. Unmapped arguments are values added // to the arguments object after it was created for the call. 
Mapped arguments // are stored in the context at indexes given by elements[key + 2]. Unmapped // arguments are stored as regular indexed properties in the arguments array, // held at elements[1]. See NewSloppyArguments() in runtime.cc for a detailed // look at argument object construction. // // The sloppy arguments elements array has a special format: // // 0: context // 1: unmapped arguments array // 2: mapped_index0, // 3: mapped_index1, // ... // // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments). // If key + 2 >= elements.length then attempt to look in the unmapped // arguments array (given by elements[1]) and return the value at key, missing // to the runtime if the unmapped arguments array is not a fixed array or if // key >= unmapped_arguments_array.length. // // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value // in the unmapped arguments array, as described above. Otherwise, t is a Smi // index into the context array given at elements[0]. Return the value at // context[t]. 
bool is_load = value == NULL; key = AddUncasted(key, Representation::Smi()); IfBuilder positive_smi(this); positive_smi.If(key, graph()->GetConstant0(), Token::LT); positive_smi.ThenDeopt(Deoptimizer::kKeyIsNegative); positive_smi.End(); HValue* constant_two = Add(2); HValue* elements = AddLoadElements(receiver, nullptr); HValue* elements_length = Add( elements, nullptr, HObjectAccess::ForFixedArrayLength()); HValue* adjusted_length = AddUncasted(elements_length, constant_two); IfBuilder in_range(this); in_range.If(key, adjusted_length, Token::LT); in_range.Then(); { HValue* index = AddUncasted(key, constant_two); HInstruction* mapped_index = Add(elements, index, nullptr, nullptr, FAST_HOLEY_ELEMENTS, ALLOW_RETURN_HOLE); IfBuilder is_valid(this); is_valid.IfNot(mapped_index, graph()->GetConstantHole()); is_valid.Then(); { // TODO(mvstanton): I'd like to assert from this point, that if the // mapped_index is not the hole that it is indeed, a smi. An unnecessary // smi check is being emitted. HValue* the_context = Add(elements, graph()->GetConstant0(), nullptr, nullptr, FAST_ELEMENTS); STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize); if (is_load) { HValue* result = Add(the_context, mapped_index, nullptr, nullptr, FAST_ELEMENTS, ALLOW_RETURN_HOLE); environment()->Push(result); } else { DCHECK(value != NULL); Add(the_context, mapped_index, value, nullptr, FAST_ELEMENTS); environment()->Push(value); } } is_valid.Else(); { HValue* result = UnmappedCase(elements, key, value); environment()->Push(is_load ? result : value); } is_valid.End(); } in_range.Else(); { HValue* result = UnmappedCase(elements, key, value); environment()->Push(is_load ? 
result : value); } in_range.End(); return environment()->Pop(); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex); HValue* key = GetParameter(LoadDescriptor::kNameIndex); return EmitKeyedSloppyArguments(receiver, key, NULL); } Handle KeyedLoadSloppyArgumentsStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* receiver = GetParameter(StoreDescriptor::kReceiverIndex); HValue* key = GetParameter(StoreDescriptor::kNameIndex); HValue* value = GetParameter(StoreDescriptor::kValueIndex); return EmitKeyedSloppyArguments(receiver, key, value); } Handle KeyedStoreSloppyArgumentsStub::GenerateCode() { return DoGenerateCode(this); } void CodeStubGraphBuilderBase::BuildStoreNamedField( HValue* object, HValue* value, FieldIndex index, Representation representation, bool transition_to_field) { DCHECK(!index.is_double() || representation.IsDouble()); int offset = index.offset(); HObjectAccess access = index.is_inobject() ? HObjectAccess::ForObservableJSObjectOffset(offset, representation) : HObjectAccess::ForBackingStoreOffset(offset, representation); if (representation.IsDouble()) { if (!FLAG_unbox_double_fields || !index.is_inobject()) { HObjectAccess heap_number_access = access.WithRepresentation(Representation::Tagged()); if (transition_to_field) { // The store requires a mutable HeapNumber to be allocated. NoObservableSideEffectsScope no_side_effects(this); HInstruction* heap_number_size = Add(HeapNumber::kSize); // TODO(hpayer): Allocation site pretenuring support. HInstruction* heap_number = Add(heap_number_size, HType::HeapObject(), NOT_TENURED, MUTABLE_HEAP_NUMBER_TYPE); AddStoreMapConstant(heap_number, isolate()->factory()->mutable_heap_number_map()); Add(heap_number, HObjectAccess::ForHeapNumberValue(), value); // Store the new mutable heap number into the object. 
access = heap_number_access; value = heap_number; } else { // Load the heap number. object = Add(object, nullptr, heap_number_access); // Store the double value into it. access = HObjectAccess::ForHeapNumberValue(); } } } else if (representation.IsHeapObject()) { BuildCheckHeapObject(value); } Add(object, access, value, INITIALIZING_STORE); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BuildStoreNamedField(GetParameter(0), GetParameter(2), casted_stub()->index(), casted_stub()->representation(), false); return GetParameter(2); } Handle StoreFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* object = GetParameter(StoreTransitionHelper::ReceiverIndex()); switch (casted_stub()->store_mode()) { case StoreTransitionStub::ExtendStorageAndStoreMapAndValue: { HValue* properties = Add( object, nullptr, HObjectAccess::ForPropertiesPointer()); HValue* length = AddLoadFixedArrayLength(properties); HValue* delta = Add(static_cast(JSObject::kFieldsAdded)); HValue* new_capacity = AddUncasted(length, delta); // Grow properties array. ElementsKind kind = FAST_ELEMENTS; Add(new_capacity, Add((Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) >> ElementsKindToShiftSize(kind))); // Reuse this code for properties backing store allocation. HValue* new_properties = BuildAllocateAndInitializeArray(kind, new_capacity); BuildCopyProperties(properties, new_properties, length, new_capacity); Add(object, HObjectAccess::ForPropertiesPointer(), new_properties); } // Fall through. case StoreTransitionStub::StoreMapAndValue: // Store the new value into the "extended" object. BuildStoreNamedField( object, GetParameter(StoreTransitionHelper::ValueIndex()), casted_stub()->index(), casted_stub()->representation(), true); // Fall through. case StoreTransitionStub::StoreMapOnly: // And finally update the map. 
Add(object, HObjectAccess::ForMap(), GetParameter(StoreTransitionHelper::MapIndex())); break; } return GetParameter(StoreTransitionHelper::ValueIndex()); } Handle StoreTransitionStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BuildUncheckedMonomorphicElementAccess( GetParameter(StoreDescriptor::kReceiverIndex), GetParameter(StoreDescriptor::kNameIndex), GetParameter(StoreDescriptor::kValueIndex), casted_stub()->is_js_array(), casted_stub()->elements_kind(), STORE, NEVER_RETURN_HOLE, casted_stub()->store_mode()); return GetParameter(2); } Handle StoreFastElementStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { info()->MarkAsSavesCallerDoubles(); BuildTransitionElementsKind(GetParameter(0), GetParameter(1), casted_stub()->from_kind(), casted_stub()->to_kind(), casted_stub()->is_js_array()); return GetParameter(0); } Handle TransitionElementsKindStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(Add(HeapNumber::kSize), HType::HeapNumber(), NOT_TENURED, HEAP_NUMBER_TYPE); AddStoreMapConstant(result, isolate()->factory()->heap_number_map()); return result; } Handle AllocateHeapNumberStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(Add(HeapNumber::kSize), HType::HeapObject(), NOT_TENURED, MUTABLE_HEAP_NUMBER_TYPE); AddStoreMapConstant(result, isolate()->factory()->mutable_heap_number_map()); return result; } Handle AllocateMutableHeapNumberStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(GetParameter(0), HType::Tagged(), NOT_TENURED, JS_OBJECT_TYPE); return result; } Handle AllocateInNewSpaceStub::GenerateCode() { return DoGenerateCode(this); } HValue* 
// NOTE(review): garbled extraction -- template arguments stripped, functions
// collapsed onto single lines; code preserved byte-for-byte, comments only.
// CodeStubGraphBuilderBase::BuildArrayConstructor and
// BuildInternalArrayConstructor: shared dispatch on ArgumentClass
// (NONE / SINGLE / MULTIPLE).  The NONE case marks the stub as must-not-have
// an eager frame (performance-sensitive, per the inline comment) and
// allocates an empty array; SINGLE/MULTIPLE defer to the helpers below.
// The internal variant differs only in taking no allocation site /
// override mode.
CodeStubGraphBuilderBase::BuildArrayConstructor( ElementsKind kind, AllocationSiteOverrideMode override_mode, ArgumentClass argument_class) { HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor); HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite); JSArrayBuilder array_builder(this, kind, alloc_site, constructor, override_mode); HValue* result = NULL; switch (argument_class) { case NONE: // This stub is very performance sensitive, the generated code must be // tuned so that it doesn't build and eager frame. info()->MarkMustNotHaveEagerFrame(); result = array_builder.AllocateEmptyArray(); break; case SINGLE: result = BuildArraySingleArgumentConstructor(&array_builder); break; case MULTIPLE: result = BuildArrayNArgumentsConstructor(&array_builder, kind); break; } return result; } HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor( ElementsKind kind, ArgumentClass argument_class) { HValue* constructor = GetParameter( InternalArrayConstructorStubBase::kConstructor); JSArrayBuilder array_builder(this, kind, constructor); HValue* result = NULL; switch (argument_class) { case NONE: // This stub is very performance sensitive, the generated code must be // tuned so that it doesn't build and eager frame. info()->MarkMustNotHaveEagerFrame(); result = array_builder.AllocateEmptyArray(); break; case SINGLE: result = BuildArraySingleArgumentConstructor(&array_builder); break; case MULTIPLE: result = BuildArrayNArgumentsConstructor(&array_builder, kind); break; } return result; } HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor( JSArrayBuilder* array_builder) { // Smi check and range check on the input arg. 
// BuildArraySingleArgumentConstructor body: reads argument 0 from the
// (stripped) arguments-elements access and allocates an array of that
// length.  BuildArrayNArgumentsConstructor: bounds-checks the argument
// count against JSArray::kInitialMaxFastElementArray (reachable via
// Array.apply, per the inline comment) and, for smi element kinds, fills
// with the hole so a mid-copy bailout leaves a consistent array.
HValue* constant_one = graph()->GetConstant1(); HValue* constant_zero = graph()->GetConstant0(); HInstruction* elements = Add(false); HInstruction* argument = Add( elements, constant_one, constant_zero); return BuildAllocateArrayFromLength(array_builder, argument); } HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor( JSArrayBuilder* array_builder, ElementsKind kind) { // Insert a bounds check because the number of arguments might exceed // the kInitialMaxFastElementArray limit. This cannot happen for code // that was parsed, but calling via Array.apply(thisArg, [...]) might // trigger it. HValue* length = GetArgumentsLength(); HConstant* max_alloc_length = Add(JSArray::kInitialMaxFastElementArray); HValue* checked_length = Add(length, max_alloc_length); // We need to fill with the hole if it's a smi array in the multi-argument // case because we might have to bail out while copying arguments into // the array because they aren't compatible with a smi array. // If it's a double array, no problem, and if it's fast then no // problem either because doubles are boxed. // // TODO(mvstanton): consider an instruction to memset fill the array // with zero in this case instead. JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind) ? JSArrayBuilder::FILL_WITH_HOLE : JSArrayBuilder::DONT_FILL_WITH_HOLE; HValue* new_object = array_builder->AllocateArray(checked_length, max_alloc_length, checked_length, fill_mode); HValue* elements = array_builder->GetElementsLocation(); DCHECK(elements != NULL); // Now populate the elements correctly. 
// Element-copy loop (post-increment LoopBuilder over [0, checked_length))
// copying incoming arguments into the new backing store, then the six
// specializations for Array{No,Single,N}ArgumentConstructorStub and
// InternalArray{No,Single}ArgumentConstructorStub (each reads the elements
// kind -- and, for the non-internal ones, the override mode -- from the
// casted stub and defers to the shared builders above).
LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement); HValue* start = graph()->GetConstant0(); HValue* key = builder.BeginBody(start, checked_length, Token::LT); HInstruction* argument_elements = Add(false); HInstruction* argument = Add( argument_elements, checked_length, key); Add(elements, key, argument, nullptr, kind); builder.EndBody(); return new_object; } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, NONE); } Handle ArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, SINGLE); } Handle ArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, MULTIPLE); } Handle ArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, NONE); } Handle InternalArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, SINGLE); } Handle InternalArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = 
// NOTE(review): garbled extraction -- template arguments stripped, functions
// collapsed onto single lines; code preserved byte-for-byte, comments only.
// Tail of the InternalArrayNArgumentsConstructorStub specialization, then
// the CompareNilICStub builder: BuildCompareNil produces an HIfContinuation
// (maps embedded via weak cells); the true branch yields true, the false
// branch (if reachable) returns false, otherwise undefined.  Then the start
// of the BinaryOpICStub builder: the DCHECK documents that neither operand
// type may be None unless the op has side effects.
casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, MULTIPLE); } Handle InternalArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { Isolate* isolate = graph()->isolate(); CompareNilICStub* stub = casted_stub(); HIfContinuation continuation; Handle sentinel_map(isolate->heap()->meta_map()); Type* type = stub->GetType(zone(), sentinel_map); BuildCompareNil(GetParameter(0), type, &continuation, kEmbedMapsViaWeakCells); IfBuilder if_nil(this, &continuation); if_nil.Then(); if (continuation.IsFalseReachable()) { if_nil.Else(); if_nil.Return(graph()->GetConstantFalse()); } if_nil.End(); return continuation.IsTrueReachable() ? graph()->GetConstantTrue() : graph()->GetConstantUndefined(); } Handle CompareNilICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { BinaryOpICState state = casted_stub()->state(); HValue* left = GetParameter(BinaryOpICStub::kLeft); HValue* right = GetParameter(BinaryOpICStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) && (state.HasSideEffects() || !result_type->Is(Type::None()))); HValue* result = NULL; HAllocationMode allocation_mode(NOT_TENURED); if (state.op() == Token::ADD && (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) && !left_type->Is(Type::String()) && !right_type->Is(Type::String())) { // For the generic add stub a fast case for string addition is performance // critical. 
// BinaryOpIC ADD fast path: when exactly one side *may* be a string, an
// IfBuilder tests which side is a string at runtime and emits
// BuildBinaryOperation twice -- once with that side's type narrowed to
// String, once with the unnarrowed types -- merging the results via
// Push/Pop.  The final else handles the fully general case.
if (left_type->Maybe(Type::String())) { IfBuilder if_leftisstring(this); if_leftisstring.If(left); if_leftisstring.Then(); { Push(BuildBinaryOperation(state.op(), left, right, Type::String(zone()), right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_leftisstring.Else(); { Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_leftisstring.End(); result = Pop(); } else { IfBuilder if_rightisstring(this); if_rightisstring.If(right); if_rightisstring.Then(); { Push(BuildBinaryOperation(state.op(), left, right, left_type, Type::String(zone()), result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_rightisstring.Else(); { Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_rightisstring.End(); result = Pop(); } } else { result = BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } // If we encounter a generic argument, the number conversion is // observable, thus we cannot afford to bail out after the fact. 
// BinaryOpIC tail: EnforceNumberType only when the op has no side effects
// (per the inline comment, number conversion is observable otherwise).
// Then BinaryOpWithAllocationSiteStub (same as BinaryOpIC but with an
// allocation-site-driven HAllocationMode), and the start of
// CodeStubGraphBuilderBase::BuildToString: smis go through
// BuildNumberToString; instance types below FIRST_NONSTRING_TYPE pass
// through unchanged; other primitives (and, after BuildToPrimitive,
// non-primitives) are routed to the ToStringStub -- cites ES6 12.7.3.
if (!state.HasSideEffects()) { result = EnforceNumberType(result, result_type); } return result; } Handle BinaryOpICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BinaryOpICState state = casted_stub()->state(); HValue* allocation_site = GetParameter( BinaryOpWithAllocationSiteStub::kAllocationSite); HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft); HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); HAllocationMode allocation_mode(allocation_site); return BuildBinaryOperation(state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } Handle BinaryOpWithAllocationSiteStub::GenerateCode() { return DoGenerateCode(this); } HValue* CodeStubGraphBuilderBase::BuildToString(HValue* input, bool convert) { if (!convert) return BuildCheckString(input); IfBuilder if_inputissmi(this); HValue* inputissmi = if_inputissmi.If(input); if_inputissmi.Then(); { // Convert the input smi to a string. Push(BuildNumberToString(input, Type::SignedSmall())); } if_inputissmi.Else(); { HValue* input_map = Add(input, inputissmi, HObjectAccess::ForMap()); HValue* input_instance_type = Add( input_map, inputissmi, HObjectAccess::ForMapInstanceType()); IfBuilder if_inputisstring(this); if_inputisstring.If( input_instance_type, Add(FIRST_NONSTRING_TYPE), Token::LT); if_inputisstring.Then(); { // The input is already a string. Push(input); } if_inputisstring.Else(); { // Convert to primitive first (if necessary), see // ES6 section 12.7.3 The Addition operator. IfBuilder if_inputisprimitive(this); STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE); if_inputisprimitive.If( input_instance_type, Add(LAST_PRIMITIVE_TYPE), Token::LTE); if_inputisprimitive.Then(); { // The input is already a primitive. 
// NOTE(review): garbled extraction -- template arguments stripped, functions
// collapsed onto single lines; code preserved byte-for-byte, comments only.
// BuildToString tail: primitives pass through or go via BuildToPrimitive,
// then the primitive is converted by calling the ToStringStub's code object.
// Then BuildToPrimitive begins: it loads from the native context the maps of
// %ObjectPrototype% and %StringPrototype% and the String function's initial
// map, plus the map of {input}'s [[Prototype]], to set up the
// string-wrapper shortcut described in the (collapsed) comment below.
Push(input); } if_inputisprimitive.Else(); { // Convert the input to a primitive. Push(BuildToPrimitive(input, input_map)); } if_inputisprimitive.End(); // Convert the primitive to a string value. ToStringDescriptor descriptor(isolate()); ToStringStub stub(isolate()); HValue* values[] = {context(), Pop()}; Push(AddUncasted( Add(stub.GetCode()), 0, descriptor, Vector(values, arraysize(values)))); } if_inputisstring.End(); } if_inputissmi.End(); return Pop(); } HValue* CodeStubGraphBuilderBase::BuildToPrimitive(HValue* input, HValue* input_map) { // Get the native context of the caller. HValue* native_context = BuildGetNativeContext(); // Determine the initial map of the %ObjectPrototype%. HValue* object_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::OBJECT_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the %StringPrototype%. HValue* string_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the String function. HValue* string_function = Add( native_context, nullptr, HObjectAccess::ForContextSlot(Context::STRING_FUNCTION_INDEX)); HValue* string_function_initial_map = Add( string_function, nullptr, HObjectAccess::ForPrototypeOrInitialMap()); // Determine the map of the [[Prototype]] of {input}. HValue* input_prototype = Add(input_map, nullptr, HObjectAccess::ForPrototype()); HValue* input_prototype_map = Add(input_prototype, nullptr, HObjectAccess::ForMap()); // For string wrappers (JSValue instances with [[StringData]] internal // fields), we can shortcirciut the ToPrimitive if // // (a) the {input} map matches the initial map of the String function, // (b) the {input} [[Prototype]] is the unmodified %StringPrototype% (i.e. // no one monkey-patched toString, @@toPrimitive or valueOf), and // (c) the %ObjectPrototype% (i.e. 
// BuildToPrimitive body: three And-ed map checks (input map == String
// function initial map; prototype map == %StringPrototype% map;
// prototype-of-prototype map == %ObjectPrototype% map).  If all hold, the
// wrapped [[StringData]] value is loaded directly from JSValue::kValueOffset;
// otherwise it falls back to the Runtime::kToPrimitive call.  Then the
// StringAddStub builder begins (flags / pretenure read from the stub).
the [[Prototype]] of the // %StringPrototype%) is also unmodified, that is no one sneaked a // @@toPrimitive into the %ObjectPrototype%. // // If all these assumptions hold, we can just take the [[StringData]] value // and return it. // TODO(bmeurer): This just repairs a regression introduced by removing the // weird (and broken) intrinsic %_IsStringWrapperSafeForDefaultValue, which // was intendend to something similar to this, although less efficient and // wrong in the presence of @@toPrimitive. Long-term we might want to move // into the direction of having a ToPrimitiveStub that can do common cases // while staying in JavaScript land (i.e. not going to C++). IfBuilder if_inputisstringwrapper(this); if_inputisstringwrapper.If( input_map, string_function_initial_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( input_prototype_map, string_function_prototype_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( Add(Add(input_prototype_map, nullptr, HObjectAccess::ForPrototype()), nullptr, HObjectAccess::ForMap()), object_function_prototype_map); if_inputisstringwrapper.Then(); { Push(BuildLoadNamedField( input, FieldIndex::ForInObjectOffset(JSValue::kValueOffset))); } if_inputisstringwrapper.Else(); { // TODO(bmeurer): Add support for fast ToPrimitive conversion using // a dedicated ToPrimitiveStub. Add(input); Push(Add(Runtime::FunctionForId(Runtime::kToPrimitive), 1)); } if_inputisstringwrapper.End(); return Pop(); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StringAddStub* stub = casted_stub(); StringAddFlags flags = stub->flags(); PretenureFlag pretenure_flag = stub->pretenure_flag(); HValue* left = GetParameter(StringAddStub::kLeft); HValue* right = GetParameter(StringAddStub::kRight); // Make sure that both arguments are strings if not known in advance. 
// StringAddStub tail: each side is run through BuildToString only when its
// STRING_ADD_CHECK_* flag is set, with conversion enabled by
// STRING_ADD_CONVERT.  Then the ToBooleanStub builder (IfBuilder over the
// stub's recorded types) and the start of the StoreGlobalStub builder,
// which optionally verifies the global object's map against a
// weak-cell-held placeholder map and deopts with kUnknownMap on mismatch.
// NOTE(review): this physical line ends in a truncated "Add" and the next
// line jumps into ArrayBufferViewLoadFieldStub::GenerateCode -- the
// remainder of the StoreGlobal builder (and anything between) is missing
// from the extraction; restore from upstream.
if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) { left = BuildToString(left, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) { right = BuildToString(right, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } return BuildStringAdd(left, right, HAllocationMode(pretenure_flag)); } Handle StringAddStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { ToBooleanStub* stub = casted_stub(); IfBuilder if_true(this); if_true.If(GetParameter(0), stub->types()); if_true.Then(); if_true.Return(graph()->GetConstantTrue()); if_true.Else(); if_true.End(); return graph()->GetConstantFalse(); } Handle ToBooleanStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StoreGlobalStub* stub = casted_stub(); HParameter* value = GetParameter(StoreDescriptor::kValueIndex); if (stub->check_global()) { // Check that the map of the global has not changed: use a placeholder map // that will be replaced later with the global object's map. HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex); HValue* proxy_map = Add(proxy, nullptr, HObjectAccess::ForMap()); HValue* global = Add(proxy_map, nullptr, HObjectAccess::ForPrototype()); HValue* map_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::global_map_placeholder(isolate()))); HValue* expected_map = Add( map_cell, nullptr, HObjectAccess::ForWeakCellValue()); HValue* map = Add(global, nullptr, HObjectAccess::ForMap()); IfBuilder map_check(this); map_check.IfNot(expected_map, map); map_check.ThenDeopt(Deoptimizer::kUnknownMap); map_check.End(); } HValue* weak_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::property_cell_placeholder(isolate()))); HValue* cell = Add(weak_cell, nullptr, HObjectAccess::ForWeakCellValue()); Add
// NOTE(review): garbled extraction -- template arguments stripped, functions
// collapsed onto single lines; code preserved byte-for-byte, comments only.
// Resumes after an extraction gap (previous line ends mid-StoreGlobal):
// ArrayBufferViewLoadFieldStub::GenerateCode (its builder is not visible
// here), the LoadConstantStub builder (loads a constant out of the map's
// descriptor array at the stub's constant_index), and UnmappedCase: reads
// the unmapped arguments backing store at elements[1], checks its map and
// bounds-checks {key} against its length (deopting with kOutsideOfRange),
// then loads or stores depending on whether {value} is NULL.  Then
// EmitKeyedSloppyArguments begins with its long explanatory comment.
ArrayBufferViewLoadFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* map = AddLoadMap(GetParameter(0), NULL); HObjectAccess descriptors_access = HObjectAccess::ForObservableJSObjectOffset( Map::kDescriptorsOffset, Representation::Tagged()); HValue* descriptors = Add(map, nullptr, descriptors_access); HObjectAccess value_access = HObjectAccess::ForObservableJSObjectOffset( DescriptorArray::GetValueOffset(casted_stub()->constant_index())); return Add(descriptors, nullptr, value_access); } Handle LoadConstantStub::GenerateCode() { return DoGenerateCode(this); } HValue* CodeStubGraphBuilderBase::UnmappedCase(HValue* elements, HValue* key, HValue* value) { HValue* result = NULL; HInstruction* backing_store = Add(elements, graph()->GetConstant1(), nullptr, nullptr, FAST_ELEMENTS, ALLOW_RETURN_HOLE); Add(backing_store, isolate()->factory()->fixed_array_map()); HValue* backing_store_length = Add( backing_store, nullptr, HObjectAccess::ForFixedArrayLength()); IfBuilder in_unmapped_range(this); in_unmapped_range.If(key, backing_store_length, Token::LT); in_unmapped_range.Then(); { if (value == NULL) { result = Add(backing_store, key, nullptr, nullptr, FAST_HOLEY_ELEMENTS, NEVER_RETURN_HOLE); } else { Add(backing_store, key, value, nullptr, FAST_HOLEY_ELEMENTS); } } in_unmapped_range.ElseDeopt(Deoptimizer::kOutsideOfRange); in_unmapped_range.End(); return result; } HValue* CodeStubGraphBuilderBase::EmitKeyedSloppyArguments(HValue* receiver, HValue* key, HValue* value) { // Mapped arguments are actual arguments. Unmapped arguments are values added // to the arguments object after it was created for the call. Mapped arguments // are stored in the context at indexes given by elements[key + 2]. Unmapped // arguments are stored as regular indexed properties in the arguments array, // held at elements[1]. See NewSloppyArguments() in runtime.cc for a detailed // look at argument object construction. 
// NOTE(review): the next physical line begins with "//" so, as collapsed,
// the ENTIRE line is a comment -- yet it visibly contains code (the
// negative-key deopt, the elements-length load, and the in-range check of
// EmitKeyedSloppyArguments).  This is an extraction artifact: the original
// interleaved comment and code lines were fused onto one commented line.
// TODO: restore the code portion from upstream before compiling.
// // The sloppy arguments elements array has a special format: // // 0: context // 1: unmapped arguments array // 2: mapped_index0, // 3: mapped_index1, // ... // // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments). // If key + 2 >= elements.length then attempt to look in the unmapped // arguments array (given by elements[1]) and return the value at key, missing // to the runtime if the unmapped arguments array is not a fixed array or if // key >= unmapped_arguments_array.length. // // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value // in the unmapped arguments array, as described above. Otherwise, t is a Smi // index into the context array given at elements[0]. Return the value at // context[t]. bool is_load = value == NULL; key = AddUncasted(key, Representation::Smi()); IfBuilder positive_smi(this); positive_smi.If(key, graph()->GetConstant0(), Token::LT); positive_smi.ThenDeopt(Deoptimizer::kKeyIsNegative); positive_smi.End(); HValue* constant_two = Add(2); HValue* elements = AddLoadElements(receiver, nullptr); HValue* elements_length = Add( elements, nullptr, HObjectAccess::ForFixedArrayLength()); HValue* adjusted_length = AddUncasted(elements_length, constant_two); IfBuilder in_range(this); in_range.If(key, adjusted_length, Token::LT); in_range.Then(); { HValue* index = AddUncasted(key, constant_two); HInstruction* mapped_index = Add(elements, index, nullptr, nullptr, FAST_HOLEY_ELEMENTS, ALLOW_RETURN_HOLE); IfBuilder is_valid(this); is_valid.IfNot(mapped_index, graph()->GetConstantHole()); is_valid.Then(); { // TODO(mvstanton): I'd like to assert from this point, that if the // mapped_index is not the hole that it is indeed, a smi. An unnecessary // smi check is being emitted. 
// EmitKeyedSloppyArguments tail: a non-hole mapped_index selects a slot in
// the context at elements[0] (load or store); the hole and the
// out-of-range cases both fall back to UnmappedCase.  Then the
// KeyedLoad/KeyedStoreSloppyArgumentsStub specializations and the start of
// BuildStoreNamedField.  NOTE(review): from here on the chunk duplicates
// definitions already present earlier (ODR violations if compiled as-is);
// presumably an extraction artifact -- verify against upstream.
HValue* the_context = Add(elements, graph()->GetConstant0(), nullptr, nullptr, FAST_ELEMENTS); STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize); if (is_load) { HValue* result = Add(the_context, mapped_index, nullptr, nullptr, FAST_ELEMENTS, ALLOW_RETURN_HOLE); environment()->Push(result); } else { DCHECK(value != NULL); Add(the_context, mapped_index, value, nullptr, FAST_ELEMENTS); environment()->Push(value); } } is_valid.Else(); { HValue* result = UnmappedCase(elements, key, value); environment()->Push(is_load ? result : value); } is_valid.End(); } in_range.Else(); { HValue* result = UnmappedCase(elements, key, value); environment()->Push(is_load ? result : value); } in_range.End(); return environment()->Pop(); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex); HValue* key = GetParameter(LoadDescriptor::kNameIndex); return EmitKeyedSloppyArguments(receiver, key, NULL); } Handle KeyedLoadSloppyArgumentsStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* receiver = GetParameter(StoreDescriptor::kReceiverIndex); HValue* key = GetParameter(StoreDescriptor::kNameIndex); HValue* value = GetParameter(StoreDescriptor::kValueIndex); return EmitKeyedSloppyArguments(receiver, key, value); } Handle KeyedStoreSloppyArgumentsStub::GenerateCode() { return DoGenerateCode(this); } void CodeStubGraphBuilderBase::BuildStoreNamedField( HValue* object, HValue* value, FieldIndex index, Representation representation, bool transition_to_field) { DCHECK(!index.is_double() || representation.IsDouble()); int offset = index.offset(); HObjectAccess access = index.is_inobject() ? 
// NOTE(review): garbled extraction -- template arguments stripped, functions
// collapsed onto single lines; code preserved byte-for-byte, comments only.
// This region is a near-verbatim DUPLICATE of the block at the top of this
// chunk: the body of BuildStoreNamedField (double fields boxed into a
// mutable HeapNumber on transition, or stored into the existing box), the
// StoreFieldStub builder, and the StoreTransitionStub builder's
// backing-store-growth case.  Duplicate definitions would violate the ODR
// if compiled -- presumably an extraction/concatenation artifact; verify
// against upstream before use.
HObjectAccess::ForObservableJSObjectOffset(offset, representation) : HObjectAccess::ForBackingStoreOffset(offset, representation); if (representation.IsDouble()) { if (!FLAG_unbox_double_fields || !index.is_inobject()) { HObjectAccess heap_number_access = access.WithRepresentation(Representation::Tagged()); if (transition_to_field) { // The store requires a mutable HeapNumber to be allocated. NoObservableSideEffectsScope no_side_effects(this); HInstruction* heap_number_size = Add(HeapNumber::kSize); // TODO(hpayer): Allocation site pretenuring support. HInstruction* heap_number = Add(heap_number_size, HType::HeapObject(), NOT_TENURED, MUTABLE_HEAP_NUMBER_TYPE); AddStoreMapConstant(heap_number, isolate()->factory()->mutable_heap_number_map()); Add(heap_number, HObjectAccess::ForHeapNumberValue(), value); // Store the new mutable heap number into the object. access = heap_number_access; value = heap_number; } else { // Load the heap number. object = Add(object, nullptr, heap_number_access); // Store the double value into it. access = HObjectAccess::ForHeapNumberValue(); } } } else if (representation.IsHeapObject()) { BuildCheckHeapObject(value); } Add(object, access, value, INITIALIZING_STORE); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BuildStoreNamedField(GetParameter(0), GetParameter(2), casted_stub()->index(), casted_stub()->representation(), false); return GetParameter(2); } Handle StoreFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* object = GetParameter(StoreTransitionHelper::ReceiverIndex()); switch (casted_stub()->store_mode()) { case StoreTransitionStub::ExtendStorageAndStoreMapAndValue: { HValue* properties = Add( object, nullptr, HObjectAccess::ForPropertiesPointer()); HValue* length = AddLoadFixedArrayLength(properties); HValue* delta = Add(static_cast(JSObject::kFieldsAdded)); HValue* new_capacity = AddUncasted(length, delta); // Grow properties array. 
// Duplicate of StoreTransitionStub's fall-through switch (grow backing
// store -> store value -> write map), StoreFastElementStub,
// TransitionElementsKindStub, and the AllocateHeapNumberStub builder start.
ElementsKind kind = FAST_ELEMENTS; Add(new_capacity, Add((Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) >> ElementsKindToShiftSize(kind))); // Reuse this code for properties backing store allocation. HValue* new_properties = BuildAllocateAndInitializeArray(kind, new_capacity); BuildCopyProperties(properties, new_properties, length, new_capacity); Add(object, HObjectAccess::ForPropertiesPointer(), new_properties); } // Fall through. case StoreTransitionStub::StoreMapAndValue: // Store the new value into the "extended" object. BuildStoreNamedField( object, GetParameter(StoreTransitionHelper::ValueIndex()), casted_stub()->index(), casted_stub()->representation(), true); // Fall through. case StoreTransitionStub::StoreMapOnly: // And finally update the map. Add(object, HObjectAccess::ForMap(), GetParameter(StoreTransitionHelper::MapIndex())); break; } return GetParameter(StoreTransitionHelper::ValueIndex()); } Handle StoreTransitionStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BuildUncheckedMonomorphicElementAccess( GetParameter(StoreDescriptor::kReceiverIndex), GetParameter(StoreDescriptor::kNameIndex), GetParameter(StoreDescriptor::kValueIndex), casted_stub()->is_js_array(), casted_stub()->elements_kind(), STORE, NEVER_RETURN_HOLE, casted_stub()->store_mode()); return GetParameter(2); } Handle StoreFastElementStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { info()->MarkAsSavesCallerDoubles(); BuildTransitionElementsKind(GetParameter(0), GetParameter(1), casted_stub()->from_kind(), casted_stub()->to_kind(), casted_stub()->is_js_array()); return GetParameter(0); } Handle TransitionElementsKindStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(Add(HeapNumber::kSize), HType::HeapNumber(), NOT_TENURED, HEAP_NUMBER_TYPE); 
// Duplicates of the Allocate{,Mutable}HeapNumberStub and
// AllocateInNewSpaceStub builders, then the start of the duplicated
// BuildArrayConstructor.
AddStoreMapConstant(result, isolate()->factory()->heap_number_map()); return result; } Handle AllocateHeapNumberStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(Add(HeapNumber::kSize), HType::HeapObject(), NOT_TENURED, MUTABLE_HEAP_NUMBER_TYPE); AddStoreMapConstant(result, isolate()->factory()->mutable_heap_number_map()); return result; } Handle AllocateMutableHeapNumberStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(GetParameter(0), HType::Tagged(), NOT_TENURED, JS_OBJECT_TYPE); return result; } Handle AllocateInNewSpaceStub::GenerateCode() { return DoGenerateCode(this); } HValue* CodeStubGraphBuilderBase::BuildArrayConstructor( ElementsKind kind, AllocationSiteOverrideMode override_mode, ArgumentClass argument_class) { HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor); HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite); JSArrayBuilder array_builder(this, kind, alloc_site, constructor, override_mode); HValue* result = NULL; switch (argument_class) { case NONE: // This stub is very performance sensitive, the generated code must be // tuned so that it doesn't build and eager frame. 
// NOTE(review): garbled extraction -- template arguments stripped, functions
// collapsed onto single lines; code preserved byte-for-byte, comments only.
// Continuation of the DUPLICATED region: BuildArrayConstructor /
// BuildInternalArrayConstructor dispatch, the single-argument constructor
// helper, and the start of BuildArrayNArgumentsConstructor -- all repeats
// of definitions earlier in this chunk (ODR violations if compiled as-is);
// presumably an extraction artifact, verify against upstream.
info()->MarkMustNotHaveEagerFrame(); result = array_builder.AllocateEmptyArray(); break; case SINGLE: result = BuildArraySingleArgumentConstructor(&array_builder); break; case MULTIPLE: result = BuildArrayNArgumentsConstructor(&array_builder, kind); break; } return result; } HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor( ElementsKind kind, ArgumentClass argument_class) { HValue* constructor = GetParameter( InternalArrayConstructorStubBase::kConstructor); JSArrayBuilder array_builder(this, kind, constructor); HValue* result = NULL; switch (argument_class) { case NONE: // This stub is very performance sensitive, the generated code must be // tuned so that it doesn't build and eager frame. info()->MarkMustNotHaveEagerFrame(); result = array_builder.AllocateEmptyArray(); break; case SINGLE: result = BuildArraySingleArgumentConstructor(&array_builder); break; case MULTIPLE: result = BuildArrayNArgumentsConstructor(&array_builder, kind); break; } return result; } HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor( JSArrayBuilder* array_builder) { // Smi check and range check on the input arg. HValue* constant_one = graph()->GetConstant1(); HValue* constant_zero = graph()->GetConstant0(); HInstruction* elements = Add(false); HInstruction* argument = Add( elements, constant_one, constant_zero); return BuildAllocateArrayFromLength(array_builder, argument); } HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor( JSArrayBuilder* array_builder, ElementsKind kind) { // Insert a bounds check because the number of arguments might exceed // the kInitialMaxFastElementArray limit. This cannot happen for code // that was parsed, but calling via Array.apply(thisArg, [...]) might // trigger it. 
// Duplicate of BuildArrayNArgumentsConstructor's bounds check, hole-fill
// selection, allocation, and element-copy loop, followed by the duplicated
// Array{No,Single}ArgumentConstructorStub specializations.
HValue* length = GetArgumentsLength(); HConstant* max_alloc_length = Add(JSArray::kInitialMaxFastElementArray); HValue* checked_length = Add(length, max_alloc_length); // We need to fill with the hole if it's a smi array in the multi-argument // case because we might have to bail out while copying arguments into // the array because they aren't compatible with a smi array. // If it's a double array, no problem, and if it's fast then no // problem either because doubles are boxed. // // TODO(mvstanton): consider an instruction to memset fill the array // with zero in this case instead. JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind) ? JSArrayBuilder::FILL_WITH_HOLE : JSArrayBuilder::DONT_FILL_WITH_HOLE; HValue* new_object = array_builder->AllocateArray(checked_length, max_alloc_length, checked_length, fill_mode); HValue* elements = array_builder->GetElementsLocation(); DCHECK(elements != NULL); // Now populate the elements correctly. LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement); HValue* start = graph()->GetConstant0(); HValue* key = builder.BeginBody(start, checked_length, Token::LT); HInstruction* argument_elements = Add(false); HInstruction* argument = Add( argument_elements, checked_length, key); Add(elements, key, argument, nullptr, kind); builder.EndBody(); return new_object; } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, NONE); } Handle ArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, SINGLE); } Handle ArraySingleArgumentConstructorStub::GenerateCode() { return 
// Duplicated ArrayNArguments / InternalArray* constructor specializations
// and the duplicated CompareNilICStub builder.
DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, MULTIPLE); } Handle ArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, NONE); } Handle InternalArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, SINGLE); } Handle InternalArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, MULTIPLE); } Handle InternalArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { Isolate* isolate = graph()->isolate(); CompareNilICStub* stub = casted_stub(); HIfContinuation continuation; Handle sentinel_map(isolate->heap()->meta_map()); Type* type = stub->GetType(zone(), sentinel_map); BuildCompareNil(GetParameter(0), type, &continuation, kEmbedMapsViaWeakCells); IfBuilder if_nil(this, &continuation); if_nil.Then(); if (continuation.IsFalseReachable()) { if_nil.Else(); if_nil.Return(graph()->GetConstantFalse()); } if_nil.End(); return continuation.IsTrueReachable() ? 
// Duplicated CompareNilIC tail and the duplicated BinaryOpICStub builder;
// this physical line is CUT OFF mid-call ("BuildBinaryOperation( state.op(),
// left, right,") -- the rest of the function lies beyond this chunk.
graph()->GetConstantTrue() : graph()->GetConstantUndefined(); } Handle CompareNilICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { BinaryOpICState state = casted_stub()->state(); HValue* left = GetParameter(BinaryOpICStub::kLeft); HValue* right = GetParameter(BinaryOpICStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) && (state.HasSideEffects() || !result_type->Is(Type::None()))); HValue* result = NULL; HAllocationMode allocation_mode(NOT_TENURED); if (state.op() == Token::ADD && (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) && !left_type->Is(Type::String()) && !right_type->Is(Type::String())) { // For the generic add stub a fast case for string addition is performance // critical. if (left_type->Maybe(Type::String())) { IfBuilder if_leftisstring(this); if_leftisstring.If(left); if_leftisstring.Then(); { Push(BuildBinaryOperation(state.op(), left, right, Type::String(zone()), right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_leftisstring.Else(); { Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_leftisstring.End(); result = Pop(); } else { IfBuilder if_rightisstring(this); if_rightisstring.If(right); if_rightisstring.Then(); { Push(BuildBinaryOperation(state.op(), left, right, left_type, Type::String(zone()), result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_rightisstring.Else(); { Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_rightisstring.End(); result = Pop(); } } else { result = BuildBinaryOperation( state.op(), left, right, 
left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } // If we encounter a generic argument, the number conversion is // observable, thus we cannot afford to bail out after the fact. if (!state.HasSideEffects()) { result = EnforceNumberType(result, result_type); } return result; } Handle BinaryOpICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BinaryOpICState state = casted_stub()->state(); HValue* allocation_site = GetParameter( BinaryOpWithAllocationSiteStub::kAllocationSite); HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft); HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); HAllocationMode allocation_mode(allocation_site); return BuildBinaryOperation(state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } Handle BinaryOpWithAllocationSiteStub::GenerateCode() { return DoGenerateCode(this); } HValue* CodeStubGraphBuilderBase::BuildToString(HValue* input, bool convert) { if (!convert) return BuildCheckString(input); IfBuilder if_inputissmi(this); HValue* inputissmi = if_inputissmi.If(input); if_inputissmi.Then(); { // Convert the input smi to a string. Push(BuildNumberToString(input, Type::SignedSmall())); } if_inputissmi.Else(); { HValue* input_map = Add(input, inputissmi, HObjectAccess::ForMap()); HValue* input_instance_type = Add( input_map, inputissmi, HObjectAccess::ForMapInstanceType()); IfBuilder if_inputisstring(this); if_inputisstring.If( input_instance_type, Add(FIRST_NONSTRING_TYPE), Token::LT); if_inputisstring.Then(); { // The input is already a string. Push(input); } if_inputisstring.Else(); { // Convert to primitive first (if necessary), see // ES6 section 12.7.3 The Addition operator. 
IfBuilder if_inputisprimitive(this); STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE); if_inputisprimitive.If( input_instance_type, Add(LAST_PRIMITIVE_TYPE), Token::LTE); if_inputisprimitive.Then(); { // The input is already a primitive. Push(input); } if_inputisprimitive.Else(); { // Convert the input to a primitive. Push(BuildToPrimitive(input, input_map)); } if_inputisprimitive.End(); // Convert the primitive to a string value. ToStringDescriptor descriptor(isolate()); ToStringStub stub(isolate()); HValue* values[] = {context(), Pop()}; Push(AddUncasted( Add(stub.GetCode()), 0, descriptor, Vector(values, arraysize(values)))); } if_inputisstring.End(); } if_inputissmi.End(); return Pop(); } HValue* CodeStubGraphBuilderBase::BuildToPrimitive(HValue* input, HValue* input_map) { // Get the native context of the caller. HValue* native_context = BuildGetNativeContext(); // Determine the initial map of the %ObjectPrototype%. HValue* object_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::OBJECT_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the %StringPrototype%. HValue* string_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the String function. HValue* string_function = Add( native_context, nullptr, HObjectAccess::ForContextSlot(Context::STRING_FUNCTION_INDEX)); HValue* string_function_initial_map = Add( string_function, nullptr, HObjectAccess::ForPrototypeOrInitialMap()); // Determine the map of the [[Prototype]] of {input}. 
HValue* input_prototype = Add(input_map, nullptr, HObjectAccess::ForPrototype()); HValue* input_prototype_map = Add(input_prototype, nullptr, HObjectAccess::ForMap()); // For string wrappers (JSValue instances with [[StringData]] internal // fields), we can shortcirciut the ToPrimitive if // // (a) the {input} map matches the initial map of the String function, // (b) the {input} [[Prototype]] is the unmodified %StringPrototype% (i.e. // no one monkey-patched toString, @@toPrimitive or valueOf), and // (c) the %ObjectPrototype% (i.e. the [[Prototype]] of the // %StringPrototype%) is also unmodified, that is no one sneaked a // @@toPrimitive into the %ObjectPrototype%. // // If all these assumptions hold, we can just take the [[StringData]] value // and return it. // TODO(bmeurer): This just repairs a regression introduced by removing the // weird (and broken) intrinsic %_IsStringWrapperSafeForDefaultValue, which // was intendend to something similar to this, although less efficient and // wrong in the presence of @@toPrimitive. Long-term we might want to move // into the direction of having a ToPrimitiveStub that can do common cases // while staying in JavaScript land (i.e. not going to C++). IfBuilder if_inputisstringwrapper(this); if_inputisstringwrapper.If( input_map, string_function_initial_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( input_prototype_map, string_function_prototype_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( Add(Add(input_prototype_map, nullptr, HObjectAccess::ForPrototype()), nullptr, HObjectAccess::ForMap()), object_function_prototype_map); if_inputisstringwrapper.Then(); { Push(BuildLoadNamedField( input, FieldIndex::ForInObjectOffset(JSValue::kValueOffset))); } if_inputisstringwrapper.Else(); { // TODO(bmeurer): Add support for fast ToPrimitive conversion using // a dedicated ToPrimitiveStub. 
Add(input); Push(Add(Runtime::FunctionForId(Runtime::kToPrimitive), 1)); } if_inputisstringwrapper.End(); return Pop(); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StringAddStub* stub = casted_stub(); StringAddFlags flags = stub->flags(); PretenureFlag pretenure_flag = stub->pretenure_flag(); HValue* left = GetParameter(StringAddStub::kLeft); HValue* right = GetParameter(StringAddStub::kRight); // Make sure that both arguments are strings if not known in advance. if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) { left = BuildToString(left, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) { right = BuildToString(right, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } return BuildStringAdd(left, right, HAllocationMode(pretenure_flag)); } Handle StringAddStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { ToBooleanStub* stub = casted_stub(); IfBuilder if_true(this); if_true.If(GetParameter(0), stub->types()); if_true.Then(); if_true.Return(graph()->GetConstantTrue()); if_true.Else(); if_true.End(); return graph()->GetConstantFalse(); } Handle ToBooleanStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StoreGlobalStub* stub = casted_stub(); HParameter* value = GetParameter(StoreDescriptor::kValueIndex); if (stub->check_global()) { // Check that the map of the global has not changed: use a placeholder map // that will be replaced later with the global object's map. 
HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex); HValue* proxy_map = Add(proxy, nullptr, HObjectAccess::ForMap()); HValue* global = Add(proxy_map, nullptr, HObjectAccess::ForPrototype()); HValue* map_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::global_map_placeholder(isolate()))); HValue* expected_map = Add( map_cell, nullptr, HObjectAccess::ForWeakCellValue()); HValue* map = Add(global, nullptr, HObjectAccess::ForMap()); IfBuilder map_check(this); map_check.IfNot(expected_map, map); map_check.ThenDeopt(Deoptimizer::kUnknownMap); map_check.End(); } HValue* weak_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::property_cell_placeholder(isolate()))); HValue* cell = Add(weak_cell, nullptr, HObjectAccess::ForWeakCellValue()); Add
LoadConstantStub::GenerateCode() { return DoGenerateCode(this); } HValue* CodeStubGraphBuilderBase::UnmappedCase(HValue* elements, HValue* key, HValue* value) { HValue* result = NULL; HInstruction* backing_store = Add(elements, graph()->GetConstant1(), nullptr, nullptr, FAST_ELEMENTS, ALLOW_RETURN_HOLE); Add(backing_store, isolate()->factory()->fixed_array_map()); HValue* backing_store_length = Add( backing_store, nullptr, HObjectAccess::ForFixedArrayLength()); IfBuilder in_unmapped_range(this); in_unmapped_range.If(key, backing_store_length, Token::LT); in_unmapped_range.Then(); { if (value == NULL) { result = Add(backing_store, key, nullptr, nullptr, FAST_HOLEY_ELEMENTS, NEVER_RETURN_HOLE); } else { Add(backing_store, key, value, nullptr, FAST_HOLEY_ELEMENTS); } } in_unmapped_range.ElseDeopt(Deoptimizer::kOutsideOfRange); in_unmapped_range.End(); return result; } HValue* CodeStubGraphBuilderBase::EmitKeyedSloppyArguments(HValue* receiver, HValue* key, HValue* value) { // Mapped arguments are actual arguments. Unmapped arguments are values added // to the arguments object after it was created for the call. Mapped arguments // are stored in the context at indexes given by elements[key + 2]. Unmapped // arguments are stored as regular indexed properties in the arguments array, // held at elements[1]. See NewSloppyArguments() in runtime.cc for a detailed // look at argument object construction. // // The sloppy arguments elements array has a special format: // // 0: context // 1: unmapped arguments array // 2: mapped_index0, // 3: mapped_index1, // ... // // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments). // If key + 2 >= elements.length then attempt to look in the unmapped // arguments array (given by elements[1]) and return the value at key, missing // to the runtime if the unmapped arguments array is not a fixed array or if // key >= unmapped_arguments_array.length. // // Otherwise, t = elements[key + 2]. 
If t is the hole, then look up the value // in the unmapped arguments array, as described above. Otherwise, t is a Smi // index into the context array given at elements[0]. Return the value at // context[t]. bool is_load = value == NULL; key = AddUncasted(key, Representation::Smi()); IfBuilder positive_smi(this); positive_smi.If(key, graph()->GetConstant0(), Token::LT); positive_smi.ThenDeopt(Deoptimizer::kKeyIsNegative); positive_smi.End(); HValue* constant_two = Add(2); HValue* elements = AddLoadElements(receiver, nullptr); HValue* elements_length = Add( elements, nullptr, HObjectAccess::ForFixedArrayLength()); HValue* adjusted_length = AddUncasted(elements_length, constant_two); IfBuilder in_range(this); in_range.If(key, adjusted_length, Token::LT); in_range.Then(); { HValue* index = AddUncasted(key, constant_two); HInstruction* mapped_index = Add(elements, index, nullptr, nullptr, FAST_HOLEY_ELEMENTS, ALLOW_RETURN_HOLE); IfBuilder is_valid(this); is_valid.IfNot(mapped_index, graph()->GetConstantHole()); is_valid.Then(); { // TODO(mvstanton): I'd like to assert from this point, that if the // mapped_index is not the hole that it is indeed, a smi. An unnecessary // smi check is being emitted. HValue* the_context = Add(elements, graph()->GetConstant0(), nullptr, nullptr, FAST_ELEMENTS); STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize); if (is_load) { HValue* result = Add(the_context, mapped_index, nullptr, nullptr, FAST_ELEMENTS, ALLOW_RETURN_HOLE); environment()->Push(result); } else { DCHECK(value != NULL); Add(the_context, mapped_index, value, nullptr, FAST_ELEMENTS); environment()->Push(value); } } is_valid.Else(); { HValue* result = UnmappedCase(elements, key, value); environment()->Push(is_load ? result : value); } is_valid.End(); } in_range.Else(); { HValue* result = UnmappedCase(elements, key, value); environment()->Push(is_load ? 
result : value); } in_range.End(); return environment()->Pop(); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* receiver = GetParameter(LoadDescriptor::kReceiverIndex); HValue* key = GetParameter(LoadDescriptor::kNameIndex); return EmitKeyedSloppyArguments(receiver, key, NULL); } Handle KeyedLoadSloppyArgumentsStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* receiver = GetParameter(StoreDescriptor::kReceiverIndex); HValue* key = GetParameter(StoreDescriptor::kNameIndex); HValue* value = GetParameter(StoreDescriptor::kValueIndex); return EmitKeyedSloppyArguments(receiver, key, value); } Handle KeyedStoreSloppyArgumentsStub::GenerateCode() { return DoGenerateCode(this); } void CodeStubGraphBuilderBase::BuildStoreNamedField( HValue* object, HValue* value, FieldIndex index, Representation representation, bool transition_to_field) { DCHECK(!index.is_double() || representation.IsDouble()); int offset = index.offset(); HObjectAccess access = index.is_inobject() ? HObjectAccess::ForObservableJSObjectOffset(offset, representation) : HObjectAccess::ForBackingStoreOffset(offset, representation); if (representation.IsDouble()) { if (!FLAG_unbox_double_fields || !index.is_inobject()) { HObjectAccess heap_number_access = access.WithRepresentation(Representation::Tagged()); if (transition_to_field) { // The store requires a mutable HeapNumber to be allocated. NoObservableSideEffectsScope no_side_effects(this); HInstruction* heap_number_size = Add(HeapNumber::kSize); // TODO(hpayer): Allocation site pretenuring support. HInstruction* heap_number = Add(heap_number_size, HType::HeapObject(), NOT_TENURED, MUTABLE_HEAP_NUMBER_TYPE); AddStoreMapConstant(heap_number, isolate()->factory()->mutable_heap_number_map()); Add(heap_number, HObjectAccess::ForHeapNumberValue(), value); // Store the new mutable heap number into the object. 
access = heap_number_access; value = heap_number; } else { // Load the heap number. object = Add(object, nullptr, heap_number_access); // Store the double value into it. access = HObjectAccess::ForHeapNumberValue(); } } } else if (representation.IsHeapObject()) { BuildCheckHeapObject(value); } Add(object, access, value, INITIALIZING_STORE); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BuildStoreNamedField(GetParameter(0), GetParameter(2), casted_stub()->index(), casted_stub()->representation(), false); return GetParameter(2); } Handle StoreFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* object = GetParameter(StoreTransitionHelper::ReceiverIndex()); switch (casted_stub()->store_mode()) { case StoreTransitionStub::ExtendStorageAndStoreMapAndValue: { HValue* properties = Add( object, nullptr, HObjectAccess::ForPropertiesPointer()); HValue* length = AddLoadFixedArrayLength(properties); HValue* delta = Add(static_cast(JSObject::kFieldsAdded)); HValue* new_capacity = AddUncasted(length, delta); // Grow properties array. ElementsKind kind = FAST_ELEMENTS; Add(new_capacity, Add((Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) >> ElementsKindToShiftSize(kind))); // Reuse this code for properties backing store allocation. HValue* new_properties = BuildAllocateAndInitializeArray(kind, new_capacity); BuildCopyProperties(properties, new_properties, length, new_capacity); Add(object, HObjectAccess::ForPropertiesPointer(), new_properties); } // Fall through. case StoreTransitionStub::StoreMapAndValue: // Store the new value into the "extended" object. BuildStoreNamedField( object, GetParameter(StoreTransitionHelper::ValueIndex()), casted_stub()->index(), casted_stub()->representation(), true); // Fall through. case StoreTransitionStub::StoreMapOnly: // And finally update the map. 
Add(object, HObjectAccess::ForMap(), GetParameter(StoreTransitionHelper::MapIndex())); break; } return GetParameter(StoreTransitionHelper::ValueIndex()); } Handle StoreTransitionStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BuildUncheckedMonomorphicElementAccess( GetParameter(StoreDescriptor::kReceiverIndex), GetParameter(StoreDescriptor::kNameIndex), GetParameter(StoreDescriptor::kValueIndex), casted_stub()->is_js_array(), casted_stub()->elements_kind(), STORE, NEVER_RETURN_HOLE, casted_stub()->store_mode()); return GetParameter(2); } Handle StoreFastElementStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { info()->MarkAsSavesCallerDoubles(); BuildTransitionElementsKind(GetParameter(0), GetParameter(1), casted_stub()->from_kind(), casted_stub()->to_kind(), casted_stub()->is_js_array()); return GetParameter(0); } Handle TransitionElementsKindStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(Add(HeapNumber::kSize), HType::HeapNumber(), NOT_TENURED, HEAP_NUMBER_TYPE); AddStoreMapConstant(result, isolate()->factory()->heap_number_map()); return result; } Handle AllocateHeapNumberStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(Add(HeapNumber::kSize), HType::HeapObject(), NOT_TENURED, MUTABLE_HEAP_NUMBER_TYPE); AddStoreMapConstant(result, isolate()->factory()->mutable_heap_number_map()); return result; } Handle AllocateMutableHeapNumberStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(GetParameter(0), HType::Tagged(), NOT_TENURED, JS_OBJECT_TYPE); return result; } Handle AllocateInNewSpaceStub::GenerateCode() { return DoGenerateCode(this); } HValue* 
CodeStubGraphBuilderBase::BuildArrayConstructor( ElementsKind kind, AllocationSiteOverrideMode override_mode, ArgumentClass argument_class) { HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor); HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite); JSArrayBuilder array_builder(this, kind, alloc_site, constructor, override_mode); HValue* result = NULL; switch (argument_class) { case NONE: // This stub is very performance sensitive, the generated code must be // tuned so that it doesn't build and eager frame. info()->MarkMustNotHaveEagerFrame(); result = array_builder.AllocateEmptyArray(); break; case SINGLE: result = BuildArraySingleArgumentConstructor(&array_builder); break; case MULTIPLE: result = BuildArrayNArgumentsConstructor(&array_builder, kind); break; } return result; } HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor( ElementsKind kind, ArgumentClass argument_class) { HValue* constructor = GetParameter( InternalArrayConstructorStubBase::kConstructor); JSArrayBuilder array_builder(this, kind, constructor); HValue* result = NULL; switch (argument_class) { case NONE: // This stub is very performance sensitive, the generated code must be // tuned so that it doesn't build and eager frame. info()->MarkMustNotHaveEagerFrame(); result = array_builder.AllocateEmptyArray(); break; case SINGLE: result = BuildArraySingleArgumentConstructor(&array_builder); break; case MULTIPLE: result = BuildArrayNArgumentsConstructor(&array_builder, kind); break; } return result; } HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor( JSArrayBuilder* array_builder) { // Smi check and range check on the input arg. 
HValue* constant_one = graph()->GetConstant1(); HValue* constant_zero = graph()->GetConstant0(); HInstruction* elements = Add(false); HInstruction* argument = Add( elements, constant_one, constant_zero); return BuildAllocateArrayFromLength(array_builder, argument); } HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor( JSArrayBuilder* array_builder, ElementsKind kind) { // Insert a bounds check because the number of arguments might exceed // the kInitialMaxFastElementArray limit. This cannot happen for code // that was parsed, but calling via Array.apply(thisArg, [...]) might // trigger it. HValue* length = GetArgumentsLength(); HConstant* max_alloc_length = Add(JSArray::kInitialMaxFastElementArray); HValue* checked_length = Add(length, max_alloc_length); // We need to fill with the hole if it's a smi array in the multi-argument // case because we might have to bail out while copying arguments into // the array because they aren't compatible with a smi array. // If it's a double array, no problem, and if it's fast then no // problem either because doubles are boxed. // // TODO(mvstanton): consider an instruction to memset fill the array // with zero in this case instead. JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind) ? JSArrayBuilder::FILL_WITH_HOLE : JSArrayBuilder::DONT_FILL_WITH_HOLE; HValue* new_object = array_builder->AllocateArray(checked_length, max_alloc_length, checked_length, fill_mode); HValue* elements = array_builder->GetElementsLocation(); DCHECK(elements != NULL); // Now populate the elements correctly. 
LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement); HValue* start = graph()->GetConstant0(); HValue* key = builder.BeginBody(start, checked_length, Token::LT); HInstruction* argument_elements = Add(false); HInstruction* argument = Add( argument_elements, checked_length, key); Add(elements, key, argument, nullptr, kind); builder.EndBody(); return new_object; } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, NONE); } Handle ArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, SINGLE); } Handle ArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, MULTIPLE); } Handle ArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, NONE); } Handle InternalArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, SINGLE); } Handle InternalArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = 
casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, MULTIPLE); } Handle InternalArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { Isolate* isolate = graph()->isolate(); CompareNilICStub* stub = casted_stub(); HIfContinuation continuation; Handle sentinel_map(isolate->heap()->meta_map()); Type* type = stub->GetType(zone(), sentinel_map); BuildCompareNil(GetParameter(0), type, &continuation, kEmbedMapsViaWeakCells); IfBuilder if_nil(this, &continuation); if_nil.Then(); if (continuation.IsFalseReachable()) { if_nil.Else(); if_nil.Return(graph()->GetConstantFalse()); } if_nil.End(); return continuation.IsTrueReachable() ? graph()->GetConstantTrue() : graph()->GetConstantUndefined(); } Handle CompareNilICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { BinaryOpICState state = casted_stub()->state(); HValue* left = GetParameter(BinaryOpICStub::kLeft); HValue* right = GetParameter(BinaryOpICStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) && (state.HasSideEffects() || !result_type->Is(Type::None()))); HValue* result = NULL; HAllocationMode allocation_mode(NOT_TENURED); if (state.op() == Token::ADD && (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) && !left_type->Is(Type::String()) && !right_type->Is(Type::String())) { // For the generic add stub a fast case for string addition is performance // critical. 
if (left_type->Maybe(Type::String())) { IfBuilder if_leftisstring(this); if_leftisstring.If(left); if_leftisstring.Then(); { Push(BuildBinaryOperation(state.op(), left, right, Type::String(zone()), right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_leftisstring.Else(); { Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_leftisstring.End(); result = Pop(); } else { IfBuilder if_rightisstring(this); if_rightisstring.If(right); if_rightisstring.Then(); { Push(BuildBinaryOperation(state.op(), left, right, left_type, Type::String(zone()), result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_rightisstring.Else(); { Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_rightisstring.End(); result = Pop(); } } else { result = BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } // If we encounter a generic argument, the number conversion is // observable, thus we cannot afford to bail out after the fact. 
if (!state.HasSideEffects()) { result = EnforceNumberType(result, result_type); } return result; } Handle BinaryOpICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BinaryOpICState state = casted_stub()->state(); HValue* allocation_site = GetParameter( BinaryOpWithAllocationSiteStub::kAllocationSite); HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft); HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); HAllocationMode allocation_mode(allocation_site); return BuildBinaryOperation(state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } Handle BinaryOpWithAllocationSiteStub::GenerateCode() { return DoGenerateCode(this); } HValue* CodeStubGraphBuilderBase::BuildToString(HValue* input, bool convert) { if (!convert) return BuildCheckString(input); IfBuilder if_inputissmi(this); HValue* inputissmi = if_inputissmi.If(input); if_inputissmi.Then(); { // Convert the input smi to a string. Push(BuildNumberToString(input, Type::SignedSmall())); } if_inputissmi.Else(); { HValue* input_map = Add(input, inputissmi, HObjectAccess::ForMap()); HValue* input_instance_type = Add( input_map, inputissmi, HObjectAccess::ForMapInstanceType()); IfBuilder if_inputisstring(this); if_inputisstring.If( input_instance_type, Add(FIRST_NONSTRING_TYPE), Token::LT); if_inputisstring.Then(); { // The input is already a string. Push(input); } if_inputisstring.Else(); { // Convert to primitive first (if necessary), see // ES6 section 12.7.3 The Addition operator. IfBuilder if_inputisprimitive(this); STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE); if_inputisprimitive.If( input_instance_type, Add(LAST_PRIMITIVE_TYPE), Token::LTE); if_inputisprimitive.Then(); { // The input is already a primitive. 
Push(input); } if_inputisprimitive.Else(); { // Convert the input to a primitive. Push(BuildToPrimitive(input, input_map)); } if_inputisprimitive.End(); // Convert the primitive to a string value. ToStringDescriptor descriptor(isolate()); ToStringStub stub(isolate()); HValue* values[] = {context(), Pop()}; Push(AddUncasted( Add(stub.GetCode()), 0, descriptor, Vector(values, arraysize(values)))); } if_inputisstring.End(); } if_inputissmi.End(); return Pop(); } HValue* CodeStubGraphBuilderBase::BuildToPrimitive(HValue* input, HValue* input_map) { // Get the native context of the caller. HValue* native_context = BuildGetNativeContext(); // Determine the initial map of the %ObjectPrototype%. HValue* object_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::OBJECT_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the %StringPrototype%. HValue* string_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the String function. HValue* string_function = Add( native_context, nullptr, HObjectAccess::ForContextSlot(Context::STRING_FUNCTION_INDEX)); HValue* string_function_initial_map = Add( string_function, nullptr, HObjectAccess::ForPrototypeOrInitialMap()); // Determine the map of the [[Prototype]] of {input}. HValue* input_prototype = Add(input_map, nullptr, HObjectAccess::ForPrototype()); HValue* input_prototype_map = Add(input_prototype, nullptr, HObjectAccess::ForMap()); // For string wrappers (JSValue instances with [[StringData]] internal // fields), we can shortcirciut the ToPrimitive if // // (a) the {input} map matches the initial map of the String function, // (b) the {input} [[Prototype]] is the unmodified %StringPrototype% (i.e. // no one monkey-patched toString, @@toPrimitive or valueOf), and // (c) the %ObjectPrototype% (i.e. 
the [[Prototype]] of the // %StringPrototype%) is also unmodified, that is no one sneaked a // @@toPrimitive into the %ObjectPrototype%. // // If all these assumptions hold, we can just take the [[StringData]] value // and return it. // TODO(bmeurer): This just repairs a regression introduced by removing the // weird (and broken) intrinsic %_IsStringWrapperSafeForDefaultValue, which // was intendend to something similar to this, although less efficient and // wrong in the presence of @@toPrimitive. Long-term we might want to move // into the direction of having a ToPrimitiveStub that can do common cases // while staying in JavaScript land (i.e. not going to C++). IfBuilder if_inputisstringwrapper(this); if_inputisstringwrapper.If( input_map, string_function_initial_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( input_prototype_map, string_function_prototype_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( Add(Add(input_prototype_map, nullptr, HObjectAccess::ForPrototype()), nullptr, HObjectAccess::ForMap()), object_function_prototype_map); if_inputisstringwrapper.Then(); { Push(BuildLoadNamedField( input, FieldIndex::ForInObjectOffset(JSValue::kValueOffset))); } if_inputisstringwrapper.Else(); { // TODO(bmeurer): Add support for fast ToPrimitive conversion using // a dedicated ToPrimitiveStub. Add(input); Push(Add(Runtime::FunctionForId(Runtime::kToPrimitive), 1)); } if_inputisstringwrapper.End(); return Pop(); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StringAddStub* stub = casted_stub(); StringAddFlags flags = stub->flags(); PretenureFlag pretenure_flag = stub->pretenure_flag(); HValue* left = GetParameter(StringAddStub::kLeft); HValue* right = GetParameter(StringAddStub::kRight); // Make sure that both arguments are strings if not known in advance. 
if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) { left = BuildToString(left, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) { right = BuildToString(right, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } return BuildStringAdd(left, right, HAllocationMode(pretenure_flag)); } Handle StringAddStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { ToBooleanStub* stub = casted_stub(); IfBuilder if_true(this); if_true.If(GetParameter(0), stub->types()); if_true.Then(); if_true.Return(graph()->GetConstantTrue()); if_true.Else(); if_true.End(); return graph()->GetConstantFalse(); } Handle ToBooleanStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StoreGlobalStub* stub = casted_stub(); HParameter* value = GetParameter(StoreDescriptor::kValueIndex); if (stub->check_global()) { // Check that the map of the global has not changed: use a placeholder map // that will be replaced later with the global object's map. HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex); HValue* proxy_map = Add(proxy, nullptr, HObjectAccess::ForMap()); HValue* global = Add(proxy_map, nullptr, HObjectAccess::ForPrototype()); HValue* map_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::global_map_placeholder(isolate()))); HValue* expected_map = Add( map_cell, nullptr, HObjectAccess::ForWeakCellValue()); HValue* map = Add(global, nullptr, HObjectAccess::ForMap()); IfBuilder map_check(this); map_check.IfNot(expected_map, map); map_check.ThenDeopt(Deoptimizer::kUnknownMap); map_check.End(); } HValue* weak_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::property_cell_placeholder(isolate()))); HValue* cell = Add(weak_cell, nullptr, HObjectAccess::ForWeakCellValue()); Add
KeyedLoadSloppyArgumentsStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* receiver = GetParameter(StoreDescriptor::kReceiverIndex); HValue* key = GetParameter(StoreDescriptor::kNameIndex); HValue* value = GetParameter(StoreDescriptor::kValueIndex); return EmitKeyedSloppyArguments(receiver, key, value); } Handle KeyedStoreSloppyArgumentsStub::GenerateCode() { return DoGenerateCode(this); } void CodeStubGraphBuilderBase::BuildStoreNamedField( HValue* object, HValue* value, FieldIndex index, Representation representation, bool transition_to_field) { DCHECK(!index.is_double() || representation.IsDouble()); int offset = index.offset(); HObjectAccess access = index.is_inobject() ? HObjectAccess::ForObservableJSObjectOffset(offset, representation) : HObjectAccess::ForBackingStoreOffset(offset, representation); if (representation.IsDouble()) { if (!FLAG_unbox_double_fields || !index.is_inobject()) { HObjectAccess heap_number_access = access.WithRepresentation(Representation::Tagged()); if (transition_to_field) { // The store requires a mutable HeapNumber to be allocated. NoObservableSideEffectsScope no_side_effects(this); HInstruction* heap_number_size = Add(HeapNumber::kSize); // TODO(hpayer): Allocation site pretenuring support. HInstruction* heap_number = Add(heap_number_size, HType::HeapObject(), NOT_TENURED, MUTABLE_HEAP_NUMBER_TYPE); AddStoreMapConstant(heap_number, isolate()->factory()->mutable_heap_number_map()); Add(heap_number, HObjectAccess::ForHeapNumberValue(), value); // Store the new mutable heap number into the object. access = heap_number_access; value = heap_number; } else { // Load the heap number. object = Add(object, nullptr, heap_number_access); // Store the double value into it. 
access = HObjectAccess::ForHeapNumberValue(); } } } else if (representation.IsHeapObject()) { BuildCheckHeapObject(value); } Add(object, access, value, INITIALIZING_STORE); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BuildStoreNamedField(GetParameter(0), GetParameter(2), casted_stub()->index(), casted_stub()->representation(), false); return GetParameter(2); } Handle StoreFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* object = GetParameter(StoreTransitionHelper::ReceiverIndex()); switch (casted_stub()->store_mode()) { case StoreTransitionStub::ExtendStorageAndStoreMapAndValue: { HValue* properties = Add( object, nullptr, HObjectAccess::ForPropertiesPointer()); HValue* length = AddLoadFixedArrayLength(properties); HValue* delta = Add(static_cast(JSObject::kFieldsAdded)); HValue* new_capacity = AddUncasted(length, delta); // Grow properties array. ElementsKind kind = FAST_ELEMENTS; Add(new_capacity, Add((Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) >> ElementsKindToShiftSize(kind))); // Reuse this code for properties backing store allocation. HValue* new_properties = BuildAllocateAndInitializeArray(kind, new_capacity); BuildCopyProperties(properties, new_properties, length, new_capacity); Add(object, HObjectAccess::ForPropertiesPointer(), new_properties); } // Fall through. case StoreTransitionStub::StoreMapAndValue: // Store the new value into the "extended" object. BuildStoreNamedField( object, GetParameter(StoreTransitionHelper::ValueIndex()), casted_stub()->index(), casted_stub()->representation(), true); // Fall through. case StoreTransitionStub::StoreMapOnly: // And finally update the map. 
Add(object, HObjectAccess::ForMap(), GetParameter(StoreTransitionHelper::MapIndex())); break; } return GetParameter(StoreTransitionHelper::ValueIndex()); } Handle StoreTransitionStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BuildUncheckedMonomorphicElementAccess( GetParameter(StoreDescriptor::kReceiverIndex), GetParameter(StoreDescriptor::kNameIndex), GetParameter(StoreDescriptor::kValueIndex), casted_stub()->is_js_array(), casted_stub()->elements_kind(), STORE, NEVER_RETURN_HOLE, casted_stub()->store_mode()); return GetParameter(2); } Handle StoreFastElementStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { info()->MarkAsSavesCallerDoubles(); BuildTransitionElementsKind(GetParameter(0), GetParameter(1), casted_stub()->from_kind(), casted_stub()->to_kind(), casted_stub()->is_js_array()); return GetParameter(0); } Handle TransitionElementsKindStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(Add(HeapNumber::kSize), HType::HeapNumber(), NOT_TENURED, HEAP_NUMBER_TYPE); AddStoreMapConstant(result, isolate()->factory()->heap_number_map()); return result; } Handle AllocateHeapNumberStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(Add(HeapNumber::kSize), HType::HeapObject(), NOT_TENURED, MUTABLE_HEAP_NUMBER_TYPE); AddStoreMapConstant(result, isolate()->factory()->mutable_heap_number_map()); return result; } Handle AllocateMutableHeapNumberStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(GetParameter(0), HType::Tagged(), NOT_TENURED, JS_OBJECT_TYPE); return result; } Handle AllocateInNewSpaceStub::GenerateCode() { return DoGenerateCode(this); } HValue* 
CodeStubGraphBuilderBase::BuildArrayConstructor( ElementsKind kind, AllocationSiteOverrideMode override_mode, ArgumentClass argument_class) { HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor); HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite); JSArrayBuilder array_builder(this, kind, alloc_site, constructor, override_mode); HValue* result = NULL; switch (argument_class) { case NONE: // This stub is very performance sensitive, the generated code must be // tuned so that it doesn't build and eager frame. info()->MarkMustNotHaveEagerFrame(); result = array_builder.AllocateEmptyArray(); break; case SINGLE: result = BuildArraySingleArgumentConstructor(&array_builder); break; case MULTIPLE: result = BuildArrayNArgumentsConstructor(&array_builder, kind); break; } return result; } HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor( ElementsKind kind, ArgumentClass argument_class) { HValue* constructor = GetParameter( InternalArrayConstructorStubBase::kConstructor); JSArrayBuilder array_builder(this, kind, constructor); HValue* result = NULL; switch (argument_class) { case NONE: // This stub is very performance sensitive, the generated code must be // tuned so that it doesn't build and eager frame. info()->MarkMustNotHaveEagerFrame(); result = array_builder.AllocateEmptyArray(); break; case SINGLE: result = BuildArraySingleArgumentConstructor(&array_builder); break; case MULTIPLE: result = BuildArrayNArgumentsConstructor(&array_builder, kind); break; } return result; } HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor( JSArrayBuilder* array_builder) { // Smi check and range check on the input arg. 
HValue* constant_one = graph()->GetConstant1(); HValue* constant_zero = graph()->GetConstant0(); HInstruction* elements = Add(false); HInstruction* argument = Add( elements, constant_one, constant_zero); return BuildAllocateArrayFromLength(array_builder, argument); } HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor( JSArrayBuilder* array_builder, ElementsKind kind) { // Insert a bounds check because the number of arguments might exceed // the kInitialMaxFastElementArray limit. This cannot happen for code // that was parsed, but calling via Array.apply(thisArg, [...]) might // trigger it. HValue* length = GetArgumentsLength(); HConstant* max_alloc_length = Add(JSArray::kInitialMaxFastElementArray); HValue* checked_length = Add(length, max_alloc_length); // We need to fill with the hole if it's a smi array in the multi-argument // case because we might have to bail out while copying arguments into // the array because they aren't compatible with a smi array. // If it's a double array, no problem, and if it's fast then no // problem either because doubles are boxed. // // TODO(mvstanton): consider an instruction to memset fill the array // with zero in this case instead. JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind) ? JSArrayBuilder::FILL_WITH_HOLE : JSArrayBuilder::DONT_FILL_WITH_HOLE; HValue* new_object = array_builder->AllocateArray(checked_length, max_alloc_length, checked_length, fill_mode); HValue* elements = array_builder->GetElementsLocation(); DCHECK(elements != NULL); // Now populate the elements correctly. 
LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement); HValue* start = graph()->GetConstant0(); HValue* key = builder.BeginBody(start, checked_length, Token::LT); HInstruction* argument_elements = Add(false); HInstruction* argument = Add( argument_elements, checked_length, key); Add(elements, key, argument, nullptr, kind); builder.EndBody(); return new_object; } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, NONE); } Handle ArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, SINGLE); } Handle ArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, MULTIPLE); } Handle ArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, NONE); } Handle InternalArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, SINGLE); } Handle InternalArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = 
// NOTE(review): reconstructed formatting and template arguments (stripped
// during extraction). The first statement completes the "ElementsKind kind ="
// begun at the end of the previous line.
      casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, MULTIPLE);
}

Handle<Code> InternalArrayNArgumentsConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


// CompareNilIC: compares against null/undefined using the stub's recorded
// type feedback; folds to a constant where only one outcome is reachable.
template <>
HValue* CodeStubGraphBuilder<CompareNilICStub>::BuildCodeInitializedStub() {
  Isolate* isolate = graph()->isolate();
  CompareNilICStub* stub = casted_stub();
  HIfContinuation continuation;
  Handle<Map> sentinel_map(isolate->heap()->meta_map());
  Type* type = stub->GetType(zone(), sentinel_map);
  BuildCompareNil(GetParameter(0), type, &continuation, kEmbedMapsViaWeakCells);
  IfBuilder if_nil(this, &continuation);
  if_nil.Then();
  if (continuation.IsFalseReachable()) {
    if_nil.Else();
    if_nil.Return(graph()->GetConstantFalse());
  }
  if_nil.End();
  return continuation.IsTrueReachable() ? graph()->GetConstantTrue()
                                        : graph()->GetConstantUndefined();
}

Handle<Code> CompareNilICStub::GenerateCode() { return DoGenerateCode(this); }


// Head of the BinaryOpIC builder (tail continues below this chunk): performs
// the operation, with a fast path for generic ADD when one operand may be a
// string.
template <>
HValue* CodeStubGraphBuilder<BinaryOpICStub>::BuildCodeInitializedStub() {
  BinaryOpICState state = casted_stub()->state();

  HValue* left = GetParameter(BinaryOpICStub::kLeft);
  HValue* right = GetParameter(BinaryOpICStub::kRight);

  Type* left_type = state.GetLeftType();
  Type* right_type = state.GetRightType();
  Type* result_type = state.GetResultType();

  DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) &&
         (state.HasSideEffects() || !result_type->Is(Type::None())));

  HValue* result = NULL;
  HAllocationMode allocation_mode(NOT_TENURED);
  if (state.op() == Token::ADD &&
      (left_type->Maybe(Type::String()) ||
       right_type->Maybe(Type::String())) &&
      !left_type->Is(Type::String()) && !right_type->Is(Type::String())) {
    // For the generic add stub a fast case for string addition is performance
    // critical.
    if (left_type->Maybe(Type::String())) {
      IfBuilder if_leftisstring(this);
      if_leftisstring.If<HIsStringAndBranch>(left);
      if_leftisstring.Then();
      {
        Push(BuildBinaryOperation(state.op(), left, right,
                                  Type::String(zone()), right_type,
                                  result_type, state.fixed_right_arg(),
                                  allocation_mode, state.strength()));
      }
      if_leftisstring.Else();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right, left_type, right_type, result_type,
            state.fixed_right_arg(), allocation_mode, state.strength()));
      }
      if_leftisstring.End();
      result = Pop();
    } else {
      IfBuilder if_rightisstring(this);
      if_rightisstring.If<HIsStringAndBranch>(right);
      if_rightisstring.Then();
      {
        Push(BuildBinaryOperation(state.op(), left, right, left_type,
                                  Type::String(zone()), result_type,
                                  state.fixed_right_arg(), allocation_mode,
                                  state.strength()));
      }
      if_rightisstring.Else();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right, left_type, right_type, result_type,
            state.fixed_right_arg(), allocation_mode, state.strength()));
      }
      if_rightisstring.End();
      result = Pop();
    }
  } else {
    result = BuildBinaryOperation(
        state.op(), left, right, left_type, right_type, result_type,
        state.fixed_right_arg(), allocation_mode, state.strength());
  }

  // If we encounter a generic argument, the number conversion is
  // observable, thus we cannot afford to bail out after the fact.
if (!state.HasSideEffects()) { result = EnforceNumberType(result, result_type); } return result; } Handle BinaryOpICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BinaryOpICState state = casted_stub()->state(); HValue* allocation_site = GetParameter( BinaryOpWithAllocationSiteStub::kAllocationSite); HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft); HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); HAllocationMode allocation_mode(allocation_site); return BuildBinaryOperation(state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } Handle BinaryOpWithAllocationSiteStub::GenerateCode() { return DoGenerateCode(this); } HValue* CodeStubGraphBuilderBase::BuildToString(HValue* input, bool convert) { if (!convert) return BuildCheckString(input); IfBuilder if_inputissmi(this); HValue* inputissmi = if_inputissmi.If(input); if_inputissmi.Then(); { // Convert the input smi to a string. Push(BuildNumberToString(input, Type::SignedSmall())); } if_inputissmi.Else(); { HValue* input_map = Add(input, inputissmi, HObjectAccess::ForMap()); HValue* input_instance_type = Add( input_map, inputissmi, HObjectAccess::ForMapInstanceType()); IfBuilder if_inputisstring(this); if_inputisstring.If( input_instance_type, Add(FIRST_NONSTRING_TYPE), Token::LT); if_inputisstring.Then(); { // The input is already a string. Push(input); } if_inputisstring.Else(); { // Convert to primitive first (if necessary), see // ES6 section 12.7.3 The Addition operator. IfBuilder if_inputisprimitive(this); STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE); if_inputisprimitive.If( input_instance_type, Add(LAST_PRIMITIVE_TYPE), Token::LTE); if_inputisprimitive.Then(); { // The input is already a primitive. 
Push(input); } if_inputisprimitive.Else(); { // Convert the input to a primitive. Push(BuildToPrimitive(input, input_map)); } if_inputisprimitive.End(); // Convert the primitive to a string value. ToStringDescriptor descriptor(isolate()); ToStringStub stub(isolate()); HValue* values[] = {context(), Pop()}; Push(AddUncasted( Add(stub.GetCode()), 0, descriptor, Vector(values, arraysize(values)))); } if_inputisstring.End(); } if_inputissmi.End(); return Pop(); } HValue* CodeStubGraphBuilderBase::BuildToPrimitive(HValue* input, HValue* input_map) { // Get the native context of the caller. HValue* native_context = BuildGetNativeContext(); // Determine the initial map of the %ObjectPrototype%. HValue* object_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::OBJECT_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the %StringPrototype%. HValue* string_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the String function. HValue* string_function = Add( native_context, nullptr, HObjectAccess::ForContextSlot(Context::STRING_FUNCTION_INDEX)); HValue* string_function_initial_map = Add( string_function, nullptr, HObjectAccess::ForPrototypeOrInitialMap()); // Determine the map of the [[Prototype]] of {input}. HValue* input_prototype = Add(input_map, nullptr, HObjectAccess::ForPrototype()); HValue* input_prototype_map = Add(input_prototype, nullptr, HObjectAccess::ForMap()); // For string wrappers (JSValue instances with [[StringData]] internal // fields), we can shortcirciut the ToPrimitive if // // (a) the {input} map matches the initial map of the String function, // (b) the {input} [[Prototype]] is the unmodified %StringPrototype% (i.e. // no one monkey-patched toString, @@toPrimitive or valueOf), and // (c) the %ObjectPrototype% (i.e. 
the [[Prototype]] of the // %StringPrototype%) is also unmodified, that is no one sneaked a // @@toPrimitive into the %ObjectPrototype%. // // If all these assumptions hold, we can just take the [[StringData]] value // and return it. // TODO(bmeurer): This just repairs a regression introduced by removing the // weird (and broken) intrinsic %_IsStringWrapperSafeForDefaultValue, which // was intendend to something similar to this, although less efficient and // wrong in the presence of @@toPrimitive. Long-term we might want to move // into the direction of having a ToPrimitiveStub that can do common cases // while staying in JavaScript land (i.e. not going to C++). IfBuilder if_inputisstringwrapper(this); if_inputisstringwrapper.If( input_map, string_function_initial_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( input_prototype_map, string_function_prototype_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( Add(Add(input_prototype_map, nullptr, HObjectAccess::ForPrototype()), nullptr, HObjectAccess::ForMap()), object_function_prototype_map); if_inputisstringwrapper.Then(); { Push(BuildLoadNamedField( input, FieldIndex::ForInObjectOffset(JSValue::kValueOffset))); } if_inputisstringwrapper.Else(); { // TODO(bmeurer): Add support for fast ToPrimitive conversion using // a dedicated ToPrimitiveStub. Add(input); Push(Add(Runtime::FunctionForId(Runtime::kToPrimitive), 1)); } if_inputisstringwrapper.End(); return Pop(); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StringAddStub* stub = casted_stub(); StringAddFlags flags = stub->flags(); PretenureFlag pretenure_flag = stub->pretenure_flag(); HValue* left = GetParameter(StringAddStub::kLeft); HValue* right = GetParameter(StringAddStub::kRight); // Make sure that both arguments are strings if not known in advance. 
if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) { left = BuildToString(left, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) { right = BuildToString(right, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } return BuildStringAdd(left, right, HAllocationMode(pretenure_flag)); } Handle StringAddStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { ToBooleanStub* stub = casted_stub(); IfBuilder if_true(this); if_true.If(GetParameter(0), stub->types()); if_true.Then(); if_true.Return(graph()->GetConstantTrue()); if_true.Else(); if_true.End(); return graph()->GetConstantFalse(); } Handle ToBooleanStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StoreGlobalStub* stub = casted_stub(); HParameter* value = GetParameter(StoreDescriptor::kValueIndex); if (stub->check_global()) { // Check that the map of the global has not changed: use a placeholder map // that will be replaced later with the global object's map. HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex); HValue* proxy_map = Add(proxy, nullptr, HObjectAccess::ForMap()); HValue* global = Add(proxy_map, nullptr, HObjectAccess::ForPrototype()); HValue* map_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::global_map_placeholder(isolate()))); HValue* expected_map = Add( map_cell, nullptr, HObjectAccess::ForWeakCellValue()); HValue* map = Add(global, nullptr, HObjectAccess::ForMap()); IfBuilder map_check(this); map_check.IfNot(expected_map, map); map_check.ThenDeopt(Deoptimizer::kUnknownMap); map_check.End(); } HValue* weak_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::property_cell_placeholder(isolate()))); HValue* cell = Add(weak_cell, nullptr, HObjectAccess::ForWeakCellValue()); Add
KeyedStoreSloppyArgumentsStub::GenerateCode() { return DoGenerateCode(this); } void CodeStubGraphBuilderBase::BuildStoreNamedField( HValue* object, HValue* value, FieldIndex index, Representation representation, bool transition_to_field) { DCHECK(!index.is_double() || representation.IsDouble()); int offset = index.offset(); HObjectAccess access = index.is_inobject() ? HObjectAccess::ForObservableJSObjectOffset(offset, representation) : HObjectAccess::ForBackingStoreOffset(offset, representation); if (representation.IsDouble()) { if (!FLAG_unbox_double_fields || !index.is_inobject()) { HObjectAccess heap_number_access = access.WithRepresentation(Representation::Tagged()); if (transition_to_field) { // The store requires a mutable HeapNumber to be allocated. NoObservableSideEffectsScope no_side_effects(this); HInstruction* heap_number_size = Add(HeapNumber::kSize); // TODO(hpayer): Allocation site pretenuring support. HInstruction* heap_number = Add(heap_number_size, HType::HeapObject(), NOT_TENURED, MUTABLE_HEAP_NUMBER_TYPE); AddStoreMapConstant(heap_number, isolate()->factory()->mutable_heap_number_map()); Add(heap_number, HObjectAccess::ForHeapNumberValue(), value); // Store the new mutable heap number into the object. access = heap_number_access; value = heap_number; } else { // Load the heap number. object = Add(object, nullptr, heap_number_access); // Store the double value into it. 
access = HObjectAccess::ForHeapNumberValue(); } } } else if (representation.IsHeapObject()) { BuildCheckHeapObject(value); } Add(object, access, value, INITIALIZING_STORE); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BuildStoreNamedField(GetParameter(0), GetParameter(2), casted_stub()->index(), casted_stub()->representation(), false); return GetParameter(2); } Handle StoreFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* object = GetParameter(StoreTransitionHelper::ReceiverIndex()); switch (casted_stub()->store_mode()) { case StoreTransitionStub::ExtendStorageAndStoreMapAndValue: { HValue* properties = Add( object, nullptr, HObjectAccess::ForPropertiesPointer()); HValue* length = AddLoadFixedArrayLength(properties); HValue* delta = Add(static_cast(JSObject::kFieldsAdded)); HValue* new_capacity = AddUncasted(length, delta); // Grow properties array. ElementsKind kind = FAST_ELEMENTS; Add(new_capacity, Add((Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) >> ElementsKindToShiftSize(kind))); // Reuse this code for properties backing store allocation. HValue* new_properties = BuildAllocateAndInitializeArray(kind, new_capacity); BuildCopyProperties(properties, new_properties, length, new_capacity); Add(object, HObjectAccess::ForPropertiesPointer(), new_properties); } // Fall through. case StoreTransitionStub::StoreMapAndValue: // Store the new value into the "extended" object. BuildStoreNamedField( object, GetParameter(StoreTransitionHelper::ValueIndex()), casted_stub()->index(), casted_stub()->representation(), true); // Fall through. case StoreTransitionStub::StoreMapOnly: // And finally update the map. 
Add(object, HObjectAccess::ForMap(), GetParameter(StoreTransitionHelper::MapIndex())); break; } return GetParameter(StoreTransitionHelper::ValueIndex()); } Handle StoreTransitionStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BuildUncheckedMonomorphicElementAccess( GetParameter(StoreDescriptor::kReceiverIndex), GetParameter(StoreDescriptor::kNameIndex), GetParameter(StoreDescriptor::kValueIndex), casted_stub()->is_js_array(), casted_stub()->elements_kind(), STORE, NEVER_RETURN_HOLE, casted_stub()->store_mode()); return GetParameter(2); } Handle StoreFastElementStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { info()->MarkAsSavesCallerDoubles(); BuildTransitionElementsKind(GetParameter(0), GetParameter(1), casted_stub()->from_kind(), casted_stub()->to_kind(), casted_stub()->is_js_array()); return GetParameter(0); } Handle TransitionElementsKindStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(Add(HeapNumber::kSize), HType::HeapNumber(), NOT_TENURED, HEAP_NUMBER_TYPE); AddStoreMapConstant(result, isolate()->factory()->heap_number_map()); return result; } Handle AllocateHeapNumberStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(Add(HeapNumber::kSize), HType::HeapObject(), NOT_TENURED, MUTABLE_HEAP_NUMBER_TYPE); AddStoreMapConstant(result, isolate()->factory()->mutable_heap_number_map()); return result; } Handle AllocateMutableHeapNumberStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(GetParameter(0), HType::Tagged(), NOT_TENURED, JS_OBJECT_TYPE); return result; } Handle AllocateInNewSpaceStub::GenerateCode() { return DoGenerateCode(this); } HValue* 
CodeStubGraphBuilderBase::BuildArrayConstructor( ElementsKind kind, AllocationSiteOverrideMode override_mode, ArgumentClass argument_class) { HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor); HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite); JSArrayBuilder array_builder(this, kind, alloc_site, constructor, override_mode); HValue* result = NULL; switch (argument_class) { case NONE: // This stub is very performance sensitive, the generated code must be // tuned so that it doesn't build and eager frame. info()->MarkMustNotHaveEagerFrame(); result = array_builder.AllocateEmptyArray(); break; case SINGLE: result = BuildArraySingleArgumentConstructor(&array_builder); break; case MULTIPLE: result = BuildArrayNArgumentsConstructor(&array_builder, kind); break; } return result; } HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor( ElementsKind kind, ArgumentClass argument_class) { HValue* constructor = GetParameter( InternalArrayConstructorStubBase::kConstructor); JSArrayBuilder array_builder(this, kind, constructor); HValue* result = NULL; switch (argument_class) { case NONE: // This stub is very performance sensitive, the generated code must be // tuned so that it doesn't build and eager frame. info()->MarkMustNotHaveEagerFrame(); result = array_builder.AllocateEmptyArray(); break; case SINGLE: result = BuildArraySingleArgumentConstructor(&array_builder); break; case MULTIPLE: result = BuildArrayNArgumentsConstructor(&array_builder, kind); break; } return result; } HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor( JSArrayBuilder* array_builder) { // Smi check and range check on the input arg. 
HValue* constant_one = graph()->GetConstant1(); HValue* constant_zero = graph()->GetConstant0(); HInstruction* elements = Add(false); HInstruction* argument = Add( elements, constant_one, constant_zero); return BuildAllocateArrayFromLength(array_builder, argument); } HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor( JSArrayBuilder* array_builder, ElementsKind kind) { // Insert a bounds check because the number of arguments might exceed // the kInitialMaxFastElementArray limit. This cannot happen for code // that was parsed, but calling via Array.apply(thisArg, [...]) might // trigger it. HValue* length = GetArgumentsLength(); HConstant* max_alloc_length = Add(JSArray::kInitialMaxFastElementArray); HValue* checked_length = Add(length, max_alloc_length); // We need to fill with the hole if it's a smi array in the multi-argument // case because we might have to bail out while copying arguments into // the array because they aren't compatible with a smi array. // If it's a double array, no problem, and if it's fast then no // problem either because doubles are boxed. // // TODO(mvstanton): consider an instruction to memset fill the array // with zero in this case instead. JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind) ? JSArrayBuilder::FILL_WITH_HOLE : JSArrayBuilder::DONT_FILL_WITH_HOLE; HValue* new_object = array_builder->AllocateArray(checked_length, max_alloc_length, checked_length, fill_mode); HValue* elements = array_builder->GetElementsLocation(); DCHECK(elements != NULL); // Now populate the elements correctly. 
LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement); HValue* start = graph()->GetConstant0(); HValue* key = builder.BeginBody(start, checked_length, Token::LT); HInstruction* argument_elements = Add(false); HInstruction* argument = Add( argument_elements, checked_length, key); Add(elements, key, argument, nullptr, kind); builder.EndBody(); return new_object; } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, NONE); } Handle ArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, SINGLE); } Handle ArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, MULTIPLE); } Handle ArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, NONE); } Handle InternalArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, SINGLE); } Handle InternalArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = 
casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, MULTIPLE); } Handle InternalArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { Isolate* isolate = graph()->isolate(); CompareNilICStub* stub = casted_stub(); HIfContinuation continuation; Handle sentinel_map(isolate->heap()->meta_map()); Type* type = stub->GetType(zone(), sentinel_map); BuildCompareNil(GetParameter(0), type, &continuation, kEmbedMapsViaWeakCells); IfBuilder if_nil(this, &continuation); if_nil.Then(); if (continuation.IsFalseReachable()) { if_nil.Else(); if_nil.Return(graph()->GetConstantFalse()); } if_nil.End(); return continuation.IsTrueReachable() ? graph()->GetConstantTrue() : graph()->GetConstantUndefined(); } Handle CompareNilICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { BinaryOpICState state = casted_stub()->state(); HValue* left = GetParameter(BinaryOpICStub::kLeft); HValue* right = GetParameter(BinaryOpICStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) && (state.HasSideEffects() || !result_type->Is(Type::None()))); HValue* result = NULL; HAllocationMode allocation_mode(NOT_TENURED); if (state.op() == Token::ADD && (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) && !left_type->Is(Type::String()) && !right_type->Is(Type::String())) { // For the generic add stub a fast case for string addition is performance // critical. 
if (left_type->Maybe(Type::String())) { IfBuilder if_leftisstring(this); if_leftisstring.If(left); if_leftisstring.Then(); { Push(BuildBinaryOperation(state.op(), left, right, Type::String(zone()), right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_leftisstring.Else(); { Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_leftisstring.End(); result = Pop(); } else { IfBuilder if_rightisstring(this); if_rightisstring.If(right); if_rightisstring.Then(); { Push(BuildBinaryOperation(state.op(), left, right, left_type, Type::String(zone()), result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_rightisstring.Else(); { Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_rightisstring.End(); result = Pop(); } } else { result = BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } // If we encounter a generic argument, the number conversion is // observable, thus we cannot afford to bail out after the fact. 
if (!state.HasSideEffects()) { result = EnforceNumberType(result, result_type); } return result; } Handle BinaryOpICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BinaryOpICState state = casted_stub()->state(); HValue* allocation_site = GetParameter( BinaryOpWithAllocationSiteStub::kAllocationSite); HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft); HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); HAllocationMode allocation_mode(allocation_site); return BuildBinaryOperation(state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } Handle BinaryOpWithAllocationSiteStub::GenerateCode() { return DoGenerateCode(this); } HValue* CodeStubGraphBuilderBase::BuildToString(HValue* input, bool convert) { if (!convert) return BuildCheckString(input); IfBuilder if_inputissmi(this); HValue* inputissmi = if_inputissmi.If(input); if_inputissmi.Then(); { // Convert the input smi to a string. Push(BuildNumberToString(input, Type::SignedSmall())); } if_inputissmi.Else(); { HValue* input_map = Add(input, inputissmi, HObjectAccess::ForMap()); HValue* input_instance_type = Add( input_map, inputissmi, HObjectAccess::ForMapInstanceType()); IfBuilder if_inputisstring(this); if_inputisstring.If( input_instance_type, Add(FIRST_NONSTRING_TYPE), Token::LT); if_inputisstring.Then(); { // The input is already a string. Push(input); } if_inputisstring.Else(); { // Convert to primitive first (if necessary), see // ES6 section 12.7.3 The Addition operator. IfBuilder if_inputisprimitive(this); STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE); if_inputisprimitive.If( input_instance_type, Add(LAST_PRIMITIVE_TYPE), Token::LTE); if_inputisprimitive.Then(); { // The input is already a primitive. 
Push(input); } if_inputisprimitive.Else(); { // Convert the input to a primitive. Push(BuildToPrimitive(input, input_map)); } if_inputisprimitive.End(); // Convert the primitive to a string value. ToStringDescriptor descriptor(isolate()); ToStringStub stub(isolate()); HValue* values[] = {context(), Pop()}; Push(AddUncasted( Add(stub.GetCode()), 0, descriptor, Vector(values, arraysize(values)))); } if_inputisstring.End(); } if_inputissmi.End(); return Pop(); } HValue* CodeStubGraphBuilderBase::BuildToPrimitive(HValue* input, HValue* input_map) { // Get the native context of the caller. HValue* native_context = BuildGetNativeContext(); // Determine the initial map of the %ObjectPrototype%. HValue* object_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::OBJECT_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the %StringPrototype%. HValue* string_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the String function. HValue* string_function = Add( native_context, nullptr, HObjectAccess::ForContextSlot(Context::STRING_FUNCTION_INDEX)); HValue* string_function_initial_map = Add( string_function, nullptr, HObjectAccess::ForPrototypeOrInitialMap()); // Determine the map of the [[Prototype]] of {input}. HValue* input_prototype = Add(input_map, nullptr, HObjectAccess::ForPrototype()); HValue* input_prototype_map = Add(input_prototype, nullptr, HObjectAccess::ForMap()); // For string wrappers (JSValue instances with [[StringData]] internal // fields), we can shortcirciut the ToPrimitive if // // (a) the {input} map matches the initial map of the String function, // (b) the {input} [[Prototype]] is the unmodified %StringPrototype% (i.e. // no one monkey-patched toString, @@toPrimitive or valueOf), and // (c) the %ObjectPrototype% (i.e. 
the [[Prototype]] of the // %StringPrototype%) is also unmodified, that is no one sneaked a // @@toPrimitive into the %ObjectPrototype%. // // If all these assumptions hold, we can just take the [[StringData]] value // and return it. // TODO(bmeurer): This just repairs a regression introduced by removing the // weird (and broken) intrinsic %_IsStringWrapperSafeForDefaultValue, which // was intendend to something similar to this, although less efficient and // wrong in the presence of @@toPrimitive. Long-term we might want to move // into the direction of having a ToPrimitiveStub that can do common cases // while staying in JavaScript land (i.e. not going to C++). IfBuilder if_inputisstringwrapper(this); if_inputisstringwrapper.If( input_map, string_function_initial_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( input_prototype_map, string_function_prototype_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( Add(Add(input_prototype_map, nullptr, HObjectAccess::ForPrototype()), nullptr, HObjectAccess::ForMap()), object_function_prototype_map); if_inputisstringwrapper.Then(); { Push(BuildLoadNamedField( input, FieldIndex::ForInObjectOffset(JSValue::kValueOffset))); } if_inputisstringwrapper.Else(); { // TODO(bmeurer): Add support for fast ToPrimitive conversion using // a dedicated ToPrimitiveStub. Add(input); Push(Add(Runtime::FunctionForId(Runtime::kToPrimitive), 1)); } if_inputisstringwrapper.End(); return Pop(); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StringAddStub* stub = casted_stub(); StringAddFlags flags = stub->flags(); PretenureFlag pretenure_flag = stub->pretenure_flag(); HValue* left = GetParameter(StringAddStub::kLeft); HValue* right = GetParameter(StringAddStub::kRight); // Make sure that both arguments are strings if not known in advance. 
if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) { left = BuildToString(left, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) { right = BuildToString(right, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } return BuildStringAdd(left, right, HAllocationMode(pretenure_flag)); } Handle StringAddStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { ToBooleanStub* stub = casted_stub(); IfBuilder if_true(this); if_true.If(GetParameter(0), stub->types()); if_true.Then(); if_true.Return(graph()->GetConstantTrue()); if_true.Else(); if_true.End(); return graph()->GetConstantFalse(); } Handle ToBooleanStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StoreGlobalStub* stub = casted_stub(); HParameter* value = GetParameter(StoreDescriptor::kValueIndex); if (stub->check_global()) { // Check that the map of the global has not changed: use a placeholder map // that will be replaced later with the global object's map. HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex); HValue* proxy_map = Add(proxy, nullptr, HObjectAccess::ForMap()); HValue* global = Add(proxy_map, nullptr, HObjectAccess::ForPrototype()); HValue* map_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::global_map_placeholder(isolate()))); HValue* expected_map = Add( map_cell, nullptr, HObjectAccess::ForWeakCellValue()); HValue* map = Add(global, nullptr, HObjectAccess::ForMap()); IfBuilder map_check(this); map_check.IfNot(expected_map, map); map_check.ThenDeopt(Deoptimizer::kUnknownMap); map_check.End(); } HValue* weak_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::property_cell_placeholder(isolate()))); HValue* cell = Add(weak_cell, nullptr, HObjectAccess::ForWeakCellValue()); Add
StoreFieldStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* object = GetParameter(StoreTransitionHelper::ReceiverIndex()); switch (casted_stub()->store_mode()) { case StoreTransitionStub::ExtendStorageAndStoreMapAndValue: { HValue* properties = Add( object, nullptr, HObjectAccess::ForPropertiesPointer()); HValue* length = AddLoadFixedArrayLength(properties); HValue* delta = Add(static_cast(JSObject::kFieldsAdded)); HValue* new_capacity = AddUncasted(length, delta); // Grow properties array. ElementsKind kind = FAST_ELEMENTS; Add(new_capacity, Add((Page::kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) >> ElementsKindToShiftSize(kind))); // Reuse this code for properties backing store allocation. HValue* new_properties = BuildAllocateAndInitializeArray(kind, new_capacity); BuildCopyProperties(properties, new_properties, length, new_capacity); Add(object, HObjectAccess::ForPropertiesPointer(), new_properties); } // Fall through. case StoreTransitionStub::StoreMapAndValue: // Store the new value into the "extended" object. BuildStoreNamedField( object, GetParameter(StoreTransitionHelper::ValueIndex()), casted_stub()->index(), casted_stub()->representation(), true); // Fall through. case StoreTransitionStub::StoreMapOnly: // And finally update the map. 
Add(object, HObjectAccess::ForMap(), GetParameter(StoreTransitionHelper::MapIndex())); break; } return GetParameter(StoreTransitionHelper::ValueIndex()); } Handle StoreTransitionStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BuildUncheckedMonomorphicElementAccess( GetParameter(StoreDescriptor::kReceiverIndex), GetParameter(StoreDescriptor::kNameIndex), GetParameter(StoreDescriptor::kValueIndex), casted_stub()->is_js_array(), casted_stub()->elements_kind(), STORE, NEVER_RETURN_HOLE, casted_stub()->store_mode()); return GetParameter(2); } Handle StoreFastElementStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { info()->MarkAsSavesCallerDoubles(); BuildTransitionElementsKind(GetParameter(0), GetParameter(1), casted_stub()->from_kind(), casted_stub()->to_kind(), casted_stub()->is_js_array()); return GetParameter(0); } Handle TransitionElementsKindStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(Add(HeapNumber::kSize), HType::HeapNumber(), NOT_TENURED, HEAP_NUMBER_TYPE); AddStoreMapConstant(result, isolate()->factory()->heap_number_map()); return result; } Handle AllocateHeapNumberStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(Add(HeapNumber::kSize), HType::HeapObject(), NOT_TENURED, MUTABLE_HEAP_NUMBER_TYPE); AddStoreMapConstant(result, isolate()->factory()->mutable_heap_number_map()); return result; } Handle AllocateMutableHeapNumberStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(GetParameter(0), HType::Tagged(), NOT_TENURED, JS_OBJECT_TYPE); return result; } Handle AllocateInNewSpaceStub::GenerateCode() { return DoGenerateCode(this); } HValue* 
CodeStubGraphBuilderBase::BuildArrayConstructor( ElementsKind kind, AllocationSiteOverrideMode override_mode, ArgumentClass argument_class) { HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor); HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite); JSArrayBuilder array_builder(this, kind, alloc_site, constructor, override_mode); HValue* result = NULL; switch (argument_class) { case NONE: // This stub is very performance sensitive, the generated code must be // tuned so that it doesn't build and eager frame. info()->MarkMustNotHaveEagerFrame(); result = array_builder.AllocateEmptyArray(); break; case SINGLE: result = BuildArraySingleArgumentConstructor(&array_builder); break; case MULTIPLE: result = BuildArrayNArgumentsConstructor(&array_builder, kind); break; } return result; } HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor( ElementsKind kind, ArgumentClass argument_class) { HValue* constructor = GetParameter( InternalArrayConstructorStubBase::kConstructor); JSArrayBuilder array_builder(this, kind, constructor); HValue* result = NULL; switch (argument_class) { case NONE: // This stub is very performance sensitive, the generated code must be // tuned so that it doesn't build and eager frame. info()->MarkMustNotHaveEagerFrame(); result = array_builder.AllocateEmptyArray(); break; case SINGLE: result = BuildArraySingleArgumentConstructor(&array_builder); break; case MULTIPLE: result = BuildArrayNArgumentsConstructor(&array_builder, kind); break; } return result; } HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor( JSArrayBuilder* array_builder) { // Smi check and range check on the input arg. 
HValue* constant_one = graph()->GetConstant1(); HValue* constant_zero = graph()->GetConstant0(); HInstruction* elements = Add(false); HInstruction* argument = Add( elements, constant_one, constant_zero); return BuildAllocateArrayFromLength(array_builder, argument); } HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor( JSArrayBuilder* array_builder, ElementsKind kind) { // Insert a bounds check because the number of arguments might exceed // the kInitialMaxFastElementArray limit. This cannot happen for code // that was parsed, but calling via Array.apply(thisArg, [...]) might // trigger it. HValue* length = GetArgumentsLength(); HConstant* max_alloc_length = Add(JSArray::kInitialMaxFastElementArray); HValue* checked_length = Add(length, max_alloc_length); // We need to fill with the hole if it's a smi array in the multi-argument // case because we might have to bail out while copying arguments into // the array because they aren't compatible with a smi array. // If it's a double array, no problem, and if it's fast then no // problem either because doubles are boxed. // // TODO(mvstanton): consider an instruction to memset fill the array // with zero in this case instead. JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind) ? JSArrayBuilder::FILL_WITH_HOLE : JSArrayBuilder::DONT_FILL_WITH_HOLE; HValue* new_object = array_builder->AllocateArray(checked_length, max_alloc_length, checked_length, fill_mode); HValue* elements = array_builder->GetElementsLocation(); DCHECK(elements != NULL); // Now populate the elements correctly. 
LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement); HValue* start = graph()->GetConstant0(); HValue* key = builder.BeginBody(start, checked_length, Token::LT); HInstruction* argument_elements = Add(false); HInstruction* argument = Add( argument_elements, checked_length, key); Add(elements, key, argument, nullptr, kind); builder.EndBody(); return new_object; } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, NONE); } Handle ArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, SINGLE); } Handle ArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, MULTIPLE); } Handle ArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, NONE); } Handle InternalArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, SINGLE); } Handle InternalArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = 
casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, MULTIPLE); } Handle InternalArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { Isolate* isolate = graph()->isolate(); CompareNilICStub* stub = casted_stub(); HIfContinuation continuation; Handle sentinel_map(isolate->heap()->meta_map()); Type* type = stub->GetType(zone(), sentinel_map); BuildCompareNil(GetParameter(0), type, &continuation, kEmbedMapsViaWeakCells); IfBuilder if_nil(this, &continuation); if_nil.Then(); if (continuation.IsFalseReachable()) { if_nil.Else(); if_nil.Return(graph()->GetConstantFalse()); } if_nil.End(); return continuation.IsTrueReachable() ? graph()->GetConstantTrue() : graph()->GetConstantUndefined(); } Handle CompareNilICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { BinaryOpICState state = casted_stub()->state(); HValue* left = GetParameter(BinaryOpICStub::kLeft); HValue* right = GetParameter(BinaryOpICStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) && (state.HasSideEffects() || !result_type->Is(Type::None()))); HValue* result = NULL; HAllocationMode allocation_mode(NOT_TENURED); if (state.op() == Token::ADD && (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) && !left_type->Is(Type::String()) && !right_type->Is(Type::String())) { // For the generic add stub a fast case for string addition is performance // critical. 
if (left_type->Maybe(Type::String())) { IfBuilder if_leftisstring(this); if_leftisstring.If(left); if_leftisstring.Then(); { Push(BuildBinaryOperation(state.op(), left, right, Type::String(zone()), right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_leftisstring.Else(); { Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_leftisstring.End(); result = Pop(); } else { IfBuilder if_rightisstring(this); if_rightisstring.If(right); if_rightisstring.Then(); { Push(BuildBinaryOperation(state.op(), left, right, left_type, Type::String(zone()), result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_rightisstring.Else(); { Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_rightisstring.End(); result = Pop(); } } else { result = BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } // If we encounter a generic argument, the number conversion is // observable, thus we cannot afford to bail out after the fact. 
if (!state.HasSideEffects()) { result = EnforceNumberType(result, result_type); } return result; } Handle BinaryOpICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BinaryOpICState state = casted_stub()->state(); HValue* allocation_site = GetParameter( BinaryOpWithAllocationSiteStub::kAllocationSite); HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft); HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); HAllocationMode allocation_mode(allocation_site); return BuildBinaryOperation(state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } Handle BinaryOpWithAllocationSiteStub::GenerateCode() { return DoGenerateCode(this); } HValue* CodeStubGraphBuilderBase::BuildToString(HValue* input, bool convert) { if (!convert) return BuildCheckString(input); IfBuilder if_inputissmi(this); HValue* inputissmi = if_inputissmi.If(input); if_inputissmi.Then(); { // Convert the input smi to a string. Push(BuildNumberToString(input, Type::SignedSmall())); } if_inputissmi.Else(); { HValue* input_map = Add(input, inputissmi, HObjectAccess::ForMap()); HValue* input_instance_type = Add( input_map, inputissmi, HObjectAccess::ForMapInstanceType()); IfBuilder if_inputisstring(this); if_inputisstring.If( input_instance_type, Add(FIRST_NONSTRING_TYPE), Token::LT); if_inputisstring.Then(); { // The input is already a string. Push(input); } if_inputisstring.Else(); { // Convert to primitive first (if necessary), see // ES6 section 12.7.3 The Addition operator. IfBuilder if_inputisprimitive(this); STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE); if_inputisprimitive.If( input_instance_type, Add(LAST_PRIMITIVE_TYPE), Token::LTE); if_inputisprimitive.Then(); { // The input is already a primitive. 
Push(input); } if_inputisprimitive.Else(); { // Convert the input to a primitive. Push(BuildToPrimitive(input, input_map)); } if_inputisprimitive.End(); // Convert the primitive to a string value. ToStringDescriptor descriptor(isolate()); ToStringStub stub(isolate()); HValue* values[] = {context(), Pop()}; Push(AddUncasted( Add(stub.GetCode()), 0, descriptor, Vector(values, arraysize(values)))); } if_inputisstring.End(); } if_inputissmi.End(); return Pop(); } HValue* CodeStubGraphBuilderBase::BuildToPrimitive(HValue* input, HValue* input_map) { // Get the native context of the caller. HValue* native_context = BuildGetNativeContext(); // Determine the initial map of the %ObjectPrototype%. HValue* object_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::OBJECT_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the %StringPrototype%. HValue* string_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the String function. HValue* string_function = Add( native_context, nullptr, HObjectAccess::ForContextSlot(Context::STRING_FUNCTION_INDEX)); HValue* string_function_initial_map = Add( string_function, nullptr, HObjectAccess::ForPrototypeOrInitialMap()); // Determine the map of the [[Prototype]] of {input}. HValue* input_prototype = Add(input_map, nullptr, HObjectAccess::ForPrototype()); HValue* input_prototype_map = Add(input_prototype, nullptr, HObjectAccess::ForMap()); // For string wrappers (JSValue instances with [[StringData]] internal // fields), we can shortcirciut the ToPrimitive if // // (a) the {input} map matches the initial map of the String function, // (b) the {input} [[Prototype]] is the unmodified %StringPrototype% (i.e. // no one monkey-patched toString, @@toPrimitive or valueOf), and // (c) the %ObjectPrototype% (i.e. 
the [[Prototype]] of the // %StringPrototype%) is also unmodified, that is no one sneaked a // @@toPrimitive into the %ObjectPrototype%. // // If all these assumptions hold, we can just take the [[StringData]] value // and return it. // TODO(bmeurer): This just repairs a regression introduced by removing the // weird (and broken) intrinsic %_IsStringWrapperSafeForDefaultValue, which // was intendend to something similar to this, although less efficient and // wrong in the presence of @@toPrimitive. Long-term we might want to move // into the direction of having a ToPrimitiveStub that can do common cases // while staying in JavaScript land (i.e. not going to C++). IfBuilder if_inputisstringwrapper(this); if_inputisstringwrapper.If( input_map, string_function_initial_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( input_prototype_map, string_function_prototype_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( Add(Add(input_prototype_map, nullptr, HObjectAccess::ForPrototype()), nullptr, HObjectAccess::ForMap()), object_function_prototype_map); if_inputisstringwrapper.Then(); { Push(BuildLoadNamedField( input, FieldIndex::ForInObjectOffset(JSValue::kValueOffset))); } if_inputisstringwrapper.Else(); { // TODO(bmeurer): Add support for fast ToPrimitive conversion using // a dedicated ToPrimitiveStub. Add(input); Push(Add(Runtime::FunctionForId(Runtime::kToPrimitive), 1)); } if_inputisstringwrapper.End(); return Pop(); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StringAddStub* stub = casted_stub(); StringAddFlags flags = stub->flags(); PretenureFlag pretenure_flag = stub->pretenure_flag(); HValue* left = GetParameter(StringAddStub::kLeft); HValue* right = GetParameter(StringAddStub::kRight); // Make sure that both arguments are strings if not known in advance. 
if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) { left = BuildToString(left, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) { right = BuildToString(right, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } return BuildStringAdd(left, right, HAllocationMode(pretenure_flag)); } Handle StringAddStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { ToBooleanStub* stub = casted_stub(); IfBuilder if_true(this); if_true.If(GetParameter(0), stub->types()); if_true.Then(); if_true.Return(graph()->GetConstantTrue()); if_true.Else(); if_true.End(); return graph()->GetConstantFalse(); } Handle ToBooleanStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StoreGlobalStub* stub = casted_stub(); HParameter* value = GetParameter(StoreDescriptor::kValueIndex); if (stub->check_global()) { // Check that the map of the global has not changed: use a placeholder map // that will be replaced later with the global object's map. HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex); HValue* proxy_map = Add(proxy, nullptr, HObjectAccess::ForMap()); HValue* global = Add(proxy_map, nullptr, HObjectAccess::ForPrototype()); HValue* map_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::global_map_placeholder(isolate()))); HValue* expected_map = Add( map_cell, nullptr, HObjectAccess::ForWeakCellValue()); HValue* map = Add(global, nullptr, HObjectAccess::ForMap()); IfBuilder map_check(this); map_check.IfNot(expected_map, map); map_check.ThenDeopt(Deoptimizer::kUnknownMap); map_check.End(); } HValue* weak_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::property_cell_placeholder(isolate()))); HValue* cell = Add(weak_cell, nullptr, HObjectAccess::ForWeakCellValue()); Add
// ---------------------------------------------------------------------------
// NOTE(review): this region was recovered from a corrupted extraction that
// stripped every angle-bracketed token (template-argument lists, Handle<...>,
// Vector<...>).  The template arguments below are restored to match the
// upstream V8 Hydrogen code-stub builders; verify against the canonical
// sources before shipping.
// ---------------------------------------------------------------------------

Handle<Code> StoreTransitionStub::GenerateCode() {
  return DoGenerateCode(this);
}


// Builds the Hydrogen graph for a monomorphic fast-elements store.
template <>
HValue* CodeStubGraphBuilder<StoreFastElementStub>::BuildCodeStub() {
  BuildUncheckedMonomorphicElementAccess(
      GetParameter(StoreDescriptor::kReceiverIndex),
      GetParameter(StoreDescriptor::kNameIndex),
      GetParameter(StoreDescriptor::kValueIndex), casted_stub()->is_js_array(),
      casted_stub()->elements_kind(), STORE, NEVER_RETURN_HOLE,
      casted_stub()->store_mode());

  // A store stub returns the stored value.
  return GetParameter(2);
}


Handle<Code> StoreFastElementStub::GenerateCode() {
  return DoGenerateCode(this);
}


// Transitions the elements kind of a (JS)Array in place.
template <>
HValue* CodeStubGraphBuilder<TransitionElementsKindStub>::BuildCodeStub() {
  info()->MarkAsSavesCallerDoubles();

  BuildTransitionElementsKind(GetParameter(0), GetParameter(1),
                              casted_stub()->from_kind(),
                              casted_stub()->to_kind(),
                              casted_stub()->is_js_array());

  return GetParameter(0);
}


Handle<Code> TransitionElementsKindStub::GenerateCode() {
  return DoGenerateCode(this);
}


// Allocates a fresh (immutable) HeapNumber in new space.
template <>
HValue* CodeStubGraphBuilder<AllocateHeapNumberStub>::BuildCodeStub() {
  HValue* result =
      Add<HAllocate>(Add<HConstant>(HeapNumber::kSize), HType::HeapNumber(),
                     NOT_TENURED, HEAP_NUMBER_TYPE);
  AddStoreMapConstant(result, isolate()->factory()->heap_number_map());
  return result;
}


Handle<Code> AllocateHeapNumberStub::GenerateCode() {
  return DoGenerateCode(this);
}


// Allocates a mutable HeapNumber (used for in-object double fields).
template <>
HValue* CodeStubGraphBuilder<AllocateMutableHeapNumberStub>::BuildCodeStub() {
  HValue* result =
      Add<HAllocate>(Add<HConstant>(HeapNumber::kSize), HType::HeapObject(),
                     NOT_TENURED, MUTABLE_HEAP_NUMBER_TYPE);
  AddStoreMapConstant(result, isolate()->factory()->mutable_heap_number_map());
  return result;
}


Handle<Code> AllocateMutableHeapNumberStub::GenerateCode() {
  return DoGenerateCode(this);
}


// Raw new-space allocation of the requested size (parameter 0).
template <>
HValue* CodeStubGraphBuilder<AllocateInNewSpaceStub>::BuildCodeStub() {
  HValue* result = Add<HAllocate>(GetParameter(0), HType::Tagged(), NOT_TENURED,
                                  JS_OBJECT_TYPE);
  return result;
}


Handle<Code> AllocateInNewSpaceStub::GenerateCode() {
  return DoGenerateCode(this);
}


// Shared driver for the Array constructor stubs; dispatches on the number
// of arguments the stub was specialized for.
HValue* CodeStubGraphBuilderBase::BuildArrayConstructor(
    ElementsKind kind, AllocationSiteOverrideMode override_mode,
    ArgumentClass argument_class) {
  HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor);
  HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite);
  JSArrayBuilder array_builder(this, kind, alloc_site, constructor,
                               override_mode);
  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      // This stub is very performance sensitive, the generated code must be
      // tuned so that it doesn't build and eager frame.
      info()->MarkMustNotHaveEagerFrame();
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }

  return result;
}


// Same as BuildArrayConstructor but for the InternalArray variants, which
// carry no allocation site.
HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor(
    ElementsKind kind, ArgumentClass argument_class) {
  HValue* constructor = GetParameter(
      InternalArrayConstructorStubBase::kConstructor);
  JSArrayBuilder array_builder(this, kind, constructor);

  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      // This stub is very performance sensitive, the generated code must be
      // tuned so that it doesn't build and eager frame.
      info()->MarkMustNotHaveEagerFrame();
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }
  return result;
}


HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor(
    JSArrayBuilder* array_builder) {
  // Smi check and range check on the input arg.
  HValue* constant_one = graph()->GetConstant1();
  HValue* constant_zero = graph()->GetConstant0();

  HInstruction* elements = Add<HArgumentsElements>(false);
  HInstruction* argument = Add<HAccessArgumentsAt>(
      elements, constant_one, constant_zero);

  return BuildAllocateArrayFromLength(array_builder, argument);
}


HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor(
    JSArrayBuilder* array_builder, ElementsKind kind) {
  // Insert a bounds check because the number of arguments might exceed
  // the kInitialMaxFastElementArray limit. This cannot happen for code
  // that was parsed, but calling via Array.apply(thisArg, [...]) might
  // trigger it.
  HValue* length = GetArgumentsLength();
  HConstant* max_alloc_length =
      Add<HConstant>(JSArray::kInitialMaxFastElementArray);
  HValue* checked_length = Add<HBoundsCheck>(length, max_alloc_length);

  // We need to fill with the hole if it's a smi array in the multi-argument
  // case because we might have to bail out while copying arguments into
  // the array because they aren't compatible with a smi array.
  // If it's a double array, no problem, and if it's fast then no
  // problem either because doubles are boxed.
  //
  // TODO(mvstanton): consider an instruction to memset fill the array
  // with zero in this case instead.
  JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind)
                                           ? JSArrayBuilder::FILL_WITH_HOLE
                                           : JSArrayBuilder::DONT_FILL_WITH_HOLE;

  HValue* new_object = array_builder->AllocateArray(checked_length,
                                                    max_alloc_length,
                                                    checked_length,
                                                    fill_mode);
  HValue* elements = array_builder->GetElementsLocation();
  DCHECK(elements != NULL);

  // Now populate the elements correctly.
  LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
  HValue* start = graph()->GetConstant0();
  HValue* key = builder.BeginBody(start, checked_length, Token::LT);
  HInstruction* argument_elements = Add<HArgumentsElements>(false);
  HInstruction* argument = Add<HAccessArgumentsAt>(
      argument_elements, checked_length, key);

  Add<HStoreKeyed>(elements, key, argument, nullptr, kind);
  builder.EndBody();
  return new_object;
}


template <>
HValue* CodeStubGraphBuilder<ArrayNoArgumentConstructorStub>::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, NONE);
}


Handle<Code> ArrayNoArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<ArraySingleArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, SINGLE);
}


Handle<Code> ArraySingleArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<ArrayNArgumentsConstructorStub>::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, MULTIPLE);
}


Handle<Code> ArrayNArgumentsConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArrayNoArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, NONE);
}


Handle<Code> InternalArrayNoArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArraySingleArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, SINGLE);
}


Handle<Code> InternalArraySingleArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArrayNArgumentsConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, MULTIPLE);
}


Handle<Code> InternalArrayNArgumentsConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


// Compares the (single) parameter against null/undefined according to the
// stub's recorded type feedback.
template <>
HValue* CodeStubGraphBuilder<CompareNilICStub>::BuildCodeInitializedStub() {
  Isolate* isolate = graph()->isolate();
  CompareNilICStub* stub = casted_stub();
  HIfContinuation continuation;
  Handle<Map> sentinel_map(isolate->heap()->meta_map());
  Type* type = stub->GetType(zone(), sentinel_map);
  BuildCompareNil(GetParameter(0), type, &continuation, kEmbedMapsViaWeakCells);
  IfBuilder if_nil(this, &continuation);
  if_nil.Then();
  if (continuation.IsFalseReachable()) {
    if_nil.Else();
    if_nil.Return(graph()->GetConstantFalse());
  }
  if_nil.End();
  return continuation.IsTrueReachable() ? graph()->GetConstantTrue()
                                        : graph()->GetConstantUndefined();
}


Handle<Code> CompareNilICStub::GenerateCode() { return DoGenerateCode(this); }


// Generic binary operation with type feedback; string addition gets a
// dedicated fast path because it is performance critical.
template <>
HValue* CodeStubGraphBuilder<BinaryOpICStub>::BuildCodeInitializedStub() {
  BinaryOpICState state = casted_stub()->state();

  HValue* left = GetParameter(BinaryOpICStub::kLeft);
  HValue* right = GetParameter(BinaryOpICStub::kRight);

  Type* left_type = state.GetLeftType();
  Type* right_type = state.GetRightType();
  Type* result_type = state.GetResultType();

  DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) &&
         (state.HasSideEffects() || !result_type->Is(Type::None())));

  HValue* result = NULL;
  HAllocationMode allocation_mode(NOT_TENURED);
  if (state.op() == Token::ADD &&
      (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) &&
      !left_type->Is(Type::String()) && !right_type->Is(Type::String())) {
    // For the generic add stub a fast case for string addition is performance
    // critical.
    if (left_type->Maybe(Type::String())) {
      IfBuilder if_leftisstring(this);
      if_leftisstring.If<HIsStringAndBranch>(left);
      if_leftisstring.Then();
      {
        Push(BuildBinaryOperation(state.op(), left, right, Type::String(zone()),
                                  right_type, result_type,
                                  state.fixed_right_arg(), allocation_mode,
                                  state.strength()));
      }
      if_leftisstring.Else();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right, left_type, right_type, result_type,
            state.fixed_right_arg(), allocation_mode, state.strength()));
      }
      if_leftisstring.End();
      result = Pop();
    } else {
      IfBuilder if_rightisstring(this);
      if_rightisstring.If<HIsStringAndBranch>(right);
      if_rightisstring.Then();
      {
        Push(BuildBinaryOperation(state.op(), left, right, left_type,
                                  Type::String(zone()), result_type,
                                  state.fixed_right_arg(), allocation_mode,
                                  state.strength()));
      }
      if_rightisstring.Else();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right, left_type, right_type, result_type,
            state.fixed_right_arg(), allocation_mode, state.strength()));
      }
      if_rightisstring.End();
      result = Pop();
    }
  } else {
    result = BuildBinaryOperation(
        state.op(), left, right, left_type, right_type, result_type,
        state.fixed_right_arg(), allocation_mode, state.strength());
  }

  // If we encounter a generic argument, the number conversion is
  // observable, thus we cannot afford to bail out after the fact.
  if (!state.HasSideEffects()) {
    result = EnforceNumberType(result, result_type);
  }

  return result;
}


Handle<Code> BinaryOpICStub::GenerateCode() { return DoGenerateCode(this); }


// Binary operation that also records an allocation site for the result.
template <>
HValue* CodeStubGraphBuilder<BinaryOpWithAllocationSiteStub>::BuildCodeStub() {
  BinaryOpICState state = casted_stub()->state();

  HValue* allocation_site = GetParameter(
      BinaryOpWithAllocationSiteStub::kAllocationSite);
  HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft);
  HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight);

  Type* left_type = state.GetLeftType();
  Type* right_type = state.GetRightType();
  Type* result_type = state.GetResultType();
  HAllocationMode allocation_mode(allocation_site);

  return BuildBinaryOperation(state.op(), left, right, left_type, right_type,
                              result_type, state.fixed_right_arg(),
                              allocation_mode, state.strength());
}


Handle<Code> BinaryOpWithAllocationSiteStub::GenerateCode() {
  return DoGenerateCode(this);
}


// Converts {input} to a string; when {convert} is false only a string check
// is emitted.
HValue* CodeStubGraphBuilderBase::BuildToString(HValue* input, bool convert) {
  if (!convert) return BuildCheckString(input);
  IfBuilder if_inputissmi(this);
  HValue* inputissmi = if_inputissmi.If<HIsSmiAndBranch>(input);
  if_inputissmi.Then();
  {
    // Convert the input smi to a string.
    Push(BuildNumberToString(input, Type::SignedSmall()));
  }
  if_inputissmi.Else();
  {
    HValue* input_map =
        Add<HLoadNamedField>(input, inputissmi, HObjectAccess::ForMap());
    HValue* input_instance_type = Add<HLoadNamedField>(
        input_map, inputissmi, HObjectAccess::ForMapInstanceType());
    IfBuilder if_inputisstring(this);
    if_inputisstring.If<HCompareNumericAndBranch>(
        input_instance_type, Add<HConstant>(FIRST_NONSTRING_TYPE), Token::LT);
    if_inputisstring.Then();
    {
      // The input is already a string.
      Push(input);
    }
    if_inputisstring.Else();
    {
      // Convert to primitive first (if necessary), see
      // ES6 section 12.7.3 The Addition operator.
      IfBuilder if_inputisprimitive(this);
      STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE);
      if_inputisprimitive.If<HCompareNumericAndBranch>(
          input_instance_type, Add<HConstant>(LAST_PRIMITIVE_TYPE), Token::LTE);
      if_inputisprimitive.Then();
      {
        // The input is already a primitive.
        Push(input);
      }
      if_inputisprimitive.Else();
      {
        // Convert the input to a primitive.
        Push(BuildToPrimitive(input, input_map));
      }
      if_inputisprimitive.End();
      // Convert the primitive to a string value.
      ToStringDescriptor descriptor(isolate());
      ToStringStub stub(isolate());
      HValue* values[] = {context(), Pop()};
      Push(AddUncasted<HCallWithDescriptor>(
          Add<HConstant>(stub.GetCode()), 0, descriptor,
          Vector<HValue*>(values, arraysize(values))));
    }
    if_inputisstring.End();
  }
  if_inputissmi.End();
  return Pop();
}


// ES6 7.1.1 ToPrimitive for the ToString fast path; shortcircuits unmodified
// String wrappers, otherwise falls back to the %ToPrimitive runtime call.
HValue* CodeStubGraphBuilderBase::BuildToPrimitive(HValue* input,
                                                   HValue* input_map) {
  // Get the native context of the caller.
  HValue* native_context = BuildGetNativeContext();

  // Determine the initial map of the %ObjectPrototype%.
  HValue* object_function_prototype_map =
      Add<HLoadNamedField>(native_context, nullptr,
                           HObjectAccess::ForContextSlot(
                               Context::OBJECT_FUNCTION_PROTOTYPE_MAP_INDEX));

  // Determine the initial map of the %StringPrototype%.
  HValue* string_function_prototype_map =
      Add<HLoadNamedField>(native_context, nullptr,
                           HObjectAccess::ForContextSlot(
                               Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));

  // Determine the initial map of the String function.
  HValue* string_function = Add<HLoadNamedField>(
      native_context, nullptr,
      HObjectAccess::ForContextSlot(Context::STRING_FUNCTION_INDEX));
  HValue* string_function_initial_map = Add<HLoadNamedField>(
      string_function, nullptr, HObjectAccess::ForPrototypeOrInitialMap());

  // Determine the map of the [[Prototype]] of {input}.
  HValue* input_prototype =
      Add<HLoadNamedField>(input_map, nullptr, HObjectAccess::ForPrototype());
  HValue* input_prototype_map =
      Add<HLoadNamedField>(input_prototype, nullptr, HObjectAccess::ForMap());

  // For string wrappers (JSValue instances with [[StringData]] internal
  // fields), we can shortcirciut the ToPrimitive if
  //
  //  (a) the {input} map matches the initial map of the String function,
  //  (b) the {input} [[Prototype]] is the unmodified %StringPrototype% (i.e.
  //      no one monkey-patched toString, @@toPrimitive or valueOf), and
  //  (c) the %ObjectPrototype% (i.e. the [[Prototype]] of the
  //      %StringPrototype%) is also unmodified, that is no one sneaked a
  //      @@toPrimitive into the %ObjectPrototype%.
  //
  // If all these assumptions hold, we can just take the [[StringData]] value
  // and return it.
  // TODO(bmeurer): This just repairs a regression introduced by removing the
  // weird (and broken) intrinsic %_IsStringWrapperSafeForDefaultValue, which
  // was intendend to something similar to this, although less efficient and
  // wrong in the presence of @@toPrimitive. Long-term we might want to move
  // into the direction of having a ToPrimitiveStub that can do common cases
  // while staying in JavaScript land (i.e. not going to C++).
  IfBuilder if_inputisstringwrapper(this);
  if_inputisstringwrapper.If<HCompareObjectEqAndBranch>(
      input_map, string_function_initial_map);
  if_inputisstringwrapper.And();
  if_inputisstringwrapper.If<HCompareObjectEqAndBranch>(
      input_prototype_map, string_function_prototype_map);
  if_inputisstringwrapper.And();
  if_inputisstringwrapper.If<HCompareObjectEqAndBranch>(
      Add<HLoadNamedField>(
          Add<HLoadNamedField>(input_prototype_map, nullptr,
                               HObjectAccess::ForPrototype()),
          nullptr, HObjectAccess::ForMap()),
      object_function_prototype_map);
  if_inputisstringwrapper.Then();
  {
    Push(BuildLoadNamedField(
        input, FieldIndex::ForInObjectOffset(JSValue::kValueOffset)));
  }
  if_inputisstringwrapper.Else();
  {
    // TODO(bmeurer): Add support for fast ToPrimitive conversion using
    // a dedicated ToPrimitiveStub.
    Add<HPushArguments>(input);
    Push(Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kToPrimitive), 1));
  }
  if_inputisstringwrapper.End();
  return Pop();
}


template <>
HValue* CodeStubGraphBuilder<StringAddStub>::BuildCodeInitializedStub() {
  StringAddStub* stub = casted_stub();
  StringAddFlags flags = stub->flags();
  PretenureFlag pretenure_flag = stub->pretenure_flag();

  HValue* left = GetParameter(StringAddStub::kLeft);
  HValue* right = GetParameter(StringAddStub::kRight);

  // Make sure that both arguments are strings if not known in advance.
  if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) {
    left =
        BuildToString(left, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT);
  }
  if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) {
    right = BuildToString(right,
                          (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT);
  }

  return BuildStringAdd(left, right, HAllocationMode(pretenure_flag));
}


Handle<Code> StringAddStub::GenerateCode() { return DoGenerateCode(this); }


template <>
HValue* CodeStubGraphBuilder<ToBooleanStub>::BuildCodeInitializedStub() {
  ToBooleanStub* stub = casted_stub();
  IfBuilder if_true(this);
  if_true.If<HBranch>(GetParameter(0), stub->types());
  if_true.Then();
  if_true.Return(graph()->GetConstantTrue());
  if_true.Else();
  if_true.End();
  return graph()->GetConstantFalse();
}


Handle<Code> ToBooleanStub::GenerateCode() { return DoGenerateCode(this); }


template <>
HValue* CodeStubGraphBuilder<StoreGlobalStub>::BuildCodeInitializedStub() {
  StoreGlobalStub* stub = casted_stub();
  HParameter* value = GetParameter(StoreDescriptor::kValueIndex);
  if (stub->check_global()) {
    // Check that the map of the global has not changed: use a placeholder map
    // that will be replaced later with the global object's map.
    HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex);
    HValue* proxy_map =
        Add<HLoadNamedField>(proxy, nullptr, HObjectAccess::ForMap());
    HValue* global =
        Add<HLoadNamedField>(proxy_map, nullptr, HObjectAccess::ForPrototype());
    HValue* map_cell = Add<HConstant>(isolate()->factory()->NewWeakCell(
        StoreGlobalStub::global_map_placeholder(isolate())));
    HValue* expected_map = Add<HLoadNamedField>(
        map_cell, nullptr, HObjectAccess::ForWeakCellValue());
    HValue* map =
        Add<HLoadNamedField>(global, nullptr, HObjectAccess::ForMap());
    IfBuilder map_check(this);
    map_check.IfNot<HCompareObjectEqAndBranch>(expected_map, map);
    map_check.ThenDeopt(Deoptimizer::kUnknownMap);
    map_check.End();
  }

  HValue* weak_cell = Add<HConstant>(isolate()->factory()->NewWeakCell(
      StoreGlobalStub::property_cell_placeholder(isolate())));
  HValue* cell = Add<HLoadNamedField>(weak_cell, nullptr,
                                      HObjectAccess::ForWeakCellValue());
  // NOTE(review): the remainder of this function was lost at a truncation
  // seam in the corrupted source.  Upstream it validates the property cell's
  // state (deoptimizing on mismatch) and stores {value} into the cell before
  // returning it -- restore the tail from the canonical
  // src/code-stubs-hydrogen.cc before relying on this reconstruction.
  USE(cell);
  return value;
}
// NOTE(review): this span of the corrupted source was a byte-level duplicate
// of definitions that already appear earlier in the file
// (StoreFastElementStub / TransitionElementsKindStub / Allocate*Stub
// builders, BuildArrayConstructor and friends, the Array constructor stubs,
// CompareNilICStub, BinaryOpICStub, BinaryOpWithAllocationSiteStub,
// BuildToString, BuildToPrimitive, StringAddStub, ToBooleanStub, and a
// truncated StoreGlobalStub builder).  Keeping the second copy would violate
// the one-definition rule, so the duplicates have been removed; the single
// authoritative definitions live above.
// NOTE(review): This region of code-stubs-hydrogen.cc was mangled by a
// text-extraction pass: every template-argument list ("<...>") was stripped,
// line structure was collapsed, and the whole region was duplicated.  The
// code below restores the missing template arguments (per the Hydrogen
// builder API: Add<HInstrType>(...), CodeStubGraphBuilder<Stub>, Handle<Code>)
// and removes the duplicate copy.  It MUST be diffed against upstream V8
// before landing.  The StoreGlobalStub graph builder that followed this
// region was byte-truncated in the original text and is NOT reproduced here;
// restore it from upstream as well.

Handle<Code> TransitionElementsKindStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<AllocateHeapNumberStub>::BuildCodeStub() {
  // Allocate an (immutable) HeapNumber: raw allocation plus map store.
  HValue* result =
      Add<HAllocate>(Add<HConstant>(HeapNumber::kSize), HType::HeapNumber(),
                     NOT_TENURED, HEAP_NUMBER_TYPE);
  AddStoreMapConstant(result, isolate()->factory()->heap_number_map());
  return result;
}


Handle<Code> AllocateHeapNumberStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<AllocateMutableHeapNumberStub>::BuildCodeStub() {
  // Same shape as AllocateHeapNumberStub, but the result is a mutable
  // HeapNumber (HType::HeapObject, MUTABLE_HEAP_NUMBER_TYPE, mutable map).
  HValue* result =
      Add<HAllocate>(Add<HConstant>(HeapNumber::kSize), HType::HeapObject(),
                     NOT_TENURED, MUTABLE_HEAP_NUMBER_TYPE);
  AddStoreMapConstant(result, isolate()->factory()->mutable_heap_number_map());
  return result;
}


Handle<Code> AllocateMutableHeapNumberStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<AllocateInNewSpaceStub>::BuildCodeStub() {
  // Raw new-space allocation of the requested size (parameter 0); the caller
  // is responsible for initializing the object, including its map.
  HValue* result = Add<HAllocate>(GetParameter(0), HType::Tagged(), NOT_TENURED,
                                  JS_OBJECT_TYPE);
  return result;
}


Handle<Code> AllocateInNewSpaceStub::GenerateCode() {
  return DoGenerateCode(this);
}


HValue* CodeStubGraphBuilderBase::BuildArrayConstructor(
    ElementsKind kind, AllocationSiteOverrideMode override_mode,
    ArgumentClass argument_class) {
  HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor);
  HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite);
  JSArrayBuilder array_builder(this, kind, alloc_site, constructor,
                               override_mode);
  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      // This stub is very performance sensitive, the generated code must be
      // tuned so that it doesn't build and eager frame.
      info()->MarkMustNotHaveEagerFrame();
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }

  return result;
}


HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor(
    ElementsKind kind, ArgumentClass argument_class) {
  // Like BuildArrayConstructor, but internal arrays carry no AllocationSite.
  HValue* constructor =
      GetParameter(InternalArrayConstructorStubBase::kConstructor);
  JSArrayBuilder array_builder(this, kind, constructor);

  HValue* result = NULL;
  switch (argument_class) {
    case NONE:
      // This stub is very performance sensitive, the generated code must be
      // tuned so that it doesn't build and eager frame.
      info()->MarkMustNotHaveEagerFrame();
      result = array_builder.AllocateEmptyArray();
      break;
    case SINGLE:
      result = BuildArraySingleArgumentConstructor(&array_builder);
      break;
    case MULTIPLE:
      result = BuildArrayNArgumentsConstructor(&array_builder, kind);
      break;
  }
  return result;
}


HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor(
    JSArrayBuilder* array_builder) {
  // Smi check and range check on the input arg.
  HValue* constant_one = graph()->GetConstant1();
  HValue* constant_zero = graph()->GetConstant0();

  HInstruction* elements = Add<HArgumentsElements>(false);
  HInstruction* argument =
      Add<HAccessArgumentsAt>(elements, constant_one, constant_zero);

  return BuildAllocateArrayFromLength(array_builder, argument);
}


HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor(
    JSArrayBuilder* array_builder, ElementsKind kind) {
  // Insert a bounds check because the number of arguments might exceed
  // the kInitialMaxFastElementArray limit. This cannot happen for code
  // that was parsed, but calling via Array.apply(thisArg, [...]) might
  // trigger it.
  HValue* length = GetArgumentsLength();
  HConstant* max_alloc_length =
      Add<HConstant>(JSArray::kInitialMaxFastElementArray);
  HValue* checked_length = Add<HBoundsCheck>(length, max_alloc_length);

  // We need to fill with the hole if it's a smi array in the multi-argument
  // case because we might have to bail out while copying arguments into
  // the array because they aren't compatible with a smi array.
  // If it's a double array, no problem, and if it's fast then no
  // problem either because doubles are boxed.
  //
  // TODO(mvstanton): consider an instruction to memset fill the array
  // with zero in this case instead.
  JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind)
                                           ? JSArrayBuilder::FILL_WITH_HOLE
                                           : JSArrayBuilder::DONT_FILL_WITH_HOLE;
  HValue* new_object = array_builder->AllocateArray(checked_length,
                                                    max_alloc_length,
                                                    checked_length,
                                                    fill_mode);
  HValue* elements = array_builder->GetElementsLocation();
  DCHECK(elements != NULL);

  // Now populate the elements correctly.
  LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement);
  HValue* start = graph()->GetConstant0();
  HValue* key = builder.BeginBody(start, checked_length, Token::LT);
  HInstruction* argument_elements = Add<HArgumentsElements>(false);
  HInstruction* argument =
      Add<HAccessArgumentsAt>(argument_elements, checked_length, key);

  Add<HStoreKeyed>(elements, key, argument, nullptr, kind);
  builder.EndBody();
  return new_object;
}


template <>
HValue* CodeStubGraphBuilder<ArrayNoArgumentConstructorStub>::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, NONE);
}


Handle<Code> ArrayNoArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<ArraySingleArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, SINGLE);
}


Handle<Code> ArraySingleArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<ArrayNArgumentsConstructorStub>::BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  AllocationSiteOverrideMode override_mode = casted_stub()->override_mode();
  return BuildArrayConstructor(kind, override_mode, MULTIPLE);
}


Handle<Code> ArrayNArgumentsConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArrayNoArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, NONE);
}


Handle<Code> InternalArrayNoArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArraySingleArgumentConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, SINGLE);
}


Handle<Code> InternalArraySingleArgumentConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<InternalArrayNArgumentsConstructorStub>::
    BuildCodeStub() {
  ElementsKind kind = casted_stub()->elements_kind();
  return BuildInternalArrayConstructor(kind, MULTIPLE);
}


Handle<Code> InternalArrayNArgumentsConstructorStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<CompareNilICStub>::BuildCodeInitializedStub() {
  Isolate* isolate = graph()->isolate();
  CompareNilICStub* stub = casted_stub();
  HIfContinuation continuation;
  Handle<Map> sentinel_map(isolate->heap()->meta_map());
  Type* type = stub->GetType(zone(), sentinel_map);
  BuildCompareNil(GetParameter(0), type, &continuation, kEmbedMapsViaWeakCells);
  IfBuilder if_nil(this, &continuation);
  if_nil.Then();
  if (continuation.IsFalseReachable()) {
    if_nil.Else();
    if_nil.Return(graph()->GetConstantFalse());
  }
  if_nil.End();
  return continuation.IsTrueReachable() ? graph()->GetConstantTrue()
                                        : graph()->GetConstantUndefined();
}


Handle<Code> CompareNilICStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<BinaryOpICStub>::BuildCodeInitializedStub() {
  BinaryOpICState state = casted_stub()->state();

  HValue* left = GetParameter(BinaryOpICStub::kLeft);
  HValue* right = GetParameter(BinaryOpICStub::kRight);

  Type* left_type = state.GetLeftType();
  Type* right_type = state.GetRightType();
  Type* result_type = state.GetResultType();

  DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) &&
         (state.HasSideEffects() || !result_type->Is(Type::None())));

  HValue* result = NULL;
  HAllocationMode allocation_mode(NOT_TENURED);
  if (state.op() == Token::ADD &&
      (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) &&
      !left_type->Is(Type::String()) && !right_type->Is(Type::String())) {
    // For the generic add stub a fast case for string addition is performance
    // critical.
    if (left_type->Maybe(Type::String())) {
      IfBuilder if_leftisstring(this);
      if_leftisstring.If<HIsStringAndBranch>(left);
      if_leftisstring.Then();
      {
        Push(BuildBinaryOperation(state.op(), left, right, Type::String(zone()),
                                  right_type, result_type,
                                  state.fixed_right_arg(), allocation_mode,
                                  state.strength()));
      }
      if_leftisstring.Else();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right, left_type, right_type, result_type,
            state.fixed_right_arg(), allocation_mode, state.strength()));
      }
      if_leftisstring.End();
      result = Pop();
    } else {
      IfBuilder if_rightisstring(this);
      if_rightisstring.If<HIsStringAndBranch>(right);
      if_rightisstring.Then();
      {
        Push(BuildBinaryOperation(state.op(), left, right, left_type,
                                  Type::String(zone()), result_type,
                                  state.fixed_right_arg(), allocation_mode,
                                  state.strength()));
      }
      if_rightisstring.Else();
      {
        Push(BuildBinaryOperation(
            state.op(), left, right, left_type, right_type, result_type,
            state.fixed_right_arg(), allocation_mode, state.strength()));
      }
      if_rightisstring.End();
      result = Pop();
    }
  } else {
    result = BuildBinaryOperation(
        state.op(), left, right, left_type, right_type, result_type,
        state.fixed_right_arg(), allocation_mode, state.strength());
  }

  // If we encounter a generic argument, the number conversion is
  // observable, thus we cannot afford to bail out after the fact.
  if (!state.HasSideEffects()) {
    result = EnforceNumberType(result, result_type);
  }

  return result;
}


Handle<Code> BinaryOpICStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<BinaryOpWithAllocationSiteStub>::BuildCodeStub() {
  BinaryOpICState state = casted_stub()->state();

  HValue* allocation_site =
      GetParameter(BinaryOpWithAllocationSiteStub::kAllocationSite);
  HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft);
  HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight);

  Type* left_type = state.GetLeftType();
  Type* right_type = state.GetRightType();
  Type* result_type = state.GetResultType();
  HAllocationMode allocation_mode(allocation_site);

  return BuildBinaryOperation(state.op(), left, right, left_type, right_type,
                              result_type, state.fixed_right_arg(),
                              allocation_mode, state.strength());
}


Handle<Code> BinaryOpWithAllocationSiteStub::GenerateCode() {
  return DoGenerateCode(this);
}


HValue* CodeStubGraphBuilderBase::BuildToString(HValue* input, bool convert) {
  if (!convert) return BuildCheckString(input);
  IfBuilder if_inputissmi(this);
  HValue* inputissmi = if_inputissmi.If<HIsSmiAndBranch>(input);
  if_inputissmi.Then();
  {
    // Convert the input smi to a string.
    Push(BuildNumberToString(input, Type::SignedSmall()));
  }
  if_inputissmi.Else();
  {
    HValue* input_map =
        Add<HLoadNamedField>(input, inputissmi, HObjectAccess::ForMap());
    HValue* input_instance_type = Add<HLoadNamedField>(
        input_map, inputissmi, HObjectAccess::ForMapInstanceType());
    IfBuilder if_inputisstring(this);
    if_inputisstring.If<HCompareNumericAndBranch>(
        input_instance_type, Add<HConstant>(FIRST_NONSTRING_TYPE), Token::LT);
    if_inputisstring.Then();
    {
      // The input is already a string.
      Push(input);
    }
    if_inputisstring.Else();
    {
      // Convert to primitive first (if necessary), see
      // ES6 section 12.7.3 The Addition operator.
      IfBuilder if_inputisprimitive(this);
      STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE);
      if_inputisprimitive.If<HCompareNumericAndBranch>(
          input_instance_type, Add<HConstant>(LAST_PRIMITIVE_TYPE), Token::LTE);
      if_inputisprimitive.Then();
      {
        // The input is already a primitive.
        Push(input);
      }
      if_inputisprimitive.Else();
      {
        // Convert the input to a primitive.
        Push(BuildToPrimitive(input, input_map));
      }
      if_inputisprimitive.End();
      // Convert the primitive to a string value.
      ToStringDescriptor descriptor(isolate());
      ToStringStub stub(isolate());
      HValue* values[] = {context(), Pop()};
      Push(AddUncasted<HCallWithDescriptor>(
          Add<HConstant>(stub.GetCode()), 0, descriptor,
          Vector<HValue*>(values, arraysize(values))));
    }
    if_inputisstring.End();
  }
  if_inputissmi.End();
  return Pop();
}


HValue* CodeStubGraphBuilderBase::BuildToPrimitive(HValue* input,
                                                   HValue* input_map) {
  // Get the native context of the caller.
  HValue* native_context = BuildGetNativeContext();

  // Determine the initial map of the %ObjectPrototype%.
  HValue* object_function_prototype_map =
      Add<HLoadNamedField>(native_context, nullptr,
                           HObjectAccess::ForContextSlot(
                               Context::OBJECT_FUNCTION_PROTOTYPE_MAP_INDEX));

  // Determine the initial map of the %StringPrototype%.
  HValue* string_function_prototype_map =
      Add<HLoadNamedField>(native_context, nullptr,
                           HObjectAccess::ForContextSlot(
                               Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));

  // Determine the initial map of the String function.
  HValue* string_function = Add<HLoadNamedField>(
      native_context, nullptr,
      HObjectAccess::ForContextSlot(Context::STRING_FUNCTION_INDEX));
  HValue* string_function_initial_map = Add<HLoadNamedField>(
      string_function, nullptr, HObjectAccess::ForPrototypeOrInitialMap());

  // Determine the map of the [[Prototype]] of {input}.
  HValue* input_prototype =
      Add<HLoadNamedField>(input_map, nullptr, HObjectAccess::ForPrototype());
  HValue* input_prototype_map =
      Add<HLoadNamedField>(input_prototype, nullptr, HObjectAccess::ForMap());

  // For string wrappers (JSValue instances with [[StringData]] internal
  // fields), we can shortcirciut the ToPrimitive if
  //
  //  (a) the {input} map matches the initial map of the String function,
  //  (b) the {input} [[Prototype]] is the unmodified %StringPrototype% (i.e.
  //      no one monkey-patched toString, @@toPrimitive or valueOf), and
  //  (c) the %ObjectPrototype% (i.e. the [[Prototype]] of the
  //      %StringPrototype%) is also unmodified, that is no one sneaked a
  //      @@toPrimitive into the %ObjectPrototype%.
  //
  // If all these assumptions hold, we can just take the [[StringData]] value
  // and return it.
  // TODO(bmeurer): This just repairs a regression introduced by removing the
  // weird (and broken) intrinsic %_IsStringWrapperSafeForDefaultValue, which
  // was intendend to something similar to this, although less efficient and
  // wrong in the presence of @@toPrimitive. Long-term we might want to move
  // into the direction of having a ToPrimitiveStub that can do common cases
  // while staying in JavaScript land (i.e. not going to C++).
  IfBuilder if_inputisstringwrapper(this);
  if_inputisstringwrapper.If<HCompareObjectEqAndBranch>(
      input_map, string_function_initial_map);
  if_inputisstringwrapper.And();
  if_inputisstringwrapper.If<HCompareObjectEqAndBranch>(
      input_prototype_map, string_function_prototype_map);
  if_inputisstringwrapper.And();
  if_inputisstringwrapper.If<HCompareObjectEqAndBranch>(
      Add<HLoadNamedField>(
          Add<HLoadNamedField>(input_prototype_map, nullptr,
                               HObjectAccess::ForPrototype()),
          nullptr, HObjectAccess::ForMap()),
      object_function_prototype_map);
  if_inputisstringwrapper.Then();
  {
    Push(BuildLoadNamedField(
        input, FieldIndex::ForInObjectOffset(JSValue::kValueOffset)));
  }
  if_inputisstringwrapper.Else();
  {
    // TODO(bmeurer): Add support for fast ToPrimitive conversion using
    // a dedicated ToPrimitiveStub.
    Add<HPushArguments>(input);
    Push(Add<HCallRuntime>(Runtime::FunctionForId(Runtime::kToPrimitive), 1));
  }
  if_inputisstringwrapper.End();
  return Pop();
}


template <>
HValue* CodeStubGraphBuilder<StringAddStub>::BuildCodeInitializedStub() {
  StringAddStub* stub = casted_stub();
  StringAddFlags flags = stub->flags();
  PretenureFlag pretenure_flag = stub->pretenure_flag();

  HValue* left = GetParameter(StringAddStub::kLeft);
  HValue* right = GetParameter(StringAddStub::kRight);

  // Make sure that both arguments are strings if not known in advance.
  if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) {
    left =
        BuildToString(left, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT);
  }
  if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) {
    right = BuildToString(right,
                          (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT);
  }

  return BuildStringAdd(left, right, HAllocationMode(pretenure_flag));
}


Handle<Code> StringAddStub::GenerateCode() {
  return DoGenerateCode(this);
}


template <>
HValue* CodeStubGraphBuilder<ToBooleanStub>::BuildCodeInitializedStub() {
  ToBooleanStub* stub = casted_stub();
  IfBuilder if_true(this);
  if_true.If<HBranch>(GetParameter(0), stub->types());
  if_true.Then();
  if_true.Return(graph()->GetConstantTrue());
  if_true.Else();
  if_true.End();
  return graph()->GetConstantFalse();
}


Handle<Code> ToBooleanStub::GenerateCode() {
  return DoGenerateCode(this);
}

// NOTE(review): The original text continued here with the head of
// CodeStubGraphBuilder<StoreGlobalStub>::BuildCodeInitializedStub, but that
// definition was byte-truncated by the extraction (its tail is missing from
// the chunk).  It has been intentionally omitted rather than guessed at —
// restore it from upstream V8 (src/code-stubs-hydrogen.cc).
// NOTE(review): first of several duplicated runs of V8 Hydrogen code-stub
// builders. Corruption in this whole span: template argument lists stripped
// from Add<...> / Handle<Code> / CodeStubGraphBuilder<Stub>, and physical
// lines collapsed so that any code following an inline // comment on the same
// line is now commented out. All lines kept byte-identical; per-line
// inventory comments below describe what each damaged line contains.
// Contains: AllocateMutableHeapNumberStub::GenerateCode,
// AllocateInNewSpaceStub builder + GenerateCode, BuildArrayConstructor, and
// the head of BuildInternalArrayConstructor (cut inside an inline comment).
AllocateMutableHeapNumberStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { HValue* result = Add(GetParameter(0), HType::Tagged(), NOT_TENURED, JS_OBJECT_TYPE); return result; } Handle AllocateInNewSpaceStub::GenerateCode() { return DoGenerateCode(this); } HValue* CodeStubGraphBuilderBase::BuildArrayConstructor( ElementsKind kind, AllocationSiteOverrideMode override_mode, ArgumentClass argument_class) { HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor); HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite); JSArrayBuilder array_builder(this, kind, alloc_site, constructor, override_mode); HValue* result = NULL; switch (argument_class) { case NONE: // This stub is very performance sensitive, the generated code must be // tuned so that it doesn't build and eager frame. info()->MarkMustNotHaveEagerFrame(); result = array_builder.AllocateEmptyArray(); break; case SINGLE: result = BuildArraySingleArgumentConstructor(&array_builder); break; case MULTIPLE: result = BuildArrayNArgumentsConstructor(&array_builder, kind); break; } return result; } HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor( ElementsKind kind, ArgumentClass argument_class) { HValue* constructor = GetParameter( InternalArrayConstructorStubBase::kConstructor); JSArrayBuilder array_builder(this, kind, constructor); HValue* result = NULL; switch (argument_class) { case NONE: // This stub is very performance sensitive, the generated code must be // tuned so that it doesn't build and eager frame.
// Contains: tail of BuildInternalArrayConstructor's switch,
// BuildArraySingleArgumentConstructor, and the head of
// BuildArrayNArgumentsConstructor (cut mid-ternary after "?").
info()->MarkMustNotHaveEagerFrame(); result = array_builder.AllocateEmptyArray(); break; case SINGLE: result = BuildArraySingleArgumentConstructor(&array_builder); break; case MULTIPLE: result = BuildArrayNArgumentsConstructor(&array_builder, kind); break; } return result; } HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor( JSArrayBuilder* array_builder) { // Smi check and range check on the input arg. HValue* constant_one = graph()->GetConstant1(); HValue* constant_zero = graph()->GetConstant0(); HInstruction* elements = Add(false); HInstruction* argument = Add( elements, constant_one, constant_zero); return BuildAllocateArrayFromLength(array_builder, argument); } HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor( JSArrayBuilder* array_builder, ElementsKind kind) { // Insert a bounds check because the number of arguments might exceed // the kInitialMaxFastElementArray limit. This cannot happen for code // that was parsed, but calling via Array.apply(thisArg, [...]) might // trigger it. HValue* length = GetArgumentsLength(); HConstant* max_alloc_length = Add(JSArray::kInitialMaxFastElementArray); HValue* checked_length = Add(length, max_alloc_length); // We need to fill with the hole if it's a smi array in the multi-argument // case because we might have to bail out while copying arguments into // the array because they aren't compatible with a smi array. // If it's a double array, no problem, and if it's fast then no // problem either because doubles are boxed. // // TODO(mvstanton): consider an instruction to memset fill the array // with zero in this case instead. JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind) ?
// Contains: tail of BuildArrayNArgumentsConstructor (fill loop) and the
// Array{No,Single,N}ArgumentConstructorStub builders + GenerateCode bodies,
// plus the start of the InternalArray builders.
JSArrayBuilder::FILL_WITH_HOLE : JSArrayBuilder::DONT_FILL_WITH_HOLE; HValue* new_object = array_builder->AllocateArray(checked_length, max_alloc_length, checked_length, fill_mode); HValue* elements = array_builder->GetElementsLocation(); DCHECK(elements != NULL); // Now populate the elements correctly. LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement); HValue* start = graph()->GetConstant0(); HValue* key = builder.BeginBody(start, checked_length, Token::LT); HInstruction* argument_elements = Add(false); HInstruction* argument = Add( argument_elements, checked_length, key); Add(elements, key, argument, nullptr, kind); builder.EndBody(); return new_object; } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, NONE); } Handle ArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, SINGLE); } Handle ArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, MULTIPLE); } Handle ArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, NONE); } Handle InternalArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::
// Contains: InternalArray{Single,N}ArgumentConstructorStub builders +
// GenerateCode, and the head of the CompareNilICStub builder (cut mid-ternary
// after "?").
BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, SINGLE); } Handle InternalArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, MULTIPLE); } Handle InternalArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { Isolate* isolate = graph()->isolate(); CompareNilICStub* stub = casted_stub(); HIfContinuation continuation; Handle sentinel_map(isolate->heap()->meta_map()); Type* type = stub->GetType(zone(), sentinel_map); BuildCompareNil(GetParameter(0), type, &continuation, kEmbedMapsViaWeakCells); IfBuilder if_nil(this, &continuation); if_nil.Then(); if (continuation.IsFalseReachable()) { if_nil.Else(); if_nil.Return(graph()->GetConstantFalse()); } if_nil.End(); return continuation.IsTrueReachable() ?
// Contains: tail of the CompareNilICStub builder, CompareNilICStub::
// GenerateCode, and most of the BinaryOpICStub builder (generic-add string
// fast path; cut mid-argument-list of BuildBinaryOperation).
graph()->GetConstantTrue() : graph()->GetConstantUndefined(); } Handle CompareNilICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { BinaryOpICState state = casted_stub()->state(); HValue* left = GetParameter(BinaryOpICStub::kLeft); HValue* right = GetParameter(BinaryOpICStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) && (state.HasSideEffects() || !result_type->Is(Type::None()))); HValue* result = NULL; HAllocationMode allocation_mode(NOT_TENURED); if (state.op() == Token::ADD && (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) && !left_type->Is(Type::String()) && !right_type->Is(Type::String())) { // For the generic add stub a fast case for string addition is performance // critical. if (left_type->Maybe(Type::String())) { IfBuilder if_leftisstring(this); if_leftisstring.If(left); if_leftisstring.Then(); { Push(BuildBinaryOperation(state.op(), left, right, Type::String(zone()), right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_leftisstring.Else(); { Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_leftisstring.End(); result = Pop(); } else { IfBuilder if_rightisstring(this); if_rightisstring.If(right); if_rightisstring.Then(); { Push(BuildBinaryOperation(state.op(), left, right, left_type, Type::String(zone()), result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_rightisstring.Else(); { Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_rightisstring.End(); result = Pop(); } } else { result = BuildBinaryOperation( state.op(), left, right,
// Contains: tail of the BinaryOpICStub builder, BinaryOpICStub::GenerateCode,
// the BinaryOpWithAllocationSiteStub builder + GenerateCode, and the first
// half of BuildToString (cut inside the already-a-primitive branch).
left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } // If we encounter a generic argument, the number conversion is // observable, thus we cannot afford to bail out after the fact. if (!state.HasSideEffects()) { result = EnforceNumberType(result, result_type); } return result; } Handle BinaryOpICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BinaryOpICState state = casted_stub()->state(); HValue* allocation_site = GetParameter( BinaryOpWithAllocationSiteStub::kAllocationSite); HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft); HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); HAllocationMode allocation_mode(allocation_site); return BuildBinaryOperation(state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } Handle BinaryOpWithAllocationSiteStub::GenerateCode() { return DoGenerateCode(this); } HValue* CodeStubGraphBuilderBase::BuildToString(HValue* input, bool convert) { if (!convert) return BuildCheckString(input); IfBuilder if_inputissmi(this); HValue* inputissmi = if_inputissmi.If(input); if_inputissmi.Then(); { // Convert the input smi to a string. Push(BuildNumberToString(input, Type::SignedSmall())); } if_inputissmi.Else(); { HValue* input_map = Add(input, inputissmi, HObjectAccess::ForMap()); HValue* input_instance_type = Add( input_map, inputissmi, HObjectAccess::ForMapInstanceType()); IfBuilder if_inputisstring(this); if_inputisstring.If( input_instance_type, Add(FIRST_NONSTRING_TYPE), Token::LT); if_inputisstring.Then(); { // The input is already a string. Push(input); } if_inputisstring.Else(); { // Convert to primitive first (if necessary), see // ES6 section 12.7.3 The Addition operator.
// Contains: tail of BuildToString (ToStringStub call) and the first half of
// BuildToPrimitive (native-context map loads; cut before the prototype-map
// loads).
IfBuilder if_inputisprimitive(this); STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE); if_inputisprimitive.If( input_instance_type, Add(LAST_PRIMITIVE_TYPE), Token::LTE); if_inputisprimitive.Then(); { // The input is already a primitive. Push(input); } if_inputisprimitive.Else(); { // Convert the input to a primitive. Push(BuildToPrimitive(input, input_map)); } if_inputisprimitive.End(); // Convert the primitive to a string value. ToStringDescriptor descriptor(isolate()); ToStringStub stub(isolate()); HValue* values[] = {context(), Pop()}; Push(AddUncasted( Add(stub.GetCode()), 0, descriptor, Vector(values, arraysize(values)))); } if_inputisstring.End(); } if_inputissmi.End(); return Pop(); } HValue* CodeStubGraphBuilderBase::BuildToPrimitive(HValue* input, HValue* input_map) { // Get the native context of the caller. HValue* native_context = BuildGetNativeContext(); // Determine the initial map of the %ObjectPrototype%. HValue* object_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::OBJECT_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the %StringPrototype%. HValue* string_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the String function. HValue* string_function = Add( native_context, nullptr, HObjectAccess::ForContextSlot(Context::STRING_FUNCTION_INDEX)); HValue* string_function_initial_map = Add( string_function, nullptr, HObjectAccess::ForPrototypeOrInitialMap()); // Determine the map of the [[Prototype]] of {input}.
// Contains: middle of BuildToPrimitive — the string-wrapper fast-path checks
// (cut inside the Else branch's TODO comment; the runtime-call tail continues
// on the next line).
HValue* input_prototype = Add(input_map, nullptr, HObjectAccess::ForPrototype()); HValue* input_prototype_map = Add(input_prototype, nullptr, HObjectAccess::ForMap()); // For string wrappers (JSValue instances with [[StringData]] internal // fields), we can shortcirciut the ToPrimitive if // // (a) the {input} map matches the initial map of the String function, // (b) the {input} [[Prototype]] is the unmodified %StringPrototype% (i.e. // no one monkey-patched toString, @@toPrimitive or valueOf), and // (c) the %ObjectPrototype% (i.e. the [[Prototype]] of the // %StringPrototype%) is also unmodified, that is no one sneaked a // @@toPrimitive into the %ObjectPrototype%. // // If all these assumptions hold, we can just take the [[StringData]] value // and return it. // TODO(bmeurer): This just repairs a regression introduced by removing the // weird (and broken) intrinsic %_IsStringWrapperSafeForDefaultValue, which // was intendend to something similar to this, although less efficient and // wrong in the presence of @@toPrimitive. Long-term we might want to move // into the direction of having a ToPrimitiveStub that can do common cases // while staying in JavaScript land (i.e. not going to C++). IfBuilder if_inputisstringwrapper(this); if_inputisstringwrapper.If( input_map, string_function_initial_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( input_prototype_map, string_function_prototype_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( Add(Add(input_prototype_map, nullptr, HObjectAccess::ForPrototype()), nullptr, HObjectAccess::ForMap()), object_function_prototype_map); if_inputisstringwrapper.Then(); { Push(BuildLoadNamedField( input, FieldIndex::ForInObjectOffset(JSValue::kValueOffset))); } if_inputisstringwrapper.Else(); { // TODO(bmeurer): Add support for fast ToPrimitive conversion using // a dedicated ToPrimitiveStub.
Add(input); Push(Add(Runtime::FunctionForId(Runtime::kToPrimitive), 1)); } if_inputisstringwrapper.End(); return Pop(); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StringAddStub* stub = casted_stub(); StringAddFlags flags = stub->flags(); PretenureFlag pretenure_flag = stub->pretenure_flag(); HValue* left = GetParameter(StringAddStub::kLeft); HValue* right = GetParameter(StringAddStub::kRight); // Make sure that both arguments are strings if not known in advance. if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) { left = BuildToString(left, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) { right = BuildToString(right, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } return BuildStringAdd(left, right, HAllocationMode(pretenure_flag)); } Handle StringAddStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { ToBooleanStub* stub = casted_stub(); IfBuilder if_true(this); if_true.If(GetParameter(0), stub->types()); if_true.Then(); if_true.Return(graph()->GetConstantTrue()); if_true.Else(); if_true.End(); return graph()->GetConstantFalse(); } Handle ToBooleanStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StoreGlobalStub* stub = casted_stub(); HParameter* value = GetParameter(StoreDescriptor::kValueIndex); if (stub->check_global()) { // Check that the map of the global has not changed: use a placeholder map // that will be replaced later with the global object's map. 
// NOTE(review): duplicate copy of the StoreGlobalStub builder interior, with
// the same corruption as the earlier copy: stripped template arguments on the
// Add(...) calls, inline // comments swallowing collapsed code, and a
// truncation at the dangling "Add" — the property-cell store and function
// tail are missing. Kept byte-identical; restore from upstream V8.
HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex); HValue* proxy_map = Add(proxy, nullptr, HObjectAccess::ForMap()); HValue* global = Add(proxy_map, nullptr, HObjectAccess::ForPrototype()); HValue* map_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::global_map_placeholder(isolate()))); HValue* expected_map = Add( map_cell, nullptr, HObjectAccess::ForWeakCellValue()); HValue* map = Add(global, nullptr, HObjectAccess::ForMap()); IfBuilder map_check(this); map_check.IfNot(expected_map, map); map_check.ThenDeopt(Deoptimizer::kUnknownMap); map_check.End(); } HValue* weak_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::property_cell_placeholder(isolate()))); HValue* cell = Add(weak_cell, nullptr, HObjectAccess::ForWeakCellValue()); Add
// NOTE(review): second duplicated run of the same builder sequence, with the
// same extraction damage throughout (stripped template arguments, collapsed
// lines with inline // comments swallowing code). The span also ends
// truncated inside the CompareNilICStub builder. All lines kept
// byte-identical; per-line inventory comments below.
// Contains: AllocateInNewSpaceStub::GenerateCode, BuildArrayConstructor, and
// BuildInternalArrayConstructor through the start of
// BuildArraySingleArgumentConstructor.
AllocateInNewSpaceStub::GenerateCode() { return DoGenerateCode(this); } HValue* CodeStubGraphBuilderBase::BuildArrayConstructor( ElementsKind kind, AllocationSiteOverrideMode override_mode, ArgumentClass argument_class) { HValue* constructor = GetParameter(ArrayConstructorStubBase::kConstructor); HValue* alloc_site = GetParameter(ArrayConstructorStubBase::kAllocationSite); JSArrayBuilder array_builder(this, kind, alloc_site, constructor, override_mode); HValue* result = NULL; switch (argument_class) { case NONE: // This stub is very performance sensitive, the generated code must be // tuned so that it doesn't build and eager frame. info()->MarkMustNotHaveEagerFrame(); result = array_builder.AllocateEmptyArray(); break; case SINGLE: result = BuildArraySingleArgumentConstructor(&array_builder); break; case MULTIPLE: result = BuildArrayNArgumentsConstructor(&array_builder, kind); break; } return result; } HValue* CodeStubGraphBuilderBase::BuildInternalArrayConstructor( ElementsKind kind, ArgumentClass argument_class) { HValue* constructor = GetParameter( InternalArrayConstructorStubBase::kConstructor); JSArrayBuilder array_builder(this, kind, constructor); HValue* result = NULL; switch (argument_class) { case NONE: // This stub is very performance sensitive, the generated code must be // tuned so that it doesn't build and eager frame. info()->MarkMustNotHaveEagerFrame(); result = array_builder.AllocateEmptyArray(); break; case SINGLE: result = BuildArraySingleArgumentConstructor(&array_builder); break; case MULTIPLE: result = BuildArrayNArgumentsConstructor(&array_builder, kind); break; } return result; } HValue* CodeStubGraphBuilderBase::BuildArraySingleArgumentConstructor( JSArrayBuilder* array_builder) { // Smi check and range check on the input arg.
// Contains: body of BuildArraySingleArgumentConstructor and most of
// BuildArrayNArgumentsConstructor (bounds check, fill-mode selection).
HValue* constant_one = graph()->GetConstant1(); HValue* constant_zero = graph()->GetConstant0(); HInstruction* elements = Add(false); HInstruction* argument = Add( elements, constant_one, constant_zero); return BuildAllocateArrayFromLength(array_builder, argument); } HValue* CodeStubGraphBuilderBase::BuildArrayNArgumentsConstructor( JSArrayBuilder* array_builder, ElementsKind kind) { // Insert a bounds check because the number of arguments might exceed // the kInitialMaxFastElementArray limit. This cannot happen for code // that was parsed, but calling via Array.apply(thisArg, [...]) might // trigger it. HValue* length = GetArgumentsLength(); HConstant* max_alloc_length = Add(JSArray::kInitialMaxFastElementArray); HValue* checked_length = Add(length, max_alloc_length); // We need to fill with the hole if it's a smi array in the multi-argument // case because we might have to bail out while copying arguments into // the array because they aren't compatible with a smi array. // If it's a double array, no problem, and if it's fast then no // problem either because doubles are boxed. // // TODO(mvstanton): consider an instruction to memset fill the array // with zero in this case instead. JSArrayBuilder::FillMode fill_mode = IsFastSmiElementsKind(kind) ? JSArrayBuilder::FILL_WITH_HOLE : JSArrayBuilder::DONT_FILL_WITH_HOLE; HValue* new_object = array_builder->AllocateArray(checked_length, max_alloc_length, checked_length, fill_mode); HValue* elements = array_builder->GetElementsLocation(); DCHECK(elements != NULL); // Now populate the elements correctly.
// Contains: the element-fill loop of BuildArrayNArgumentsConstructor and the
// Array/InternalArray constructor-stub builders + GenerateCode bodies.
LoopBuilder builder(this, context(), LoopBuilder::kPostIncrement); HValue* start = graph()->GetConstant0(); HValue* key = builder.BeginBody(start, checked_length, Token::LT); HInstruction* argument_elements = Add(false); HInstruction* argument = Add( argument_elements, checked_length, key); Add(elements, key, argument, nullptr, kind); builder.EndBody(); return new_object; } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, NONE); } Handle ArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, SINGLE); } Handle ArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, MULTIPLE); } Handle ArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, NONE); } Handle InternalArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, SINGLE); } Handle InternalArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind =
// Contains: InternalArrayNArguments builder tail, the CompareNilICStub
// builder + GenerateCode, and the head of the BinaryOpICStub builder.
casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, MULTIPLE); } Handle InternalArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { Isolate* isolate = graph()->isolate(); CompareNilICStub* stub = casted_stub(); HIfContinuation continuation; Handle sentinel_map(isolate->heap()->meta_map()); Type* type = stub->GetType(zone(), sentinel_map); BuildCompareNil(GetParameter(0), type, &continuation, kEmbedMapsViaWeakCells); IfBuilder if_nil(this, &continuation); if_nil.Then(); if (continuation.IsFalseReachable()) { if_nil.Else(); if_nil.Return(graph()->GetConstantFalse()); } if_nil.End(); return continuation.IsTrueReachable() ? graph()->GetConstantTrue() : graph()->GetConstantUndefined(); } Handle CompareNilICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { BinaryOpICState state = casted_stub()->state(); HValue* left = GetParameter(BinaryOpICStub::kLeft); HValue* right = GetParameter(BinaryOpICStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) && (state.HasSideEffects() || !result_type->Is(Type::None()))); HValue* result = NULL; HAllocationMode allocation_mode(NOT_TENURED); if (state.op() == Token::ADD && (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) && !left_type->Is(Type::String()) && !right_type->Is(Type::String())) { // For the generic add stub a fast case for string addition is performance // critical.
// Contains: the generic-add string fast path of the BinaryOpICStub builder.
if (left_type->Maybe(Type::String())) { IfBuilder if_leftisstring(this); if_leftisstring.If(left); if_leftisstring.Then(); { Push(BuildBinaryOperation(state.op(), left, right, Type::String(zone()), right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_leftisstring.Else(); { Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_leftisstring.End(); result = Pop(); } else { IfBuilder if_rightisstring(this); if_rightisstring.If(right); if_rightisstring.Then(); { Push(BuildBinaryOperation(state.op(), left, right, left_type, Type::String(zone()), result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_rightisstring.Else(); { Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_rightisstring.End(); result = Pop(); } } else { result = BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } // If we encounter a generic argument, the number conversion is // observable, thus we cannot afford to bail out after the fact.
// Contains: BinaryOpICStub tail + GenerateCode, the
// BinaryOpWithAllocationSiteStub builder + GenerateCode, and the first half
// of BuildToString.
if (!state.HasSideEffects()) { result = EnforceNumberType(result, result_type); } return result; } Handle BinaryOpICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BinaryOpICState state = casted_stub()->state(); HValue* allocation_site = GetParameter( BinaryOpWithAllocationSiteStub::kAllocationSite); HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft); HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); HAllocationMode allocation_mode(allocation_site); return BuildBinaryOperation(state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } Handle BinaryOpWithAllocationSiteStub::GenerateCode() { return DoGenerateCode(this); } HValue* CodeStubGraphBuilderBase::BuildToString(HValue* input, bool convert) { if (!convert) return BuildCheckString(input); IfBuilder if_inputissmi(this); HValue* inputissmi = if_inputissmi.If(input); if_inputissmi.Then(); { // Convert the input smi to a string. Push(BuildNumberToString(input, Type::SignedSmall())); } if_inputissmi.Else(); { HValue* input_map = Add(input, inputissmi, HObjectAccess::ForMap()); HValue* input_instance_type = Add( input_map, inputissmi, HObjectAccess::ForMapInstanceType()); IfBuilder if_inputisstring(this); if_inputisstring.If( input_instance_type, Add(FIRST_NONSTRING_TYPE), Token::LT); if_inputisstring.Then(); { // The input is already a string. Push(input); } if_inputisstring.Else(); { // Convert to primitive first (if necessary), see // ES6 section 12.7.3 The Addition operator. IfBuilder if_inputisprimitive(this); STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE); if_inputisprimitive.If( input_instance_type, Add(LAST_PRIMITIVE_TYPE), Token::LTE); if_inputisprimitive.Then(); { // The input is already a primitive.
// Contains: BuildToString tail (ToStringStub call) and the first half of
// BuildToPrimitive (native-context map loads).
Push(input); } if_inputisprimitive.Else(); { // Convert the input to a primitive. Push(BuildToPrimitive(input, input_map)); } if_inputisprimitive.End(); // Convert the primitive to a string value. ToStringDescriptor descriptor(isolate()); ToStringStub stub(isolate()); HValue* values[] = {context(), Pop()}; Push(AddUncasted( Add(stub.GetCode()), 0, descriptor, Vector(values, arraysize(values)))); } if_inputisstring.End(); } if_inputissmi.End(); return Pop(); } HValue* CodeStubGraphBuilderBase::BuildToPrimitive(HValue* input, HValue* input_map) { // Get the native context of the caller. HValue* native_context = BuildGetNativeContext(); // Determine the initial map of the %ObjectPrototype%. HValue* object_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::OBJECT_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the %StringPrototype%. HValue* string_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the String function. HValue* string_function = Add( native_context, nullptr, HObjectAccess::ForContextSlot(Context::STRING_FUNCTION_INDEX)); HValue* string_function_initial_map = Add( string_function, nullptr, HObjectAccess::ForPrototypeOrInitialMap()); // Determine the map of the [[Prototype]] of {input}. HValue* input_prototype = Add(input_map, nullptr, HObjectAccess::ForPrototype()); HValue* input_prototype_map = Add(input_prototype, nullptr, HObjectAccess::ForMap()); // For string wrappers (JSValue instances with [[StringData]] internal // fields), we can shortcirciut the ToPrimitive if // // (a) the {input} map matches the initial map of the String function, // (b) the {input} [[Prototype]] is the unmodified %StringPrototype% (i.e. // no one monkey-patched toString, @@toPrimitive or valueOf), and // (c) the %ObjectPrototype% (i.e.
// Contains: BuildToPrimitive tail (string-wrapper fast path + runtime
// fallback) and the head of the StringAddStub builder.
the [[Prototype]] of the // %StringPrototype%) is also unmodified, that is no one sneaked a // @@toPrimitive into the %ObjectPrototype%. // // If all these assumptions hold, we can just take the [[StringData]] value // and return it. // TODO(bmeurer): This just repairs a regression introduced by removing the // weird (and broken) intrinsic %_IsStringWrapperSafeForDefaultValue, which // was intendend to something similar to this, although less efficient and // wrong in the presence of @@toPrimitive. Long-term we might want to move // into the direction of having a ToPrimitiveStub that can do common cases // while staying in JavaScript land (i.e. not going to C++). IfBuilder if_inputisstringwrapper(this); if_inputisstringwrapper.If( input_map, string_function_initial_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( input_prototype_map, string_function_prototype_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( Add(Add(input_prototype_map, nullptr, HObjectAccess::ForPrototype()), nullptr, HObjectAccess::ForMap()), object_function_prototype_map); if_inputisstringwrapper.Then(); { Push(BuildLoadNamedField( input, FieldIndex::ForInObjectOffset(JSValue::kValueOffset))); } if_inputisstringwrapper.Else(); { // TODO(bmeurer): Add support for fast ToPrimitive conversion using // a dedicated ToPrimitiveStub. Add(input); Push(Add(Runtime::FunctionForId(Runtime::kToPrimitive), 1)); } if_inputisstringwrapper.End(); return Pop(); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StringAddStub* stub = casted_stub(); StringAddFlags flags = stub->flags(); PretenureFlag pretenure_flag = stub->pretenure_flag(); HValue* left = GetParameter(StringAddStub::kLeft); HValue* right = GetParameter(StringAddStub::kRight); // Make sure that both arguments are strings if not known in advance.
// Contains: StringAddStub builder tail + GenerateCode, the ToBooleanStub
// builder + GenerateCode, and the head of the StoreGlobalStub builder; the
// StoreGlobal body is cut off at the end of this line (its interior is
// missing before the next line's ArrayNoArgumentConstructorStub).
if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) { left = BuildToString(left, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) { right = BuildToString(right, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } return BuildStringAdd(left, right, HAllocationMode(pretenure_flag)); } Handle StringAddStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { ToBooleanStub* stub = casted_stub(); IfBuilder if_true(this); if_true.If(GetParameter(0), stub->types()); if_true.Then(); if_true.Return(graph()->GetConstantTrue()); if_true.Else(); if_true.End(); return graph()->GetConstantFalse(); } Handle ToBooleanStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StoreGlobalStub* stub = casted_stub(); HParameter* value = GetParameter(StoreDescriptor::kValueIndex); if (stub->check_global()) { // Check that the map of the global has not changed: use a placeholder map // that will be replaced later with the global object's map. HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex); HValue* proxy_map = Add(proxy, nullptr, HObjectAccess::ForMap()); HValue* global = Add(proxy_map, nullptr, HObjectAccess::ForPrototype()); HValue* map_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::global_map_placeholder(isolate()))); HValue* expected_map = Add( map_cell, nullptr, HObjectAccess::ForWeakCellValue()); HValue* map = Add(global, nullptr, HObjectAccess::ForMap()); IfBuilder map_check(this); map_check.IfNot(expected_map, map); map_check.ThenDeopt(Deoptimizer::kUnknownMap); map_check.End(); } HValue* weak_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::property_cell_placeholder(isolate()))); HValue* cell = Add(weak_cell, nullptr, HObjectAccess::ForWeakCellValue()); Add
// Contains: Array{No,Single,N}Argument and InternalArray constructor-stub
// builders + GenerateCode bodies, then the head of the CompareNilICStub
// builder; the span (and this copy of the file) ends truncated inside its
// if (continuation.IsFalseReachable()) branch.
ArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, SINGLE); } Handle ArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, MULTIPLE); } Handle ArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, NONE); } Handle InternalArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, SINGLE); } Handle InternalArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, MULTIPLE); } Handle InternalArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { Isolate* isolate = graph()->isolate(); CompareNilICStub* stub = casted_stub(); HIfContinuation continuation; Handle sentinel_map(isolate->heap()->meta_map()); Type* type = stub->GetType(zone(), sentinel_map); BuildCompareNil(GetParameter(0), type, &continuation, kEmbedMapsViaWeakCells); IfBuilder if_nil(this, &continuation); if_nil.Then(); if (continuation.IsFalseReachable()) {
if_nil.Else(); if_nil.Return(graph()->GetConstantFalse()); } if_nil.End(); return continuation.IsTrueReachable() ? graph()->GetConstantTrue() : graph()->GetConstantUndefined(); } Handle CompareNilICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { BinaryOpICState state = casted_stub()->state(); HValue* left = GetParameter(BinaryOpICStub::kLeft); HValue* right = GetParameter(BinaryOpICStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) && (state.HasSideEffects() || !result_type->Is(Type::None()))); HValue* result = NULL; HAllocationMode allocation_mode(NOT_TENURED); if (state.op() == Token::ADD && (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) && !left_type->Is(Type::String()) && !right_type->Is(Type::String())) { // For the generic add stub a fast case for string addition is performance // critical. 
if (left_type->Maybe(Type::String())) { IfBuilder if_leftisstring(this); if_leftisstring.If(left); if_leftisstring.Then(); { Push(BuildBinaryOperation(state.op(), left, right, Type::String(zone()), right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_leftisstring.Else(); { Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_leftisstring.End(); result = Pop(); } else { IfBuilder if_rightisstring(this); if_rightisstring.If(right); if_rightisstring.Then(); { Push(BuildBinaryOperation(state.op(), left, right, left_type, Type::String(zone()), result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_rightisstring.Else(); { Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_rightisstring.End(); result = Pop(); } } else { result = BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } // If we encounter a generic argument, the number conversion is // observable, thus we cannot afford to bail out after the fact. 
if (!state.HasSideEffects()) { result = EnforceNumberType(result, result_type); } return result; } Handle BinaryOpICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BinaryOpICState state = casted_stub()->state(); HValue* allocation_site = GetParameter( BinaryOpWithAllocationSiteStub::kAllocationSite); HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft); HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); HAllocationMode allocation_mode(allocation_site); return BuildBinaryOperation(state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } Handle BinaryOpWithAllocationSiteStub::GenerateCode() { return DoGenerateCode(this); } HValue* CodeStubGraphBuilderBase::BuildToString(HValue* input, bool convert) { if (!convert) return BuildCheckString(input); IfBuilder if_inputissmi(this); HValue* inputissmi = if_inputissmi.If(input); if_inputissmi.Then(); { // Convert the input smi to a string. Push(BuildNumberToString(input, Type::SignedSmall())); } if_inputissmi.Else(); { HValue* input_map = Add(input, inputissmi, HObjectAccess::ForMap()); HValue* input_instance_type = Add( input_map, inputissmi, HObjectAccess::ForMapInstanceType()); IfBuilder if_inputisstring(this); if_inputisstring.If( input_instance_type, Add(FIRST_NONSTRING_TYPE), Token::LT); if_inputisstring.Then(); { // The input is already a string. Push(input); } if_inputisstring.Else(); { // Convert to primitive first (if necessary), see // ES6 section 12.7.3 The Addition operator. IfBuilder if_inputisprimitive(this); STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE); if_inputisprimitive.If( input_instance_type, Add(LAST_PRIMITIVE_TYPE), Token::LTE); if_inputisprimitive.Then(); { // The input is already a primitive. 
Push(input); } if_inputisprimitive.Else(); { // Convert the input to a primitive. Push(BuildToPrimitive(input, input_map)); } if_inputisprimitive.End(); // Convert the primitive to a string value. ToStringDescriptor descriptor(isolate()); ToStringStub stub(isolate()); HValue* values[] = {context(), Pop()}; Push(AddUncasted( Add(stub.GetCode()), 0, descriptor, Vector(values, arraysize(values)))); } if_inputisstring.End(); } if_inputissmi.End(); return Pop(); } HValue* CodeStubGraphBuilderBase::BuildToPrimitive(HValue* input, HValue* input_map) { // Get the native context of the caller. HValue* native_context = BuildGetNativeContext(); // Determine the initial map of the %ObjectPrototype%. HValue* object_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::OBJECT_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the %StringPrototype%. HValue* string_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the String function. HValue* string_function = Add( native_context, nullptr, HObjectAccess::ForContextSlot(Context::STRING_FUNCTION_INDEX)); HValue* string_function_initial_map = Add( string_function, nullptr, HObjectAccess::ForPrototypeOrInitialMap()); // Determine the map of the [[Prototype]] of {input}. HValue* input_prototype = Add(input_map, nullptr, HObjectAccess::ForPrototype()); HValue* input_prototype_map = Add(input_prototype, nullptr, HObjectAccess::ForMap()); // For string wrappers (JSValue instances with [[StringData]] internal // fields), we can shortcirciut the ToPrimitive if // // (a) the {input} map matches the initial map of the String function, // (b) the {input} [[Prototype]] is the unmodified %StringPrototype% (i.e. // no one monkey-patched toString, @@toPrimitive or valueOf), and // (c) the %ObjectPrototype% (i.e. 
the [[Prototype]] of the // %StringPrototype%) is also unmodified, that is no one sneaked a // @@toPrimitive into the %ObjectPrototype%. // // If all these assumptions hold, we can just take the [[StringData]] value // and return it. // TODO(bmeurer): This just repairs a regression introduced by removing the // weird (and broken) intrinsic %_IsStringWrapperSafeForDefaultValue, which // was intendend to something similar to this, although less efficient and // wrong in the presence of @@toPrimitive. Long-term we might want to move // into the direction of having a ToPrimitiveStub that can do common cases // while staying in JavaScript land (i.e. not going to C++). IfBuilder if_inputisstringwrapper(this); if_inputisstringwrapper.If( input_map, string_function_initial_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( input_prototype_map, string_function_prototype_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( Add(Add(input_prototype_map, nullptr, HObjectAccess::ForPrototype()), nullptr, HObjectAccess::ForMap()), object_function_prototype_map); if_inputisstringwrapper.Then(); { Push(BuildLoadNamedField( input, FieldIndex::ForInObjectOffset(JSValue::kValueOffset))); } if_inputisstringwrapper.Else(); { // TODO(bmeurer): Add support for fast ToPrimitive conversion using // a dedicated ToPrimitiveStub. Add(input); Push(Add(Runtime::FunctionForId(Runtime::kToPrimitive), 1)); } if_inputisstringwrapper.End(); return Pop(); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StringAddStub* stub = casted_stub(); StringAddFlags flags = stub->flags(); PretenureFlag pretenure_flag = stub->pretenure_flag(); HValue* left = GetParameter(StringAddStub::kLeft); HValue* right = GetParameter(StringAddStub::kRight); // Make sure that both arguments are strings if not known in advance. 
if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) { left = BuildToString(left, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) { right = BuildToString(right, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } return BuildStringAdd(left, right, HAllocationMode(pretenure_flag)); } Handle StringAddStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { ToBooleanStub* stub = casted_stub(); IfBuilder if_true(this); if_true.If(GetParameter(0), stub->types()); if_true.Then(); if_true.Return(graph()->GetConstantTrue()); if_true.Else(); if_true.End(); return graph()->GetConstantFalse(); } Handle ToBooleanStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StoreGlobalStub* stub = casted_stub(); HParameter* value = GetParameter(StoreDescriptor::kValueIndex); if (stub->check_global()) { // Check that the map of the global has not changed: use a placeholder map // that will be replaced later with the global object's map. HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex); HValue* proxy_map = Add(proxy, nullptr, HObjectAccess::ForMap()); HValue* global = Add(proxy_map, nullptr, HObjectAccess::ForPrototype()); HValue* map_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::global_map_placeholder(isolate()))); HValue* expected_map = Add( map_cell, nullptr, HObjectAccess::ForWeakCellValue()); HValue* map = Add(global, nullptr, HObjectAccess::ForMap()); IfBuilder map_check(this); map_check.IfNot(expected_map, map); map_check.ThenDeopt(Deoptimizer::kUnknownMap); map_check.End(); } HValue* weak_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::property_cell_placeholder(isolate()))); HValue* cell = Add(weak_cell, nullptr, HObjectAccess::ForWeakCellValue()); Add
ArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); AllocationSiteOverrideMode override_mode = casted_stub()->override_mode(); return BuildArrayConstructor(kind, override_mode, MULTIPLE); } Handle ArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, NONE); } Handle InternalArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, SINGLE); } Handle InternalArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, MULTIPLE); } Handle InternalArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { Isolate* isolate = graph()->isolate(); CompareNilICStub* stub = casted_stub(); HIfContinuation continuation; Handle sentinel_map(isolate->heap()->meta_map()); Type* type = stub->GetType(zone(), sentinel_map); BuildCompareNil(GetParameter(0), type, &continuation, kEmbedMapsViaWeakCells); IfBuilder if_nil(this, &continuation); if_nil.Then(); if (continuation.IsFalseReachable()) { if_nil.Else(); if_nil.Return(graph()->GetConstantFalse()); } if_nil.End(); return continuation.IsTrueReachable() ? 
graph()->GetConstantTrue() : graph()->GetConstantUndefined(); } Handle CompareNilICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { BinaryOpICState state = casted_stub()->state(); HValue* left = GetParameter(BinaryOpICStub::kLeft); HValue* right = GetParameter(BinaryOpICStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) && (state.HasSideEffects() || !result_type->Is(Type::None()))); HValue* result = NULL; HAllocationMode allocation_mode(NOT_TENURED); if (state.op() == Token::ADD && (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) && !left_type->Is(Type::String()) && !right_type->Is(Type::String())) { // For the generic add stub a fast case for string addition is performance // critical. if (left_type->Maybe(Type::String())) { IfBuilder if_leftisstring(this); if_leftisstring.If(left); if_leftisstring.Then(); { Push(BuildBinaryOperation(state.op(), left, right, Type::String(zone()), right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_leftisstring.Else(); { Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_leftisstring.End(); result = Pop(); } else { IfBuilder if_rightisstring(this); if_rightisstring.If(right); if_rightisstring.Then(); { Push(BuildBinaryOperation(state.op(), left, right, left_type, Type::String(zone()), result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_rightisstring.Else(); { Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_rightisstring.End(); result = Pop(); } } else { result = BuildBinaryOperation( state.op(), left, right, 
left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } // If we encounter a generic argument, the number conversion is // observable, thus we cannot afford to bail out after the fact. if (!state.HasSideEffects()) { result = EnforceNumberType(result, result_type); } return result; } Handle BinaryOpICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BinaryOpICState state = casted_stub()->state(); HValue* allocation_site = GetParameter( BinaryOpWithAllocationSiteStub::kAllocationSite); HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft); HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); HAllocationMode allocation_mode(allocation_site); return BuildBinaryOperation(state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } Handle BinaryOpWithAllocationSiteStub::GenerateCode() { return DoGenerateCode(this); } HValue* CodeStubGraphBuilderBase::BuildToString(HValue* input, bool convert) { if (!convert) return BuildCheckString(input); IfBuilder if_inputissmi(this); HValue* inputissmi = if_inputissmi.If(input); if_inputissmi.Then(); { // Convert the input smi to a string. Push(BuildNumberToString(input, Type::SignedSmall())); } if_inputissmi.Else(); { HValue* input_map = Add(input, inputissmi, HObjectAccess::ForMap()); HValue* input_instance_type = Add( input_map, inputissmi, HObjectAccess::ForMapInstanceType()); IfBuilder if_inputisstring(this); if_inputisstring.If( input_instance_type, Add(FIRST_NONSTRING_TYPE), Token::LT); if_inputisstring.Then(); { // The input is already a string. Push(input); } if_inputisstring.Else(); { // Convert to primitive first (if necessary), see // ES6 section 12.7.3 The Addition operator. 
IfBuilder if_inputisprimitive(this); STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE); if_inputisprimitive.If( input_instance_type, Add(LAST_PRIMITIVE_TYPE), Token::LTE); if_inputisprimitive.Then(); { // The input is already a primitive. Push(input); } if_inputisprimitive.Else(); { // Convert the input to a primitive. Push(BuildToPrimitive(input, input_map)); } if_inputisprimitive.End(); // Convert the primitive to a string value. ToStringDescriptor descriptor(isolate()); ToStringStub stub(isolate()); HValue* values[] = {context(), Pop()}; Push(AddUncasted( Add(stub.GetCode()), 0, descriptor, Vector(values, arraysize(values)))); } if_inputisstring.End(); } if_inputissmi.End(); return Pop(); } HValue* CodeStubGraphBuilderBase::BuildToPrimitive(HValue* input, HValue* input_map) { // Get the native context of the caller. HValue* native_context = BuildGetNativeContext(); // Determine the initial map of the %ObjectPrototype%. HValue* object_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::OBJECT_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the %StringPrototype%. HValue* string_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the String function. HValue* string_function = Add( native_context, nullptr, HObjectAccess::ForContextSlot(Context::STRING_FUNCTION_INDEX)); HValue* string_function_initial_map = Add( string_function, nullptr, HObjectAccess::ForPrototypeOrInitialMap()); // Determine the map of the [[Prototype]] of {input}. 
HValue* input_prototype = Add(input_map, nullptr, HObjectAccess::ForPrototype()); HValue* input_prototype_map = Add(input_prototype, nullptr, HObjectAccess::ForMap()); // For string wrappers (JSValue instances with [[StringData]] internal // fields), we can shortcirciut the ToPrimitive if // // (a) the {input} map matches the initial map of the String function, // (b) the {input} [[Prototype]] is the unmodified %StringPrototype% (i.e. // no one monkey-patched toString, @@toPrimitive or valueOf), and // (c) the %ObjectPrototype% (i.e. the [[Prototype]] of the // %StringPrototype%) is also unmodified, that is no one sneaked a // @@toPrimitive into the %ObjectPrototype%. // // If all these assumptions hold, we can just take the [[StringData]] value // and return it. // TODO(bmeurer): This just repairs a regression introduced by removing the // weird (and broken) intrinsic %_IsStringWrapperSafeForDefaultValue, which // was intendend to something similar to this, although less efficient and // wrong in the presence of @@toPrimitive. Long-term we might want to move // into the direction of having a ToPrimitiveStub that can do common cases // while staying in JavaScript land (i.e. not going to C++). IfBuilder if_inputisstringwrapper(this); if_inputisstringwrapper.If( input_map, string_function_initial_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( input_prototype_map, string_function_prototype_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( Add(Add(input_prototype_map, nullptr, HObjectAccess::ForPrototype()), nullptr, HObjectAccess::ForMap()), object_function_prototype_map); if_inputisstringwrapper.Then(); { Push(BuildLoadNamedField( input, FieldIndex::ForInObjectOffset(JSValue::kValueOffset))); } if_inputisstringwrapper.Else(); { // TODO(bmeurer): Add support for fast ToPrimitive conversion using // a dedicated ToPrimitiveStub. 
Add(input); Push(Add(Runtime::FunctionForId(Runtime::kToPrimitive), 1)); } if_inputisstringwrapper.End(); return Pop(); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StringAddStub* stub = casted_stub(); StringAddFlags flags = stub->flags(); PretenureFlag pretenure_flag = stub->pretenure_flag(); HValue* left = GetParameter(StringAddStub::kLeft); HValue* right = GetParameter(StringAddStub::kRight); // Make sure that both arguments are strings if not known in advance. if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) { left = BuildToString(left, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) { right = BuildToString(right, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } return BuildStringAdd(left, right, HAllocationMode(pretenure_flag)); } Handle StringAddStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { ToBooleanStub* stub = casted_stub(); IfBuilder if_true(this); if_true.If(GetParameter(0), stub->types()); if_true.Then(); if_true.Return(graph()->GetConstantTrue()); if_true.Else(); if_true.End(); return graph()->GetConstantFalse(); } Handle ToBooleanStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StoreGlobalStub* stub = casted_stub(); HParameter* value = GetParameter(StoreDescriptor::kValueIndex); if (stub->check_global()) { // Check that the map of the global has not changed: use a placeholder map // that will be replaced later with the global object's map. 
HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex); HValue* proxy_map = Add(proxy, nullptr, HObjectAccess::ForMap()); HValue* global = Add(proxy_map, nullptr, HObjectAccess::ForPrototype()); HValue* map_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::global_map_placeholder(isolate()))); HValue* expected_map = Add( map_cell, nullptr, HObjectAccess::ForWeakCellValue()); HValue* map = Add(global, nullptr, HObjectAccess::ForMap()); IfBuilder map_check(this); map_check.IfNot(expected_map, map); map_check.ThenDeopt(Deoptimizer::kUnknownMap); map_check.End(); } HValue* weak_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::property_cell_placeholder(isolate()))); HValue* cell = Add(weak_cell, nullptr, HObjectAccess::ForWeakCellValue()); Add
ArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, NONE); } Handle InternalArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, SINGLE); } Handle InternalArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, MULTIPLE); } Handle InternalArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { Isolate* isolate = graph()->isolate(); CompareNilICStub* stub = casted_stub(); HIfContinuation continuation; Handle sentinel_map(isolate->heap()->meta_map()); Type* type = stub->GetType(zone(), sentinel_map); BuildCompareNil(GetParameter(0), type, &continuation, kEmbedMapsViaWeakCells); IfBuilder if_nil(this, &continuation); if_nil.Then(); if (continuation.IsFalseReachable()) { if_nil.Else(); if_nil.Return(graph()->GetConstantFalse()); } if_nil.End(); return continuation.IsTrueReachable() ? 
graph()->GetConstantTrue() : graph()->GetConstantUndefined(); } Handle CompareNilICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { BinaryOpICState state = casted_stub()->state(); HValue* left = GetParameter(BinaryOpICStub::kLeft); HValue* right = GetParameter(BinaryOpICStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) && (state.HasSideEffects() || !result_type->Is(Type::None()))); HValue* result = NULL; HAllocationMode allocation_mode(NOT_TENURED); if (state.op() == Token::ADD && (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) && !left_type->Is(Type::String()) && !right_type->Is(Type::String())) { // For the generic add stub a fast case for string addition is performance // critical. if (left_type->Maybe(Type::String())) { IfBuilder if_leftisstring(this); if_leftisstring.If(left); if_leftisstring.Then(); { Push(BuildBinaryOperation(state.op(), left, right, Type::String(zone()), right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_leftisstring.Else(); { Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_leftisstring.End(); result = Pop(); } else { IfBuilder if_rightisstring(this); if_rightisstring.If(right); if_rightisstring.Then(); { Push(BuildBinaryOperation(state.op(), left, right, left_type, Type::String(zone()), result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_rightisstring.Else(); { Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_rightisstring.End(); result = Pop(); } } else { result = BuildBinaryOperation( state.op(), left, right, 
left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } // If we encounter a generic argument, the number conversion is // observable, thus we cannot afford to bail out after the fact. if (!state.HasSideEffects()) { result = EnforceNumberType(result, result_type); } return result; } Handle BinaryOpICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BinaryOpICState state = casted_stub()->state(); HValue* allocation_site = GetParameter( BinaryOpWithAllocationSiteStub::kAllocationSite); HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft); HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); HAllocationMode allocation_mode(allocation_site); return BuildBinaryOperation(state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } Handle BinaryOpWithAllocationSiteStub::GenerateCode() { return DoGenerateCode(this); } HValue* CodeStubGraphBuilderBase::BuildToString(HValue* input, bool convert) { if (!convert) return BuildCheckString(input); IfBuilder if_inputissmi(this); HValue* inputissmi = if_inputissmi.If(input); if_inputissmi.Then(); { // Convert the input smi to a string. Push(BuildNumberToString(input, Type::SignedSmall())); } if_inputissmi.Else(); { HValue* input_map = Add(input, inputissmi, HObjectAccess::ForMap()); HValue* input_instance_type = Add( input_map, inputissmi, HObjectAccess::ForMapInstanceType()); IfBuilder if_inputisstring(this); if_inputisstring.If( input_instance_type, Add(FIRST_NONSTRING_TYPE), Token::LT); if_inputisstring.Then(); { // The input is already a string. Push(input); } if_inputisstring.Else(); { // Convert to primitive first (if necessary), see // ES6 section 12.7.3 The Addition operator. 
IfBuilder if_inputisprimitive(this); STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE); if_inputisprimitive.If( input_instance_type, Add(LAST_PRIMITIVE_TYPE), Token::LTE); if_inputisprimitive.Then(); { // The input is already a primitive. Push(input); } if_inputisprimitive.Else(); { // Convert the input to a primitive. Push(BuildToPrimitive(input, input_map)); } if_inputisprimitive.End(); // Convert the primitive to a string value. ToStringDescriptor descriptor(isolate()); ToStringStub stub(isolate()); HValue* values[] = {context(), Pop()}; Push(AddUncasted( Add(stub.GetCode()), 0, descriptor, Vector(values, arraysize(values)))); } if_inputisstring.End(); } if_inputissmi.End(); return Pop(); } HValue* CodeStubGraphBuilderBase::BuildToPrimitive(HValue* input, HValue* input_map) { // Get the native context of the caller. HValue* native_context = BuildGetNativeContext(); // Determine the initial map of the %ObjectPrototype%. HValue* object_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::OBJECT_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the %StringPrototype%. HValue* string_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the String function. HValue* string_function = Add( native_context, nullptr, HObjectAccess::ForContextSlot(Context::STRING_FUNCTION_INDEX)); HValue* string_function_initial_map = Add( string_function, nullptr, HObjectAccess::ForPrototypeOrInitialMap()); // Determine the map of the [[Prototype]] of {input}. 
HValue* input_prototype = Add(input_map, nullptr, HObjectAccess::ForPrototype()); HValue* input_prototype_map = Add(input_prototype, nullptr, HObjectAccess::ForMap()); // For string wrappers (JSValue instances with [[StringData]] internal // fields), we can shortcirciut the ToPrimitive if // // (a) the {input} map matches the initial map of the String function, // (b) the {input} [[Prototype]] is the unmodified %StringPrototype% (i.e. // no one monkey-patched toString, @@toPrimitive or valueOf), and // (c) the %ObjectPrototype% (i.e. the [[Prototype]] of the // %StringPrototype%) is also unmodified, that is no one sneaked a // @@toPrimitive into the %ObjectPrototype%. // // If all these assumptions hold, we can just take the [[StringData]] value // and return it. // TODO(bmeurer): This just repairs a regression introduced by removing the // weird (and broken) intrinsic %_IsStringWrapperSafeForDefaultValue, which // was intendend to something similar to this, although less efficient and // wrong in the presence of @@toPrimitive. Long-term we might want to move // into the direction of having a ToPrimitiveStub that can do common cases // while staying in JavaScript land (i.e. not going to C++). IfBuilder if_inputisstringwrapper(this); if_inputisstringwrapper.If( input_map, string_function_initial_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( input_prototype_map, string_function_prototype_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( Add(Add(input_prototype_map, nullptr, HObjectAccess::ForPrototype()), nullptr, HObjectAccess::ForMap()), object_function_prototype_map); if_inputisstringwrapper.Then(); { Push(BuildLoadNamedField( input, FieldIndex::ForInObjectOffset(JSValue::kValueOffset))); } if_inputisstringwrapper.Else(); { // TODO(bmeurer): Add support for fast ToPrimitive conversion using // a dedicated ToPrimitiveStub. 
Add(input); Push(Add(Runtime::FunctionForId(Runtime::kToPrimitive), 1)); } if_inputisstringwrapper.End(); return Pop(); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StringAddStub* stub = casted_stub(); StringAddFlags flags = stub->flags(); PretenureFlag pretenure_flag = stub->pretenure_flag(); HValue* left = GetParameter(StringAddStub::kLeft); HValue* right = GetParameter(StringAddStub::kRight); // Make sure that both arguments are strings if not known in advance. if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) { left = BuildToString(left, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) { right = BuildToString(right, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } return BuildStringAdd(left, right, HAllocationMode(pretenure_flag)); } Handle StringAddStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { ToBooleanStub* stub = casted_stub(); IfBuilder if_true(this); if_true.If(GetParameter(0), stub->types()); if_true.Then(); if_true.Return(graph()->GetConstantTrue()); if_true.Else(); if_true.End(); return graph()->GetConstantFalse(); } Handle ToBooleanStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StoreGlobalStub* stub = casted_stub(); HParameter* value = GetParameter(StoreDescriptor::kValueIndex); if (stub->check_global()) { // Check that the map of the global has not changed: use a placeholder map // that will be replaced later with the global object's map. 
HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex); HValue* proxy_map = Add(proxy, nullptr, HObjectAccess::ForMap()); HValue* global = Add(proxy_map, nullptr, HObjectAccess::ForPrototype()); HValue* map_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::global_map_placeholder(isolate()))); HValue* expected_map = Add( map_cell, nullptr, HObjectAccess::ForWeakCellValue()); HValue* map = Add(global, nullptr, HObjectAccess::ForMap()); IfBuilder map_check(this); map_check.IfNot(expected_map, map); map_check.ThenDeopt(Deoptimizer::kUnknownMap); map_check.End(); } HValue* weak_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::property_cell_placeholder(isolate()))); HValue* cell = Add(weak_cell, nullptr, HObjectAccess::ForWeakCellValue()); Add
InternalArrayNoArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, SINGLE); } Handle InternalArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, MULTIPLE); } Handle InternalArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { Isolate* isolate = graph()->isolate(); CompareNilICStub* stub = casted_stub(); HIfContinuation continuation; Handle sentinel_map(isolate->heap()->meta_map()); Type* type = stub->GetType(zone(), sentinel_map); BuildCompareNil(GetParameter(0), type, &continuation, kEmbedMapsViaWeakCells); IfBuilder if_nil(this, &continuation); if_nil.Then(); if (continuation.IsFalseReachable()) { if_nil.Else(); if_nil.Return(graph()->GetConstantFalse()); } if_nil.End(); return continuation.IsTrueReachable() ? 
graph()->GetConstantTrue() : graph()->GetConstantUndefined(); } Handle CompareNilICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { BinaryOpICState state = casted_stub()->state(); HValue* left = GetParameter(BinaryOpICStub::kLeft); HValue* right = GetParameter(BinaryOpICStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) && (state.HasSideEffects() || !result_type->Is(Type::None()))); HValue* result = NULL; HAllocationMode allocation_mode(NOT_TENURED); if (state.op() == Token::ADD && (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) && !left_type->Is(Type::String()) && !right_type->Is(Type::String())) { // For the generic add stub a fast case for string addition is performance // critical. if (left_type->Maybe(Type::String())) { IfBuilder if_leftisstring(this); if_leftisstring.If(left); if_leftisstring.Then(); { Push(BuildBinaryOperation(state.op(), left, right, Type::String(zone()), right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_leftisstring.Else(); { Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_leftisstring.End(); result = Pop(); } else { IfBuilder if_rightisstring(this); if_rightisstring.If(right); if_rightisstring.Then(); { Push(BuildBinaryOperation(state.op(), left, right, left_type, Type::String(zone()), result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_rightisstring.Else(); { Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_rightisstring.End(); result = Pop(); } } else { result = BuildBinaryOperation( state.op(), left, right, 
left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } // If we encounter a generic argument, the number conversion is // observable, thus we cannot afford to bail out after the fact. if (!state.HasSideEffects()) { result = EnforceNumberType(result, result_type); } return result; } Handle BinaryOpICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BinaryOpICState state = casted_stub()->state(); HValue* allocation_site = GetParameter( BinaryOpWithAllocationSiteStub::kAllocationSite); HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft); HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); HAllocationMode allocation_mode(allocation_site); return BuildBinaryOperation(state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } Handle BinaryOpWithAllocationSiteStub::GenerateCode() { return DoGenerateCode(this); } HValue* CodeStubGraphBuilderBase::BuildToString(HValue* input, bool convert) { if (!convert) return BuildCheckString(input); IfBuilder if_inputissmi(this); HValue* inputissmi = if_inputissmi.If(input); if_inputissmi.Then(); { // Convert the input smi to a string. Push(BuildNumberToString(input, Type::SignedSmall())); } if_inputissmi.Else(); { HValue* input_map = Add(input, inputissmi, HObjectAccess::ForMap()); HValue* input_instance_type = Add( input_map, inputissmi, HObjectAccess::ForMapInstanceType()); IfBuilder if_inputisstring(this); if_inputisstring.If( input_instance_type, Add(FIRST_NONSTRING_TYPE), Token::LT); if_inputisstring.Then(); { // The input is already a string. Push(input); } if_inputisstring.Else(); { // Convert to primitive first (if necessary), see // ES6 section 12.7.3 The Addition operator. 
IfBuilder if_inputisprimitive(this); STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE); if_inputisprimitive.If( input_instance_type, Add(LAST_PRIMITIVE_TYPE), Token::LTE); if_inputisprimitive.Then(); { // The input is already a primitive. Push(input); } if_inputisprimitive.Else(); { // Convert the input to a primitive. Push(BuildToPrimitive(input, input_map)); } if_inputisprimitive.End(); // Convert the primitive to a string value. ToStringDescriptor descriptor(isolate()); ToStringStub stub(isolate()); HValue* values[] = {context(), Pop()}; Push(AddUncasted( Add(stub.GetCode()), 0, descriptor, Vector(values, arraysize(values)))); } if_inputisstring.End(); } if_inputissmi.End(); return Pop(); } HValue* CodeStubGraphBuilderBase::BuildToPrimitive(HValue* input, HValue* input_map) { // Get the native context of the caller. HValue* native_context = BuildGetNativeContext(); // Determine the initial map of the %ObjectPrototype%. HValue* object_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::OBJECT_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the %StringPrototype%. HValue* string_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the String function. HValue* string_function = Add( native_context, nullptr, HObjectAccess::ForContextSlot(Context::STRING_FUNCTION_INDEX)); HValue* string_function_initial_map = Add( string_function, nullptr, HObjectAccess::ForPrototypeOrInitialMap()); // Determine the map of the [[Prototype]] of {input}. 
HValue* input_prototype = Add(input_map, nullptr, HObjectAccess::ForPrototype()); HValue* input_prototype_map = Add(input_prototype, nullptr, HObjectAccess::ForMap()); // For string wrappers (JSValue instances with [[StringData]] internal // fields), we can shortcirciut the ToPrimitive if // // (a) the {input} map matches the initial map of the String function, // (b) the {input} [[Prototype]] is the unmodified %StringPrototype% (i.e. // no one monkey-patched toString, @@toPrimitive or valueOf), and // (c) the %ObjectPrototype% (i.e. the [[Prototype]] of the // %StringPrototype%) is also unmodified, that is no one sneaked a // @@toPrimitive into the %ObjectPrototype%. // // If all these assumptions hold, we can just take the [[StringData]] value // and return it. // TODO(bmeurer): This just repairs a regression introduced by removing the // weird (and broken) intrinsic %_IsStringWrapperSafeForDefaultValue, which // was intendend to something similar to this, although less efficient and // wrong in the presence of @@toPrimitive. Long-term we might want to move // into the direction of having a ToPrimitiveStub that can do common cases // while staying in JavaScript land (i.e. not going to C++). IfBuilder if_inputisstringwrapper(this); if_inputisstringwrapper.If( input_map, string_function_initial_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( input_prototype_map, string_function_prototype_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( Add(Add(input_prototype_map, nullptr, HObjectAccess::ForPrototype()), nullptr, HObjectAccess::ForMap()), object_function_prototype_map); if_inputisstringwrapper.Then(); { Push(BuildLoadNamedField( input, FieldIndex::ForInObjectOffset(JSValue::kValueOffset))); } if_inputisstringwrapper.Else(); { // TODO(bmeurer): Add support for fast ToPrimitive conversion using // a dedicated ToPrimitiveStub. 
Add(input); Push(Add(Runtime::FunctionForId(Runtime::kToPrimitive), 1)); } if_inputisstringwrapper.End(); return Pop(); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StringAddStub* stub = casted_stub(); StringAddFlags flags = stub->flags(); PretenureFlag pretenure_flag = stub->pretenure_flag(); HValue* left = GetParameter(StringAddStub::kLeft); HValue* right = GetParameter(StringAddStub::kRight); // Make sure that both arguments are strings if not known in advance. if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) { left = BuildToString(left, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) { right = BuildToString(right, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } return BuildStringAdd(left, right, HAllocationMode(pretenure_flag)); } Handle StringAddStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { ToBooleanStub* stub = casted_stub(); IfBuilder if_true(this); if_true.If(GetParameter(0), stub->types()); if_true.Then(); if_true.Return(graph()->GetConstantTrue()); if_true.Else(); if_true.End(); return graph()->GetConstantFalse(); } Handle ToBooleanStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StoreGlobalStub* stub = casted_stub(); HParameter* value = GetParameter(StoreDescriptor::kValueIndex); if (stub->check_global()) { // Check that the map of the global has not changed: use a placeholder map // that will be replaced later with the global object's map. 
HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex); HValue* proxy_map = Add(proxy, nullptr, HObjectAccess::ForMap()); HValue* global = Add(proxy_map, nullptr, HObjectAccess::ForPrototype()); HValue* map_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::global_map_placeholder(isolate()))); HValue* expected_map = Add( map_cell, nullptr, HObjectAccess::ForWeakCellValue()); HValue* map = Add(global, nullptr, HObjectAccess::ForMap()); IfBuilder map_check(this); map_check.IfNot(expected_map, map); map_check.ThenDeopt(Deoptimizer::kUnknownMap); map_check.End(); } HValue* weak_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::property_cell_placeholder(isolate()))); HValue* cell = Add(weak_cell, nullptr, HObjectAccess::ForWeakCellValue()); Add
InternalArraySingleArgumentConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder:: BuildCodeStub() { ElementsKind kind = casted_stub()->elements_kind(); return BuildInternalArrayConstructor(kind, MULTIPLE); } Handle InternalArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { Isolate* isolate = graph()->isolate(); CompareNilICStub* stub = casted_stub(); HIfContinuation continuation; Handle sentinel_map(isolate->heap()->meta_map()); Type* type = stub->GetType(zone(), sentinel_map); BuildCompareNil(GetParameter(0), type, &continuation, kEmbedMapsViaWeakCells); IfBuilder if_nil(this, &continuation); if_nil.Then(); if (continuation.IsFalseReachable()) { if_nil.Else(); if_nil.Return(graph()->GetConstantFalse()); } if_nil.End(); return continuation.IsTrueReachable() ? graph()->GetConstantTrue() : graph()->GetConstantUndefined(); } Handle CompareNilICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { BinaryOpICState state = casted_stub()->state(); HValue* left = GetParameter(BinaryOpICStub::kLeft); HValue* right = GetParameter(BinaryOpICStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) && (state.HasSideEffects() || !result_type->Is(Type::None()))); HValue* result = NULL; HAllocationMode allocation_mode(NOT_TENURED); if (state.op() == Token::ADD && (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) && !left_type->Is(Type::String()) && !right_type->Is(Type::String())) { // For the generic add stub a fast case for string addition is performance // critical. 
if (left_type->Maybe(Type::String())) { IfBuilder if_leftisstring(this); if_leftisstring.If(left); if_leftisstring.Then(); { Push(BuildBinaryOperation(state.op(), left, right, Type::String(zone()), right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_leftisstring.Else(); { Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_leftisstring.End(); result = Pop(); } else { IfBuilder if_rightisstring(this); if_rightisstring.If(right); if_rightisstring.Then(); { Push(BuildBinaryOperation(state.op(), left, right, left_type, Type::String(zone()), result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_rightisstring.Else(); { Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_rightisstring.End(); result = Pop(); } } else { result = BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } // If we encounter a generic argument, the number conversion is // observable, thus we cannot afford to bail out after the fact. 
if (!state.HasSideEffects()) { result = EnforceNumberType(result, result_type); } return result; } Handle BinaryOpICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BinaryOpICState state = casted_stub()->state(); HValue* allocation_site = GetParameter( BinaryOpWithAllocationSiteStub::kAllocationSite); HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft); HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); HAllocationMode allocation_mode(allocation_site); return BuildBinaryOperation(state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } Handle BinaryOpWithAllocationSiteStub::GenerateCode() { return DoGenerateCode(this); } HValue* CodeStubGraphBuilderBase::BuildToString(HValue* input, bool convert) { if (!convert) return BuildCheckString(input); IfBuilder if_inputissmi(this); HValue* inputissmi = if_inputissmi.If(input); if_inputissmi.Then(); { // Convert the input smi to a string. Push(BuildNumberToString(input, Type::SignedSmall())); } if_inputissmi.Else(); { HValue* input_map = Add(input, inputissmi, HObjectAccess::ForMap()); HValue* input_instance_type = Add( input_map, inputissmi, HObjectAccess::ForMapInstanceType()); IfBuilder if_inputisstring(this); if_inputisstring.If( input_instance_type, Add(FIRST_NONSTRING_TYPE), Token::LT); if_inputisstring.Then(); { // The input is already a string. Push(input); } if_inputisstring.Else(); { // Convert to primitive first (if necessary), see // ES6 section 12.7.3 The Addition operator. IfBuilder if_inputisprimitive(this); STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE); if_inputisprimitive.If( input_instance_type, Add(LAST_PRIMITIVE_TYPE), Token::LTE); if_inputisprimitive.Then(); { // The input is already a primitive. 
Push(input); } if_inputisprimitive.Else(); { // Convert the input to a primitive. Push(BuildToPrimitive(input, input_map)); } if_inputisprimitive.End(); // Convert the primitive to a string value. ToStringDescriptor descriptor(isolate()); ToStringStub stub(isolate()); HValue* values[] = {context(), Pop()}; Push(AddUncasted( Add(stub.GetCode()), 0, descriptor, Vector(values, arraysize(values)))); } if_inputisstring.End(); } if_inputissmi.End(); return Pop(); } HValue* CodeStubGraphBuilderBase::BuildToPrimitive(HValue* input, HValue* input_map) { // Get the native context of the caller. HValue* native_context = BuildGetNativeContext(); // Determine the initial map of the %ObjectPrototype%. HValue* object_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::OBJECT_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the %StringPrototype%. HValue* string_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the String function. HValue* string_function = Add( native_context, nullptr, HObjectAccess::ForContextSlot(Context::STRING_FUNCTION_INDEX)); HValue* string_function_initial_map = Add( string_function, nullptr, HObjectAccess::ForPrototypeOrInitialMap()); // Determine the map of the [[Prototype]] of {input}. HValue* input_prototype = Add(input_map, nullptr, HObjectAccess::ForPrototype()); HValue* input_prototype_map = Add(input_prototype, nullptr, HObjectAccess::ForMap()); // For string wrappers (JSValue instances with [[StringData]] internal // fields), we can shortcirciut the ToPrimitive if // // (a) the {input} map matches the initial map of the String function, // (b) the {input} [[Prototype]] is the unmodified %StringPrototype% (i.e. // no one monkey-patched toString, @@toPrimitive or valueOf), and // (c) the %ObjectPrototype% (i.e. 
the [[Prototype]] of the // %StringPrototype%) is also unmodified, that is no one sneaked a // @@toPrimitive into the %ObjectPrototype%. // // If all these assumptions hold, we can just take the [[StringData]] value // and return it. // TODO(bmeurer): This just repairs a regression introduced by removing the // weird (and broken) intrinsic %_IsStringWrapperSafeForDefaultValue, which // was intendend to something similar to this, although less efficient and // wrong in the presence of @@toPrimitive. Long-term we might want to move // into the direction of having a ToPrimitiveStub that can do common cases // while staying in JavaScript land (i.e. not going to C++). IfBuilder if_inputisstringwrapper(this); if_inputisstringwrapper.If( input_map, string_function_initial_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( input_prototype_map, string_function_prototype_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( Add(Add(input_prototype_map, nullptr, HObjectAccess::ForPrototype()), nullptr, HObjectAccess::ForMap()), object_function_prototype_map); if_inputisstringwrapper.Then(); { Push(BuildLoadNamedField( input, FieldIndex::ForInObjectOffset(JSValue::kValueOffset))); } if_inputisstringwrapper.Else(); { // TODO(bmeurer): Add support for fast ToPrimitive conversion using // a dedicated ToPrimitiveStub. Add(input); Push(Add(Runtime::FunctionForId(Runtime::kToPrimitive), 1)); } if_inputisstringwrapper.End(); return Pop(); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StringAddStub* stub = casted_stub(); StringAddFlags flags = stub->flags(); PretenureFlag pretenure_flag = stub->pretenure_flag(); HValue* left = GetParameter(StringAddStub::kLeft); HValue* right = GetParameter(StringAddStub::kRight); // Make sure that both arguments are strings if not known in advance. 
if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) { left = BuildToString(left, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) { right = BuildToString(right, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } return BuildStringAdd(left, right, HAllocationMode(pretenure_flag)); } Handle StringAddStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { ToBooleanStub* stub = casted_stub(); IfBuilder if_true(this); if_true.If(GetParameter(0), stub->types()); if_true.Then(); if_true.Return(graph()->GetConstantTrue()); if_true.Else(); if_true.End(); return graph()->GetConstantFalse(); } Handle ToBooleanStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StoreGlobalStub* stub = casted_stub(); HParameter* value = GetParameter(StoreDescriptor::kValueIndex); if (stub->check_global()) { // Check that the map of the global has not changed: use a placeholder map // that will be replaced later with the global object's map. HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex); HValue* proxy_map = Add(proxy, nullptr, HObjectAccess::ForMap()); HValue* global = Add(proxy_map, nullptr, HObjectAccess::ForPrototype()); HValue* map_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::global_map_placeholder(isolate()))); HValue* expected_map = Add( map_cell, nullptr, HObjectAccess::ForWeakCellValue()); HValue* map = Add(global, nullptr, HObjectAccess::ForMap()); IfBuilder map_check(this); map_check.IfNot(expected_map, map); map_check.ThenDeopt(Deoptimizer::kUnknownMap); map_check.End(); } HValue* weak_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::property_cell_placeholder(isolate()))); HValue* cell = Add(weak_cell, nullptr, HObjectAccess::ForWeakCellValue()); Add
InternalArrayNArgumentsConstructorStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { Isolate* isolate = graph()->isolate(); CompareNilICStub* stub = casted_stub(); HIfContinuation continuation; Handle sentinel_map(isolate->heap()->meta_map()); Type* type = stub->GetType(zone(), sentinel_map); BuildCompareNil(GetParameter(0), type, &continuation, kEmbedMapsViaWeakCells); IfBuilder if_nil(this, &continuation); if_nil.Then(); if (continuation.IsFalseReachable()) { if_nil.Else(); if_nil.Return(graph()->GetConstantFalse()); } if_nil.End(); return continuation.IsTrueReachable() ? graph()->GetConstantTrue() : graph()->GetConstantUndefined(); } Handle CompareNilICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { BinaryOpICState state = casted_stub()->state(); HValue* left = GetParameter(BinaryOpICStub::kLeft); HValue* right = GetParameter(BinaryOpICStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) && (state.HasSideEffects() || !result_type->Is(Type::None()))); HValue* result = NULL; HAllocationMode allocation_mode(NOT_TENURED); if (state.op() == Token::ADD && (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) && !left_type->Is(Type::String()) && !right_type->Is(Type::String())) { // For the generic add stub a fast case for string addition is performance // critical. 
if (left_type->Maybe(Type::String())) { IfBuilder if_leftisstring(this); if_leftisstring.If(left); if_leftisstring.Then(); { Push(BuildBinaryOperation(state.op(), left, right, Type::String(zone()), right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_leftisstring.Else(); { Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_leftisstring.End(); result = Pop(); } else { IfBuilder if_rightisstring(this); if_rightisstring.If(right); if_rightisstring.Then(); { Push(BuildBinaryOperation(state.op(), left, right, left_type, Type::String(zone()), result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_rightisstring.Else(); { Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_rightisstring.End(); result = Pop(); } } else { result = BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } // If we encounter a generic argument, the number conversion is // observable, thus we cannot afford to bail out after the fact. 
if (!state.HasSideEffects()) { result = EnforceNumberType(result, result_type); } return result; } Handle BinaryOpICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BinaryOpICState state = casted_stub()->state(); HValue* allocation_site = GetParameter( BinaryOpWithAllocationSiteStub::kAllocationSite); HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft); HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); HAllocationMode allocation_mode(allocation_site); return BuildBinaryOperation(state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } Handle BinaryOpWithAllocationSiteStub::GenerateCode() { return DoGenerateCode(this); } HValue* CodeStubGraphBuilderBase::BuildToString(HValue* input, bool convert) { if (!convert) return BuildCheckString(input); IfBuilder if_inputissmi(this); HValue* inputissmi = if_inputissmi.If(input); if_inputissmi.Then(); { // Convert the input smi to a string. Push(BuildNumberToString(input, Type::SignedSmall())); } if_inputissmi.Else(); { HValue* input_map = Add(input, inputissmi, HObjectAccess::ForMap()); HValue* input_instance_type = Add( input_map, inputissmi, HObjectAccess::ForMapInstanceType()); IfBuilder if_inputisstring(this); if_inputisstring.If( input_instance_type, Add(FIRST_NONSTRING_TYPE), Token::LT); if_inputisstring.Then(); { // The input is already a string. Push(input); } if_inputisstring.Else(); { // Convert to primitive first (if necessary), see // ES6 section 12.7.3 The Addition operator. IfBuilder if_inputisprimitive(this); STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE); if_inputisprimitive.If( input_instance_type, Add(LAST_PRIMITIVE_TYPE), Token::LTE); if_inputisprimitive.Then(); { // The input is already a primitive. 
Push(input); } if_inputisprimitive.Else(); { // Convert the input to a primitive. Push(BuildToPrimitive(input, input_map)); } if_inputisprimitive.End(); // Convert the primitive to a string value. ToStringDescriptor descriptor(isolate()); ToStringStub stub(isolate()); HValue* values[] = {context(), Pop()}; Push(AddUncasted( Add(stub.GetCode()), 0, descriptor, Vector(values, arraysize(values)))); } if_inputisstring.End(); } if_inputissmi.End(); return Pop(); } HValue* CodeStubGraphBuilderBase::BuildToPrimitive(HValue* input, HValue* input_map) { // Get the native context of the caller. HValue* native_context = BuildGetNativeContext(); // Determine the initial map of the %ObjectPrototype%. HValue* object_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::OBJECT_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the %StringPrototype%. HValue* string_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the String function. HValue* string_function = Add( native_context, nullptr, HObjectAccess::ForContextSlot(Context::STRING_FUNCTION_INDEX)); HValue* string_function_initial_map = Add( string_function, nullptr, HObjectAccess::ForPrototypeOrInitialMap()); // Determine the map of the [[Prototype]] of {input}. HValue* input_prototype = Add(input_map, nullptr, HObjectAccess::ForPrototype()); HValue* input_prototype_map = Add(input_prototype, nullptr, HObjectAccess::ForMap()); // For string wrappers (JSValue instances with [[StringData]] internal // fields), we can shortcirciut the ToPrimitive if // // (a) the {input} map matches the initial map of the String function, // (b) the {input} [[Prototype]] is the unmodified %StringPrototype% (i.e. // no one monkey-patched toString, @@toPrimitive or valueOf), and // (c) the %ObjectPrototype% (i.e. 
the [[Prototype]] of the // %StringPrototype%) is also unmodified, that is no one sneaked a // @@toPrimitive into the %ObjectPrototype%. // // If all these assumptions hold, we can just take the [[StringData]] value // and return it. // TODO(bmeurer): This just repairs a regression introduced by removing the // weird (and broken) intrinsic %_IsStringWrapperSafeForDefaultValue, which // was intendend to something similar to this, although less efficient and // wrong in the presence of @@toPrimitive. Long-term we might want to move // into the direction of having a ToPrimitiveStub that can do common cases // while staying in JavaScript land (i.e. not going to C++). IfBuilder if_inputisstringwrapper(this); if_inputisstringwrapper.If( input_map, string_function_initial_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( input_prototype_map, string_function_prototype_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( Add(Add(input_prototype_map, nullptr, HObjectAccess::ForPrototype()), nullptr, HObjectAccess::ForMap()), object_function_prototype_map); if_inputisstringwrapper.Then(); { Push(BuildLoadNamedField( input, FieldIndex::ForInObjectOffset(JSValue::kValueOffset))); } if_inputisstringwrapper.Else(); { // TODO(bmeurer): Add support for fast ToPrimitive conversion using // a dedicated ToPrimitiveStub. Add(input); Push(Add(Runtime::FunctionForId(Runtime::kToPrimitive), 1)); } if_inputisstringwrapper.End(); return Pop(); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StringAddStub* stub = casted_stub(); StringAddFlags flags = stub->flags(); PretenureFlag pretenure_flag = stub->pretenure_flag(); HValue* left = GetParameter(StringAddStub::kLeft); HValue* right = GetParameter(StringAddStub::kRight); // Make sure that both arguments are strings if not known in advance. 
if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) { left = BuildToString(left, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) { right = BuildToString(right, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } return BuildStringAdd(left, right, HAllocationMode(pretenure_flag)); } Handle StringAddStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { ToBooleanStub* stub = casted_stub(); IfBuilder if_true(this); if_true.If(GetParameter(0), stub->types()); if_true.Then(); if_true.Return(graph()->GetConstantTrue()); if_true.Else(); if_true.End(); return graph()->GetConstantFalse(); } Handle ToBooleanStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StoreGlobalStub* stub = casted_stub(); HParameter* value = GetParameter(StoreDescriptor::kValueIndex); if (stub->check_global()) { // Check that the map of the global has not changed: use a placeholder map // that will be replaced later with the global object's map. HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex); HValue* proxy_map = Add(proxy, nullptr, HObjectAccess::ForMap()); HValue* global = Add(proxy_map, nullptr, HObjectAccess::ForPrototype()); HValue* map_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::global_map_placeholder(isolate()))); HValue* expected_map = Add( map_cell, nullptr, HObjectAccess::ForWeakCellValue()); HValue* map = Add(global, nullptr, HObjectAccess::ForMap()); IfBuilder map_check(this); map_check.IfNot(expected_map, map); map_check.ThenDeopt(Deoptimizer::kUnknownMap); map_check.End(); } HValue* weak_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::property_cell_placeholder(isolate()))); HValue* cell = Add(weak_cell, nullptr, HObjectAccess::ForWeakCellValue()); Add
CompareNilICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { BinaryOpICState state = casted_stub()->state(); HValue* left = GetParameter(BinaryOpICStub::kLeft); HValue* right = GetParameter(BinaryOpICStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); DCHECK(!left_type->Is(Type::None()) && !right_type->Is(Type::None()) && (state.HasSideEffects() || !result_type->Is(Type::None()))); HValue* result = NULL; HAllocationMode allocation_mode(NOT_TENURED); if (state.op() == Token::ADD && (left_type->Maybe(Type::String()) || right_type->Maybe(Type::String())) && !left_type->Is(Type::String()) && !right_type->Is(Type::String())) { // For the generic add stub a fast case for string addition is performance // critical. if (left_type->Maybe(Type::String())) { IfBuilder if_leftisstring(this); if_leftisstring.If(left); if_leftisstring.Then(); { Push(BuildBinaryOperation(state.op(), left, right, Type::String(zone()), right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_leftisstring.Else(); { Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_leftisstring.End(); result = Pop(); } else { IfBuilder if_rightisstring(this); if_rightisstring.If(right); if_rightisstring.Then(); { Push(BuildBinaryOperation(state.op(), left, right, left_type, Type::String(zone()), result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_rightisstring.Else(); { Push(BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength())); } if_rightisstring.End(); result = Pop(); } } else { result = BuildBinaryOperation( state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), 
allocation_mode, state.strength()); } // If we encounter a generic argument, the number conversion is // observable, thus we cannot afford to bail out after the fact. if (!state.HasSideEffects()) { result = EnforceNumberType(result, result_type); } return result; } Handle BinaryOpICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BinaryOpICState state = casted_stub()->state(); HValue* allocation_site = GetParameter( BinaryOpWithAllocationSiteStub::kAllocationSite); HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft); HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); HAllocationMode allocation_mode(allocation_site); return BuildBinaryOperation(state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } Handle BinaryOpWithAllocationSiteStub::GenerateCode() { return DoGenerateCode(this); } HValue* CodeStubGraphBuilderBase::BuildToString(HValue* input, bool convert) { if (!convert) return BuildCheckString(input); IfBuilder if_inputissmi(this); HValue* inputissmi = if_inputissmi.If(input); if_inputissmi.Then(); { // Convert the input smi to a string. Push(BuildNumberToString(input, Type::SignedSmall())); } if_inputissmi.Else(); { HValue* input_map = Add(input, inputissmi, HObjectAccess::ForMap()); HValue* input_instance_type = Add( input_map, inputissmi, HObjectAccess::ForMapInstanceType()); IfBuilder if_inputisstring(this); if_inputisstring.If( input_instance_type, Add(FIRST_NONSTRING_TYPE), Token::LT); if_inputisstring.Then(); { // The input is already a string. Push(input); } if_inputisstring.Else(); { // Convert to primitive first (if necessary), see // ES6 section 12.7.3 The Addition operator. 
IfBuilder if_inputisprimitive(this); STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE); if_inputisprimitive.If( input_instance_type, Add(LAST_PRIMITIVE_TYPE), Token::LTE); if_inputisprimitive.Then(); { // The input is already a primitive. Push(input); } if_inputisprimitive.Else(); { // Convert the input to a primitive. Push(BuildToPrimitive(input, input_map)); } if_inputisprimitive.End(); // Convert the primitive to a string value. ToStringDescriptor descriptor(isolate()); ToStringStub stub(isolate()); HValue* values[] = {context(), Pop()}; Push(AddUncasted( Add(stub.GetCode()), 0, descriptor, Vector(values, arraysize(values)))); } if_inputisstring.End(); } if_inputissmi.End(); return Pop(); } HValue* CodeStubGraphBuilderBase::BuildToPrimitive(HValue* input, HValue* input_map) { // Get the native context of the caller. HValue* native_context = BuildGetNativeContext(); // Determine the initial map of the %ObjectPrototype%. HValue* object_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::OBJECT_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the %StringPrototype%. HValue* string_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the String function. HValue* string_function = Add( native_context, nullptr, HObjectAccess::ForContextSlot(Context::STRING_FUNCTION_INDEX)); HValue* string_function_initial_map = Add( string_function, nullptr, HObjectAccess::ForPrototypeOrInitialMap()); // Determine the map of the [[Prototype]] of {input}. 
HValue* input_prototype = Add(input_map, nullptr, HObjectAccess::ForPrototype()); HValue* input_prototype_map = Add(input_prototype, nullptr, HObjectAccess::ForMap()); // For string wrappers (JSValue instances with [[StringData]] internal // fields), we can shortcirciut the ToPrimitive if // // (a) the {input} map matches the initial map of the String function, // (b) the {input} [[Prototype]] is the unmodified %StringPrototype% (i.e. // no one monkey-patched toString, @@toPrimitive or valueOf), and // (c) the %ObjectPrototype% (i.e. the [[Prototype]] of the // %StringPrototype%) is also unmodified, that is no one sneaked a // @@toPrimitive into the %ObjectPrototype%. // // If all these assumptions hold, we can just take the [[StringData]] value // and return it. // TODO(bmeurer): This just repairs a regression introduced by removing the // weird (and broken) intrinsic %_IsStringWrapperSafeForDefaultValue, which // was intendend to something similar to this, although less efficient and // wrong in the presence of @@toPrimitive. Long-term we might want to move // into the direction of having a ToPrimitiveStub that can do common cases // while staying in JavaScript land (i.e. not going to C++). IfBuilder if_inputisstringwrapper(this); if_inputisstringwrapper.If( input_map, string_function_initial_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( input_prototype_map, string_function_prototype_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( Add(Add(input_prototype_map, nullptr, HObjectAccess::ForPrototype()), nullptr, HObjectAccess::ForMap()), object_function_prototype_map); if_inputisstringwrapper.Then(); { Push(BuildLoadNamedField( input, FieldIndex::ForInObjectOffset(JSValue::kValueOffset))); } if_inputisstringwrapper.Else(); { // TODO(bmeurer): Add support for fast ToPrimitive conversion using // a dedicated ToPrimitiveStub. 
Add(input); Push(Add(Runtime::FunctionForId(Runtime::kToPrimitive), 1)); } if_inputisstringwrapper.End(); return Pop(); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StringAddStub* stub = casted_stub(); StringAddFlags flags = stub->flags(); PretenureFlag pretenure_flag = stub->pretenure_flag(); HValue* left = GetParameter(StringAddStub::kLeft); HValue* right = GetParameter(StringAddStub::kRight); // Make sure that both arguments are strings if not known in advance. if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) { left = BuildToString(left, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) { right = BuildToString(right, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } return BuildStringAdd(left, right, HAllocationMode(pretenure_flag)); } Handle StringAddStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { ToBooleanStub* stub = casted_stub(); IfBuilder if_true(this); if_true.If(GetParameter(0), stub->types()); if_true.Then(); if_true.Return(graph()->GetConstantTrue()); if_true.Else(); if_true.End(); return graph()->GetConstantFalse(); } Handle ToBooleanStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StoreGlobalStub* stub = casted_stub(); HParameter* value = GetParameter(StoreDescriptor::kValueIndex); if (stub->check_global()) { // Check that the map of the global has not changed: use a placeholder map // that will be replaced later with the global object's map. 
HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex); HValue* proxy_map = Add(proxy, nullptr, HObjectAccess::ForMap()); HValue* global = Add(proxy_map, nullptr, HObjectAccess::ForPrototype()); HValue* map_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::global_map_placeholder(isolate()))); HValue* expected_map = Add( map_cell, nullptr, HObjectAccess::ForWeakCellValue()); HValue* map = Add(global, nullptr, HObjectAccess::ForMap()); IfBuilder map_check(this); map_check.IfNot(expected_map, map); map_check.ThenDeopt(Deoptimizer::kUnknownMap); map_check.End(); } HValue* weak_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::property_cell_placeholder(isolate()))); HValue* cell = Add(weak_cell, nullptr, HObjectAccess::ForWeakCellValue()); Add
BinaryOpICStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeStub() { BinaryOpICState state = casted_stub()->state(); HValue* allocation_site = GetParameter( BinaryOpWithAllocationSiteStub::kAllocationSite); HValue* left = GetParameter(BinaryOpWithAllocationSiteStub::kLeft); HValue* right = GetParameter(BinaryOpWithAllocationSiteStub::kRight); Type* left_type = state.GetLeftType(); Type* right_type = state.GetRightType(); Type* result_type = state.GetResultType(); HAllocationMode allocation_mode(allocation_site); return BuildBinaryOperation(state.op(), left, right, left_type, right_type, result_type, state.fixed_right_arg(), allocation_mode, state.strength()); } Handle BinaryOpWithAllocationSiteStub::GenerateCode() { return DoGenerateCode(this); } HValue* CodeStubGraphBuilderBase::BuildToString(HValue* input, bool convert) { if (!convert) return BuildCheckString(input); IfBuilder if_inputissmi(this); HValue* inputissmi = if_inputissmi.If(input); if_inputissmi.Then(); { // Convert the input smi to a string. Push(BuildNumberToString(input, Type::SignedSmall())); } if_inputissmi.Else(); { HValue* input_map = Add(input, inputissmi, HObjectAccess::ForMap()); HValue* input_instance_type = Add( input_map, inputissmi, HObjectAccess::ForMapInstanceType()); IfBuilder if_inputisstring(this); if_inputisstring.If( input_instance_type, Add(FIRST_NONSTRING_TYPE), Token::LT); if_inputisstring.Then(); { // The input is already a string. Push(input); } if_inputisstring.Else(); { // Convert to primitive first (if necessary), see // ES6 section 12.7.3 The Addition operator. IfBuilder if_inputisprimitive(this); STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE); if_inputisprimitive.If( input_instance_type, Add(LAST_PRIMITIVE_TYPE), Token::LTE); if_inputisprimitive.Then(); { // The input is already a primitive. Push(input); } if_inputisprimitive.Else(); { // Convert the input to a primitive. 
Push(BuildToPrimitive(input, input_map)); } if_inputisprimitive.End(); // Convert the primitive to a string value. ToStringDescriptor descriptor(isolate()); ToStringStub stub(isolate()); HValue* values[] = {context(), Pop()}; Push(AddUncasted( Add(stub.GetCode()), 0, descriptor, Vector(values, arraysize(values)))); } if_inputisstring.End(); } if_inputissmi.End(); return Pop(); } HValue* CodeStubGraphBuilderBase::BuildToPrimitive(HValue* input, HValue* input_map) { // Get the native context of the caller. HValue* native_context = BuildGetNativeContext(); // Determine the initial map of the %ObjectPrototype%. HValue* object_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::OBJECT_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the %StringPrototype%. HValue* string_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the String function. HValue* string_function = Add( native_context, nullptr, HObjectAccess::ForContextSlot(Context::STRING_FUNCTION_INDEX)); HValue* string_function_initial_map = Add( string_function, nullptr, HObjectAccess::ForPrototypeOrInitialMap()); // Determine the map of the [[Prototype]] of {input}. HValue* input_prototype = Add(input_map, nullptr, HObjectAccess::ForPrototype()); HValue* input_prototype_map = Add(input_prototype, nullptr, HObjectAccess::ForMap()); // For string wrappers (JSValue instances with [[StringData]] internal // fields), we can shortcirciut the ToPrimitive if // // (a) the {input} map matches the initial map of the String function, // (b) the {input} [[Prototype]] is the unmodified %StringPrototype% (i.e. // no one monkey-patched toString, @@toPrimitive or valueOf), and // (c) the %ObjectPrototype% (i.e. the [[Prototype]] of the // %StringPrototype%) is also unmodified, that is no one sneaked a // @@toPrimitive into the %ObjectPrototype%. 
// // If all these assumptions hold, we can just take the [[StringData]] value // and return it. // TODO(bmeurer): This just repairs a regression introduced by removing the // weird (and broken) intrinsic %_IsStringWrapperSafeForDefaultValue, which // was intendend to something similar to this, although less efficient and // wrong in the presence of @@toPrimitive. Long-term we might want to move // into the direction of having a ToPrimitiveStub that can do common cases // while staying in JavaScript land (i.e. not going to C++). IfBuilder if_inputisstringwrapper(this); if_inputisstringwrapper.If( input_map, string_function_initial_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( input_prototype_map, string_function_prototype_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( Add(Add(input_prototype_map, nullptr, HObjectAccess::ForPrototype()), nullptr, HObjectAccess::ForMap()), object_function_prototype_map); if_inputisstringwrapper.Then(); { Push(BuildLoadNamedField( input, FieldIndex::ForInObjectOffset(JSValue::kValueOffset))); } if_inputisstringwrapper.Else(); { // TODO(bmeurer): Add support for fast ToPrimitive conversion using // a dedicated ToPrimitiveStub. Add(input); Push(Add(Runtime::FunctionForId(Runtime::kToPrimitive), 1)); } if_inputisstringwrapper.End(); return Pop(); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StringAddStub* stub = casted_stub(); StringAddFlags flags = stub->flags(); PretenureFlag pretenure_flag = stub->pretenure_flag(); HValue* left = GetParameter(StringAddStub::kLeft); HValue* right = GetParameter(StringAddStub::kRight); // Make sure that both arguments are strings if not known in advance. 
if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) { left = BuildToString(left, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) { right = BuildToString(right, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } return BuildStringAdd(left, right, HAllocationMode(pretenure_flag)); } Handle StringAddStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { ToBooleanStub* stub = casted_stub(); IfBuilder if_true(this); if_true.If(GetParameter(0), stub->types()); if_true.Then(); if_true.Return(graph()->GetConstantTrue()); if_true.Else(); if_true.End(); return graph()->GetConstantFalse(); } Handle ToBooleanStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StoreGlobalStub* stub = casted_stub(); HParameter* value = GetParameter(StoreDescriptor::kValueIndex); if (stub->check_global()) { // Check that the map of the global has not changed: use a placeholder map // that will be replaced later with the global object's map. HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex); HValue* proxy_map = Add(proxy, nullptr, HObjectAccess::ForMap()); HValue* global = Add(proxy_map, nullptr, HObjectAccess::ForPrototype()); HValue* map_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::global_map_placeholder(isolate()))); HValue* expected_map = Add( map_cell, nullptr, HObjectAccess::ForWeakCellValue()); HValue* map = Add(global, nullptr, HObjectAccess::ForMap()); IfBuilder map_check(this); map_check.IfNot(expected_map, map); map_check.ThenDeopt(Deoptimizer::kUnknownMap); map_check.End(); } HValue* weak_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::property_cell_placeholder(isolate()))); HValue* cell = Add(weak_cell, nullptr, HObjectAccess::ForWeakCellValue()); Add
BinaryOpWithAllocationSiteStub::GenerateCode() { return DoGenerateCode(this); } HValue* CodeStubGraphBuilderBase::BuildToString(HValue* input, bool convert) { if (!convert) return BuildCheckString(input); IfBuilder if_inputissmi(this); HValue* inputissmi = if_inputissmi.If(input); if_inputissmi.Then(); { // Convert the input smi to a string. Push(BuildNumberToString(input, Type::SignedSmall())); } if_inputissmi.Else(); { HValue* input_map = Add(input, inputissmi, HObjectAccess::ForMap()); HValue* input_instance_type = Add( input_map, inputissmi, HObjectAccess::ForMapInstanceType()); IfBuilder if_inputisstring(this); if_inputisstring.If( input_instance_type, Add(FIRST_NONSTRING_TYPE), Token::LT); if_inputisstring.Then(); { // The input is already a string. Push(input); } if_inputisstring.Else(); { // Convert to primitive first (if necessary), see // ES6 section 12.7.3 The Addition operator. IfBuilder if_inputisprimitive(this); STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE); if_inputisprimitive.If( input_instance_type, Add(LAST_PRIMITIVE_TYPE), Token::LTE); if_inputisprimitive.Then(); { // The input is already a primitive. Push(input); } if_inputisprimitive.Else(); { // Convert the input to a primitive. Push(BuildToPrimitive(input, input_map)); } if_inputisprimitive.End(); // Convert the primitive to a string value. ToStringDescriptor descriptor(isolate()); ToStringStub stub(isolate()); HValue* values[] = {context(), Pop()}; Push(AddUncasted( Add(stub.GetCode()), 0, descriptor, Vector(values, arraysize(values)))); } if_inputisstring.End(); } if_inputissmi.End(); return Pop(); } HValue* CodeStubGraphBuilderBase::BuildToPrimitive(HValue* input, HValue* input_map) { // Get the native context of the caller. HValue* native_context = BuildGetNativeContext(); // Determine the initial map of the %ObjectPrototype%. 
HValue* object_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::OBJECT_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the %StringPrototype%. HValue* string_function_prototype_map = Add(native_context, nullptr, HObjectAccess::ForContextSlot( Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX)); // Determine the initial map of the String function. HValue* string_function = Add( native_context, nullptr, HObjectAccess::ForContextSlot(Context::STRING_FUNCTION_INDEX)); HValue* string_function_initial_map = Add( string_function, nullptr, HObjectAccess::ForPrototypeOrInitialMap()); // Determine the map of the [[Prototype]] of {input}. HValue* input_prototype = Add(input_map, nullptr, HObjectAccess::ForPrototype()); HValue* input_prototype_map = Add(input_prototype, nullptr, HObjectAccess::ForMap()); // For string wrappers (JSValue instances with [[StringData]] internal // fields), we can shortcirciut the ToPrimitive if // // (a) the {input} map matches the initial map of the String function, // (b) the {input} [[Prototype]] is the unmodified %StringPrototype% (i.e. // no one monkey-patched toString, @@toPrimitive or valueOf), and // (c) the %ObjectPrototype% (i.e. the [[Prototype]] of the // %StringPrototype%) is also unmodified, that is no one sneaked a // @@toPrimitive into the %ObjectPrototype%. // // If all these assumptions hold, we can just take the [[StringData]] value // and return it. // TODO(bmeurer): This just repairs a regression introduced by removing the // weird (and broken) intrinsic %_IsStringWrapperSafeForDefaultValue, which // was intendend to something similar to this, although less efficient and // wrong in the presence of @@toPrimitive. Long-term we might want to move // into the direction of having a ToPrimitiveStub that can do common cases // while staying in JavaScript land (i.e. not going to C++). 
IfBuilder if_inputisstringwrapper(this); if_inputisstringwrapper.If( input_map, string_function_initial_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( input_prototype_map, string_function_prototype_map); if_inputisstringwrapper.And(); if_inputisstringwrapper.If( Add(Add(input_prototype_map, nullptr, HObjectAccess::ForPrototype()), nullptr, HObjectAccess::ForMap()), object_function_prototype_map); if_inputisstringwrapper.Then(); { Push(BuildLoadNamedField( input, FieldIndex::ForInObjectOffset(JSValue::kValueOffset))); } if_inputisstringwrapper.Else(); { // TODO(bmeurer): Add support for fast ToPrimitive conversion using // a dedicated ToPrimitiveStub. Add(input); Push(Add(Runtime::FunctionForId(Runtime::kToPrimitive), 1)); } if_inputisstringwrapper.End(); return Pop(); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StringAddStub* stub = casted_stub(); StringAddFlags flags = stub->flags(); PretenureFlag pretenure_flag = stub->pretenure_flag(); HValue* left = GetParameter(StringAddStub::kLeft); HValue* right = GetParameter(StringAddStub::kRight); // Make sure that both arguments are strings if not known in advance. 
if ((flags & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) { left = BuildToString(left, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } if ((flags & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) { right = BuildToString(right, (flags & STRING_ADD_CONVERT) == STRING_ADD_CONVERT); } return BuildStringAdd(left, right, HAllocationMode(pretenure_flag)); } Handle StringAddStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { ToBooleanStub* stub = casted_stub(); IfBuilder if_true(this); if_true.If(GetParameter(0), stub->types()); if_true.Then(); if_true.Return(graph()->GetConstantTrue()); if_true.Else(); if_true.End(); return graph()->GetConstantFalse(); } Handle ToBooleanStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StoreGlobalStub* stub = casted_stub(); HParameter* value = GetParameter(StoreDescriptor::kValueIndex); if (stub->check_global()) { // Check that the map of the global has not changed: use a placeholder map // that will be replaced later with the global object's map. HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex); HValue* proxy_map = Add(proxy, nullptr, HObjectAccess::ForMap()); HValue* global = Add(proxy_map, nullptr, HObjectAccess::ForPrototype()); HValue* map_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::global_map_placeholder(isolate()))); HValue* expected_map = Add( map_cell, nullptr, HObjectAccess::ForWeakCellValue()); HValue* map = Add(global, nullptr, HObjectAccess::ForMap()); IfBuilder map_check(this); map_check.IfNot(expected_map, map); map_check.ThenDeopt(Deoptimizer::kUnknownMap); map_check.End(); } HValue* weak_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::property_cell_placeholder(isolate()))); HValue* cell = Add(weak_cell, nullptr, HObjectAccess::ForWeakCellValue()); Add
StringAddStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { ToBooleanStub* stub = casted_stub(); IfBuilder if_true(this); if_true.If(GetParameter(0), stub->types()); if_true.Then(); if_true.Return(graph()->GetConstantTrue()); if_true.Else(); if_true.End(); return graph()->GetConstantFalse(); } Handle ToBooleanStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StoreGlobalStub* stub = casted_stub(); HParameter* value = GetParameter(StoreDescriptor::kValueIndex); if (stub->check_global()) { // Check that the map of the global has not changed: use a placeholder map // that will be replaced later with the global object's map. HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex); HValue* proxy_map = Add(proxy, nullptr, HObjectAccess::ForMap()); HValue* global = Add(proxy_map, nullptr, HObjectAccess::ForPrototype()); HValue* map_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::global_map_placeholder(isolate()))); HValue* expected_map = Add( map_cell, nullptr, HObjectAccess::ForWeakCellValue()); HValue* map = Add(global, nullptr, HObjectAccess::ForMap()); IfBuilder map_check(this); map_check.IfNot(expected_map, map); map_check.ThenDeopt(Deoptimizer::kUnknownMap); map_check.End(); } HValue* weak_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::property_cell_placeholder(isolate()))); HValue* cell = Add(weak_cell, nullptr, HObjectAccess::ForWeakCellValue()); Add
ToBooleanStub::GenerateCode() { return DoGenerateCode(this); } template <> HValue* CodeStubGraphBuilder::BuildCodeInitializedStub() { StoreGlobalStub* stub = casted_stub(); HParameter* value = GetParameter(StoreDescriptor::kValueIndex); if (stub->check_global()) { // Check that the map of the global has not changed: use a placeholder map // that will be replaced later with the global object's map. HParameter* proxy = GetParameter(StoreDescriptor::kReceiverIndex); HValue* proxy_map = Add(proxy, nullptr, HObjectAccess::ForMap()); HValue* global = Add(proxy_map, nullptr, HObjectAccess::ForPrototype()); HValue* map_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::global_map_placeholder(isolate()))); HValue* expected_map = Add( map_cell, nullptr, HObjectAccess::ForWeakCellValue()); HValue* map = Add(global, nullptr, HObjectAccess::ForMap()); IfBuilder map_check(this); map_check.IfNot(expected_map, map); map_check.ThenDeopt(Deoptimizer::kUnknownMap); map_check.End(); } HValue* weak_cell = Add(isolate()->factory()->NewWeakCell( StoreGlobalStub::property_cell_placeholder(isolate()))); HValue* cell = Add(weak_cell, nullptr, HObjectAccess::ForWeakCellValue()); Add