HELLO·Android
系统源代码
IT资讯
技术文章
我的收藏
注册
登录
-
我收藏的文章
创建代码块
我的代码块
我的账号
Nougat 7.0
|
7.0.0_r31
下载
查看原文件
收藏
根目录
external
v8
src
snapshot
serialize.cc
// Copyright 2012 the V8 project authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "src/snapshot/serialize.h" #include "src/accessors.h" #include "src/api.h" #include "src/base/platform/platform.h" #include "src/bootstrapper.h" #include "src/code-stubs.h" #include "src/deoptimizer.h" #include "src/execution.h" #include "src/global-handles.h" #include "src/ic/ic.h" #include "src/ic/stub-cache.h" #include "src/objects.h" #include "src/parsing/parser.h" #include "src/profiler/cpu-profiler.h" #include "src/runtime/runtime.h" #include "src/snapshot/natives.h" #include "src/snapshot/snapshot.h" #include "src/snapshot/snapshot-source-sink.h" #include "src/v8.h" #include "src/v8threads.h" #include "src/version.h" namespace v8 { namespace internal { // ----------------------------------------------------------------------------- // Coding of external references. ExternalReferenceTable* ExternalReferenceTable::instance(Isolate* isolate) { ExternalReferenceTable* external_reference_table = isolate->external_reference_table(); if (external_reference_table == NULL) { external_reference_table = new ExternalReferenceTable(isolate); isolate->set_external_reference_table(external_reference_table); } return external_reference_table; } ExternalReferenceTable::ExternalReferenceTable(Isolate* isolate) { // Miscellaneous Add(ExternalReference::roots_array_start(isolate).address(), "Heap::roots_array_start()"); Add(ExternalReference::address_of_stack_limit(isolate).address(), "StackGuard::address_of_jslimit()"); Add(ExternalReference::address_of_real_stack_limit(isolate).address(), "StackGuard::address_of_real_jslimit()"); Add(ExternalReference::new_space_start(isolate).address(), "Heap::NewSpaceStart()"); Add(ExternalReference::new_space_mask(isolate).address(), "Heap::NewSpaceMask()"); Add(ExternalReference::new_space_allocation_limit_address(isolate).address(), 
"Heap::NewSpaceAllocationLimitAddress()"); Add(ExternalReference::new_space_allocation_top_address(isolate).address(), "Heap::NewSpaceAllocationTopAddress()"); Add(ExternalReference::mod_two_doubles_operation(isolate).address(), "mod_two_doubles"); // Keyed lookup cache. Add(ExternalReference::keyed_lookup_cache_keys(isolate).address(), "KeyedLookupCache::keys()"); Add(ExternalReference::keyed_lookup_cache_field_offsets(isolate).address(), "KeyedLookupCache::field_offsets()"); Add(ExternalReference::handle_scope_next_address(isolate).address(), "HandleScope::next"); Add(ExternalReference::handle_scope_limit_address(isolate).address(), "HandleScope::limit"); Add(ExternalReference::handle_scope_level_address(isolate).address(), "HandleScope::level"); Add(ExternalReference::new_deoptimizer_function(isolate).address(), "Deoptimizer::New()"); Add(ExternalReference::compute_output_frames_function(isolate).address(), "Deoptimizer::ComputeOutputFrames()"); Add(ExternalReference::address_of_min_int().address(), "LDoubleConstant::min_int"); Add(ExternalReference::address_of_one_half().address(), "LDoubleConstant::one_half"); Add(ExternalReference::isolate_address(isolate).address(), "isolate"); Add(ExternalReference::address_of_negative_infinity().address(), "LDoubleConstant::negative_infinity"); Add(ExternalReference::power_double_double_function(isolate).address(), "power_double_double_function"); Add(ExternalReference::power_double_int_function(isolate).address(), "power_double_int_function"); Add(ExternalReference::math_log_double_function(isolate).address(), "std::log"); Add(ExternalReference::store_buffer_top(isolate).address(), "store_buffer_top"); Add(ExternalReference::address_of_the_hole_nan().address(), "the_hole_nan"); Add(ExternalReference::get_date_field_function(isolate).address(), "JSDate::GetField"); Add(ExternalReference::date_cache_stamp(isolate).address(), "date_cache_stamp"); Add(ExternalReference::address_of_pending_message_obj(isolate).address(), 
"address_of_pending_message_obj"); Add(ExternalReference::get_make_code_young_function(isolate).address(), "Code::MakeCodeYoung"); Add(ExternalReference::cpu_features().address(), "cpu_features"); Add(ExternalReference::old_space_allocation_top_address(isolate).address(), "Heap::OldSpaceAllocationTopAddress"); Add(ExternalReference::old_space_allocation_limit_address(isolate).address(), "Heap::OldSpaceAllocationLimitAddress"); Add(ExternalReference::allocation_sites_list_address(isolate).address(), "Heap::allocation_sites_list_address()"); Add(ExternalReference::address_of_uint32_bias().address(), "uint32_bias"); Add(ExternalReference::get_mark_code_as_executed_function(isolate).address(), "Code::MarkCodeAsExecuted"); Add(ExternalReference::is_profiling_address(isolate).address(), "CpuProfiler::is_profiling"); Add(ExternalReference::scheduled_exception_address(isolate).address(), "Isolate::scheduled_exception"); Add(ExternalReference::invoke_function_callback(isolate).address(), "InvokeFunctionCallback"); Add(ExternalReference::invoke_accessor_getter_callback(isolate).address(), "InvokeAccessorGetterCallback"); Add(ExternalReference::log_enter_external_function(isolate).address(), "Logger::EnterExternal"); Add(ExternalReference::log_leave_external_function(isolate).address(), "Logger::LeaveExternal"); Add(ExternalReference::address_of_minus_one_half().address(), "double_constants.minus_one_half"); Add(ExternalReference::stress_deopt_count(isolate).address(), "Isolate::stress_deopt_count_address()"); Add(ExternalReference::virtual_handler_register(isolate).address(), "Isolate::virtual_handler_register()"); Add(ExternalReference::virtual_slot_register(isolate).address(), "Isolate::virtual_slot_register()"); Add(ExternalReference::runtime_function_table_address(isolate).address(), "Runtime::runtime_function_table_address()"); // Debug addresses Add(ExternalReference::debug_after_break_target_address(isolate).address(), "Debug::after_break_target_address()"); 
Add(ExternalReference::debug_is_active_address(isolate).address(), "Debug::is_active_address()"); Add(ExternalReference::debug_step_in_enabled_address(isolate).address(), "Debug::step_in_enabled_address()"); #ifndef V8_INTERPRETED_REGEXP Add(ExternalReference::re_case_insensitive_compare_uc16(isolate).address(), "NativeRegExpMacroAssembler::CaseInsensitiveCompareUC16()"); Add(ExternalReference::re_check_stack_guard_state(isolate).address(), "RegExpMacroAssembler*::CheckStackGuardState()"); Add(ExternalReference::re_grow_stack(isolate).address(), "NativeRegExpMacroAssembler::GrowStack()"); Add(ExternalReference::re_word_character_map().address(), "NativeRegExpMacroAssembler::word_character_map"); Add(ExternalReference::address_of_regexp_stack_limit(isolate).address(), "RegExpStack::limit_address()"); Add(ExternalReference::address_of_regexp_stack_memory_address(isolate) .address(), "RegExpStack::memory_address()"); Add(ExternalReference::address_of_regexp_stack_memory_size(isolate).address(), "RegExpStack::memory_size()"); Add(ExternalReference::address_of_static_offsets_vector(isolate).address(), "OffsetsVector::static_offsets_vector"); #endif // V8_INTERPRETED_REGEXP // The following populates all of the different type of external references // into the ExternalReferenceTable. // // NOTE: This function was originally 100k of code. It has since been // rewritten to be mostly table driven, as the callback macro style tends to // very easily cause code bloat. Please be careful in the future when adding // new references. struct RefTableEntry { uint16_t id; const char* name; }; static const RefTableEntry c_builtins[] = { #define DEF_ENTRY_C(name, ignored) \ { Builtins::c_##name, "Builtins::" #name } \ , BUILTIN_LIST_C(DEF_ENTRY_C) #undef DEF_ENTRY_C }; for (unsigned i = 0; i < arraysize(c_builtins); ++i) { ExternalReference ref(static_cast
(c_builtins[i].id), isolate); Add(ref.address(), c_builtins[i].name); } static const RefTableEntry builtins[] = { #define DEF_ENTRY_C(name, ignored) \ { Builtins::k##name, "Builtins::" #name } \ , #define DEF_ENTRY_A(name, i1, i2, i3) \ { Builtins::k##name, "Builtins::" #name } \ , BUILTIN_LIST_C(DEF_ENTRY_C) BUILTIN_LIST_A(DEF_ENTRY_A) BUILTIN_LIST_DEBUG_A(DEF_ENTRY_A) #undef DEF_ENTRY_C #undef DEF_ENTRY_A }; for (unsigned i = 0; i < arraysize(builtins); ++i) { ExternalReference ref(static_cast
(builtins[i].id), isolate); Add(ref.address(), builtins[i].name); } static const RefTableEntry runtime_functions[] = { #define RUNTIME_ENTRY(name, i1, i2) \ { Runtime::k##name, "Runtime::" #name } \ , FOR_EACH_INTRINSIC(RUNTIME_ENTRY) #undef RUNTIME_ENTRY }; for (unsigned i = 0; i < arraysize(runtime_functions); ++i) { ExternalReference ref( static_cast
(runtime_functions[i].id), isolate); Add(ref.address(), runtime_functions[i].name); } // Stat counters struct StatsRefTableEntry { StatsCounter* (Counters::*counter)(); const char* name; }; static const StatsRefTableEntry stats_ref_table[] = { #define COUNTER_ENTRY(name, caption) \ { &Counters::name, "Counters::" #name } \ , STATS_COUNTER_LIST_1(COUNTER_ENTRY) STATS_COUNTER_LIST_2(COUNTER_ENTRY) #undef COUNTER_ENTRY }; Counters* counters = isolate->counters(); for (unsigned i = 0; i < arraysize(stats_ref_table); ++i) { // To make sure the indices are not dependent on whether counters are // enabled, use a dummy address as filler. Address address = NotAvailable(); StatsCounter* counter = (counters->*(stats_ref_table[i].counter))(); if (counter->Enabled()) { address = reinterpret_cast
(counter->GetInternalPointer()); } Add(address, stats_ref_table[i].name); } // Top addresses static const char* address_names[] = { #define BUILD_NAME_LITERAL(Name, name) "Isolate::" #name "_address", FOR_EACH_ISOLATE_ADDRESS_NAME(BUILD_NAME_LITERAL) NULL #undef BUILD_NAME_LITERAL }; for (int i = 0; i < Isolate::kIsolateAddressCount; ++i) { Add(isolate->get_address_from_id(static_cast
(i)), address_names[i]); } // Accessors struct AccessorRefTable { Address address; const char* name; }; static const AccessorRefTable accessors[] = { #define ACCESSOR_INFO_DECLARATION(name) \ { FUNCTION_ADDR(&Accessors::name##Getter), "Accessors::" #name "Getter" } \ , {FUNCTION_ADDR(&Accessors::name##Setter), "Accessors::" #name "Setter"}, ACCESSOR_INFO_LIST(ACCESSOR_INFO_DECLARATION) #undef ACCESSOR_INFO_DECLARATION }; for (unsigned i = 0; i < arraysize(accessors); ++i) { Add(accessors[i].address, accessors[i].name); } StubCache* stub_cache = isolate->stub_cache(); // Stub cache tables Add(stub_cache->key_reference(StubCache::kPrimary).address(), "StubCache::primary_->key"); Add(stub_cache->value_reference(StubCache::kPrimary).address(), "StubCache::primary_->value"); Add(stub_cache->map_reference(StubCache::kPrimary).address(), "StubCache::primary_->map"); Add(stub_cache->key_reference(StubCache::kSecondary).address(), "StubCache::secondary_->key"); Add(stub_cache->value_reference(StubCache::kSecondary).address(), "StubCache::secondary_->value"); Add(stub_cache->map_reference(StubCache::kSecondary).address(), "StubCache::secondary_->map"); // Runtime entries Add(ExternalReference::delete_handle_scope_extensions(isolate).address(), "HandleScope::DeleteExtensions"); Add(ExternalReference::incremental_marking_record_write_function(isolate) .address(), "IncrementalMarking::RecordWrite"); Add(ExternalReference::store_buffer_overflow_function(isolate).address(), "StoreBuffer::StoreBufferOverflow"); // Add a small set of deopt entry addresses to encoder without generating the // deopt table code, which isn't possible at deserialization time. 
HandleScope scope(isolate); for (int entry = 0; entry < kDeoptTableSerializeEntryCount; ++entry) { Address address = Deoptimizer::GetDeoptimizationEntry( isolate, entry, Deoptimizer::LAZY, Deoptimizer::CALCULATE_ENTRY_ADDRESS); Add(address, "lazy_deopt"); } } ExternalReferenceEncoder::ExternalReferenceEncoder(Isolate* isolate) { map_ = isolate->external_reference_map(); if (map_ != NULL) return; map_ = new HashMap(HashMap::PointersMatch); ExternalReferenceTable* table = ExternalReferenceTable::instance(isolate); for (int i = 0; i < table->size(); ++i) { Address addr = table->address(i); if (addr == ExternalReferenceTable::NotAvailable()) continue; // We expect no duplicate external references entries in the table. DCHECK_NULL(map_->Lookup(addr, Hash(addr))); map_->LookupOrInsert(addr, Hash(addr))->value = reinterpret_cast
(i); } isolate->set_external_reference_map(map_); } uint32_t ExternalReferenceEncoder::Encode(Address address) const { DCHECK_NOT_NULL(address); HashMap::Entry* entry = const_cast
(map_)->Lookup(address, Hash(address)); DCHECK_NOT_NULL(entry); return static_cast
(reinterpret_cast
(entry->value)); } const char* ExternalReferenceEncoder::NameOfAddress(Isolate* isolate, Address address) const { HashMap::Entry* entry = const_cast
(map_)->Lookup(address, Hash(address)); if (entry == NULL) return "
"; uint32_t i = static_cast
(reinterpret_cast
(entry->value)); return ExternalReferenceTable::instance(isolate)->name(i); } class CodeAddressMap: public CodeEventLogger { public: explicit CodeAddressMap(Isolate* isolate) : isolate_(isolate) { isolate->logger()->addCodeEventListener(this); } ~CodeAddressMap() override { isolate_->logger()->removeCodeEventListener(this); } void CodeMoveEvent(Address from, Address to) override { address_to_name_map_.Move(from, to); } void CodeDisableOptEvent(Code* code, SharedFunctionInfo* shared) override {} void CodeDeleteEvent(Address from) override { address_to_name_map_.Remove(from); } const char* Lookup(Address address) { return address_to_name_map_.Lookup(address); } private: class NameMap { public: NameMap() : impl_(HashMap::PointersMatch) {} ~NameMap() { for (HashMap::Entry* p = impl_.Start(); p != NULL; p = impl_.Next(p)) { DeleteArray(static_cast
(p->value)); } } void Insert(Address code_address, const char* name, int name_size) { HashMap::Entry* entry = FindOrCreateEntry(code_address); if (entry->value == NULL) { entry->value = CopyName(name, name_size); } } const char* Lookup(Address code_address) { HashMap::Entry* entry = FindEntry(code_address); return (entry != NULL) ? static_cast
(entry->value) : NULL; } void Remove(Address code_address) { HashMap::Entry* entry = FindEntry(code_address); if (entry != NULL) { DeleteArray(static_cast
(entry->value)); RemoveEntry(entry); } } void Move(Address from, Address to) { if (from == to) return; HashMap::Entry* from_entry = FindEntry(from); DCHECK(from_entry != NULL); void* value = from_entry->value; RemoveEntry(from_entry); HashMap::Entry* to_entry = FindOrCreateEntry(to); DCHECK(to_entry->value == NULL); to_entry->value = value; } private: static char* CopyName(const char* name, int name_size) { char* result = NewArray
(name_size + 1); for (int i = 0; i < name_size; ++i) { char c = name[i]; if (c == '\0') c = ' '; result[i] = c; } result[name_size] = '\0'; return result; } HashMap::Entry* FindOrCreateEntry(Address code_address) { return impl_.LookupOrInsert(code_address, ComputePointerHash(code_address)); } HashMap::Entry* FindEntry(Address code_address) { return impl_.Lookup(code_address, ComputePointerHash(code_address)); } void RemoveEntry(HashMap::Entry* entry) { impl_.Remove(entry->key, entry->hash); } HashMap impl_; DISALLOW_COPY_AND_ASSIGN(NameMap); }; void LogRecordedBuffer(Code* code, SharedFunctionInfo*, const char* name, int length) override { address_to_name_map_.Insert(code->address(), name, length); } NameMap address_to_name_map_; Isolate* isolate_; }; void Deserializer::DecodeReservation( Vector
res) { DCHECK_EQ(0, reservations_[NEW_SPACE].length()); STATIC_ASSERT(NEW_SPACE == 0); int current_space = NEW_SPACE; for (auto& r : res) { reservations_[current_space].Add({r.chunk_size(), NULL, NULL}); if (r.is_last()) current_space++; } DCHECK_EQ(kNumberOfSpaces, current_space); for (int i = 0; i < kNumberOfPreallocatedSpaces; i++) current_chunk_[i] = 0; } void Deserializer::FlushICacheForNewIsolate() { DCHECK(!deserializing_user_code_); // The entire isolate is newly deserialized. Simply flush all code pages. PageIterator it(isolate_->heap()->code_space()); while (it.has_next()) { Page* p = it.next(); Assembler::FlushICache(isolate_, p->area_start(), p->area_end() - p->area_start()); } } void Deserializer::FlushICacheForNewCodeObjects() { DCHECK(deserializing_user_code_); for (Code* code : new_code_objects_) { Assembler::FlushICache(isolate_, code->instruction_start(), code->instruction_size()); } } bool Deserializer::ReserveSpace() { #ifdef DEBUG for (int i = NEW_SPACE; i < kNumberOfSpaces; ++i) { CHECK(reservations_[i].length() > 0); } #endif // DEBUG if (!isolate_->heap()->ReserveSpace(reservations_)) return false; for (int i = 0; i < kNumberOfPreallocatedSpaces; i++) { high_water_[i] = reservations_[i][0].start; } return true; } void Deserializer::Initialize(Isolate* isolate) { DCHECK_NULL(isolate_); DCHECK_NOT_NULL(isolate); isolate_ = isolate; DCHECK_NULL(external_reference_table_); external_reference_table_ = ExternalReferenceTable::instance(isolate); CHECK_EQ(magic_number_, SerializedData::ComputeMagicNumber(external_reference_table_)); } void Deserializer::Deserialize(Isolate* isolate) { Initialize(isolate); if (!ReserveSpace()) V8::FatalProcessOutOfMemory("deserializing context"); // No active threads. DCHECK_NULL(isolate_->thread_manager()->FirstThreadStateInUse()); // No active handles. 
DCHECK(isolate_->handle_scope_implementer()->blocks()->is_empty()); { DisallowHeapAllocation no_gc; isolate_->heap()->IterateSmiRoots(this); isolate_->heap()->IterateStrongRoots(this, VISIT_ONLY_STRONG); isolate_->heap()->RepairFreeListsAfterDeserialization(); isolate_->heap()->IterateWeakRoots(this, VISIT_ALL); DeserializeDeferredObjects(); FlushICacheForNewIsolate(); } isolate_->heap()->set_native_contexts_list( isolate_->heap()->undefined_value()); // The allocation site list is build during root iteration, but if no sites // were encountered then it needs to be initialized to undefined. if (isolate_->heap()->allocation_sites_list() == Smi::FromInt(0)) { isolate_->heap()->set_allocation_sites_list( isolate_->heap()->undefined_value()); } // Update data pointers to the external strings containing natives sources. Natives::UpdateSourceCache(isolate_->heap()); ExtraNatives::UpdateSourceCache(isolate_->heap()); // Issue code events for newly deserialized code objects. LOG_CODE_EVENT(isolate_, LogCodeObjects()); LOG_CODE_EVENT(isolate_, LogCompiledFunctions()); } MaybeHandle
Deserializer::DeserializePartial( Isolate* isolate, Handle
global_proxy) { Initialize(isolate); if (!ReserveSpace()) { V8::FatalProcessOutOfMemory("deserialize context"); return MaybeHandle
(); } Vector
> attached_objects = Vector
>::New(1); attached_objects[kGlobalProxyReference] = global_proxy; SetAttachedObjects(attached_objects); DisallowHeapAllocation no_gc; // Keep track of the code space start and end pointers in case new // code objects were unserialized OldSpace* code_space = isolate_->heap()->code_space(); Address start_address = code_space->top(); Object* root; VisitPointer(&root); DeserializeDeferredObjects(); // There's no code deserialized here. If this assert fires then that's // changed and logging should be added to notify the profiler et al of the // new code, which also has to be flushed from instruction cache. CHECK_EQ(start_address, code_space->top()); return Handle
(root, isolate); } MaybeHandle
Deserializer::DeserializeCode( Isolate* isolate) { Initialize(isolate); if (!ReserveSpace()) { return Handle
(); } else { deserializing_user_code_ = true; HandleScope scope(isolate); Handle
result; { DisallowHeapAllocation no_gc; Object* root; VisitPointer(&root); DeserializeDeferredObjects(); FlushICacheForNewCodeObjects(); result = Handle
(SharedFunctionInfo::cast(root)); } CommitPostProcessedObjects(isolate); return scope.CloseAndEscape(result); } } Deserializer::~Deserializer() { // TODO(svenpanne) Re-enable this assertion when v8 initialization is fixed. // DCHECK(source_.AtEOF()); attached_objects_.Dispose(); } // This is called on the roots. It is the driver of the deserialization // process. It is also called on the body of each function. void Deserializer::VisitPointers(Object** start, Object** end) { // The space must be new space. Any other space would cause ReadChunk to try // to update the remembered using NULL as the address. ReadData(start, end, NEW_SPACE, NULL); } void Deserializer::DeserializeDeferredObjects() { for (int code = source_.Get(); code != kSynchronize; code = source_.Get()) { switch (code) { case kAlignmentPrefix: case kAlignmentPrefix + 1: case kAlignmentPrefix + 2: SetAlignment(code); break; default: { int space = code & kSpaceMask; DCHECK(space <= kNumberOfSpaces); DCHECK(code - space == kNewObject); HeapObject* object = GetBackReferencedObject(space); int size = source_.GetInt() << kPointerSizeLog2; Address obj_address = object->address(); Object** start = reinterpret_cast
(obj_address + kPointerSize); Object** end = reinterpret_cast
(obj_address + size); bool filled = ReadData(start, end, space, obj_address); CHECK(filled); DCHECK(CanBeDeferred(object)); PostProcessNewObject(object, space); } } } } // Used to insert a deserialized internalized string into the string table. class StringTableInsertionKey : public HashTableKey { public: explicit StringTableInsertionKey(String* string) : string_(string), hash_(HashForObject(string)) { DCHECK(string->IsInternalizedString()); } bool IsMatch(Object* string) override { // We know that all entries in a hash table had their hash keys created. // Use that knowledge to have fast failure. if (hash_ != HashForObject(string)) return false; // We want to compare the content of two internalized strings here. return string_->SlowEquals(String::cast(string)); } uint32_t Hash() override { return hash_; } uint32_t HashForObject(Object* key) override { return String::cast(key)->Hash(); } MUST_USE_RESULT Handle
AsHandle(Isolate* isolate) override { return handle(string_, isolate); } private: String* string_; uint32_t hash_; DisallowHeapAllocation no_gc; }; HeapObject* Deserializer::PostProcessNewObject(HeapObject* obj, int space) { if (deserializing_user_code()) { if (obj->IsString()) { String* string = String::cast(obj); // Uninitialize hash field as the hash seed may have changed. string->set_hash_field(String::kEmptyHashField); if (string->IsInternalizedString()) { // Canonicalize the internalized string. If it already exists in the // string table, set it to forward to the existing one. StringTableInsertionKey key(string); String* canonical = StringTable::LookupKeyIfExists(isolate_, &key); if (canonical == NULL) { new_internalized_strings_.Add(handle(string)); return string; } else { string->SetForwardedInternalizedString(canonical); return canonical; } } } else if (obj->IsScript()) { new_scripts_.Add(handle(Script::cast(obj))); } else { DCHECK(CanBeDeferred(obj)); } } if (obj->IsAllocationSite()) { DCHECK(obj->IsAllocationSite()); // Allocation sites are present in the snapshot, and must be linked into // a list at deserialization time. AllocationSite* site = AllocationSite::cast(obj); // TODO(mvstanton): consider treating the heap()->allocation_sites_list() // as a (weak) root. If this root is relocated correctly, this becomes // unnecessary. if (isolate_->heap()->allocation_sites_list() == Smi::FromInt(0)) { site->set_weak_next(isolate_->heap()->undefined_value()); } else { site->set_weak_next(isolate_->heap()->allocation_sites_list()); } isolate_->heap()->set_allocation_sites_list(site); } else if (obj->IsCode()) { // We flush all code pages after deserializing the startup snapshot. In that // case, we only need to remember code objects in the large object space. // When deserializing user code, remember each individual code object. if (deserializing_user_code() || space == LO_SPACE) { new_code_objects_.Add(Code::cast(obj)); } } // Check alignment. 
// (Tail of Deserializer::PostProcessNewObject.)
  DCHECK_EQ(0, Heap::GetFillToAlign(obj->address(), obj->RequiredAlignment()));
  return obj;
}

// After user-code deserialization: bulk-insert the newly created
// internalized strings into the string table (capacity is ensured up front
// so insertion cannot trigger GC-driven resizing mid-loop).
void Deserializer::CommitPostProcessedObjects(Isolate* isolate) {
  StringTable::EnsureCapacityForDeserialization(
      isolate, new_internalized_strings_.length());
  for (Handle<String> string : new_internalized_strings_) {
    StringTableInsertionKey key(*string);
    DCHECK_NULL(StringTable::LookupKeyIfExists(isolate, &key));
    StringTable::LookupKey(isolate, &key);
  }
  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  // NOTE(review): the captured source is truncated at this point (the page
  // chrome resumes immediately after).  The remainder of
  // CommitPostProcessedObjects — presumably the loop over new_scripts_ — is
  // missing from this capture and must be restored from the upstream file.
  for (Handle
登录后可以享受更多权益
您还没有登录,登录后您可以:
收藏Android系统代码
收藏喜欢的文章
多个平台共享账号
去登录
首次使用?从这里
注册