Pie | 9.0.0_r8
external/v8/src/snapshot/deserializer.cc
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/snapshot/deserializer.h"

#include "src/api.h"
#include "src/assembler-inl.h"
#include "src/bootstrapper.h"
#include "src/external-reference-table.h"
#include "src/heap/heap-inl.h"
#include "src/isolate.h"
#include "src/macro-assembler.h"
#include "src/objects-inl.h"
#include "src/snapshot/natives.h"
#include "src/v8.h"
#include "src/v8threads.h"

namespace v8 {
namespace internal {

void Deserializer::DecodeReservation(
    Vector<const SerializedData::Reservation> res) {
  DCHECK_EQ(0, reservations_[NEW_SPACE].length());
  STATIC_ASSERT(NEW_SPACE == 0);
  int current_space = NEW_SPACE;
  for (auto& r : res) {
    reservations_[current_space].Add({r.chunk_size(), NULL, NULL});
    if (r.is_last()) current_space++;
  }
  DCHECK_EQ(kNumberOfSpaces, current_space);
  for (int i = 0; i < kNumberOfPreallocatedSpaces; i++) current_chunk_[i] = 0;
}

void Deserializer::FlushICacheForNewIsolate() {
  DCHECK(!deserializing_user_code_);
  // The entire isolate is newly deserialized. Simply flush all code pages.
  for (Page* p : *isolate_->heap()->code_space()) {
    Assembler::FlushICache(isolate_, p->area_start(),
                           p->area_end() - p->area_start());
  }
}

void Deserializer::FlushICacheForNewCodeObjectsAndRecordEmbeddedObjects() {
  DCHECK(deserializing_user_code_);
  for (Code* code : new_code_objects_) {
    // Record all references to embedded objects in the new code object.
    isolate_->heap()->RecordWritesIntoCode(code);

    if (FLAG_serialize_age_code) code->PreAge(isolate_);
    Assembler::FlushICache(isolate_, code->instruction_start(),
                           code->instruction_size());
  }
}

bool Deserializer::ReserveSpace() {
#ifdef DEBUG
  for (int i = NEW_SPACE; i < kNumberOfSpaces; ++i) {
    CHECK(reservations_[i].length() > 0);
  }
#endif  // DEBUG
  DCHECK(allocated_maps_.is_empty());
  if (!isolate_->heap()->ReserveSpace(reservations_, &allocated_maps_))
    return false;
  for (int i = 0; i < kNumberOfPreallocatedSpaces; i++) {
    high_water_[i] = reservations_[i][0].start;
  }
  return true;
}

void Deserializer::Initialize(Isolate* isolate) {
  DCHECK_NULL(isolate_);
  DCHECK_NOT_NULL(isolate);
  isolate_ = isolate;
  DCHECK_NULL(external_reference_table_);
  external_reference_table_ = ExternalReferenceTable::instance(isolate);
  CHECK_EQ(magic_number_,
           SerializedData::ComputeMagicNumber(external_reference_table_));
}

void Deserializer::Deserialize(Isolate* isolate) {
  Initialize(isolate);
  if (!ReserveSpace()) V8::FatalProcessOutOfMemory("deserializing context");
  // No active threads.
  DCHECK_NULL(isolate_->thread_manager()->FirstThreadStateInUse());
  // No active handles.
  DCHECK(isolate_->handle_scope_implementer()->blocks()->is_empty());
  // Partial snapshot cache is not yet populated.
  DCHECK(isolate_->partial_snapshot_cache()->is_empty());

  {
    DisallowHeapAllocation no_gc;
    isolate_->heap()->IterateStrongRoots(this, VISIT_ONLY_STRONG_ROOT_LIST);
    isolate_->heap()->IterateSmiRoots(this);
    isolate_->heap()->IterateStrongRoots(this, VISIT_ONLY_STRONG);
    isolate_->heap()->RepairFreeListsAfterDeserialization();
    isolate_->heap()->IterateWeakRoots(this, VISIT_ALL);
    DeserializeDeferredObjects();
    FlushICacheForNewIsolate();
    RestoreExternalReferenceRedirectors(&accessor_infos_);
  }

  isolate_->heap()->set_native_contexts_list(
      isolate_->heap()->undefined_value());
  // The allocation site list is built during root iteration, but if no sites
  // were encountered then it needs to be initialized to undefined.
  if (isolate_->heap()->allocation_sites_list() == Smi::kZero) {
    isolate_->heap()->set_allocation_sites_list(
        isolate_->heap()->undefined_value());
  }

  // Issue code events for newly deserialized code objects.
  LOG_CODE_EVENT(isolate_, LogCodeObjects());
  LOG_CODE_EVENT(isolate_, LogBytecodeHandlers());
  LOG_CODE_EVENT(isolate_, LogCompiledFunctions());
}

MaybeHandle<Object> Deserializer::DeserializePartial(
    Isolate* isolate, Handle<JSGlobalProxy> global_proxy,
    v8::DeserializeInternalFieldsCallback internal_fields_deserializer) {
  Initialize(isolate);
  if (!ReserveSpace()) {
    V8::FatalProcessOutOfMemory("deserialize context");
    return MaybeHandle<Object>();
  }

  AddAttachedObject(global_proxy);

  DisallowHeapAllocation no_gc;
  // Keep track of the code space start and end pointers in case new
  // code objects were unserialized
  OldSpace* code_space = isolate_->heap()->code_space();
  Address start_address = code_space->top();
  Object* root;
  VisitPointer(&root);
  DeserializeDeferredObjects();
  DeserializeInternalFields(internal_fields_deserializer);

  isolate->heap()->RegisterReservationsForBlackAllocation(reservations_);

  // There's no code deserialized here. If this assert fires then that's
  // changed and logging should be added to notify the profiler et al of the
  // new code, which also has to be flushed from instruction cache.
  CHECK_EQ(start_address, code_space->top());
  return Handle<Object>(root, isolate);
}

MaybeHandle<HeapObject> Deserializer::DeserializeObject(Isolate* isolate) {
  Initialize(isolate);
  if (!ReserveSpace()) {
    return MaybeHandle<HeapObject>();
  } else {
    deserializing_user_code_ = true;
    HandleScope scope(isolate);
    Handle<HeapObject> result;
    {
      DisallowHeapAllocation no_gc;
      Object* root;
      VisitPointer(&root);
      DeserializeDeferredObjects();
      FlushICacheForNewCodeObjectsAndRecordEmbeddedObjects();
      result = Handle<HeapObject>(HeapObject::cast(root));
      isolate->heap()->RegisterReservationsForBlackAllocation(reservations_);
    }
    CommitPostProcessedObjects(isolate);
    return scope.CloseAndEscape(result);
  }
}

Deserializer::~Deserializer() {
  // TODO(svenpanne) Re-enable this assertion when v8 initialization is fixed.
  // DCHECK(source_.AtEOF());
#ifdef DEBUG
  for (int space = 0; space < kNumberOfPreallocatedSpaces; space++) {
    int chunk_index = current_chunk_[space];
    CHECK_EQ(reservations_[space].length(), chunk_index + 1);
    CHECK_EQ(reservations_[space][chunk_index].end, high_water_[space]);
  }
  CHECK_EQ(allocated_maps_.length(), next_map_index_);
#endif  // DEBUG
}

// This is called on the roots. It is the driver of the deserialization
// process. It is also called on the body of each function.
void Deserializer::VisitPointers(Object** start, Object** end) {
  // The space must be new space. Any other space would cause ReadChunk to try
  // to update the remembered set using NULL as the address.
  ReadData(start, end, NEW_SPACE, NULL);
}

void Deserializer::Synchronize(VisitorSynchronization::SyncTag tag) {
  static const byte expected = kSynchronize;
  CHECK_EQ(expected, source_.Get());
}

void Deserializer::DeserializeDeferredObjects() {
  for (int code = source_.Get(); code != kSynchronize; code = source_.Get()) {
    switch (code) {
      case kAlignmentPrefix:
      case kAlignmentPrefix + 1:
      case kAlignmentPrefix + 2:
        SetAlignment(code);
        break;
      default: {
        int space = code & kSpaceMask;
        DCHECK(space <= kNumberOfSpaces);
        DCHECK(code - space == kNewObject);
        HeapObject* object = GetBackReferencedObject(space);
        int size = source_.GetInt() << kPointerSizeLog2;
        Address obj_address = object->address();
        Object** start =
            reinterpret_cast<Object**>(obj_address + kPointerSize);
        Object** end = reinterpret_cast<Object**>(obj_address + size);
        bool filled = ReadData(start, end, space, obj_address);
        CHECK(filled);
        DCHECK(CanBeDeferred(object));
        PostProcessNewObject(object, space);
      }
    }
  }
}

void Deserializer::DeserializeInternalFields(
    v8::DeserializeInternalFieldsCallback internal_fields_deserializer) {
  if (!source_.HasMore() || source_.Get() != kInternalFieldsData) return;
  DisallowHeapAllocation no_gc;
  DisallowJavascriptExecution no_js(isolate_);
  DisallowCompilation no_compile(isolate_);
  DCHECK_NOT_NULL(internal_fields_deserializer.callback);
  for (int code = source_.Get(); code != kSynchronize; code = source_.Get()) {
    HandleScope scope(isolate_);
    int space = code & kSpaceMask;
    DCHECK(space <= kNumberOfSpaces);
    DCHECK(code - space == kNewObject);
    Handle<JSObject> obj(JSObject::cast(GetBackReferencedObject(space)),
                         isolate_);
    int index = source_.GetInt();
    int size = source_.GetInt();
    byte* data = new byte[size];
    source_.CopyRaw(data, size);
    internal_fields_deserializer.callback(v8::Utils::ToLocal(obj), index,
                                          {reinterpret_cast<char*>(data), size},
                                          internal_fields_deserializer.data);
    delete[] data;
  }
}

// Used to insert a deserialized internalized string into the string table.
class StringTableInsertionKey : public HashTableKey {
 public:
  explicit StringTableInsertionKey(String* string)
      : string_(string), hash_(HashForObject(string)) {
    DCHECK(string->IsInternalizedString());
  }

  bool IsMatch(Object* string) override {
    // We know that all entries in a hash table had their hash keys created.
    // Use that knowledge to have fast failure.
    if (hash_ != HashForObject(string)) return false;
    // We want to compare the content of two internalized strings here.
    return string_->SlowEquals(String::cast(string));
  }

  uint32_t Hash() override { return hash_; }

  uint32_t HashForObject(Object* key) override {
    return String::cast(key)->Hash();
  }

  MUST_USE_RESULT Handle<Object> AsHandle(Isolate* isolate) override {
    return handle(string_, isolate);
  }

 private:
  String* string_;
  uint32_t hash_;
  DisallowHeapAllocation no_gc;
};

HeapObject* Deserializer::PostProcessNewObject(HeapObject* obj, int space) {
  if (deserializing_user_code()) {
    if (obj->IsString()) {
      String* string = String::cast(obj);
      // Uninitialize hash field as the hash seed may have changed.
      string->set_hash_field(String::kEmptyHashField);
      if (string->IsInternalizedString()) {
        // Canonicalize the internalized string. If it already exists in the
        // string table, set it to forward to the existing one.
        StringTableInsertionKey key(string);
        String* canonical = StringTable::LookupKeyIfExists(isolate_, &key);
        if (canonical == NULL) {
          new_internalized_strings_.Add(handle(string));
          return string;
        } else {
          string->SetForwardedInternalizedString(canonical);
          return canonical;
        }
      }
    } else if (obj->IsScript()) {
      new_scripts_.Add(handle(Script::cast(obj)));
    } else {
      DCHECK(CanBeDeferred(obj));
    }
  }
  if (obj->IsAllocationSite()) {
    DCHECK(obj->IsAllocationSite());
    // Allocation sites are present in the snapshot, and must be linked into
    // a list at deserialization time.
    AllocationSite* site = AllocationSite::cast(obj);
    // TODO(mvstanton): consider treating the heap()->allocation_sites_list()
    // as a (weak) root. If this root is relocated correctly, this becomes
    // unnecessary.
    if (isolate_->heap()->allocation_sites_list() == Smi::kZero) {
      site->set_weak_next(isolate_->heap()->undefined_value());
    } else {
      site->set_weak_next(isolate_->heap()->allocation_sites_list());
    }
    isolate_->heap()->set_allocation_sites_list(site);
  } else if (obj->IsCode()) {
    // We flush all code pages after deserializing the startup snapshot. In
    // that case, we only need to remember code objects in the large object
    // space. When deserializing user code, remember each individual code
    // object.
    if (deserializing_user_code() || space == LO_SPACE) {
      new_code_objects_.Add(Code::cast(obj));
    }
  } else if (obj->IsAccessorInfo()) {
    if (isolate_->external_reference_redirector()) {
      accessor_infos_.Add(AccessorInfo::cast(obj));
    }
  }
  // Check alignment.
  DCHECK_EQ(0, Heap::GetFillToAlign(obj->address(), obj->RequiredAlignment()));
  return obj;
}

void Deserializer::CommitPostProcessedObjects(Isolate* isolate) {
  StringTable::EnsureCapacityForDeserialization(
      isolate, new_internalized_strings_.length());
  for (Handle<String> string : new_internalized_strings_) {
    StringTableInsertionKey key(*string);
    DCHECK_NULL(StringTable::LookupKeyIfExists(isolate, &key));
    StringTable::LookupKey(isolate, &key);
  }

  Heap* heap = isolate->heap();
  Factory* factory = isolate->factory();
  for (Handle