00001
00002
00003
00004
00005
00006
00007
00008
00009
00010
00011
00012
00013
00014
00015
00016
00017
00018
00019
00020
00021
00022
00023
00024
00025
00026
00027
00028 #include "v8.h"
00029
00030 #include "accessors.h"
00031 #include "api.h"
00032 #include "bootstrapper.h"
00033 #include "codegen-inl.h"
00034 #include "compilation-cache.h"
00035 #include "debug.h"
00036 #include "global-handles.h"
00037 #include "jsregexp.h"
00038 #include "mark-compact.h"
00039 #include "natives.h"
00040 #include "scanner.h"
00041 #include "scopeinfo.h"
00042 #include "v8threads.h"
00043
00044 namespace v8 { namespace internal {
00045
00046 #define ROOT_ALLOCATION(type, name) type* Heap::name##_;
00047 ROOT_LIST(ROOT_ALLOCATION)
00048 #undef ROOT_ALLOCATION
00049
00050
00051 #define STRUCT_ALLOCATION(NAME, Name, name) Map* Heap::name##_map_;
00052 STRUCT_LIST(STRUCT_ALLOCATION)
00053 #undef STRUCT_ALLOCATION
00054
00055
00056 #define SYMBOL_ALLOCATION(name, string) String* Heap::name##_;
00057 SYMBOL_LIST(SYMBOL_ALLOCATION)
00058 #undef SYMBOL_ALLOCATION
00059
00060 NewSpace Heap::new_space_;
00061 OldSpace* Heap::old_pointer_space_ = NULL;
00062 OldSpace* Heap::old_data_space_ = NULL;
00063 OldSpace* Heap::code_space_ = NULL;
00064 MapSpace* Heap::map_space_ = NULL;
00065 LargeObjectSpace* Heap::lo_space_ = NULL;
00066
00067 static const int kMinimumPromotionLimit = 2*MB;
00068 static const int kMinimumAllocationLimit = 8*MB;
00069
00070 int Heap::old_gen_promotion_limit_ = kMinimumPromotionLimit;
00071 int Heap::old_gen_allocation_limit_ = kMinimumAllocationLimit;
00072
00073 int Heap::old_gen_exhausted_ = false;
00074
00075 int Heap::amount_of_external_allocated_memory_ = 0;
00076 int Heap::amount_of_external_allocated_memory_at_last_global_gc_ = 0;
00077
00078
00079
00080 int Heap::semispace_size_ = 2*MB;
00081 int Heap::old_generation_size_ = 512*MB;
00082 int Heap::initial_semispace_size_ = 256*KB;
00083
00084 GCCallback Heap::global_gc_prologue_callback_ = NULL;
00085 GCCallback Heap::global_gc_epilogue_callback_ = NULL;
00086
00087
00088
00089 int Heap::young_generation_size_ = 0;
00090
00091
00092 int Heap::new_space_growth_limit_ = 8;
00093 int Heap::scavenge_count_ = 0;
00094 Heap::HeapState Heap::gc_state_ = NOT_IN_GC;
00095
00096 int Heap::mc_count_ = 0;
00097 int Heap::gc_count_ = 0;
00098
00099 int Heap::always_allocate_scope_depth_ = 0;
00100
00101 #ifdef DEBUG
00102 bool Heap::allocation_allowed_ = true;
00103
00104 int Heap::allocation_timeout_ = 0;
00105 bool Heap::disallow_allocation_failure_ = false;
00106 #endif // DEBUG
00107
00108
00109 int Heap::Capacity() {
00110 if (!HasBeenSetup()) return 0;
00111
00112 return new_space_.Capacity() +
00113 old_pointer_space_->Capacity() +
00114 old_data_space_->Capacity() +
00115 code_space_->Capacity() +
00116 map_space_->Capacity();
00117 }
00118
00119
00120 int Heap::Available() {
00121 if (!HasBeenSetup()) return 0;
00122
00123 return new_space_.Available() +
00124 old_pointer_space_->Available() +
00125 old_data_space_->Available() +
00126 code_space_->Available() +
00127 map_space_->Available();
00128 }
00129
00130
00131 bool Heap::HasBeenSetup() {
00132 return old_pointer_space_ != NULL &&
00133 old_data_space_ != NULL &&
00134 code_space_ != NULL &&
00135 map_space_ != NULL &&
00136 lo_space_ != NULL;
00137 }
00138
00139
00140 GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space) {
00141
00142 if (space != NEW_SPACE || FLAG_gc_global) {
00143 Counters::gc_compactor_caused_by_request.Increment();
00144 return MARK_COMPACTOR;
00145 }
00146
00147
00148 if (OldGenerationPromotionLimitReached()) {
00149 Counters::gc_compactor_caused_by_promoted_data.Increment();
00150 return MARK_COMPACTOR;
00151 }
00152
00153
00154 if (old_gen_exhausted_) {
00155 Counters::gc_compactor_caused_by_oldspace_exhaustion.Increment();
00156 return MARK_COMPACTOR;
00157 }
00158
00159
00160
00161
00162
00163
00164
00165
00166
00167
00168 if (MemoryAllocator::MaxAvailable() <= new_space_.Size()) {
00169 Counters::gc_compactor_caused_by_oldspace_exhaustion.Increment();
00170 return MARK_COMPACTOR;
00171 }
00172
00173
00174 return SCAVENGER;
00175 }
00176
00177
00178
00179
00180 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
00181 void Heap::ReportStatisticsBeforeGC() {
00182
00183
00184
00185 #if defined(DEBUG) && defined(ENABLE_LOGGING_AND_PROFILING)
00186 if (FLAG_heap_stats || FLAG_log_gc) new_space_.CollectStatistics();
00187 if (FLAG_heap_stats) {
00188 ReportHeapStatistics("Before GC");
00189 } else if (FLAG_log_gc) {
00190 new_space_.ReportStatistics();
00191 }
00192 if (FLAG_heap_stats || FLAG_log_gc) new_space_.ClearHistograms();
00193 #elif defined(DEBUG)
00194 if (FLAG_heap_stats) {
00195 new_space_.CollectStatistics();
00196 ReportHeapStatistics("Before GC");
00197 new_space_.ClearHistograms();
00198 }
00199 #elif defined(ENABLE_LOGGING_AND_PROFILING)
00200 if (FLAG_log_gc) {
00201 new_space_.CollectStatistics();
00202 new_space_.ReportStatistics();
00203 new_space_.ClearHistograms();
00204 }
00205 #endif
00206 }
00207
00208
00209
00210
00211 void Heap::ReportStatisticsAfterGC() {
00212
00213
00214 #if defined(DEBUG) && defined(ENABLE_LOGGING_AND_PROFILING)
00215 if (FLAG_heap_stats) {
00216 ReportHeapStatistics("After GC");
00217 } else if (FLAG_log_gc) {
00218 new_space_.ReportStatistics();
00219 }
00220 #elif defined(DEBUG)
00221 if (FLAG_heap_stats) ReportHeapStatistics("After GC");
00222 #elif defined(ENABLE_LOGGING_AND_PROFILING)
00223 if (FLAG_log_gc) new_space_.ReportStatistics();
00224 #endif
00225 }
00226 #endif // defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
00227
00228
00229 void Heap::GarbageCollectionPrologue() {
00230 RegExpImpl::NewSpaceCollectionPrologue();
00231 gc_count_++;
00232 #ifdef DEBUG
00233 ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
00234 allow_allocation(false);
00235
00236 if (FLAG_verify_heap) {
00237 Verify();
00238 }
00239
00240 if (FLAG_gc_verbose) Print();
00241
00242 if (FLAG_print_rset) {
00243
00244 old_pointer_space_->PrintRSet();
00245 map_space_->PrintRSet();
00246 lo_space_->PrintRSet();
00247 }
00248 #endif
00249
00250 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
00251 ReportStatisticsBeforeGC();
00252 #endif
00253 }
00254
00255 int Heap::SizeOfObjects() {
00256 int total = 0;
00257 AllSpaces spaces;
00258 while (Space* space = spaces.next()) total += space->Size();
00259 return total;
00260 }
00261
00262 void Heap::GarbageCollectionEpilogue() {
00263 #ifdef DEBUG
00264 allow_allocation(true);
00265 ZapFromSpace();
00266
00267 if (FLAG_verify_heap) {
00268 Verify();
00269 }
00270
00271 if (FLAG_print_global_handles) GlobalHandles::Print();
00272 if (FLAG_print_handles) PrintHandles();
00273 if (FLAG_gc_verbose) Print();
00274 if (FLAG_code_stats) ReportCodeStatistics("After GC");
00275 #endif
00276
00277 Counters::alive_after_last_gc.Set(SizeOfObjects());
00278
00279 SymbolTable* symbol_table = SymbolTable::cast(Heap::symbol_table_);
00280 Counters::symbol_table_capacity.Set(symbol_table->Capacity());
00281 Counters::number_of_symbols.Set(symbol_table->NumberOfElements());
00282 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
00283 ReportStatisticsAfterGC();
00284 #endif
00285 }
00286
00287
00288 void Heap::CollectAllGarbage() {
00289
00290
00291
00292 CollectGarbage(0, OLD_POINTER_SPACE);
00293 }
00294
00295
00296 bool Heap::CollectGarbage(int requested_size, AllocationSpace space) {
00297
00298 VMState state(GC);
00299
00300 #ifdef DEBUG
00301
00302
00303
00304
00305
00306 allocation_timeout_ = Max(6, FLAG_gc_interval);
00307 #endif
00308
00309 { GCTracer tracer;
00310 GarbageCollectionPrologue();
00311
00312
00313 tracer.set_gc_count(gc_count_);
00314
00315 GarbageCollector collector = SelectGarbageCollector(space);
00316
00317 tracer.set_collector(collector);
00318
00319 StatsRate* rate = (collector == SCAVENGER)
00320 ? &Counters::gc_scavenger
00321 : &Counters::gc_compactor;
00322 rate->Start();
00323 PerformGarbageCollection(space, collector, &tracer);
00324 rate->Stop();
00325
00326 GarbageCollectionEpilogue();
00327 }
00328
00329
00330 #ifdef ENABLE_LOGGING_AND_PROFILING
00331 if (FLAG_log_gc) HeapProfiler::WriteSample();
00332 #endif
00333
00334 switch (space) {
00335 case NEW_SPACE:
00336 return new_space_.Available() >= requested_size;
00337 case OLD_POINTER_SPACE:
00338 return old_pointer_space_->Available() >= requested_size;
00339 case OLD_DATA_SPACE:
00340 return old_data_space_->Available() >= requested_size;
00341 case CODE_SPACE:
00342 return code_space_->Available() >= requested_size;
00343 case MAP_SPACE:
00344 return map_space_->Available() >= requested_size;
00345 case LO_SPACE:
00346 return lo_space_->Available() >= requested_size;
00347 }
00348 return false;
00349 }
00350
00351
00352 void Heap::PerformScavenge() {
00353 GCTracer tracer;
00354 PerformGarbageCollection(NEW_SPACE, SCAVENGER, &tracer);
00355 }
00356
00357
00358 void Heap::PerformGarbageCollection(AllocationSpace space,
00359 GarbageCollector collector,
00360 GCTracer* tracer) {
00361 if (collector == MARK_COMPACTOR && global_gc_prologue_callback_) {
00362 ASSERT(!allocation_allowed_);
00363 global_gc_prologue_callback_();
00364 }
00365
00366 if (collector == MARK_COMPACTOR) {
00367 MarkCompact(tracer);
00368
00369 int old_gen_size = PromotedSpaceSize();
00370 old_gen_promotion_limit_ =
00371 old_gen_size + Max(kMinimumPromotionLimit, old_gen_size / 3);
00372 old_gen_allocation_limit_ =
00373 old_gen_size + Max(kMinimumAllocationLimit, old_gen_size / 3);
00374 old_gen_exhausted_ = false;
00375
00376
00377
00378
00379
00380
00381
00382
00383
00384
00385 if (space == NEW_SPACE && !MarkCompactCollector::HasCompacted()) {
00386 Scavenge();
00387 }
00388 } else {
00389 Scavenge();
00390 }
00391 Counters::objs_since_last_young.Set(0);
00392
00393
00394 GlobalHandles::PostGarbageCollectionProcessing();
00395
00396 if (collector == MARK_COMPACTOR) {
00397
00398 amount_of_external_allocated_memory_at_last_global_gc_ =
00399 amount_of_external_allocated_memory_;
00400 }
00401
00402 if (collector == MARK_COMPACTOR && global_gc_epilogue_callback_) {
00403 ASSERT(!allocation_allowed_);
00404 global_gc_epilogue_callback_();
00405 }
00406 }
00407
00408
00409 void Heap::MarkCompact(GCTracer* tracer) {
00410 gc_state_ = MARK_COMPACT;
00411 mc_count_++;
00412 tracer->set_full_gc_count(mc_count_);
00413 LOG(ResourceEvent("markcompact", "begin"));
00414
00415 MarkCompactPrologue();
00416
00417 MarkCompactCollector::CollectGarbage(tracer);
00418
00419 MarkCompactEpilogue();
00420
00421 LOG(ResourceEvent("markcompact", "end"));
00422
00423 gc_state_ = NOT_IN_GC;
00424
00425 Shrink();
00426
00427 Counters::objs_since_last_full.Set(0);
00428 }
00429
00430
00431 void Heap::MarkCompactPrologue() {
00432 ClearKeyedLookupCache();
00433 CompilationCache::MarkCompactPrologue();
00434 RegExpImpl::OldSpaceCollectionPrologue();
00435 Top::MarkCompactPrologue();
00436 ThreadManager::MarkCompactPrologue();
00437 }
00438
00439
00440 void Heap::MarkCompactEpilogue() {
00441 Top::MarkCompactEpilogue();
00442 ThreadManager::MarkCompactEpilogue();
00443 }
00444
00445
00446 Object* Heap::FindCodeObject(Address a) {
00447 Object* obj = code_space_->FindObject(a);
00448 if (obj->IsFailure()) {
00449 obj = lo_space_->FindObject(a);
00450 }
00451 ASSERT(!obj->IsFailure());
00452 return obj;
00453 }
00454
00455
00456
00457 class ScavengeVisitor: public ObjectVisitor {
00458 public:
00459
00460 void VisitPointer(Object** p) { ScavengePointer(p); }
00461
00462 void VisitPointers(Object** start, Object** end) {
00463
00464 for (Object** p = start; p < end; p++) ScavengePointer(p);
00465 }
00466
00467 private:
00468 void ScavengePointer(Object** p) {
00469 Object* object = *p;
00470 if (!Heap::InNewSpace(object)) return;
00471 Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p),
00472 reinterpret_cast<HeapObject*>(object));
00473 }
00474 };
00475
00476
00477
00478 static Address promoted_top = NULL;
00479
00480
00481 #ifdef DEBUG
00482
00483
00484 class VerifyNonPointerSpacePointersVisitor: public ObjectVisitor {
00485 public:
00486 void VisitPointers(Object** start, Object**end) {
00487 for (Object** current = start; current < end; current++) {
00488 if ((*current)->IsHeapObject()) {
00489 ASSERT(!Heap::InNewSpace(HeapObject::cast(*current)));
00490 }
00491 }
00492 }
00493 };
00494 #endif
00495
00496 void Heap::Scavenge() {
00497 #ifdef DEBUG
00498 if (FLAG_enable_slow_asserts) {
00499 VerifyNonPointerSpacePointersVisitor v;
00500 HeapObjectIterator it(code_space_);
00501 while (it.has_next()) {
00502 HeapObject* object = it.next();
00503 if (object->IsCode()) {
00504 Code::cast(object)->ConvertICTargetsFromAddressToObject();
00505 }
00506 object->Iterate(&v);
00507 if (object->IsCode()) {
00508 Code::cast(object)->ConvertICTargetsFromObjectToAddress();
00509 }
00510 }
00511 }
00512 #endif
00513
00514 gc_state_ = SCAVENGE;
00515
00516
00517 LOG(ResourceEvent("scavenge", "begin"));
00518
00519 scavenge_count_++;
00520 if (new_space_.Capacity() < new_space_.MaximumCapacity() &&
00521 scavenge_count_ > new_space_growth_limit_) {
00522
00523
00524
00525
00526
00527 new_space_.Double();
00528 new_space_growth_limit_ *= 2;
00529 }
00530
00531
00532
00533 new_space_.Flip();
00534 new_space_.ResetAllocationInfo();
00535
00536
00537
00538
00539
00540
00541
00542
00543
00544
00545
00546
00547
00548
00549
00550 Address new_mark = new_space_.ToSpaceLow();
00551 Address promoted_mark = new_space_.ToSpaceHigh();
00552 promoted_top = new_space_.ToSpaceHigh();
00553
00554 ScavengeVisitor scavenge_visitor;
00555
00556 IterateRoots(&scavenge_visitor);
00557
00558
00559
00560 IterateRSet(old_pointer_space_, &ScavengePointer);
00561 IterateRSet(map_space_, &ScavengePointer);
00562 lo_space_->IterateRSet(&ScavengePointer);
00563
00564 bool has_processed_weak_pointers = false;
00565
00566 while (true) {
00567 ASSERT(new_mark <= new_space_.top());
00568 ASSERT(promoted_mark >= promoted_top);
00569
00570
00571 while (new_mark < new_space_.top() || promoted_mark > promoted_top) {
00572
00573
00574 Address previous_top = new_space_.top();
00575 SemiSpaceIterator new_it(new_space(), new_mark);
00576 while (new_it.has_next()) {
00577 new_it.next()->Iterate(&scavenge_visitor);
00578 }
00579 new_mark = previous_top;
00580
00581
00582
00583 previous_top = promoted_top;
00584 for (Address current = promoted_mark - kPointerSize;
00585 current >= previous_top;
00586 current -= kPointerSize) {
00587 HeapObject* object = HeapObject::cast(Memory::Object_at(current));
00588 object->Iterate(&scavenge_visitor);
00589 UpdateRSet(object);
00590 }
00591 promoted_mark = previous_top;
00592 }
00593
00594 if (has_processed_weak_pointers) break;
00595
00596 GlobalHandles::IterateWeakRoots(&scavenge_visitor);
00597 has_processed_weak_pointers = true;
00598 }
00599
00600
00601 new_space_.set_age_mark(new_mark);
00602
00603 LOG(ResourceEvent("scavenge", "end"));
00604
00605 gc_state_ = NOT_IN_GC;
00606 }
00607
00608
00609 void Heap::ClearRSetRange(Address start, int size_in_bytes) {
00610 uint32_t start_bit;
00611 Address start_word_address =
00612 Page::ComputeRSetBitPosition(start, 0, &start_bit);
00613 uint32_t end_bit;
00614 Address end_word_address =
00615 Page::ComputeRSetBitPosition(start + size_in_bytes - kIntSize,
00616 0,
00617 &end_bit);
00618
00619
00620
00621
00622 uint32_t start_bitmask = start_bit - 1;
00623 uint32_t end_bitmask = ~((end_bit << 1) - 1);
00624
00625
00626
00627
00628 if (start_word_address == end_word_address) {
00629 Memory::uint32_at(start_word_address) &= (start_bitmask | end_bitmask);
00630 } else {
00631 Memory::uint32_at(start_word_address) &= start_bitmask;
00632 Memory::uint32_at(end_word_address) &= end_bitmask;
00633 start_word_address += kIntSize;
00634 memset(start_word_address, 0, end_word_address - start_word_address);
00635 }
00636 }
00637
00638
00639 class UpdateRSetVisitor: public ObjectVisitor {
00640 public:
00641
00642 void VisitPointer(Object** p) {
00643 UpdateRSet(p);
00644 }
00645
00646 void VisitPointers(Object** start, Object** end) {
00647
00648
00649
00650 for (Object** p = start; p < end; p++) UpdateRSet(p);
00651 }
00652 private:
00653
00654 void UpdateRSet(Object** p) {
00655
00656
00657
00658 ASSERT(!Page::IsRSetSet(reinterpret_cast<Address>(p), 0));
00659 if (Heap::InNewSpace(*p)) {
00660 Page::SetRSet(reinterpret_cast<Address>(p), 0);
00661 }
00662 }
00663 };
00664
00665
00666 int Heap::UpdateRSet(HeapObject* obj) {
00667 ASSERT(!InNewSpace(obj));
00668
00669
00670
00671
00672 if (obj->IsFixedArray()) {
00673 FixedArray* array = FixedArray::cast(obj);
00674 int length = array->length();
00675 for (int i = 0; i < length; i++) {
00676 int offset = FixedArray::kHeaderSize + i * kPointerSize;
00677 ASSERT(!Page::IsRSetSet(obj->address(), offset));
00678 if (Heap::InNewSpace(array->get(i))) {
00679 Page::SetRSet(obj->address(), offset);
00680 }
00681 }
00682 } else if (!obj->IsCode()) {
00683
00684
00685 UpdateRSetVisitor v;
00686 obj->Iterate(&v);
00687 }
00688 return obj->Size();
00689 }
00690
00691
00692 void Heap::RebuildRSets() {
00693
00694
00695 map_space_->ClearRSet();
00696 RebuildRSets(map_space_);
00697
00698 old_pointer_space_->ClearRSet();
00699 RebuildRSets(old_pointer_space_);
00700
00701 Heap::lo_space_->ClearRSet();
00702 RebuildRSets(lo_space_);
00703 }
00704
00705
00706 void Heap::RebuildRSets(PagedSpace* space) {
00707 HeapObjectIterator it(space);
00708 while (it.has_next()) Heap::UpdateRSet(it.next());
00709 }
00710
00711
00712 void Heap::RebuildRSets(LargeObjectSpace* space) {
00713 LargeObjectIterator it(space);
00714 while (it.has_next()) Heap::UpdateRSet(it.next());
00715 }
00716
00717
00718 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
00719 void Heap::RecordCopiedObject(HeapObject* obj) {
00720 bool should_record = false;
00721 #ifdef DEBUG
00722 should_record = FLAG_heap_stats;
00723 #endif
00724 #ifdef ENABLE_LOGGING_AND_PROFILING
00725 should_record = should_record || FLAG_log_gc;
00726 #endif
00727 if (should_record) {
00728 if (new_space_.Contains(obj)) {
00729 new_space_.RecordAllocation(obj);
00730 } else {
00731 new_space_.RecordPromotion(obj);
00732 }
00733 }
00734 }
00735 #endif // defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
00736
00737
00738
00739 HeapObject* Heap::MigrateObject(HeapObject* source,
00740 HeapObject* target,
00741 int size) {
00742
00743 CopyBlock(reinterpret_cast<Object**>(target->address()),
00744 reinterpret_cast<Object**>(source->address()),
00745 size);
00746
00747
00748 source->set_map_word(MapWord::FromForwardingAddress(target));
00749
00750
00751 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
00752 RecordCopiedObject(target);
00753 #endif
00754
00755 return target;
00756 }
00757
00758
00759
00760 void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
00761 ASSERT(InFromSpace(object));
00762
00763
00764
00765
00766
00767 MapWord first_word = object->map_word();
00768
00769
00770
00771 if (first_word.IsForwardingAddress()) {
00772 *p = first_word.ToForwardingAddress();
00773 return;
00774 }
00775
00776
00777 return ScavengeObjectSlow(p, object);
00778 }
00779
00780 static inline bool IsShortcutCandidate(HeapObject* object, Map* map) {
00781
00782
00783 ASSERT(object->map() == map);
00784 return (map->instance_type() < FIRST_NONSTRING_TYPE) &&
00785 (String::cast(object)->map_representation_tag(map) == kConsStringTag) &&
00786 (ConsString::cast(object)->second() == Heap::empty_string());
00787 }
00788
00789
00790 void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
00791 ASSERT(InFromSpace(object));
00792 MapWord first_word = object->map_word();
00793 ASSERT(!first_word.IsForwardingAddress());
00794
00795
00796 if (IsShortcutCandidate(object, first_word.ToMap())) {
00797 object = HeapObject::cast(ConsString::cast(object)->first());
00798 *p = object;
00799
00800
00801 if (!InNewSpace(object)) return;
00802 first_word = object->map_word();
00803 if (first_word.IsForwardingAddress()) {
00804 *p = first_word.ToForwardingAddress();
00805 return;
00806 }
00807 }
00808
00809 int object_size = object->SizeFromMap(first_word.ToMap());
00810
00811 if (ShouldBePromoted(object->address(), object_size)) {
00812 OldSpace* target_space = Heap::TargetSpace(object);
00813 ASSERT(target_space == Heap::old_pointer_space_ ||
00814 target_space == Heap::old_data_space_);
00815 Object* result = target_space->AllocateRaw(object_size);
00816 if (!result->IsFailure()) {
00817 *p = MigrateObject(object, HeapObject::cast(result), object_size);
00818 if (target_space == Heap::old_pointer_space_) {
00819
00820
00821 promoted_top -= kPointerSize;
00822 Memory::Object_at(promoted_top) = *p;
00823 } else {
00824 #ifdef DEBUG
00825
00826
00827 VerifyNonPointerSpacePointersVisitor v;
00828 (*p)->Iterate(&v);
00829 #endif
00830 }
00831 return;
00832 }
00833 }
00834
00835
00836 Object* result = new_space_.AllocateRaw(object_size);
00837
00838 ASSERT(!result->IsFailure());
00839 *p = MigrateObject(object, HeapObject::cast(result), object_size);
00840 }
00841
00842
00843 void Heap::ScavengePointer(HeapObject** p) {
00844 ScavengeObject(p, *p);
00845 }
00846
00847
00848 Object* Heap::AllocatePartialMap(InstanceType instance_type,
00849 int instance_size) {
00850 Object* result = AllocateRawMap(Map::kSize);
00851 if (result->IsFailure()) return result;
00852
00853
00854 reinterpret_cast<Map*>(result)->set_map(meta_map());
00855 reinterpret_cast<Map*>(result)->set_instance_type(instance_type);
00856 reinterpret_cast<Map*>(result)->set_instance_size(instance_size);
00857 reinterpret_cast<Map*>(result)->set_inobject_properties(0);
00858 reinterpret_cast<Map*>(result)->set_unused_property_fields(0);
00859 return result;
00860 }
00861
00862
00863 Object* Heap::AllocateMap(InstanceType instance_type, int instance_size) {
00864 Object* result = AllocateRawMap(Map::kSize);
00865 if (result->IsFailure()) return result;
00866
00867 Map* map = reinterpret_cast<Map*>(result);
00868 map->set_map(meta_map());
00869 map->set_instance_type(instance_type);
00870 map->set_prototype(null_value());
00871 map->set_constructor(null_value());
00872 map->set_instance_size(instance_size);
00873 map->set_inobject_properties(0);
00874 map->set_instance_descriptors(empty_descriptor_array());
00875 map->set_code_cache(empty_fixed_array());
00876 map->set_unused_property_fields(0);
00877 map->set_bit_field(0);
00878 return map;
00879 }
00880
00881
00882 bool Heap::CreateInitialMaps() {
00883 Object* obj = AllocatePartialMap(MAP_TYPE, Map::kSize);
00884 if (obj->IsFailure()) return false;
00885
00886
00887 meta_map_ = reinterpret_cast<Map*>(obj);
00888 meta_map()->set_map(meta_map());
00889
00890 obj = AllocatePartialMap(FIXED_ARRAY_TYPE, Array::kHeaderSize);
00891 if (obj->IsFailure()) return false;
00892 fixed_array_map_ = Map::cast(obj);
00893
00894 obj = AllocatePartialMap(ODDBALL_TYPE, Oddball::kSize);
00895 if (obj->IsFailure()) return false;
00896 oddball_map_ = Map::cast(obj);
00897
00898
00899 obj = AllocateEmptyFixedArray();
00900 if (obj->IsFailure()) return false;
00901 empty_fixed_array_ = FixedArray::cast(obj);
00902
00903 obj = Allocate(oddball_map(), OLD_DATA_SPACE);
00904 if (obj->IsFailure()) return false;
00905 null_value_ = obj;
00906
00907
00908 obj = AllocateEmptyFixedArray();
00909 if (obj->IsFailure()) return false;
00910
00911 empty_descriptor_array_ = reinterpret_cast<DescriptorArray*>(obj);
00912
00913
00914 meta_map()->set_instance_descriptors(empty_descriptor_array());
00915 meta_map()->set_code_cache(empty_fixed_array());
00916
00917 fixed_array_map()->set_instance_descriptors(empty_descriptor_array());
00918 fixed_array_map()->set_code_cache(empty_fixed_array());
00919
00920 oddball_map()->set_instance_descriptors(empty_descriptor_array());
00921 oddball_map()->set_code_cache(empty_fixed_array());
00922
00923
00924 meta_map()->set_prototype(null_value());
00925 meta_map()->set_constructor(null_value());
00926
00927 fixed_array_map()->set_prototype(null_value());
00928 fixed_array_map()->set_constructor(null_value());
00929 oddball_map()->set_prototype(null_value());
00930 oddball_map()->set_constructor(null_value());
00931
00932 obj = AllocateMap(HEAP_NUMBER_TYPE, HeapNumber::kSize);
00933 if (obj->IsFailure()) return false;
00934 heap_number_map_ = Map::cast(obj);
00935
00936 obj = AllocateMap(PROXY_TYPE, Proxy::kSize);
00937 if (obj->IsFailure()) return false;
00938 proxy_map_ = Map::cast(obj);
00939
00940 #define ALLOCATE_STRING_MAP(type, size, name) \
00941 obj = AllocateMap(type, size); \
00942 if (obj->IsFailure()) return false; \
00943 name##_map_ = Map::cast(obj);
00944 STRING_TYPE_LIST(ALLOCATE_STRING_MAP);
00945 #undef ALLOCATE_STRING_MAP
00946
00947 obj = AllocateMap(SHORT_STRING_TYPE, SeqTwoByteString::kHeaderSize);
00948 if (obj->IsFailure()) return false;
00949 undetectable_short_string_map_ = Map::cast(obj);
00950 undetectable_short_string_map_->set_is_undetectable();
00951
00952 obj = AllocateMap(MEDIUM_STRING_TYPE, SeqTwoByteString::kHeaderSize);
00953 if (obj->IsFailure()) return false;
00954 undetectable_medium_string_map_ = Map::cast(obj);
00955 undetectable_medium_string_map_->set_is_undetectable();
00956
00957 obj = AllocateMap(LONG_STRING_TYPE, SeqTwoByteString::kHeaderSize);
00958 if (obj->IsFailure()) return false;
00959 undetectable_long_string_map_ = Map::cast(obj);
00960 undetectable_long_string_map_->set_is_undetectable();
00961
00962 obj = AllocateMap(SHORT_ASCII_STRING_TYPE, SeqAsciiString::kHeaderSize);
00963 if (obj->IsFailure()) return false;
00964 undetectable_short_ascii_string_map_ = Map::cast(obj);
00965 undetectable_short_ascii_string_map_->set_is_undetectable();
00966
00967 obj = AllocateMap(MEDIUM_ASCII_STRING_TYPE, SeqAsciiString::kHeaderSize);
00968 if (obj->IsFailure()) return false;
00969 undetectable_medium_ascii_string_map_ = Map::cast(obj);
00970 undetectable_medium_ascii_string_map_->set_is_undetectable();
00971
00972 obj = AllocateMap(LONG_ASCII_STRING_TYPE, SeqAsciiString::kHeaderSize);
00973 if (obj->IsFailure()) return false;
00974 undetectable_long_ascii_string_map_ = Map::cast(obj);
00975 undetectable_long_ascii_string_map_->set_is_undetectable();
00976
00977 obj = AllocateMap(BYTE_ARRAY_TYPE, Array::kHeaderSize);
00978 if (obj->IsFailure()) return false;
00979 byte_array_map_ = Map::cast(obj);
00980
00981 obj = AllocateMap(CODE_TYPE, Code::kHeaderSize);
00982 if (obj->IsFailure()) return false;
00983 code_map_ = Map::cast(obj);
00984
00985 obj = AllocateMap(FILLER_TYPE, kPointerSize);
00986 if (obj->IsFailure()) return false;
00987 one_word_filler_map_ = Map::cast(obj);
00988
00989 obj = AllocateMap(FILLER_TYPE, 2 * kPointerSize);
00990 if (obj->IsFailure()) return false;
00991 two_word_filler_map_ = Map::cast(obj);
00992
00993 #define ALLOCATE_STRUCT_MAP(NAME, Name, name) \
00994 obj = AllocateMap(NAME##_TYPE, Name::kSize); \
00995 if (obj->IsFailure()) return false; \
00996 name##_map_ = Map::cast(obj);
00997 STRUCT_LIST(ALLOCATE_STRUCT_MAP)
00998 #undef ALLOCATE_STRUCT_MAP
00999
01000 obj = AllocateMap(FIXED_ARRAY_TYPE, HeapObject::kHeaderSize);
01001 if (obj->IsFailure()) return false;
01002 hash_table_map_ = Map::cast(obj);
01003
01004 obj = AllocateMap(FIXED_ARRAY_TYPE, HeapObject::kHeaderSize);
01005 if (obj->IsFailure()) return false;
01006 context_map_ = Map::cast(obj);
01007
01008 obj = AllocateMap(FIXED_ARRAY_TYPE, HeapObject::kHeaderSize);
01009 if (obj->IsFailure()) return false;
01010 global_context_map_ = Map::cast(obj);
01011
01012 obj = AllocateMap(JS_FUNCTION_TYPE, JSFunction::kSize);
01013 if (obj->IsFailure()) return false;
01014 boilerplate_function_map_ = Map::cast(obj);
01015
01016 obj = AllocateMap(SHARED_FUNCTION_INFO_TYPE, SharedFunctionInfo::kSize);
01017 if (obj->IsFailure()) return false;
01018 shared_function_info_map_ = Map::cast(obj);
01019
01020 ASSERT(!Heap::InNewSpace(Heap::empty_fixed_array()));
01021 return true;
01022 }
01023
01024
01025 Object* Heap::AllocateHeapNumber(double value, PretenureFlag pretenure) {
01026
01027
01028 STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxHeapObjectSize);
01029 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
01030 Object* result = AllocateRaw(HeapNumber::kSize, space, OLD_DATA_SPACE);
01031 if (result->IsFailure()) return result;
01032
01033 HeapObject::cast(result)->set_map(heap_number_map());
01034 HeapNumber::cast(result)->set_value(value);
01035 return result;
01036 }
01037
01038
01039 Object* Heap::AllocateHeapNumber(double value) {
01040
01041 if (always_allocate()) return AllocateHeapNumber(value, NOT_TENURED);
01042
01043
01044 STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxHeapObjectSize);
01045 ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
01046 Object* result = new_space_.AllocateRaw(HeapNumber::kSize);
01047 if (result->IsFailure()) return result;
01048 HeapObject::cast(result)->set_map(heap_number_map());
01049 HeapNumber::cast(result)->set_value(value);
01050 return result;
01051 }
01052
01053
01054 Object* Heap::CreateOddball(Map* map,
01055 const char* to_string,
01056 Object* to_number) {
01057 Object* result = Allocate(map, OLD_DATA_SPACE);
01058 if (result->IsFailure()) return result;
01059 return Oddball::cast(result)->Initialize(to_string, to_number);
01060 }
01061
01062
01063 bool Heap::CreateApiObjects() {
01064 Object* obj;
01065
01066 obj = AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
01067 if (obj->IsFailure()) return false;
01068 neander_map_ = Map::cast(obj);
01069
01070 obj = Heap::AllocateJSObjectFromMap(neander_map_);
01071 if (obj->IsFailure()) return false;
01072 Object* elements = AllocateFixedArray(2);
01073 if (elements->IsFailure()) return false;
01074 FixedArray::cast(elements)->set(0, Smi::FromInt(0));
01075 JSObject::cast(obj)->set_elements(FixedArray::cast(elements));
01076 message_listeners_ = JSObject::cast(obj);
01077
01078 obj = Heap::AllocateJSObjectFromMap(neander_map_);
01079 if (obj->IsFailure()) return false;
01080 elements = AllocateFixedArray(2);
01081 if (elements->IsFailure()) return false;
01082 FixedArray::cast(elements)->set(0, Smi::FromInt(0));
01083 JSObject::cast(obj)->set_elements(FixedArray::cast(elements));
01084 debug_event_listeners_ = JSObject::cast(obj);
01085
01086 return true;
01087 }
01088
01089 void Heap::CreateFixedStubs() {
01090
01091
01092
01093
01094 HandleScope scope;
01095 {
01096 CEntryStub stub;
01097 c_entry_code_ = *stub.GetCode();
01098 }
01099 {
01100 CEntryDebugBreakStub stub;
01101 c_entry_debug_break_code_ = *stub.GetCode();
01102 }
01103 {
01104 JSEntryStub stub;
01105 js_entry_code_ = *stub.GetCode();
01106 }
01107 {
01108 JSConstructEntryStub stub;
01109 js_construct_entry_code_ = *stub.GetCode();
01110 }
01111 }
01112
01113
01114 bool Heap::CreateInitialObjects() {
01115 Object* obj;
01116
01117
01118 obj = AllocateHeapNumber(-0.0, TENURED);
01119 if (obj->IsFailure()) return false;
01120 minus_zero_value_ = obj;
01121 ASSERT(signbit(minus_zero_value_->Number()) != 0);
01122
01123 obj = AllocateHeapNumber(OS::nan_value(), TENURED);
01124 if (obj->IsFailure()) return false;
01125 nan_value_ = obj;
01126
01127 obj = Allocate(oddball_map(), OLD_DATA_SPACE);
01128 if (obj->IsFailure()) return false;
01129 undefined_value_ = obj;
01130 ASSERT(!InNewSpace(undefined_value()));
01131
01132
01133 obj = SymbolTable::Allocate(kInitialSymbolTableSize);
01134 if (obj->IsFailure()) return false;
01135 symbol_table_ = obj;
01136
01137
01138 Object* symbol = LookupAsciiSymbol("undefined");
01139 if (symbol->IsFailure()) return false;
01140 Oddball::cast(undefined_value_)->set_to_string(String::cast(symbol));
01141 Oddball::cast(undefined_value_)->set_to_number(nan_value_);
01142
01143
01144 symbol = LookupAsciiSymbol("null");
01145 if (symbol->IsFailure()) return false;
01146 Oddball::cast(null_value_)->set_to_string(String::cast(symbol));
01147 Oddball::cast(null_value_)->set_to_number(Smi::FromInt(0));
01148
01149
01150 obj = Oddball::cast(null_value())->Initialize("null", Smi::FromInt(0));
01151 if (obj->IsFailure()) return false;
01152
01153 obj = CreateOddball(oddball_map(), "true", Smi::FromInt(1));
01154 if (obj->IsFailure()) return false;
01155 true_value_ = obj;
01156
01157 obj = CreateOddball(oddball_map(), "false", Smi::FromInt(0));
01158 if (obj->IsFailure()) return false;
01159 false_value_ = obj;
01160
01161 obj = CreateOddball(oddball_map(), "hole", Smi::FromInt(-1));
01162 if (obj->IsFailure()) return false;
01163 the_hole_value_ = obj;
01164
01165
01166 obj = AllocateRawAsciiString(0, TENURED);
01167 if (obj->IsFailure()) return false;
01168 empty_string_ = String::cast(obj);
01169
01170 #define SYMBOL_INITIALIZE(name, string) \
01171 obj = LookupAsciiSymbol(string); \
01172 if (obj->IsFailure()) return false; \
01173 (name##_) = String::cast(obj);
01174 SYMBOL_LIST(SYMBOL_INITIALIZE)
01175 #undef SYMBOL_INITIALIZE
01176
01177
01178 obj = AllocateProxy((Address) &Accessors::ObjectPrototype);
01179 if (obj->IsFailure()) return false;
01180 prototype_accessors_ = Proxy::cast(obj);
01181
01182
01183 obj = Dictionary::Allocate(4);
01184 if (obj->IsFailure()) return false;
01185 code_stubs_ = Dictionary::cast(obj);
01186
01187
01188 obj = Dictionary::Allocate(4);
01189 if (obj->IsFailure()) return false;
01190 non_monomorphic_cache_ = Dictionary::cast(obj);
01191
01192 CreateFixedStubs();
01193
01194
01195 obj = AllocateFixedArray(kNumberStringCacheSize * 2);
01196 if (obj->IsFailure()) return false;
01197 number_string_cache_ = FixedArray::cast(obj);
01198
01199
01200 obj = AllocateFixedArray(String::kMaxAsciiCharCode+1);
01201 if (obj->IsFailure()) return false;
01202 single_character_string_cache_ = FixedArray::cast(obj);
01203
01204
01205 obj = AllocateFixedArray(Natives::GetBuiltinsCount());
01206 if (obj->IsFailure()) return false;
01207 natives_source_cache_ = FixedArray::cast(obj);
01208
01209
01210 ClearKeyedLookupCache();
01211
01212
01213 CompilationCache::Clear();
01214
01215 return true;
01216 }
01217
01218
01219 static inline int double_get_hash(double d) {
01220 DoubleRepresentation rep(d);
01221 return ((static_cast<int>(rep.bits) ^ static_cast<int>(rep.bits >> 32)) &
01222 (Heap::kNumberStringCacheSize - 1));
01223 }
01224
01225
01226 static inline int smi_get_hash(Smi* smi) {
01227 return (smi->value() & (Heap::kNumberStringCacheSize - 1));
01228 }
01229
01230
01231
01232 Object* Heap::GetNumberStringCache(Object* number) {
01233 int hash;
01234 if (number->IsSmi()) {
01235 hash = smi_get_hash(Smi::cast(number));
01236 } else {
01237 hash = double_get_hash(number->Number());
01238 }
01239 Object* key = number_string_cache_->get(hash * 2);
01240 if (key == number) {
01241 return String::cast(number_string_cache_->get(hash * 2 + 1));
01242 } else if (key->IsHeapNumber() &&
01243 number->IsHeapNumber() &&
01244 key->Number() == number->Number()) {
01245 return String::cast(number_string_cache_->get(hash * 2 + 1));
01246 }
01247 return undefined_value();
01248 }
01249
01250
01251 void Heap::SetNumberStringCache(Object* number, String* string) {
01252 int hash;
01253 if (number->IsSmi()) {
01254 hash = smi_get_hash(Smi::cast(number));
01255 number_string_cache_->set(hash * 2, number, SKIP_WRITE_BARRIER);
01256 } else {
01257 hash = double_get_hash(number->Number());
01258 number_string_cache_->set(hash * 2, number);
01259 }
01260 number_string_cache_->set(hash * 2 + 1, string);
01261 }
01262
01263
01264 Object* Heap::SmiOrNumberFromDouble(double value,
01265 bool new_object,
01266 PretenureFlag pretenure) {
01267
01268
01269
01270 static const DoubleRepresentation plus_zero(0.0);
01271 static const DoubleRepresentation minus_zero(-0.0);
01272 static const DoubleRepresentation nan(OS::nan_value());
01273 ASSERT(minus_zero_value_ != NULL);
01274 ASSERT(sizeof(plus_zero.value) == sizeof(plus_zero.bits));
01275
01276 DoubleRepresentation rep(value);
01277 if (rep.bits == plus_zero.bits) return Smi::FromInt(0);
01278 if (rep.bits == minus_zero.bits) {
01279 return new_object ? AllocateHeapNumber(-0.0, pretenure)
01280 : minus_zero_value_;
01281 }
01282 if (rep.bits == nan.bits) {
01283 return new_object
01284 ? AllocateHeapNumber(OS::nan_value(), pretenure)
01285 : nan_value_;
01286 }
01287
01288
01289 int int_value = FastD2I(value);
01290 if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
01291 return Smi::FromInt(int_value);
01292 }
01293
01294
01295 return AllocateHeapNumber(value, pretenure);
01296 }
01297
01298
01299 Object* Heap::NewNumberFromDouble(double value, PretenureFlag pretenure) {
01300 return SmiOrNumberFromDouble(value,
01301 true ,
01302 pretenure);
01303 }
01304
01305
01306 Object* Heap::NumberFromDouble(double value, PretenureFlag pretenure) {
01307 return SmiOrNumberFromDouble(value,
01308 false ,
01309 pretenure);
01310 }
01311
01312
01313 Object* Heap::AllocateProxy(Address proxy, PretenureFlag pretenure) {
01314
01315 STATIC_ASSERT(Proxy::kSize <= Page::kMaxHeapObjectSize);
01316 AllocationSpace space =
01317 (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
01318 Object* result = Allocate(proxy_map(), space);
01319 if (result->IsFailure()) return result;
01320
01321 Proxy::cast(result)->set_proxy(proxy);
01322 return result;
01323 }
01324
01325
01326 Object* Heap::AllocateSharedFunctionInfo(Object* name) {
01327 Object* result = Allocate(shared_function_info_map(), NEW_SPACE);
01328 if (result->IsFailure()) return result;
01329
01330 SharedFunctionInfo* share = SharedFunctionInfo::cast(result);
01331 share->set_name(name);
01332 Code* illegal = Builtins::builtin(Builtins::Illegal);
01333 share->set_code(illegal);
01334 share->set_expected_nof_properties(0);
01335 share->set_length(0);
01336 share->set_formal_parameter_count(0);
01337 share->set_instance_class_name(Object_symbol());
01338 share->set_function_data(undefined_value());
01339 share->set_lazy_load_data(undefined_value());
01340 share->set_script(undefined_value());
01341 share->set_start_position_and_type(0);
01342 share->set_debug_info(undefined_value());
01343 return result;
01344 }
01345
01346
01347 Object* Heap::AllocateConsString(String* first, String* second) {
01348 int first_length = first->length();
01349 int second_length = second->length();
01350 int length = first_length + second_length;
01351 bool is_ascii = first->is_ascii_representation()
01352 && second->is_ascii_representation();
01353
01354
01355 if (length < String::kMinNonFlatLength) {
01356 ASSERT(first->IsFlat());
01357 ASSERT(second->IsFlat());
01358 if (is_ascii) {
01359 Object* result = AllocateRawAsciiString(length);
01360 if (result->IsFailure()) return result;
01361
01362 char* dest = SeqAsciiString::cast(result)->GetChars();
01363 String::WriteToFlat(first, dest, 0, first_length);
01364 String::WriteToFlat(second, dest + first_length, 0, second_length);
01365 return result;
01366 } else {
01367 Object* result = AllocateRawTwoByteString(length);
01368 if (result->IsFailure()) return result;
01369
01370 uc16* dest = SeqTwoByteString::cast(result)->GetChars();
01371 String::WriteToFlat(first, dest, 0, first_length);
01372 String::WriteToFlat(second, dest + first_length, 0, second_length);
01373 return result;
01374 }
01375 }
01376
01377 Map* map;
01378 if (length <= String::kMaxShortStringSize) {
01379 map = is_ascii ? short_cons_ascii_string_map()
01380 : short_cons_string_map();
01381 } else if (length <= String::kMaxMediumStringSize) {
01382 map = is_ascii ? medium_cons_ascii_string_map()
01383 : medium_cons_string_map();
01384 } else {
01385 map = is_ascii ? long_cons_ascii_string_map()
01386 : long_cons_string_map();
01387 }
01388
01389 Object* result = Allocate(map, NEW_SPACE);
01390 if (result->IsFailure()) return result;
01391 ASSERT(InNewSpace(result));
01392 ConsString* cons_string = ConsString::cast(result);
01393 cons_string->set_first(first, SKIP_WRITE_BARRIER);
01394 cons_string->set_second(second, SKIP_WRITE_BARRIER);
01395 cons_string->set_length(length);
01396 return result;
01397 }
01398
01399
01400 Object* Heap::AllocateSlicedString(String* buffer, int start, int end) {
01401 int length = end - start;
01402
01403
01404 if (end - start <= String::kMinNonFlatLength) {
01405 return Heap::AllocateSubString(buffer, start, end);
01406 }
01407
01408 Map* map;
01409 if (length <= String::kMaxShortStringSize) {
01410 map = buffer->is_ascii_representation() ? short_sliced_ascii_string_map()
01411 : short_sliced_string_map();
01412 } else if (length <= String::kMaxMediumStringSize) {
01413 map = buffer->is_ascii_representation() ? medium_sliced_ascii_string_map()
01414 : medium_sliced_string_map();
01415 } else {
01416 map = buffer->is_ascii_representation() ? long_sliced_ascii_string_map()
01417 : long_sliced_string_map();
01418 }
01419
01420 Object* result = Allocate(map, NEW_SPACE);
01421 if (result->IsFailure()) return result;
01422
01423 SlicedString* sliced_string = SlicedString::cast(result);
01424 sliced_string->set_buffer(buffer);
01425 sliced_string->set_start(start);
01426 sliced_string->set_length(length);
01427
01428 return result;
01429 }
01430
01431
01432 Object* Heap::AllocateSubString(String* buffer, int start, int end) {
01433 int length = end - start;
01434
01435 if (length == 1) {
01436 return Heap::LookupSingleCharacterStringFromCode(buffer->Get(start));
01437 }
01438
01439
01440 buffer->TryFlatten();
01441
01442 Object* result = buffer->is_ascii_representation()
01443 ? AllocateRawAsciiString(length)
01444 : AllocateRawTwoByteString(length);
01445 if (result->IsFailure()) return result;
01446
01447
01448 String* string_result = String::cast(result);
01449 StringHasher hasher(length);
01450 int i = 0;
01451 for (; i < length && hasher.is_array_index(); i++) {
01452 uc32 c = buffer->Get(start + i);
01453 hasher.AddCharacter(c);
01454 string_result->Set(i, c);
01455 }
01456 for (; i < length; i++) {
01457 uc32 c = buffer->Get(start + i);
01458 hasher.AddCharacterNoIndex(c);
01459 string_result->Set(i, c);
01460 }
01461 string_result->set_length_field(hasher.GetHashField());
01462 return result;
01463 }
01464
01465
01466 Object* Heap::AllocateExternalStringFromAscii(
01467 ExternalAsciiString::Resource* resource) {
01468 Map* map;
01469 int length = resource->length();
01470 if (length <= String::kMaxShortStringSize) {
01471 map = short_external_ascii_string_map();
01472 } else if (length <= String::kMaxMediumStringSize) {
01473 map = medium_external_ascii_string_map();
01474 } else {
01475 map = long_external_ascii_string_map();
01476 }
01477
01478 Object* result = Allocate(map, NEW_SPACE);
01479 if (result->IsFailure()) return result;
01480
01481 ExternalAsciiString* external_string = ExternalAsciiString::cast(result);
01482 external_string->set_length(length);
01483 external_string->set_resource(resource);
01484
01485 return result;
01486 }
01487
01488
01489 Object* Heap::AllocateExternalStringFromTwoByte(
01490 ExternalTwoByteString::Resource* resource) {
01491 Map* map;
01492 int length = resource->length();
01493 if (length <= String::kMaxShortStringSize) {
01494 map = short_external_string_map();
01495 } else if (length <= String::kMaxMediumStringSize) {
01496 map = medium_external_string_map();
01497 } else {
01498 map = long_external_string_map();
01499 }
01500
01501 Object* result = Allocate(map, NEW_SPACE);
01502 if (result->IsFailure()) return result;
01503
01504 ExternalTwoByteString* external_string = ExternalTwoByteString::cast(result);
01505 external_string->set_length(length);
01506 external_string->set_resource(resource);
01507
01508 return result;
01509 }
01510
01511
01512 Object* Heap::LookupSingleCharacterStringFromCode(uint16_t code) {
01513 if (code <= String::kMaxAsciiCharCode) {
01514 Object* value = Heap::single_character_string_cache()->get(code);
01515 if (value != Heap::undefined_value()) return value;
01516
01517 char buffer[1];
01518 buffer[0] = static_cast<char>(code);
01519 Object* result = LookupSymbol(Vector<const char>(buffer, 1));
01520
01521 if (result->IsFailure()) return result;
01522 Heap::single_character_string_cache()->set(code, result);
01523 return result;
01524 }
01525
01526 Object* result = Heap::AllocateRawTwoByteString(1);
01527 if (result->IsFailure()) return result;
01528 String::cast(result)->Set(0, code);
01529 return result;
01530 }
01531
01532
01533 Object* Heap::AllocateByteArray(int length) {
01534 int size = ByteArray::SizeFor(length);
01535 AllocationSpace space =
01536 size > MaxHeapObjectSize() ? LO_SPACE : NEW_SPACE;
01537
01538 Object* result = AllocateRaw(size, space, OLD_DATA_SPACE);
01539
01540 if (result->IsFailure()) return result;
01541
01542 reinterpret_cast<Array*>(result)->set_map(byte_array_map());
01543 reinterpret_cast<Array*>(result)->set_length(length);
01544 return result;
01545 }
01546
01547
01548 Object* Heap::CreateCode(const CodeDesc& desc,
01549 ScopeInfo<>* sinfo,
01550 Code::Flags flags) {
01551
01552 int body_size = RoundUp(desc.instr_size + desc.reloc_size, kObjectAlignment);
01553 int sinfo_size = 0;
01554 if (sinfo != NULL) sinfo_size = sinfo->Serialize(NULL);
01555 int obj_size = Code::SizeFor(body_size, sinfo_size);
01556 Object* result;
01557 if (obj_size > MaxHeapObjectSize()) {
01558 result = lo_space_->AllocateRawCode(obj_size);
01559 } else {
01560 result = code_space_->AllocateRaw(obj_size);
01561 }
01562
01563 if (result->IsFailure()) return result;
01564
01565
01566 HeapObject::cast(result)->set_map(code_map());
01567 Code* code = Code::cast(result);
01568 code->set_instruction_size(desc.instr_size);
01569 code->set_relocation_size(desc.reloc_size);
01570 code->set_sinfo_size(sinfo_size);
01571 code->set_flags(flags);
01572 code->set_ic_flag(Code::IC_TARGET_IS_ADDRESS);
01573 code->CopyFrom(desc);
01574 if (sinfo != NULL) sinfo->Serialize(code);
01575
01576 #ifdef DEBUG
01577 code->Verify();
01578 #endif
01579 return code;
01580 }
01581
01582
01583 Object* Heap::CopyCode(Code* code) {
01584
01585 int obj_size = code->Size();
01586 Object* result;
01587 if (obj_size > MaxHeapObjectSize()) {
01588 result = lo_space_->AllocateRawCode(obj_size);
01589 } else {
01590 result = code_space_->AllocateRaw(obj_size);
01591 }
01592
01593 if (result->IsFailure()) return result;
01594
01595
01596 Address old_addr = code->address();
01597 Address new_addr = reinterpret_cast<HeapObject*>(result)->address();
01598 CopyBlock(reinterpret_cast<Object**>(new_addr),
01599 reinterpret_cast<Object**>(old_addr),
01600 obj_size);
01601
01602 Code* new_code = Code::cast(result);
01603 new_code->Relocate(new_addr - old_addr);
01604 return new_code;
01605 }
01606
01607
01608 Object* Heap::Allocate(Map* map, AllocationSpace space) {
01609 ASSERT(gc_state_ == NOT_IN_GC);
01610 ASSERT(map->instance_type() != MAP_TYPE);
01611 Object* result = AllocateRaw(map->instance_size(),
01612 space,
01613 TargetSpaceId(map->instance_type()));
01614 if (result->IsFailure()) return result;
01615 HeapObject::cast(result)->set_map(map);
01616 return result;
01617 }
01618
01619
01620 Object* Heap::InitializeFunction(JSFunction* function,
01621 SharedFunctionInfo* shared,
01622 Object* prototype) {
01623 ASSERT(!prototype->IsMap());
01624 function->initialize_properties();
01625 function->initialize_elements();
01626 function->set_shared(shared);
01627 function->set_prototype_or_initial_map(prototype);
01628 function->set_context(undefined_value());
01629 function->set_literals(empty_fixed_array(), SKIP_WRITE_BARRIER);
01630 return function;
01631 }
01632
01633
01634 Object* Heap::AllocateFunctionPrototype(JSFunction* function) {
01635
01636 Object* prototype =
01637 AllocateJSObject(Top::context()->global_context()->object_function());
01638 if (prototype->IsFailure()) return prototype;
01639
01640
01641 Object* result =
01642 JSObject::cast(prototype)->SetProperty(constructor_symbol(),
01643 function,
01644 DONT_ENUM);
01645 if (result->IsFailure()) return result;
01646 return prototype;
01647 }
01648
01649
01650 Object* Heap::AllocateFunction(Map* function_map,
01651 SharedFunctionInfo* shared,
01652 Object* prototype) {
01653 Object* result = Allocate(function_map, OLD_POINTER_SPACE);
01654 if (result->IsFailure()) return result;
01655 return InitializeFunction(JSFunction::cast(result), shared, prototype);
01656 }
01657
01658
01659 Object* Heap::AllocateArgumentsObject(Object* callee, int length) {
01660
01661
01662
01663
01664
01665 ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
01666
01667 JSObject* boilerplate =
01668 Top::context()->global_context()->arguments_boilerplate();
01669
01670
01671 Map* map = boilerplate->map();
01672 int object_size = map->instance_size();
01673 Object* result = AllocateRaw(object_size, NEW_SPACE, OLD_POINTER_SPACE);
01674 if (result->IsFailure()) return result;
01675
01676
01677
01678
01679 CopyBlock(reinterpret_cast<Object**>(HeapObject::cast(result)->address()),
01680 reinterpret_cast<Object**>(boilerplate->address()),
01681 object_size);
01682
01683
01684 JSObject::cast(result)->InObjectPropertyAtPut(arguments_callee_index,
01685 callee);
01686 JSObject::cast(result)->InObjectPropertyAtPut(arguments_length_index,
01687 Smi::FromInt(length),
01688 SKIP_WRITE_BARRIER);
01689
01690
01691 ASSERT(JSObject::cast(result)->HasFastProperties());
01692 ASSERT(JSObject::cast(result)->HasFastElements());
01693
01694 return result;
01695 }
01696
01697
01698 Object* Heap::AllocateInitialMap(JSFunction* fun) {
01699 ASSERT(!fun->has_initial_map());
01700
01701
01702
01703 int expected_nof_properties = fun->shared()->expected_nof_properties();
01704 int instance_size = JSObject::kHeaderSize +
01705 expected_nof_properties * kPointerSize;
01706 if (instance_size > JSObject::kMaxInstanceSize) {
01707 instance_size = JSObject::kMaxInstanceSize;
01708 expected_nof_properties = (instance_size - JSObject::kHeaderSize) /
01709 kPointerSize;
01710 }
01711 Object* map_obj = Heap::AllocateMap(JS_OBJECT_TYPE, instance_size);
01712 if (map_obj->IsFailure()) return map_obj;
01713
01714
01715 Object* prototype;
01716 if (fun->has_instance_prototype()) {
01717 prototype = fun->instance_prototype();
01718 } else {
01719 prototype = AllocateFunctionPrototype(fun);
01720 if (prototype->IsFailure()) return prototype;
01721 }
01722 Map* map = Map::cast(map_obj);
01723 map->set_inobject_properties(expected_nof_properties);
01724 map->set_unused_property_fields(expected_nof_properties);
01725 map->set_prototype(prototype);
01726 return map;
01727 }
01728
01729
01730 void Heap::InitializeJSObjectFromMap(JSObject* obj,
01731 FixedArray* properties,
01732 Map* map) {
01733 obj->set_properties(properties);
01734 obj->initialize_elements();
01735
01736
01737
01738
01739
01740
01741
01742 obj->InitializeBody(map->instance_size());
01743 }
01744
01745
01746 Object* Heap::AllocateJSObjectFromMap(Map* map, PretenureFlag pretenure) {
01747
01748
01749 ASSERT(map->instance_type() != JS_FUNCTION_TYPE);
01750
01751
01752 int prop_size = map->unused_property_fields() - map->inobject_properties();
01753 Object* properties = AllocateFixedArray(prop_size);
01754 if (properties->IsFailure()) return properties;
01755
01756
01757 AllocationSpace space =
01758 (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE;
01759 if (map->instance_size() > MaxHeapObjectSize()) space = LO_SPACE;
01760 Object* obj = Allocate(map, space);
01761 if (obj->IsFailure()) return obj;
01762
01763
01764 InitializeJSObjectFromMap(JSObject::cast(obj),
01765 FixedArray::cast(properties),
01766 map);
01767 return obj;
01768 }
01769
01770
01771 Object* Heap::AllocateJSObject(JSFunction* constructor,
01772 PretenureFlag pretenure) {
01773
01774 if (!constructor->has_initial_map()) {
01775 Object* initial_map = AllocateInitialMap(constructor);
01776 if (initial_map->IsFailure()) return initial_map;
01777 constructor->set_initial_map(Map::cast(initial_map));
01778 Map::cast(initial_map)->set_constructor(constructor);
01779 }
01780
01781 return AllocateJSObjectFromMap(constructor->initial_map(), pretenure);
01782 }
01783
01784
01785 Object* Heap::CopyJSObject(JSObject* source) {
01786
01787
01788 ASSERT(!source->IsJSFunction());
01789
01790
01791 Map* map = source->map();
01792 int object_size = map->instance_size();
01793 Object* clone;
01794
01795
01796
01797 if (always_allocate()) {
01798 clone = AllocateRaw(object_size, NEW_SPACE, OLD_POINTER_SPACE);
01799 if (clone->IsFailure()) return clone;
01800 Address clone_address = HeapObject::cast(clone)->address();
01801 CopyBlock(reinterpret_cast<Object**>(clone_address),
01802 reinterpret_cast<Object**>(source->address()),
01803 object_size);
01804
01805 for (int offset = JSObject::kHeaderSize;
01806 offset < object_size;
01807 offset += kPointerSize) {
01808 RecordWrite(clone_address, offset);
01809 }
01810 } else {
01811 clone = new_space_.AllocateRaw(object_size);
01812 if (clone->IsFailure()) return clone;
01813 ASSERT(Heap::InNewSpace(clone));
01814
01815
01816 CopyBlock(reinterpret_cast<Object**>(HeapObject::cast(clone)->address()),
01817 reinterpret_cast<Object**>(source->address()),
01818 object_size);
01819 }
01820
01821 FixedArray* elements = FixedArray::cast(source->elements());
01822 FixedArray* properties = FixedArray::cast(source->properties());
01823
01824 if (elements->length()> 0) {
01825 Object* elem = CopyFixedArray(elements);
01826 if (elem->IsFailure()) return elem;
01827 JSObject::cast(clone)->set_elements(FixedArray::cast(elem));
01828 }
01829
01830 if (properties->length() > 0) {
01831 Object* prop = CopyFixedArray(properties);
01832 if (prop->IsFailure()) return prop;
01833 JSObject::cast(clone)->set_properties(FixedArray::cast(prop));
01834 }
01835
01836 return clone;
01837 }
01838
01839
01840 Object* Heap::ReinitializeJSGlobalProxy(JSFunction* constructor,
01841 JSGlobalProxy* object) {
01842
01843 if (!constructor->has_initial_map()) {
01844 Object* initial_map = AllocateInitialMap(constructor);
01845 if (initial_map->IsFailure()) return initial_map;
01846 constructor->set_initial_map(Map::cast(initial_map));
01847 Map::cast(initial_map)->set_constructor(constructor);
01848 }
01849
01850 Map* map = constructor->initial_map();
01851
01852
01853
01854 ASSERT(map->instance_size() == object->map()->instance_size());
01855
01856
01857 int prop_size = map->unused_property_fields() - map->inobject_properties();
01858 Object* properties = AllocateFixedArray(prop_size);
01859 if (properties->IsFailure()) return properties;
01860
01861
01862 object->set_map(constructor->initial_map());
01863
01864
01865 InitializeJSObjectFromMap(object, FixedArray::cast(properties), map);
01866 return object;
01867 }
01868
01869
01870 Object* Heap::AllocateStringFromAscii(Vector<const char> string,
01871 PretenureFlag pretenure) {
01872 Object* result = AllocateRawAsciiString(string.length(), pretenure);
01873 if (result->IsFailure()) return result;
01874
01875
01876 SeqAsciiString* string_result = SeqAsciiString::cast(result);
01877 for (int i = 0; i < string.length(); i++) {
01878 string_result->SeqAsciiStringSet(i, string[i]);
01879 }
01880 return result;
01881 }
01882
01883
01884 Object* Heap::AllocateStringFromUtf8(Vector<const char> string,
01885 PretenureFlag pretenure) {
01886
01887
01888 Access<Scanner::Utf8Decoder> decoder(Scanner::utf8_decoder());
01889 decoder->Reset(string.start(), string.length());
01890 int chars = 0;
01891 bool is_ascii = true;
01892 while (decoder->has_more()) {
01893 uc32 r = decoder->GetNext();
01894 if (r > String::kMaxAsciiCharCode) is_ascii = false;
01895 chars++;
01896 }
01897
01898
01899
01900 if (is_ascii) return AllocateStringFromAscii(string, pretenure);
01901
01902 Object* result = AllocateRawTwoByteString(chars, pretenure);
01903 if (result->IsFailure()) return result;
01904
01905
01906 String* string_result = String::cast(result);
01907 decoder->Reset(string.start(), string.length());
01908 for (int i = 0; i < chars; i++) {
01909 uc32 r = decoder->GetNext();
01910 string_result->Set(i, r);
01911 }
01912 return result;
01913 }
01914
01915
01916 Object* Heap::AllocateStringFromTwoByte(Vector<const uc16> string,
01917 PretenureFlag pretenure) {
01918
01919 int i = 0;
01920 while (i < string.length() && string[i] <= String::kMaxAsciiCharCode) i++;
01921
01922 Object* result;
01923 if (i == string.length()) {
01924 result = AllocateRawAsciiString(string.length(), pretenure);
01925 } else {
01926 result = AllocateRawTwoByteString(string.length(), pretenure);
01927 }
01928 if (result->IsFailure()) return result;
01929
01930
01931
01932 String* string_result = String::cast(result);
01933 for (int i = 0; i < string.length(); i++) {
01934 string_result->Set(i, string[i]);
01935 }
01936 return result;
01937 }
01938
01939
01940 Map* Heap::SymbolMapForString(String* string) {
01941
01942 if (InNewSpace(string)) return NULL;
01943
01944
01945 Map* map = string->map();
01946
01947 if (map == short_ascii_string_map()) return short_ascii_symbol_map();
01948 if (map == medium_ascii_string_map()) return medium_ascii_symbol_map();
01949 if (map == long_ascii_string_map()) return long_ascii_symbol_map();
01950
01951 if (map == short_string_map()) return short_symbol_map();
01952 if (map == medium_string_map()) return medium_symbol_map();
01953 if (map == long_string_map()) return long_symbol_map();
01954
01955 if (map == short_cons_string_map()) return short_cons_symbol_map();
01956 if (map == medium_cons_string_map()) return medium_cons_symbol_map();
01957 if (map == long_cons_string_map()) return long_cons_symbol_map();
01958
01959 if (map == short_cons_ascii_string_map()) {
01960 return short_cons_ascii_symbol_map();
01961 }
01962 if (map == medium_cons_ascii_string_map()) {
01963 return medium_cons_ascii_symbol_map();
01964 }
01965 if (map == long_cons_ascii_string_map()) {
01966 return long_cons_ascii_symbol_map();
01967 }
01968
01969 if (map == short_sliced_string_map()) return short_sliced_symbol_map();
01970 if (map == medium_sliced_string_map()) return medium_sliced_symbol_map();
01971 if (map == long_sliced_string_map()) return long_sliced_symbol_map();
01972
01973 if (map == short_sliced_ascii_string_map()) {
01974 return short_sliced_ascii_symbol_map();
01975 }
01976 if (map == medium_sliced_ascii_string_map()) {
01977 return medium_sliced_ascii_symbol_map();
01978 }
01979 if (map == long_sliced_ascii_string_map()) {
01980 return long_sliced_ascii_symbol_map();
01981 }
01982
  if (map == short_external_string_map()) return short_external_symbol_map();
  if (map == medium_external_string_map()) return medium_external_symbol_map();
  if (map == long_external_string_map()) return long_external_symbol_map();

  if (map == short_external_ascii_string_map()) {
    return short_external_ascii_symbol_map();
  }
  if (map == medium_external_ascii_string_map()) {
    return medium_external_ascii_symbol_map();
  }
  if (map == long_external_ascii_string_map()) {
    return long_external_ascii_symbol_map();
  }

  // No match found.
  return NULL;
01999 }
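// Sketch of how a caller can use the map returned above (hypothetical code,
// based on the contract that NULL means no in-place conversion is possible):
//
//   Map* symbol_map = Heap::SymbolMapForString(str);
//   if (symbol_map != NULL) {
//     str->set_map(symbol_map);   // turn the string into a symbol in place
//   } else {
//     ...  // allocate a fresh symbol and copy the characters instead
//   }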
02000
02001
02002 Object* Heap::AllocateSymbol(unibrow::CharacterStream* buffer,
02003 int chars,
02004 uint32_t length_field) {
02005
02006 ASSERT(static_cast<unsigned>(chars) == buffer->Length());
02007
  // Determine whether the string is ASCII.
  bool is_ascii = true;
02009 while (buffer->has_more()) {
02010 if (buffer->GetNext() > unibrow::Utf8::kMaxOneByteChar) is_ascii = false;
02011 }
02012 buffer->Rewind();
02013
02014
  // Compute map and object size.
  int size;
02016 Map* map;
02017
02018 if (is_ascii) {
02019 if (chars <= String::kMaxShortStringSize) {
02020 map = short_ascii_symbol_map();
02021 } else if (chars <= String::kMaxMediumStringSize) {
02022 map = medium_ascii_symbol_map();
02023 } else {
02024 map = long_ascii_symbol_map();
02025 }
02026 size = SeqAsciiString::SizeFor(chars);
02027 } else {
02028 if (chars <= String::kMaxShortStringSize) {
02029 map = short_symbol_map();
02030 } else if (chars <= String::kMaxMediumStringSize) {
02031 map = medium_symbol_map();
02032 } else {
02033 map = long_symbol_map();
02034 }
02035 size = SeqTwoByteString::SizeFor(chars);
02036 }
02037
02038
  // Allocate the string.
  AllocationSpace space =
      (size > MaxHeapObjectSize()) ? LO_SPACE : OLD_DATA_SPACE;
  Object* result = AllocateRaw(size, space, OLD_DATA_SPACE);
02042 if (result->IsFailure()) return result;
02043
02044 reinterpret_cast<HeapObject*>(result)->set_map(map);
02045
02046 String::cast(result)->set_length_field(length_field);
02047
02048 ASSERT_EQ(size, String::cast(result)->Size());
02049
02050
  // Fill in the characters.
  for (int i = 0; i < chars; i++) {
02052 String::cast(result)->Set(i, buffer->GetNext());
02053 }
02054 return result;
02055 }
02056
02057
02058 Object* Heap::AllocateRawAsciiString(int length, PretenureFlag pretenure) {
02059 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
02060 int size = SeqAsciiString::SizeFor(length);
02061 if (size > MaxHeapObjectSize()) {
02062 space = LO_SPACE;
02063 }
02064
02065
02066
  // Use AllocateRaw rather than Allocate because the object's size cannot be
  // determined from the map alone.
  Object* result = AllocateRaw(size, space, OLD_DATA_SPACE);
02068 if (result->IsFailure()) return result;
02069
02070
  // Determine the map based on the string's length.
  Map* map;
  if (length <= String::kMaxShortStringSize) {
    map = short_ascii_string_map();
  } else if (length <= String::kMaxMediumStringSize) {
    map = medium_ascii_string_map();
  } else {
    map = long_ascii_string_map();
  }

  // Partially initialize the object.
  HeapObject::cast(result)->set_map(map);
  String::cast(result)->set_length(length);
02083 ASSERT_EQ(size, HeapObject::cast(result)->Size());
02084 return result;
02085 }
02086
02087
02088 Object* Heap::AllocateRawTwoByteString(int length, PretenureFlag pretenure) {
02089 AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
02090 int size = SeqTwoByteString::SizeFor(length);
02091 if (size > MaxHeapObjectSize()) {
02092 space = LO_SPACE;
02093 }
02094
02095
02096
02097 Object* result = AllocateRaw(size, space, OLD_DATA_SPACE);
02098 if (result->IsFailure()) return result;
02099
02100
02101 Map* map;
02102 if (length <= String::kMaxShortStringSize) {
02103 map = short_string_map();
02104 } else if (length <= String::kMaxMediumStringSize) {
02105 map = medium_string_map();
02106 } else {
02107 map = long_string_map();
02108 }
02109
02110
02111 HeapObject::cast(result)->set_map(map);
02112 String::cast(result)->set_length(length);
02113 ASSERT_EQ(size, HeapObject::cast(result)->Size());
02114 return result;
02115 }
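// The raw string allocators only set up the map and the length; the caller is
// responsible for filling in the characters, e.g. (hypothetical sketch):
//
//   Object* result = Heap::AllocateRawTwoByteString(2, NOT_TENURED);
//   if (result->IsFailure()) return result;
//   String* str = String::cast(result);
//   str->Set(0, 0x2603);   // arbitrary two-byte characters are fine here
//   str->Set(1, '!');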
02116
02117
02118 Object* Heap::AllocateEmptyFixedArray() {
02119 int size = FixedArray::SizeFor(0);
02120 Object* result = AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE);
02121 if (result->IsFailure()) return result;
02122
02123 reinterpret_cast<Array*>(result)->set_map(fixed_array_map());
02124 reinterpret_cast<Array*>(result)->set_length(0);
02125 return result;
02126 }
02127
02128
02129 Object* Heap::AllocateRawFixedArray(int length) {
02130
  // Use the general function if we're forced to always allocate.
  if (always_allocate()) return AllocateFixedArray(length, NOT_TENURED);
02132
02133 int size = FixedArray::SizeFor(length);
02134 return (size > MaxHeapObjectSize())
02135 ? lo_space_->AllocateRawFixedArray(size)
02136 : new_space_.AllocateRaw(size);
02137 }
02138
02139
02140 Object* Heap::CopyFixedArray(FixedArray* src) {
02141 int len = src->length();
02142 Object* obj = AllocateRawFixedArray(len);
02143 if (obj->IsFailure()) return obj;
  if (Heap::InNewSpace(obj)) {
    // The copy is in new space, so a plain byte copy is enough: the
    // remembered set only has to track pointers from old space into new
    // space.
    HeapObject* dst = HeapObject::cast(obj);
    CopyBlock(reinterpret_cast<Object**>(dst->address()),
              reinterpret_cast<Object**>(src->address()),
              FixedArray::SizeFor(len));
    return obj;
  }
  // Otherwise copy element by element so the remembered set is updated
  // where necessary.
  HeapObject::cast(obj)->set_map(src->map());
  FixedArray* result = FixedArray::cast(obj);
  result->set_length(len);

  // Copy the content.
  WriteBarrierMode mode = result->GetWriteBarrierMode();
  for (int i = 0; i < len; i++) result->set(i, src->get(i), mode);
02157 return result;
02158 }
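// A typical use mirrors the element-copying code near the top of this
// section (hypothetical sketch; 'elements' and 'clone' are illustrative):
//
//   Object* copy = Heap::CopyFixedArray(elements);
//   if (copy->IsFailure()) return copy;
//   JSObject::cast(clone)->set_elements(FixedArray::cast(copy));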
02159
02160
02161 Object* Heap::AllocateFixedArray(int length) {
02162 Object* result = AllocateRawFixedArray(length);
  if (!result->IsFailure()) {
    // Initialize header.
    reinterpret_cast<Array*>(result)->set_map(fixed_array_map());
    FixedArray* array = FixedArray::cast(result);
    array->set_length(length);
    // Initialize body.
    Object* value = undefined_value();
    for (int index = 0; index < length; index++) {
      array->set(index, value, SKIP_WRITE_BARRIER);
    }
02173 }
02174 return result;
02175 }
02176
02177
02178 Object* Heap::AllocateFixedArray(int length, PretenureFlag pretenure) {
02179 ASSERT(empty_fixed_array()->IsFixedArray());
02180 if (length == 0) return empty_fixed_array();
02181
02182 int size = FixedArray::SizeFor(length);
02183 Object* result;
02184 if (size > MaxHeapObjectSize()) {
02185 result = lo_space_->AllocateRawFixedArray(size);
02186 } else {
02187 AllocationSpace space =
02188 (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE;
02189 result = AllocateRaw(size, space, OLD_POINTER_SPACE);
02190 }
02191 if (result->IsFailure()) return result;
02192
02193
02194 reinterpret_cast<Array*>(result)->set_map(fixed_array_map());
02195 FixedArray* array = FixedArray::cast(result);
02196 array->set_length(length);
02197 Object* value = undefined_value();
02198 for (int index = 0; index < length; index++) {
02199 array->set(index, value, SKIP_WRITE_BARRIER);
02200 }
02201 return array;
02202 }
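// Hypothetical usage sketch: a tenured array is placed directly in old
// pointer space (or large object space when big enough) and comes back
// already filled with undefined.
//
//   Object* obj = Heap::AllocateFixedArray(16, TENURED);
//   if (obj->IsFailure()) return obj;
//   FixedArray* array = FixedArray::cast(obj);
//   ASSERT(array->get(0) == Heap::undefined_value());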
02203
02204
02205 Object* Heap::AllocateFixedArrayWithHoles(int length) {
02206 if (length == 0) return empty_fixed_array();
02207 Object* result = AllocateRawFixedArray(length);
02208 if (!result->IsFailure()) {
02209
02210 reinterpret_cast<Array*>(result)->set_map(fixed_array_map());
02211 FixedArray* array = FixedArray::cast(result);
02212 array->set_length(length);
02213
02214 Object* value = the_hole_value();
02215 for (int index = 0; index < length; index++) {
02216 array->set(index, value, SKIP_WRITE_BARRIER);
02217 }
02218 }
02219 return result;
02220 }
02221
02222
02223 Object* Heap::AllocateHashTable(int length) {
02224 Object* result = Heap::AllocateFixedArray(length);
02225 if (result->IsFailure()) return result;
02226 reinterpret_cast<Array*>(result)->set_map(hash_table_map());
02227 ASSERT(result->IsDictionary());
02228 return result;
02229 }
02230
02231
02232 Object* Heap::AllocateGlobalContext() {
02233 Object* result = Heap::AllocateFixedArray(Context::GLOBAL_CONTEXT_SLOTS);
02234 if (result->IsFailure()) return result;
02235 Context* context = reinterpret_cast<Context*>(result);
02236 context->set_map(global_context_map());
02237 ASSERT(context->IsGlobalContext());
02238 ASSERT(result->IsContext());
02239 return result;
02240 }
02241
02242
02243 Object* Heap::AllocateFunctionContext(int length, JSFunction* function) {
02244 ASSERT(length >= Context::MIN_CONTEXT_SLOTS);
02245 Object* result = Heap::AllocateFixedArray(length);
02246 if (result->IsFailure()) return result;
02247 Context* context = reinterpret_cast<Context*>(result);
02248 context->set_map(context_map());
02249 context->set_closure(function);
02250 context->set_fcontext(context);
02251 context->set_previous(NULL);
02252 context->set_extension(NULL);
02253 context->set_global(function->context()->global());
02254 ASSERT(!context->IsGlobalContext());
02255 ASSERT(context->is_function_context());
02256 ASSERT(result->IsContext());
02257 return result;
02258 }
02259
02260
02261 Object* Heap::AllocateWithContext(Context* previous, JSObject* extension) {
02262 Object* result = Heap::AllocateFixedArray(Context::MIN_CONTEXT_SLOTS);
02263 if (result->IsFailure()) return result;
02264 Context* context = reinterpret_cast<Context*>(result);
02265 context->set_map(context_map());
02266 context->set_closure(previous->closure());
02267 context->set_fcontext(previous->fcontext());
02268 context->set_previous(previous);
02269 context->set_extension(extension);
02270 context->set_global(previous->global());
02271 ASSERT(!context->IsGlobalContext());
02272 ASSERT(!context->is_function_context());
02273 ASSERT(result->IsContext());
02274 return result;
02275 }
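// Sketch of the chain this builds (hypothetical caller): a 'with' context
// shares the function context of its predecessor and simply links back to it.
//
//   Object* result = Heap::AllocateWithContext(current, extension_object);
//   if (result->IsFailure()) return result;
//   Context* with_context = Context::cast(result);
//   ASSERT(with_context->previous() == current);
//   ASSERT(with_context->fcontext() == current->fcontext());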
02276
02277
02278 Object* Heap::AllocateStruct(InstanceType type) {
02279 Map* map;
02280 switch (type) {
02281 #define MAKE_CASE(NAME, Name, name) case NAME##_TYPE: map = name##_map(); break;
02282 STRUCT_LIST(MAKE_CASE)
02283 #undef MAKE_CASE
02284 default:
02285 UNREACHABLE();
02286 return Failure::InternalError();
02287 }
02288 int size = map->instance_size();
02289 AllocationSpace space =
02290 (size > MaxHeapObjectSize()) ? LO_SPACE : OLD_POINTER_SPACE;
02291 Object* result = Heap::Allocate(map, space);
02292 if (result->IsFailure()) return result;
02293 Struct::cast(result)->InitializeBody(size);
02294 return result;
02295 }
02296
02297
02298 #ifdef DEBUG
02299
02300 void Heap::Print() {
02301 if (!HasBeenSetup()) return;
02302 Top::PrintStack();
02303 AllSpaces spaces;
02304 while (Space* space = spaces.next()) space->Print();
02305 }
02306
02307
02308 void Heap::ReportCodeStatistics(const char* title) {
02309 PrintF(">>>>>> Code Stats (%s) >>>>>>\n", title);
02310 PagedSpace::ResetCodeStatistics();
02311
02312
  // Only the code space and the large object space are scanned; code that
  // somehow ends up in any other space is not reported here.
  code_space_->CollectCodeStatistics();
02314 lo_space_->CollectCodeStatistics();
02315 PagedSpace::ReportCodeStatistics();
02316 }
02317
02318
02319
02320
02321
02322 void Heap::ReportHeapStatistics(const char* title) {
02323 USE(title);
02324 PrintF(">>>>>> =============== %s (%d) =============== >>>>>>\n",
02325 title, gc_count_);
02326 PrintF("mark-compact GC : %d\n", mc_count_);
02327 PrintF("old_gen_promotion_limit_ %d\n", old_gen_promotion_limit_);
02328 PrintF("old_gen_allocation_limit_ %d\n", old_gen_allocation_limit_);
02329
02330 PrintF("\n");
02331 PrintF("Number of handles : %d\n", HandleScope::NumberOfHandles());
02332 GlobalHandles::PrintStats();
02333 PrintF("\n");
02334
02335 PrintF("Heap statistics : ");
02336 MemoryAllocator::ReportStatistics();
02337 PrintF("To space : ");
02338 new_space_.ReportStatistics();
02339 PrintF("Old pointer space : ");
02340 old_pointer_space_->ReportStatistics();
02341 PrintF("Old data space : ");
02342 old_data_space_->ReportStatistics();
02343 PrintF("Code space : ");
02344 code_space_->ReportStatistics();
02345 PrintF("Map space : ");
02346 map_space_->ReportStatistics();
02347 PrintF("Large object space : ");
02348 lo_space_->ReportStatistics();
02349 PrintF(">>>>>> ========================================= >>>>>>\n");
02350 }
02351
02352 #endif // DEBUG
02353
02354 bool Heap::Contains(HeapObject* value) {
02355 return Contains(value->address());
02356 }
02357
02358
02359 bool Heap::Contains(Address addr) {
02360 if (OS::IsOutsideAllocatedSpace(addr)) return false;
02361 return HasBeenSetup() &&
02362 (new_space_.ToSpaceContains(addr) ||
02363 old_pointer_space_->Contains(addr) ||
02364 old_data_space_->Contains(addr) ||
02365 code_space_->Contains(addr) ||
02366 map_space_->Contains(addr) ||
02367 lo_space_->SlowContains(addr));
02368 }
02369
02370
02371 bool Heap::InSpace(HeapObject* value, AllocationSpace space) {
02372 return InSpace(value->address(), space);
02373 }
02374
02375
02376 bool Heap::InSpace(Address addr, AllocationSpace space) {
02377 if (OS::IsOutsideAllocatedSpace(addr)) return false;
02378 if (!HasBeenSetup()) return false;
02379
02380 switch (space) {
02381 case NEW_SPACE:
02382 return new_space_.ToSpaceContains(addr);
02383 case OLD_POINTER_SPACE:
02384 return old_pointer_space_->Contains(addr);
02385 case OLD_DATA_SPACE:
02386 return old_data_space_->Contains(addr);
02387 case CODE_SPACE:
02388 return code_space_->Contains(addr);
02389 case MAP_SPACE:
02390 return map_space_->Contains(addr);
02391 case LO_SPACE:
02392 return lo_space_->SlowContains(addr);
02393 }
02394
02395 return false;
02396 }
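// These containment predicates are mostly useful in assertions and other
// debugging code, e.g. (hypothetical sketch):
//
//   ASSERT(Heap::Contains(obj));
//   if (Heap::InSpace(obj, NEW_SPACE)) {
//     // obj may still be moved by the next scavenge.
//   }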
02397
02398
02399 #ifdef DEBUG
02400 void Heap::Verify() {
02401 ASSERT(HasBeenSetup());
02402
02403 VerifyPointersVisitor visitor;
02404 Heap::IterateRoots(&visitor);
02405
02406 AllSpaces spaces;
02407 while (Space* space = spaces.next()) {
02408 space->Verify();
02409 }
02410 }
02411 #endif // DEBUG
02412
02413
02414 Object* Heap::LookupSymbol(Vector<const char> string) {
02415 Object* symbol = NULL;
02416 Object* new_table =
02417 SymbolTable::cast(symbol_table_)->LookupSymbol(string, &symbol);
02418 if (new_table->IsFailure()) return new_table;
02419 symbol_table_ = new_table;
02420 ASSERT(symbol != NULL);
02421 return symbol;
02422 }
02423
02424
02425 Object* Heap::LookupSymbol(String* string) {
02426 if (string->IsSymbol()) return string;
02427 Object* symbol = NULL;
02428 Object* new_table =
02429 SymbolTable::cast(symbol_table_)->LookupString(string, &symbol);
02430 if (new_table->IsFailure()) return new_table;
02431 symbol_table_ = new_table;
02432 ASSERT(symbol != NULL);
02433 return symbol;
02434 }
02435
02436
02437 bool Heap::LookupSymbolIfExists(String* string, String** symbol) {
02438 if (string->IsSymbol()) {
02439 *symbol = string;
02440 return true;
02441 }
02442 SymbolTable* table = SymbolTable::cast(symbol_table_);
02443 return table->LookupSymbolIfExists(string, symbol);
02444 }
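// Hypothetical usage sketch, assuming the CStrVector() helper from utils.h:
// symbols are canonicalized, so two lookups of the same characters yield the
// same object and can be compared by pointer.
//
//   Object* a = Heap::LookupSymbol(CStrVector("length"));
//   if (a->IsFailure()) return a;
//   Object* b = Heap::LookupSymbol(CStrVector("length"));
//   if (b->IsFailure()) return b;
//   ASSERT(a == b);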
02445
02446
02447 #ifdef DEBUG
02448 void Heap::ZapFromSpace() {
02449 ASSERT(HAS_HEAP_OBJECT_TAG(kFromSpaceZapValue));
02450 for (Address a = new_space_.FromSpaceLow();
02451 a < new_space_.FromSpaceHigh();
02452 a += kPointerSize) {
02453 Memory::Address_at(a) = kFromSpaceZapValue;
02454 }
02455 }
02456 #endif // DEBUG
02457
02458
02459 void Heap::IterateRSetRange(Address object_start,
02460 Address object_end,
02461 Address rset_start,
02462 ObjectSlotCallback copy_object_func) {
02463 Address object_address = object_start;
02464 Address rset_address = rset_start;
02465
02466
  // Loop over all the pointers in [object_start, object_end).
  while (object_address < object_end) {
    uint32_t rset_word = Memory::uint32_at(rset_address);
    if (rset_word != 0) {
      uint32_t result_rset = rset_word;
      for (uint32_t bitmask = 1; bitmask != 0; bitmask = bitmask << 1) {
        // Do not dereference pointers at or past object_end.
        if ((rset_word & bitmask) != 0 && object_address < object_end) {
          Object** object_p = reinterpret_cast<Object**>(object_address);
          if (Heap::InNewSpace(*object_p)) {
            copy_object_func(reinterpret_cast<HeapObject**>(object_p));
          }
          // If this pointer does not need to be remembered anymore, clear
          // the remembered set bit.
          if (!Heap::InNewSpace(*object_p)) result_rset &= ~bitmask;
        }
        object_address += kPointerSize;
      }
      // Update the remembered set if it has changed.
      if (result_rset != rset_word) {
        Memory::uint32_at(rset_address) = result_rset;
      }
    } else {
      // No bits in the word were set.  This is the common case.
      object_address += kPointerSize * kBitsPerInt;
    }
02492 rset_address += kIntSize;
02493 }
02494 }
02495
02496
02497 void Heap::IterateRSet(PagedSpace* space, ObjectSlotCallback copy_object_func) {
02498 ASSERT(Page::is_rset_in_use());
02499 ASSERT(space == old_pointer_space_ || space == map_space_);
02500
02501 PageIterator it(space, PageIterator::PAGES_IN_USE);
02502 while (it.has_next()) {
02503 Page* page = it.next();
02504 IterateRSetRange(page->ObjectAreaStart(), page->AllocationTop(),
02505 page->RSetStart(), copy_object_func);
02506 }
02507 }
02508
02509
02510 #ifdef DEBUG
02511 #define SYNCHRONIZE_TAG(tag) v->Synchronize(tag)
02512 #else
02513 #define SYNCHRONIZE_TAG(tag)
02514 #endif
02515
02516 void Heap::IterateRoots(ObjectVisitor* v) {
02517 IterateStrongRoots(v);
02518 v->VisitPointer(reinterpret_cast<Object**>(&symbol_table_));
02519 SYNCHRONIZE_TAG("symbol_table");
02520 }
02521
02522
02523 void Heap::IterateStrongRoots(ObjectVisitor* v) {
02524 #define ROOT_ITERATE(type, name) \
02525 v->VisitPointer(bit_cast<Object**, type**>(&name##_));
02526 STRONG_ROOT_LIST(ROOT_ITERATE);
02527 #undef ROOT_ITERATE
02528 SYNCHRONIZE_TAG("strong_root_list");
02529
02530 #define STRUCT_MAP_ITERATE(NAME, Name, name) \
02531 v->VisitPointer(bit_cast<Object**, Map**>(&name##_map_));
02532 STRUCT_LIST(STRUCT_MAP_ITERATE);
02533 #undef STRUCT_MAP_ITERATE
02534 SYNCHRONIZE_TAG("struct_map");
02535
02536 #define SYMBOL_ITERATE(name, string) \
02537 v->VisitPointer(bit_cast<Object**, String**>(&name##_));
02538 SYMBOL_LIST(SYMBOL_ITERATE)
02539 #undef SYMBOL_ITERATE
02540 SYNCHRONIZE_TAG("symbol");
02541
02542 Bootstrapper::Iterate(v);
02543 SYNCHRONIZE_TAG("bootstrapper");
02544 Top::Iterate(v);
02545 SYNCHRONIZE_TAG("top");
02546 Debug::Iterate(v);
02547 SYNCHRONIZE_TAG("debug");
02548 CompilationCache::Iterate(v);
02549 SYNCHRONIZE_TAG("compilationcache");
02550
02551
  // Iterate over local handles in handle scopes.
  HandleScopeImplementer::Iterate(v);
  SYNCHRONIZE_TAG("handlescope");

  // Iterate over the builtin code objects in the heap.
  Builtins::IterateBuiltins(v);
  SYNCHRONIZE_TAG("builtins");

  // Iterate over global handles.
  GlobalHandles::IterateRoots(v);
  SYNCHRONIZE_TAG("globalhandles");

  // Iterate over pointers being held by inactive threads.
  ThreadManager::Iterate(v);
  SYNCHRONIZE_TAG("threadmanager");
02569 }
02570 #undef SYNCHRONIZE_TAG
02571
02572
02573
02574
// Flag is set when the heap has been configured.  The heap can be repeatedly
// configured through the API until it is set up.
static bool heap_configured = false;
02576
02577
02578
02579
02580 bool Heap::ConfigureHeap(int semispace_size, int old_gen_size) {
02581 if (HasBeenSetup()) return false;
02582
02583 if (semispace_size > 0) semispace_size_ = semispace_size;
02584 if (old_gen_size > 0) old_generation_size_ = old_gen_size;
02585
02586
02587
  // The new space size must be a power of two to support single-bit testing
  // for containment.
  semispace_size_ = RoundUpToPowerOf2(semispace_size_);
  initial_semispace_size_ = Min(initial_semispace_size_, semispace_size_);
  young_generation_size_ = 2 * semispace_size_;

  // The old generation is paged.
  old_generation_size_ = RoundUp(old_generation_size_, Page::kPageSize);
02594
02595 heap_configured = true;
02596 return true;
02597 }
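// Hypothetical embedder-side startup sketch: the heap can only be configured
// before it is set up, and the sizes are rounded as described above.
//
//   Heap::ConfigureHeap(1 * MB, 64 * MB);   // semispace size, old gen size
//   Heap::Setup(true);                      // also creates the initial maps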
02598
02599
02600 bool Heap::ConfigureHeapDefault() {
02601 return ConfigureHeap(FLAG_new_space_size, FLAG_old_space_size);
02602 }
02603
02604
02605 int Heap::PromotedSpaceSize() {
02606 return old_pointer_space_->Size()
02607 + old_data_space_->Size()
02608 + code_space_->Size()
02609 + map_space_->Size()
02610 + lo_space_->Size();
02611 }
02612
02613
02614 int Heap::PromotedExternalMemorySize() {
02615 if (amount_of_external_allocated_memory_
02616 <= amount_of_external_allocated_memory_at_last_global_gc_) return 0;
02617 return amount_of_external_allocated_memory_
02618 - amount_of_external_allocated_memory_at_last_global_gc_;
02619 }
02620
02621
02622 bool Heap::Setup(bool create_heap_objects) {
02623
02624
02625
02626
02627
02628
02629
02630
  // If the heap is not yet configured (eg, through the API), configure it.
  // Configuration is based on the flags new-space-size (really the semispace
  // size) and old-space-size if set, or the initial values of
  // semispace_size_ and old_generation_size_ otherwise.
  if (!heap_configured) {
    if (!ConfigureHeapDefault()) return false;
  }

  // Setup memory allocator and reserve an initial chunk twice the size of
  // the young generation: the alignment gap at the start seeds the code
  // space and the remainder after the new space seeds the old spaces.
  if (!MemoryAllocator::Setup(MaxCapacity())) return false;
  void* chunk
      = MemoryAllocator::ReserveInitialChunk(2 * young_generation_size_);
  if (chunk == NULL) return false;
02642
02643
02644
02645
02646
  // Put the code space at the start of the initial chunk, the new space
  // (aligned to its size, which must be a power of 2) after it, and the old
  // spaces after the new space.
  ASSERT(IsPowerOf2(young_generation_size_));
  Address code_space_start = reinterpret_cast<Address>(chunk);
  Address new_space_start = RoundUp(code_space_start, young_generation_size_);
  Address old_space_start = new_space_start + young_generation_size_;
  int code_space_size = new_space_start - code_space_start;
  int old_space_size = young_generation_size_ - code_space_size;
02653
02654
  // Initialize new space.
  if (!new_space_.Setup(new_space_start, young_generation_size_)) return false;
02656
02657
02658
02659 old_pointer_space_ =
02660 new OldSpace(old_generation_size_, OLD_POINTER_SPACE, NOT_EXECUTABLE);
02661 if (old_pointer_space_ == NULL) return false;
02662 if (!old_pointer_space_->Setup(old_space_start, old_space_size >> 1)) {
02663 return false;
02664 }
02665 old_data_space_ =
02666 new OldSpace(old_generation_size_, OLD_DATA_SPACE, NOT_EXECUTABLE);
02667 if (old_data_space_ == NULL) return false;
02668 if (!old_data_space_->Setup(old_space_start + (old_space_size >> 1),
02669 old_space_size >> 1)) {
02670 return false;
02671 }
02672
02673
02674
02675 code_space_ =
02676 new OldSpace(old_generation_size_, CODE_SPACE, EXECUTABLE);
02677 if (code_space_ == NULL) return false;
02678 if (!code_space_->Setup(code_space_start, code_space_size)) return false;
02679
02680
  // Initialize map space.
  map_space_ = new MapSpace(kMaxMapSpaceSize, MAP_SPACE);
  if (map_space_ == NULL) return false;
  // The map space is not given a chunk of the initial reservation; it
  // requests its pages from the memory allocator on demand.
  if (!map_space_->Setup(NULL, 0)) return false;

  // Initialize the large object space, which allocates each object in its
  // own chunk obtained from the memory allocator.
  lo_space_ = new LargeObjectSpace(LO_SPACE);
  if (lo_space_ == NULL) return false;
  if (!lo_space_->Setup()) return false;
02693
  if (create_heap_objects) {
    // Create initial maps.
    if (!CreateInitialMaps()) return false;
    if (!CreateApiObjects()) return false;

    // Create initial objects.
    if (!CreateInitialObjects()) return false;
  }
02702
02703 LOG(IntEvent("heap-capacity", Capacity()));
02704 LOG(IntEvent("heap-available", Available()));
02705
02706 return true;
02707 }
02708
02709
02710 void Heap::TearDown() {
02711 GlobalHandles::TearDown();
02712
02713 new_space_.TearDown();
02714
02715 if (old_pointer_space_ != NULL) {
02716 old_pointer_space_->TearDown();
02717 delete old_pointer_space_;
02718 old_pointer_space_ = NULL;
02719 }
02720
02721 if (old_data_space_ != NULL) {
02722 old_data_space_->TearDown();
02723 delete old_data_space_;
02724 old_data_space_ = NULL;
02725 }
02726
02727 if (code_space_ != NULL) {
02728 code_space_->TearDown();
02729 delete code_space_;
02730 code_space_ = NULL;
02731 }
02732
02733 if (map_space_ != NULL) {
02734 map_space_->TearDown();
02735 delete map_space_;
02736 map_space_ = NULL;
02737 }
02738
02739 if (lo_space_ != NULL) {
02740 lo_space_->TearDown();
02741 delete lo_space_;
02742 lo_space_ = NULL;
02743 }
02744
02745 MemoryAllocator::TearDown();
02746 }
02747
02748
02749 void Heap::Shrink() {
02750
  // Try to shrink all paged spaces.
  map_space_->Shrink();
02752 old_pointer_space_->Shrink();
02753 old_data_space_->Shrink();
02754 code_space_->Shrink();
02755 }
02756
02757
02758 #ifdef DEBUG
02759
02760 class PrintHandleVisitor: public ObjectVisitor {
02761 public:
02762 void VisitPointers(Object** start, Object** end) {
02763 for (Object** p = start; p < end; p++)
02764 PrintF(" handle %p to %p\n", p, *p);
02765 }
02766 };
02767
02768 void Heap::PrintHandles() {
02769 PrintF("Handles:\n");
02770 PrintHandleVisitor v;
02771 HandleScopeImplementer::Iterate(&v);
02772 }
02773
02774 #endif
02775
02776
02777 Space* AllSpaces::next() {
02778 switch (counter_++) {
02779 case NEW_SPACE:
02780 return Heap::new_space();
02781 case OLD_POINTER_SPACE:
02782 return Heap::old_pointer_space();
02783 case OLD_DATA_SPACE:
02784 return Heap::old_data_space();
02785 case CODE_SPACE:
02786 return Heap::code_space();
02787 case MAP_SPACE:
02788 return Heap::map_space();
02789 case LO_SPACE:
02790 return Heap::lo_space();
02791 default:
02792 return NULL;
02793 }
02794 }
02795
02796
02797 PagedSpace* PagedSpaces::next() {
02798 switch (counter_++) {
02799 case OLD_POINTER_SPACE:
02800 return Heap::old_pointer_space();
02801 case OLD_DATA_SPACE:
02802 return Heap::old_data_space();
02803 case CODE_SPACE:
02804 return Heap::code_space();
02805 case MAP_SPACE:
02806 return Heap::map_space();
02807 default:
02808 return NULL;
02809 }
02810 }
02811
02812
02813
02814 OldSpace* OldSpaces::next() {
02815 switch (counter_++) {
02816 case OLD_POINTER_SPACE:
02817 return Heap::old_pointer_space();
02818 case OLD_DATA_SPACE:
02819 return Heap::old_data_space();
02820 case CODE_SPACE:
02821 return Heap::code_space();
02822 default:
02823 return NULL;
02824 }
02825 }
02826
02827
02828 SpaceIterator::SpaceIterator() : current_space_(FIRST_SPACE), iterator_(NULL) {
02829 }
02830
02831
02832 SpaceIterator::~SpaceIterator() {
02833
02834 delete iterator_;
02835 }
02836
02837
02838 bool SpaceIterator::has_next() {
02839
02840 return current_space_ != LAST_SPACE;
02841 }
02842
02843
02844 ObjectIterator* SpaceIterator::next() {
  if (iterator_ != NULL) {
    delete iterator_;
    iterator_ = NULL;
    // Move to the next space.
    current_space_++;
    if (current_space_ > LAST_SPACE) {
      return NULL;
    }
  }

  // Return iterator for the new current space.
  return CreateIterator();
02857 }
02858
02859
02860
02861 ObjectIterator* SpaceIterator::CreateIterator() {
02862 ASSERT(iterator_ == NULL);
02863
02864 switch (current_space_) {
02865 case NEW_SPACE:
02866 iterator_ = new SemiSpaceIterator(Heap::new_space());
02867 break;
02868 case OLD_POINTER_SPACE:
02869 iterator_ = new HeapObjectIterator(Heap::old_pointer_space());
02870 break;
02871 case OLD_DATA_SPACE:
02872 iterator_ = new HeapObjectIterator(Heap::old_data_space());
02873 break;
02874 case CODE_SPACE:
02875 iterator_ = new HeapObjectIterator(Heap::code_space());
02876 break;
02877 case MAP_SPACE:
02878 iterator_ = new HeapObjectIterator(Heap::map_space());
02879 break;
02880 case LO_SPACE:
02881 iterator_ = new LargeObjectIterator(Heap::lo_space());
02882 break;
02883 }
02884
02885
02886 ASSERT(iterator_ != NULL);
02887 return iterator_;
02888 }
02889
02890
02891 HeapIterator::HeapIterator() {
02892 Init();
02893 }
02894
02895
02896 HeapIterator::~HeapIterator() {
02897 Shutdown();
02898 }
02899
02900
02901 void HeapIterator::Init() {
02902
02903 space_iterator_ = new SpaceIterator();
02904 object_iterator_ = space_iterator_->next();
02905 }
02906
02907
02908 void HeapIterator::Shutdown() {
02909
02910 delete space_iterator_;
02911 space_iterator_ = NULL;
02912 object_iterator_ = NULL;
02913 }
02914
02915
02916 bool HeapIterator::has_next() {
02917
  // No iterator means we are done with all objects in all spaces.
  if (object_iterator_ == NULL) return false;

  if (object_iterator_->has_next_object()) {
    // The current iterator still has objects; we are fine.
    return true;
  } else {
    // Move on to the next space that has objects to iterate.
    while (space_iterator_->has_next()) {
      object_iterator_ = space_iterator_->next();
      if (object_iterator_->has_next_object()) {
02928 return true;
02929 }
02930 }
02931 }
02932
02933 object_iterator_ = NULL;
02934 return false;
02935 }
02936
02937
02938 HeapObject* HeapIterator::next() {
02939 if (has_next()) {
02940 return object_iterator_->next_object();
02941 } else {
02942 return NULL;
02943 }
02944 }
02945
02946
02947 void HeapIterator::reset() {
02948
02949 Shutdown();
02950 Init();
02951 }
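// Typical traversal, as used by HeapProfiler::WriteSample below:
//
//   HeapIterator iterator;
//   while (iterator.has_next()) {
//     HeapObject* obj = iterator.next();
//     ...  // process obj
//   }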
02952
02953
02954
02955
02956
02957 #ifdef ENABLE_LOGGING_AND_PROFILING
02958 void HeapProfiler::CollectStats(HeapObject* obj, HistogramInfo* info) {
02959 InstanceType type = obj->map()->instance_type();
02960 ASSERT(0 <= type && type <= LAST_TYPE);
02961 info[type].increment_number(1);
02962 info[type].increment_bytes(obj->Size());
02963 }
02964 #endif
02965
02966
02967 #ifdef ENABLE_LOGGING_AND_PROFILING
02968 void HeapProfiler::WriteSample() {
02969 LOG(HeapSampleBeginEvent("Heap", "allocated"));
02970
02971 HistogramInfo info[LAST_TYPE+1];
02972 #define DEF_TYPE_NAME(name) info[name].set_name(#name);
02973 INSTANCE_TYPE_LIST(DEF_TYPE_NAME)
02974 #undef DEF_TYPE_NAME
02975
02976 HeapIterator iterator;
02977 while (iterator.has_next()) {
02978 CollectStats(iterator.next(), info);
02979 }
02980
02981
  // Lump all the string types together.
  int string_number = 0;
  int string_bytes = 0;
02984 #define INCREMENT_SIZE(type, size, name) \
02985 string_number += info[type].number(); \
02986 string_bytes += info[type].bytes();
02987 STRING_TYPE_LIST(INCREMENT_SIZE)
02988 #undef INCREMENT_SIZE
02989 if (string_bytes > 0) {
02990 LOG(HeapSampleItemEvent("STRING_TYPE", string_number, string_bytes));
02991 }
02992
02993 for (int i = FIRST_NONSTRING_TYPE; i <= LAST_TYPE; ++i) {
02994 if (info[i].bytes() > 0) {
02995 LOG(HeapSampleItemEvent(info[i].name(), info[i].number(),
02996 info[i].bytes()));
02997 }
02998 }
02999
03000 LOG(HeapSampleEndEvent("Heap", "allocated"));
03001 }
03002
03003
03004 #endif
03005
03006
03007
03008 #ifdef DEBUG
03009
03010 static bool search_for_any_global;
03011 static Object* search_target;
03012 static bool found_target;
03013 static List<Object*> object_stack(20);
03014
03015
03016
// Tags 0, 1, and 3 are already used by tagged pointers; use 2 to mark a
// HeapObject as visited during this ad-hoc traversal.
static const int kMarkTag = 2;
03018
03019 static void MarkObjectRecursively(Object** p);
03020 class MarkObjectVisitor : public ObjectVisitor {
03021 public:
  void VisitPointers(Object** start, Object** end) {
    // Scan all HeapObject pointers in [start, end).
    for (Object** p = start; p < end; p++) {
03025 if ((*p)->IsHeapObject())
03026 MarkObjectRecursively(p);
03027 }
03028 }
03029 };
03030
03031 static MarkObjectVisitor mark_visitor;
03032
03033 static void MarkObjectRecursively(Object** p) {
03034 if (!(*p)->IsHeapObject()) return;
03035
03036 HeapObject* obj = HeapObject::cast(*p);
03037
03038 Object* map = obj->map();
03039
  if (!map->IsHeapObject()) return;  // visited before

  if (found_target) return;  // stop if target found
03043 object_stack.Add(obj);
03044 if ((search_for_any_global && obj->IsJSGlobalObject()) ||
03045 (!search_for_any_global && (obj == search_target))) {
03046 found_target = true;
03047 return;
03048 }
03049
03050 if (obj->IsCode()) {
03051 Code::cast(obj)->ConvertICTargetsFromAddressToObject();
03052 }
03053
03054
03055 Map* map_p = reinterpret_cast<Map*>(HeapObject::cast(map));
03056
03057 Address map_addr = map_p->address();
03058
03059 obj->set_map(reinterpret_cast<Map*>(map_addr + kMarkTag));
03060
03061 MarkObjectRecursively(&map);
03062
03063 obj->IterateBody(map_p->instance_type(), obj->SizeFromMap(map_p),
03064 &mark_visitor);
03065
03066 if (!found_target)
03067 object_stack.RemoveLast();
03068 }
03069
03070
03071 static void UnmarkObjectRecursively(Object** p);
03072 class UnmarkObjectVisitor : public ObjectVisitor {
03073 public:
03074 void VisitPointers(Object** start, Object** end) {
03075
03076 for (Object** p = start; p < end; p++) {
03077 if ((*p)->IsHeapObject())
03078 UnmarkObjectRecursively(p);
03079 }
03080 }
03081 };
03082
03083 static UnmarkObjectVisitor unmark_visitor;
03084
03085 static void UnmarkObjectRecursively(Object** p) {
03086 if (!(*p)->IsHeapObject()) return;
03087
03088 HeapObject* obj = HeapObject::cast(*p);
03089
03090 Object* map = obj->map();
03091
03092 if (map->IsHeapObject()) return;
03093
03094 Address map_addr = reinterpret_cast<Address>(map);
03095
03096 map_addr -= kMarkTag;
03097
03098 ASSERT_TAG_ALIGNED(map_addr);
03099
03100 HeapObject* map_p = HeapObject::FromAddress(map_addr);
03101
03102 obj->set_map(reinterpret_cast<Map*>(map_p));
03103
03104 UnmarkObjectRecursively(reinterpret_cast<Object**>(&map_p));
03105
03106 obj->IterateBody(Map::cast(map_p)->instance_type(),
03107 obj->SizeFromMap(Map::cast(map_p)),
03108 &unmark_visitor);
03109
03110 if (obj->IsCode()) {
03111 Code::cast(obj)->ConvertICTargetsFromObjectToAddress();
03112 }
03113 }
03114
03115
03116 static void MarkRootObjectRecursively(Object** root) {
03117 if (search_for_any_global) {
03118 ASSERT(search_target == NULL);
03119 } else {
03120 ASSERT(search_target->IsHeapObject());
03121 }
03122 found_target = false;
03123 object_stack.Clear();
03124
03125 MarkObjectRecursively(root);
03126 UnmarkObjectRecursively(root);
03127
03128 if (found_target) {
03129 PrintF("=====================================\n");
03130 PrintF("==== Path to object ====\n");
03131 PrintF("=====================================\n\n");
03132
03133 ASSERT(!object_stack.is_empty());
03134 for (int i = 0; i < object_stack.length(); i++) {
03135 if (i > 0) PrintF("\n |\n |\n V\n\n");
03136 Object* obj = object_stack[i];
03137 obj->Print();
03138 }
03139 PrintF("=====================================\n");
03140 }
03141 }
03142
03143
03144
03145 class MarkRootVisitor: public ObjectVisitor {
03146 public:
03147 void VisitPointers(Object** start, Object** end) {
03148
03149 for (Object** p = start; p < end; p++) {
03150 if ((*p)->IsHeapObject())
03151 MarkRootObjectRecursively(p);
03152 }
03153 }
03154 };
03155
03156
03157
03158
// Triggers a depth-first traversal of reachable objects from the roots and
// finds a path to a specific heap object and prints it.
void Heap::TracePathToObject() {
03160 search_target = NULL;
03161 search_for_any_global = false;
03162
03163 MarkRootVisitor root_visitor;
03164 IterateRoots(&root_visitor);
03165 }
03166
03167
03168
03169
03170
// Triggers a depth-first traversal of reachable objects from the roots and
// finds a path to any global object and prints it.  Useful for debugging
// leaks.
void Heap::TracePathToGlobal() {
03172 search_target = NULL;
03173 search_for_any_global = true;
03174
03175 MarkRootVisitor root_visitor;
03176 IterateRoots(&root_visitor);
03177 }
03178 #endif
03179
03180
03181 GCTracer::GCTracer()
03182 : start_time_(0.0),
03183 start_size_(0.0),
03184 gc_count_(0),
03185 full_gc_count_(0),
03186 is_compacting_(false),
03187 marked_count_(0) {
03188
03189
  // These two fields reflect the state of the previous full garbage
  // collection.
  previous_has_compacted_ = MarkCompactCollector::HasCompacted();
  previous_marked_count_ = MarkCompactCollector::previous_marked_count();
03192 if (!FLAG_trace_gc) return;
03193 start_time_ = OS::TimeCurrentMillis();
03194 start_size_ = SizeOfHeapObjects();
03195 }
03196
03197
03198 GCTracer::~GCTracer() {
03199 if (!FLAG_trace_gc) return;
03200
03201 PrintF("%s %.1f -> %.1f MB, %d ms.\n",
03202 CollectorString(),
03203 start_size_, SizeOfHeapObjects(),
03204 static_cast<int>(OS::TimeCurrentMillis() - start_time_));
03205 }
03206
03207
03208 const char* GCTracer::CollectorString() {
03209 switch (collector_) {
03210 case SCAVENGER:
03211 return "Scavenge";
03212 case MARK_COMPACTOR:
03213 return MarkCompactCollector::HasCompacted() ? "Mark-compact"
03214 : "Mark-sweep";
03215 }
03216 return "Unknown GC";
03217 }
03218
03219
03220 #ifdef DEBUG
03221 bool Heap::GarbageCollectionGreedyCheck() {
03222 ASSERT(FLAG_gc_greedy);
03223 if (Bootstrapper::IsActive()) return true;
03224 if (disallow_allocation_failure()) return true;
03225 return CollectGarbage(0, NEW_SPACE);
03226 }
03227 #endif
03228
} }  // namespace v8::internal