#ifndef V8_HEAP_INL_H_
#define V8_HEAP_INL_H_

#include "log.h"
#include "v8-counters.h"

namespace v8 { namespace internal {

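// Largest object that fits in a regular page; anything bigger must be
// allocated in the large object space.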
int Heap::MaxHeapObjectSize() {
  return Page::kMaxHeapObjectSize;
}


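// Raw allocation entry point.  Allocates uninitialized memory in the
// requested space.  A new-space request that fails while allocation is
// being forced (always_allocate()) falls back to retry_space; otherwise
// a Failure object is returned to the caller.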
Object* Heap::AllocateRaw(int size_in_bytes,
                          AllocationSpace space,
                          AllocationSpace retry_space) {
  ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
  ASSERT(space != NEW_SPACE ||
         retry_space == OLD_POINTER_SPACE ||
         retry_space == OLD_DATA_SPACE);
#ifdef DEBUG
  if (FLAG_gc_interval >= 0 &&
      !disallow_allocation_failure_ &&
      Heap::allocation_timeout_-- <= 0) {
    return Failure::RetryAfterGC(size_in_bytes, space);
  }
  Counters::objs_since_last_full.Increment();
  Counters::objs_since_last_young.Increment();
#endif
  Object* result;
  if (NEW_SPACE == space) {
    result = new_space_.AllocateRaw(size_in_bytes);
    if (always_allocate() && result->IsFailure()) {
      space = retry_space;
    } else {
      return result;
    }
  }

  if (OLD_POINTER_SPACE == space) {
    result = old_pointer_space_->AllocateRaw(size_in_bytes);
  } else if (OLD_DATA_SPACE == space) {
    result = old_data_space_->AllocateRaw(size_in_bytes);
  } else if (CODE_SPACE == space) {
    result = code_space_->AllocateRaw(size_in_bytes);
  } else if (LO_SPACE == space) {
    result = lo_space_->AllocateRaw(size_in_bytes);
  } else {
    ASSERT(MAP_SPACE == space);
    result = map_space_->AllocateRaw(size_in_bytes);
  }
  if (result->IsFailure()) old_gen_exhausted_ = true;
  return result;
}


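// Returns the value as a Smi when it fits, otherwise as a freshly
// allocated heap number.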
Object* Heap::NumberFromInt32(int32_t value) {
  if (Smi::IsValid(value)) return Smi::FromInt(value);
  // Bypass NumberFromDouble to avoid various redundant checks.
  return AllocateHeapNumber(FastI2D(value));
}


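// Unsigned counterpart of NumberFromInt32: values outside the Smi range
// are boxed as heap numbers.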
Object* Heap::NumberFromUint32(uint32_t value) {
  if (static_cast<int32_t>(value) >= 0 &&
      Smi::IsValid(static_cast<int32_t>(value))) {
    return Smi::FromInt(static_cast<int32_t>(value));
  }
  // Bypass NumberFromDouble to avoid various redundant checks.
  return AllocateHeapNumber(FastUI2D(value));
}


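// Fast path for allocating maps: maps always live in map space, so the
// space dispatch in AllocateRaw is skipped.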
Object* Heap::AllocateRawMap(int size_in_bytes) {
#ifdef DEBUG
  Counters::objs_since_last_full.Increment();
  Counters::objs_since_last_young.Increment();
#endif
  Object* result = map_space_->AllocateRaw(size_in_bytes);
  if (result->IsFailure()) old_gen_exhausted_ = true;
  return result;
}


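// Predicates testing which part of the young generation contains the
// given object.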
bool Heap::InNewSpace(Object* object) {
  return new_space_.Contains(object);
}


bool Heap::InFromSpace(Object* object) {
  return new_space_.FromSpaceContains(object);
}


bool Heap::InToSpace(Object* object) {
  return new_space_.ToSpaceContains(object);
}


bool Heap::ShouldBePromoted(Address old_address, int object_size) {
  // An object should be promoted if:
  // - the object has survived a scavenge operation or
  // - to space is already 25% full.
  return old_address < new_space_.age_mark()
      || (new_space_.Size() + object_size) >= (new_space_.Capacity() >> 2);
}


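// Write barrier: records the location of a stored pointer in the
// remembered set unless the host object lives in new space.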
void Heap::RecordWrite(Address address, int offset) {
  if (new_space_.Contains(address)) return;
  ASSERT(!new_space_.FromSpaceContains(address));
  SLOW_ASSERT(Contains(address + offset));
  Page::SetRSet(address, offset);
}


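// Picks the old space a surviving new-space object will be promoted to,
// based on its instance type.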
OldSpace* Heap::TargetSpace(HeapObject* object) {
  InstanceType type = object->map()->instance_type();
  AllocationSpace space = TargetSpaceId(type);
  return (space == OLD_POINTER_SPACE)
      ? old_pointer_space_
      : old_data_space_;
}


AllocationSpace Heap::TargetSpaceId(InstanceType type) {
  // Heap numbers and sequential strings are promoted to old data space,
  // all other object types are promoted to old pointer space.  We do not
  // use object->IsHeapNumber() or object->IsSeqString() here because we
  // already know the instance type.
  ASSERT((type != CODE_TYPE) && (type != MAP_TYPE));
  bool has_pointers =
      type != HEAP_NUMBER_TYPE &&
      (type >= FIRST_NONSTRING_TYPE ||
       (type & kStringRepresentationMask) != kSeqStringTag);
  return has_pointers ? OLD_POINTER_SPACE : OLD_DATA_SPACE;
}


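// Copies byte_size bytes of pointer-aligned data from src to dst.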
void Heap::CopyBlock(Object** dst, Object** src, int byte_size) {
  ASSERT(IsAligned(byte_size, kPointerSize));

  // Use block-copying memcpy if the segment we are copying is big
  // enough to justify the extra call/setup overhead.
  static const int kBlockCopyLimit = 16 * kPointerSize;

  if (byte_size >= kBlockCopyLimit) {
    memcpy(dst, src, byte_size);
  } else {
    int remaining = byte_size / kPointerSize;
    do {
      remaining--;
      *dst++ = *src++;
    } while (remaining > 0);
  }
}


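// Lazily allocates the keyed lookup cache on first use.  The result may
// be a Failure object if the allocation itself fails.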
Object* Heap::GetKeyedLookupCache() {
  if (keyed_lookup_cache()->IsUndefined()) {
    Object* obj = LookupCache::Allocate(4);
    if (obj->IsFailure()) return obj;
    keyed_lookup_cache_ = obj;
  }
  return keyed_lookup_cache();
}


void Heap::SetKeyedLookupCache(LookupCache* cache) {
  keyed_lookup_cache_ = cache;
}


void Heap::ClearKeyedLookupCache() {
  keyed_lookup_cache_ = undefined_value();
}


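// When --gc-greedy is set in debug builds, forces a collection before the
// allocation below to flush out code that is not GC safe.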
#define GC_GREEDY_CHECK() \
  ASSERT(!FLAG_gc_greedy || v8::internal::Heap::GarbageCollectionGreedyCheck())


// Calls the FUNCTION_CALL function and retries it up to three times
// to guarantee that any allocations performed during the call will
// succeed if there's enough memory.

// Warning: Do not use the identifiers __object__ or __scope__ in a
// call to this macro.

#define CALL_AND_RETRY(FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY) \
  do { \
    GC_GREEDY_CHECK(); \
    Object* __object__ = FUNCTION_CALL; \
    if (!__object__->IsFailure()) return RETURN_VALUE; \
    if (__object__->IsOutOfMemoryFailure()) { \
      v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_0"); \
    } \
    if (!__object__->IsRetryAfterGC()) return RETURN_EMPTY; \
    Heap::CollectGarbage(Failure::cast(__object__)->requested(), \
                         Failure::cast(__object__)->allocation_space()); \
    __object__ = FUNCTION_CALL; \
    if (!__object__->IsFailure()) return RETURN_VALUE; \
    if (__object__->IsOutOfMemoryFailure()) { \
      v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_1"); \
    } \
    if (!__object__->IsRetryAfterGC()) return RETURN_EMPTY; \
    Counters::gc_last_resort_from_handles.Increment(); \
    Heap::CollectAllGarbage(); \
    { \
      AlwaysAllocateScope __scope__; \
      __object__ = FUNCTION_CALL; \
    } \
    if (!__object__->IsFailure()) return RETURN_VALUE; \
    if (__object__->IsOutOfMemoryFailure()) { \
      /* Even the last-resort attempt with forced allocation failed. */ \
      v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_2"); \
    } \
    ASSERT(!__object__->IsRetryAfterGC()); \
    return RETURN_EMPTY; \
  } while (false)


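// Wraps an allocation function so the result comes back as a Handle<TYPE>;
// an empty handle means the allocation failed even after the GC retries
// above.  A sketch of a typical call site (the factory function shown here
// is hypothetical):
//
//   Handle<String> NewString(int length) {
//     CALL_HEAP_FUNCTION(Heap::AllocateRawAsciiString(length), String);
//   }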
#define CALL_HEAP_FUNCTION(FUNCTION_CALL, TYPE) \
  CALL_AND_RETRY(FUNCTION_CALL, \
                 Handle<TYPE>(TYPE::cast(__object__)), \
                 Handle<TYPE>())


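// Same retry discipline for heap calls whose callers do not need the
// result value.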
#define CALL_HEAP_FUNCTION_VOID(FUNCTION_CALL) \
  CALL_AND_RETRY(FUNCTION_CALL, , )


#ifdef DEBUG

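// Flips the allocation guard checked by AllocateRaw and returns the
// previous state so callers can restore it afterwards.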
inline bool Heap::allow_allocation(bool new_state) {
  bool old = allocation_allowed_;
  allocation_allowed_ = new_state;
  return old;
}

#endif


} }  // namespace v8::internal

#endif  // V8_HEAP_INL_H_