Merge V8 at r8836: Pick up V8 3.2.10.34

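This roll-up picks up several 3.2-branch fixes visible in the diff
below, including: the ARM Lithium codegen fix for LBranch when the
condition has double representation (with regression test
regress-lbranch-double.js), moving the ignore-out-of-memory flag from
HandleScopeImplementer onto the isolate's ThreadLocalTop, replacing
Isolate::PreInit() with lazy initialization of logging, counters and
the debugger, a GetLocalPropertyNames()/CopyKeysTo() offset fix for
hidden prototype chains with dictionary properties (regress-91517),
and a version bump from 3.2.10.31 to 3.2.10.34.

As a rough illustration only (this snippet is not part of the patch),
the per-thread flag means an embedder may now request the
out-of-memory override before V8 is initialized, per the comment added
in src/top.cc:

    // Illustrative sketch: the flag lives on ThreadLocalTop, so it can
    // be set before the isolate is initialized and is preserved by the
    // later initialization steps.
    v8::V8::IgnoreOutOfMemoryException();
    v8::V8::Initialize();
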
Bug: 5095592
Change-Id: I955924aac6e0bdba591798526c33c4d59fd3dc4f
diff --git a/.gitignore b/.gitignore
index db57d1b..af1b0d7 100644
--- a/.gitignore
+++ b/.gitignore
@@ -19,10 +19,12 @@
 d8_g
 shell
 shell_g
+/build/gyp
 /obj/
 /test/es5conform/data/
 /test/mozilla/data/
 /test/sputnik/sputniktests/
+/test/test262/data/
 /tools/oom_dump/oom_dump
 /tools/oom_dump/oom_dump.o
 /tools/visual_studio/Debug
diff --git a/V8_MERGE_REVISION b/V8_MERGE_REVISION
index aab1a21..b540fa6 100644
--- a/V8_MERGE_REVISION
+++ b/V8_MERGE_REVISION
@@ -2,4 +2,4 @@
 
 We have synced V8 past the last revision used in Chrome 12, as we continue to take 3.2 updates.
 
-http://v8.googlecode.com/svn/branches/3.2@8710
+http://v8.googlecode.com/svn/branches/3.2@8836
diff --git a/preparser/SConscript b/preparser/SConscript
new file mode 100644
index 0000000..1d51e82
--- /dev/null
+++ b/preparser/SConscript
@@ -0,0 +1,38 @@
+# Copyright 2011 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+#       notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+#       copyright notice, this list of conditions and the following
+#       disclaimer in the documentation and/or other materials provided
+#       with the distribution.
+#     * Neither the name of Google Inc. nor the names of its
+#       contributors may be used to endorse or promote products derived
+#       from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from os.path import join
+Import('context')
+
+def ConfigureObjectFiles():
+  env = Environment()
+  env.Replace(**context.flags['preparser'])
+  context.ApplyEnvOverrides(env)
+  return env.Object('preparser-process.cc')
+
+preparser_object = ConfigureObjectFiles()
+Return('preparser_object')
diff --git a/src/SConscript b/src/SConscript
index a68ee3e..06ee907 100755
--- a/src/SConscript
+++ b/src/SConscript
@@ -307,10 +307,7 @@
   env.Replace(**context.flags['v8'])
   context.ApplyEnvOverrides(env)
   env['BUILDERS']['JS2C'] = Builder(action=js2c.JS2C)
-  if 'ENABLE_LOGGING_AND_PROFILING' in env['CPPDEFINES']:
-    env['BUILDERS']['Snapshot'] = Builder(action='$SOURCE $TARGET --logfile "$LOGFILE" --log-snapshot-positions')
-  else:
-    env['BUILDERS']['Snapshot'] = Builder(action='$SOURCE $TARGET')
+  env['BUILDERS']['Snapshot'] = Builder(action='$SOURCE $TARGET --logfile "$LOGFILE" --log-snapshot-positions')
 
   # Build the standard platform-independent source files.
   source_files = context.GetRelevantSources(SOURCES)
diff --git a/src/api.cc b/src/api.cc
index 247507f..e1d6568 100644
--- a/src/api.cc
+++ b/src/api.cc
@@ -89,7 +89,7 @@
     if (has_pending_exception) {                                               \
       if (handle_scope_implementer->CallDepthIsZero() &&                       \
           (isolate)->is_out_of_memory()) {                                     \
-        if (!handle_scope_implementer->ignore_out_of_memory())                 \
+        if (!(isolate)->ignore_out_of_memory())                                \
           i::V8::FatalProcessOutOfMemory(NULL);                                \
       }                                                                        \
       bool call_depth_is_zero = handle_scope_implementer->CallDepthIsZero();   \
@@ -856,7 +856,6 @@
 int TypeSwitch::match(v8::Handle<Value> value) {
   i::Isolate* isolate = i::Isolate::Current();
   LOG_API(isolate, "TypeSwitch::match");
-  USE(isolate);
   i::Handle<i::Object> obj = Utils::OpenHandle(*value);
   i::Handle<i::TypeSwitchInfo> info = Utils::OpenHandle(this);
   i::FixedArray* types = i::FixedArray::cast(info->types());
@@ -3234,7 +3233,6 @@
 void Function::SetName(v8::Handle<v8::String> name) {
   i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
   ENTER_V8(isolate);
-  USE(isolate);
   i::Handle<i::JSFunction> func = Utils::OpenHandle(this);
   func->shared()->set_name(*Utils::OpenHandle(*name));
 }
@@ -3908,8 +3906,8 @@
 Local<Value> v8::External::Wrap(void* data) {
   i::Isolate* isolate = i::Isolate::Current();
   STATIC_ASSERT(sizeof(data) == sizeof(i::Address));
-  LOG_API(isolate, "External::Wrap");
   EnsureInitializedForIsolate(isolate, "v8::External::Wrap()");
+  LOG_API(isolate, "External::Wrap");
   ENTER_V8(isolate);
 
   v8::Local<v8::Value> result = CanBeEncodedAsSmi(data)
@@ -3953,8 +3951,8 @@
 Local<External> v8::External::New(void* data) {
   STATIC_ASSERT(sizeof(data) == sizeof(i::Address));
   i::Isolate* isolate = i::Isolate::Current();
-  LOG_API(isolate, "External::New");
   EnsureInitializedForIsolate(isolate, "v8::External::New()");
+  LOG_API(isolate, "External::New");
   ENTER_V8(isolate);
   return ExternalNewImpl(data);
 }
@@ -4373,8 +4371,7 @@
 
 
 void V8::IgnoreOutOfMemoryException() {
-  EnterIsolateIfNeeded()->handle_scope_implementer()->set_ignore_out_of_memory(
-      true);
+  EnterIsolateIfNeeded()->set_ignore_out_of_memory(true);
 }
 
 
diff --git a/src/api.h b/src/api.h
index 7423d28..d38a1d5 100644
--- a/src/api.h
+++ b/src/api.h
@@ -404,7 +404,6 @@
         entered_contexts_(0),
         saved_contexts_(0),
         spare_(NULL),
-        ignore_out_of_memory_(false),
         call_depth_(0) { }
 
   // Threading support for handle data.
@@ -437,10 +436,6 @@
   inline bool HasSavedContexts();
 
   inline List<internal::Object**>* blocks() { return &blocks_; }
-  inline bool ignore_out_of_memory() { return ignore_out_of_memory_; }
-  inline void set_ignore_out_of_memory(bool value) {
-    ignore_out_of_memory_ = value;
-  }
 
  private:
   void ResetAfterArchive() {
@@ -448,7 +443,6 @@
     entered_contexts_.Initialize(0);
     saved_contexts_.Initialize(0);
     spare_ = NULL;
-    ignore_out_of_memory_ = false;
     call_depth_ = 0;
   }
 
@@ -472,7 +466,6 @@
   // Used as a stack to keep track of saved contexts.
   List<Context*> saved_contexts_;
   Object** spare_;
-  bool ignore_out_of_memory_;
   int call_depth_;
   // This is only used for threading support.
   v8::ImplementationUtilities::HandleScopeData handle_scope_data_;
diff --git a/src/arm/lithium-codegen-arm.cc b/src/arm/lithium-codegen-arm.cc
index 06f3a4e..3dcd427 100644
--- a/src/arm/lithium-codegen-arm.cc
+++ b/src/arm/lithium-codegen-arm.cc
@@ -1422,7 +1422,7 @@
     // Test the double value. Zero and NaN are false.
     __ VFPCompareAndLoadFlags(reg, 0.0, scratch);
     __ tst(scratch, Operand(kVFPZConditionFlagBit | kVFPVConditionFlagBit));
-    EmitBranch(true_block, false_block, ne);
+    EmitBranch(true_block, false_block, eq);
   } else {
     ASSERT(r.IsTagged());
     Register reg = ToRegister(instr->InputAt(0));
diff --git a/src/debug.cc b/src/debug.cc
index 093f38e..6f0431c 100644
--- a/src/debug.cc
+++ b/src/debug.cc
@@ -1986,8 +1986,8 @@
 }
 
 
-Debugger::Debugger()
-    : debugger_access_(OS::CreateMutex()),
+Debugger::Debugger(Isolate* isolate)
+    : debugger_access_(isolate->debugger_access()),
       event_listener_(Handle<Object>()),
       event_listener_data_(Handle<Object>()),
       compiling_natives_(false),
@@ -2003,13 +2003,12 @@
       agent_(NULL),
       command_queue_(kQueueInitialSize),
       command_received_(OS::CreateSemaphore(0)),
-      event_command_queue_(kQueueInitialSize) {
+      event_command_queue_(kQueueInitialSize),
+      isolate_(isolate) {
 }
 
 
 Debugger::~Debugger() {
-  delete debugger_access_;
-  debugger_access_ = 0;
   delete dispatch_handler_access_;
   dispatch_handler_access_ = 0;
   delete command_received_;
diff --git a/src/debug.h b/src/debug.h
index 9366fc3..6be33a6 100644
--- a/src/debug.h
+++ b/src/debug.h
@@ -809,7 +809,7 @@
   bool IsDebuggerActive();
 
  private:
-  Debugger();
+  explicit Debugger(Isolate* isolate);
 
   void CallEventCallback(v8::DebugEvent event,
                          Handle<Object> exec_state,
diff --git a/src/execution.cc b/src/execution.cc
index eb26438..7a2bbc6 100644
--- a/src/execution.cc
+++ b/src/execution.cc
@@ -132,7 +132,7 @@
   if (*has_pending_exception) {
     isolate->ReportPendingMessages();
     if (isolate->pending_exception() == Failure::OutOfMemoryException()) {
-      if (!isolate->handle_scope_implementer()->ignore_out_of_memory()) {
+      if (!isolate->ignore_out_of_memory()) {
         V8::FatalProcessOutOfMemory("JS", true);
       }
     }
diff --git a/src/global-handles.h b/src/global-handles.h
index a1a269f..2171b2c 100644
--- a/src/global-handles.h
+++ b/src/global-handles.h
@@ -32,8 +32,6 @@
 
 #include "list-inl.h"
 
-#include "../include/v8-profiler.h"
-
 namespace v8 {
 namespace internal {
 
diff --git a/src/heap.cc b/src/heap.cc
index 0b4abf3..2b6c11f 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -1518,7 +1518,6 @@
 
 
 void Heap::SwitchScavengingVisitorsTableIfProfilingWasEnabled() {
-#ifdef ENABLE_LOGGING_AND_PROFILING
   if (scavenging_visitors_table_mode_ == LOGGING_AND_PROFILING_ENABLED) {
     // Table was already updated by some isolate.
     return;
@@ -1544,7 +1543,6 @@
     Release_Store(&scavenging_visitors_table_mode_,
                   LOGGING_AND_PROFILING_ENABLED);
   }
-#endif
 }
 
 
diff --git a/src/isolate.cc b/src/isolate.cc
index e42d78e..5b3438f 100644
--- a/src/isolate.cc
+++ b/src/isolate.cc
@@ -70,6 +70,7 @@
   return thread_id;
 }
 
+
 // Create a dummy thread that will wait forever on a semaphore. The only
 // purpose for this thread is to have some stack area to save essential data
 // into for use by a stacks only core dump (aka minidump).
@@ -323,7 +324,6 @@
   // Can't use SetIsolateThreadLocals(default_isolate_, NULL) here
   // becase a non-null thread data may be already set.
   Thread::SetThreadLocal(isolate_key_, default_isolate_);
-  CHECK(default_isolate_->PreInit());
 }
 
 
@@ -417,11 +417,15 @@
       bootstrapper_(NULL),
       runtime_profiler_(NULL),
       compilation_cache_(NULL),
-      counters_(new Counters()),
+      counters_(NULL),
       code_range_(NULL),
+      // Must be initialized early to allow v8::SetResourceConstraints calls.
       break_access_(OS::CreateMutex()),
-      logger_(new Logger()),
-      stats_table_(new StatsTable()),
+      debugger_initialized_(false),
+      // Must be initialized early to allow v8::Debug calls.
+      debugger_access_(OS::CreateMutex()),
+      logger_(NULL),
+      stats_table_(NULL),
       stub_cache_(NULL),
       deoptimizer_data_(NULL),
       capture_stack_trace_for_uncaught_exceptions_(false),
@@ -463,6 +467,9 @@
   simulator_redirection_ = NULL;
 #endif
 
+  thread_manager_ = new ThreadManager();
+  thread_manager_->isolate_ = this;
+
 #ifdef DEBUG
   // heap_histograms_ initializes itself.
   memset(&js_spill_information_, 0, sizeof(js_spill_information_));
@@ -548,7 +555,7 @@
     logger_->TearDown();
 
     // The default isolate is re-initializable due to legacy API.
-    state_ = PREINITIALIZED;
+    state_ = UNINITIALIZED;
   }
 }
 
@@ -635,68 +642,6 @@
 }
 
 
-bool Isolate::PreInit() {
-  if (state_ != UNINITIALIZED) return true;
-
-  TRACE_ISOLATE(preinit);
-
-  ASSERT(Isolate::Current() == this);
-
-#ifdef ENABLE_DEBUGGER_SUPPORT
-  debug_ = new Debug(this);
-  debugger_ = new Debugger();
-  debugger_->isolate_ = this;
-#endif
-
-  memory_allocator_ = new MemoryAllocator();
-  memory_allocator_->isolate_ = this;
-  code_range_ = new CodeRange();
-  code_range_->isolate_ = this;
-
-  // Safe after setting Heap::isolate_, initializing StackGuard and
-  // ensuring that Isolate::Current() == this.
-  heap_.SetStackLimits();
-
-#ifdef DEBUG
-  DisallowAllocationFailure disallow_allocation_failure;
-#endif
-
-#define C(name) isolate_addresses_[Isolate::k_##name] =                        \
-    reinterpret_cast<Address>(name());
-  ISOLATE_ADDRESS_LIST(C)
-  ISOLATE_ADDRESS_LIST_PROF(C)
-#undef C
-
-  string_tracker_ = new StringTracker();
-  string_tracker_->isolate_ = this;
-  thread_manager_ = new ThreadManager();
-  thread_manager_->isolate_ = this;
-  compilation_cache_ = new CompilationCache(this);
-  transcendental_cache_ = new TranscendentalCache();
-  keyed_lookup_cache_ = new KeyedLookupCache();
-  context_slot_cache_ = new ContextSlotCache();
-  descriptor_lookup_cache_ = new DescriptorLookupCache();
-  unicode_cache_ = new UnicodeCache();
-  pc_to_code_cache_ = new PcToCodeCache(this);
-  write_input_buffer_ = new StringInputBuffer();
-  global_handles_ = new GlobalHandles(this);
-  bootstrapper_ = new Bootstrapper();
-  handle_scope_implementer_ = new HandleScopeImplementer();
-  stub_cache_ = new StubCache(this);
-  ast_sentinels_ = new AstSentinels();
-  regexp_stack_ = new RegExpStack();
-  regexp_stack_->isolate_ = this;
-
-#ifdef ENABLE_LOGGING_AND_PROFILING
-  producer_heap_profile_ = new ProducerHeapProfile();
-  producer_heap_profile_->isolate_ = this;
-#endif
-
-  state_ = PREINITIALIZED;
-  return true;
-}
-
-
 void Isolate::InitializeThreadLocal() {
   thread_local_top_.Initialize();
   clear_pending_exception();
@@ -732,19 +677,77 @@
 }
 
 
+void Isolate::InitializeLoggingAndCounters() {
+  if (logger_ == NULL) {
+    logger_ = new Logger;
+  }
+  if (counters_ == NULL) {
+    counters_ = new Counters;
+  }
+}
+
+
+void Isolate::InitializeDebugger() {
+#ifdef ENABLE_DEBUGGER_SUPPORT
+  ScopedLock lock(debugger_access_);
+  if (NoBarrier_Load(&debugger_initialized_)) return;
+  InitializeLoggingAndCounters();
+  debug_ = new Debug(this);
+  debugger_ = new Debugger(this);
+  Release_Store(&debugger_initialized_, true);
+#endif
+}
+
+
 bool Isolate::Init(Deserializer* des) {
   ASSERT(state_ != INITIALIZED);
-
+  ASSERT(Isolate::Current() == this);
   TRACE_ISOLATE(init);
 
-  bool create_heap_objects = des == NULL;
-
 #ifdef DEBUG
   // The initialization process does not handle memory exhaustion.
   DisallowAllocationFailure disallow_allocation_failure;
 #endif
 
-  if (state_ == UNINITIALIZED && !PreInit()) return false;
+  InitializeLoggingAndCounters();
+
+  InitializeDebugger();
+
+  memory_allocator_ = new MemoryAllocator(this);
+  code_range_ = new CodeRange(this);
+
+  // Safe after setting Heap::isolate_, initializing StackGuard and
+  // ensuring that Isolate::Current() == this.
+  heap_.SetStackLimits();
+
+#define C(name) isolate_addresses_[Isolate::k_##name] =                        \
+    reinterpret_cast<Address>(name());
+  ISOLATE_ADDRESS_LIST(C)
+  ISOLATE_ADDRESS_LIST_PROF(C)
+#undef C
+
+  string_tracker_ = new StringTracker();
+  string_tracker_->isolate_ = this;
+  compilation_cache_ = new CompilationCache(this);
+  transcendental_cache_ = new TranscendentalCache();
+  keyed_lookup_cache_ = new KeyedLookupCache();
+  context_slot_cache_ = new ContextSlotCache();
+  descriptor_lookup_cache_ = new DescriptorLookupCache();
+  unicode_cache_ = new UnicodeCache();
+  pc_to_code_cache_ = new PcToCodeCache(this);
+  write_input_buffer_ = new StringInputBuffer();
+  global_handles_ = new GlobalHandles(this);
+  bootstrapper_ = new Bootstrapper();
+  handle_scope_implementer_ = new HandleScopeImplementer();
+  stub_cache_ = new StubCache(this);
+  ast_sentinels_ = new AstSentinels();
+  regexp_stack_ = new RegExpStack();
+  regexp_stack_->isolate_ = this;
+
+#ifdef ENABLE_LOGGING_AND_PROFILING
+  producer_heap_profile_ = new ProducerHeapProfile();
+  producer_heap_profile_->isolate_ = this;
+#endif
 
   // Enable logging before setting up the heap
   logger_->Setup();
@@ -767,7 +770,8 @@
     stack_guard_.InitThread(lock);
   }
 
-  // Setup the object heap
+  // Setup the object heap.
+  const bool create_heap_objects = (des == NULL);
   ASSERT(!heap_.HasBeenSetup());
   if (!heap_.Setup(create_heap_objects)) {
     V8::SetFatalError();
@@ -827,6 +831,16 @@
 }
 
 
+// Initialized lazily to allow early
+// v8::V8::SetAddHistogramSampleFunction calls.
+StatsTable* Isolate::stats_table() {
+  if (stats_table_ == NULL) {
+    stats_table_ = new StatsTable;
+  }
+  return stats_table_;
+}
+
+
 void Isolate::Enter() {
   Isolate* current_isolate = NULL;
   PerIsolateThreadData* current_data = CurrentPerIsolateThreadData();
@@ -866,8 +880,6 @@
 
   SetIsolateThreadLocals(this, data);
 
-  CHECK(PreInit());
-
   // In case it's the first time some thread enters the isolate.
   set_thread_id(data->thread_id());
 }
diff --git a/src/isolate.h b/src/isolate.h
index 35ffcb4..167c8ef 100644
--- a/src/isolate.h
+++ b/src/isolate.h
@@ -267,6 +267,9 @@
   // Call back function to report unsafe JS accesses.
   v8::FailedAccessCheckCallback failed_access_check_callback_;
 
+  // Whether out of memory exceptions should be ignored.
+  bool ignore_out_of_memory_;
+
  private:
   void InitializeInternal();
 
@@ -466,6 +469,13 @@
     return reinterpret_cast<Isolate*>(Thread::GetThreadLocal(isolate_key_));
   }
 
+  // Usually called by Init(), but can be called early e.g. to allow
+  // testing components that require logging but not the whole
+  // isolate.
+  //
+  // Safe to call more than once.
+  void InitializeLoggingAndCounters();
+
   bool Init(Deserializer* des);
 
   bool IsInitialized() { return state_ == INITIALIZED; }
@@ -512,10 +522,12 @@
   // switched to non-legacy behavior).
   static void EnterDefaultIsolate();
 
-  // Debug.
   // Mutex for serializing access to break control structures.
   Mutex* break_access() { return break_access_; }
 
+  // Mutex for serializing access to debugger.
+  Mutex* debugger_access() { return debugger_access_; }
+
   Address get_address_from_id(AddressId id);
 
   // Access to top context (where the current function object was created).
@@ -676,6 +688,12 @@
   // Tells whether the current context has experienced an out of memory
   // exception.
   bool is_out_of_memory();
+  bool ignore_out_of_memory() {
+    return thread_local_top_.ignore_out_of_memory_;
+  }
+  void set_ignore_out_of_memory(bool value) {
+    thread_local_top_.ignore_out_of_memory_ = value;
+  }
 
   void PrintCurrentStackTrace(FILE* out);
   void PrintStackTrace(FILE* out, char* thread_data);
@@ -784,14 +802,24 @@
 #undef GLOBAL_CONTEXT_FIELD_ACCESSOR
 
   Bootstrapper* bootstrapper() { return bootstrapper_; }
-  Counters* counters() { return counters_; }
+  Counters* counters() {
+    // Call InitializeLoggingAndCounters() if logging is needed before
+    // the isolate is fully initialized.
+    ASSERT(counters_ != NULL);
+    return counters_;
+  }
   CodeRange* code_range() { return code_range_; }
   RuntimeProfiler* runtime_profiler() { return runtime_profiler_; }
   CompilationCache* compilation_cache() { return compilation_cache_; }
-  Logger* logger() { return logger_; }
+  Logger* logger() {
+    // Call InitializeLoggingAndCounters() if logging is needed before
+    // the isolate is fully initialized.
+    ASSERT(logger_ != NULL);
+    return logger_;
+  }
   StackGuard* stack_guard() { return &stack_guard_; }
   Heap* heap() { return &heap_; }
-  StatsTable* stats_table() { return stats_table_; }
+  StatsTable* stats_table();
   StubCache* stub_cache() { return stub_cache_; }
   DeoptimizerData* deoptimizer_data() { return deoptimizer_data_; }
   ThreadLocalTop* thread_local_top() { return &thread_local_top_; }
@@ -908,8 +936,14 @@
   void PreallocatedStorageInit(size_t size);
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
-  Debugger* debugger() { return debugger_; }
-  Debug* debug() { return debug_; }
+  Debugger* debugger() {
+    if (!NoBarrier_Load(&debugger_initialized_)) InitializeDebugger();
+    return debugger_;
+  }
+  Debug* debug() {
+    if (!NoBarrier_Load(&debugger_initialized_)) InitializeDebugger();
+    return debug_;
+  }
 #endif
 
 #ifdef ENABLE_LOGGING_AND_PROFILING
@@ -1045,8 +1079,6 @@
   static Isolate* default_isolate_;
   static ThreadDataTable* thread_data_table_;
 
-  bool PreInit();
-
   void Deinit();
 
   static void SetIsolateThreadLocals(Isolate* isolate,
@@ -1054,7 +1086,6 @@
 
   enum State {
     UNINITIALIZED,    // Some components may not have been allocated.
-    PREINITIALIZED,   // Components have been allocated but not initialized.
     INITIALIZED       // All components are fully initialized.
   };
 
@@ -1098,6 +1129,8 @@
 
   void PropagatePendingExceptionToExternalTryCatch();
 
+  void InitializeDebugger();
+
   int stack_trace_nesting_level_;
   StringStream* incomplete_message_;
   // The preallocated memory thread singleton.
@@ -1111,6 +1144,8 @@
   Counters* counters_;
   CodeRange* code_range_;
   Mutex* break_access_;
+  Atomic32 debugger_initialized_;
+  Mutex* debugger_access_;
   Heap heap_;
   Logger* logger_;
   StackGuard stack_guard_;
@@ -1204,6 +1239,7 @@
   friend class ExecutionAccess;
   friend class IsolateInitializer;
   friend class ThreadId;
+  friend class TestMemoryAllocatorScope;
   friend class v8::Isolate;
   friend class v8::Locker;
 
diff --git a/src/log.cc b/src/log.cc
index 6d95094..3ce2072 100644
--- a/src/log.cc
+++ b/src/log.cc
@@ -1550,11 +1550,7 @@
 
 
 Sampler* Logger::sampler() {
-#ifdef ENABLE_LOGGING_AND_PROFILING
   return ticker_;
-#else
-  return NULL;
-#endif
 }
 
 
@@ -1631,10 +1627,8 @@
 
 
 static void ComputeCpuProfiling(Sampler* sampler, void* flag_ptr) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
   bool* flag = reinterpret_cast<bool*>(flag_ptr);
   *flag |= sampler->IsProfiling();
-#endif
 }
 
 
diff --git a/src/objects.cc b/src/objects.cc
index 8491d58..fac83f1 100644
--- a/src/objects.cc
+++ b/src/objects.cc
@@ -8342,7 +8342,8 @@
     }
     ASSERT(storage->length() >= index);
   } else {
-    property_dictionary()->CopyKeysTo(storage);
+    property_dictionary()->CopyKeysTo(storage,
+                                      index);
   }
 }
 
@@ -8926,7 +8927,7 @@
     int, JSObject::DeleteMode);
 
 template void Dictionary<StringDictionaryShape, String*>::CopyKeysTo(
-    FixedArray*);
+    FixedArray*, int);
 
 template int
 Dictionary<StringDictionaryShape, String*>::NumberOfElementsFilterAttributes(
@@ -9982,11 +9983,11 @@
 
 
 template<typename Shape, typename Key>
-void Dictionary<Shape, Key>::CopyKeysTo(FixedArray* storage) {
+void Dictionary<Shape, Key>::CopyKeysTo(
+    FixedArray* storage, int index) {
   ASSERT(storage->length() >= NumberOfElementsFilterAttributes(
       static_cast<PropertyAttributes>(NONE)));
   int capacity = HashTable<Shape, Key>::Capacity();
-  int index = 0;
   for (int i = 0; i < capacity; i++) {
     Object* k = HashTable<Shape, Key>::KeyAt(i);
     if (HashTable<Shape, Key>::IsKey(k)) {
diff --git a/src/objects.h b/src/objects.h
index e966b3d..bc8f42c 100644
--- a/src/objects.h
+++ b/src/objects.h
@@ -2554,7 +2554,7 @@
   // Copies keys to preallocated fixed array.
   void CopyKeysTo(FixedArray* storage, PropertyAttributes filter);
   // Fill in details for properties into storage.
-  void CopyKeysTo(FixedArray* storage);
+  void CopyKeysTo(FixedArray* storage, int index);
 
   // Accessors for next enumeration index.
   void SetNextEnumerationIndex(int index) {
diff --git a/src/runtime.cc b/src/runtime.cc
index 7335da8..855bd41 100644
--- a/src/runtime.cc
+++ b/src/runtime.cc
@@ -4339,9 +4339,10 @@
   // Get the property names.
   jsproto = obj;
   int proto_with_hidden_properties = 0;
+  int next_copy_index = 0;
   for (int i = 0; i < length; i++) {
-    jsproto->GetLocalPropertyNames(*names,
-                                   i == 0 ? 0 : local_property_count[i - 1]);
+    jsproto->GetLocalPropertyNames(*names, next_copy_index);
+    next_copy_index += local_property_count[i];
     if (!GetHiddenProperties(jsproto, false)->IsUndefined()) {
       proto_with_hidden_properties++;
     }
diff --git a/src/spaces.cc b/src/spaces.cc
index 674078c..3db9306 100644
--- a/src/spaces.cc
+++ b/src/spaces.cc
@@ -148,12 +148,12 @@
 // CodeRange
 
 
-CodeRange::CodeRange()
-    : code_range_(NULL),
+CodeRange::CodeRange(Isolate* isolate)
+    : isolate_(isolate),
+      code_range_(NULL),
       free_list_(0),
       allocation_list_(0),
-      current_allocation_block_index_(0),
-      isolate_(NULL) {
+      current_allocation_block_index_(0) {
 }
 
 
@@ -279,8 +279,9 @@
 const int kEstimatedNumberOfChunks = 270;
 
 
-MemoryAllocator::MemoryAllocator()
-    : capacity_(0),
+MemoryAllocator::MemoryAllocator(Isolate* isolate)
+    : isolate_(isolate),
+      capacity_(0),
       capacity_executable_(0),
       size_(0),
       size_executable_(0),
@@ -288,8 +289,7 @@
       chunks_(kEstimatedNumberOfChunks),
       free_chunk_ids_(kEstimatedNumberOfChunks),
       max_nof_chunks_(0),
-      top_(0),
-      isolate_(NULL) {
+      top_(0) {
 }
 
 
diff --git a/src/spaces.h b/src/spaces.h
index bd939d1..f323f85 100644
--- a/src/spaces.h
+++ b/src/spaces.h
@@ -413,6 +413,8 @@
 // manages a range of virtual memory.
 class CodeRange {
  public:
+  explicit CodeRange(Isolate* isolate);
+
   // Reserves a range of virtual memory, but does not commit any of it.
   // Can only be called once, at heap initialization time.
   // Returns false on failure.
@@ -422,9 +424,9 @@
   // manage it.
   void TearDown();
 
-  bool exists() { return code_range_ != NULL; }
+  bool exists() { return this != NULL && code_range_ != NULL; }
   bool contains(Address address) {
-    if (code_range_ == NULL) return false;
+    if (this == NULL || code_range_ == NULL) return false;
     Address start = static_cast<Address>(code_range_->address());
     return start <= address && address < start + code_range_->size();
   }
@@ -437,7 +439,7 @@
   void FreeRawMemory(void* buf, size_t length);
 
  private:
-  CodeRange();
+  Isolate* isolate_;
 
   // The reserved range of virtual memory that all code objects are put in.
   VirtualMemory* code_range_;
@@ -471,10 +473,6 @@
   static int CompareFreeBlockAddress(const FreeBlock* left,
                                      const FreeBlock* right);
 
-  friend class Isolate;
-
-  Isolate* isolate_;
-
   DISALLOW_COPY_AND_ASSIGN(CodeRange);
 };
 
@@ -505,6 +503,8 @@
 
 class MemoryAllocator {
  public:
+  explicit MemoryAllocator(Isolate* isolate);
+
   // Initializes its internal bookkeeping structures.
   // Max capacity of the total space and executable memory limit.
   bool Setup(intptr_t max_capacity, intptr_t capacity_executable);
@@ -675,11 +675,11 @@
 #endif
 
  private:
-  MemoryAllocator();
-
   static const int kChunkSize = kPagesPerChunk * Page::kPageSize;
   static const int kChunkSizeLog2 = kPagesPerChunkLog2 + kPageSizeBits;
 
+  Isolate* isolate_;
+
   // Maximum space size in bytes.
   intptr_t capacity_;
   // Maximum subset of capacity_ that can be executable
@@ -772,10 +772,6 @@
                            Page* prev,
                            Page** last_page_in_use);
 
-  friend class Isolate;
-
-  Isolate* isolate_;
-
   DISALLOW_COPY_AND_ASSIGN(MemoryAllocator);
 };
 
diff --git a/src/top.cc b/src/top.cc
index 842d269..b9207c8 100644
--- a/src/top.cc
+++ b/src/top.cc
@@ -47,6 +47,10 @@
 
 ThreadLocalTop::ThreadLocalTop() {
   InitializeInternal();
+  // This flag may be set using v8::V8::IgnoreOutOfMemoryException()
+  // before an isolate is initialized. The initialize methods below do
+  // not touch it to preserve its value.
+  ignore_out_of_memory_ = false;
 }
 
 
@@ -327,6 +331,7 @@
     incomplete_message_ = &accumulator;
     PrintStack(&accumulator);
     accumulator.OutputToStdOut();
+    InitializeLoggingAndCounters();
     accumulator.Log();
     incomplete_message_ = NULL;
     stack_trace_nesting_level_ = 0;
@@ -947,11 +952,9 @@
 
 
 char* Isolate::ArchiveThread(char* to) {
-#ifdef ENABLE_LOGGING_AND_PROFILING
   if (RuntimeProfiler::IsEnabled() && current_vm_state() == JS) {
     RuntimeProfiler::IsolateExitedJS(this);
   }
-#endif
   memcpy(to, reinterpret_cast<char*>(thread_local_top()),
          sizeof(ThreadLocalTop));
   InitializeThreadLocal();
@@ -971,11 +974,9 @@
   thread_local_top()->simulator_ = Simulator::current(this);
 #endif
 #endif
-#ifdef ENABLE_LOGGING_AND_PROFILING
   if (RuntimeProfiler::IsEnabled() && current_vm_state() == JS) {
     RuntimeProfiler::IsolateEnteredJS(this);
   }
-#endif
   return from + sizeof(ThreadLocalTop);
 }
 
diff --git a/src/version.cc b/src/version.cc
index 7e18e6d..d72a355 100644
--- a/src/version.cc
+++ b/src/version.cc
@@ -35,7 +35,7 @@
 #define MAJOR_VERSION     3
 #define MINOR_VERSION     2
 #define BUILD_NUMBER      10
-#define PATCH_LEVEL       31
+#define PATCH_LEVEL       34
 // Use 1 for candidates and 0 otherwise.
 // (Boolean macro values are not supported by all preprocessors.)
 #define IS_CANDIDATE_VERSION 0
diff --git a/test/cctest/test-alloc.cc b/test/cctest/test-alloc.cc
index 0ccf4b8..83ab1a9 100644
--- a/test/cctest/test-alloc.cc
+++ b/test/cctest/test-alloc.cc
@@ -186,7 +186,9 @@
 TEST(CodeRange) {
   const int code_range_size = 16*MB;
   OS::Setup();
-  Isolate::Current()->code_range()->Setup(code_range_size);
+  Isolate::Current()->InitializeLoggingAndCounters();
+  CodeRange* code_range = new CodeRange(Isolate::Current());
+  code_range->Setup(code_range_size);
   int current_allocated = 0;
   int total_allocated = 0;
   List<Block> blocks(1000);
@@ -198,8 +200,7 @@
       size_t requested = (Page::kPageSize << (Pseudorandom() % 6)) +
            Pseudorandom() % 5000 + 1;
       size_t allocated = 0;
-      void* base = Isolate::Current()->code_range()->
-          AllocateRawMemory(requested, &allocated);
+      void* base = code_range->AllocateRawMemory(requested, &allocated);
       CHECK(base != NULL);
       blocks.Add(Block(base, static_cast<int>(allocated)));
       current_allocated += static_cast<int>(allocated);
@@ -207,8 +208,7 @@
     } else {
       // Free a block.
       int index = Pseudorandom() % blocks.length();
-      Isolate::Current()->code_range()->FreeRawMemory(
-          blocks[index].base, blocks[index].size);
+      code_range->FreeRawMemory(blocks[index].base, blocks[index].size);
       current_allocated -= blocks[index].size;
       if (index < blocks.length() - 1) {
         blocks[index] = blocks.RemoveLast();
@@ -218,5 +218,6 @@
     }
   }
 
-  Isolate::Current()->code_range()->TearDown();
+  code_range->TearDown();
+  delete code_range;
 }
diff --git a/test/cctest/test-debug.cc b/test/cctest/test-debug.cc
index b81129e..7f506db 100644
--- a/test/cctest/test-debug.cc
+++ b/test/cctest/test-debug.cc
@@ -5819,6 +5819,7 @@
 
 
 TEST(DebuggerAgent) {
+  v8::V8::Initialize();
   i::Debugger* debugger = i::Isolate::Current()->debugger();
   // Make sure these ports is not used by other tests to allow tests to run in
   // parallel.
diff --git a/test/cctest/test-heap.cc b/test/cctest/test-heap.cc
index 09aa613..d25f39f 100644
--- a/test/cctest/test-heap.cc
+++ b/test/cctest/test-heap.cc
@@ -291,8 +291,8 @@
 
 
 TEST(GlobalHandles) {
-  GlobalHandles* global_handles = Isolate::Current()->global_handles();
   InitializeVM();
+  GlobalHandles* global_handles = Isolate::Current()->global_handles();
 
   Handle<Object> h1;
   Handle<Object> h2;
@@ -339,8 +339,8 @@
 
 
 TEST(WeakGlobalHandlesScavenge) {
-  GlobalHandles* global_handles = Isolate::Current()->global_handles();
   InitializeVM();
+  GlobalHandles* global_handles = Isolate::Current()->global_handles();
 
   WeakPointerCleared = false;
 
@@ -377,8 +377,8 @@
 
 
 TEST(WeakGlobalHandlesMark) {
-  GlobalHandles* global_handles = Isolate::Current()->global_handles();
   InitializeVM();
+  GlobalHandles* global_handles = Isolate::Current()->global_handles();
 
   WeakPointerCleared = false;
 
@@ -416,8 +416,8 @@
 }
 
 TEST(DeleteWeakGlobalHandle) {
-  GlobalHandles* global_handles = Isolate::Current()->global_handles();
   InitializeVM();
+  GlobalHandles* global_handles = Isolate::Current()->global_handles();
 
   WeakPointerCleared = false;
 
diff --git a/test/cctest/test-log.cc b/test/cctest/test-log.cc
index 17c7387..b43e0cd 100644
--- a/test/cctest/test-log.cc
+++ b/test/cctest/test-log.cc
@@ -29,6 +29,7 @@
   // Log to memory buffer.
   i::FLAG_logfile = "*";
   i::FLAG_log = true;
+  ISOLATE->InitializeLoggingAndCounters();
   LOGGER->Setup();
 }
 
@@ -120,6 +121,7 @@
   // Log to stdout
   i::FLAG_logfile = "-";
   i::FLAG_log = true;
+  ISOLATE->InitializeLoggingAndCounters();
   LOGGER->Setup();
   CHECK_EQ(0, LOGGER->GetLogLines(0, NULL, 0));
   CHECK_EQ(0, LOGGER->GetLogLines(100, NULL, 0));
diff --git a/test/cctest/test-parsing.cc b/test/cctest/test-parsing.cc
index 39856b6..2b06a5c 100755
--- a/test/cctest/test-parsing.cc
+++ b/test/cctest/test-parsing.cc
@@ -134,6 +134,8 @@
 
 
 TEST(ScanHTMLEndComments) {
+  v8::V8::Initialize();
+
   // Regression test. See:
   //    http://code.google.com/p/chromium/issues/detail?id=53548
   // Tests that --> is correctly interpreted as comment-to-end-of-line if there
@@ -245,6 +247,8 @@
 
 
 TEST(StandAlonePreParser) {
+  v8::V8::Initialize();
+
   int marker;
   i::Isolate::Current()->stack_guard()->SetStackLimit(
       reinterpret_cast<uintptr_t>(&marker) - 128 * 1024);
@@ -281,6 +285,8 @@
 
 
 TEST(RegressChromium62639) {
+  v8::V8::Initialize();
+
   int marker;
   i::Isolate::Current()->stack_guard()->SetStackLimit(
       reinterpret_cast<uintptr_t>(&marker) - 128 * 1024);
@@ -302,6 +308,8 @@
 
 
 TEST(Regress928) {
+  v8::V8::Initialize();
+
   // Preparsing didn't consider the catch clause of a try statement
   // as with-content, which made it assume that a function inside
   // the block could be lazily compiled, and an extra, unexpected,
@@ -342,6 +350,8 @@
 
 
 TEST(PreParseOverflow) {
+  v8::V8::Initialize();
+
   int marker;
   i::Isolate::Current()->stack_guard()->SetStackLimit(
       reinterpret_cast<uintptr_t>(&marker) - 128 * 1024);
@@ -592,6 +602,8 @@
 }
 
 TEST(StreamScanner) {
+  v8::V8::Initialize();
+
   const char* str1 = "{ foo get for : */ <- \n\n /*foo*/ bib";
   i::Utf8ToUC16CharacterStream stream1(reinterpret_cast<const i::byte*>(str1),
                                        static_cast<unsigned>(strlen(str1)));
@@ -672,6 +684,8 @@
 
 
 TEST(RegExpScanning) {
+  v8::V8::Initialize();
+
   // RegExp token with added garbage at the end. The scanner should only
   // scan the RegExp until the terminating slash just before "flipperwald".
   TestScanRegExp("/b/flipperwald", "b");
diff --git a/test/cctest/test-serialize.cc b/test/cctest/test-serialize.cc
index 730d72a..4767fc6 100644
--- a/test/cctest/test-serialize.cc
+++ b/test/cctest/test-serialize.cc
@@ -99,10 +99,10 @@
 
 
 TEST(ExternalReferenceEncoder) {
-  OS::Setup();
   Isolate* isolate = i::Isolate::Current();
   isolate->stats_table()->SetCounterFunction(counter_function);
-  HEAP->Setup(false);
+  v8::V8::Initialize();
+
   ExternalReferenceEncoder encoder;
   CHECK_EQ(make_code(BUILTIN, Builtins::kArrayCode),
            Encode(encoder, Builtins::kArrayCode));
@@ -139,10 +139,10 @@
 
 
 TEST(ExternalReferenceDecoder) {
-  OS::Setup();
   Isolate* isolate = i::Isolate::Current();
   isolate->stats_table()->SetCounterFunction(counter_function);
-  HEAP->Setup(false);
+  v8::V8::Initialize();
+
   ExternalReferenceDecoder decoder;
   CHECK_EQ(AddressOf(Builtins::kArrayCode),
            decoder.Decode(make_code(BUILTIN, Builtins::kArrayCode)));
diff --git a/test/cctest/test-spaces.cc b/test/cctest/test-spaces.cc
index de0c41e..0f22ce1 100644
--- a/test/cctest/test-spaces.cc
+++ b/test/cctest/test-spaces.cc
@@ -91,46 +91,74 @@
 }
 
 
+namespace v8 {
+namespace internal {
+
+// Temporarily sets a given allocator in an isolate.
+class TestMemoryAllocatorScope {
+ public:
+  TestMemoryAllocatorScope(Isolate* isolate, MemoryAllocator* allocator)
+      : isolate_(isolate),
+        old_allocator_(isolate->memory_allocator_) {
+    isolate->memory_allocator_ = allocator;
+  }
+
+  ~TestMemoryAllocatorScope() {
+    isolate_->memory_allocator_ = old_allocator_;
+  }
+
+ private:
+  Isolate* isolate_;
+  MemoryAllocator* old_allocator_;
+
+  DISALLOW_COPY_AND_ASSIGN(TestMemoryAllocatorScope);
+};
+
+} }  // namespace v8::internal
+
+
 TEST(MemoryAllocator) {
   OS::Setup();
   Isolate* isolate = Isolate::Current();
-  CHECK(HEAP->ConfigureHeapDefault());
-  CHECK(isolate->memory_allocator()->Setup(HEAP->MaxReserved(),
-                                           HEAP->MaxExecutableSize()));
+  isolate->InitializeLoggingAndCounters();
+  Heap* heap = isolate->heap();
+  CHECK(heap->ConfigureHeapDefault());
+  MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
+  CHECK(memory_allocator->Setup(heap->MaxReserved(),
+                                heap->MaxExecutableSize()));
+  TestMemoryAllocatorScope test_scope(isolate, memory_allocator);
 
-  OldSpace faked_space(HEAP,
-                       HEAP->MaxReserved(),
+  OldSpace faked_space(heap,
+                       heap->MaxReserved(),
                        OLD_POINTER_SPACE,
                        NOT_EXECUTABLE);
   int total_pages = 0;
   int requested = MemoryAllocator::kPagesPerChunk;
   int allocated;
   // If we request n pages, we should get n or n - 1.
-  Page* first_page =
-      isolate->memory_allocator()->AllocatePages(
-          requested, &allocated, &faked_space);
+  Page* first_page = memory_allocator->AllocatePages(
+      requested, &allocated, &faked_space);
   CHECK(first_page->is_valid());
   CHECK(allocated == requested || allocated == requested - 1);
   total_pages += allocated;
 
   Page* last_page = first_page;
   for (Page* p = first_page; p->is_valid(); p = p->next_page()) {
-    CHECK(isolate->memory_allocator()->IsPageInSpace(p, &faked_space));
+    CHECK(memory_allocator->IsPageInSpace(p, &faked_space));
     last_page = p;
   }
 
   // Again, we should get n or n - 1 pages.
-  Page* others =
-      isolate->memory_allocator()->AllocatePages(
-          requested, &allocated, &faked_space);
+  Page* others = memory_allocator->AllocatePages(
+      requested, &allocated, &faked_space);
   CHECK(others->is_valid());
   CHECK(allocated == requested || allocated == requested - 1);
   total_pages += allocated;
 
-  isolate->memory_allocator()->SetNextPage(last_page, others);
+  memory_allocator->SetNextPage(last_page, others);
   int page_count = 0;
   for (Page* p = first_page; p->is_valid(); p = p->next_page()) {
-    CHECK(isolate->memory_allocator()->IsPageInSpace(p, &faked_space));
+    CHECK(memory_allocator->IsPageInSpace(p, &faked_space));
     page_count++;
   }
   CHECK(total_pages == page_count);
@@ -141,34 +169,39 @@
   // Freeing pages at the first chunk starting at or after the second page
   // should free the entire second chunk.  It will return the page it was passed
   // (since the second page was in the first chunk).
-  Page* free_return = isolate->memory_allocator()->FreePages(second_page);
+  Page* free_return = memory_allocator->FreePages(second_page);
   CHECK(free_return == second_page);
-  isolate->memory_allocator()->SetNextPage(first_page, free_return);
+  memory_allocator->SetNextPage(first_page, free_return);
 
   // Freeing pages in the first chunk starting at the first page should free
   // the first chunk and return an invalid page.
-  Page* invalid_page = isolate->memory_allocator()->FreePages(first_page);
+  Page* invalid_page = memory_allocator->FreePages(first_page);
   CHECK(!invalid_page->is_valid());
 
-  isolate->memory_allocator()->TearDown();
+  memory_allocator->TearDown();
+  delete memory_allocator;
 }
 
 
 TEST(NewSpace) {
   OS::Setup();
-  CHECK(HEAP->ConfigureHeapDefault());
-  CHECK(Isolate::Current()->memory_allocator()->Setup(
-      HEAP->MaxReserved(), HEAP->MaxExecutableSize()));
+  Isolate* isolate = Isolate::Current();
+  isolate->InitializeLoggingAndCounters();
+  Heap* heap = isolate->heap();
+  CHECK(heap->ConfigureHeapDefault());
+  MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
+  CHECK(memory_allocator->Setup(heap->MaxReserved(),
+                                heap->MaxExecutableSize()));
+  TestMemoryAllocatorScope test_scope(isolate, memory_allocator);
 
-  NewSpace new_space(HEAP);
+  NewSpace new_space(heap);
 
   void* chunk =
-      Isolate::Current()->memory_allocator()->ReserveInitialChunk(
-          4 * HEAP->ReservedSemiSpaceSize());
+      memory_allocator->ReserveInitialChunk(4 * heap->ReservedSemiSpaceSize());
   CHECK(chunk != NULL);
   Address start = RoundUp(static_cast<Address>(chunk),
-                          2 * HEAP->ReservedSemiSpaceSize());
-  CHECK(new_space.Setup(start, 2 * HEAP->ReservedSemiSpaceSize()));
+                          2 * heap->ReservedSemiSpaceSize());
+  CHECK(new_space.Setup(start, 2 * heap->ReservedSemiSpaceSize()));
   CHECK(new_space.HasBeenSetup());
 
   while (new_space.Available() >= Page::kMaxHeapObjectSize) {
@@ -178,28 +211,33 @@
   }
 
   new_space.TearDown();
-  Isolate::Current()->memory_allocator()->TearDown();
+  memory_allocator->TearDown();
+  delete memory_allocator;
 }
 
 
 TEST(OldSpace) {
   OS::Setup();
-  CHECK(HEAP->ConfigureHeapDefault());
-  CHECK(Isolate::Current()->memory_allocator()->Setup(
-      HEAP->MaxReserved(), HEAP->MaxExecutableSize()));
+  Isolate* isolate = Isolate::Current();
+  isolate->InitializeLoggingAndCounters();
+  Heap* heap = isolate->heap();
+  CHECK(heap->ConfigureHeapDefault());
+  MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
+  CHECK(memory_allocator->Setup(heap->MaxReserved(),
+                                heap->MaxExecutableSize()));
+  TestMemoryAllocatorScope test_scope(isolate, memory_allocator);
 
-  OldSpace* s = new OldSpace(HEAP,
-                             HEAP->MaxOldGenerationSize(),
+  OldSpace* s = new OldSpace(heap,
+                             heap->MaxOldGenerationSize(),
                              OLD_POINTER_SPACE,
                              NOT_EXECUTABLE);
   CHECK(s != NULL);
 
-  void* chunk =
-      Isolate::Current()->memory_allocator()->ReserveInitialChunk(
-          4 * HEAP->ReservedSemiSpaceSize());
+  void* chunk = memory_allocator->ReserveInitialChunk(
+      4 * heap->ReservedSemiSpaceSize());
   CHECK(chunk != NULL);
   Address start = static_cast<Address>(chunk);
-  size_t size = RoundUp(start, 2 * HEAP->ReservedSemiSpaceSize()) - start;
+  size_t size = RoundUp(start, 2 * heap->ReservedSemiSpaceSize()) - start;
 
   CHECK(s->Setup(start, size));
 
@@ -209,13 +247,13 @@
 
   s->TearDown();
   delete s;
-  Isolate::Current()->memory_allocator()->TearDown();
+  memory_allocator->TearDown();
+  delete memory_allocator;
 }
 
 
 TEST(LargeObjectSpace) {
-  OS::Setup();
-  CHECK(HEAP->Setup(false));
+  v8::V8::Initialize();
 
   LargeObjectSpace* lo = HEAP->lo_space();
   CHECK(lo != NULL);
@@ -247,9 +285,4 @@
   CHECK(!lo->IsEmpty());
 
   CHECK(lo->AllocateRaw(lo_size)->IsFailure());
-
-  lo->TearDown();
-  delete lo;
-
-  Isolate::Current()->memory_allocator()->TearDown();
 }
diff --git a/test/mjsunit/compiler/regress-lbranch-double.js b/test/mjsunit/compiler/regress-lbranch-double.js
new file mode 100644
index 0000000..dca6d5b
--- /dev/null
+++ b/test/mjsunit/compiler/regress-lbranch-double.js
@@ -0,0 +1,40 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax
+
+// ARM's code generator for LBranch had a bug, swapping the true/false
+// branches when the representation of the condition is a double.
+
+function foo() {
+  return Math.sqrt(2.6415) ? 88 : 99;
+}
+
+assertEquals(88, foo());
+assertEquals(88, foo());
+%OptimizeFunctionOnNextCall(foo)
+assertEquals(88, foo());
diff --git a/test/mjsunit/regress/regress-91517.js b/test/mjsunit/regress/regress-91517.js
new file mode 100644
index 0000000..68a768c
--- /dev/null
+++ b/test/mjsunit/regress/regress-91517.js
@@ -0,0 +1,112 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+//       notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+//       copyright notice, this list of conditions and the following
+//       disclaimer in the documentation and/or other materials provided
+//       with the distribution.
+//     * Neither the name of Google Inc. nor the names of its
+//       contributors may be used to endorse or promote products derived
+//       from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Getting property names of an object with a prototype chain that
+// triggers dictionary elements in GetLocalPropertyNames() shouldn't
+// crash the runtime
+
+// Flags: --allow-natives-syntax
+
+function Object1() {
+  this.foo = 1;
+}
+
+function Object2() {
+  this.fuz = 2;
+  this.objects = new Object();
+  this.fuz1 = 2;
+  this.fuz2 = 2;
+  this.fuz3 = 2;
+  this.fuz4 = 2;
+  this.fuz5 = 2;
+  this.fuz6 = 2;
+  this.fuz7 = 2;
+  this.fuz8 = 2;
+  this.fuz9 = 2;
+  this.fuz10 = 2;
+  this.fuz11 = 2;
+  this.fuz12 = 2;
+  this.fuz13 = 2;
+  this.fuz14 = 2;
+  this.fuz15 = 2;
+  this.fuz16 = 2;
+  this.fuz17 = 2;
+  // Force dictionary-based properties
+  for (x=1;x<1000;x++) {
+    this["sdf" + x] = 2;
+  }
+}
+
+function Object3() {
+  this.boo = 3;
+}
+
+function Object4() {
+  this.baz = 4;
+}
+
+obj1 = new Object1();
+obj2 = new Object2();
+obj3 = new Object3();
+obj4 = new Object4();
+
+%SetHiddenPrototype(obj4, obj3);
+%SetHiddenPrototype(obj3, obj2);
+%SetHiddenPrototype(obj2, obj1);
+
+function contains(a, obj) {
+  for(var i = 0; i < a.length; i++) {
+    if(a[i] === obj){
+      return true;
+    }
+  }
+  return false;
+}
+names = %GetLocalPropertyNames(obj4);
+assertEquals(1021, names.length);
+assertTrue(contains(names, "baz"));
+assertTrue(contains(names, "boo"));
+assertTrue(contains(names, "foo"));
+assertTrue(contains(names, "fuz"));
+assertTrue(contains(names, "fuz1"));
+assertTrue(contains(names, "fuz2"));
+assertTrue(contains(names, "fuz3"));
+assertTrue(contains(names, "fuz4"));
+assertTrue(contains(names, "fuz5"));
+assertTrue(contains(names, "fuz6"));
+assertTrue(contains(names, "fuz7"));
+assertTrue(contains(names, "fuz8"));
+assertTrue(contains(names, "fuz9"));
+assertTrue(contains(names, "fuz10"));
+assertTrue(contains(names, "fuz11"));
+assertTrue(contains(names, "fuz12"));
+assertTrue(contains(names, "fuz13"));
+assertTrue(contains(names, "fuz14"));
+assertTrue(contains(names, "fuz15"));
+assertTrue(contains(names, "fuz16"));
+assertTrue(contains(names, "fuz17"));
+assertFalse(names[1020] == undefined);