Upgrade to V8 3.5
Merge V8 3.5.10.24
Simple merge; only the makefiles required updates.
Bug: 5688872
Change-Id: I0acdb9a1a53919d84e9a7525308e8371739d2f06
diff --git a/.gitignore b/.gitignore
index 7219675..253639d 100644
--- a/.gitignore
+++ b/.gitignore
@@ -21,6 +21,7 @@
shell_g
/build/gyp
/obj/
+/out/
/test/es5conform/data/
/test/mozilla/data/
/test/sputnik/sputniktests/
@@ -31,5 +32,4 @@
/tools/visual_studio/Release
/xcodebuild/
TAGS
-Makefile
*.Makefile
diff --git a/Android.libv8.mk b/Android.libv8.mk
index e2978c7..96d632e 100644
--- a/Android.libv8.mk
+++ b/Android.libv8.mk
@@ -16,6 +16,7 @@
# and V8_LOCAL_JS_LIBRARY_FILES
V8_LOCAL_SRC_FILES :=
V8_LOCAL_JS_LIBRARY_FILES :=
+V8_LOCAL_JS_EXPERIMENTAL_LIBRARY_FILES :=
include $(LOCAL_PATH)/Android.v8common.mk
# Target can only be linux
@@ -26,7 +27,7 @@
LOCAL_SRC_FILES := $(V8_LOCAL_SRC_FILES)
LOCAL_JS_LIBRARY_FILES := $(addprefix $(LOCAL_PATH)/, $(V8_LOCAL_JS_LIBRARY_FILES))
-LOCAL_JS_EXPERIMENTAL_LIBRARY_FILES := $(addprefix $(LOCAL_PATH)/, src/proxy.js)
+LOCAL_JS_EXPERIMENTAL_LIBRARY_FILES := $(addprefix $(LOCAL_PATH)/, $(V8_LOCAL_JS_EXPERIMENTAL_LIBRARY_FILES))
# Copy js2c.py to intermediates directory and invoke there to avoid generating
# jsmin.pyc in the source directory
diff --git a/Android.mksnapshot.mk b/Android.mksnapshot.mk
index 8f54be7..07da14b 100644
--- a/Android.mksnapshot.mk
+++ b/Android.mksnapshot.mk
@@ -12,6 +12,7 @@
V8_LOCAL_SRC_FILES :=
V8_LOCAL_JS_LIBRARY_FILES :=
+V8_LOCAL_JS_EXPERIMENTAL_LIBRARY_FILES :=
include $(LOCAL_PATH)/Android.v8common.mk
V8_LOCAL_SRC_FILES += \
@@ -33,7 +34,7 @@
LOCAL_SRC_FILES := $(V8_LOCAL_SRC_FILES)
LOCAL_JS_LIBRARY_FILES := $(addprefix $(LOCAL_PATH)/, $(V8_LOCAL_JS_LIBRARY_FILES))
-LOCAL_JS_EXPERIMENTAL_LIBRARY_FILES := $(addprefix $(LOCAL_PATH)/, src/proxy.js)
+LOCAL_JS_EXPERIMENTAL_LIBRARY_FILES := $(addprefix $(LOCAL_PATH)/, $(V8_LOCAL_JS_EXPERIMENTAL_LIBRARY_FILES))
# Copy js2c.py to intermediates directory and invoke there to avoid generating
# jsmin.pyc in the source directory
diff --git a/Android.v8common.mk b/Android.v8common.mk
index d318f0a..cb64a81 100644
--- a/Android.v8common.mk
+++ b/Android.v8common.mk
@@ -29,6 +29,7 @@
src/disassembler.cc \
src/diy-fp.cc \
src/dtoa.cc \
+ src/elements.cc \
src/execution.cc \
src/extensions/externalize-string-extension.cc \
src/extensions/gc-extension.cc \
@@ -136,7 +137,7 @@
src/uri.js \
src/math.js \
src/messages.js \
- src/apinatives.js
+ src/apinatives.js
# These JS library sources must follow the above sources but their order is not
# important.
@@ -150,3 +151,8 @@
V8_LOCAL_JS_LIBRARY_FILES += \
src/macros.py
+
+V8_LOCAL_JS_EXPERIMENTAL_LIBRARY_FILES := \
+ src/proxy.js \
+ src/weakmap.js
+
diff --git a/ChangeLog b/ChangeLog
index 6901729..02d0ebb 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,8 +1,134 @@
+2011-08-31: Version 3.5.10
+
+ Added dependency of v8_base on WinSocket2 Windows library in
+ the GYP-build.
+
+ Various bugfixes.
+
+
+2011-08-29: Version 3.5.9
+
+ Made FromPropertyDescriptor not trigger inherited setters.
+
+ Fixed .gyp files to work on the ARM simulator.
+
+ Fixed shared library build warnings for MSVS.
+
+
+2011-08-24: Version 3.5.8
+
+ Added V8EXPORT attributes for v8::Array::CheckCast and
+ v8::Number::CheckCast.
+
+ Made a slight API change enabling opting out from null termination
+ in String::Write*().
+
+ Fixed arm build for gcc-4.6.
+
+
+2011-08-22: Version 3.5.7
+
+ Make scanner handle invalid unicode escapes in identifiers correctly.
+
+ Make regexp flag parsing stricter.
+
+ Fix several memory leaks.
+
+
+2011-08-17: Version 3.5.6
+
+ Fixed issue that could cause crashes when running with --heap-stats.
+
+ Fixed compilation on Linux 2.6.9 and older.
+
+ Fixed live-object-list to work with isolates.
+
+ Fixed memory leaks in zones and isolates.
+
+ Fixed a performance regression for TypedArrays on x64.
+
+ Stability improvements on all platforms.
+
+
+2011-08-15: Version 3.5.5
+
+ Fixed bugs involving negative zero and the optimizing compiler.
+
+ Fixed optimized version of Function.apply(x, arguments). (issue 1592)
+
+ Eliminated uses of deprecated ARM instructions.
+
+ Sped up Math.floor by using SSE 4.1 roundsd instruction on ia32.
+
+ Removed restriction on the size of disassembled code that is printed.
+
+
+2011-08-10: Version 3.5.4
+
+ Added a preliminary implementation of ES Harmony weak maps. Weak
+ maps can be enabled by the flag --harmony-weakmaps.
+
+ Introduced a toplevel Makefile to support GYP-based building. GYP
+ can be obtained from http://gyp.googlecode.com.
+
+ Fixed a bug in the length property of functions created by
+ Function.prototype.bind.
+
+ Reduced malloc heap allocation on process startup.
+
+ Several important code generation bug fixes.
+
+ Performance improvements on all platforms.
+
+
+2011-08-03: Version 3.5.3
+
+ MIPS: Port of fix to ClassOf check from ARM.
+ Patch from Paul Lind <plind44@gmail.com>.
+
+ Stopped using mprotect on Cygwin.
+ Avoided uninitialized member warning on gcc 4.3.4
+ Both patches by Bert Belder.
+
+ Bug fixes and performance improvements on all platforms.
+
+
+2011-08-01: Version 3.5.2
+
+ Performance improvements on all platforms.
+
+
+2011-07-28: Version 3.5.1
+
+ Fixed setting the readonly flag on the prototype property using the
+ API call FunctionTemplate::SetPrototypeAttributes (issue 1539).
+
+ Changed the tools/test.py script to use d8 instead of shell for
+ testing.
+
+ Fixed crash in ToBooleanStub when GC happens during invocation.
+
+ Enabled automatic unboxing of double arrays.
+
+ Performance improvements on all platforms.
+
+
+2011-07-25: Version 3.5.0
+
+ Implemented Object.prototype.{hasOwnProperty, propertyIsEnumerable} for
+ proxies.
+
+ Removed logging to memory support.
+
+ Bugfixes and performance work.
+
+
2011-07-20: Version 3.4.14
Fix the debugger for strict-mode functions. (Chromium issue 89236)
- Add GetPropertyAttribute method for Object in the API. (Patch by Peter Varga)
+ Add GetPropertyAttribute method for Object in the API. (Patch by
+ Peter Varga)
Fix -Wunused-but-set-variable for gcc-4.6 on x64. (Issue 1291)
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..3008779
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,171 @@
+# Copyright 2011 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+# Variable default definitions. Override them by exporting them in your shell.
+CXX ?= "g++" # For distcc: export CXX="distcc g++"
+LINK ?= "g++"
+OUTDIR ?= out
+TESTJOBS ?= -j16
+GYPFLAGS ?=
+
+# Special build flags. Use them like this: "make library=shared"
+
+# library=shared || component=shared_library
+ifeq ($(library), shared)
+ GYPFLAGS += -Dcomponent=shared_library
+endif
+ifdef component
+ GYPFLAGS += -Dcomponent=$(component)
+endif
+# console=readline
+ifdef console
+ GYPFLAGS += -Dconsole=$(console)
+endif
+# disassembler=on
+ifeq ($(disassembler), on)
+ GYPFLAGS += -Dv8_enable_disassembler=1
+endif
+# snapshot=off
+ifeq ($(snapshot), off)
+ GYPFLAGS += -Dv8_use_snapshot='false'
+endif
+# gdbjit=on
+ifeq ($(gdbjit), on)
+ GYPFLAGS += -Dv8_enable_gdbjit=1
+endif
+# liveobjectlist=on
+ifeq ($(liveobjectlist), on)
+ GYPFLAGS += -Dv8_use_liveobjectlist=true
+endif
+# vfp3=off
+ifeq ($(vfp3), off)
+ GYPFLAGS += -Dv8_can_use_vfp_instructions=false
+else
+ GYPFLAGS += -Dv8_can_use_vfp_instructions=true
+endif
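+# Illustrative combinations of the flags above (they can be mixed freely on
+# a single command line):
+#   make library=shared console=readline
+#   make disassembler=on snapshot=off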
+
+# ----------------- available targets: --------------------
+# - any arch listed in ARCHES (see below)
+# - any mode listed in MODES
+# - every combination <arch>.<mode>, e.g. "ia32.release"
+# - any of the above with .check appended, e.g. "ia32.release.check"
+# - default (no target specified): build all ARCHES and MODES
+# - "check": build all targets and run all tests
+# - "<arch>.clean" for any <arch> in ARCHES
+# - "clean": clean all ARCHES
+
+# ----------------- internal stuff ------------------------
+
+# Architectures and modes to be compiled. Consider these to be internal
+# variables, don't override them (use the targets instead).
+ARCHES = ia32 x64 arm
+MODES = release debug
+
+# List of files that trigger Makefile regeneration:
+GYPFILES = build/all.gyp build/common.gypi build/standalone.gypi \
+ preparser/preparser.gyp samples/samples.gyp src/d8.gyp \
+ test/cctest/cctest.gyp tools/gyp/v8.gyp
+
+# Generates all combinations of ARCHES and MODES, e.g. "ia32.release".
+BUILDS = $(foreach mode,$(MODES),$(addsuffix .$(mode),$(ARCHES)))
+# Generates corresponding test targets, e.g. "ia32.release.check".
+CHECKS = $(addsuffix .check,$(BUILDS))
+# File where previously used GYPFLAGS are stored.
+ENVFILE = $(OUTDIR)/environment
+
+.PHONY: all clean $(ENVFILE).new \
+ $(ARCHES) $(MODES) $(BUILDS) $(addsuffix .clean,$(ARCHES))
+
+# Target definitions. "all" is the default.
+all: $(MODES)
+
+# Compile targets. MODES and ARCHES are convenience targets.
+.SECONDEXPANSION:
+$(MODES): $(addsuffix .$$@,$(ARCHES))
+
+$(ARCHES): $(addprefix $$@.,$(MODES))
+
+# Defines how to build a particular target (e.g. ia32.release).
+$(BUILDS): $(OUTDIR)/Makefile-$$(basename $$@)
+ @$(MAKE) -C "$(OUTDIR)" -f Makefile-$(basename $@) \
+ CXX="$(CXX)" LINK="$(LINK)" \
+ BUILDTYPE=$(shell echo $(subst .,,$(suffix $@)) | \
+ python -c "print raw_input().capitalize()") \
+ builddir="$(shell pwd)/$(OUTDIR)/$@"
+
+# Test targets.
+check: all
+ @tools/test-wrapper-gypbuild.py $(TESTJOBS) --outdir=$(OUTDIR)
+
+$(addsuffix .check,$(MODES)): $$(basename $$@)
+ @tools/test-wrapper-gypbuild.py $(TESTJOBS) --outdir=$(OUTDIR) \
+ --mode=$(basename $@)
+
+$(addsuffix .check,$(ARCHES)): $$(basename $$@)
+ @tools/test-wrapper-gypbuild.py $(TESTJOBS) --outdir=$(OUTDIR) \
+ --arch=$(basename $@)
+
+$(CHECKS): $$(basename $$@)
+ @tools/test-wrapper-gypbuild.py $(TESTJOBS) --outdir=$(OUTDIR) \
+ --arch-and-mode=$(basename $@)
+
+# Clean targets. You can clean each architecture individually, or everything.
+$(addsuffix .clean,$(ARCHES)):
+ rm -f $(OUTDIR)/Makefile-$(basename $@)
+ rm -rf $(OUTDIR)/$(basename $@).release
+ rm -rf $(OUTDIR)/$(basename $@).debug
+ find $(OUTDIR) -regex '.*\(host\|target\)-$(basename $@)\.mk' -delete
+
+clean: $(addsuffix .clean,$(ARCHES))
+
+# GYP file generation targets.
+$(OUTDIR)/Makefile-ia32: $(GYPFILES) $(ENVFILE)
+ build/gyp/gyp --generator-output="$(OUTDIR)" build/all.gyp \
+ -Ibuild/standalone.gypi --depth=. -Dtarget_arch=ia32 \
+ -S-ia32 $(GYPFLAGS)
+
+$(OUTDIR)/Makefile-x64: $(GYPFILES) $(ENVFILE)
+ build/gyp/gyp --generator-output="$(OUTDIR)" build/all.gyp \
+ -Ibuild/standalone.gypi --depth=. -Dtarget_arch=x64 \
+ -S-x64 $(GYPFLAGS)
+
+$(OUTDIR)/Makefile-arm: $(GYPFILES) $(ENVFILE)
+ build/gyp/gyp --generator-output="$(OUTDIR)" build/all.gyp \
+ -Ibuild/standalone.gypi --depth=. -Ibuild/armu.gypi \
+ -S-arm $(GYPFLAGS)
+
+# Replaces the old with the new environment file if they're different, which
+# will trigger GYP to regenerate Makefiles.
+$(ENVFILE): $(ENVFILE).new
+ @if test -r $(ENVFILE) && cmp $(ENVFILE).new $(ENVFILE) >/dev/null; \
+ then rm $(ENVFILE).new; \
+ else mv $(ENVFILE).new $(ENVFILE); fi
+
+# Stores current GYPFLAGS in a file.
+$(ENVFILE).new:
+ @mkdir -p $(OUTDIR); echo "GYPFLAGS=$(GYPFLAGS)" > $(ENVFILE).new;
diff --git a/SConstruct b/SConstruct
index 8e16a78..f9c33ca 100644
--- a/SConstruct
+++ b/SConstruct
@@ -153,13 +153,19 @@
}
},
'armeabi:softfp' : {
- 'CPPDEFINES' : ['USE_EABI_HARDFLOAT=0', 'CAN_USE_VFP_INSTRUCTIONS'],
+ 'CPPDEFINES' : ['USE_EABI_HARDFLOAT=0'],
+ 'vfp3:on': {
+ 'CPPDEFINES' : ['CAN_USE_VFP_INSTRUCTIONS']
+ },
'simulator:none': {
'CCFLAGS': ['-mfloat-abi=softfp'],
}
},
'armeabi:hard' : {
- 'CPPDEFINES' : ['USE_EABI_HARDFLOAT=1', 'CAN_USE_VFP_INSTRUCTIONS'],
+ 'CPPDEFINES' : ['USE_EABI_HARDFLOAT=1'],
+ 'vfp3:on': {
+ 'CPPDEFINES' : ['CAN_USE_VFP_INSTRUCTIONS']
+ },
'simulator:none': {
'CCFLAGS': ['-mfloat-abi=hard'],
}
@@ -436,7 +442,7 @@
},
'arch:x64': {
'CPPDEFINES': ['V8_TARGET_ARCH_X64'],
- 'LINKFLAGS': ['/STACK:2091752']
+ 'LINKFLAGS': ['/STACK:2097152']
},
}
}
@@ -496,7 +502,10 @@
}
},
'armeabi:hard' : {
- 'CPPDEFINES' : ['USE_EABI_HARDFLOAT=1', 'CAN_USE_VFP_INSTRUCTIONS'],
+ 'CPPDEFINES' : ['USE_EABI_HARDFLOAT=1'],
+ 'vfp3:on': {
+ 'CPPDEFINES' : ['CAN_USE_VFP_INSTRUCTIONS']
+ },
'simulator:none': {
'CCFLAGS': ['-mfloat-abi=hard'],
}
@@ -601,7 +610,7 @@
},
'arch:x64': {
'CPPDEFINES': ['V8_TARGET_ARCH_X64', 'WIN32'],
- 'LINKFLAGS': ['/MACHINE:X64', '/STACK:2091752']
+ 'LINKFLAGS': ['/MACHINE:X64', '/STACK:2097152']
},
'mode:debug': {
'CCFLAGS': ['/Od'],
@@ -756,7 +765,7 @@
},
'arch:x64': {
'CPPDEFINES': ['V8_TARGET_ARCH_X64', 'WIN32'],
- 'LINKFLAGS': ['/MACHINE:X64', '/STACK:2091752']
+ 'LINKFLAGS': ['/MACHINE:X64', '/STACK:2097152']
},
'mode:debug': {
'CCFLAGS': ['/Od'],
@@ -822,6 +831,57 @@
'msvc': {
'all': {
'LIBS': ['winmm', 'ws2_32']
+ },
+ 'verbose:off': {
+ 'CCFLAGS': ['/nologo'],
+ 'LINKFLAGS': ['/NOLOGO']
+ },
+ 'verbose:on': {
+ 'LINKFLAGS': ['/VERBOSE']
+ },
+ 'prof:on': {
+ 'LINKFLAGS': ['/MAP']
+ },
+ 'mode:release': {
+ 'CCFLAGS': ['/O2'],
+ 'LINKFLAGS': ['/OPT:REF', '/OPT:ICF'],
+ 'msvcrt:static': {
+ 'CCFLAGS': ['/MT']
+ },
+ 'msvcrt:shared': {
+ 'CCFLAGS': ['/MD']
+ },
+ 'msvcltcg:on': {
+ 'CCFLAGS': ['/GL'],
+ 'pgo:off': {
+ 'LINKFLAGS': ['/LTCG'],
+ },
+ },
+ 'pgo:instrument': {
+ 'LINKFLAGS': ['/LTCG:PGI']
+ },
+ 'pgo:optimize': {
+ 'LINKFLAGS': ['/LTCG:PGO']
+ }
+ },
+ 'arch:ia32': {
+ 'CPPDEFINES': ['V8_TARGET_ARCH_IA32', 'WIN32'],
+ 'LINKFLAGS': ['/MACHINE:X86']
+ },
+ 'arch:x64': {
+ 'CPPDEFINES': ['V8_TARGET_ARCH_X64', 'WIN32'],
+ 'LINKFLAGS': ['/MACHINE:X64', '/STACK:2097152']
+ },
+ 'mode:debug': {
+ 'CCFLAGS': ['/Od'],
+ 'LINKFLAGS': ['/DEBUG'],
+ 'CPPDEFINES': ['DEBUG'],
+ 'msvcrt:static': {
+ 'CCFLAGS': ['/MTd']
+ },
+ 'msvcrt:shared': {
+ 'CCFLAGS': ['/MDd']
+ }
}
}
}
@@ -1039,6 +1099,12 @@
'default': 'off',
'help': 'compress startup data (snapshot) [Linux only]'
},
+ 'vfp3': {
+ 'values': ['on', 'off'],
+ 'default': 'on',
+ 'help': 'use vfp3 instructions when building the snapshot [Arm only]'
+ },
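+  # Illustrative use of the new option for an ARM snapshot build:
+  #   scons arch=arm snapshot=on vfp3=off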
+
}
ALL_OPTIONS = dict(PLATFORM_OPTIONS, **SIMPLE_OPTIONS)
@@ -1343,10 +1409,12 @@
env['SONAME'] = soname
# Build the object files by invoking SCons recursively.
+ d8_env = Environment(tools=tools)
+ d8_env.Replace(**context.flags['d8'])
(object_files, shell_files, mksnapshot, preparser_files) = env.SConscript(
join('src', 'SConscript'),
build_dir=join('obj', target_id),
- exports='context tools',
+ exports='context tools d8_env',
duplicate=False
)
@@ -1375,8 +1443,6 @@
context.library_targets.append(library)
context.library_targets.append(preparser_library)
- d8_env = Environment(tools=tools)
- d8_env.Replace(**context.flags['d8'])
context.ApplyEnvOverrides(d8_env)
if context.options['library'] == 'static':
shell = d8_env.Program('d8' + suffix, object_files + shell_files)
diff --git a/V8_MERGE_REVISION b/V8_MERGE_REVISION
index ff5fd5d..8caa544 100644
--- a/V8_MERGE_REVISION
+++ b/V8_MERGE_REVISION
@@ -1,2 +1,2 @@
-V8 3.4.14.35
-http://v8.googlecode.com/svn/branches/3.4@9839
+V8 3.5.10.24
+http://v8.googlecode.com/svn/branches/3.5@9918
diff --git a/build/all.gyp b/build/all.gyp
index 38287e3..9c0f05c 100644
--- a/build/all.gyp
+++ b/build/all.gyp
@@ -1,4 +1,4 @@
-# Copyright (c) 2010 The Chromium Authors. All rights reserved.
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
@@ -8,6 +8,7 @@
'target_name': 'All',
'type': 'none',
'dependencies': [
+ '../preparser/preparser.gyp:*',
'../samples/samples.gyp:*',
'../src/d8.gyp:d8',
],
diff --git a/build/armu.gypi b/build/armu.gypi
index 3f874c0..d15b8ab 100644
--- a/build/armu.gypi
+++ b/build/armu.gypi
@@ -32,5 +32,5 @@
'armv7': 1,
'arm_neon': 0,
'arm_fpu': 'vfpv3',
- }
+ },
}
diff --git a/build/common.gypi b/build/common.gypi
index 7020495..834516f 100644
--- a/build/common.gypi
+++ b/build/common.gypi
@@ -1,4 +1,4 @@
-# Copyright 2010 the V8 project authors. All rights reserved.
+# Copyright 2011 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
@@ -25,159 +25,266 @@
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+# Shared definitions for all V8-related targets.
+
{
'variables': {
- 'library%': 'static_library',
- 'component%': 'static_library',
- 'visibility%': 'hidden',
- 'variables': {
- 'conditions': [
- [ 'OS=="linux" or OS=="freebsd" or OS=="openbsd"', {
- # This handles the Linux platforms we generally deal with. Anything
- # else gets passed through, which probably won't work very well; such
- # hosts should pass an explicit target_arch to gyp.
- 'host_arch%':
- '<!(uname -m | sed -e "s/i.86/ia32/;s/x86_64/x64/;s/amd64/x64/;s/arm.*/arm/")',
- }, { # OS!="linux" and OS!="freebsd" and OS!="openbsd"
- 'host_arch%': 'ia32',
- }],
- ],
- },
- 'host_arch%': '<(host_arch)',
- 'target_arch%': '<(host_arch)',
+ 'use_system_v8%': 0,
+ 'msvs_use_common_release': 0,
+ 'gcc_version%': 'unknown',
+ 'v8_compress_startup_data%': 'off',
'v8_target_arch%': '<(target_arch)',
+
+ # Setting 'v8_can_use_unaligned_accesses' to 'true' will allow the code
+ # generated by V8 to do unaligned memory access, and setting it to 'false'
+ # will ensure that the generated code will always do aligned memory
+ # accesses. The default value of 'default' will try to determine the correct
+ # setting. Note that for Intel architectures (ia32 and x64) unaligned memory
+ # access is allowed for all CPUs.
+ 'v8_can_use_unaligned_accesses%': 'default',
+
+ # Setting 'v8_can_use_vfp_instructions' to 'true' will enable use of ARM VFP
+ # instructions in the V8 generated code. VFP instructions will be enabled
+ # both for the snapshot and for the ARM target. Leaving the default value
+ # of 'false' will avoid VFP instructions in the snapshot and use CPU feature
+ # probing when running on the target.
+ 'v8_can_use_vfp_instructions%': 'false',
+
+ # Setting v8_use_arm_eabi_hardfloat to true will turn on V8 support for ARM
+ # EABI calling convention where double arguments are passed in VFP
+ # registers. Note that the GCC flag '-mfloat-abi=hard' should be used as
+ # well when compiling for the ARM target.
+ 'v8_use_arm_eabi_hardfloat%': 'false',
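+  # Illustrative example: with the new toplevel Makefile these settings can be
+  # overridden through GYPFLAGS, e.g.
+  #   make arm.release GYPFLAGS="-Dv8_use_arm_eabi_hardfloat=true \
+  #                              -Dv8_can_use_vfp_instructions=true"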
+
+ 'v8_enable_debugger_support%': 1,
+
+ 'v8_enable_disassembler%': 0,
+
+ 'v8_enable_gdbjit%': 0,
+
+ # Enable profiling support. Only required on Windows.
+ 'v8_enable_prof%': 0,
+
+ # Chrome needs this definition unconditionally. For standalone V8 builds,
+ # it's handled in build/standalone.gypi.
+ 'want_separate_host_toolset%': 1,
+
+ 'v8_use_snapshot%': 'true',
+ 'host_os%': '<(OS)',
+ 'v8_use_liveobjectlist%': 'false',
},
'target_defaults': {
- 'default_configuration': 'Debug',
+ 'conditions': [
+ ['v8_enable_debugger_support==1', {
+ 'defines': ['ENABLE_DEBUGGER_SUPPORT',],
+ }],
+ ['v8_enable_disassembler==1', {
+ 'defines': ['ENABLE_DISASSEMBLER',],
+ }],
+ ['v8_enable_gdbjit==1', {
+ 'defines': ['ENABLE_GDB_JIT_INTERFACE',],
+ }],
+ ['OS!="mac"', {
+ # TODO(mark): The OS!="mac" conditional is temporary. It can be
+ # removed once the Mac Chromium build stops setting target_arch to
+ # ia32 and instead sets it to mac. Other checks in this file for
+ # OS=="mac" can be removed at that time as well. This can be cleaned
+ # up once http://crbug.com/44205 is fixed.
+ 'conditions': [
+ ['v8_target_arch=="arm"', {
+ 'defines': [
+ 'V8_TARGET_ARCH_ARM',
+ ],
+ 'conditions': [
+ [ 'v8_can_use_unaligned_accesses=="true"', {
+ 'defines': [
+ 'CAN_USE_UNALIGNED_ACCESSES=1',
+ ],
+ }],
+ [ 'v8_can_use_unaligned_accesses=="false"', {
+ 'defines': [
+ 'CAN_USE_UNALIGNED_ACCESSES=0',
+ ],
+ }],
+ [ 'v8_can_use_vfp_instructions=="true"', {
+ 'defines': [
+ 'CAN_USE_VFP_INSTRUCTIONS',
+ ],
+ }],
+ [ 'v8_use_arm_eabi_hardfloat=="true"', {
+ 'defines': [
+ 'USE_EABI_HARDFLOAT=1',
+ 'CAN_USE_VFP_INSTRUCTIONS',
+ ],
+ 'cflags': [
+ '-mfloat-abi=hard',
+ ],
+ }, {
+ 'defines': [
+ 'USE_EABI_HARDFLOAT=0',
+ ],
+ }],
+ # The ARM assembler assumes the host is 32 bits,
+ # so force building 32-bit host tools.
+ ['host_arch=="x64"', {
+ 'target_conditions': [
+ ['_toolset=="host"', {
+ 'cflags': ['-m32'],
+ 'ldflags': ['-m32'],
+ }],
+ ],
+ }],
+ ],
+ }],
+ ['v8_target_arch=="ia32"', {
+ 'defines': [
+ 'V8_TARGET_ARCH_IA32',
+ ],
+ }],
+ ['v8_target_arch=="mips"', {
+ 'defines': [
+ 'V8_TARGET_ARCH_MIPS',
+ ],
+ }],
+ ['v8_target_arch=="x64"', {
+ 'defines': [
+ 'V8_TARGET_ARCH_X64',
+ ],
+ }],
+ ],
+ }],
+ ['v8_use_liveobjectlist=="true"', {
+ 'defines': [
+ 'ENABLE_DEBUGGER_SUPPORT',
+ 'INSPECTOR',
+ 'OBJECT_PRINT',
+ 'LIVEOBJECTLIST',
+ ],
+ }],
+ ['v8_compress_startup_data=="bz2"', {
+ 'defines': [
+ 'COMPRESS_STARTUP_DATA_BZ2',
+ ],
+ }],
+ ['OS=="win" and v8_enable_prof==1', {
+ 'msvs_settings': {
+ 'VCLinkerTool': {
+ 'GenerateMapFile': 'true',
+ },
+ },
+ }],
+ ],
'configurations': {
'Debug': {
- 'cflags': [ '-g', '-O0' ],
- 'defines': [ 'ENABLE_DISASSEMBLER', 'DEBUG' ],
+ 'defines': [
+ 'DEBUG',
+ 'ENABLE_DISASSEMBLER',
+ 'V8_ENABLE_CHECKS',
+ 'OBJECT_PRINT',
+ ],
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'Optimization': '0',
+
+ 'conditions': [
+ ['OS=="win" and component=="shared_library"', {
+ 'RuntimeLibrary': '3', # /MDd
+ }, {
+ 'RuntimeLibrary': '1', # /MTd
+ }],
+ ],
+ },
+ 'VCLinkerTool': {
+ 'LinkIncremental': '2',
+ # For future reference, the stack size needs to be increased
+ # when building for Windows 64-bit, otherwise some test cases
+ # can cause stack overflow.
+ # 'StackReserveSize': '297152',
+ },
+ },
+ 'conditions': [
+ ['OS=="freebsd" or OS=="openbsd"', {
+ 'cflags': [ '-I/usr/local/include' ],
+ }],
+ ['OS=="linux" or OS=="freebsd" or OS=="openbsd"', {
+ 'cflags': [ '-Wall', '-Werror', '-W', '-Wno-unused-parameter',
+ '-Wnon-virtual-dtor' ],
+ }],
+ ],
},
'Release': {
- 'cflags': [ '-O3', '-fomit-frame-pointer', '-fdata-sections', '-ffunction-sections' ],
+ 'conditions': [
+ ['OS=="linux" or OS=="freebsd" or OS=="openbsd"', {
+ 'cflags!': [
+ '-O2',
+ '-Os',
+ ],
+ 'cflags': [
+ '-fdata-sections',
+ '-ffunction-sections',
+ '-fomit-frame-pointer',
+ '-O3',
+ ],
+ 'conditions': [
+ [ 'gcc_version==44', {
+ 'cflags': [
+ # Avoid crashes with gcc 4.4 in the v8 test suite.
+ '-fno-tree-vrp',
+ ],
+ }],
+ ],
+ }],
+ ['OS=="freebsd" or OS=="openbsd"', {
+ 'cflags': [ '-I/usr/local/include' ],
+ }],
+ ['OS=="mac"', {
+ 'xcode_settings': {
+ 'GCC_OPTIMIZATION_LEVEL': '3', # -O3
+
+ # -fstrict-aliasing. Mainline gcc
+ # enables this at -O2 and above,
+ # but Apple gcc does not unless it
+ # is specified explicitly.
+ 'GCC_STRICT_ALIASING': 'YES',
+ },
+ }],
+ ['OS=="win"', {
+ 'msvs_configuration_attributes': {
+ 'OutputDirectory': '$(SolutionDir)$(ConfigurationName)',
+ 'IntermediateDirectory': '$(OutDir)\\obj\\$(ProjectName)',
+ 'CharacterSet': '1',
+ },
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'Optimization': '2',
+ 'InlineFunctionExpansion': '2',
+ 'EnableIntrinsicFunctions': 'true',
+ 'FavorSizeOrSpeed': '0',
+ 'OmitFramePointers': 'true',
+ 'StringPooling': 'true',
+
+ 'conditions': [
+ ['OS=="win" and component=="shared_library"', {
+ 'RuntimeLibrary': '2', #/MD
+ }, {
+ 'RuntimeLibrary': '0', #/MT
+ }],
+ ],
+ },
+ 'VCLinkerTool': {
+ 'LinkIncremental': '1',
+ 'OptimizeReferences': '2',
+ 'OptimizeForWindows98': '1',
+ 'EnableCOMDATFolding': '2',
+ # For future reference, the stack size needs to be
+ # increased when building for Windows 64-bit, otherwise
+ # some test cases can cause stack overflow.
+ # 'StackReserveSize': '297152',
+ },
+ },
+ }],
+ ],
},
},
},
- 'conditions': [
- [ 'OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris"', {
- 'target_defaults': {
- 'cflags': [ '-Wall', '-pthread', '-fno-rtti', '-fno-exceptions' ],
- 'ldflags': [ '-pthread', ],
- 'conditions': [
- [ 'target_arch=="ia32"', {
- 'cflags': [ '-m32' ],
- 'ldflags': [ '-m32' ],
- }],
- [ 'OS=="linux"', {
- 'cflags': [ '-ansi' ],
- }],
- [ 'visibility=="hidden"', {
- 'cflags': [ '-fvisibility=hidden' ],
- }],
- ],
- },
- }], # 'OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris"'
- ['OS=="win"', {
- 'target_defaults': {
- 'defines': [
- 'WIN32',
- '_CRT_SECURE_NO_DEPRECATE',
- '_CRT_NONSTDC_NO_DEPRECATE',
- ],
- 'conditions': [
- ['component=="static_library"', {
- 'defines': [
- '_HAS_EXCEPTIONS=0',
- ],
- }],
- ],
- 'msvs_cygwin_dirs': ['<(DEPTH)/third_party/cygwin'],
- 'msvs_disabled_warnings': [4355, 4800],
- 'msvs_settings': {
- 'VCCLCompilerTool': {
- 'MinimalRebuild': 'false',
- 'BufferSecurityCheck': 'true',
- 'EnableFunctionLevelLinking': 'true',
- 'RuntimeTypeInfo': 'false',
- 'WarningLevel': '3',
- 'WarnAsError': 'true',
- 'DebugInformationFormat': '3',
- 'Detect64BitPortabilityProblems': 'false',
- 'conditions': [
- [ 'msvs_multi_core_compile', {
- 'AdditionalOptions': ['/MP'],
- }],
- ['component=="shared_library"', {
- 'ExceptionHandling': '1', # /EHsc
- }, {
- 'ExceptionHandling': '0',
- }],
- ],
- },
- 'VCLibrarianTool': {
- 'AdditionalOptions': ['/ignore:4221'],
- },
- 'VCLinkerTool': {
- 'AdditionalDependencies': [
- 'ws2_32.lib',
- ],
- 'GenerateDebugInformation': 'true',
- 'MapFileName': '$(OutDir)\\$(TargetName).map',
- 'ImportLibrary': '$(OutDir)\\lib\\$(TargetName).lib',
- 'FixedBaseAddress': '1',
- # LinkIncremental values:
- # 0 == default
- # 1 == /INCREMENTAL:NO
- # 2 == /INCREMENTAL
- 'LinkIncremental': '1',
- # SubSystem values:
- # 0 == not set
- # 1 == /SUBSYSTEM:CONSOLE
- # 2 == /SUBSYSTEM:WINDOWS
- 'SubSystem': '1',
- },
- },
- },
- }], # OS=="win"
- ['OS=="mac"', {
- 'target_defaults': {
- 'xcode_settings': {
- 'ALWAYS_SEARCH_USER_PATHS': 'NO',
- 'GCC_C_LANGUAGE_STANDARD': 'ansi', # -ansi
- 'GCC_CW_ASM_SYNTAX': 'NO', # No -fasm-blocks
- 'GCC_DYNAMIC_NO_PIC': 'NO', # No -mdynamic-no-pic
- # (Equivalent to -fPIC)
- 'GCC_ENABLE_CPP_EXCEPTIONS': 'NO', # -fno-exceptions
- 'GCC_ENABLE_CPP_RTTI': 'NO', # -fno-rtti
- 'GCC_ENABLE_PASCAL_STRINGS': 'NO', # No -mpascal-strings
- # GCC_INLINES_ARE_PRIVATE_EXTERN maps to -fvisibility-inlines-hidden
- 'GCC_INLINES_ARE_PRIVATE_EXTERN': 'YES',
- 'GCC_SYMBOLS_PRIVATE_EXTERN': 'YES', # -fvisibility=hidden
- 'GCC_THREADSAFE_STATICS': 'NO', # -fno-threadsafe-statics
- 'GCC_TREAT_WARNINGS_AS_ERRORS': 'YES', # -Werror
- 'GCC_VERSION': '4.2',
- 'GCC_WARN_ABOUT_MISSING_NEWLINE': 'YES', # -Wnewline-eof
- 'MACOSX_DEPLOYMENT_TARGET': '10.4', # -mmacosx-version-min=10.4
- 'PREBINDING': 'NO', # No -Wl,-prebind
- 'USE_HEADERMAP': 'NO',
- 'OTHER_CFLAGS': [
- '-fno-strict-aliasing',
- ],
- 'WARNING_CFLAGS': [
- '-Wall',
- '-Wendif-labels',
- '-W',
- '-Wno-unused-parameter',
- '-Wnon-virtual-dtor',
- ],
- },
- 'target_conditions': [
- ['_type!="static_library"', {
- 'xcode_settings': {'OTHER_LDFLAGS': ['-Wl,-search_paths_first']},
- }],
- ], # target_conditions
- }, # target_defaults
- }], # OS=="mac"
- ],
}
diff --git a/build/gyp_v8 b/build/gyp_v8
index 61b54c2..dfdbe3f 100755
--- a/build/gyp_v8
+++ b/build/gyp_v8
@@ -92,8 +92,8 @@
if os.path.realpath(path) not in specified_includes:
result.append(path)
- # Always include common.gypi & features_override.gypi
- AddInclude(os.path.join(script_dir, 'common.gypi'))
+ # Always include standalone.gypi
+ AddInclude(os.path.join(script_dir, 'standalone.gypi'))
# Optionally add supplemental .gypi files if present.
supplements = glob.glob(os.path.join(v8_root, '*', 'supplement.gypi'))
diff --git a/build/standalone.gypi b/build/standalone.gypi
new file mode 100644
index 0000000..81909f1
--- /dev/null
+++ b/build/standalone.gypi
@@ -0,0 +1,200 @@
+# Copyright 2011 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Definitions to be used when building stand-alone V8 binaries.
+
+{
+ 'variables': {
+ 'library%': 'static_library',
+ 'component%': 'static_library',
+ 'visibility%': 'hidden',
+ 'msvs_multi_core_compile%': '1',
+ 'variables': {
+ 'variables': {
+ 'conditions': [
+ [ 'OS=="linux" or OS=="freebsd" or OS=="openbsd"', {
+ # This handles the Linux platforms we generally deal with. Anything
+ # else gets passed through, which probably won't work very well; such
+ # hosts should pass an explicit target_arch to gyp.
+ 'host_arch%':
+ '<!(uname -m | sed -e "s/i.86/ia32/;s/x86_64/x64/;s/amd64/x64/;s/arm.*/arm/")',
+ }, { # OS!="linux" and OS!="freebsd" and OS!="openbsd"
+ 'host_arch%': 'ia32',
+ }],
+ ],
+ },
+ 'host_arch%': '<(host_arch)',
+ 'target_arch%': '<(host_arch)',
+ 'v8_target_arch%': '<(target_arch)',
+ },
+ 'host_arch%': '<(host_arch)',
+ 'target_arch%': '<(target_arch)',
+ 'v8_target_arch%': '<(v8_target_arch)',
+ 'conditions': [
+ ['(v8_target_arch=="arm" and host_arch!="arm") or \
+ (v8_target_arch=="x64" and host_arch!="x64")', {
+ 'want_separate_host_toolset': 1,
+ }, {
+ 'want_separate_host_toolset': 0,
+ }],
+ ],
+ },
+ 'target_defaults': {
+ 'default_configuration': 'Debug',
+ 'configurations': {
+ 'Debug': {
+ 'cflags': [ '-g', '-O0' ],
+ },
+ },
+ },
+ 'conditions': [
+ [ 'OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris"', {
+ 'target_defaults': {
+ 'cflags': [ '-Wall', '-Werror', '-W', '-Wno-unused-parameter',
+ '-Wnon-virtual-dtor', '-pthread', '-fno-rtti',
+ '-fno-exceptions', '-pedantic' ],
+ 'ldflags': [ '-pthread', ],
+ 'conditions': [
+ [ 'target_arch=="ia32"', {
+ 'cflags': [ '-m32' ],
+ 'ldflags': [ '-m32' ],
+ }],
+ [ 'OS=="linux"', {
+ 'cflags': [ '-ansi' ],
+ }],
+ [ 'visibility=="hidden"', {
+ 'cflags': [ '-fvisibility=hidden' ],
+ }],
+ [ 'component=="shared_library"', {
+ 'cflags': [ '-fPIC', ],
+ }],
+ ],
+ },
+ }], # 'OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris"'
+ ['OS=="win"', {
+ 'target_defaults': {
+ 'defines': [
+ 'WIN32',
+ '_CRT_SECURE_NO_DEPRECATE',
+ '_CRT_NONSTDC_NO_DEPRECATE',
+ ],
+ 'conditions': [
+ ['component=="static_library"', {
+ 'defines': [
+ '_HAS_EXCEPTIONS=0',
+ ],
+ }],
+ ],
+ 'msvs_cygwin_dirs': ['<(DEPTH)/third_party/cygwin'],
+ 'msvs_disabled_warnings': [4355, 4800],
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'MinimalRebuild': 'false',
+ 'BufferSecurityCheck': 'true',
+ 'EnableFunctionLevelLinking': 'true',
+ 'RuntimeTypeInfo': 'false',
+ 'WarningLevel': '3',
+ 'WarnAsError': 'true',
+ 'DebugInformationFormat': '3',
+ 'Detect64BitPortabilityProblems': 'false',
+ 'conditions': [
+ [ 'msvs_multi_core_compile', {
+ 'AdditionalOptions': ['/MP'],
+ }],
+ ['component=="shared_library"', {
+ 'ExceptionHandling': '1', # /EHsc
+ }, {
+ 'ExceptionHandling': '0',
+ }],
+ ],
+ },
+ 'VCLibrarianTool': {
+ 'AdditionalOptions': ['/ignore:4221'],
+ },
+ 'VCLinkerTool': {
+ 'AdditionalDependencies': [
+ 'ws2_32.lib',
+ ],
+ 'GenerateDebugInformation': 'true',
+ 'MapFileName': '$(OutDir)\\$(TargetName).map',
+ 'ImportLibrary': '$(OutDir)\\lib\\$(TargetName).lib',
+ 'FixedBaseAddress': '1',
+ # LinkIncremental values:
+ # 0 == default
+ # 1 == /INCREMENTAL:NO
+ # 2 == /INCREMENTAL
+ 'LinkIncremental': '1',
+ # SubSystem values:
+ # 0 == not set
+ # 1 == /SUBSYSTEM:CONSOLE
+ # 2 == /SUBSYSTEM:WINDOWS
+ 'SubSystem': '1',
+ },
+ },
+ },
+ }], # OS=="win"
+ ['OS=="mac"', {
+ 'target_defaults': {
+ 'xcode_settings': {
+ 'ALWAYS_SEARCH_USER_PATHS': 'NO',
+ 'GCC_C_LANGUAGE_STANDARD': 'ansi', # -ansi
+ 'GCC_CW_ASM_SYNTAX': 'NO', # No -fasm-blocks
+ 'GCC_DYNAMIC_NO_PIC': 'NO', # No -mdynamic-no-pic
+ # (Equivalent to -fPIC)
+ 'GCC_ENABLE_CPP_EXCEPTIONS': 'NO', # -fno-exceptions
+ 'GCC_ENABLE_CPP_RTTI': 'NO', # -fno-rtti
+ 'GCC_ENABLE_PASCAL_STRINGS': 'NO', # No -mpascal-strings
+ # GCC_INLINES_ARE_PRIVATE_EXTERN maps to -fvisibility-inlines-hidden
+ 'GCC_INLINES_ARE_PRIVATE_EXTERN': 'YES',
+ 'GCC_SYMBOLS_PRIVATE_EXTERN': 'YES', # -fvisibility=hidden
+ 'GCC_THREADSAFE_STATICS': 'NO', # -fno-threadsafe-statics
+ 'GCC_TREAT_WARNINGS_AS_ERRORS': 'YES', # -Werror
+ 'GCC_VERSION': '4.2',
+ 'GCC_WARN_ABOUT_MISSING_NEWLINE': 'YES', # -Wnewline-eof
+ 'MACOSX_DEPLOYMENT_TARGET': '10.4', # -mmacosx-version-min=10.4
+ 'PREBINDING': 'NO', # No -Wl,-prebind
+ 'USE_HEADERMAP': 'NO',
+ 'OTHER_CFLAGS': [
+ '-fno-strict-aliasing',
+ ],
+ 'WARNING_CFLAGS': [
+ '-Wall',
+ '-Wendif-labels',
+ '-W',
+ '-Wno-unused-parameter',
+ '-Wnon-virtual-dtor',
+ ],
+ },
+ 'target_conditions': [
+ ['_type!="static_library"', {
+ 'xcode_settings': {'OTHER_LDFLAGS': ['-Wl,-search_paths_first']},
+ }],
+ ], # target_conditions
+ }, # target_defaults
+ }], # OS=="mac"
+ ],
+}
diff --git a/build/v8-features.gypi b/build/v8-features.gypi
deleted file mode 100644
index 4037452..0000000
--- a/build/v8-features.gypi
+++ /dev/null
@@ -1,125 +0,0 @@
-# Copyright 2011 the V8 project authors. All rights reserved.
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following
-# disclaimer in the documentation and/or other materials provided
-# with the distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived
-# from this software without specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-# TODO(sgjesse): This is currently copied from v8.gyp, should probably
-# be refactored.
-{
- 'variables': {
- 'use_system_v8%': 0,
- 'msvs_use_common_release': 0,
- 'gcc_version%': 'unknown',
- 'v8_compress_startup_data%': 'off',
- 'v8_target_arch%': '<(target_arch)',
-
- # Setting 'v8_can_use_unaligned_accesses' to 'true' will allow the code
- # generated by V8 to do unaligned memory access, and setting it to 'false'
- # will ensure that the generated code will always do aligned memory
- # accesses. The default value of 'default' will try to determine the correct
- # setting. Note that for Intel architectures (ia32 and x64) unaligned memory
- # access is allowed for all CPUs.
- 'v8_can_use_unaligned_accesses%': 'default',
-
- # Setting 'v8_can_use_vfp_instructions' to 'true' will enable use of ARM VFP
- # instructions in the V8 generated code. VFP instructions will be enabled
- # both for the snapshot and for the ARM target. Leaving the default value
- # of 'false' will avoid VFP instructions in the snapshot and use CPU feature
- # probing when running on the target.
- 'v8_can_use_vfp_instructions%': 'false',
-
- # Setting v8_use_arm_eabi_hardfloat to true will turn on V8 support for ARM
- # EABI calling convention where double arguments are passed in VFP
- # registers. Note that the GCC flag '-mfloat-abi=hard' should be used as
- # well when compiling for the ARM target.
- 'v8_use_arm_eabi_hardfloat%': 'false',
-
- 'v8_use_snapshot%': 'true',
- 'host_os%': '<(OS)',
- 'v8_use_liveobjectlist%': 'false',
- },
- 'target_defaults': {
- 'conditions': [
- ['OS!="mac"', {
- 'conditions': [
- ['v8_target_arch=="arm"', {
- 'defines': [
- 'V8_TARGET_ARCH_ARM',
- ],
- 'conditions': [
- [ 'v8_can_use_unaligned_accesses=="true"', {
- 'defines': [
- 'CAN_USE_UNALIGNED_ACCESSES=1',
- ],
- }],
- [ 'v8_can_use_unaligned_accesses=="false"', {
- 'defines': [
- 'CAN_USE_UNALIGNED_ACCESSES=0',
- ],
- }],
- [ 'v8_can_use_vfp_instructions=="true"', {
- 'defines': [
- 'CAN_USE_VFP_INSTRUCTIONS',
- ],
- }],
- [ 'v8_use_arm_eabi_hardfloat=="true"', {
- 'defines': [
- 'USE_EABI_HARDFLOAT=1',
- 'CAN_USE_VFP_INSTRUCTIONS',
- ],
- }],
- ],
- }],
- ['v8_target_arch=="ia32"', {
- 'defines': [
- 'V8_TARGET_ARCH_IA32',
- ],
- }],
- ['v8_target_arch=="mips"', {
- 'defines': [
- 'V8_TARGET_ARCH_MIPS',
- ],
- }],
- ['v8_target_arch=="x64"', {
- 'defines': [
- 'V8_TARGET_ARCH_X64',
- ],
- }],
- ],
- }],
- ],
- 'configurations': {
- 'Debug': {
- 'defines': [
- 'DEBUG',
- '_DEBUG',
- 'ENABLE_DISASSEMBLER',
- 'V8_ENABLE_CHECKS',
- 'OBJECT_PRINT',
- ],
- }
- }
- }
-}
diff --git a/include/v8-debug.h b/include/v8-debug.h
old mode 100755
new mode 100644
diff --git a/include/v8-profiler.h b/include/v8-profiler.h
index 4febcb9..f67646f 100644
--- a/include/v8-profiler.h
+++ b/include/v8-profiler.h
@@ -307,6 +307,12 @@
* path from the snapshot root to the current node.
*/
const HeapGraphNode* GetDominatorNode() const;
+
+ /**
+ * Finds and returns a value from the heap corresponding to this node,
+ * if the value is still reachable.
+ */
+ Handle<Value> GetHeapValue() const;
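+  //
+  // Illustrative use (node is a hypothetical HeapGraphNode pointer):
+  //   v8::Handle<v8::Value> value = node->GetHeapValue();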
};
diff --git a/include/v8.h b/include/v8.h
index f4f81e4..5a78160 100644
--- a/include/v8.h
+++ b/include/v8.h
@@ -171,12 +171,12 @@
/**
* Creates an empty handle.
*/
- inline Handle();
+ inline Handle() : val_(0) {}
/**
* Creates a new handle for the specified value.
*/
- inline explicit Handle(T* val) : val_(val) { }
+ inline explicit Handle(T* val) : val_(val) {}
/**
* Creates a handle for the contents of the specified handle. This
@@ -203,14 +203,14 @@
*/
inline bool IsEmpty() const { return val_ == 0; }
- inline T* operator->() const { return val_; }
-
- inline T* operator*() const { return val_; }
-
/**
* Sets the handle to be empty. IsEmpty() will then return true.
*/
- inline void Clear() { this->val_ = 0; }
+ inline void Clear() { val_ = 0; }
+
+ inline T* operator->() const { return val_; }
+
+ inline T* operator*() const { return val_; }
/**
* Checks whether two handles are the same.
@@ -1039,29 +1039,30 @@
* \param length The number of characters to copy from the string. For
* WriteUtf8 the number of bytes in the buffer.
* \param nchars_ref The number of characters written, can be NULL.
- * \param hints Various hints that might affect performance of this or
+ * \param options Various options that might affect performance of this or
* subsequent operations.
* \return The number of characters copied to the buffer excluding the null
* terminator. For WriteUtf8: The number of bytes copied to the buffer
- * including the null terminator.
+ * including the null terminator (if written).
*/
- enum WriteHints {
- NO_HINTS = 0,
- HINT_MANY_WRITES_EXPECTED = 1
+ enum WriteOptions {
+ NO_OPTIONS = 0,
+ HINT_MANY_WRITES_EXPECTED = 1,
+ NO_NULL_TERMINATION = 2
};
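+  // Illustrative combination of the new options (str and buffer are
+  // hypothetical; values can be OR'ed since Write* now takes a plain int):
+  //   str->WriteUtf8(buffer, sizeof(buffer), NULL,
+  //                  v8::String::HINT_MANY_WRITES_EXPECTED |
+  //                  v8::String::NO_NULL_TERMINATION);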
V8EXPORT int Write(uint16_t* buffer,
int start = 0,
int length = -1,
- WriteHints hints = NO_HINTS) const; // UTF-16
+ int options = NO_OPTIONS) const; // UTF-16
V8EXPORT int WriteAscii(char* buffer,
int start = 0,
int length = -1,
- WriteHints hints = NO_HINTS) const; // ASCII
+ int options = NO_OPTIONS) const; // ASCII
V8EXPORT int WriteUtf8(char* buffer,
int length = -1,
int* nchars_ref = NULL,
- WriteHints hints = NO_HINTS) const; // UTF-8
+ int options = NO_OPTIONS) const; // UTF-8
/**
* A zero length string.
@@ -1335,7 +1336,7 @@
static inline Number* Cast(v8::Value* obj);
private:
V8EXPORT Number();
- static void CheckCast(v8::Value* obj);
+ V8EXPORT static void CheckCast(v8::Value* obj);
};
@@ -1709,7 +1710,7 @@
static inline Array* Cast(Value* obj);
private:
V8EXPORT Array();
- static void CheckCast(Value* obj);
+ V8EXPORT static void CheckCast(Value* obj);
};
@@ -2231,11 +2232,10 @@
void SetHiddenPrototype(bool value);
/**
- * Sets the property attributes of the 'prototype' property of functions
- * created from this FunctionTemplate. Can be any combination of ReadOnly,
- * DontEnum and DontDelete.
+ * Sets the ReadOnly flag in the attributes of the 'prototype' property
+ * of functions created from this FunctionTemplate to true.
*/
- void SetPrototypeAttributes(int attributes);
+ void ReadOnlyPrototype();
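+  //
+  // Illustrative migration from the removed call (templ is hypothetical):
+  //   templ->ReadOnlyPrototype();  // previously templ->SetPrototypeAttributes(...)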
/**
* Returns true if the given object is an instance of this function
@@ -3607,7 +3607,7 @@
/**
* Returns whether v8::Locker is being used by this V8 instance.
*/
- static bool IsActive() { return active_; }
+ static bool IsActive();
private:
bool has_lock_;
@@ -3828,10 +3828,6 @@
template <class T>
-Handle<T>::Handle() : val_(0) { }
-
-
-template <class T>
Local<T>::Local() : Handle<T>() { }
diff --git a/preparser/preparser.gyp b/preparser/preparser.gyp
new file mode 100644
index 0000000..0b03382
--- /dev/null
+++ b/preparser/preparser.gyp
@@ -0,0 +1,42 @@
+# Copyright 2011 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+{
+ 'includes': ['../build/common.gypi'],
+ 'targets': [
+ {
+ 'target_name': 'preparser',
+ 'type': 'executable',
+ 'dependencies': [
+ '../tools/gyp/v8.gyp:preparser_lib',
+ ],
+ 'sources': [
+ 'preparser-process.cc',
+ ],
+ },
+ ],
+}
diff --git a/samples/samples.gyp b/samples/samples.gyp
index f383ee2..55b2a98 100644
--- a/samples/samples.gyp
+++ b/samples/samples.gyp
@@ -1,4 +1,4 @@
-# Copyright 2010 the V8 project authors. All rights reserved.
+# Copyright 2011 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
@@ -26,23 +26,25 @@
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
{
+ 'includes': ['../build/common.gypi'],
+ 'target_defaults': {
+ 'type': 'executable',
+ 'dependencies': [
+ '../tools/gyp/v8.gyp:v8',
+ ],
+ 'include_dirs': [
+ '../include',
+ ],
+ },
'targets': [
{
'target_name': 'shell',
- 'type': 'executable',
- 'dependencies': [
- '../tools/gyp/v8.gyp:v8',
- ],
'sources': [
'shell.cc',
],
},
{
'target_name': 'process',
- 'type': 'executable',
- 'dependencies': [
- '../tools/gyp/v8.gyp:v8',
- ],
'sources': [
'process.cc',
],
diff --git a/samples/shell.cc b/samples/shell.cc
index f37e731..8ed9d03 100644
--- a/samples/shell.cc
+++ b/samples/shell.cc
@@ -26,39 +26,28 @@
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include <v8.h>
-#include <v8-testing.h>
#include <assert.h>
-#ifdef COMPRESS_STARTUP_DATA_BZ2
-#include <bzlib.h>
-#endif
#include <fcntl.h>
#include <string.h>
#include <stdio.h>
#include <stdlib.h>
-// When building with V8 in a shared library we cannot use functions which
-// is not explicitly a part of the public V8 API. This extensive use of
-// #ifndef USING_V8_SHARED/#endif is a hack until we can resolve whether to
-// still use the shell sample for testing or change to use the developer
-// shell d8 TODO(1272).
-#if !(defined(USING_V8_SHARED) || defined(V8_SHARED))
-#include "../src/v8.h"
-#endif // USING_V8_SHARED
-
-#if !defined(_WIN32) && !defined(_WIN64)
-#include <unistd.h> // NOLINT
+#ifdef COMPRESS_STARTUP_DATA_BZ2
+#error Using compressed startup data is not supported for this sample
#endif
-static void ExitShell(int exit_code) {
- // Use _exit instead of exit to avoid races between isolate
- // threads and static destructors.
- fflush(stdout);
- fflush(stderr);
- _exit(exit_code);
-}
+/**
+ * This sample program shows how to implement a simple javascript shell
+ * based on V8. This includes initializing V8 with command line options,
+ * creating global functions, compiling and executing strings.
+ *
+ * For a more sophisticated shell, consider using the debug shell D8.
+ */
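+// Illustrative usage once the sample is built (e.g. with "scons sample=shell"):
+//   ./shell                # argc == 1, so the interactive shell is started
+//   ./shell script.js      # scripts named on the command line are executed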
+
v8::Persistent<v8::Context> CreateShellContext();
void RunShell(v8::Handle<v8::Context> context);
+int RunMain(int argc, char* argv[]);
bool ExecuteString(v8::Handle<v8::String> source,
v8::Handle<v8::Value> name,
bool print_result,
@@ -68,305 +57,28 @@
v8::Handle<v8::Value> Load(const v8::Arguments& args);
v8::Handle<v8::Value> Quit(const v8::Arguments& args);
v8::Handle<v8::Value> Version(const v8::Arguments& args);
-v8::Handle<v8::Value> Int8Array(const v8::Arguments& args);
-v8::Handle<v8::Value> Uint8Array(const v8::Arguments& args);
-v8::Handle<v8::Value> Int16Array(const v8::Arguments& args);
-v8::Handle<v8::Value> Uint16Array(const v8::Arguments& args);
-v8::Handle<v8::Value> Int32Array(const v8::Arguments& args);
-v8::Handle<v8::Value> Uint32Array(const v8::Arguments& args);
-v8::Handle<v8::Value> Float32Array(const v8::Arguments& args);
-v8::Handle<v8::Value> Float64Array(const v8::Arguments& args);
-v8::Handle<v8::Value> PixelArray(const v8::Arguments& args);
v8::Handle<v8::String> ReadFile(const char* name);
void ReportException(v8::TryCatch* handler);
-static bool last_run = true;
-
-class SourceGroup {
- public:
- SourceGroup() :
-#if !(defined(USING_V8_SHARED) || defined(V8_SHARED))
- next_semaphore_(v8::internal::OS::CreateSemaphore(0)),
- done_semaphore_(v8::internal::OS::CreateSemaphore(0)),
- thread_(NULL),
-#endif // USING_V8_SHARED
- argv_(NULL),
- begin_offset_(0),
- end_offset_(0) { }
-
-#if !(defined(USING_V8_SHARED) || defined(V8_SHARED))
- ~SourceGroup() {
- delete next_semaphore_;
- delete done_semaphore_;
- }
-#endif // USING_V8_SHARED
-
- void Begin(char** argv, int offset) {
- argv_ = const_cast<const char**>(argv);
- begin_offset_ = offset;
- }
-
- void End(int offset) { end_offset_ = offset; }
-
- void Execute() {
- for (int i = begin_offset_; i < end_offset_; ++i) {
- const char* arg = argv_[i];
- if (strcmp(arg, "-e") == 0 && i + 1 < end_offset_) {
- // Execute argument given to -e option directly.
- v8::HandleScope handle_scope;
- v8::Handle<v8::String> file_name = v8::String::New("unnamed");
- v8::Handle<v8::String> source = v8::String::New(argv_[i + 1]);
- if (!ExecuteString(source, file_name, false, true)) {
- ExitShell(1);
- return;
- }
- ++i;
- } else if (arg[0] == '-') {
- // Ignore other options. They have been parsed already.
- } else {
- // Use all other arguments as names of files to load and run.
- v8::HandleScope handle_scope;
- v8::Handle<v8::String> file_name = v8::String::New(arg);
- v8::Handle<v8::String> source = ReadFile(arg);
- if (source.IsEmpty()) {
- printf("Error reading '%s'\n", arg);
- continue;
- }
- if (!ExecuteString(source, file_name, false, true)) {
- ExitShell(1);
- return;
- }
- }
- }
- }
-
-#if !(defined(USING_V8_SHARED) || defined(V8_SHARED))
- void StartExecuteInThread() {
- if (thread_ == NULL) {
- thread_ = new IsolateThread(this);
- thread_->Start();
- }
- next_semaphore_->Signal();
- }
-
- void WaitForThread() {
- if (thread_ == NULL) return;
- if (last_run) {
- thread_->Join();
- thread_ = NULL;
- } else {
- done_semaphore_->Wait();
- }
- }
-#endif // USING_V8_SHARED
-
- private:
-#if !(defined(USING_V8_SHARED) || defined(V8_SHARED))
- static v8::internal::Thread::Options GetThreadOptions() {
- v8::internal::Thread::Options options;
- options.name = "IsolateThread";
- // On some systems (OSX 10.6) the stack size default is 0.5Mb or less
- // which is not enough to parse the big literal expressions used in tests.
- // The stack size should be at least StackGuard::kLimitSize + some
- // OS-specific padding for thread startup code.
- options.stack_size = 2 << 20; // 2 Mb seems to be enough
- return options;
- }
-
- class IsolateThread : public v8::internal::Thread {
- public:
- explicit IsolateThread(SourceGroup* group)
- : v8::internal::Thread(GetThreadOptions()), group_(group) {}
-
- virtual void Run() {
- group_->ExecuteInThread();
- }
-
- private:
- SourceGroup* group_;
- };
-
- void ExecuteInThread() {
- v8::Isolate* isolate = v8::Isolate::New();
- do {
- if (next_semaphore_ != NULL) next_semaphore_->Wait();
- {
- v8::Isolate::Scope iscope(isolate);
- v8::HandleScope scope;
- v8::Persistent<v8::Context> context = CreateShellContext();
- {
- v8::Context::Scope cscope(context);
- Execute();
- }
- context.Dispose();
- }
- if (done_semaphore_ != NULL) done_semaphore_->Signal();
- } while (!last_run);
- isolate->Dispose();
- }
-
- v8::internal::Semaphore* next_semaphore_;
- v8::internal::Semaphore* done_semaphore_;
- v8::internal::Thread* thread_;
-#endif // USING_V8_SHARED
-
- const char** argv_;
- int begin_offset_;
- int end_offset_;
-};
+static bool run_shell;
-static SourceGroup* isolate_sources = NULL;
-
-
-#ifdef COMPRESS_STARTUP_DATA_BZ2
-class BZip2Decompressor : public v8::StartupDataDecompressor {
- public:
- virtual ~BZip2Decompressor() { }
-
- protected:
- virtual int DecompressData(char* raw_data,
- int* raw_data_size,
- const char* compressed_data,
- int compressed_data_size) {
- ASSERT_EQ(v8::StartupData::kBZip2,
- v8::V8::GetCompressedStartupDataAlgorithm());
- unsigned int decompressed_size = *raw_data_size;
- int result =
- BZ2_bzBuffToBuffDecompress(raw_data,
- &decompressed_size,
- const_cast<char*>(compressed_data),
- compressed_data_size,
- 0, 1);
- if (result == BZ_OK) {
- *raw_data_size = decompressed_size;
- }
- return result;
- }
-};
-#endif
-
-
-int RunMain(int argc, char* argv[]) {
+int main(int argc, char* argv[]) {
v8::V8::SetFlagsFromCommandLine(&argc, argv, true);
+ run_shell = (argc == 1);
v8::HandleScope handle_scope;
v8::Persistent<v8::Context> context = CreateShellContext();
- // Enter the newly created execution environment.
- context->Enter();
if (context.IsEmpty()) {
printf("Error creating context\n");
return 1;
}
-
- bool run_shell = (argc == 1);
- int num_isolates = 1;
- for (int i = 1; i < argc; i++) {
- if (strcmp(argv[i], "--isolate") == 0) {
-#if !(defined(USING_V8_SHARED) || defined(V8_SHARED))
- ++num_isolates;
-#else // USING_V8_SHARED
- printf("Error: --isolate not supported when linked with shared "
- "library\n");
- ExitShell(1);
-#endif // USING_V8_SHARED
- }
- }
- if (isolate_sources == NULL) {
- isolate_sources = new SourceGroup[num_isolates];
- SourceGroup* current = isolate_sources;
- current->Begin(argv, 1);
- for (int i = 1; i < argc; i++) {
- const char* str = argv[i];
- if (strcmp(str, "--isolate") == 0) {
- current->End(i);
- current++;
- current->Begin(argv, i + 1);
- } else if (strcmp(str, "--shell") == 0) {
- run_shell = true;
- } else if (strcmp(str, "-f") == 0) {
- // Ignore any -f flags for compatibility with the other stand-
- // alone JavaScript engines.
- continue;
- } else if (strncmp(str, "--", 2) == 0) {
- printf("Warning: unknown flag %s.\nTry --help for options\n", str);
- }
- }
- current->End(argc);
- }
-#if !(defined(USING_V8_SHARED) || defined(V8_SHARED))
- for (int i = 1; i < num_isolates; ++i) {
- isolate_sources[i].StartExecuteInThread();
- }
-#endif // USING_V8_SHARED
- isolate_sources[0].Execute();
+ context->Enter();
+ int result = RunMain(argc, argv);
if (run_shell) RunShell(context);
-#if !(defined(USING_V8_SHARED) || defined(V8_SHARED))
- for (int i = 1; i < num_isolates; ++i) {
- isolate_sources[i].WaitForThread();
- }
-#endif // USING_V8_SHARED
- if (last_run) {
- delete[] isolate_sources;
- isolate_sources = NULL;
- }
context->Exit();
context.Dispose();
- return 0;
-}
-
-
-int main(int argc, char* argv[]) {
- // Figure out if we're requested to stress the optimization
- // infrastructure by running tests multiple times and forcing
- // optimization in the last run.
- bool FLAG_stress_opt = false;
- bool FLAG_stress_deopt = false;
- for (int i = 0; i < argc; i++) {
- if (strcmp(argv[i], "--stress-opt") == 0) {
- FLAG_stress_opt = true;
- argv[i] = NULL;
- } else if (strcmp(argv[i], "--stress-deopt") == 0) {
- FLAG_stress_deopt = true;
- argv[i] = NULL;
- } else if (strcmp(argv[i], "--noalways-opt") == 0) {
- // No support for stressing if we can't use --always-opt.
- FLAG_stress_opt = false;
- FLAG_stress_deopt = false;
- break;
- }
- }
-
-#ifdef COMPRESS_STARTUP_DATA_BZ2
- BZip2Decompressor startup_data_decompressor;
- int bz2_result = startup_data_decompressor.Decompress();
- if (bz2_result != BZ_OK) {
- fprintf(stderr, "bzip error code: %d\n", bz2_result);
- exit(1);
- }
-#endif
-
- v8::V8::SetFlagsFromCommandLine(&argc, argv, true);
- int result = 0;
- if (FLAG_stress_opt || FLAG_stress_deopt) {
- v8::Testing::SetStressRunType(FLAG_stress_opt
- ? v8::Testing::kStressTypeOpt
- : v8::Testing::kStressTypeDeopt);
- int stress_runs = v8::Testing::GetStressRuns();
- for (int i = 0; i < stress_runs && result == 0; i++) {
- printf("============ Stress %d/%d ============\n",
- i + 1, stress_runs);
- v8::Testing::PrepareStressRun(i);
- last_run = (i == stress_runs - 1);
- result = RunMain(argc, argv);
- }
- printf("======== Full Deoptimization =======\n");
- v8::Testing::DeoptimizeAll();
- } else {
- result = RunMain(argc, argv);
- }
v8::V8::Dispose();
-
return result;
}
@@ -393,26 +105,6 @@
// Bind the 'version' function
global->Set(v8::String::New("version"), v8::FunctionTemplate::New(Version));
- // Bind the handlers for external arrays.
- global->Set(v8::String::New("Int8Array"),
- v8::FunctionTemplate::New(Int8Array));
- global->Set(v8::String::New("Uint8Array"),
- v8::FunctionTemplate::New(Uint8Array));
- global->Set(v8::String::New("Int16Array"),
- v8::FunctionTemplate::New(Int16Array));
- global->Set(v8::String::New("Uint16Array"),
- v8::FunctionTemplate::New(Uint16Array));
- global->Set(v8::String::New("Int32Array"),
- v8::FunctionTemplate::New(Int32Array));
- global->Set(v8::String::New("Uint32Array"),
- v8::FunctionTemplate::New(Uint32Array));
- global->Set(v8::String::New("Float32Array"),
- v8::FunctionTemplate::New(Float32Array));
- global->Set(v8::String::New("Float64Array"),
- v8::FunctionTemplate::New(Float64Array));
- global->Set(v8::String::New("PixelArray"),
- v8::FunctionTemplate::New(PixelArray));
-
return v8::Context::New(NULL, global);
}
@@ -486,7 +178,9 @@
// If no arguments are given args[0] will yield undefined which
// converts to the integer value 0.
int exit_code = args[0]->Int32Value();
- ExitShell(exit_code);
+ fflush(stdout);
+ fflush(stderr);
+ exit(exit_code);
return v8::Undefined();
}
@@ -496,113 +190,6 @@
}
-void ExternalArrayWeakCallback(v8::Persistent<v8::Value> object, void* data) {
- free(data);
- object.Dispose();
-}
-
-
-v8::Handle<v8::Value> CreateExternalArray(const v8::Arguments& args,
- v8::ExternalArrayType type,
- size_t element_size) {
- assert(element_size == 1 ||
- element_size == 2 ||
- element_size == 4 ||
- element_size == 8);
- if (args.Length() != 1) {
- return v8::ThrowException(
- v8::String::New("Array constructor needs one parameter."));
- }
- static const int kMaxLength = 0x3fffffff;
- size_t length = 0;
- if (args[0]->IsUint32()) {
- length = args[0]->Uint32Value();
- } else if (args[0]->IsNumber()) {
- double raw_length = args[0]->NumberValue();
- if (raw_length < 0) {
- return v8::ThrowException(
- v8::String::New("Array length must not be negative."));
- }
- if (raw_length > kMaxLength) {
- return v8::ThrowException(
- v8::String::New("Array length exceeds maximum length."));
- }
- length = static_cast<size_t>(raw_length);
- } else {
- return v8::ThrowException(
- v8::String::New("Array length must be a number."));
- }
- if (length > static_cast<size_t>(kMaxLength)) {
- return v8::ThrowException(
- v8::String::New("Array length exceeds maximum length."));
- }
- void* data = calloc(length, element_size);
- if (data == NULL) {
- return v8::ThrowException(v8::String::New("Memory allocation failed."));
- }
- v8::Handle<v8::Object> array = v8::Object::New();
- v8::Persistent<v8::Object> persistent_array =
- v8::Persistent<v8::Object>::New(array);
- persistent_array.MakeWeak(data, ExternalArrayWeakCallback);
- persistent_array.MarkIndependent();
- array->SetIndexedPropertiesToExternalArrayData(data, type, length);
- array->Set(v8::String::New("length"), v8::Int32::New(length),
- v8::ReadOnly);
- array->Set(v8::String::New("BYTES_PER_ELEMENT"),
- v8::Int32::New(element_size));
- return array;
-}
-
-
-v8::Handle<v8::Value> Int8Array(const v8::Arguments& args) {
- return CreateExternalArray(args, v8::kExternalByteArray, sizeof(int8_t));
-}
-
-
-v8::Handle<v8::Value> Uint8Array(const v8::Arguments& args) {
- return CreateExternalArray(args, v8::kExternalUnsignedByteArray,
- sizeof(uint8_t));
-}
-
-
-v8::Handle<v8::Value> Int16Array(const v8::Arguments& args) {
- return CreateExternalArray(args, v8::kExternalShortArray, sizeof(int16_t));
-}
-
-
-v8::Handle<v8::Value> Uint16Array(const v8::Arguments& args) {
- return CreateExternalArray(args, v8::kExternalUnsignedShortArray,
- sizeof(uint16_t));
-}
-
-v8::Handle<v8::Value> Int32Array(const v8::Arguments& args) {
- return CreateExternalArray(args, v8::kExternalIntArray, sizeof(int32_t));
-}
-
-
-v8::Handle<v8::Value> Uint32Array(const v8::Arguments& args) {
- return CreateExternalArray(args, v8::kExternalUnsignedIntArray,
- sizeof(uint32_t));
-}
-
-
-v8::Handle<v8::Value> Float32Array(const v8::Arguments& args) {
- return CreateExternalArray(args, v8::kExternalFloatArray,
- sizeof(float)); // NOLINT
-}
-
-
-v8::Handle<v8::Value> Float64Array(const v8::Arguments& args) {
- return CreateExternalArray(args, v8::kExternalDoubleArray,
- sizeof(double)); // NOLINT
-}
-
-
-v8::Handle<v8::Value> PixelArray(const v8::Arguments& args) {
- return CreateExternalArray(args, v8::kExternalPixelArray, sizeof(uint8_t));
-}
-
-
// Reads a file into a v8 string.
v8::Handle<v8::String> ReadFile(const char* name) {
FILE* file = fopen(name, "rb");
@@ -625,9 +212,41 @@
}
+// Process remaining command line arguments and execute files
+int RunMain(int argc, char* argv[]) {
+ for (int i = 1; i < argc; i++) {
+ const char* str = argv[i];
+ if (strcmp(str, "--shell") == 0) {
+ run_shell = true;
+ } else if (strcmp(str, "-f") == 0) {
+ // Ignore any -f flags for compatibility with the other stand-
+ // alone JavaScript engines.
+ continue;
+ } else if (strncmp(str, "--", 2) == 0) {
+ printf("Warning: unknown flag %s.\nTry --help for options\n", str);
+ } else if (strcmp(str, "-e") == 0 && i + 1 < argc) {
+ // Execute argument given to -e option directly.
+ v8::Handle<v8::String> file_name = v8::String::New("unnamed");
+ v8::Handle<v8::String> source = v8::String::New(argv[++i]);
+ if (!ExecuteString(source, file_name, false, true)) return 1;
+ } else {
+ // Use all other arguments as names of files to load and run.
+ v8::Handle<v8::String> file_name = v8::String::New(str);
+ v8::Handle<v8::String> source = ReadFile(str);
+ if (source.IsEmpty()) {
+ printf("Error reading '%s'\n", str);
+ continue;
+ }
+ if (!ExecuteString(source, file_name, false, true)) return 1;
+ }
+ }
+ return 0;
+}
+
+
// The read-eval-execute loop of the shell.
void RunShell(v8::Handle<v8::Context> context) {
- printf("V8 version %s\n", v8::V8::GetVersion());
+ printf("V8 version %s [sample shell]\n", v8::V8::GetVersion());
static const int kBufferSize = 256;
// Enter the execution environment before evaluating any code.
v8::Context::Scope context_scope(context);
diff --git a/src/SConscript b/src/SConscript
old mode 100755
new mode 100644
index b45a567..453a7c6
--- a/src/SConscript
+++ b/src/SConscript
@@ -32,6 +32,7 @@
import js2c
Import('context')
Import('tools')
+Import('d8_env')
SOURCES = {
@@ -65,6 +66,7 @@
disassembler.cc
diy-fp.cc
dtoa.cc
+ elements.cc
execution.cc
factory.cc
flags.cc
@@ -307,6 +309,7 @@
EXPERIMENTAL_LIBRARY_FILES = '''
proxy.js
+weakmap.js
'''.split()
@@ -338,7 +341,7 @@
else:
d8_files = context.GetRelevantSources(D8_FULL_FILES)
d8_objs = [d8_js_obj]
- d8_objs.append(context.ConfigureObject(env, [d8_files]))
+ d8_objs.append(context.ConfigureObject(d8_env, [d8_files]))
# Combine the JavaScript library files into a single C++ file and
# compile it.
diff --git a/src/api.cc b/src/api.cc
index b0e9775..5bda725 100644
--- a/src/api.cc
+++ b/src/api.cc
@@ -35,6 +35,7 @@
#include "debug.h"
#include "deoptimizer.h"
#include "execution.h"
+#include "flags.h"
#include "global-handles.h"
#include "heap-profiler.h"
#include "messages.h"
@@ -84,7 +85,7 @@
if (has_pending_exception) { \
if (handle_scope_implementer->CallDepthIsZero() && \
(isolate)->is_out_of_memory()) { \
- if (!handle_scope_implementer->ignore_out_of_memory()) \
+ if (!(isolate)->ignore_out_of_memory()) \
i::V8::FatalProcessOutOfMemory(NULL); \
} \
bool call_depth_is_zero = handle_scope_implementer->CallDepthIsZero(); \
@@ -877,7 +878,6 @@
i::Handle<i::FunctionTemplateInfo> info) {
info->set_tag(i::Smi::FromInt(Consts::FUNCTION_TEMPLATE));
info->set_flag(0);
- info->set_prototype_attributes(i::Smi::FromInt(v8::None));
}
@@ -1100,14 +1100,13 @@
}
-void FunctionTemplate::SetPrototypeAttributes(int attributes) {
+void FunctionTemplate::ReadOnlyPrototype() {
i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
if (IsDeadCheck(isolate, "v8::FunctionTemplate::SetPrototypeAttributes()")) {
return;
}
ENTER_V8(isolate);
- Utils::OpenHandle(this)->set_prototype_attributes(
- i::Smi::FromInt(attributes));
+ Utils::OpenHandle(this)->set_read_only_prototype(true);
}
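
For reference, a minimal embedder-side sketch of the renamed call. This is a hypothetical usage, not part of the patch, and assumes the matching v8.h declaration that accompanies this api.cc change (the old SetPrototypeAttributes(int) entry point is replaced by the parameterless ReadOnlyPrototype()).

#include <v8.h>

// Hypothetical sketch: functions created from this template get a read-only
// "prototype" property; previously this was requested with
// tmpl->SetPrototypeAttributes(v8::ReadOnly).
void MakeTemplateWithReadOnlyPrototype() {
  v8::HandleScope handle_scope;
  v8::Local<v8::FunctionTemplate> tmpl = v8::FunctionTemplate::New();
  tmpl->ReadOnlyPrototype();
}
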
@@ -1407,7 +1406,7 @@
ScriptData* ScriptData::PreCompile(const char* input, int length) {
i::Utf8ToUC16CharacterStream stream(
reinterpret_cast<const unsigned char*>(input), length);
- return i::ParserApi::PreParse(&stream, NULL);
+ return i::ParserApi::PreParse(&stream, NULL, i::FLAG_harmony_block_scoping);
}
@@ -1416,10 +1415,10 @@
if (str->IsExternalTwoByteString()) {
i::ExternalTwoByteStringUC16CharacterStream stream(
i::Handle<i::ExternalTwoByteString>::cast(str), 0, str->length());
- return i::ParserApi::PreParse(&stream, NULL);
+ return i::ParserApi::PreParse(&stream, NULL, i::FLAG_harmony_block_scoping);
} else {
i::GenericStringUC16CharacterStream stream(str, 0, str->length());
- return i::ParserApi::PreParse(&stream, NULL);
+ return i::ParserApi::PreParse(&stream, NULL, i::FLAG_harmony_block_scoping);
}
}
@@ -3165,10 +3164,9 @@
i::Object* constructor = object->map()->constructor();
i::JSFunction* function;
if (!constructor->IsJSFunction()) {
- // API functions have null as a constructor,
+ // Functions have null as a constructor,
// but any JSFunction knows its context immediately.
- ASSERT(object->IsJSFunction() &&
- i::JSFunction::cast(object)->shared()->IsApiFunction());
+ ASSERT(object->IsJSFunction());
function = i::JSFunction::cast(object);
} else {
function = i::JSFunction::cast(constructor);
@@ -3194,39 +3192,7 @@
ENTER_V8(isolate);
i::HandleScope scope(isolate);
i::Handle<i::JSObject> self = Utils::OpenHandle(this);
- i::Handle<i::Object> hidden_props_obj(i::GetHiddenProperties(self, true));
- if (!hidden_props_obj->IsJSObject()) {
- // We failed to create hidden properties. That's a detached
- // global proxy.
- ASSERT(hidden_props_obj->IsUndefined());
- return 0;
- }
- i::Handle<i::JSObject> hidden_props =
- i::Handle<i::JSObject>::cast(hidden_props_obj);
- i::Handle<i::String> hash_symbol = isolate->factory()->identity_hash_symbol();
- if (hidden_props->HasLocalProperty(*hash_symbol)) {
- i::Handle<i::Object> hash = i::GetProperty(hidden_props, hash_symbol);
- CHECK(!hash.is_null());
- CHECK(hash->IsSmi());
- return i::Smi::cast(*hash)->value();
- }
-
- int hash_value;
- int attempts = 0;
- do {
- // Generate a random 32-bit hash value but limit range to fit
- // within a smi.
- hash_value = i::V8::Random(self->GetIsolate()) & i::Smi::kMaxValue;
- attempts++;
- } while (hash_value == 0 && attempts < 30);
- hash_value = hash_value != 0 ? hash_value : 1; // never return 0
- CHECK(!i::SetLocalPropertyIgnoreAttributes(
- hidden_props,
- hash_symbol,
- i::Handle<i::Object>(i::Smi::FromInt(hash_value)),
- static_cast<PropertyAttributes>(None)).is_null());
-
- return hash_value;
+ return i::GetIdentityHash(self);
}
@@ -3237,7 +3203,9 @@
ENTER_V8(isolate);
i::HandleScope scope(isolate);
i::Handle<i::JSObject> self = Utils::OpenHandle(this);
- i::Handle<i::Object> hidden_props(i::GetHiddenProperties(self, true));
+ i::Handle<i::Object> hidden_props(i::GetHiddenProperties(
+ self,
+ i::JSObject::ALLOW_CREATION));
i::Handle<i::Object> key_obj = Utils::OpenHandle(*key);
i::Handle<i::Object> value_obj = Utils::OpenHandle(*value);
EXCEPTION_PREAMBLE(isolate);
@@ -3259,7 +3227,9 @@
return Local<v8::Value>());
ENTER_V8(isolate);
i::Handle<i::JSObject> self = Utils::OpenHandle(this);
- i::Handle<i::Object> hidden_props(i::GetHiddenProperties(self, false));
+ i::Handle<i::Object> hidden_props(i::GetHiddenProperties(
+ self,
+ i::JSObject::OMIT_CREATION));
if (hidden_props->IsUndefined()) {
return v8::Local<v8::Value>();
}
@@ -3281,7 +3251,9 @@
ENTER_V8(isolate);
i::HandleScope scope(isolate);
i::Handle<i::JSObject> self = Utils::OpenHandle(this);
- i::Handle<i::Object> hidden_props(i::GetHiddenProperties(self, false));
+ i::Handle<i::Object> hidden_props(i::GetHiddenProperties(
+ self,
+ i::JSObject::OMIT_CREATION));
if (hidden_props->IsUndefined()) {
return true;
}
@@ -3649,7 +3621,7 @@
int String::WriteUtf8(char* buffer,
int capacity,
int* nchars_ref,
- WriteHints hints) const {
+ int options) const {
i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
if (IsDeadCheck(isolate, "v8::String::WriteUtf8()")) return 0;
LOG_API(isolate, "String::WriteUtf8");
@@ -3657,7 +3629,7 @@
i::StringInputBuffer& write_input_buffer = *isolate->write_input_buffer();
i::Handle<i::String> str = Utils::OpenHandle(this);
isolate->string_tracker()->RecordWrite(str);
- if (hints & HINT_MANY_WRITES_EXPECTED) {
+ if (options & HINT_MANY_WRITES_EXPECTED) {
// Flatten the string for efficiency. This applies whether we are
// using StringInputBuffer or Get(i) to access the characters.
str->TryFlatten();
@@ -3697,7 +3669,8 @@
}
}
if (nchars_ref != NULL) *nchars_ref = nchars;
- if (i == len && (capacity == -1 || pos < capacity))
+ if (!(options & NO_NULL_TERMINATION) &&
+ (i == len && (capacity == -1 || pos < capacity)))
buffer[pos++] = '\0';
return pos;
}
@@ -3706,7 +3679,7 @@
int String::WriteAscii(char* buffer,
int start,
int length,
- WriteHints hints) const {
+ int options) const {
i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
if (IsDeadCheck(isolate, "v8::String::WriteAscii()")) return 0;
LOG_API(isolate, "String::WriteAscii");
@@ -3715,7 +3688,7 @@
ASSERT(start >= 0 && length >= -1);
i::Handle<i::String> str = Utils::OpenHandle(this);
isolate->string_tracker()->RecordWrite(str);
- if (hints & HINT_MANY_WRITES_EXPECTED) {
+ if (options & HINT_MANY_WRITES_EXPECTED) {
// Flatten the string for efficiency. This applies whether we are
// using StringInputBuffer or Get(i) to access the characters.
str->TryFlatten();
@@ -3731,7 +3704,7 @@
if (c == '\0') c = ' ';
buffer[i] = c;
}
- if (length == -1 || i < length)
+ if (!(options & NO_NULL_TERMINATION) && (length == -1 || i < length))
buffer[i] = '\0';
return i;
}
@@ -3740,7 +3713,7 @@
int String::Write(uint16_t* buffer,
int start,
int length,
- WriteHints hints) const {
+ int options) const {
i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
if (IsDeadCheck(isolate, "v8::String::Write()")) return 0;
LOG_API(isolate, "String::Write");
@@ -3748,7 +3721,7 @@
ASSERT(start >= 0 && length >= -1);
i::Handle<i::String> str = Utils::OpenHandle(this);
isolate->string_tracker()->RecordWrite(str);
- if (hints & HINT_MANY_WRITES_EXPECTED) {
+ if (options & HINT_MANY_WRITES_EXPECTED) {
// Flatten the string for efficiency. This applies whether we are
// using StringInputBuffer or Get(i) to access the characters.
str->TryFlatten();
@@ -3758,7 +3731,8 @@
end = str->length();
if (end < 0) return 0;
i::String::WriteToFlat(*str, buffer, start, end);
- if (length == -1 || end - start < length) {
+ if (!(options & NO_NULL_TERMINATION) &&
+ (length == -1 || end - start < length)) {
buffer[end - start] = '\0';
}
return end - start;
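
The Write/WriteAscii/WriteUtf8 hunks above replace the WriteHints argument with a general options bit set, so callers can now opt out of the trailing NUL. A hypothetical usage sketch, not part of the patch, assuming the corresponding v8.h enum from this release (NO_NULL_TERMINATION alongside the existing HINT_MANY_WRITES_EXPECTED):

#include <v8.h>
#include <cstdio>

// Hypothetical sketch: copy UTF-8 data without the terminating '\0',
// keeping the flattening hint for repeated writes.
void CopyUtf8NoTerminator(v8::Handle<v8::String> str) {
  char buffer[64];
  int nchars = 0;
  int written = str->WriteUtf8(buffer, static_cast<int>(sizeof(buffer)), &nchars,
                               v8::String::HINT_MANY_WRITES_EXPECTED |
                               v8::String::NO_NULL_TERMINATION);
  // "written" counts string bytes only; no '\0' is appended after them.
  std::printf("copied %d bytes, %d characters\n", written, nchars);
}
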
@@ -4146,7 +4120,7 @@
v8::Local<v8::Context> Context::GetEntered() {
i::Isolate* isolate = i::Isolate::Current();
- if (IsDeadCheck(isolate, "v8::Context::GetEntered()")) {
+ if (!EnsureInitializedForIsolate(isolate, "v8::Context::GetEntered()")) {
return Local<Context>();
}
i::Handle<i::Object> last =
@@ -4287,8 +4261,8 @@
Local<Value> v8::External::Wrap(void* data) {
i::Isolate* isolate = i::Isolate::Current();
STATIC_ASSERT(sizeof(data) == sizeof(i::Address));
- LOG_API(isolate, "External::Wrap");
EnsureInitializedForIsolate(isolate, "v8::External::Wrap()");
+ LOG_API(isolate, "External::Wrap");
ENTER_V8(isolate);
v8::Local<v8::Value> result = CanBeEncodedAsSmi(data)
@@ -4332,8 +4306,8 @@
Local<External> v8::External::New(void* data) {
STATIC_ASSERT(sizeof(data) == sizeof(i::Address));
i::Isolate* isolate = i::Isolate::Current();
- LOG_API(isolate, "External::New");
EnsureInitializedForIsolate(isolate, "v8::External::New()");
+ LOG_API(isolate, "External::New");
ENTER_V8(isolate);
return ExternalNewImpl(data);
}
@@ -4825,8 +4799,7 @@
void V8::IgnoreOutOfMemoryException() {
- EnterIsolateIfNeeded()->handle_scope_implementer()->set_ignore_out_of_memory(
- true);
+ EnterIsolateIfNeeded()->set_ignore_out_of_memory(true);
}
@@ -5793,6 +5766,16 @@
}
+v8::Handle<v8::Value> HeapGraphNode::GetHeapValue() const {
+ i::Isolate* isolate = i::Isolate::Current();
+ IsDeadCheck(isolate, "v8::HeapGraphNode::GetHeapValue");
+ i::Handle<i::HeapObject> object = ToInternal(this)->GetHeapObject();
+ return v8::Handle<Value>(!object.is_null() ?
+ ToApi<Value>(object) : ToApi<Value>(
+ isolate->factory()->undefined_value()));
+}
+
+
static i::HeapSnapshot* ToInternal(const HeapSnapshot* snapshot) {
return const_cast<i::HeapSnapshot*>(
reinterpret_cast<const i::HeapSnapshot*>(snapshot));
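
The new HeapGraphNode::GetHeapValue() lets an embedder map a heap-snapshot node back to the live object it describes (undefined when there is none). A hypothetical sketch, not part of the patch, assuming the usual v8-profiler.h entry points, whose declarations are not shown in this diff:

#include <v8.h>
#include <v8-profiler.h>

// Hypothetical sketch: take a snapshot and ask a node for the heap object
// behind it. A synthetic node yields undefined.
void InspectSnapshotRoot() {
  v8::HandleScope handle_scope;
  const v8::HeapSnapshot* snapshot =
      v8::HeapProfiler::TakeSnapshot(v8::String::New("dump"));
  const v8::HeapGraphNode* root = snapshot->GetRoot();
  v8::Handle<v8::Value> value = root->GetHeapValue();
  if (value->IsUndefined()) {
    // No live heap object corresponds to this node.
  }
}
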
diff --git a/src/api.h b/src/api.h
index 8d2e778..07723cb 100644
--- a/src/api.h
+++ b/src/api.h
@@ -404,9 +404,12 @@
entered_contexts_(0),
saved_contexts_(0),
spare_(NULL),
- ignore_out_of_memory_(false),
call_depth_(0) { }
+ ~HandleScopeImplementer() {
+ DeleteArray(spare_);
+ }
+
// Threading support for handle data.
static int ArchiveSpacePerThread();
char* RestoreThread(char* from);
@@ -437,10 +440,6 @@
inline bool HasSavedContexts();
inline List<internal::Object**>* blocks() { return &blocks_; }
- inline bool ignore_out_of_memory() { return ignore_out_of_memory_; }
- inline void set_ignore_out_of_memory(bool value) {
- ignore_out_of_memory_ = value;
- }
private:
void ResetAfterArchive() {
@@ -448,7 +447,6 @@
entered_contexts_.Initialize(0);
saved_contexts_.Initialize(0);
spare_ = NULL;
- ignore_out_of_memory_ = false;
call_depth_ = 0;
}
@@ -473,7 +471,6 @@
// Used as a stack to keep track of saved contexts.
List<Context*> saved_contexts_;
Object** spare_;
- bool ignore_out_of_memory_;
int call_depth_;
// This is only used for threading support.
v8::ImplementationUtilities::HandleScopeData handle_scope_data_;
diff --git a/src/apinatives.js b/src/apinatives.js
index 193863f..e94da9f 100644
--- a/src/apinatives.js
+++ b/src/apinatives.js
@@ -49,7 +49,10 @@
return InstantiateFunction(data, name);
case kNewObjectTag:
var Constructor = %GetTemplateField(data, kApiConstructorOffset);
- var result = Constructor ? new (Instantiate(Constructor))() : {};
+ // Note: Do not directly use a function template as a condition, our
+ // internal ToBoolean doesn't handle that!
+ var result = typeof Constructor === 'undefined' ?
+ {} : new (Instantiate(Constructor))();
ConfigureTemplateInstance(result, data);
result = %ToFastProperties(result);
return result;
@@ -73,18 +76,19 @@
if (name) %FunctionSetName(fun, name);
cache[serialNumber] = fun;
var prototype = %GetTemplateField(data, kApiPrototypeTemplateOffset);
- var attributes = %GetTemplateField(data, kApiPrototypeAttributesOffset);
- if (attributes != NONE) {
- %IgnoreAttributesAndSetProperty(
- fun, "prototype",
- prototype ? Instantiate(prototype) : {},
- attributes);
- } else {
- fun.prototype = prototype ? Instantiate(prototype) : {};
+ var flags = %GetTemplateField(data, kApiFlagOffset);
+ // Note: Do not directly use an object template as a condition, our
+ // internal ToBoolean doesn't handle that!
+ fun.prototype = typeof prototype === 'undefined' ?
+ {} : Instantiate(prototype);
+ if (flags & (1 << kReadOnlyPrototypeBit)) {
+ %FunctionSetReadOnlyPrototype(fun);
}
%SetProperty(fun.prototype, "constructor", fun, DONT_ENUM);
var parent = %GetTemplateField(data, kApiParentTemplateOffset);
- if (parent) {
+ // Note: Do not directly use a function template as a condition, our
+ // internal ToBoolean doesn't handle that!
+ if (!(typeof parent === 'undefined')) {
var parent_fun = Instantiate(parent);
fun.prototype.__proto__ = parent_fun.prototype;
}
diff --git a/src/arm/assembler-arm.cc b/src/arm/assembler-arm.cc
index 89df079..0ec3692 100644
--- a/src/arm/assembler-arm.cc
+++ b/src/arm/assembler-arm.cc
@@ -692,11 +692,11 @@
void Assembler::next(Label* L) {
ASSERT(L->is_linked());
int link = target_at(L->pos());
- if (link > 0) {
- L->link_to(link);
- } else {
- ASSERT(link == kEndOfChain);
+ if (link == kEndOfChain) {
L->Unuse();
+ } else {
+ ASSERT(link >= 0);
+ L->link_to(link);
}
}
diff --git a/src/arm/assembler-arm.h b/src/arm/assembler-arm.h
index 97d4226..9a58693 100644
--- a/src/arm/assembler-arm.h
+++ b/src/arm/assembler-arm.h
@@ -377,6 +377,9 @@
// immediate
INLINE(explicit Operand(int32_t immediate,
RelocInfo::Mode rmode = RelocInfo::NONE));
+ INLINE(static Operand Zero()) {
+ return Operand(static_cast<int32_t>(0));
+ }
INLINE(explicit Operand(const ExternalReference& f));
explicit Operand(Handle<Object> handle);
INLINE(explicit Operand(Smi* value));
diff --git a/src/arm/builtins-arm.cc b/src/arm/builtins-arm.cc
index 328102b..a35380c 100644
--- a/src/arm/builtins-arm.cc
+++ b/src/arm/builtins-arm.cc
@@ -138,7 +138,7 @@
__ str(scratch1, FieldMemOperand(result, JSArray::kElementsOffset));
// Clear the heap tag on the elements array.
- ASSERT(kSmiTag == 0);
+ STATIC_ASSERT(kSmiTag == 0);
__ sub(scratch1, scratch1, Operand(kHeapObjectTag));
// Initialize the FixedArray and fill it with holes. FixedArray length is
@@ -207,7 +207,7 @@
// Allocate the JSArray object together with space for a FixedArray with the
// requested number of elements.
__ bind(&not_empty);
- ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
+ STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
__ mov(elements_array_end,
Operand((JSArray::kSize + FixedArray::kHeaderSize) / kPointerSize));
__ add(elements_array_end,
@@ -243,7 +243,7 @@
FieldMemOperand(result, JSArray::kElementsOffset));
// Clear the heap tag on the elements array.
- ASSERT(kSmiTag == 0);
+ STATIC_ASSERT(kSmiTag == 0);
__ sub(elements_array_storage,
elements_array_storage,
Operand(kHeapObjectTag));
@@ -255,7 +255,7 @@
__ LoadRoot(scratch1, Heap::kFixedArrayMapRootIndex);
ASSERT_EQ(0 * kPointerSize, FixedArray::kMapOffset);
__ str(scratch1, MemOperand(elements_array_storage, kPointerSize, PostIndex));
- ASSERT(kSmiTag == 0);
+ STATIC_ASSERT(kSmiTag == 0);
__ tst(array_size, array_size);
// Length of the FixedArray is the number of pre-allocated elements if
// the actual JSArray has length 0 and the size of the JSArray for non-empty
@@ -272,7 +272,7 @@
// result: JSObject
// elements_array_storage: elements array element storage
// array_size: smi-tagged size of elements array
- ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
+ STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
__ add(elements_array_end,
elements_array_storage,
Operand(array_size, LSL, kPointerSizeLog2 - kSmiTagSize));
@@ -337,14 +337,14 @@
__ bind(&argc_one_or_more);
__ cmp(r0, Operand(1));
__ b(ne, &argc_two_or_more);
- ASSERT(kSmiTag == 0);
+ STATIC_ASSERT(kSmiTag == 0);
__ ldr(r2, MemOperand(sp)); // Get the argument from the stack.
__ and_(r3, r2, Operand(kIntptrSignBit | kSmiTagMask), SetCC);
__ b(ne, call_generic_code);
// Handle construction of an empty array of a certain size. Bail out if size
// is too large to actually allocate an elements array.
- ASSERT(kSmiTag == 0);
+ STATIC_ASSERT(kSmiTag == 0);
__ cmp(r2, Operand(JSObject::kInitialMaxFastElementArray << kSmiTagSize));
__ b(ge, call_generic_code);
@@ -571,7 +571,7 @@
// Is it a String?
__ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
__ ldrb(r3, FieldMemOperand(r2, Map::kInstanceTypeOffset));
- ASSERT(kNotStringTag != 0);
+ STATIC_ASSERT(kNotStringTag != 0);
__ tst(r3, Operand(kIsNotStringMask));
__ b(ne, &convert_argument);
__ mov(argument, r0);
diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc
index eaad9f2..09d2c17 100644
--- a/src/arm/code-stubs-arm.cc
+++ b/src/arm/code-stubs-arm.cc
@@ -549,7 +549,7 @@
// | s | exp | mantissa |
// Check for zero.
- __ cmp(int_scratch, Operand(0));
+ __ cmp(int_scratch, Operand::Zero());
__ mov(dst2, int_scratch);
__ mov(dst1, int_scratch);
__ b(eq, &done);
@@ -557,7 +557,7 @@
// Preload the sign of the value.
__ and_(dst2, int_scratch, Operand(HeapNumber::kSignMask), SetCC);
// Get the absolute value of the object (as an unsigned integer).
- __ rsb(int_scratch, int_scratch, Operand(0), SetCC, mi);
+ __ rsb(int_scratch, int_scratch, Operand::Zero(), SetCC, mi);
// Get mantissa[51:20].
@@ -589,7 +589,7 @@
__ mov(scratch2, Operand(int_scratch, LSL, scratch2));
__ orr(dst2, dst2, scratch2);
// Set dst1 to 0.
- __ mov(dst1, Operand(0));
+ __ mov(dst1, Operand::Zero());
}
__ bind(&done);
}
@@ -657,7 +657,7 @@
// Check for 0 and -0.
__ bic(scratch1, dst1, Operand(HeapNumber::kSignMask));
__ orr(scratch1, scratch1, Operand(dst2));
- __ cmp(scratch1, Operand(0));
+ __ cmp(scratch1, Operand::Zero());
__ b(eq, &done);
// Check that the value can be exactly represented by a 32-bit integer.
@@ -730,7 +730,7 @@
// Check for 0 and -0.
__ bic(dst, scratch1, Operand(HeapNumber::kSignMask));
__ orr(dst, scratch2, Operand(dst));
- __ cmp(dst, Operand(0));
+ __ cmp(dst, Operand::Zero());
__ b(eq, &done);
DoubleIs32BitInteger(masm, scratch1, scratch2, dst, scratch3, not_int32);
@@ -747,7 +747,7 @@
// Set the sign.
__ ldr(scratch1, FieldMemOperand(object, HeapNumber::kExponentOffset));
__ tst(scratch1, Operand(HeapNumber::kSignMask));
- __ rsb(dst, dst, Operand(0), LeaveCC, mi);
+ __ rsb(dst, dst, Operand::Zero(), LeaveCC, mi);
}
__ bind(&done);
@@ -1603,83 +1603,113 @@
}
-// The stub returns zero for false, and a non-zero value for true.
+// The stub expects its argument in the tos_ register and returns its result in
+// it, too: zero for false, and a non-zero value for true.
void ToBooleanStub::Generate(MacroAssembler* masm) {
// This stub uses VFP3 instructions.
CpuFeatures::Scope scope(VFP3);
- Label false_result, true_result, not_string;
+ Label patch;
const Register map = r9.is(tos_) ? r7 : r9;
- // undefined -> false
- __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
- __ cmp(tos_, ip);
- __ b(eq, &false_result);
+ // undefined -> false.
+ CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false);
- // Boolean -> its value
- __ LoadRoot(ip, Heap::kFalseValueRootIndex);
- __ cmp(tos_, ip);
- __ b(eq, &false_result);
- __ LoadRoot(ip, Heap::kTrueValueRootIndex);
- __ cmp(tos_, ip);
- // "tos_" is a register and contains a non-zero value. Hence we implicitly
- // return true if the equal condition is satisfied.
- __ Ret(eq);
+ // Boolean -> its value.
+ CheckOddball(masm, BOOLEAN, Heap::kFalseValueRootIndex, false);
+ CheckOddball(masm, BOOLEAN, Heap::kTrueValueRootIndex, true);
- // Smis: 0 -> false, all other -> true
- __ tst(tos_, tos_);
- __ b(eq, &false_result);
- __ tst(tos_, Operand(kSmiTagMask));
- // "tos_" is a register and contains a non-zero value. Hence we implicitly
- // return true if the not equal condition is satisfied.
- __ Ret(eq);
+ // 'null' -> false.
+ CheckOddball(masm, NULL_TYPE, Heap::kNullValueRootIndex, false);
- // 'null' -> false
- __ LoadRoot(ip, Heap::kNullValueRootIndex);
- __ cmp(tos_, ip);
- __ b(eq, &false_result);
+ if (types_.Contains(SMI)) {
+ // Smis: 0 -> false, all other -> true
+ __ tst(tos_, Operand(kSmiTagMask));
+ // tos_ contains the correct return value already
+ __ Ret(eq);
+ } else if (types_.NeedsMap()) {
+ // If we need a map later and have a Smi -> patch.
+ __ JumpIfSmi(tos_, &patch);
+ }
- // Get the map of the heap object.
- __ ldr(map, FieldMemOperand(tos_, HeapObject::kMapOffset));
+ if (types_.NeedsMap()) {
+ __ ldr(map, FieldMemOperand(tos_, HeapObject::kMapOffset));
- // Undetectable -> false.
- __ ldrb(ip, FieldMemOperand(map, Map::kBitFieldOffset));
- __ tst(ip, Operand(1 << Map::kIsUndetectable));
- __ b(&false_result, ne);
+ if (types_.CanBeUndetectable()) {
+ __ ldrb(ip, FieldMemOperand(map, Map::kBitFieldOffset));
+ __ tst(ip, Operand(1 << Map::kIsUndetectable));
+ // Undetectable -> false.
+ __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, ne);
+ __ Ret(ne);
+ }
+ }
- // JavaScript object -> true.
- __ CompareInstanceType(map, ip, FIRST_SPEC_OBJECT_TYPE);
- // "tos_" is a register and contains a non-zero value. Hence we implicitly
- // return true if the greater than condition is satisfied.
- __ Ret(ge);
+ if (types_.Contains(SPEC_OBJECT)) {
+ // Spec object -> true.
+ __ CompareInstanceType(map, ip, FIRST_SPEC_OBJECT_TYPE);
+ // tos_ contains the correct non-zero return value already.
+ __ Ret(ge);
+ }
- // String value -> false iff empty.
+ if (types_.Contains(STRING)) {
+ // String value -> false iff empty.
__ CompareInstanceType(map, ip, FIRST_NONSTRING_TYPE);
- __ b(&not_string, ge);
- __ ldr(tos_, FieldMemOperand(tos_, String::kLengthOffset));
- // Return string length as boolean value, i.e. return false iff length is 0.
- __ Ret();
+ __ ldr(tos_, FieldMemOperand(tos_, String::kLengthOffset), lt);
+ __ Ret(lt); // the string length is OK as the return value
+ }
- __ bind(&not_string);
- // HeapNumber -> false iff +0, -0, or NaN.
- __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
- __ b(&true_result, ne);
- __ vldr(d1, FieldMemOperand(tos_, HeapNumber::kValueOffset));
- __ VFPCompareAndSetFlags(d1, 0.0);
- // "tos_" is a register, and contains a non zero value by default.
- // Hence we only need to overwrite "tos_" with zero to return false for
- // FP_ZERO or FP_NAN cases. Otherwise, by default it returns true.
- __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, eq); // for FP_ZERO
- __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, vs); // for FP_NAN
- __ Ret();
+ if (types_.Contains(HEAP_NUMBER)) {
+ // Heap number -> false iff +0, -0, or NaN.
+ Label not_heap_number;
+ __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
+ __ b(ne, &not_heap_number);
+ __ vldr(d1, FieldMemOperand(tos_, HeapNumber::kValueOffset));
+ __ VFPCompareAndSetFlags(d1, 0.0);
+ // "tos_" is a register, and contains a non zero value by default.
+ // Hence we only need to overwrite "tos_" with zero to return false for
+ // FP_ZERO or FP_NAN cases. Otherwise, by default it returns true.
+ __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, eq); // for FP_ZERO
+ __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, vs); // for FP_NAN
+ __ Ret();
+ __ bind(&not_heap_number);
+ }
- // Return 1/0 for true/false in tos_.
- __ bind(&true_result);
- __ mov(tos_, Operand(1, RelocInfo::NONE));
- __ Ret();
- __ bind(&false_result);
- __ mov(tos_, Operand(0, RelocInfo::NONE));
- __ Ret();
+ __ bind(&patch);
+ GenerateTypeTransition(masm);
+}
+
+
+void ToBooleanStub::CheckOddball(MacroAssembler* masm,
+ Type type,
+ Heap::RootListIndex value,
+ bool result) {
+ if (types_.Contains(type)) {
+ // If we see an expected oddball, return its ToBoolean value tos_.
+ __ LoadRoot(ip, value);
+ __ cmp(tos_, ip);
+ // The value of a root is never NULL, so we can avoid loading a non-null
+ // value into tos_ when we want to return 'true'.
+ if (!result) {
+ __ mov(tos_, Operand(0, RelocInfo::NONE), LeaveCC, eq);
+ }
+ __ Ret(eq);
+ }
+}
+
+
+void ToBooleanStub::GenerateTypeTransition(MacroAssembler* masm) {
+ if (!tos_.is(r3)) {
+ __ mov(r3, Operand(tos_));
+ }
+ __ mov(r2, Operand(Smi::FromInt(tos_.code())));
+ __ mov(r1, Operand(Smi::FromInt(types_.ToByte())));
+ __ Push(r3, r2, r1);
+ // Patch the caller to an appropriate specialized stub and return the
+ // operation result to the caller of the stub.
+ __ TailCallExternalReference(
+ ExternalReference(IC_Utility(IC::kToBoolean_Patch), masm->isolate()),
+ 3,
+ 1);
}
@@ -2394,7 +2424,6 @@
Register left = r1;
Register right = r0;
Register scratch1 = r7;
- Register scratch2 = r9;
// Perform combined smi check on both operands.
__ orr(scratch1, left, Operand(right));
@@ -2588,7 +2617,7 @@
__ b(mi, &return_heap_number);
// Check for minus zero. Return heap number for minus zero.
Label not_zero;
- __ cmp(scratch1, Operand(0));
+ __ cmp(scratch1, Operand::Zero());
__ b(ne, &not_zero);
__ vmov(scratch2, d5.high());
__ tst(scratch2, Operand(HeapNumber::kSignMask));
@@ -3080,7 +3109,6 @@
Label no_update;
Label skip_cache;
- const Register heap_number_map = r5;
// Call C function to calculate the result and update the cache.
// Register r0 holds precalculated cache entry address; preserve
@@ -3551,7 +3579,7 @@
ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address, isolate);
__ mov(r5, Operand(ExternalReference(js_entry_sp)));
__ ldr(r6, MemOperand(r5));
- __ cmp(r6, Operand(0));
+ __ cmp(r6, Operand::Zero());
__ b(ne, &non_outermost_js);
__ str(fp, MemOperand(r5));
__ mov(ip, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
@@ -3626,7 +3654,7 @@
__ pop(r5);
__ cmp(r5, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
__ b(ne, &non_outermost_js_2);
- __ mov(r6, Operand(0));
+ __ mov(r6, Operand::Zero());
__ mov(r5, Operand(ExternalReference(js_entry_sp)));
__ str(r6, MemOperand(r5));
__ bind(&non_outermost_js_2);
@@ -3827,7 +3855,7 @@
__ Push(r0, r1);
__ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
__ LeaveInternalFrame();
- __ cmp(r0, Operand(0));
+ __ cmp(r0, Operand::Zero());
__ LoadRoot(r0, Heap::kTrueValueRootIndex, eq);
__ LoadRoot(r0, Heap::kFalseValueRootIndex, ne);
__ Ret(HasArgsInRegisters() ? 0 : 2);
@@ -3961,7 +3989,7 @@
FixedArray::kHeaderSize + 2 * kPointerSize;
// If there are no mapped parameters, we do not need the parameter_map.
__ cmp(r1, Operand(Smi::FromInt(0)));
- __ mov(r9, Operand(0), LeaveCC, eq);
+ __ mov(r9, Operand::Zero(), LeaveCC, eq);
__ mov(r9, Operand(r1, LSL, 1), LeaveCC, ne);
__ add(r9, r9, Operand(kParameterMapHeaderSize), LeaveCC, ne);
@@ -3985,7 +4013,7 @@
__ ldr(r4, MemOperand(r8, Context::SlotOffset(Context::GLOBAL_INDEX)));
__ ldr(r4, FieldMemOperand(r4, GlobalObject::kGlobalContextOffset));
- __ cmp(r1, Operand(0));
+ __ cmp(r1, Operand::Zero());
__ ldr(r4, MemOperand(r4, kNormalOffset), eq);
__ ldr(r4, MemOperand(r4, kAliasedOffset), ne);
@@ -4339,6 +4367,8 @@
__ cmp(r2, Operand(r0, ASR, kSmiTagSize));
__ b(gt, &runtime);
+ // Reset offset for possibly sliced string.
+ __ mov(r9, Operand(0));
// subject: Subject string
// regexp_data: RegExp data (FixedArray)
// Check the representation and encoding of the subject string.
@@ -4346,33 +4376,45 @@
__ ldr(r0, FieldMemOperand(subject, HeapObject::kMapOffset));
__ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset));
// First check for flat string.
- __ tst(r0, Operand(kIsNotStringMask | kStringRepresentationMask));
+ __ and_(r1, r0, Operand(kIsNotStringMask | kStringRepresentationMask), SetCC);
STATIC_ASSERT((kStringTag | kSeqStringTag) == 0);
__ b(eq, &seq_string);
// subject: Subject string
// regexp_data: RegExp data (FixedArray)
- // Check for flat cons string.
+ // Check for flat cons string or sliced string.
// A flat cons string is a cons string where the second part is the empty
// string. In that case the subject string is just the first part of the cons
// string. Also in this case the first part of the cons string is known to be
// a sequential string or an external string.
- STATIC_ASSERT(kExternalStringTag !=0);
- STATIC_ASSERT((kConsStringTag & kExternalStringTag) == 0);
- __ tst(r0, Operand(kIsNotStringMask | kExternalStringTag));
- __ b(ne, &runtime);
+ // In the case of a sliced string its offset has to be taken into account.
+ Label cons_string, check_encoding;
+ STATIC_ASSERT(kConsStringTag < kExternalStringTag);
+ STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
+ __ cmp(r1, Operand(kExternalStringTag));
+ __ b(lt, &cons_string);
+ __ b(eq, &runtime);
+
+ // String is sliced.
+ __ ldr(r9, FieldMemOperand(subject, SlicedString::kOffsetOffset));
+ __ mov(r9, Operand(r9, ASR, kSmiTagSize));
+ __ ldr(subject, FieldMemOperand(subject, SlicedString::kParentOffset));
+ // r9: offset of sliced string, smi-tagged.
+ __ jmp(&check_encoding);
+ // String is a cons string, check whether it is flat.
+ __ bind(&cons_string);
__ ldr(r0, FieldMemOperand(subject, ConsString::kSecondOffset));
__ LoadRoot(r1, Heap::kEmptyStringRootIndex);
__ cmp(r0, r1);
__ b(ne, &runtime);
__ ldr(subject, FieldMemOperand(subject, ConsString::kFirstOffset));
+ // Is first part of cons or parent of slice a flat string?
+ __ bind(&check_encoding);
__ ldr(r0, FieldMemOperand(subject, HeapObject::kMapOffset));
__ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset));
- // Is first part a flat string?
STATIC_ASSERT(kSeqStringTag == 0);
__ tst(r0, Operand(kStringRepresentationMask));
__ b(ne, &runtime);
-
__ bind(&seq_string);
// subject: Subject string
// regexp_data: RegExp data (FixedArray)
@@ -4438,21 +4480,30 @@
// For arguments 4 and 3 get string length, calculate start of string data and
// calculate the shift of the index (0 for ASCII and 1 for two byte).
- __ ldr(r0, FieldMemOperand(subject, String::kLengthOffset));
- __ mov(r0, Operand(r0, ASR, kSmiTagSize));
STATIC_ASSERT(SeqAsciiString::kHeaderSize == SeqTwoByteString::kHeaderSize);
- __ add(r9, subject, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
+ __ add(r8, subject, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
__ eor(r3, r3, Operand(1));
- // Argument 4 (r3): End of string data
- // Argument 3 (r2): Start of string data
+ // Load the length from the original subject string from the previous stack
+ // frame. Therefore we have to use fp, which points exactly to two pointer
+ // sizes below the previous sp. (Because creating a new stack frame pushes
+ // the previous fp onto the stack and moves up sp by 2 * kPointerSize.)
+ __ ldr(r0, MemOperand(fp, kSubjectOffset + 2 * kPointerSize));
+ // If slice offset is not 0, load the length from the original sliced string.
+ // Argument 4, r3: End of string data
+ // Argument 3, r2: Start of string data
+ // Prepare start and end index of the input.
+ __ add(r9, r8, Operand(r9, LSL, r3));
__ add(r2, r9, Operand(r1, LSL, r3));
- __ add(r3, r9, Operand(r0, LSL, r3));
+
+ __ ldr(r8, FieldMemOperand(r0, String::kLengthOffset));
+ __ mov(r8, Operand(r8, ASR, kSmiTagSize));
+ __ add(r3, r9, Operand(r8, LSL, r3));
// Argument 2 (r1): Previous index.
// Already there
// Argument 1 (r0): Subject string.
- __ mov(r0, subject);
+ // Already there
// Locate the code entry and call it.
__ add(r7, r7, Operand(Code::kHeaderSize - kHeapObjectTag));
@@ -4469,12 +4520,12 @@
// Check the result.
Label success;
- __ cmp(r0, Operand(NativeRegExpMacroAssembler::SUCCESS));
+ __ cmp(subject, Operand(NativeRegExpMacroAssembler::SUCCESS));
__ b(eq, &success);
Label failure;
- __ cmp(r0, Operand(NativeRegExpMacroAssembler::FAILURE));
+ __ cmp(subject, Operand(NativeRegExpMacroAssembler::FAILURE));
__ b(eq, &failure);
- __ cmp(r0, Operand(NativeRegExpMacroAssembler::EXCEPTION));
+ __ cmp(subject, Operand(NativeRegExpMacroAssembler::EXCEPTION));
// If not exception it can only be retry. Handle that in the runtime system.
__ b(ne, &runtime);
// Result must now be exception. If there is no pending exception already a
@@ -4486,18 +4537,18 @@
__ mov(r2, Operand(ExternalReference(Isolate::k_pending_exception_address,
isolate)));
__ ldr(r0, MemOperand(r2, 0));
- __ cmp(r0, r1);
+ __ cmp(subject, r1);
__ b(eq, &runtime);
__ str(r1, MemOperand(r2, 0)); // Clear pending exception.
// Check if the exception is a termination. If so, throw as uncatchable.
__ LoadRoot(ip, Heap::kTerminationExceptionRootIndex);
- __ cmp(r0, ip);
+ __ cmp(subject, ip);
Label termination_exception;
__ b(eq, &termination_exception);
- __ Throw(r0); // Expects thrown value in r0.
+ __ Throw(subject); // Expects thrown value in r0.
__ bind(&termination_exception);
__ ThrowUncatchable(TERMINATION, r0); // Expects thrown value in r0.
@@ -4775,6 +4826,7 @@
Label flat_string;
Label ascii_string;
Label got_char_code;
+ Label sliced_string;
// If the receiver is a smi trigger the non-string case.
__ JumpIfSmi(object_, receiver_not_string_);
@@ -4804,7 +4856,11 @@
__ b(eq, &flat_string);
// Handle non-flat strings.
- __ tst(result_, Operand(kIsConsStringMask));
+ __ and_(result_, result_, Operand(kStringRepresentationMask));
+ STATIC_ASSERT(kConsStringTag < kExternalStringTag);
+ STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
+ __ cmp(result_, Operand(kExternalStringTag));
+ __ b(gt, &sliced_string);
__ b(eq, &call_runtime_);
// ConsString.
@@ -4812,15 +4868,26 @@
// this is really a flat string in a cons string). If that is not
// the case we would rather go to the runtime system now to flatten
// the string.
+ Label assure_seq_string;
__ ldr(result_, FieldMemOperand(object_, ConsString::kSecondOffset));
__ LoadRoot(ip, Heap::kEmptyStringRootIndex);
__ cmp(result_, Operand(ip));
__ b(ne, &call_runtime_);
// Get the first of the two strings and load its instance type.
__ ldr(object_, FieldMemOperand(object_, ConsString::kFirstOffset));
+ __ jmp(&assure_seq_string);
+
+ // SlicedString, unpack and add offset.
+ __ bind(&sliced_string);
+ __ ldr(result_, FieldMemOperand(object_, SlicedString::kOffsetOffset));
+ __ add(scratch_, scratch_, result_);
+ __ ldr(object_, FieldMemOperand(object_, SlicedString::kParentOffset));
+
+ // Assure that we are dealing with a sequential string. Go to runtime if not.
+ __ bind(&assure_seq_string);
__ ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset));
__ ldrb(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset));
- // If the first cons component is also non-flat, then go to runtime.
+ // Check that parent is not an external string. Go to runtime otherwise.
STATIC_ASSERT(kSeqStringTag == 0);
__ tst(result_, Operand(kStringRepresentationMask));
__ b(ne, &call_runtime_);
@@ -5400,10 +5467,17 @@
// Check bounds and smi-ness.
Register to = r6;
Register from = r7;
+
+ if (FLAG_string_slices) {
+ __ nop(0); // Jumping as first instruction would crash the code generation.
+ __ jmp(&runtime);
+ }
+
__ Ldrd(to, from, MemOperand(sp, kToOffset));
STATIC_ASSERT(kFromOffset == kToOffset + 4);
STATIC_ASSERT(kSmiTag == 0);
STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
+
// I.e., arithmetic shift right by one un-smi-tags.
__ mov(r2, Operand(to, ASR, 1), SetCC);
__ mov(r3, Operand(from, ASR, 1), SetCC, cc);
@@ -5412,7 +5486,6 @@
__ b(mi, &runtime); // From is negative.
// Both to and from are smis.
-
__ sub(r2, r2, Operand(r3), SetCC);
__ b(mi, &runtime); // Fail if from > to.
// Special handling of sub-strings of length 1 and 2. One character strings
@@ -5667,7 +5740,7 @@
Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
__ add(left, left, Operand(scratch1));
__ add(right, right, Operand(scratch1));
- __ rsb(length, length, Operand(0));
+ __ rsb(length, length, Operand::Zero());
Register index = length; // index = -length;
// Compare loop.
@@ -6285,12 +6358,8 @@
void DirectCEntryStub::GenerateCall(MacroAssembler* masm,
ExternalReference function) {
- __ mov(lr, Operand(reinterpret_cast<intptr_t>(GetCode().location()),
- RelocInfo::CODE_TARGET));
__ mov(r2, Operand(function));
- // Push return address (accessible to GC through exit frame pc).
- __ str(pc, MemOperand(sp, 0));
- __ Jump(r2); // Call the api function.
+ GenerateCall(masm, r2);
}
@@ -6299,8 +6368,14 @@
__ mov(lr, Operand(reinterpret_cast<intptr_t>(GetCode().location()),
RelocInfo::CODE_TARGET));
// Push return address (accessible to GC through exit frame pc).
- __ str(pc, MemOperand(sp, 0));
+ // Note that using pc with str is deprecated.
+ Label start;
+ __ bind(&start);
+ __ add(ip, pc, Operand(Assembler::kInstrSize));
+ __ str(ip, MemOperand(sp, 0));
__ Jump(target); // Call the C++ function.
+ ASSERT_EQ(Assembler::kInstrSize + Assembler::kPcLoadDelta,
+ masm->SizeOfCodeGeneratedSince(&start));
}
@@ -6523,7 +6598,7 @@
// treated as a lookup success. For positive lookup probing failure
// should be treated as lookup failure.
if (mode_ == POSITIVE_LOOKUP) {
- __ mov(result, Operand(0));
+ __ mov(result, Operand::Zero());
__ Ret();
}
@@ -6532,7 +6607,7 @@
__ Ret();
__ bind(&not_in_dictionary);
- __ mov(result, Operand(0));
+ __ mov(result, Operand::Zero());
__ Ret();
}
diff --git a/src/arm/deoptimizer-arm.cc b/src/arm/deoptimizer-arm.cc
index 4b994e5..00357f7 100644
--- a/src/arm/deoptimizer-arm.cc
+++ b/src/arm/deoptimizer-arm.cc
@@ -35,7 +35,7 @@
namespace v8 {
namespace internal {
-int Deoptimizer::table_entry_size_ = 16;
+const int Deoptimizer::table_entry_size_ = 16;
int Deoptimizer::patch_size() {
@@ -533,8 +533,6 @@
output_frame->SetContinuation(
reinterpret_cast<uint32_t>(continuation->entry()));
}
-
- if (output_count_ - 1 == frame_index) iterator->Done();
}
@@ -595,6 +593,8 @@
__ vstm(db_w, sp, first, last);
// Push all 16 registers (needed to populate FrameDescription::registers_).
+ // TODO(1588) Note that using pc with stm is deprecated, so we should perhaps
+ // handle this a bit differently.
__ stm(db_w, sp, restored_regs | sp.bit() | lr.bit() | pc.bit());
const int kSavedRegistersAreaSize =
diff --git a/src/arm/disasm-arm.cc b/src/arm/disasm-arm.cc
index d4bd81c..603b3cf 100644
--- a/src/arm/disasm-arm.cc
+++ b/src/arm/disasm-arm.cc
@@ -200,7 +200,7 @@
// These shift names are defined in a way to match the native disassembler
// formatting. See for example the command "objdump -d <binary file>".
-static const char* shift_names[kNumberOfShifts] = {
+static const char* const shift_names[kNumberOfShifts] = {
"lsl", "lsr", "asr", "ror"
};
diff --git a/src/arm/frames-arm.h b/src/arm/frames-arm.h
index 84e108b..26bbd82 100644
--- a/src/arm/frames-arm.h
+++ b/src/arm/frames-arm.h
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -93,10 +93,11 @@
class StackHandlerConstants : public AllStatic {
public:
- static const int kNextOffset = 0 * kPointerSize;
- static const int kStateOffset = 1 * kPointerSize;
- static const int kFPOffset = 2 * kPointerSize;
- static const int kPCOffset = 3 * kPointerSize;
+ static const int kNextOffset = 0 * kPointerSize;
+ static const int kStateOffset = 1 * kPointerSize;
+ static const int kContextOffset = 2 * kPointerSize;
+ static const int kFPOffset = 3 * kPointerSize;
+ static const int kPCOffset = 4 * kPointerSize;
static const int kSize = kPCOffset + kPointerSize;
};
diff --git a/src/arm/full-codegen-arm.cc b/src/arm/full-codegen-arm.cc
index c3440eb..1604883 100644
--- a/src/arm/full-codegen-arm.cc
+++ b/src/arm/full-codegen-arm.cc
@@ -47,7 +47,6 @@
static unsigned GetPropertyId(Property* property) {
- if (property->is_synthetic()) return AstNode::kNoNumber;
return property->id();
}
@@ -694,104 +693,73 @@
Comment cmnt(masm_, "[ Declaration");
ASSERT(variable != NULL); // Must have been resolved.
Slot* slot = variable->AsSlot();
- Property* prop = variable->AsProperty();
-
- if (slot != NULL) {
- switch (slot->type()) {
- case Slot::PARAMETER:
- case Slot::LOCAL:
- if (mode == Variable::CONST) {
- __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
- __ str(ip, MemOperand(fp, SlotOffset(slot)));
- } else if (function != NULL) {
- VisitForAccumulatorValue(function);
- __ str(result_register(), MemOperand(fp, SlotOffset(slot)));
- }
- break;
-
- case Slot::CONTEXT:
- // We bypass the general EmitSlotSearch because we know more about
- // this specific context.
-
- // The variable in the decl always resides in the current function
- // context.
- ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
- if (FLAG_debug_code) {
- // Check that we're not inside a with or catch context.
- __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
- __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
- __ Check(ne, "Declaration in with context.");
- __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
- __ Check(ne, "Declaration in catch context.");
- }
- if (mode == Variable::CONST) {
- __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
- __ str(ip, ContextOperand(cp, slot->index()));
- // No write barrier since the_hole_value is in old space.
- } else if (function != NULL) {
- VisitForAccumulatorValue(function);
- __ str(result_register(), ContextOperand(cp, slot->index()));
- int offset = Context::SlotOffset(slot->index());
- // We know that we have written a function, which is not a smi.
- __ mov(r1, Operand(cp));
- __ RecordWrite(r1, Operand(offset), r2, result_register());
- }
- break;
-
- case Slot::LOOKUP: {
- __ mov(r2, Operand(variable->name()));
- // Declaration nodes are always introduced in one of two modes.
- ASSERT(mode == Variable::VAR ||
- mode == Variable::CONST);
- PropertyAttributes attr =
- (mode == Variable::VAR) ? NONE : READ_ONLY;
- __ mov(r1, Operand(Smi::FromInt(attr)));
- // Push initial value, if any.
- // Note: For variables we must not push an initial value (such as
- // 'undefined') because we may have a (legal) redeclaration and we
- // must not destroy the current value.
- if (mode == Variable::CONST) {
- __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
- __ Push(cp, r2, r1, r0);
- } else if (function != NULL) {
- __ Push(cp, r2, r1);
- // Push initial value for function declaration.
- VisitForStackValue(function);
- } else {
- __ mov(r0, Operand(Smi::FromInt(0))); // No initial value!
- __ Push(cp, r2, r1, r0);
- }
- __ CallRuntime(Runtime::kDeclareContextSlot, 4);
- break;
+ ASSERT(slot != NULL);
+ switch (slot->type()) {
+ case Slot::PARAMETER:
+ case Slot::LOCAL:
+ if (function != NULL) {
+ VisitForAccumulatorValue(function);
+ __ str(result_register(), MemOperand(fp, SlotOffset(slot)));
+ } else if (mode == Variable::CONST || mode == Variable::LET) {
+ __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
+ __ str(ip, MemOperand(fp, SlotOffset(slot)));
}
- }
+ break;
- } else if (prop != NULL) {
- // A const declaration aliasing a parameter is an illegal redeclaration.
- ASSERT(mode != Variable::CONST);
- if (function != NULL) {
- // We are declaring a function that rewrites to a property.
- // Use (keyed) IC to set the initial value. We cannot visit the
- // rewrite because it's shared and we risk recording duplicate AST
- // IDs for bailouts from optimized code.
- ASSERT(prop->obj()->AsVariableProxy() != NULL);
- { AccumulatorValueContext for_object(this);
- EmitVariableLoad(prop->obj()->AsVariableProxy());
+ case Slot::CONTEXT:
+ // We bypass the general EmitSlotSearch because we know more about
+ // this specific context.
+
+ // The variable in the decl always resides in the current function
+ // context.
+ ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
+ if (FLAG_debug_code) {
+ // Check that we're not inside a with or catch context.
+ __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
+ __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
+ __ Check(ne, "Declaration in with context.");
+ __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
+ __ Check(ne, "Declaration in catch context.");
}
+ if (function != NULL) {
+ VisitForAccumulatorValue(function);
+ __ str(result_register(), ContextOperand(cp, slot->index()));
+ int offset = Context::SlotOffset(slot->index());
+ // We know that we have written a function, which is not a smi.
+ __ mov(r1, Operand(cp));
+ __ RecordWrite(r1, Operand(offset), r2, result_register());
+ } else if (mode == Variable::CONST || mode == Variable::LET) {
+ __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
+ __ str(ip, ContextOperand(cp, slot->index()));
+ // No write barrier since the_hole_value is in old space.
+ }
+ break;
- __ push(r0);
- VisitForAccumulatorValue(function);
- __ pop(r2);
-
- ASSERT(prop->key()->AsLiteral() != NULL &&
- prop->key()->AsLiteral()->handle()->IsSmi());
- __ mov(r1, Operand(prop->key()->AsLiteral()->handle()));
-
- Handle<Code> ic = is_strict_mode()
- ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
- : isolate()->builtins()->KeyedStoreIC_Initialize();
- __ Call(ic);
- // Value in r0 is ignored (declarations are statements).
+ case Slot::LOOKUP: {
+ __ mov(r2, Operand(variable->name()));
+ // Declaration nodes are always introduced in one of two modes.
+ ASSERT(mode == Variable::VAR ||
+ mode == Variable::CONST ||
+ mode == Variable::LET);
+ PropertyAttributes attr = (mode == Variable::CONST) ? READ_ONLY : NONE;
+ __ mov(r1, Operand(Smi::FromInt(attr)));
+ // Push initial value, if any.
+ // Note: For variables we must not push an initial value (such as
+ // 'undefined') because we may have a (legal) redeclaration and we
+ // must not destroy the current value.
+ if (function != NULL) {
+ __ Push(cp, r2, r1);
+ // Push initial value for function declaration.
+ VisitForStackValue(function);
+ } else if (mode == Variable::CONST || mode == Variable::LET) {
+ __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
+ __ Push(cp, r2, r1, r0);
+ } else {
+ __ mov(r0, Operand(Smi::FromInt(0))); // No initial value!
+ __ Push(cp, r2, r1, r0);
+ }
+ __ CallRuntime(Runtime::kDeclareContextSlot, 4);
+ break;
}
}
}
@@ -878,7 +846,7 @@
__ bind(&next_test);
__ Drop(1); // Switch value is no longer needed.
if (default_clause == NULL) {
- __ b(nested_statement.break_target());
+ __ b(nested_statement.break_label());
} else {
__ b(default_clause->body_target());
}
@@ -892,7 +860,7 @@
VisitStatements(clause->statements());
}
- __ bind(nested_statement.break_target());
+ __ bind(nested_statement.break_label());
PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}
@@ -1023,7 +991,7 @@
// Load the current count to r0, load the length to r1.
__ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize));
__ cmp(r0, r1); // Compare to the array length.
- __ b(hs, loop_statement.break_target());
+ __ b(hs, loop_statement.break_label());
// Get the current entry of the array into register r3.
__ ldr(r2, MemOperand(sp, 2 * kPointerSize));
@@ -1049,7 +1017,7 @@
__ push(r3); // Current entry.
__ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
__ mov(r3, Operand(r0), SetCC);
- __ b(eq, loop_statement.continue_target());
+ __ b(eq, loop_statement.continue_label());
// Update the 'each' property or variable from the possibly filtered
// entry in register r3.
@@ -1065,7 +1033,7 @@
// Generate code for the going to the next element by incrementing
// the index (smi) stored on top of the stack.
- __ bind(loop_statement.continue_target());
+ __ bind(loop_statement.continue_label());
__ pop(r0);
__ add(r0, r0, Operand(Smi::FromInt(1)));
__ push(r0);
@@ -1074,7 +1042,7 @@
__ b(&loop);
// Remove the pointers stored on the stack.
- __ bind(loop_statement.break_target());
+ __ bind(loop_statement.break_label());
__ Drop(5);
// Exit and decrement the loop depth.
@@ -1311,6 +1279,20 @@
__ cmp(r0, ip);
__ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
context()->Plug(r0);
+ } else if (var->mode() == Variable::LET) {
+ // Let bindings may be the hole value if they have not been initialized.
+ // Throw a type error in this case.
+ Label done;
+ MemOperand slot_operand = EmitSlotSearch(slot, r0);
+ __ ldr(r0, slot_operand);
+ __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
+ __ cmp(r0, ip);
+ __ b(ne, &done);
+ __ mov(r0, Operand(var->name()));
+ __ push(r0);
+ __ CallRuntime(Runtime::kThrowReferenceError, 1);
+ __ bind(&done);
+ context()->Plug(r0);
} else {
context()->Plug(slot);
}
@@ -1891,6 +1873,59 @@
}
__ bind(&skip);
+ } else if (var->mode() == Variable::LET && op != Token::INIT_LET) {
+ // Perform the assignment for non-const variables. Const assignments
+ // are simply skipped.
+ Slot* slot = var->AsSlot();
+ switch (slot->type()) {
+ case Slot::PARAMETER:
+ case Slot::LOCAL: {
+ Label assign;
+ // Check for an initialized let binding.
+ __ ldr(r1, MemOperand(fp, SlotOffset(slot)));
+ __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
+ __ cmp(r1, ip);
+ __ b(ne, &assign);
+ __ mov(r1, Operand(var->name()));
+ __ push(r1);
+ __ CallRuntime(Runtime::kThrowReferenceError, 1);
+ // Perform the assignment.
+ __ bind(&assign);
+ __ str(result_register(), MemOperand(fp, SlotOffset(slot)));
+ break;
+ }
+ case Slot::CONTEXT: {
+ // Let variables may be the hole value if they have not been
+ // initialized. Throw a type error in this case.
+ Label assign;
+ MemOperand target = EmitSlotSearch(slot, r1);
+ // Check for an initialized let binding.
+ __ ldr(r3, target);
+ __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
+ __ cmp(r3, ip);
+ __ b(ne, &assign);
+ __ mov(r3, Operand(var->name()));
+ __ push(r3);
+ __ CallRuntime(Runtime::kThrowReferenceError, 1);
+ // Perform the assignment.
+ __ bind(&assign);
+ __ str(result_register(), target);
+ // RecordWrite may destroy all its register arguments.
+ __ mov(r3, result_register());
+ int offset = Context::SlotOffset(slot->index());
+ __ RecordWrite(r1, Operand(offset), r2, r3);
+ break;
+ }
+ case Slot::LOOKUP:
+ // Call the runtime for the assignment.
+ __ push(r0); // Value.
+ __ mov(r1, Operand(slot->var()->name()));
+ __ mov(r0, Operand(Smi::FromInt(strict_mode_flag())));
+ __ Push(cp, r1, r0); // Context, name, strict mode.
+ __ CallRuntime(Runtime::kStoreContextSlot, 4);
+ break;
+ }
+
} else if (var->mode() != Variable::CONST) {
// Perform the assignment for non-const variables. Const assignments
// are simply skipped.
@@ -2272,36 +2307,10 @@
EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET);
} else {
// Call to a keyed property.
- // For a synthetic property use keyed load IC followed by function call,
- // for a regular property use EmitKeyedCallWithIC.
- if (prop->is_synthetic()) {
- // Do not visit the object and key subexpressions (they are shared
- // by all occurrences of the same rewritten parameter).
- ASSERT(prop->obj()->AsVariableProxy() != NULL);
- ASSERT(prop->obj()->AsVariableProxy()->var()->AsSlot() != NULL);
- Slot* slot = prop->obj()->AsVariableProxy()->var()->AsSlot();
- MemOperand operand = EmitSlotSearch(slot, r1);
- __ ldr(r1, operand);
-
- ASSERT(prop->key()->AsLiteral() != NULL);
- ASSERT(prop->key()->AsLiteral()->handle()->IsSmi());
- __ mov(r0, Operand(prop->key()->AsLiteral()->handle()));
-
- // Record source code position for IC call.
- SetSourcePosition(prop->position());
-
- Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
- __ Call(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
- __ ldr(r1, GlobalObjectOperand());
- __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
- __ Push(r0, r1); // Function, receiver.
- EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
- } else {
- { PreservePositionScope scope(masm()->positions_recorder());
- VisitForStackValue(prop->obj());
- }
- EmitKeyedCallWithIC(expr, prop->key());
+ { PreservePositionScope scope(masm()->positions_recorder());
+ VisitForStackValue(prop->obj());
}
+ EmitKeyedCallWithIC(expr, prop->key());
}
} else {
{ PreservePositionScope scope(masm()->positions_recorder());
@@ -2753,7 +2762,7 @@
// Objects with a non-function constructor have class 'Object'.
__ bind(&non_function_constructor);
- __ LoadRoot(r0, Heap::kfunction_class_symbolRootIndex);
+ __ LoadRoot(r0, Heap::kObject_symbolRootIndex);
__ jmp(&done);
// Non-JS objects have class null.
@@ -3252,7 +3261,7 @@
Label done, not_found;
// tmp now holds finger offset as a smi.
- ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
+ STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
__ ldr(r2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
// r2 now holds finger offset as a smi.
__ add(r3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
@@ -3580,39 +3589,6 @@
}
-void FullCodeGenerator::EmitIsNativeOrStrictMode(ZoneList<Expression*>* args) {
- ASSERT(args->length() == 1);
-
- // Load the function into r0.
- VisitForAccumulatorValue(args->at(0));
-
- // Prepare for the test.
- Label materialize_true, materialize_false;
- Label* if_true = NULL;
- Label* if_false = NULL;
- Label* fall_through = NULL;
- context()->PrepareTest(&materialize_true, &materialize_false,
- &if_true, &if_false, &fall_through);
-
- // Test for strict mode function.
- __ ldr(r1, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
- __ ldr(r1, FieldMemOperand(r1, SharedFunctionInfo::kCompilerHintsOffset));
- __ tst(r1, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
- kSmiTagSize)));
- __ b(ne, if_true);
-
- // Test for native function.
- __ tst(r1, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
- __ b(ne, if_true);
-
- // Not native or strict-mode function.
- __ b(if_false);
-
- PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
- context()->Plug(if_true, if_false);
-}
-
-
void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
Handle<String> name = expr->name();
if (name->length() > 0 && name->Get(0) == '_') {
@@ -3664,18 +3640,12 @@
Variable* var = expr->expression()->AsVariableProxy()->AsVariable();
if (prop != NULL) {
- if (prop->is_synthetic()) {
- // Result of deleting parameters is false, even when they rewrite
- // to accesses on the arguments object.
- context()->Plug(false);
- } else {
- VisitForStackValue(prop->obj());
- VisitForStackValue(prop->key());
- __ mov(r1, Operand(Smi::FromInt(strict_mode_flag())));
- __ push(r1);
- __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
- context()->Plug(r0);
- }
+ VisitForStackValue(prop->obj());
+ VisitForStackValue(prop->key());
+ __ mov(r1, Operand(Smi::FromInt(strict_mode_flag())));
+ __ push(r1);
+ __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
+ context()->Plug(r0);
} else if (var != NULL) {
// Delete of an unqualified identifier is disallowed in strict mode
// but "delete this" is.
@@ -4030,6 +4000,10 @@
__ b(eq, if_true);
__ CompareRoot(r0, Heap::kFalseValueRootIndex);
Split(eq, if_true, if_false, fall_through);
+ } else if (FLAG_harmony_typeof &&
+ check->Equals(isolate()->heap()->null_symbol())) {
+ __ CompareRoot(r0, Heap::kNullValueRootIndex);
+ Split(eq, if_true, if_false, fall_through);
} else if (check->Equals(isolate()->heap()->undefined_symbol())) {
__ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
__ b(eq, if_true);
@@ -4047,8 +4021,10 @@
} else if (check->Equals(isolate()->heap()->object_symbol())) {
__ JumpIfSmi(r0, if_false);
- __ CompareRoot(r0, Heap::kNullValueRootIndex);
- __ b(eq, if_true);
+ if (!FLAG_harmony_typeof) {
+ __ CompareRoot(r0, Heap::kNullValueRootIndex);
+ __ b(eq, if_true);
+ }
// Check for JS objects => true.
__ CompareObjectType(r0, r0, r1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
__ b(lt, if_false);
@@ -4123,11 +4099,8 @@
default: {
VisitForAccumulatorValue(expr->right());
Condition cond = eq;
- bool strict = false;
switch (op) {
case Token::EQ_STRICT:
- strict = true;
- // Fall through
case Token::EQ:
cond = eq;
__ pop(r1);
@@ -4276,7 +4249,7 @@
// Cook return address in link register to stack (smi encoded Code* delta)
__ sub(r1, lr, Operand(masm_->CodeObject()));
ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
- ASSERT_EQ(0, kSmiTag);
+ STATIC_ASSERT(kSmiTag == 0);
__ add(r1, r1, Operand(r1)); // Convert to smi.
__ push(r1);
}
@@ -4296,6 +4269,34 @@
#undef __
+#define __ ACCESS_MASM(masm())
+
+FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
+ int* stack_depth,
+ int* context_length) {
+ // The macros used here must preserve the result register.
+
+ // Because the handler block contains the context of the finally
+ // code, we can restore it directly from there for the finally code
+ // rather than iteratively unwinding contexts via their previous
+ // links.
+ __ Drop(*stack_depth); // Down to the handler block.
+ if (*context_length > 0) {
+ // Restore the context to its dedicated register and the stack.
+ __ ldr(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
+ __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
+ }
+ __ PopTryHandler();
+ __ bl(finally_entry_);
+
+ *stack_depth = 0;
+ *context_length = 0;
+ return previous_;
+}
+
+
+#undef __
+
} } // namespace v8::internal
#endif // V8_TARGET_ARCH_ARM
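
The full codegen changes above give 'let' bindings a hole check: a load or a non-initializing assignment that still sees the hole value calls Runtime::kThrowReferenceError instead of exposing it, while the INIT_LET store skips the check. A minimal plain-C++ sketch of that pattern (LetSlot and the int payload are invented for illustration; this is not V8 code):

#include <optional>
#include <stdexcept>
#include <string>
#include <utility>

class LetSlot {
 public:
  explicit LetSlot(std::string name) : name_(std::move(name)) {}

  // VariableProxy load: an uninitialized binding still holds the hole.
  int Load() const {
    if (!value_.has_value()) ThrowReferenceError();
    return *value_;
  }

  // Ordinary assignment (op != Token::INIT_LET) is also hole-checked.
  void Assign(int v) {
    if (!value_.has_value()) ThrowReferenceError();
    value_ = v;
  }

  // The declaration's own initializing store is the only way past the hole.
  void Init(int v) { value_ = v; }

 private:
  [[noreturn]] void ThrowReferenceError() const {
    throw std::runtime_error("ReferenceError: " + name_);
  }

  std::optional<int> value_;  // std::nullopt stands in for "the hole"
  std::string name_;
};
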
diff --git a/src/arm/ic-arm.cc b/src/arm/ic-arm.cc
index 6038153..6bad5ac 100644
--- a/src/arm/ic-arm.cc
+++ b/src/arm/ic-arm.cc
@@ -337,7 +337,7 @@
// Fast case: Do the load.
__ add(scratch1, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
// The key is a smi.
- ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
+ STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
__ ldr(scratch2,
MemOperand(scratch1, key, LSL, kPointerSizeLog2 - kSmiTagSize));
__ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
@@ -370,7 +370,7 @@
// Is the string a symbol?
// map: key map
__ ldrb(hash, FieldMemOperand(map, Map::kInstanceTypeOffset));
- ASSERT(kSymbolTag != 0);
+ STATIC_ASSERT(kSymbolTag != 0);
__ tst(hash, Operand(kIsSymbolMask));
__ b(eq, not_symbol);
}
@@ -1333,7 +1333,7 @@
__ cmp(key, Operand(ip));
__ b(hs, &slow);
// Calculate key + 1 as smi.
- ASSERT_EQ(0, kSmiTag);
+ STATIC_ASSERT(kSmiTag == 0);
__ add(r4, key, Operand(Smi::FromInt(1)));
__ str(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));
__ b(&fast);
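
Several of the hunks above (and more below) turn ASSERTs on tag-layout constants into STATIC_ASSERTs. A tiny sketch of the distinction in standard C++ (V8's own macros predate static_assert and are defined differently; the constant values here are illustrative):

#include <cassert>

constexpr int kSmiTag = 0;
constexpr int kSmiTagSize = 1;
constexpr int kPointerSizeLog2 = 2;  // 32-bit target assumed for illustration

// Verified once, at compile time, in every build flavor.
static_assert(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2,
              "smi layout assumed by the keyed-load fast path");

// Verified at run time, and only in debug builds -- what the old ASSERTs did.
void CheckKeyIsSmi(int key) {
  assert((key & ((1 << kSmiTagSize) - 1)) == kSmiTag);
}
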
diff --git a/src/arm/lithium-arm.cc b/src/arm/lithium-arm.cc
index b96805e..6292ff8 100644
--- a/src/arm/lithium-arm.cc
+++ b/src/arm/lithium-arm.cc
@@ -1043,7 +1043,7 @@
: instr->SecondSuccessor();
return new LGoto(successor->block_id());
}
- return new LBranch(UseRegisterAtStart(v));
+ return AssignEnvironment(new LBranch(UseRegister(v)));
}
@@ -1403,7 +1403,6 @@
LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) {
Token::Value op = instr->token();
- Representation r = instr->GetInputRepresentation();
ASSERT(instr->left()->representation().IsTagged());
ASSERT(instr->right()->representation().IsTagged());
bool reversed = (op == Token::GT || op == Token::LTE);
@@ -1513,16 +1512,10 @@
}
-LInstruction* LChunkBuilder::DoExternalArrayLength(
- HExternalArrayLength* instr) {
+LInstruction* LChunkBuilder::DoFixedArrayBaseLength(
+ HFixedArrayBaseLength* instr) {
LOperand* array = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LExternalArrayLength(array));
-}
-
-
-LInstruction* LChunkBuilder::DoFixedArrayLength(HFixedArrayLength* instr) {
- LOperand* array = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LFixedArrayLength(array));
+ return DefineAsRegister(new LFixedArrayBaseLength(array));
}
@@ -2010,8 +2003,8 @@
LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) {
- LOperand* string = UseRegister(instr->string());
- LOperand* index = UseRegisterOrConstant(instr->index());
+ LOperand* string = UseTempRegister(instr->string());
+ LOperand* index = UseTempRegister(instr->index());
LStringCharCodeAt* result = new LStringCharCodeAt(string, index);
return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
}
diff --git a/src/arm/lithium-arm.h b/src/arm/lithium-arm.h
index b477e99..e14e6fc 100644
--- a/src/arm/lithium-arm.h
+++ b/src/arm/lithium-arm.h
@@ -92,8 +92,7 @@
V(DivI) \
V(DoubleToI) \
V(ElementsKind) \
- V(ExternalArrayLength) \
- V(FixedArrayLength) \
+ V(FixedArrayBaseLength) \
V(FunctionLiteral) \
V(GetCachedArrayIndex) \
V(GlobalObject) \
@@ -915,25 +914,15 @@
};
-class LExternalArrayLength: public LTemplateInstruction<1, 1, 0> {
+class LFixedArrayBaseLength: public LTemplateInstruction<1, 1, 0> {
public:
- explicit LExternalArrayLength(LOperand* value) {
+ explicit LFixedArrayBaseLength(LOperand* value) {
inputs_[0] = value;
}
- DECLARE_CONCRETE_INSTRUCTION(ExternalArrayLength, "external-array-length")
- DECLARE_HYDROGEN_ACCESSOR(ExternalArrayLength)
-};
-
-
-class LFixedArrayLength: public LTemplateInstruction<1, 1, 0> {
- public:
- explicit LFixedArrayLength(LOperand* value) {
- inputs_[0] = value;
- }
-
- DECLARE_CONCRETE_INSTRUCTION(FixedArrayLength, "fixed-array-length")
- DECLARE_HYDROGEN_ACCESSOR(FixedArrayLength)
+ DECLARE_CONCRETE_INSTRUCTION(FixedArrayBaseLength,
+ "fixed-array-base-length")
+ DECLARE_HYDROGEN_ACCESSOR(FixedArrayBaseLength)
};
diff --git a/src/arm/lithium-codegen-arm.cc b/src/arm/lithium-codegen-arm.cc
index ad8091b..976576b 100644
--- a/src/arm/lithium-codegen-arm.cc
+++ b/src/arm/lithium-codegen-arm.cc
@@ -1378,17 +1378,10 @@
}
-void LCodeGen::DoExternalArrayLength(LExternalArrayLength* instr) {
+void LCodeGen::DoFixedArrayBaseLength(LFixedArrayBaseLength* instr) {
Register result = ToRegister(instr->result());
Register array = ToRegister(instr->InputAt(0));
- __ ldr(result, FieldMemOperand(array, ExternalArray::kLengthOffset));
-}
-
-
-void LCodeGen::DoFixedArrayLength(LFixedArrayLength* instr) {
- Register result = ToRegister(instr->result());
- Register array = ToRegister(instr->InputAt(0));
- __ ldr(result, FieldMemOperand(array, FixedArray::kLengthOffset));
+ __ ldr(result, FieldMemOperand(array, FixedArrayBase::kLengthOffset));
}
@@ -1564,52 +1557,96 @@
} else {
ASSERT(r.IsTagged());
Register reg = ToRegister(instr->InputAt(0));
- if (instr->hydrogen()->value()->type().IsBoolean()) {
- __ LoadRoot(ip, Heap::kTrueValueRootIndex);
- __ cmp(reg, ip);
+ HType type = instr->hydrogen()->value()->type();
+ if (type.IsBoolean()) {
+ __ CompareRoot(reg, Heap::kTrueValueRootIndex);
EmitBranch(true_block, false_block, eq);
+ } else if (type.IsSmi()) {
+ __ cmp(reg, Operand(0));
+ EmitBranch(true_block, false_block, ne);
} else {
Label* true_label = chunk_->GetAssemblyLabel(true_block);
Label* false_label = chunk_->GetAssemblyLabel(false_block);
- __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
- __ cmp(reg, ip);
- __ b(eq, false_label);
- __ LoadRoot(ip, Heap::kTrueValueRootIndex);
- __ cmp(reg, ip);
- __ b(eq, true_label);
- __ LoadRoot(ip, Heap::kFalseValueRootIndex);
- __ cmp(reg, ip);
- __ b(eq, false_label);
- __ cmp(reg, Operand(0));
- __ b(eq, false_label);
- __ JumpIfSmi(reg, true_label);
+ ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types();
+ // Avoid deopts in the case where we've never executed this path before.
+ if (expected.IsEmpty()) expected = ToBooleanStub::all_types();
- // Test double values. Zero and NaN are false.
- Label call_stub;
- DoubleRegister dbl_scratch = double_scratch0();
- Register scratch = scratch0();
- __ ldr(scratch, FieldMemOperand(reg, HeapObject::kMapOffset));
- __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
- __ cmp(scratch, Operand(ip));
- __ b(ne, &call_stub);
- __ sub(ip, reg, Operand(kHeapObjectTag));
- __ vldr(dbl_scratch, ip, HeapNumber::kValueOffset);
- __ VFPCompareAndLoadFlags(dbl_scratch, 0.0, scratch);
- __ tst(scratch, Operand(kVFPZConditionFlagBit | kVFPVConditionFlagBit));
- __ b(ne, false_label);
- __ b(true_label);
+ if (expected.Contains(ToBooleanStub::UNDEFINED)) {
+ // undefined -> false.
+ __ CompareRoot(reg, Heap::kUndefinedValueRootIndex);
+ __ b(eq, false_label);
+ }
+ if (expected.Contains(ToBooleanStub::BOOLEAN)) {
+ // Boolean -> its value.
+ __ CompareRoot(reg, Heap::kTrueValueRootIndex);
+ __ b(eq, true_label);
+ __ CompareRoot(reg, Heap::kFalseValueRootIndex);
+ __ b(eq, false_label);
+ }
+ if (expected.Contains(ToBooleanStub::NULL_TYPE)) {
+ // 'null' -> false.
+ __ CompareRoot(reg, Heap::kNullValueRootIndex);
+ __ b(eq, false_label);
+ }
- // The conversion stub doesn't cause garbage collections so it's
- // safe to not record a safepoint after the call.
- __ bind(&call_stub);
- ToBooleanStub stub(reg);
- RegList saved_regs = kJSCallerSaved | kCalleeSaved;
- __ stm(db_w, sp, saved_regs);
- __ CallStub(&stub);
- __ cmp(reg, Operand(0));
- __ ldm(ia_w, sp, saved_regs);
- EmitBranch(true_block, false_block, ne);
+ if (expected.Contains(ToBooleanStub::SMI)) {
+ // Smis: 0 -> false, all other -> true.
+ __ cmp(reg, Operand(0));
+ __ b(eq, false_label);
+ __ JumpIfSmi(reg, true_label);
+ } else if (expected.NeedsMap()) {
+ // If we need a map later and have a Smi -> deopt.
+ __ tst(reg, Operand(kSmiTagMask));
+ DeoptimizeIf(eq, instr->environment());
+ }
+
+ const Register map = scratch0();
+ if (expected.NeedsMap()) {
+ __ ldr(map, FieldMemOperand(reg, HeapObject::kMapOffset));
+
+ if (expected.CanBeUndetectable()) {
+ // Undetectable -> false.
+ __ ldrb(ip, FieldMemOperand(map, Map::kBitFieldOffset));
+ __ tst(ip, Operand(1 << Map::kIsUndetectable));
+ __ b(ne, false_label);
+ }
+ }
+
+ if (expected.Contains(ToBooleanStub::SPEC_OBJECT)) {
+ // spec object -> true.
+ __ CompareInstanceType(map, ip, FIRST_SPEC_OBJECT_TYPE);
+ __ b(ge, true_label);
+ }
+
+ if (expected.Contains(ToBooleanStub::STRING)) {
+ // String value -> false iff empty.
+ Label not_string;
+ __ CompareInstanceType(map, ip, FIRST_NONSTRING_TYPE);
+ __ b(ge, &not_string);
+ __ ldr(ip, FieldMemOperand(reg, String::kLengthOffset));
+ __ cmp(ip, Operand(0));
+ __ b(ne, true_label);
+ __ b(false_label);
+ __ bind(&not_string);
+ }
+
+ if (expected.Contains(ToBooleanStub::HEAP_NUMBER)) {
+ // heap number -> false iff +0, -0, or NaN.
+ DoubleRegister dbl_scratch = double_scratch0();
+ Label not_heap_number;
+ __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
+ __ b(ne, &not_heap_number);
+ __ vldr(dbl_scratch, FieldMemOperand(reg, HeapNumber::kValueOffset));
+ __ VFPCompareAndSetFlags(dbl_scratch, 0.0);
+ __ b(vs, false_label); // NaN -> false.
+ __ b(eq, false_label); // +0, -0 -> false.
+ __ b(true_label);
+ __ bind(&not_heap_number);
+ }
+
+ // We've seen something for the first time -> deopt.
+ DeoptimizeIf(al, instr->environment());
}
}
}
@@ -1767,7 +1804,6 @@
void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
Register reg = ToRegister(instr->InputAt(0));
Register temp1 = ToRegister(instr->TempAt(0));
- Register temp2 = scratch0();
int true_block = chunk_->LookupDestination(instr->true_block_id());
int false_block = chunk_->LookupDestination(instr->false_block_id());
@@ -2722,7 +2758,6 @@
void LCodeGen::DoGlobalObject(LGlobalObject* instr) {
- Register context = ToRegister(instr->context());
Register result = ToRegister(instr->result());
__ ldr(result, ContextOperand(cp, Context::GLOBAL_INDEX));
}
@@ -2928,19 +2963,18 @@
void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
DoubleRegister input = ToDoubleRegister(instr->InputAt(0));
Register result = ToRegister(instr->result());
- Register scratch1 = result;
- Register scratch2 = scratch0();
+ Register scratch = scratch0();
Label done, check_sign_on_zero;
// Extract exponent bits.
- __ vmov(scratch1, input.high());
- __ ubfx(scratch2,
- scratch1,
+ __ vmov(result, input.high());
+ __ ubfx(scratch,
+ result,
HeapNumber::kExponentShift,
HeapNumber::kExponentBits);
// If the number is in ]-0.5, +0.5[, the result is +/- 0.
- __ cmp(scratch2, Operand(HeapNumber::kExponentBias - 2));
+ __ cmp(scratch, Operand(HeapNumber::kExponentBias - 2));
__ mov(result, Operand(0), LeaveCC, le);
if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
__ b(le, &check_sign_on_zero);
@@ -2950,19 +2984,19 @@
// The following conversion will not work with numbers
// outside of ]-2^32, 2^32[.
- __ cmp(scratch2, Operand(HeapNumber::kExponentBias + 32));
+ __ cmp(scratch, Operand(HeapNumber::kExponentBias + 32));
DeoptimizeIf(ge, instr->environment());
// Save the original sign for later comparison.
- __ and_(scratch2, scratch1, Operand(HeapNumber::kSignMask));
+ __ and_(scratch, result, Operand(HeapNumber::kSignMask));
__ Vmov(double_scratch0(), 0.5);
__ vadd(input, input, double_scratch0());
// Check sign of the result: if the sign changed, the input
// value was in ]0.5, 0[ and the result should be -0.
- __ vmov(scratch1, input.high());
- __ eor(scratch1, scratch1, Operand(scratch2), SetCC);
+ __ vmov(result, input.high());
+ __ eor(result, result, Operand(scratch), SetCC);
if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
DeoptimizeIf(mi, instr->environment());
} else {
@@ -2973,8 +3007,8 @@
__ EmitVFPTruncate(kRoundToMinusInf,
double_scratch0().low(),
input,
- scratch1,
- scratch2);
+ result,
+ scratch);
DeoptimizeIf(ne, instr->environment());
__ vmov(result, double_scratch0().low());
@@ -2983,8 +3017,8 @@
__ cmp(result, Operand(0));
__ b(ne, &done);
__ bind(&check_sign_on_zero);
- __ vmov(scratch1, input.high());
- __ tst(scratch1, Operand(HeapNumber::kSignMask));
+ __ vmov(scratch, input.high());
+ __ tst(scratch, Operand(HeapNumber::kSignMask));
DeoptimizeIf(ne, instr->environment());
}
__ bind(&done);
@@ -3421,97 +3455,81 @@
LStringCharCodeAt* instr_;
};
- Register scratch = scratch0();
Register string = ToRegister(instr->string());
- Register index = no_reg;
- int const_index = -1;
- if (instr->index()->IsConstantOperand()) {
- const_index = ToInteger32(LConstantOperand::cast(instr->index()));
- STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
- if (!Smi::IsValid(const_index)) {
- // Guaranteed to be out of bounds because of the assert above.
- // So the bounds check that must dominate this instruction must
- // have deoptimized already.
- if (FLAG_debug_code) {
- __ Abort("StringCharCodeAt: out of bounds index.");
- }
- // No code needs to be generated.
- return;
- }
- } else {
- index = ToRegister(instr->index());
- }
+ Register index = ToRegister(instr->index());
Register result = ToRegister(instr->result());
DeferredStringCharCodeAt* deferred =
new DeferredStringCharCodeAt(this, instr);
- Label flat_string, ascii_string, done;
-
// Fetch the instance type of the receiver into result register.
__ ldr(result, FieldMemOperand(string, HeapObject::kMapOffset));
__ ldrb(result, FieldMemOperand(result, Map::kInstanceTypeOffset));
- // We need special handling for non-flat strings.
- STATIC_ASSERT(kSeqStringTag == 0);
- __ tst(result, Operand(kStringRepresentationMask));
- __ b(eq, &flat_string);
+ // We need special handling for indirect strings.
+ Label check_sequential;
+ __ tst(result, Operand(kIsIndirectStringMask));
+ __ b(eq, &check_sequential);
- // Handle non-flat strings.
- __ tst(result, Operand(kIsConsStringMask));
- __ b(eq, deferred->entry());
+ // Dispatch on the indirect string shape: slice or cons.
+ Label cons_string;
+ __ tst(result, Operand(kSlicedNotConsMask));
+ __ b(eq, &cons_string);
- // ConsString.
+ // Handle slices.
+ Label indirect_string_loaded;
+ __ ldr(result, FieldMemOperand(string, SlicedString::kOffsetOffset));
+ __ add(index, index, Operand(result, ASR, kSmiTagSize));
+ __ ldr(string, FieldMemOperand(string, SlicedString::kParentOffset));
+ __ jmp(&indirect_string_loaded);
+
+ // Handle conses.
// Check whether the right hand side is the empty string (i.e. if
// this is really a flat string in a cons string). If that is not
// the case we would rather go to the runtime system now to flatten
// the string.
- __ ldr(scratch, FieldMemOperand(string, ConsString::kSecondOffset));
+ __ bind(&cons_string);
+ __ ldr(result, FieldMemOperand(string, ConsString::kSecondOffset));
__ LoadRoot(ip, Heap::kEmptyStringRootIndex);
- __ cmp(scratch, ip);
+ __ cmp(result, ip);
__ b(ne, deferred->entry());
// Get the first of the two strings and load its instance type.
__ ldr(string, FieldMemOperand(string, ConsString::kFirstOffset));
+
+ __ bind(&indirect_string_loaded);
__ ldr(result, FieldMemOperand(string, HeapObject::kMapOffset));
__ ldrb(result, FieldMemOperand(result, Map::kInstanceTypeOffset));
- // If the first cons component is also non-flat, then go to runtime.
+
+ // Check whether the string is sequential. The only non-sequential
+ // shapes we support have just been unwrapped above.
+ __ bind(&check_sequential);
STATIC_ASSERT(kSeqStringTag == 0);
__ tst(result, Operand(kStringRepresentationMask));
__ b(ne, deferred->entry());
- // Check for 1-byte or 2-byte string.
- __ bind(&flat_string);
+ // Dispatch on the encoding: ASCII or two-byte.
+ Label ascii_string;
STATIC_ASSERT(kAsciiStringTag != 0);
__ tst(result, Operand(kStringEncodingMask));
__ b(ne, &ascii_string);
- // 2-byte string.
- // Load the 2-byte character code into the result register.
- STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
- if (instr->index()->IsConstantOperand()) {
- __ ldrh(result,
- FieldMemOperand(string,
- SeqTwoByteString::kHeaderSize + 2 * const_index));
- } else {
- __ add(scratch,
- string,
- Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
- __ ldrh(result, MemOperand(scratch, index, LSL, 1));
- }
+ // Two-byte string.
+ // Load the two-byte character code into the result register.
+ Label done;
+ __ add(result,
+ string,
+ Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
+ __ ldrh(result, MemOperand(result, index, LSL, 1));
__ jmp(&done);
// ASCII string.
// Load the byte into the result register.
__ bind(&ascii_string);
- if (instr->index()->IsConstantOperand()) {
- __ ldrb(result, FieldMemOperand(string,
- SeqAsciiString::kHeaderSize + const_index));
- } else {
- __ add(scratch,
- string,
- Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
- __ ldrb(result, MemOperand(scratch, index));
- }
+ __ add(result,
+ string,
+ Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
+ __ ldrb(result, MemOperand(result, index));
+
__ bind(&done);
__ bind(deferred->exit());
}
@@ -3739,7 +3757,7 @@
LOperand* input = instr->InputAt(0);
ASSERT(input->IsRegister() && input->Equals(instr->result()));
if (instr->needs_check()) {
- ASSERT(kHeapObjectTag == 1);
+ STATIC_ASSERT(kHeapObjectTag == 1);
// If the input is a HeapObject, SmiUntag will set the carry flag.
__ SmiUntag(ToRegister(input), SetCC);
DeoptimizeIf(cs, instr->environment());
@@ -3824,7 +3842,7 @@
// The input was optimistically untagged; revert it.
// The carry flag is set when we reach this deferred code as we just executed
// SmiUntag(heap_object, SetCC)
- ASSERT(kHeapObjectTag == 1);
+ STATIC_ASSERT(kHeapObjectTag == 1);
__ adc(input_reg, input_reg, Operand(input_reg));
// Heap number map check.
@@ -3929,7 +3947,6 @@
Register scratch1 = scratch0();
Register scratch2 = ToRegister(instr->TempAt(0));
DwVfpRegister double_input = ToDoubleRegister(instr->InputAt(0));
- DwVfpRegister double_scratch = double_scratch0();
SwVfpRegister single_scratch = double_scratch0().low();
Label done;
@@ -4070,7 +4087,7 @@
// conversions.
__ cmp(input_reg, Operand(factory()->undefined_value()));
DeoptimizeIf(ne, instr->environment());
- __ movt(input_reg, 0);
+ __ mov(result_reg, Operand(0));
__ jmp(&done);
// Heap number
@@ -4309,6 +4326,10 @@
__ CompareRoot(input, Heap::kFalseValueRootIndex);
final_branch_condition = eq;
+ } else if (FLAG_harmony_typeof && type_name->Equals(heap()->null_symbol())) {
+ __ CompareRoot(input, Heap::kNullValueRootIndex);
+ final_branch_condition = eq;
+
} else if (type_name->Equals(heap()->undefined_symbol())) {
__ CompareRoot(input, Heap::kUndefinedValueRootIndex);
__ b(eq, true_label);
@@ -4327,8 +4348,10 @@
} else if (type_name->Equals(heap()->object_symbol())) {
__ JumpIfSmi(input, false_label);
- __ CompareRoot(input, Heap::kNullValueRootIndex);
- __ b(eq, true_label);
+ if (!FLAG_harmony_typeof) {
+ __ CompareRoot(input, Heap::kNullValueRootIndex);
+ __ b(eq, true_label);
+ }
__ CompareObjectType(input, input, scratch,
FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
__ b(lt, false_label);
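
DoBranch above now drives its checks from ToBooleanStub::Types: only the value kinds recorded by earlier executions are tested inline, and an unexpected kind deoptimizes so the code can be regenerated with the larger set. A rough sketch of that dispatch with simplified stand-in kinds (not the real ToBooleanStub API; heap numbers, strings and spec objects are omitted):

#include <cstdint>

enum Kind : uint8_t {
  kUndefined = 1 << 0,
  kBoolean   = 1 << 1,
  kNull      = 1 << 2,
  kSmi       = 1 << 3,
};

struct Value {
  Kind kind;
  int payload;  // boolean or smi value, when applicable
};

// Writes the branch outcome to *result and returns true; returns false when
// the kind was never seen before, i.e. the generated code would deoptimize.
bool EmittedBranch(uint8_t expected, Value v, bool* result) {
  if ((expected & kUndefined) && v.kind == kUndefined) { *result = false; return true; }
  if ((expected & kNull)      && v.kind == kNull)      { *result = false; return true; }
  if ((expected & kBoolean)   && v.kind == kBoolean)   { *result = v.payload != 0; return true; }
  if ((expected & kSmi)       && v.kind == kSmi)       { *result = v.payload != 0; return true; }
  return false;  // DeoptimizeIf(al, ...) in the code above
}
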
diff --git a/src/arm/lithium-gap-resolver-arm.cc b/src/arm/lithium-gap-resolver-arm.cc
index 02608a6..26f60fa 100644
--- a/src/arm/lithium-gap-resolver-arm.cc
+++ b/src/arm/lithium-gap-resolver-arm.cc
@@ -254,7 +254,6 @@
} else {
ASSERT(destination->IsStackSlot());
ASSERT(!in_cycle_); // Constant moves happen after all cycles are gone.
- MemOperand destination_operand = cgen_->ToMemOperand(destination);
__ mov(kSavedValueRegister, source_operand);
__ str(kSavedValueRegister, cgen_->ToMemOperand(destination));
}
@@ -265,8 +264,7 @@
__ vmov(cgen_->ToDoubleRegister(destination), source_register);
} else {
ASSERT(destination->IsDoubleStackSlot());
- MemOperand destination_operand = cgen_->ToMemOperand(destination);
- __ vstr(source_register, destination_operand);
+ __ vstr(source_register, cgen_->ToMemOperand(destination));
}
} else if (source->IsDoubleStackSlot()) {
diff --git a/src/arm/macro-assembler-arm.cc b/src/arm/macro-assembler-arm.cc
index c34a579..88477bb 100644
--- a/src/arm/macro-assembler-arm.cc
+++ b/src/arm/macro-assembler-arm.cc
@@ -1102,7 +1102,13 @@
void MacroAssembler::PushTryHandler(CodeLocation try_location,
HandlerType type) {
// Adjust this code if not the case.
- ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kContextOffset == 2 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kFPOffset == 3 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize);
+
// The pc (return address) is passed in register lr.
if (try_location == IN_JAVASCRIPT) {
if (type == TRY_CATCH_HANDLER) {
@@ -1110,14 +1116,10 @@
} else {
mov(r3, Operand(StackHandler::TRY_FINALLY));
}
- ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize
- && StackHandlerConstants::kFPOffset == 2 * kPointerSize
- && StackHandlerConstants::kPCOffset == 3 * kPointerSize);
- stm(db_w, sp, r3.bit() | fp.bit() | lr.bit());
+ stm(db_w, sp, r3.bit() | cp.bit() | fp.bit() | lr.bit());
// Save the current handler as the next handler.
mov(r3, Operand(ExternalReference(Isolate::k_handler_address, isolate())));
ldr(r1, MemOperand(r3));
- ASSERT(StackHandlerConstants::kNextOffset == 0);
push(r1);
// Link this handler as the new current one.
str(sp, MemOperand(r3));
@@ -1127,16 +1129,13 @@
// The frame pointer does not point to a JS frame so we save NULL
// for fp. We expect the code throwing an exception to check fp
// before dereferencing it to restore the context.
- mov(ip, Operand(0, RelocInfo::NONE)); // To save a NULL frame pointer.
- mov(r6, Operand(StackHandler::ENTRY));
- ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize
- && StackHandlerConstants::kFPOffset == 2 * kPointerSize
- && StackHandlerConstants::kPCOffset == 3 * kPointerSize);
- stm(db_w, sp, r6.bit() | ip.bit() | lr.bit());
+ mov(r5, Operand(StackHandler::ENTRY)); // State.
+ mov(r6, Operand(Smi::FromInt(0))); // Indicates no context.
+ mov(r7, Operand(0, RelocInfo::NONE)); // NULL frame pointer.
+ stm(db_w, sp, r5.bit() | r6.bit() | r7.bit() | lr.bit());
// Save the current handler as the next handler.
mov(r7, Operand(ExternalReference(Isolate::k_handler_address, isolate())));
ldr(r6, MemOperand(r7));
- ASSERT(StackHandlerConstants::kNextOffset == 0);
push(r6);
// Link this handler as the new current one.
str(sp, MemOperand(r7));
@@ -1145,7 +1144,7 @@
void MacroAssembler::PopTryHandler() {
- ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
+ STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
pop(r1);
mov(ip, Operand(ExternalReference(Isolate::k_handler_address, isolate())));
add(sp, sp, Operand(StackHandlerConstants::kSize - kPointerSize));
@@ -1154,39 +1153,40 @@
void MacroAssembler::Throw(Register value) {
+ // Adjust this code if not the case.
+ STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kContextOffset == 2 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kFPOffset == 3 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize);
// r0 is expected to hold the exception.
if (!value.is(r0)) {
mov(r0, value);
}
- // Adjust this code if not the case.
- STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
-
// Drop the sp to the top of the handler.
mov(r3, Operand(ExternalReference(Isolate::k_handler_address, isolate())));
ldr(sp, MemOperand(r3));
- // Restore the next handler and frame pointer, discard handler state.
- STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
+ // Restore the next handler.
pop(r2);
str(r2, MemOperand(r3));
- STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize);
- ldm(ia_w, sp, r3.bit() | fp.bit()); // r3: discarded state.
- // Before returning we restore the context from the frame pointer if
- // not NULL. The frame pointer is NULL in the exception handler of a
- // JS entry frame.
- cmp(fp, Operand(0, RelocInfo::NONE));
- // Set cp to NULL if fp is NULL.
- mov(cp, Operand(0, RelocInfo::NONE), LeaveCC, eq);
- // Restore cp otherwise.
- ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne);
+ // Restore context and frame pointer, discard state (r3).
+ ldm(ia_w, sp, r3.bit() | cp.bit() | fp.bit());
+
+ // If the handler is a JS frame, restore the context to the frame.
+ // (r3 == ENTRY) == (fp == 0) == (cp == 0), so we could test any
+ // of them.
+ cmp(r3, Operand(StackHandler::ENTRY));
+ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne);
+
#ifdef DEBUG
if (emit_debug_code()) {
mov(lr, Operand(pc));
}
#endif
- STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
pop(pc);
}
@@ -1194,8 +1194,12 @@
void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
Register value) {
// Adjust this code if not the case.
- STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
-
+ STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kContextOffset == 2 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kFPOffset == 3 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize);
// r0 is expected to hold the exception.
if (!value.is(r0)) {
mov(r0, value);
@@ -1220,7 +1224,6 @@
bind(&done);
// Set the top handler address to next handler past the current ENTRY handler.
- STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
pop(r2);
str(r2, MemOperand(r3));
@@ -1242,26 +1245,17 @@
// Stack layout at this point. See also StackHandlerConstants.
// sp -> state (ENTRY)
+ // cp
// fp
// lr
- // Discard handler state (r2 is not used) and restore frame pointer.
- STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize);
- ldm(ia_w, sp, r2.bit() | fp.bit()); // r2: discarded state.
- // Before returning we restore the context from the frame pointer if
- // not NULL. The frame pointer is NULL in the exception handler of a
- // JS entry frame.
- cmp(fp, Operand(0, RelocInfo::NONE));
- // Set cp to NULL if fp is NULL.
- mov(cp, Operand(0, RelocInfo::NONE), LeaveCC, eq);
- // Restore cp otherwise.
- ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne);
+ // Restore context and frame pointer, discard state (r2).
+ ldm(ia_w, sp, r2.bit() | cp.bit() | fp.bit());
#ifdef DEBUG
if (emit_debug_code()) {
mov(lr, Operand(pc));
}
#endif
- STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
pop(pc);
}
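
The macro-assembler changes above grow the try-handler frame from four words to five so the context is saved next to the state, frame pointer and return address, which lets Throw restore cp with a single ldm. A sketch of the layout those STATIC_ASSERTs pin down (a hypothetical struct for illustration only; the real frame is raw stack slots written by stm):

#include <cstdint>

struct StackHandlerFrame {
  StackHandlerFrame* next;  // kNextOffset    == 0 * kPointerSize
  intptr_t state;           // kStateOffset   == 1 * kPointerSize
  void* context;            // kContextOffset == 2 * kPointerSize
  void* fp;                 // kFPOffset      == 3 * kPointerSize
  void* pc;                 // kPCOffset      == 4 * kPointerSize
};

static_assert(sizeof(StackHandlerFrame) == 5 * sizeof(void*),
              "five pointer-sized words, matching kSize above");
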
diff --git a/src/arm/regexp-macro-assembler-arm.cc b/src/arm/regexp-macro-assembler-arm.cc
index 983a528..cd76edb 100644
--- a/src/arm/regexp-macro-assembler-arm.cc
+++ b/src/arm/regexp-macro-assembler-arm.cc
@@ -1034,12 +1034,13 @@
}
// Prepare for possible GC.
- HandleScope handles;
+ HandleScope handles(isolate);
Handle<Code> code_handle(re_code);
Handle<String> subject(frame_entry<String*>(re_frame, kInputString));
+
// Current string.
- bool is_ascii = subject->IsAsciiRepresentation();
+ bool is_ascii = subject->IsAsciiRepresentationUnderneath();
ASSERT(re_code->instruction_start() <= *return_address);
ASSERT(*return_address <=
@@ -1048,7 +1049,7 @@
MaybeObject* result = Execution::HandleStackGuardInterrupt();
if (*code_handle != re_code) { // Return address no longer valid
- int delta = *code_handle - re_code;
+ int delta = code_handle->address() - re_code->address();
// Overwrite the return address on the stack.
*return_address += delta;
}
@@ -1057,8 +1058,20 @@
return EXCEPTION;
}
+ Handle<String> subject_tmp = subject;
+ int slice_offset = 0;
+
+ // Extract the underlying string and the slice offset.
+ if (StringShape(*subject_tmp).IsCons()) {
+ subject_tmp = Handle<String>(ConsString::cast(*subject_tmp)->first());
+ } else if (StringShape(*subject_tmp).IsSliced()) {
+ SlicedString* slice = SlicedString::cast(*subject_tmp);
+ subject_tmp = Handle<String>(slice->parent());
+ slice_offset = slice->offset();
+ }
+
// String might have changed.
- if (subject->IsAsciiRepresentation() != is_ascii) {
+ if (subject_tmp->IsAsciiRepresentation() != is_ascii) {
// If we changed between an ASCII and an UC16 string, the specialized
// code cannot be used, and we need to restart regexp matching from
// scratch (including, potentially, compiling a new version of the code).
@@ -1069,8 +1082,8 @@
// be a sequential or external string with the same content.
// Update the start and end pointers in the stack frame to the current
// location (whether it has actually moved or not).
- ASSERT(StringShape(*subject).IsSequential() ||
- StringShape(*subject).IsExternal());
+ ASSERT(StringShape(*subject_tmp).IsSequential() ||
+ StringShape(*subject_tmp).IsExternal());
// The original start address of the characters to match.
const byte* start_address = frame_entry<const byte*>(re_frame, kInputStart);
@@ -1078,13 +1091,14 @@
// Find the current start address of the same character at the current string
// position.
int start_index = frame_entry<int>(re_frame, kStartIndex);
- const byte* new_address = StringCharacterPosition(*subject, start_index);
+ const byte* new_address = StringCharacterPosition(*subject_tmp,
+ start_index + slice_offset);
if (start_address != new_address) {
// If there is a difference, update the object pointer and start and end
// addresses in the RegExp stack frame to match the new value.
const byte* end_address = frame_entry<const byte* >(re_frame, kInputEnd);
- int byte_length = end_address - start_address;
+ int byte_length = static_cast<int>(end_address - start_address);
frame_entry<const String*>(re_frame, kInputString) = *subject;
frame_entry<const byte*>(re_frame, kInputStart) = new_address;
frame_entry<const byte*>(re_frame, kInputEnd) = new_address + byte_length;
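
The regexp GC-interrupt handler above now peels one level of string indirection before revalidating the subject: a cons string contributes its first component, a sliced string its parent plus the slice offset. A standalone sketch of that unwrapping step (hypothetical class hierarchy, not V8's StringShape API):

struct String { virtual ~String() = default; };
struct ConsString : String { String* first = nullptr; };
struct SlicedString : String { String* parent = nullptr; int offset = 0; };

struct Unwrapped {
  String* string;
  int slice_offset;
};

// One level is enough on this path: the cons strings reached here are flat
// (empty second part), and a slice's parent is never itself indirect.
Unwrapped Unwrap(String* subject) {
  if (auto* cons = dynamic_cast<ConsString*>(subject)) return {cons->first, 0};
  if (auto* slice = dynamic_cast<SlicedString*>(subject))
    return {slice->parent, slice->offset};
  return {subject, 0};
}
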
diff --git a/src/arm/stub-cache-arm.cc b/src/arm/stub-cache-arm.cc
index c2665f8..5345892 100644
--- a/src/arm/stub-cache-arm.cc
+++ b/src/arm/stub-cache-arm.cc
@@ -1183,9 +1183,8 @@
__ JumpIfSmi(receiver, miss);
// Check that the maps haven't changed.
- Register reg =
- CheckPrototypes(object, receiver, holder,
- scratch1, scratch2, scratch3, name, miss);
+ CheckPrototypes(object, receiver, holder, scratch1, scratch2, scratch3, name,
+ miss);
// Return the constant value.
__ mov(r0, Operand(Handle<Object>(value)));
@@ -3489,16 +3488,16 @@
// Check that the index is in range.
__ ldr(ip, FieldMemOperand(r3, ExternalArray::kLengthOffset));
- __ cmp(ip, Operand(key, ASR, kSmiTagSize));
+ __ cmp(key, ip);
// Unsigned comparison catches both negative and too-large values.
- __ b(lo, &miss_force_generic);
+ __ b(hs, &miss_force_generic);
__ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));
// r3: base pointer of external storage
// We are not untagging smi key and instead work with it
// as if it was premultiplied by 2.
- ASSERT((kSmiTag == 0) && (kSmiTagSize == 1));
+ STATIC_ASSERT((kSmiTag == 0) && (kSmiTagSize == 1));
Register value = r2;
switch (elements_kind) {
@@ -3811,22 +3810,20 @@
// This stub is meant to be tail-jumped to, the receiver must already
// have been verified by the caller to not be a smi.
- __ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset));
-
// Check that the key is a smi.
__ JumpIfNotSmi(key, &miss_force_generic);
+ __ ldr(r3, FieldMemOperand(receiver, JSObject::kElementsOffset));
+
// Check that the index is in range
- __ SmiUntag(r4, key);
__ ldr(ip, FieldMemOperand(r3, ExternalArray::kLengthOffset));
- __ cmp(r4, ip);
+ __ cmp(key, ip);
// Unsigned comparison catches both negative and too-large values.
__ b(hs, &miss_force_generic);
// Handle both smis and HeapNumbers in the fast path. Go to the
// runtime for all other kinds of values.
// r3: external array.
- // r4: key (integer).
if (elements_kind == JSObject::EXTERNAL_PIXEL_ELEMENTS) {
// Double to pixel conversion is only implemented in the runtime for now.
__ JumpIfNotSmi(value, &slow);
@@ -3837,32 +3834,32 @@
__ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));
// r3: base pointer of external storage.
- // r4: key (integer).
// r5: value (integer).
switch (elements_kind) {
case JSObject::EXTERNAL_PIXEL_ELEMENTS:
// Clamp the value to [0..255].
__ Usat(r5, 8, Operand(r5));
- __ strb(r5, MemOperand(r3, r4, LSL, 0));
+ __ strb(r5, MemOperand(r3, key, LSR, 1));
break;
case JSObject::EXTERNAL_BYTE_ELEMENTS:
case JSObject::EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
- __ strb(r5, MemOperand(r3, r4, LSL, 0));
+ __ strb(r5, MemOperand(r3, key, LSR, 1));
break;
case JSObject::EXTERNAL_SHORT_ELEMENTS:
case JSObject::EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
- __ strh(r5, MemOperand(r3, r4, LSL, 1));
+ __ strh(r5, MemOperand(r3, key, LSL, 0));
break;
case JSObject::EXTERNAL_INT_ELEMENTS:
case JSObject::EXTERNAL_UNSIGNED_INT_ELEMENTS:
- __ str(r5, MemOperand(r3, r4, LSL, 2));
+ __ str(r5, MemOperand(r3, key, LSL, 1));
break;
case JSObject::EXTERNAL_FLOAT_ELEMENTS:
// Perform int-to-float conversion and store to memory.
+ __ SmiUntag(r4, key);
StoreIntAsFloat(masm, r3, r4, r5, r6, r7, r9);
break;
case JSObject::EXTERNAL_DOUBLE_ELEMENTS:
- __ add(r3, r3, Operand(r4, LSL, 3));
+ __ add(r3, r3, Operand(key, LSL, 2));
// r3: effective address of the double element
FloatingPointHelper::Destination destination;
if (CpuFeatures::IsSupported(VFP3)) {
@@ -3895,7 +3892,6 @@
if (elements_kind != JSObject::EXTERNAL_PIXEL_ELEMENTS) {
// r3: external array.
- // r4: index (integer).
__ bind(&check_heap_number);
__ CompareObjectType(value, r5, r6, HEAP_NUMBER_TYPE);
__ b(ne, &slow);
@@ -3903,7 +3899,6 @@
__ ldr(r3, FieldMemOperand(r3, ExternalArray::kExternalPointerOffset));
// r3: base pointer of external storage.
- // r4: key (integer).
// The WebGL specification leaves the behavior of storing NaN and
// +/-Infinity into integer arrays basically undefined. For more
@@ -3916,13 +3911,13 @@
// include -kHeapObjectTag into it.
__ sub(r5, r0, Operand(kHeapObjectTag));
__ vldr(d0, r5, HeapNumber::kValueOffset);
- __ add(r5, r3, Operand(r4, LSL, 2));
+ __ add(r5, r3, Operand(key, LSL, 1));
__ vcvt_f32_f64(s0, d0);
__ vstr(s0, r5, 0);
} else if (elements_kind == JSObject::EXTERNAL_DOUBLE_ELEMENTS) {
__ sub(r5, r0, Operand(kHeapObjectTag));
__ vldr(d0, r5, HeapNumber::kValueOffset);
- __ add(r5, r3, Operand(r4, LSL, 3));
+ __ add(r5, r3, Operand(key, LSL, 2));
__ vstr(d0, r5, 0);
} else {
// Hoisted load. vldr requires offset to be a multiple of 4 so we can
@@ -3934,15 +3929,15 @@
switch (elements_kind) {
case JSObject::EXTERNAL_BYTE_ELEMENTS:
case JSObject::EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
- __ strb(r5, MemOperand(r3, r4, LSL, 0));
+ __ strb(r5, MemOperand(r3, key, LSR, 1));
break;
case JSObject::EXTERNAL_SHORT_ELEMENTS:
case JSObject::EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
- __ strh(r5, MemOperand(r3, r4, LSL, 1));
+ __ strh(r5, MemOperand(r3, key, LSL, 0));
break;
case JSObject::EXTERNAL_INT_ELEMENTS:
case JSObject::EXTERNAL_UNSIGNED_INT_ELEMENTS:
- __ str(r5, MemOperand(r3, r4, LSL, 2));
+ __ str(r5, MemOperand(r3, key, LSL, 1));
break;
case JSObject::EXTERNAL_PIXEL_ELEMENTS:
case JSObject::EXTERNAL_FLOAT_ELEMENTS:
@@ -4004,7 +3999,7 @@
__ orr(r5, r7, Operand(r9, LSL, kBinary32ExponentShift));
__ bind(&done);
- __ str(r5, MemOperand(r3, r4, LSL, 2));
+ __ str(r5, MemOperand(r3, key, LSL, 1));
// Entry registers are intact, r0 holds the value which is the return
// value.
__ Ret();
@@ -4017,7 +4012,7 @@
__ orr(r5, r9, Operand(r6, LSR, kMantissaInLoWordShift));
__ b(&done);
} else if (elements_kind == JSObject::EXTERNAL_DOUBLE_ELEMENTS) {
- __ add(r7, r3, Operand(r4, LSL, 3));
+ __ add(r7, r3, Operand(key, LSL, 2));
// r7: effective address of destination element.
__ str(r6, MemOperand(r7, 0));
__ str(r5, MemOperand(r7, Register::kSizeInBytes));
@@ -4073,15 +4068,15 @@
switch (elements_kind) {
case JSObject::EXTERNAL_BYTE_ELEMENTS:
case JSObject::EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
- __ strb(r5, MemOperand(r3, r4, LSL, 0));
+ __ strb(r5, MemOperand(r3, key, LSR, 1));
break;
case JSObject::EXTERNAL_SHORT_ELEMENTS:
case JSObject::EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
- __ strh(r5, MemOperand(r3, r4, LSL, 1));
+ __ strh(r5, MemOperand(r3, key, LSL, 0));
break;
case JSObject::EXTERNAL_INT_ELEMENTS:
case JSObject::EXTERNAL_UNSIGNED_INT_ELEMENTS:
- __ str(r5, MemOperand(r3, r4, LSL, 2));
+ __ str(r5, MemOperand(r3, key, LSL, 1));
break;
case JSObject::EXTERNAL_PIXEL_ELEMENTS:
case JSObject::EXTERNAL_FLOAT_ELEMENTS:
@@ -4152,7 +4147,7 @@
// Load the result and make sure it's not the hole.
__ add(r3, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
- ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
+ STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
__ ldr(r4,
MemOperand(r3, r0, LSL, kPointerSizeLog2 - kSmiTagSize));
__ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
@@ -4284,7 +4279,7 @@
__ add(scratch,
elements_reg, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
- ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
+ STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
__ str(value_reg,
MemOperand(scratch, key_reg, LSL, kPointerSizeLog2 - kSmiTagSize));
__ RecordWrite(scratch,
@@ -4399,11 +4394,18 @@
} else {
destination = FloatingPointHelper::kCoreRegisters;
}
- __ SmiUntag(value_reg, value_reg);
+
+ Register untagged_value = receiver_reg;
+ __ SmiUntag(untagged_value, value_reg);
FloatingPointHelper::ConvertIntToDouble(
- masm, value_reg, destination,
- d0, mantissa_reg, exponent_reg, // These are: double_dst, dst1, dst2.
- scratch4, s2); // These are: scratch2, single_scratch.
+ masm,
+ untagged_value,
+ destination,
+ d0,
+ mantissa_reg,
+ exponent_reg,
+ scratch4,
+ s2);
if (destination == FloatingPointHelper::kVFPRegisters) {
CpuFeatures::Scope scope(VFP3);
__ vstr(d0, scratch, 0);
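
The external-array store stubs above stop untagging the key and fold the smi tag into the address arithmetic instead: with a one-bit tag the tagged key equals index * 2, so the scaled offset uses a shift of log2(element size) - 1, which degenerates to an LSR #1 for byte-sized elements. A small sketch of that arithmetic under those assumptions:

#include <cstdint>

// smi_key == index << 1 (one-bit smi tag, tag value 0).
inline uint8_t* ElementAddress(uint8_t* base, intptr_t smi_key,
                               int element_size_log2) {
  intptr_t byte_offset = (element_size_log2 == 0)
                             ? (smi_key >> 1)                      // LSR #1
                             : (smi_key << (element_size_log2 - 1));
  return base + byte_offset;
}
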
diff --git a/src/array.js b/src/array.js
index e6c13d9..f73b0c1 100644
--- a/src/array.js
+++ b/src/array.js
@@ -172,12 +172,12 @@
} else {
for (var i = 0; i < length; i++) {
var e = array[i];
- if (IS_NUMBER(e)) {
- e = %_NumberToString(e);
- } else if (!IS_STRING(e)) {
- e = convert(e);
- }
- elements[i] = e;
+ if (IS_NUMBER(e)) {
+ e = %_NumberToString(e);
+ } else if (!IS_STRING(e)) {
+ e = convert(e);
+ }
+ elements[i] = e;
}
}
var result = %_FastAsciiArrayJoin(elements, separator);
@@ -742,8 +742,7 @@
else return x < y ? -1 : 1;
};
}
- var receiver =
- %_IsNativeOrStrictMode(comparefn) ? void 0 : %GetGlobalReceiver();
+ var receiver = %GetDefaultReceiver(comparefn);
function InsertionSort(a, from, to) {
for (var i = from + 1; i < to; i++) {
@@ -997,6 +996,9 @@
if (!IS_FUNCTION(f)) {
throw MakeTypeError('called_non_callable', [ f ]);
}
+ if (IS_NULL_OR_UNDEFINED(receiver)) {
+ receiver = %GetDefaultReceiver(f) || receiver;
+ }
// Pull out the length so that modifications to the length in the
// loop will not affect the looping.
var length = ToUint32(this.length);
@@ -1005,7 +1007,7 @@
for (var i = 0; i < length; i++) {
var current = this[i];
if (!IS_UNDEFINED(current) || i in this) {
- if (f.call(receiver, current, i, this)) {
+ if (%_CallFunction(receiver, current, i, this, f)) {
result[result_length++] = current;
}
}
@@ -1023,13 +1025,16 @@
if (!IS_FUNCTION(f)) {
throw MakeTypeError('called_non_callable', [ f ]);
}
+ if (IS_NULL_OR_UNDEFINED(receiver)) {
+ receiver = %GetDefaultReceiver(f) || receiver;
+ }
// Pull out the length so that modifications to the length in the
// loop will not affect the looping.
var length = TO_UINT32(this.length);
for (var i = 0; i < length; i++) {
var current = this[i];
if (!IS_UNDEFINED(current) || i in this) {
- f.call(receiver, current, i, this);
+ %_CallFunction(receiver, current, i, this, f);
}
}
}
@@ -1046,13 +1051,16 @@
if (!IS_FUNCTION(f)) {
throw MakeTypeError('called_non_callable', [ f ]);
}
+ if (IS_NULL_OR_UNDEFINED(receiver)) {
+ receiver = %GetDefaultReceiver(f) || receiver;
+ }
// Pull out the length so that modifications to the length in the
// loop will not affect the looping.
var length = TO_UINT32(this.length);
for (var i = 0; i < length; i++) {
var current = this[i];
if (!IS_UNDEFINED(current) || i in this) {
- if (f.call(receiver, current, i, this)) return true;
+ if (%_CallFunction(receiver, current, i, this, f)) return true;
}
}
return false;
@@ -1068,13 +1076,16 @@
if (!IS_FUNCTION(f)) {
throw MakeTypeError('called_non_callable', [ f ]);
}
+ if (IS_NULL_OR_UNDEFINED(receiver)) {
+ receiver = %GetDefaultReceiver(f) || receiver;
+ }
// Pull out the length so that modifications to the length in the
// loop will not affect the looping.
var length = TO_UINT32(this.length);
for (var i = 0; i < length; i++) {
var current = this[i];
if (!IS_UNDEFINED(current) || i in this) {
- if (!f.call(receiver, current, i, this)) return false;
+ if (!%_CallFunction(receiver, current, i, this, f)) return false;
}
}
return true;
@@ -1089,6 +1100,9 @@
if (!IS_FUNCTION(f)) {
throw MakeTypeError('called_non_callable', [ f ]);
}
+ if (IS_NULL_OR_UNDEFINED(receiver)) {
+ receiver = %GetDefaultReceiver(f) || receiver;
+ }
// Pull out the length so that modifications to the length in the
// loop will not affect the looping.
var length = TO_UINT32(this.length);
@@ -1097,7 +1111,7 @@
for (var i = 0; i < length; i++) {
var current = this[i];
if (!IS_UNDEFINED(current) || i in this) {
- accumulator[i] = f.call(receiver, current, i, this);
+ accumulator[i] = %_CallFunction(receiver, current, i, this, f);
}
}
%MoveArrayContents(accumulator, result);
@@ -1234,6 +1248,7 @@
if (!IS_FUNCTION(callback)) {
throw MakeTypeError('called_non_callable', [callback]);
}
+
// Pull out the length so that modifications to the length in the
// loop will not affect the looping.
var length = ToUint32(this.length);
@@ -1250,10 +1265,11 @@
throw MakeTypeError('reduce_no_initial', []);
}
+ var receiver = %GetDefaultReceiver(callback);
for (; i < length; i++) {
var element = this[i];
if (!IS_UNDEFINED(element) || i in this) {
- current = callback.call(void 0, current, element, i, this);
+ current = %_CallFunction(receiver, current, element, i, this, callback);
}
}
return current;
@@ -1281,10 +1297,11 @@
throw MakeTypeError('reduce_no_initial', []);
}
+ var receiver = %GetDefaultReceiver(callback);
for (; i >= 0; i--) {
var element = this[i];
if (!IS_UNDEFINED(element) || i in this) {
- current = callback.call(void 0, current, element, i, this);
+ current = %_CallFunction(receiver, current, element, i, this, callback);
}
}
return current;
diff --git a/src/assembler.cc b/src/assembler.cc
index fbd8089..ad5f350 100644
--- a/src/assembler.cc
+++ b/src/assembler.cc
@@ -74,7 +74,7 @@
const double DoubleConstant::canonical_non_hole_nan = OS::nan_value();
const double DoubleConstant::the_hole_nan = BitCast<double>(kHoleNanInt64);
const double DoubleConstant::negative_infinity = -V8_INFINITY;
-const char* RelocInfo::kFillerCommentString = "DEOPTIMIZATION PADDING";
+const char* const RelocInfo::kFillerCommentString = "DEOPTIMIZATION PADDING";
// -----------------------------------------------------------------------------
// Implementation of AssemblerBase
diff --git a/src/assembler.h b/src/assembler.h
index 2d14f06..d58034d 100644
--- a/src/assembler.h
+++ b/src/assembler.h
@@ -171,7 +171,7 @@
// where we are not sure to have enough space for patching in during
// lazy deoptimization. This is the case if we have indirect calls for which
// we do not normally record relocation info.
- static const char* kFillerCommentString;
+ static const char* const kFillerCommentString;
// The minimum size of a comment is equal to three bytes for the extra tagged
// pc + the tag for the data, and kPointerSize for the actual pointer to the
diff --git a/src/ast-inl.h b/src/ast-inl.h
index c750e6b..731ad2f 100644
--- a/src/ast-inl.h
+++ b/src/ast-inl.h
@@ -50,7 +50,8 @@
bool is_initializer_block)
: BreakableStatement(isolate, labels, TARGET_FOR_NAMED_ONLY),
statements_(capacity),
- is_initializer_block_(is_initializer_block) {
+ is_initializer_block_(is_initializer_block),
+ block_scope_(NULL) {
}
diff --git a/src/ast.cc b/src/ast.cc
index 2df62ee..7319abe 100644
--- a/src/ast.cc
+++ b/src/ast.cc
@@ -139,8 +139,7 @@
assignment_id_(GetNextId(isolate)),
block_start_(false),
block_end_(false),
- is_monomorphic_(false),
- receiver_types_(NULL) {
+ is_monomorphic_(false) {
ASSERT(Token::IsAssignmentOp(op));
if (is_compound()) {
binary_operation_ =
@@ -426,7 +425,7 @@
}
-bool EnterWithContextStatement::IsInlineable() const {
+bool WithStatement::IsInlineable() const {
return false;
}
@@ -652,6 +651,7 @@
void Property::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
// Record type feedback from the oracle in the AST.
is_monomorphic_ = oracle->LoadIsMonomorphicNormal(this);
+ receiver_types_.Clear();
if (key()->IsPropertyName()) {
if (oracle->LoadIsBuiltin(this, Builtins::kLoadIC_ArrayLength)) {
is_array_length_ = true;
@@ -664,16 +664,15 @@
Literal* lit_key = key()->AsLiteral();
ASSERT(lit_key != NULL && lit_key->handle()->IsString());
Handle<String> name = Handle<String>::cast(lit_key->handle());
- ZoneMapList* types = oracle->LoadReceiverTypes(this, name);
- receiver_types_ = types;
+ oracle->LoadReceiverTypes(this, name, &receiver_types_);
}
} else if (oracle->LoadIsBuiltin(this, Builtins::kKeyedLoadIC_String)) {
is_string_access_ = true;
} else if (is_monomorphic_) {
- monomorphic_receiver_type_ = oracle->LoadMonomorphicReceiverType(this);
+ receiver_types_.Add(oracle->LoadMonomorphicReceiverType(this));
} else if (oracle->LoadIsMegamorphicWithTypeInfo(this)) {
- receiver_types_ = new ZoneMapList(kMaxKeyedPolymorphism);
- oracle->CollectKeyedReceiverTypes(this->id(), receiver_types_);
+ receiver_types_.Reserve(kMaxKeyedPolymorphism);
+ oracle->CollectKeyedReceiverTypes(this->id(), &receiver_types_);
}
}
@@ -682,30 +681,31 @@
Property* prop = target()->AsProperty();
ASSERT(prop != NULL);
is_monomorphic_ = oracle->StoreIsMonomorphicNormal(this);
+ receiver_types_.Clear();
if (prop->key()->IsPropertyName()) {
Literal* lit_key = prop->key()->AsLiteral();
ASSERT(lit_key != NULL && lit_key->handle()->IsString());
Handle<String> name = Handle<String>::cast(lit_key->handle());
- ZoneMapList* types = oracle->StoreReceiverTypes(this, name);
- receiver_types_ = types;
+ oracle->StoreReceiverTypes(this, name, &receiver_types_);
} else if (is_monomorphic_) {
// Record receiver type for monomorphic keyed stores.
- monomorphic_receiver_type_ = oracle->StoreMonomorphicReceiverType(this);
+ receiver_types_.Add(oracle->StoreMonomorphicReceiverType(this));
} else if (oracle->StoreIsMegamorphicWithTypeInfo(this)) {
- receiver_types_ = new ZoneMapList(kMaxKeyedPolymorphism);
- oracle->CollectKeyedReceiverTypes(this->id(), receiver_types_);
+ receiver_types_.Reserve(kMaxKeyedPolymorphism);
+ oracle->CollectKeyedReceiverTypes(this->id(), &receiver_types_);
}
}
void CountOperation::RecordTypeFeedback(TypeFeedbackOracle* oracle) {
is_monomorphic_ = oracle->StoreIsMonomorphicNormal(this);
+ receiver_types_.Clear();
if (is_monomorphic_) {
// Record receiver type for monomorphic keyed stores.
- monomorphic_receiver_type_ = oracle->StoreMonomorphicReceiverType(this);
+ receiver_types_.Add(oracle->StoreMonomorphicReceiverType(this));
} else if (oracle->StoreIsMegamorphicWithTypeInfo(this)) {
- receiver_types_ = new ZoneMapList(kMaxKeyedPolymorphism);
- oracle->CollectKeyedReceiverTypes(this->id(), receiver_types_);
+ receiver_types_.Reserve(kMaxKeyedPolymorphism);
+ oracle->CollectKeyedReceiverTypes(this->id(), &receiver_types_);
}
}
@@ -789,15 +789,14 @@
Literal* key = property->key()->AsLiteral();
ASSERT(key != NULL && key->handle()->IsString());
Handle<String> name = Handle<String>::cast(key->handle());
- receiver_types_ = oracle->CallReceiverTypes(this, name, call_kind);
+ receiver_types_.Clear();
+ oracle->CallReceiverTypes(this, name, call_kind, &receiver_types_);
#ifdef DEBUG
if (FLAG_enable_slow_asserts) {
- if (receiver_types_ != NULL) {
- int length = receiver_types_->length();
- for (int i = 0; i < length; i++) {
- Handle<Map> map = receiver_types_->at(i);
- ASSERT(!map.is_null() && *map != NULL);
- }
+ int length = receiver_types_.length();
+ for (int i = 0; i < length; i++) {
+ Handle<Map> map = receiver_types_.at(i);
+ ASSERT(!map.is_null() && *map != NULL);
}
}
#endif
@@ -805,9 +804,9 @@
check_type_ = oracle->GetCallCheckType(this);
if (is_monomorphic_) {
Handle<Map> map;
- if (receiver_types_ != NULL && receiver_types_->length() > 0) {
+ if (receiver_types_.length() > 0) {
ASSERT(check_type_ == RECEIVER_MAP_CHECK);
- map = receiver_types_->at(0);
+ map = receiver_types_.at(0);
} else {
ASSERT(check_type_ != RECEIVER_MAP_CHECK);
holder_ = Handle<JSObject>(
diff --git a/src/ast.h b/src/ast.h
index b4705f6..74182d5 100644
--- a/src/ast.h
+++ b/src/ast.h
@@ -33,6 +33,7 @@
#include "factory.h"
#include "jsregexp.h"
#include "runtime.h"
+#include "small-pointer-list.h"
#include "token.h"
#include "variables.h"
@@ -60,7 +61,7 @@
V(ContinueStatement) \
V(BreakStatement) \
V(ReturnStatement) \
- V(EnterWithContextStatement) \
+ V(WithStatement) \
V(ExitContextStatement) \
V(SwitchStatement) \
V(DoWhileStatement) \
@@ -207,6 +208,36 @@
};
+class SmallMapList {
+ public:
+ SmallMapList() {}
+ explicit SmallMapList(int capacity) : list_(capacity) {}
+
+ void Reserve(int capacity) { list_.Reserve(capacity); }
+ void Clear() { list_.Clear(); }
+
+ bool is_empty() const { return list_.is_empty(); }
+ int length() const { return list_.length(); }
+
+ void Add(Handle<Map> handle) {
+ list_.Add(handle.location());
+ }
+
+ Handle<Map> at(int i) const {
+ return Handle<Map>(list_.at(i));
+ }
+
+ Handle<Map> first() const { return at(0); }
+ Handle<Map> last() const { return at(length() - 1); }
+
+ private:
+ // The list stores pointers to Map*, that is Map**, so it's GC safe.
+ SmallPointerList<Map*> list_;
+
+ DISALLOW_COPY_AND_ASSIGN(SmallMapList);
+};
+
+
class Expression: public AstNode {
public:
enum Context {
@@ -265,13 +296,15 @@
UNREACHABLE();
return false;
}
- virtual ZoneMapList* GetReceiverTypes() {
+ virtual SmallMapList* GetReceiverTypes() {
UNREACHABLE();
return NULL;
}
- virtual Handle<Map> GetMonomorphicReceiverType() {
- UNREACHABLE();
- return Handle<Map>();
+ Handle<Map> GetMonomorphicReceiverType() {
+ ASSERT(IsMonomorphic());
+ SmallMapList* types = GetReceiverTypes();
+ ASSERT(types != NULL && types->length() == 1);
+ return types->at(0);
}
unsigned id() const { return id_; }
@@ -359,9 +392,13 @@
ZoneList<Statement*>* statements() { return &statements_; }
bool is_initializer_block() const { return is_initializer_block_; }
+ Scope* block_scope() const { return block_scope_; }
+ void set_block_scope(Scope* block_scope) { block_scope_ = block_scope; }
+
private:
ZoneList<Statement*> statements_;
bool is_initializer_block_;
+ Scope* block_scope_;
};
@@ -371,9 +408,11 @@
: proxy_(proxy),
mode_(mode),
fun_(fun) {
- ASSERT(mode == Variable::VAR || mode == Variable::CONST);
+ ASSERT(mode == Variable::VAR ||
+ mode == Variable::CONST ||
+ mode == Variable::LET);
// At the moment there are no "const functions"'s in JavaScript...
- ASSERT(fun == NULL || mode == Variable::VAR);
+ ASSERT(fun == NULL || mode == Variable::VAR || mode == Variable::LET);
}
DECLARE_NODE_TYPE(Declaration)
@@ -627,19 +666,21 @@
};
-class EnterWithContextStatement: public Statement {
+class WithStatement: public Statement {
public:
- explicit EnterWithContextStatement(Expression* expression)
- : expression_(expression) { }
+ WithStatement(Expression* expression, Statement* statement)
+ : expression_(expression), statement_(statement) { }
- DECLARE_NODE_TYPE(EnterWithContextStatement)
+ DECLARE_NODE_TYPE(WithStatement)
Expression* expression() const { return expression_; }
+ Statement* statement() const { return statement_; }
virtual bool IsInlineable() const;
private:
Expression* expression_;
+ Statement* statement_;
};
@@ -1190,22 +1231,14 @@
class Property: public Expression {
public:
- // Synthetic properties are property lookups introduced by the system,
- // to objects that aren't visible to the user. Function calls to synthetic
- // properties should use the global object as receiver, not the base object
- // of the resolved Reference.
- enum Type { NORMAL, SYNTHETIC };
Property(Isolate* isolate,
Expression* obj,
Expression* key,
- int pos,
- Type type = NORMAL)
+ int pos)
: Expression(isolate),
obj_(obj),
key_(key),
pos_(pos),
- type_(type),
- receiver_types_(NULL),
is_monomorphic_(false),
is_array_length_(false),
is_string_length_(false),
@@ -1220,7 +1253,6 @@
Expression* obj() const { return obj_; }
Expression* key() const { return key_; }
virtual int position() const { return pos_; }
- bool is_synthetic() const { return type_ == SYNTHETIC; }
bool IsStringLength() const { return is_string_length_; }
bool IsStringAccess() const { return is_string_access_; }
@@ -1229,25 +1261,20 @@
// Type feedback information.
void RecordTypeFeedback(TypeFeedbackOracle* oracle);
virtual bool IsMonomorphic() { return is_monomorphic_; }
- virtual ZoneMapList* GetReceiverTypes() { return receiver_types_; }
+ virtual SmallMapList* GetReceiverTypes() { return &receiver_types_; }
virtual bool IsArrayLength() { return is_array_length_; }
- virtual Handle<Map> GetMonomorphicReceiverType() {
- return monomorphic_receiver_type_;
- }
private:
Expression* obj_;
Expression* key_;
int pos_;
- Type type_;
- ZoneMapList* receiver_types_;
+ SmallMapList receiver_types_;
bool is_monomorphic_ : 1;
bool is_array_length_ : 1;
bool is_string_length_ : 1;
bool is_string_access_ : 1;
bool is_function_prototype_ : 1;
- Handle<Map> monomorphic_receiver_type_;
};
@@ -1263,7 +1290,6 @@
pos_(pos),
is_monomorphic_(false),
check_type_(RECEIVER_MAP_CHECK),
- receiver_types_(NULL),
return_id_(GetNextId(isolate)) {
}
@@ -1277,7 +1303,7 @@
void RecordTypeFeedback(TypeFeedbackOracle* oracle,
CallKind call_kind);
- virtual ZoneMapList* GetReceiverTypes() { return receiver_types_; }
+ virtual SmallMapList* GetReceiverTypes() { return &receiver_types_; }
virtual bool IsMonomorphic() { return is_monomorphic_; }
CheckType check_type() const { return check_type_; }
Handle<JSFunction> target() { return target_; }
@@ -1302,7 +1328,7 @@
bool is_monomorphic_;
CheckType check_type_;
- ZoneMapList* receiver_types_;
+ SmallMapList receiver_types_;
Handle<JSFunction> target_;
Handle<JSObject> holder_;
Handle<JSGlobalPropertyCell> cell_;
@@ -1477,8 +1503,7 @@
expression_(expr),
pos_(pos),
assignment_id_(GetNextId(isolate)),
- count_id_(GetNextId(isolate)),
- receiver_types_(NULL) { }
+ count_id_(GetNextId(isolate)) {}
DECLARE_NODE_TYPE(CountOperation)
@@ -1499,10 +1524,7 @@
void RecordTypeFeedback(TypeFeedbackOracle* oracle);
virtual bool IsMonomorphic() { return is_monomorphic_; }
- virtual Handle<Map> GetMonomorphicReceiverType() {
- return monomorphic_receiver_type_;
- }
- virtual ZoneMapList* GetReceiverTypes() { return receiver_types_; }
+ virtual SmallMapList* GetReceiverTypes() { return &receiver_types_; }
// Bailout support.
int AssignmentId() const { return assignment_id_; }
@@ -1516,8 +1538,7 @@
int pos_;
int assignment_id_;
int count_id_;
- Handle<Map> monomorphic_receiver_type_;
- ZoneMapList* receiver_types_;
+ SmallMapList receiver_types_;
};
@@ -1665,10 +1686,7 @@
// Type feedback information.
void RecordTypeFeedback(TypeFeedbackOracle* oracle);
virtual bool IsMonomorphic() { return is_monomorphic_; }
- virtual ZoneMapList* GetReceiverTypes() { return receiver_types_; }
- virtual Handle<Map> GetMonomorphicReceiverType() {
- return monomorphic_receiver_type_;
- }
+ virtual SmallMapList* GetReceiverTypes() { return &receiver_types_; }
// Bailout support.
int CompoundLoadId() const { return compound_load_id_; }
@@ -1687,8 +1705,7 @@
bool block_end_;
bool is_monomorphic_;
- ZoneMapList* receiver_types_;
- Handle<Map> monomorphic_receiver_type_;
+ SmallMapList receiver_types_;
};
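
The ast.cc and ast.h hunks above replace the heap-allocated ZoneMapList* receiver lists with a SmallMapList owned by the AST node, which callers clear and refill in place. Below is a minimal standalone sketch of that call pattern, assuming nothing from V8: SmallMapListSketch and CollectTypes are illustrative stand-ins (a std::vector instead of SmallPointerList), not the real classes.

    #include <cstddef>
    #include <vector>

    struct Map;  // opaque stand-in for v8::internal::Map

    class SmallMapListSketch {
     public:
      void Reserve(int capacity) { list_.reserve(capacity); }
      void Clear() { list_.clear(); }
      void Add(Map* map) { list_.push_back(map); }
      int length() const { return static_cast<int>(list_.size()); }
      Map* at(int i) const { return list_[i]; }
     private:
      std::vector<Map*> list_;
    };

    // Oracle-style producer: fills the caller-owned list instead of
    // returning a freshly allocated one, mirroring the reshaped
    // RecordTypeFeedback/CollectKeyedReceiverTypes calls above.
    void CollectTypes(SmallMapListSketch* out, Map* monomorphic_map) {
      out->Clear();
      if (monomorphic_map != NULL) out->Add(monomorphic_map);
    }
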
diff --git a/src/bootstrapper.cc b/src/bootstrapper.cc
index 5375cde..9f01664 100644
--- a/src/bootstrapper.cc
+++ b/src/bootstrapper.cc
@@ -199,6 +199,7 @@
// New context initialization. Used for creating a context from scratch.
void InitializeGlobal(Handle<GlobalObject> inner_global,
Handle<JSFunction> empty_function);
+ void InitializeExperimentalGlobal();
// Installs the contents of the native .js files on the global objects.
// Used for creating a context from scratch.
void InstallNativeFunctions();
@@ -1159,7 +1160,7 @@
{
- // Setup the call-as-function delegate.
+ // Set up the call-as-function delegate.
Handle<Code> code =
Handle<Code>(isolate->builtins()->builtin(
Builtins::kHandleApiCallAsFunction));
@@ -1171,7 +1172,7 @@
}
{
- // Setup the call-as-constructor delegate.
+ // Set up the call-as-constructor delegate.
Handle<Code> code =
Handle<Code>(isolate->builtins()->builtin(
Builtins::kHandleApiCallAsConstructor));
@@ -1190,6 +1191,20 @@
}
+void Genesis::InitializeExperimentalGlobal() {
+ Handle<JSObject> global = Handle<JSObject>(global_context()->global());
+
+ // TODO(mstarzinger): Move this into Genesis::InitializeGlobal once it no
+ // longer needs to live behind a flag, so WeakMap gets added to the snapshot.

+ if (FLAG_harmony_weakmaps) { // -- W e a k M a p
+ Handle<JSObject> prototype =
+ factory()->NewJSObject(isolate()->object_function(), TENURED);
+ InstallFunction(global, "WeakMap", JS_WEAK_MAP_TYPE, JSWeakMap::kSize,
+ prototype, Builtins::kIllegal, true);
+ }
+}
+
+
bool Genesis::CompileBuiltin(Isolate* isolate, int index) {
Vector<const char> name = Natives::GetScriptName(index);
Handle<String> source_code =
@@ -1680,6 +1695,11 @@
"native proxy.js") == 0) {
if (!CompileExperimentalBuiltin(isolate(), i)) return false;
}
+ if (FLAG_harmony_weakmaps &&
+ strcmp(ExperimentalNatives::GetScriptName(i).start(),
+ "native weakmap.js") == 0) {
+ if (!CompileExperimentalBuiltin(isolate(), i)) return false;
+ }
}
InstallExperimentalNativeFunctions();
@@ -2169,7 +2189,8 @@
isolate->counters()->contexts_created_from_scratch()->Increment();
}
- // Install experimental natives.
+ // Initialize experimental globals and install experimental natives.
+ InitializeExperimentalGlobal();
if (!InstallExperimentalNatives()) return;
result_ = global_context_;
diff --git a/src/checks.h b/src/checks.h
index a560b2f..2f359f6 100644
--- a/src/checks.h
+++ b/src/checks.h
@@ -251,9 +251,9 @@
// actually causes each use to introduce a new defined type with a
// name depending on the source line.
template <int> class StaticAssertionHelper { };
-#define STATIC_CHECK(test) \
- typedef \
- StaticAssertionHelper<sizeof(StaticAssertion<static_cast<bool>(test)>)> \
+#define STATIC_CHECK(test) \
+ typedef \
+ StaticAssertionHelper<sizeof(StaticAssertion<static_cast<bool>((test))>)> \
SEMI_STATIC_JOIN(__StaticAssertTypedef__, __LINE__)
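
The checks.h hunk only adds defensive parentheses around the macro argument. For readers unfamiliar with the idiom, here is a self-contained sketch of the same compile-time check: sizeof of a template that is only defined for true turns a false condition into a compile error. All *_SKETCH names are mine, not the V8 macros.

    template <bool> class StaticAssertionSketch;
    template <> class StaticAssertionSketch<true> {};
    template <int> class StaticAssertionHelperSketch {};

    #define SKETCH_JOIN2(a, b) a##b
    #define SKETCH_JOIN(a, b) SKETCH_JOIN2(a, b)
    #define STATIC_CHECK_SKETCH(test)                                       \
      typedef StaticAssertionHelperSketch<                                  \
          sizeof(StaticAssertionSketch<static_cast<bool>((test))>)>         \
          SKETCH_JOIN(StaticAssertTypedef, __LINE__)

    STATIC_CHECK_SKETCH(sizeof(int) >= 2);     // true: compiles
    // STATIC_CHECK_SKETCH(sizeof(int) == 1);  // false: would not compile
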
diff --git a/src/code-stubs.cc b/src/code-stubs.cc
index 1d1128f..5535d17 100644
--- a/src/code-stubs.cc
+++ b/src/code-stubs.cc
@@ -329,4 +329,84 @@
stream->Add("CallFunctionStub_Args%d%s%s", argc_, in_loop_name, flags_name);
}
+
+void ToBooleanStub::PrintName(StringStream* stream) {
+ stream->Add("ToBooleanStub_");
+ types_.Print(stream);
+}
+
+
+void ToBooleanStub::Types::Print(StringStream* stream) const {
+ if (IsEmpty()) stream->Add("None");
+ if (Contains(UNDEFINED)) stream->Add("Undefined");
+ if (Contains(BOOLEAN)) stream->Add("Bool");
+ if (Contains(NULL_TYPE)) stream->Add("Null");
+ if (Contains(SMI)) stream->Add("Smi");
+ if (Contains(SPEC_OBJECT)) stream->Add("SpecObject");
+ if (Contains(STRING)) stream->Add("String");
+ if (Contains(HEAP_NUMBER)) stream->Add("HeapNumber");
+}
+
+
+void ToBooleanStub::Types::TraceTransition(Types to) const {
+ if (!FLAG_trace_ic) return;
+ char buffer[100];
+ NoAllocationStringAllocator allocator(buffer,
+ static_cast<unsigned>(sizeof(buffer)));
+ StringStream stream(&allocator);
+ stream.Add("[ToBooleanIC (");
+ Print(&stream);
+ stream.Add("->");
+ to.Print(&stream);
+ stream.Add(")]\n");
+ stream.OutputToStdOut();
+}
+
+
+bool ToBooleanStub::Types::Record(Handle<Object> object) {
+ if (object->IsUndefined()) {
+ Add(UNDEFINED);
+ return false;
+ } else if (object->IsBoolean()) {
+ Add(BOOLEAN);
+ return object->IsTrue();
+ } else if (object->IsNull()) {
+ Add(NULL_TYPE);
+ return false;
+ } else if (object->IsSmi()) {
+ Add(SMI);
+ return Smi::cast(*object)->value() != 0;
+ } else if (object->IsSpecObject()) {
+ Add(SPEC_OBJECT);
+ return !object->IsUndetectableObject();
+ } else if (object->IsString()) {
+ Add(STRING);
+ return !object->IsUndetectableObject() &&
+ String::cast(*object)->length() != 0;
+ } else if (object->IsHeapNumber()) {
+ ASSERT(!object->IsUndetectableObject());
+ Add(HEAP_NUMBER);
+ double value = HeapNumber::cast(*object)->value();
+ return value != 0 && !isnan(value);
+ } else {
+ // We should never see an internal object at runtime here!
+ UNREACHABLE();
+ return true;
+ }
+}
+
+
+bool ToBooleanStub::Types::NeedsMap() const {
+ return Contains(ToBooleanStub::SPEC_OBJECT)
+ || Contains(ToBooleanStub::STRING)
+ || Contains(ToBooleanStub::HEAP_NUMBER);
+}
+
+
+bool ToBooleanStub::Types::CanBeUndetectable() const {
+ return Contains(ToBooleanStub::SPEC_OBJECT)
+ || Contains(ToBooleanStub::STRING);
+}
+
+
} } // namespace v8::internal
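
The new ToBooleanStub::Types helpers record which input kinds a ToBoolean site has observed as bits of a single byte, so the state fits in the Code object. A standalone sketch of that encoding (toy class with a raw uint8_t instead of V8's EnumSet; the printed value is only an illustration):

    #include <cstdint>
    #include <cstdio>

    enum Type { UNDEFINED, BOOLEAN, NULL_TYPE, SMI, SPEC_OBJECT, STRING,
                HEAP_NUMBER, NUMBER_OF_TYPES };

    class TypesSketch {
     public:
      TypesSketch() : bits_(0) {}
      void Add(Type t) { bits_ |= static_cast<uint8_t>(1u << t); }
      bool Contains(Type t) const { return (bits_ & (1u << t)) != 0; }
      uint8_t ToByte() const { return bits_; }  // NUMBER_OF_TYPES <= 8
     private:
      uint8_t bits_;
    };

    int main() {
      TypesSketch seen;
      seen.Add(SMI);          // bit 3
      seen.Add(HEAP_NUMBER);  // bit 6
      std::printf("recorded state: 0x%02x\n", seen.ToByte());  // 0x48
      return 0;
    }
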
diff --git a/src/code-stubs.h b/src/code-stubs.h
index 17c245c..89e99a8 100644
--- a/src/code-stubs.h
+++ b/src/code-stubs.h
@@ -900,14 +900,66 @@
class ToBooleanStub: public CodeStub {
public:
- explicit ToBooleanStub(Register tos) : tos_(tos) { }
+ enum Type {
+ UNDEFINED,
+ BOOLEAN,
+ NULL_TYPE,
+ SMI,
+ SPEC_OBJECT,
+ STRING,
+ HEAP_NUMBER,
+ NUMBER_OF_TYPES
+ };
+
+ // At most 8 different types can be distinguished, because the Code object
+ // only has room for a single byte to hold a set of these types. :-P
+ STATIC_ASSERT(NUMBER_OF_TYPES <= 8);
+
+ class Types {
+ public:
+ Types() {}
+ explicit Types(byte bits) : set_(bits) {}
+
+ bool IsEmpty() const { return set_.IsEmpty(); }
+ bool Contains(Type type) const { return set_.Contains(type); }
+ void Add(Type type) { set_.Add(type); }
+ byte ToByte() const { return set_.ToIntegral(); }
+ void Print(StringStream* stream) const;
+ void TraceTransition(Types to) const;
+ bool Record(Handle<Object> object);
+ bool NeedsMap() const;
+ bool CanBeUndetectable() const;
+
+ private:
+ EnumSet<Type, byte> set_;
+ };
+
+ static Types no_types() { return Types(); }
+ static Types all_types() { return Types((1 << NUMBER_OF_TYPES) - 1); }
+
+ explicit ToBooleanStub(Register tos, Types types = Types())
+ : tos_(tos), types_(types) { }
void Generate(MacroAssembler* masm);
+ virtual int GetCodeKind() { return Code::TO_BOOLEAN_IC; }
+ virtual void PrintName(StringStream* stream);
private:
- Register tos_;
Major MajorKey() { return ToBoolean; }
- int MinorKey() { return tos_.code(); }
+ int MinorKey() { return (tos_.code() << NUMBER_OF_TYPES) | types_.ToByte(); }
+
+ virtual void FinishCode(Code* code) {
+ code->set_to_boolean_state(types_.ToByte());
+ }
+
+ void CheckOddball(MacroAssembler* masm,
+ Type type,
+ Heap::RootListIndex value,
+ bool result);
+ void GenerateTypeTransition(MacroAssembler* masm);
+
+ Register tos_;
+ Types types_;
};
} } // namespace v8::internal
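
The new MinorKey packs the tos register code and the recorded type byte into one stub key: the low NUMBER_OF_TYPES bits hold the type set, the bits above hold the register code. A tiny sketch of that layout (plain function, not the CodeStub interface; register code 3 is an arbitrary example):

    #include <cstdint>
    #include <cstdio>

    static const int kNumberOfTypes = 7;  // mirrors NUMBER_OF_TYPES above

    int MinorKeySketch(int tos_register_code, uint8_t type_bits) {
      return (tos_register_code << kNumberOfTypes) | type_bits;
    }

    int main() {
      std::printf("0x%x\n", MinorKeySketch(3, 0x48));  // prints 0x1c8
      return 0;
    }
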
diff --git a/src/codegen.cc b/src/codegen.cc
index fb723a3..cdc9ba1 100644
--- a/src/codegen.cc
+++ b/src/codegen.cc
@@ -169,7 +169,6 @@
#endif // ENABLE_DISASSEMBLER
}
-static Vector<const char> kRegexp = CStrVector("regexp");
bool CodeGenerator::ShouldGenerateLog(Expression* type) {
ASSERT(type != NULL);
@@ -179,7 +178,7 @@
}
Handle<String> name = Handle<String>::cast(type->AsLiteral()->handle());
if (FLAG_log_regexp) {
- if (name->IsEqualTo(kRegexp))
+ if (name->IsEqualTo(CStrVector("regexp")))
return true;
}
return false;
diff --git a/src/compiler.cc b/src/compiler.cc
old mode 100755
new mode 100644
index abff8b6..b33c374
--- a/src/compiler.cc
+++ b/src/compiler.cc
@@ -478,15 +478,21 @@
// that would be compiled lazily anyway, so we skip the preparse step
// in that case too.
ScriptDataImpl* pre_data = input_pre_data;
+ bool harmony_block_scoping = natives != NATIVES_CODE &&
+ FLAG_harmony_block_scoping;
if (pre_data == NULL
&& source_length >= FLAG_min_preparse_length) {
if (source->IsExternalTwoByteString()) {
ExternalTwoByteStringUC16CharacterStream stream(
Handle<ExternalTwoByteString>::cast(source), 0, source->length());
- pre_data = ParserApi::PartialPreParse(&stream, extension);
+ pre_data = ParserApi::PartialPreParse(&stream,
+ extension,
+ harmony_block_scoping);
} else {
GenericStringUC16CharacterStream stream(source, 0, source->length());
- pre_data = ParserApi::PartialPreParse(&stream, extension);
+ pre_data = ParserApi::PartialPreParse(&stream,
+ extension,
+ harmony_block_scoping);
}
}
diff --git a/src/contexts.cc b/src/contexts.cc
index d066d34..4f93abd 100644
--- a/src/contexts.cc
+++ b/src/contexts.cc
@@ -87,13 +87,15 @@
Handle<Object> Context::Lookup(Handle<String> name,
ContextLookupFlags flags,
int* index_,
- PropertyAttributes* attributes) {
+ PropertyAttributes* attributes,
+ BindingFlags* binding_flags) {
Isolate* isolate = GetIsolate();
Handle<Context> context(this, isolate);
bool follow_context_chain = (flags & FOLLOW_CONTEXT_CHAIN) != 0;
*index_ = -1;
*attributes = ABSENT;
+ *binding_flags = MISSING_BINDING;
if (FLAG_trace_contexts) {
PrintF("Context::Lookup(");
@@ -109,7 +111,7 @@
}
// Check extension/with/global object.
- if (context->has_extension()) {
+ if (!context->IsBlockContext() && context->has_extension()) {
if (context->IsCatchContext()) {
// Catch contexts have the variable name in the extension slot.
if (name->Equals(String::cast(context->extension()))) {
@@ -118,9 +120,13 @@
}
*index_ = Context::THROWN_OBJECT_INDEX;
*attributes = NONE;
+ *binding_flags = MUTABLE_IS_INITIALIZED;
return context;
}
} else {
+ ASSERT(context->IsGlobalContext() ||
+ context->IsFunctionContext() ||
+ context->IsWithContext());
// Global, function, and with contexts may have an object in the
// extension slot.
Handle<JSObject> extension(JSObject::cast(context->extension()),
@@ -145,11 +151,20 @@
}
}
- // Only functions can have locals, parameters, and a function name.
- if (context->IsFunctionContext()) {
+ // Check serialized scope information of functions and blocks. Only
+ // functions can have parameters and a function name.
+ if (context->IsFunctionContext() || context->IsBlockContext()) {
// We may have context-local slots. Check locals in the context.
- Handle<SerializedScopeInfo> scope_info(
- context->closure()->shared()->scope_info(), isolate);
+ Handle<SerializedScopeInfo> scope_info;
+ if (context->IsFunctionContext()) {
+ scope_info = Handle<SerializedScopeInfo>(
+ context->closure()->shared()->scope_info(), isolate);
+ } else {
+ ASSERT(context->IsBlockContext());
+ scope_info = Handle<SerializedScopeInfo>(
+ SerializedScopeInfo::cast(context->extension()), isolate);
+ }
+
Variable::Mode mode;
int index = scope_info->ContextSlotIndex(*name, &mode);
ASSERT(index < 0 || index >= MIN_CONTEXT_SLOTS);
@@ -169,9 +184,15 @@
case Variable::INTERNAL: // Fall through.
case Variable::VAR:
*attributes = NONE;
+ *binding_flags = MUTABLE_IS_INITIALIZED;
+ break;
+ case Variable::LET:
+ *attributes = NONE;
+ *binding_flags = MUTABLE_CHECK_INITIALIZED;
break;
case Variable::CONST:
*attributes = READ_ONLY;
+ *binding_flags = IMMUTABLE_CHECK_INITIALIZED;
break;
case Variable::DYNAMIC:
case Variable::DYNAMIC_GLOBAL:
@@ -194,6 +215,7 @@
}
*index_ = index;
*attributes = READ_ONLY;
+ *binding_flags = IMMUTABLE_IS_INITIALIZED;
return context;
}
}
diff --git a/src/contexts.h b/src/contexts.h
index 53b40f1..505f86c 100644
--- a/src/contexts.h
+++ b/src/contexts.h
@@ -44,6 +44,30 @@
};
+// ES5 10.2 defines lexical environments with mutable and immutable bindings.
+// Immutable bindings have two states, initialized and uninitialized, and
+// their state is changed by the InitializeImmutableBinding method.
+//
+// The harmony proposal for block scoped bindings also introduces the
+// uninitialized state for mutable bindings. A 'let' declared variable
+// is a mutable binding that is created uninitialized upon activation of its
+// lexical environment and is initialized when its declaration statement is
+// evaluated. Var declared variables are mutable bindings that are
+// immediately initialized upon creation. The BindingFlags enum indicates
+// whether a binding has definitely been initialized. 'const' declared
+// variables are created as uninitialized immutable bindings.
+
+// In harmony mode accessing an uninitialized binding produces a reference
+// error.
+enum BindingFlags {
+ MUTABLE_IS_INITIALIZED,
+ MUTABLE_CHECK_INITIALIZED,
+ IMMUTABLE_IS_INITIALIZED,
+ IMMUTABLE_CHECK_INITIALIZED,
+ MISSING_BINDING
+};
+
+
// Heap-allocated activation contexts.
//
// Contexts are implemented as FixedArray objects; the Context
@@ -295,6 +319,10 @@
Map* map = this->map();
return map == map->GetHeap()->with_context_map();
}
+ bool IsBlockContext() {
+ Map* map = this->map();
+ return map == map->GetHeap()->block_context_map();
+ }
// Tells whether the global context is marked with out of memory.
inline bool has_out_of_memory();
@@ -347,8 +375,11 @@
// 4) index_ < 0 && result.is_null():
// there was no context found with the corresponding property.
// attributes == ABSENT.
- Handle<Object> Lookup(Handle<String> name, ContextLookupFlags flags,
- int* index_, PropertyAttributes* attributes);
+ Handle<Object> Lookup(Handle<String> name,
+ ContextLookupFlags flags,
+ int* index_,
+ PropertyAttributes* attributes,
+ BindingFlags* binding_flags);
// Determine if a local variable with the given name exists in a
// context. Do not consider context extension objects. This is
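
Context::Lookup now also reports a BindingFlags value so callers know whether the binding they found may still be uninitialized (harmony let/const) and needs a hole check. A standalone sketch of the mode-to-flags mapping that the contexts.cc switch above implements, using plain enums rather than the V8 Variable/Context classes:

    enum ModeSketch { VAR, LET, CONST };

    enum BindingFlagsSketch {
      MUTABLE_IS_INITIALIZED,       // var: initialized at creation
      MUTABLE_CHECK_INITIALIZED,    // let: may still be the hole
      IMMUTABLE_IS_INITIALIZED,
      IMMUTABLE_CHECK_INITIALIZED,  // const: initialized by its declaration
      MISSING_BINDING
    };

    BindingFlagsSketch FlagsForMode(ModeSketch mode) {
      switch (mode) {
        case VAR:   return MUTABLE_IS_INITIALIZED;
        case LET:   return MUTABLE_CHECK_INITIALIZED;
        case CONST: return IMMUTABLE_CHECK_INITIALIZED;
      }
      return MISSING_BINDING;  // not reachable for the three modes above
    }
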
diff --git a/src/cpu-profiler.cc b/src/cpu-profiler.cc
index bb480fc..8b3333e 100644
--- a/src/cpu-profiler.cc
+++ b/src/cpu-profiler.cc
@@ -562,12 +562,12 @@
sampler->Stop();
need_to_stop_sampler_ = false;
}
+ NoBarrier_Store(&is_profiling_, false);
processor_->Stop();
processor_->Join();
delete processor_;
delete generator_;
processor_ = NULL;
- NoBarrier_Store(&is_profiling_, false);
generator_ = NULL;
logger->logging_nesting_ = saved_logging_nesting_;
}
diff --git a/src/d8.cc b/src/d8.cc
index 5fd9d27..4d7e988 100644
--- a/src/d8.cc
+++ b/src/d8.cc
@@ -26,8 +26,8 @@
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-#ifdef V8_SHARED
-#define USING_V8_SHARED
+#ifdef USING_V8_SHARED // Defined when linking against shared lib on Windows.
+#define V8_SHARED
#endif
#ifdef COMPRESS_STARTUP_DATA_BZ2
@@ -37,15 +37,16 @@
#include <errno.h>
#include <stdlib.h>
#include <string.h>
+#include <sys/stat.h>
-#ifdef USING_V8_SHARED
+#ifdef V8_SHARED
#include <assert.h>
#include "../include/v8-testing.h"
-#endif // USING_V8_SHARED
+#endif // V8_SHARED
#include "d8.h"
-#ifndef USING_V8_SHARED
+#ifndef V8_SHARED
#include "api.h"
#include "checks.h"
#include "d8-debug.h"
@@ -53,20 +54,20 @@
#include "natives.h"
#include "platform.h"
#include "v8.h"
-#endif // USING_V8_SHARED
+#endif // V8_SHARED
#if !defined(_WIN32) && !defined(_WIN64)
#include <unistd.h> // NOLINT
#endif
-#ifdef USING_V8_SHARED
+#ifndef ASSERT
#define ASSERT(condition) assert(condition)
-#endif // USING_V8_SHARED
+#endif
namespace v8 {
-#ifndef USING_V8_SHARED
+#ifndef V8_SHARED
LineEditor *LineEditor::first_ = NULL;
const char* Shell::kHistoryFileName = ".d8_history";
@@ -116,20 +117,20 @@
CounterCollection* Shell::counters_ = &local_counters_;
i::Mutex* Shell::context_mutex_(i::OS::CreateMutex());
Persistent<Context> Shell::utility_context_;
-#endif // USING_V8_SHARED
+#endif // V8_SHARED
Persistent<Context> Shell::evaluation_context_;
ShellOptions Shell::options;
const char* Shell::kPrompt = "d8> ";
-#ifndef USING_V8_SHARED
+#ifndef V8_SHARED
bool CounterMap::Match(void* key1, void* key2) {
const char* name1 = reinterpret_cast<const char*>(key1);
const char* name2 = reinterpret_cast<const char*>(key2);
return strcmp(name1, name2) == 0;
}
-#endif // USING_V8_SHARED
+#endif // V8_SHARED
// Converts a V8 value to a C string.
@@ -143,11 +144,11 @@
Handle<Value> name,
bool print_result,
bool report_exceptions) {
-#ifndef USING_V8_SHARED
+#ifndef V8_SHARED
bool FLAG_debugger = i::FLAG_debugger;
#else
bool FLAG_debugger = false;
-#endif // USING_V8_SHARED
+#endif // V8_SHARED
HandleScope handle_scope;
TryCatch try_catch;
options.script_executed = true;
@@ -199,7 +200,7 @@
printf(" ");
}
v8::String::Utf8Value str(args[i]);
- int n = fwrite(*str, sizeof(**str), str.length(), stdout);
+ int n = static_cast<int>(fwrite(*str, sizeof(**str), str.length(), stdout));
if (n != str.length()) {
printf("Error in fwrite\n");
exit(1);
@@ -226,17 +227,24 @@
static const int kBufferSize = 256;
char buffer[kBufferSize];
Handle<String> accumulator = String::New("");
- bool linebreak;
int length;
- do { // Repeat if the line ends with an escape '\'.
- // fgets got an error. Just give up.
+ while (true) {
+ // Continue reading if the line ends with an escape '\\' or the line has
+ // not been fully read into the buffer yet (does not end with '\n').
+ // If fgets gets an error, just give up.
if (fgets(buffer, kBufferSize, stdin) == NULL) return Null();
- length = strlen(buffer);
- linebreak = (length > 1 && buffer[length-2] == '\\');
- if (linebreak) buffer[length-2] = '\n';
- accumulator = String::Concat(accumulator, String::New(buffer, length-1));
- } while (linebreak);
- return accumulator;
+ length = static_cast<int>(strlen(buffer));
+ if (length == 0) {
+ return accumulator;
+ } else if (buffer[length-1] != '\n') {
+ accumulator = String::Concat(accumulator, String::New(buffer, length));
+ } else if (length > 1 && buffer[length-2] == '\\') {
+ buffer[length-2] = '\n';
+ accumulator = String::Concat(accumulator, String::New(buffer, length-1));
+ } else {
+ return String::Concat(accumulator, String::New(buffer, length-1));
+ }
+ }
}
@@ -269,9 +277,9 @@
String::New("Array constructor needs one parameter."));
}
static const int kMaxLength = 0x3fffffff;
-#ifndef USING_V8_SHARED
+#ifndef V8_SHARED
ASSERT(kMaxLength == i::ExternalArray::kMaxLength);
-#endif // USING_V8_SHARED
+#endif // V8_SHARED
size_t length = 0;
if (args[0]->IsUint32()) {
length = args[0]->Uint32Value();
@@ -299,9 +307,12 @@
Persistent<Object> persistent_array = Persistent<Object>::New(array);
persistent_array.MakeWeak(data, ExternalArrayWeakCallback);
persistent_array.MarkIndependent();
- array->SetIndexedPropertiesToExternalArrayData(data, type, length);
- array->Set(String::New("length"), Int32::New(length), ReadOnly);
- array->Set(String::New("BYTES_PER_ELEMENT"), Int32::New(element_size));
+ array->SetIndexedPropertiesToExternalArrayData(data, type,
+ static_cast<int>(length));
+ array->Set(String::New("length"),
+ Int32::New(static_cast<int32_t>(length)), ReadOnly);
+ array->Set(String::New("BYTES_PER_ELEMENT"),
+ Int32::New(static_cast<int32_t>(element_size)));
return array;
}
@@ -368,9 +379,9 @@
Handle<Value> Shell::Quit(const Arguments& args) {
int exit_code = args[0]->Int32Value();
-#ifndef USING_V8_SHARED
+#ifndef V8_SHARED
OnExit();
-#endif // USING_V8_SHARED
+#endif // V8_SHARED
exit(exit_code);
return Undefined();
}
@@ -419,7 +430,7 @@
}
-#ifndef USING_V8_SHARED
+#ifndef V8_SHARED
Handle<Array> Shell::GetCompletions(Handle<String> text, Handle<String> full) {
HandleScope handle_scope;
Context::Scope context_scope(utility_context_);
@@ -454,10 +465,10 @@
return val;
}
#endif // ENABLE_DEBUGGER_SUPPORT
-#endif // USING_V8_SHARED
+#endif // V8_SHARED
-#ifndef USING_V8_SHARED
+#ifndef V8_SHARED
int32_t* Counter::Bind(const char* name, bool is_histogram) {
int i;
for (i = 0; i < kMaxNameSize - 1 && name[i]; i++)
@@ -605,7 +616,7 @@
}
#endif // ENABLE_DEBUGGER_SUPPORT
}
-#endif // USING_V8_SHARED
+#endif // V8_SHARED
#ifdef COMPRESS_STARTUP_DATA_BZ2
@@ -667,16 +678,16 @@
FunctionTemplate::New(PixelArray));
#ifdef LIVE_OBJECT_LIST
- global_template->Set(String::New("lol_is_enabled"), Boolean::New(true));
+ global_template->Set(String::New("lol_is_enabled"), True());
#else
- global_template->Set(String::New("lol_is_enabled"), Boolean::New(false));
+ global_template->Set(String::New("lol_is_enabled"), False());
#endif
-#ifndef USING_V8_SHARED
+#ifndef V8_SHARED
Handle<ObjectTemplate> os_templ = ObjectTemplate::New();
AddOSMethods(os_templ);
global_template->Set(String::New("os"), os_templ);
-#endif // USING_V8_SHARED
+#endif // V8_SHARED
return global_template;
}
@@ -692,7 +703,7 @@
}
#endif
-#ifndef USING_V8_SHARED
+#ifndef V8_SHARED
Shell::counter_map_ = new CounterMap();
// Set up counters
if (i::StrLength(i::FLAG_map_counters) != 0)
@@ -702,10 +713,10 @@
V8::SetCreateHistogramFunction(CreateHistogram);
V8::SetAddHistogramSampleFunction(AddHistogramSample);
}
-#endif // USING_V8_SHARED
+#endif // V8_SHARED
if (options.test_shell) return;
-#ifndef USING_V8_SHARED
+#ifndef V8_SHARED
Locker lock;
HandleScope scope;
Handle<ObjectTemplate> global_template = CreateGlobalTemplate();
@@ -717,21 +728,22 @@
v8::Debug::EnableAgent("d8 shell", i::FLAG_debugger_port, true);
}
#endif // ENABLE_DEBUGGER_SUPPORT
-#endif // USING_V8_SHARED
+#endif // V8_SHARED
}
Persistent<Context> Shell::CreateEvaluationContext() {
-#ifndef USING_V8_SHARED
+#ifndef V8_SHARED
// This needs to be a critical section since this is not thread-safe
i::ScopedLock lock(context_mutex_);
-#endif // USING_V8_SHARED
+#endif // V8_SHARED
// Initialize the global objects
Handle<ObjectTemplate> global_template = CreateGlobalTemplate();
Persistent<Context> context = Context::New(NULL, global_template);
+ ASSERT(!context.IsEmpty());
Context::Scope scope(context);
-#ifndef USING_V8_SHARED
+#ifndef V8_SHARED
i::JSArguments js_args = i::FLAG_js_arguments;
i::Handle<i::FixedArray> arguments_array =
FACTORY->NewFixedArray(js_args.argc());
@@ -744,12 +756,12 @@
FACTORY->NewJSArrayWithElements(arguments_array);
context->Global()->Set(String::New("arguments"),
Utils::ToLocal(arguments_jsarray));
-#endif // USING_V8_SHARED
+#endif // V8_SHARED
return context;
}
-#ifndef USING_V8_SHARED
+#ifndef V8_SHARED
void Shell::OnExit() {
if (i::FLAG_dump_counters) {
printf("+----------------------------------------+-------------+\n");
@@ -769,18 +781,34 @@
if (counters_file_ != NULL)
delete counters_file_;
}
-#endif // USING_V8_SHARED
+#endif // V8_SHARED
+
+
+static FILE* FOpen(const char* path, const char* mode) {
+#if (defined(_WIN32) || defined(_WIN64))
+ FILE* result;
+ if (fopen_s(&result, path, mode) == 0) {
+ return result;
+ } else {
+ return NULL;
+ }
+#else
+ FILE* file = fopen(path, mode);
+ if (file == NULL) return NULL;
+ struct stat file_stat;
+ if (fstat(fileno(file), &file_stat) != 0) return NULL;
+ bool is_regular_file = ((file_stat.st_mode & S_IFREG) != 0);
+ if (is_regular_file) return file;
+ fclose(file);
+ return NULL;
+#endif
+}
static char* ReadChars(const char* name, int* size_out) {
// Release the V8 lock while reading files.
v8::Unlocker unlocker(Isolate::GetCurrent());
-#ifndef USING_V8_SHARED
- FILE* file = i::OS::FOpen(name, "rb");
-#else
- // TODO(yangguo@chromium.org): reading from a directory hangs!
- FILE* file = fopen(name, "rb");
-#endif // USING_V8_SHARED
+ FILE* file = FOpen(name, "rb");
if (file == NULL) return NULL;
fseek(file, 0, SEEK_END);
@@ -790,7 +818,7 @@
char* chars = new char[size + 1];
chars[size] = '\0';
for (int i = 0; i < size;) {
- int read = fread(&chars[i], 1, size - i, file);
+ int read = static_cast<int>(fread(&chars[i], 1, size - i, file));
i += read;
}
fclose(file);
@@ -799,7 +827,7 @@
}
-#ifndef USING_V8_SHARED
+#ifndef V8_SHARED
static char* ReadToken(char* data, char token) {
char* next = i::OS::StrChr(data, token);
if (next != NULL) {
@@ -819,7 +847,7 @@
static char* ReadWord(char* data) {
return ReadToken(data, ' ');
}
-#endif // USING_V8_SHARED
+#endif // V8_SHARED
// Reads a file into a v8 string.
@@ -838,7 +866,7 @@
Context::Scope context_scope(evaluation_context_);
HandleScope handle_scope;
Handle<String> name = String::New("(d8)");
-#ifndef USING_V8_SHARED
+#ifndef V8_SHARED
LineEditor* editor = LineEditor::Get();
printf("V8 version %s [console: %s]\n", V8::GetVersion(), editor->name());
if (i::FLAG_debugger) {
@@ -861,12 +889,12 @@
if (fgets(buffer, kBufferSize, stdin) == NULL) break;
ExecuteString(String::New(buffer), name, true, true);
}
-#endif // USING_V8_SHARED
+#endif // V8_SHARED
printf("\n");
}
-#ifndef USING_V8_SHARED
+#ifndef V8_SHARED
class ShellThread : public i::Thread {
public:
ShellThread(int no, i::Vector<const char> files)
@@ -919,7 +947,7 @@
ptr = next_line;
}
}
-#endif // USING_V8_SHARED
+#endif // V8_SHARED
void SourceGroup::ExitShell(int exit_code) {
@@ -966,32 +994,16 @@
Handle<String> SourceGroup::ReadFile(const char* name) {
-#ifndef USING_V8_SHARED
- FILE* file = i::OS::FOpen(name, "rb");
-#else
- // TODO(yangguo@chromium.org): reading from a directory hangs!
- FILE* file = fopen(name, "rb");
-#endif // USING_V8_SHARED
- if (file == NULL) return Handle<String>();
-
- fseek(file, 0, SEEK_END);
- int size = ftell(file);
- rewind(file);
-
- char* chars = new char[size + 1];
- chars[size] = '\0';
- for (int i = 0; i < size;) {
- int read = fread(&chars[i], 1, size - i, file);
- i += read;
- }
- fclose(file);
+ int size;
+ const char* chars = ReadChars(name, &size);
+ if (chars == NULL) return Handle<String>();
Handle<String> result = String::New(chars, size);
delete[] chars;
return result;
}
-#ifndef USING_V8_SHARED
+#ifndef V8_SHARED
i::Thread::Options SourceGroup::GetThreadOptions() {
i::Thread::Options options;
options.name = "IsolateThread";
@@ -1043,7 +1055,7 @@
done_semaphore_->Wait();
}
}
-#endif // USING_V8_SHARED
+#endif // V8_SHARED
bool Shell::SetOptions(int argc, char* argv[]) {
@@ -1065,23 +1077,23 @@
options.test_shell = true;
argv[i] = NULL;
} else if (strcmp(argv[i], "--preemption") == 0) {
-#ifdef USING_V8_SHARED
+#ifdef V8_SHARED
printf("D8 with shared library does not support multi-threading\n");
return false;
#else
options.use_preemption = true;
argv[i] = NULL;
-#endif // USING_V8_SHARED
+#endif // V8_SHARED
} else if (strcmp(argv[i], "--no-preemption") == 0) {
-#ifdef USING_V8_SHARED
+#ifdef V8_SHARED
printf("D8 with shared library does not support multi-threading\n");
return false;
#else
options.use_preemption = false;
argv[i] = NULL;
-#endif // USING_V8_SHARED
+#endif // V8_SHARED
} else if (strcmp(argv[i], "--preemption-interval") == 0) {
-#ifdef USING_V8_SHARED
+#ifdef V8_SHARED
printf("D8 with shared library does not support multi-threading\n");
return false;
#else
@@ -1100,19 +1112,19 @@
printf("Missing value for --preemption-interval\n");
return false;
}
-#endif // USING_V8_SHARED
+#endif // V8_SHARED
} else if (strcmp(argv[i], "-f") == 0) {
// Ignore any -f flags for compatibility with other stand-alone
// JavaScript engines.
continue;
} else if (strcmp(argv[i], "--isolate") == 0) {
-#ifdef USING_V8_SHARED
+#ifdef V8_SHARED
printf("D8 with shared library does not support multi-threading\n");
return false;
-#endif // USING_V8_SHARED
+#endif // V8_SHARED
options.num_isolates++;
}
-#ifdef USING_V8_SHARED
+#ifdef V8_SHARED
else if (strcmp(argv[i], "--dump-counters") == 0) {
printf("D8 with shared library does not include counters\n");
return false;
@@ -1123,10 +1135,10 @@
printf("Javascript debugger not included\n");
return false;
}
-#endif // USING_V8_SHARED
+#endif // V8_SHARED
}
-#ifndef USING_V8_SHARED
+#ifndef V8_SHARED
// Run parallel threads if we are not using --isolate
for (int i = 1; i < argc; i++) {
if (argv[i] == NULL) continue;
@@ -1147,9 +1159,10 @@
}
argv[i] = NULL;
options.parallel_files->Add(i::Vector<const char>(files, size));
+ delete[] files;
}
}
-#endif // USING_V8_SHARED
+#endif // V8_SHARED
v8::V8::SetFlagsFromCommandLine(&argc, argv, true);
@@ -1174,7 +1187,7 @@
int Shell::RunMain(int argc, char* argv[]) {
-#ifndef USING_V8_SHARED
+#ifndef V8_SHARED
i::List<i::Thread*> threads(1);
if (options.parallel_files != NULL)
for (int i = 0; i < options.parallel_files->length(); i++) {
@@ -1187,7 +1200,7 @@
for (int i = 1; i < options.num_isolates; ++i) {
options.isolate_sources[i].StartExecuteInThread();
}
-#endif // USING_V8_SHARED
+#endif // V8_SHARED
{ // NOLINT
Locker lock;
HandleScope scope;
@@ -1211,17 +1224,17 @@
context.Dispose();
}
-#ifndef USING_V8_SHARED
+#ifndef V8_SHARED
// Start preemption if threads have been created and preemption is enabled.
if (options.parallel_files != NULL
&& threads.length() > 0
&& options.use_preemption) {
Locker::StartPreemption(options.preemption_interval);
}
-#endif // USING_V8_SHARED
+#endif // V8_SHARED
}
-#ifndef USING_V8_SHARED
+#ifndef V8_SHARED
for (int i = 1; i < options.num_isolates; ++i) {
options.isolate_sources[i].WaitForThread();
}
@@ -1232,9 +1245,7 @@
thread->Join();
delete thread;
}
-
- OnExit();
-#endif // USING_V8_SHARED
+#endif // V8_SHARED
return 0;
}
@@ -1262,14 +1273,14 @@
}
-#if !defined(USING_V8_SHARED) && defined(ENABLE_DEBUGGER_SUPPORT)
+#if !defined(V8_SHARED) && defined(ENABLE_DEBUGGER_SUPPORT)
// Run remote debugger if requested, but never on --test
if (i::FLAG_remote_debugger && !options.test_shell) {
InstallUtilityScript();
RunRemoteDebugger(i::FLAG_debugger_port);
return 0;
}
-#endif // !USING_V8_SHARED && ENABLE_DEBUGGER_SUPPORT
+#endif // !V8_SHARED && ENABLE_DEBUGGER_SUPPORT
// Run interactive shell if explicitly requested or if no script has been
// executed, but never on --test
@@ -1277,16 +1288,20 @@
if (( options.interactive_shell
|| !options.script_executed )
&& !options.test_shell ) {
-#ifndef USING_V8_SHARED
+#ifndef V8_SHARED
if (!i::FLAG_debugger) {
InstallUtilityScript();
}
-#endif // USING_V8_SHARED
+#endif // V8_SHARED
RunShell();
}
V8::Dispose();
+#ifndef V8_SHARED
+ OnExit();
+#endif // V8_SHARED
+
return result;
}
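
The rewritten Shell::ReadLine above handles both backslash continuations and input lines longer than the 256-byte buffer. A standalone sketch of the same loop using std::string in place of V8 string handles (ReadLogicalLine is a hypothetical helper name; on EOF it returns what was read so far, where d8 returns null):

    #include <cstdio>
    #include <cstring>
    #include <string>

    std::string ReadLogicalLine(FILE* in) {
      static const int kBufferSize = 256;
      char buffer[kBufferSize];
      std::string accumulator;
      while (true) {
        if (fgets(buffer, kBufferSize, in) == NULL) return accumulator;
        size_t length = strlen(buffer);
        if (length == 0) {
          return accumulator;
        } else if (buffer[length - 1] != '\n') {
          // Buffer filled before the newline: keep reading the same line.
          accumulator.append(buffer, length);
        } else if (length > 1 && buffer[length - 2] == '\\') {
          // Trailing backslash: replace the "\\\n" pair with '\n', continue.
          buffer[length - 2] = '\n';
          accumulator.append(buffer, length - 1);
        } else {
          accumulator.append(buffer, length - 1);  // drop the final '\n'
          return accumulator;
        }
      }
    }
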
diff --git a/src/d8.gyp b/src/d8.gyp
index 48442b1..70186cf 100644
--- a/src/d8.gyp
+++ b/src/d8.gyp
@@ -1,4 +1,4 @@
-# Copyright 2011 the V8 project authors. All rights reserved.
+# Copyright 2010 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
@@ -26,6 +26,7 @@
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
{
+ 'includes': ['../build/common.gypi'],
'variables': {
'console%': '',
},
@@ -36,6 +37,7 @@
'dependencies': [
'../tools/gyp/v8.gyp:v8',
],
+ # Generated source files need this explicitly:
'include_dirs+': [
'../src',
],
@@ -47,9 +49,17 @@
],
'conditions': [
[ 'component!="shared_library"', {
- 'dependencies': [ 'd8_js2c#host', ],
'sources': [ 'd8-debug.cc', '<(SHARED_INTERMEDIATE_DIR)/d8-js.cc', ],
'conditions': [
+ [ 'want_separate_host_toolset==1', {
+ 'dependencies': [
+ 'd8_js2c#host',
+ ],
+ }, {
+ 'dependencies': [
+ 'd8_js2c',
+ ],
+ }],
[ 'console=="readline"', {
'libraries': [ '-lreadline', ],
'sources': [ 'd8-readline.cc' ],
@@ -68,13 +78,19 @@
{
'target_name': 'd8_js2c',
'type': 'none',
- 'toolsets': ['host'],
'variables': {
'js_files': [
'd8.js',
'macros.py',
],
},
+ 'conditions': [
+ [ 'want_separate_host_toolset==1', {
+ 'toolsets': ['host'],
+ }, {
+ 'toolsets': ['target'],
+ }]
+ ],
'actions': [
{
'action_name': 'd8_js2c',
diff --git a/src/d8.h b/src/d8.h
index 840ca1e..28321f5 100644
--- a/src/d8.h
+++ b/src/d8.h
@@ -29,22 +29,18 @@
#define V8_D8_H_
-#ifndef USING_V8_SHARED
+#ifndef V8_SHARED
#include "v8.h"
#include "allocation.h"
#include "hashmap.h"
#else
#include "../include/v8.h"
-#endif // USING_V8_SHARED
+#endif // V8_SHARED
namespace v8 {
-#ifndef USING_V8_SHARED
-namespace i = v8::internal;
-#endif // USING_V8_SHARED
-
-#ifndef USING_V8_SHARED
+#ifndef V8_SHARED
// A single counter in a counter collection.
class Counter {
public:
@@ -117,17 +113,17 @@
static bool Match(void* key1, void* key2);
i::HashMap hash_map_;
};
-#endif // USING_V8_SHARED
+#endif // V8_SHARED
class SourceGroup {
public:
SourceGroup() :
-#ifndef USING_V8_SHARED
+#ifndef V8_SHARED
next_semaphore_(v8::internal::OS::CreateSemaphore(0)),
done_semaphore_(v8::internal::OS::CreateSemaphore(0)),
thread_(NULL),
-#endif // USING_V8_SHARED
+#endif // V8_SHARED
argv_(NULL),
begin_offset_(0),
end_offset_(0) { }
@@ -141,7 +137,7 @@
void Execute();
-#ifndef USING_V8_SHARED
+#ifndef V8_SHARED
void StartExecuteInThread();
void WaitForThread();
@@ -165,7 +161,7 @@
i::Semaphore* next_semaphore_;
i::Semaphore* done_semaphore_;
i::Thread* thread_;
-#endif // USING_V8_SHARED
+#endif // V8_SHARED
void ExitShell(int exit_code);
Handle<String> ReadFile(const char* name);
@@ -179,11 +175,11 @@
class ShellOptions {
public:
ShellOptions() :
-#ifndef USING_V8_SHARED
+#ifndef V8_SHARED
use_preemption(true),
preemption_interval(10),
parallel_files(NULL),
-#endif // USING_V8_SHARED
+#endif // V8_SHARED
script_executed(false),
last_run(true),
stress_opt(false),
@@ -193,11 +189,11 @@
num_isolates(1),
isolate_sources(NULL) { }
-#ifndef USING_V8_SHARED
+#ifndef V8_SHARED
bool use_preemption;
int preemption_interval;
i::List< i::Vector<const char> >* parallel_files;
-#endif // USING_V8_SHARED
+#endif // V8_SHARED
bool script_executed;
bool last_run;
bool stress_opt;
@@ -208,11 +204,11 @@
SourceGroup* isolate_sources;
};
-#ifdef USING_V8_SHARED
+#ifdef V8_SHARED
class Shell {
#else
class Shell : public i::AllStatic {
-#endif // USING_V8_SHARED
+#endif // V8_SHARED
public:
static bool ExecuteString(Handle<String> source,
Handle<Value> name,
@@ -225,7 +221,7 @@
static int RunMain(int argc, char* argv[]);
static int Main(int argc, char* argv[]);
-#ifndef USING_V8_SHARED
+#ifndef V8_SHARED
static Handle<Array> GetCompletions(Handle<String> text,
Handle<String> full);
static void OnExit();
@@ -236,7 +232,7 @@
size_t buckets);
static void AddHistogramSample(void* histogram, int sample);
static void MapCounters(const char* name);
-#endif // USING_V8_SHARED
+#endif // V8_SHARED
#ifdef ENABLE_DEBUGGER_SUPPORT
static Handle<Object> DebugMessageDetails(Handle<String> message);
@@ -300,15 +296,15 @@
static Handle<Value> RemoveDirectory(const Arguments& args);
static void AddOSMethods(Handle<ObjectTemplate> os_template);
-#ifndef USING_V8_SHARED
+#ifndef V8_SHARED
static const char* kHistoryFileName;
-#endif // USING_V8_SHARED
+#endif // V8_SHARED
static const char* kPrompt;
static ShellOptions options;
private:
static Persistent<Context> evaluation_context_;
-#ifndef USING_V8_SHARED
+#ifndef V8_SHARED
static Persistent<Context> utility_context_;
static CounterMap* counter_map_;
// We statically allocate a set of local counters to be used if we
@@ -320,7 +316,7 @@
static Counter* GetCounter(const char* name, bool is_histogram);
static void InstallUtilityScript();
-#endif // USING_V8_SHARED
+#endif // V8_SHARED
static void Initialize();
static void RunShell();
static bool SetOptions(int argc, char* argv[]);
@@ -332,7 +328,7 @@
};
-#ifndef USING_V8_SHARED
+#ifndef V8_SHARED
class LineEditor {
public:
enum Type { DUMB = 0, READLINE = 1 };
@@ -352,7 +348,7 @@
LineEditor* next_;
static LineEditor* first_;
};
-#endif // USING_V8_SHARED
+#endif // V8_SHARED
} // namespace v8
diff --git a/src/d8.js b/src/d8.js
index 033455e..a2b9585 100644
--- a/src/d8.js
+++ b/src/d8.js
@@ -103,7 +103,8 @@
Local: 1,
With: 2,
Closure: 3,
- Catch: 4 };
+ Catch: 4,
+ Block: 5 };
// Current debug state.
@@ -391,14 +392,14 @@
this.frameCommandToJSONRequest_('' +
(Debug.State.currentFrame + 1));
break;
-
+
case 'down':
case 'do':
this.request_ =
this.frameCommandToJSONRequest_('' +
(Debug.State.currentFrame - 1));
break;
-
+
case 'set':
case 'print':
case 'p':
@@ -1071,7 +1072,7 @@
arg2 = 'uncaught';
}
excType = arg2;
-
+
// Check for:
// en[able] [all|unc[aught]] exc[eptions]
// dis[able] [all|unc[aught]] exc[eptions]
@@ -1130,7 +1131,7 @@
request.arguments.ignoreCount = parseInt(otherArgs);
break;
default:
- throw new Error('Invalid arguments.');
+ throw new Error('Invalid arguments.');
}
} else {
throw new Error('Invalid arguments.');
@@ -1251,7 +1252,7 @@
start_index = parseInt(args[i]);
// The user input start index starts at 1:
if (start_index <= 0) {
- throw new Error('Invalid index ' + args[i] + '.');
+ throw new Error('Invalid index ' + args[i] + '.');
}
start_index -= 1;
is_verbose = true;
@@ -2020,7 +2021,7 @@
} else if (body.breakOnUncaughtExceptions) {
result += '* breaking on UNCAUGHT exceptions is enabled\n';
} else {
- result += '* all exception breakpoints are disabled\n';
+ result += '* all exception breakpoints are disabled\n';
}
details.text = result;
break;
diff --git a/src/debug-debugger.js b/src/debug-debugger.js
index 36b624e..d254ee5 100644
--- a/src/debug-debugger.js
+++ b/src/debug-debugger.js
@@ -404,7 +404,7 @@
return this.script_name_ == script.nameOrSourceURL();
} else if (this.type_ == Debug.ScriptBreakPointType.ScriptRegExp) {
return this.script_regexp_object_.test(script.nameOrSourceURL());
- } else {
+ } else {
throw new Error("Unexpected breakpoint type " + this.type_);
}
}
@@ -1579,7 +1579,7 @@
response.failed('Missing argument "type" or "target"');
return;
}
-
+
// Either function or script break point.
var break_point_number;
if (type == 'function') {
@@ -1623,10 +1623,10 @@
break_point_number =
Debug.setScriptBreakPointByName(target, line, column, condition,
groupId);
- } else if (type == 'scriptId') {
+ } else if (type == 'scriptId') {
break_point_number =
Debug.setScriptBreakPointById(target, line, column, condition, groupId);
- } else if (type == 'scriptRegExp') {
+ } else if (type == 'scriptRegExp') {
break_point_number =
Debug.setScriptBreakPointByRegExp(target, line, column, condition,
groupId);
@@ -1797,7 +1797,7 @@
description.type = 'scriptRegExp';
description.script_regexp = break_point.script_regexp_object().source;
} else {
- throw new Error("Internal error: Unexpected breakpoint type: " + break_point.type());
+ throw new Error("Internal error: Unexpected breakpoint type: " + break_point.type());
}
array.push(description);
}
@@ -1838,7 +1838,7 @@
enabled = !Debug.isBreakOnException();
} else if (type == 'uncaught') {
enabled = !Debug.isBreakOnUncaughtException();
- }
+ }
// Pull out and check the 'enabled' argument if present:
if (!IS_UNDEFINED(request.arguments.enabled)) {
@@ -2022,22 +2022,22 @@
if (!IS_UNDEFINED(frame) && global) {
return response.failed('Arguments "frame" and "global" are exclusive');
}
-
+
var additional_context_object;
if (additional_context) {
additional_context_object = {};
for (var i = 0; i < additional_context.length; i++) {
var mapping = additional_context[i];
if (!IS_STRING(mapping.name) || !IS_NUMBER(mapping.handle)) {
- return response.failed("Context element #" + i +
+ return response.failed("Context element #" + i +
" must contain name:string and handle:number");
- }
+ }
var context_value_mirror = LookupMirror(mapping.handle);
if (!context_value_mirror) {
return response.failed("Context object '" + mapping.name +
"' #" + mapping.handle + "# not found");
}
- additional_context_object[mapping.name] = context_value_mirror.value();
+ additional_context_object[mapping.name] = context_value_mirror.value();
}
}
diff --git a/src/debug.cc b/src/debug.cc
index aecbb46..2d58ce1 100644
--- a/src/debug.cc
+++ b/src/debug.cc
@@ -169,7 +169,8 @@
if ((code->is_inline_cache_stub() &&
!code->is_binary_op_stub() &&
!code->is_unary_op_stub() &&
- !code->is_compare_ic_stub()) ||
+ !code->is_compare_ic_stub() &&
+ !code->is_to_boolean_ic_stub()) ||
RelocInfo::IsConstructCall(rmode())) {
break_point_++;
return;
@@ -1964,7 +1965,7 @@
Debugger::Debugger(Isolate* isolate)
- : debugger_access_(OS::CreateMutex()),
+ : debugger_access_(isolate->debugger_access()),
event_listener_(Handle<Object>()),
event_listener_data_(Handle<Object>()),
compiling_natives_(false),
@@ -1986,8 +1987,6 @@
Debugger::~Debugger() {
- delete debugger_access_;
- debugger_access_ = 0;
delete dispatch_handler_access_;
dispatch_handler_access_ = 0;
delete command_received_;
diff --git a/src/deoptimizer.cc b/src/deoptimizer.cc
index c90df45..94b2ff5 100644
--- a/src/deoptimizer.cc
+++ b/src/deoptimizer.cc
@@ -1187,11 +1187,11 @@
int32_t TranslationIterator::Next() {
- ASSERT(HasNext());
// Run through the bytes until we reach one with a least significant
// bit of zero (marks the end).
uint32_t bits = 0;
for (int i = 0; true; i += 7) {
+ ASSERT(HasNext());
uint8_t next = buffer_->get(index_++);
bits |= (next >> 1) << i;
if ((next & 1) == 0) break;
@@ -1442,6 +1442,7 @@
UNREACHABLE();
}
+#ifdef ENABLE_DEBUGGER_SUPPORT
DeoptimizedFrameInfo::DeoptimizedFrameInfo(
Deoptimizer* deoptimizer, int frame_index) {
@@ -1471,5 +1472,6 @@
v->VisitPointers(expression_stack_, expression_stack_ + expression_count_);
}
+#endif // ENABLE_DEBUGGER_SUPPORT
} } // namespace v8::internal
diff --git a/src/deoptimizer.h b/src/deoptimizer.h
index 9265905..8641261 100644
--- a/src/deoptimizer.h
+++ b/src/deoptimizer.h
@@ -317,7 +317,7 @@
List<HeapNumberMaterializationDescriptor> deferred_heap_numbers_;
- static int table_entry_size_;
+ static const int table_entry_size_;
friend class FrameDescription;
friend class DeoptimizingCodeListNode;
@@ -336,6 +336,10 @@
return malloc(size + frame_size - kPointerSize);
}
+ void operator delete(void* pointer, uint32_t frame_size) {
+ free(pointer);
+ }
+
void operator delete(void* description) {
free(description);
}
@@ -497,9 +501,7 @@
int32_t Next();
- bool HasNext() const { return index_ >= 0; }
-
- void Done() { index_ = -1; }
+ bool HasNext() const { return index_ < buffer_->length(); }
void Skip(int n) {
for (int i = 0; i < n; i++) Next();
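
The deoptimizer hunks above turn HasNext into a real bounds check (index_ < buffer_->length()) and assert it for every byte Next consumes. A standalone sketch of the byte format that loop decodes, with the same per-byte check (toy class, not the V8 TranslationIterator; only the bit-gathering loop is modeled):

    #include <cassert>
    #include <cstdint>
    #include <vector>

    class TranslationIteratorSketch {
     public:
      explicit TranslationIteratorSketch(const std::vector<uint8_t>& bytes)
          : bytes_(bytes), index_(0) {}

      bool HasNext() const { return index_ < bytes_.size(); }

      // Each byte carries 7 payload bits; its least significant bit is 1
      // while more bytes follow and 0 on the last byte of a value.
      uint32_t NextBits() {
        uint32_t bits = 0;
        for (int shift = 0; true; shift += 7) {
          assert(HasNext());  // now checked per byte, as in the patch
          uint8_t next = bytes_[index_++];
          bits |= static_cast<uint32_t>(next >> 1) << shift;
          if ((next & 1) == 0) break;
        }
        return bits;
      }

     private:
      std::vector<uint8_t> bytes_;
      size_t index_;
    };
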
diff --git a/src/disassembler.cc b/src/disassembler.cc
index 368c3a8..79076d6 100644
--- a/src/disassembler.cc
+++ b/src/disassembler.cc
@@ -97,14 +97,17 @@
}
-static void DumpBuffer(FILE* f, char* buff) {
+static void DumpBuffer(FILE* f, StringBuilder* out) {
if (f == NULL) {
- PrintF("%s", buff);
+ PrintF("%s\n", out->Finalize());
} else {
- fprintf(f, "%s", buff);
+ fprintf(f, "%s\n", out->Finalize());
}
+ out->Reset();
}
+
+
static const int kOutBufferSize = 2048 + String::kMaxShortPrintLength;
static const int kRelocInfoPosition = 57;
@@ -119,6 +122,7 @@
v8::internal::EmbeddedVector<char, 128> decode_buffer;
v8::internal::EmbeddedVector<char, kOutBufferSize> out_buffer;
+ StringBuilder out(out_buffer.start(), out_buffer.length());
byte* pc = begin;
disasm::Disassembler d(converter);
RelocIterator* it = NULL;
@@ -181,17 +185,12 @@
}
}
- StringBuilder out(out_buffer.start(), out_buffer.length());
-
// Comments.
for (int i = 0; i < comments.length(); i++) {
- out.AddFormatted(" %s\n", comments[i]);
+ out.AddFormatted(" %s", comments[i]);
+ DumpBuffer(f, &out);
}
- // Write out comments, resets outp so that we can format the next line.
- DumpBuffer(f, out.Finalize());
- out.Reset();
-
// Instruction address and instruction offset.
out.AddFormatted("%p %4d ", prev_pc, prev_pc - begin);
@@ -209,7 +208,7 @@
out.AddPadding(' ', kRelocInfoPosition - out.position());
} else {
// Additional reloc infos are printed on separate lines.
- out.AddFormatted("\n");
+ DumpBuffer(f, &out);
out.AddPadding(' ', kRelocInfoPosition);
}
@@ -299,9 +298,18 @@
out.AddFormatted(" ;; %s", RelocInfo::RelocModeName(rmode));
}
}
- out.AddString("\n");
- DumpBuffer(f, out.Finalize());
- out.Reset();
+ DumpBuffer(f, &out);
+ }
+
+ // Emit comments following the last instruction (if any).
+ if (it != NULL) {
+ for ( ; !it->done(); it->next()) {
+ if (RelocInfo::IsComment(it->rinfo()->rmode())) {
+ out.AddFormatted(" %s",
+ reinterpret_cast<const char*>(it->rinfo()->data()));
+ DumpBuffer(f, &out);
+ }
+ }
}
delete it;
diff --git a/src/elements.cc b/src/elements.cc
new file mode 100644
index 0000000..1afc5da
--- /dev/null
+++ b/src/elements.cc
@@ -0,0 +1,634 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#include "v8.h"
+
+#include "objects.h"
+#include "elements.h"
+#include "utils.h"
+
+namespace v8 {
+namespace internal {
+
+
+ElementsAccessor** ElementsAccessor::elements_accessors_;
+
+
+bool HasKey(FixedArray* array, Object* key) {
+ int len0 = array->length();
+ for (int i = 0; i < len0; i++) {
+ Object* element = array->get(i);
+ if (element->IsSmi() && element == key) return true;
+ if (element->IsString() &&
+ key->IsString() && String::cast(element)->Equals(String::cast(key))) {
+ return true;
+ }
+ }
+ return false;
+}
+
+
+// Base class for element handler implementations. Contains the
+// common logic for objects with different ElementsKinds.
+// Subclasses must specialize the methods for which the element
+// implementation differs from the base class implementation.
+//
+// This class is intended to be used in the following way:
+//
+// class SomeElementsAccessor :
+// public ElementsAccessorBase<SomeElementsAccessor,
+// BackingStoreClass> {
+// ...
+// }
+//
+// This is an example of the Curiously Recurring Template Pattern (see
+// http://en.wikipedia.org/wiki/Curiously_recurring_template_pattern). We use
+// CRTP to guarantee aggressive compile time optimizations (i.e. inlining and
+// specialization of SomeElementsAccessor methods).
+template <typename ElementsAccessorSubclass, typename BackingStoreClass>
+class ElementsAccessorBase : public ElementsAccessor {
+ protected:
+ ElementsAccessorBase() { }
+ virtual MaybeObject* Get(FixedArrayBase* backing_store,
+ uint32_t key,
+ JSObject* obj,
+ Object* receiver) {
+ return ElementsAccessorSubclass::Get(
+ BackingStoreClass::cast(backing_store), key, obj, receiver);
+ }
+
+ static MaybeObject* Get(BackingStoreClass* backing_store,
+ uint32_t key,
+ JSObject* obj,
+ Object* receiver) {
+ if (key < ElementsAccessorSubclass::GetCapacity(backing_store)) {
+ return backing_store->get(key);
+ }
+ return backing_store->GetHeap()->the_hole_value();
+ }
+
+ virtual MaybeObject* Delete(JSObject* obj,
+ uint32_t key,
+ JSReceiver::DeleteMode mode) = 0;
+
+ virtual MaybeObject* AddElementsToFixedArray(FixedArrayBase* from,
+ FixedArray* to,
+ JSObject* holder,
+ Object* receiver) {
+ int len0 = to->length();
+#ifdef DEBUG
+ if (FLAG_enable_slow_asserts) {
+ for (int i = 0; i < len0; i++) {
+ ASSERT(!to->get(i)->IsTheHole());
+ }
+ }
+#endif
+ BackingStoreClass* backing_store = BackingStoreClass::cast(from);
+ uint32_t len1 = ElementsAccessorSubclass::GetCapacity(backing_store);
+
+ // Optimize if 'from' is empty.
+ // We cannot shortcut when 'to' is empty, as 'from' may contain holes.
+ if (len1 == 0) return to;
+
+ // Compute how many elements are not in other.
+ int extra = 0;
+ for (uint32_t y = 0; y < len1; y++) {
+ if (ElementsAccessorSubclass::HasElementAtIndex(backing_store,
+ y,
+ holder,
+ receiver)) {
+ uint32_t key =
+ ElementsAccessorSubclass::GetKeyForIndex(backing_store, y);
+ MaybeObject* maybe_value =
+ ElementsAccessorSubclass::Get(backing_store, key, holder, receiver);
+ Object* value;
+ if (!maybe_value->ToObject(&value)) return maybe_value;
+ ASSERT(!value->IsTheHole());
+ if (!HasKey(to, value)) {
+ extra++;
+ }
+ }
+ }
+
+ if (extra == 0) return to;
+
+ // Allocate the result
+ FixedArray* result;
+ MaybeObject* maybe_obj =
+ backing_store->GetHeap()->AllocateFixedArray(len0 + extra);
+ if (!maybe_obj->To<FixedArray>(&result)) return maybe_obj;
+
+ // Fill in the content
+ {
+ AssertNoAllocation no_gc;
+ WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc);
+ for (int i = 0; i < len0; i++) {
+ Object* e = to->get(i);
+ ASSERT(e->IsString() || e->IsNumber());
+ result->set(i, e, mode);
+ }
+ }
+ // Fill in the extra values.
+ int index = 0;
+ for (uint32_t y = 0; y < len1; y++) {
+ if (ElementsAccessorSubclass::HasElementAtIndex(backing_store,
+ y,
+ holder,
+ receiver)) {
+ uint32_t key =
+ ElementsAccessorSubclass::GetKeyForIndex(backing_store, y);
+ MaybeObject* maybe_value =
+ ElementsAccessorSubclass::Get(backing_store, key, holder, receiver);
+ Object* value;
+ if (!maybe_value->ToObject(&value)) return maybe_value;
+ if (!value->IsTheHole() && !HasKey(to, value)) {
+ result->set(len0 + index, value);
+ index++;
+ }
+ }
+ }
+ ASSERT(extra == index);
+ return result;
+ }
+
+ protected:
+ static uint32_t GetCapacity(BackingStoreClass* backing_store) {
+ return backing_store->length();
+ }
+
+ virtual uint32_t GetCapacity(FixedArrayBase* backing_store) {
+ return ElementsAccessorSubclass::GetCapacity(
+ BackingStoreClass::cast(backing_store));
+ }
+
+ static bool HasElementAtIndex(BackingStoreClass* backing_store,
+ uint32_t index,
+ JSObject* holder,
+ Object* receiver) {
+ uint32_t key =
+ ElementsAccessorSubclass::GetKeyForIndex(backing_store, index);
+ MaybeObject* element = ElementsAccessorSubclass::Get(backing_store,
+ key,
+ holder,
+ receiver);
+ return !element->IsTheHole();
+ }
+
+ virtual bool HasElementAtIndex(FixedArrayBase* backing_store,
+ uint32_t index,
+ JSObject* holder,
+ Object* receiver) {
+ return ElementsAccessorSubclass::HasElementAtIndex(
+ BackingStoreClass::cast(backing_store), index, holder, receiver);
+ }
+
+ static uint32_t GetKeyForIndex(BackingStoreClass* backing_store,
+ uint32_t index) {
+ return index;
+ }
+
+ virtual uint32_t GetKeyForIndex(FixedArrayBase* backing_store,
+ uint32_t index) {
+ return ElementsAccessorSubclass::GetKeyForIndex(
+ BackingStoreClass::cast(backing_store), index);
+ }
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(ElementsAccessorBase);
+};
+
+
+class FastElementsAccessor
+ : public ElementsAccessorBase<FastElementsAccessor, FixedArray> {
+ public:
+ static MaybeObject* DeleteCommon(JSObject* obj,
+ uint32_t key) {
+ ASSERT(obj->HasFastElements() || obj->HasFastArgumentsElements());
+ Heap* heap = obj->GetHeap();
+ FixedArray* backing_store = FixedArray::cast(obj->elements());
+ if (backing_store->map() == heap->non_strict_arguments_elements_map()) {
+ backing_store = FixedArray::cast(backing_store->get(1));
+ } else {
+ Object* writable;
+ MaybeObject* maybe = obj->EnsureWritableFastElements();
+ if (!maybe->ToObject(&writable)) return maybe;
+ backing_store = FixedArray::cast(writable);
+ }
+ uint32_t length = static_cast<uint32_t>(
+ obj->IsJSArray()
+ ? Smi::cast(JSArray::cast(obj)->length())->value()
+ : backing_store->length());
+ if (key < length) {
+ backing_store->set_the_hole(key);
+ // If an old space backing store is larger than a certain size and
+ // has too few used values, normalize it.
+      // To avoid doing the check on every delete, we require at least one
+      // hole adjacent to the value being deleted.
+ Object* hole = heap->the_hole_value();
+ const int kMinLengthForSparsenessCheck = 64;
+ if (backing_store->length() >= kMinLengthForSparsenessCheck &&
+ !heap->InNewSpace(backing_store) &&
+ ((key > 0 && backing_store->get(key - 1) == hole) ||
+ (key + 1 < length && backing_store->get(key + 1) == hole))) {
+ int num_used = 0;
+ for (int i = 0; i < backing_store->length(); ++i) {
+ if (backing_store->get(i) != hole) ++num_used;
+ // Bail out early if more than 1/4 is used.
+ if (4 * num_used > backing_store->length()) break;
+ }
+ if (4 * num_used <= backing_store->length()) {
+ MaybeObject* result = obj->NormalizeElements();
+ if (result->IsFailure()) return result;
+ }
+ }
+ }
+ return heap->true_value();
+ }
+
+ protected:
+ virtual MaybeObject* Delete(JSObject* obj,
+ uint32_t key,
+ JSReceiver::DeleteMode mode) {
+ return DeleteCommon(obj, key);
+ }
+};
+
+
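[Editor's note] The sparseness heuristic in DeleteCommon above only kicks in for stores of at least 64 entries, only when the freshly deleted slot borders another hole, and only normalizes when at most a quarter of the slots are in use. A hedged, standalone illustration of that decision; the new-space condition is omitted and an ordinary std::vector stands in for the heap-allocated backing store:

#include <cstdio>
#include <vector>

// Decide whether a fast backing store should be normalized (switched to a
// dictionary) after clearing the slot at 'key'. This mirrors the heuristic in
// FastElementsAccessor::DeleteCommon: only bother when the store is large,
// only when the freshly deleted slot borders another hole, and only when at
// most a quarter of the slots are still in use.
bool ShouldNormalizeAfterDelete(const std::vector<bool>& used, size_t key) {
  const size_t kMinLengthForSparsenessCheck = 64;
  if (used.size() < kMinLengthForSparsenessCheck) return false;

  bool adjacent_hole = (key > 0 && !used[key - 1]) ||
                       (key + 1 < used.size() && !used[key + 1]);
  if (!adjacent_hole) return false;

  size_t num_used = 0;
  for (size_t i = 0; i < used.size(); ++i) {
    if (used[i]) ++num_used;
    // Bail out early once more than 1/4 of the slots are known to be used.
    if (4 * num_used > used.size()) return false;
  }
  return 4 * num_used <= used.size();
}

int main() {
  std::vector<bool> used(128, false);
  used[10] = used[12] = used[30] = true;  // Sparse: a few of 128 slots used.
  // Slot 31 was just deleted; slot 32 is also a hole, so the adjacency
  // condition holds and the store qualifies for the full scan.
  std::printf("%d\n", ShouldNormalizeAfterDelete(used, 31));  // prints 1
  return 0;
}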
+class FastDoubleElementsAccessor
+ : public ElementsAccessorBase<FastDoubleElementsAccessor,
+ FixedDoubleArray> {
+ protected:
+ friend class ElementsAccessorBase<FastDoubleElementsAccessor,
+ FixedDoubleArray>;
+
+ virtual MaybeObject* Delete(JSObject* obj,
+ uint32_t key,
+ JSReceiver::DeleteMode mode) {
+ int length = obj->IsJSArray()
+ ? Smi::cast(JSArray::cast(obj)->length())->value()
+ : FixedDoubleArray::cast(obj->elements())->length();
+ if (key < static_cast<uint32_t>(length)) {
+ FixedDoubleArray::cast(obj->elements())->set_the_hole(key);
+ }
+ return obj->GetHeap()->true_value();
+ }
+
+ static bool HasElementAtIndex(FixedDoubleArray* backing_store,
+ uint32_t index,
+ JSObject* holder,
+ Object* receiver) {
+ return !backing_store->is_the_hole(index);
+ }
+};
+
+
+// Super class for all external element arrays.
+template<typename ExternalElementsAccessorSubclass,
+ typename ExternalArray>
+class ExternalElementsAccessor
+ : public ElementsAccessorBase<ExternalElementsAccessorSubclass,
+ ExternalArray> {
+ protected:
+ friend class ElementsAccessorBase<ExternalElementsAccessorSubclass,
+ ExternalArray>;
+
+ static MaybeObject* Get(ExternalArray* backing_store,
+ uint32_t key,
+ JSObject* obj,
+ Object* receiver) {
+ if (key < ExternalElementsAccessorSubclass::GetCapacity(backing_store)) {
+ return backing_store->get(key);
+ } else {
+ return backing_store->GetHeap()->undefined_value();
+ }
+ }
+
+ virtual MaybeObject* Delete(JSObject* obj,
+ uint32_t key,
+ JSReceiver::DeleteMode mode) {
+ // External arrays always ignore deletes.
+ return obj->GetHeap()->true_value();
+ }
+};
+
+
+class ExternalByteElementsAccessor
+ : public ExternalElementsAccessor<ExternalByteElementsAccessor,
+ ExternalByteArray> {
+};
+
+
+class ExternalUnsignedByteElementsAccessor
+ : public ExternalElementsAccessor<ExternalUnsignedByteElementsAccessor,
+ ExternalUnsignedByteArray> {
+};
+
+
+class ExternalShortElementsAccessor
+ : public ExternalElementsAccessor<ExternalShortElementsAccessor,
+ ExternalShortArray> {
+};
+
+
+class ExternalUnsignedShortElementsAccessor
+ : public ExternalElementsAccessor<ExternalUnsignedShortElementsAccessor,
+ ExternalUnsignedShortArray> {
+};
+
+
+class ExternalIntElementsAccessor
+ : public ExternalElementsAccessor<ExternalIntElementsAccessor,
+ ExternalIntArray> {
+};
+
+
+class ExternalUnsignedIntElementsAccessor
+ : public ExternalElementsAccessor<ExternalUnsignedIntElementsAccessor,
+ ExternalUnsignedIntArray> {
+};
+
+
+class ExternalFloatElementsAccessor
+ : public ExternalElementsAccessor<ExternalFloatElementsAccessor,
+ ExternalFloatArray> {
+};
+
+
+class ExternalDoubleElementsAccessor
+ : public ExternalElementsAccessor<ExternalDoubleElementsAccessor,
+ ExternalDoubleArray> {
+};
+
+
+class PixelElementsAccessor
+ : public ExternalElementsAccessor<PixelElementsAccessor,
+ ExternalPixelArray> {
+};
+
+
+class DictionaryElementsAccessor
+ : public ElementsAccessorBase<DictionaryElementsAccessor,
+ NumberDictionary> {
+ public:
+ static MaybeObject* DeleteCommon(JSObject* obj,
+ uint32_t key,
+ JSReceiver::DeleteMode mode) {
+ Isolate* isolate = obj->GetIsolate();
+ Heap* heap = isolate->heap();
+ FixedArray* backing_store = FixedArray::cast(obj->elements());
+ bool is_arguments =
+ (obj->GetElementsKind() == JSObject::NON_STRICT_ARGUMENTS_ELEMENTS);
+ if (is_arguments) {
+ backing_store = FixedArray::cast(backing_store->get(1));
+ }
+ NumberDictionary* dictionary = NumberDictionary::cast(backing_store);
+ int entry = dictionary->FindEntry(key);
+ if (entry != NumberDictionary::kNotFound) {
+ Object* result = dictionary->DeleteProperty(entry, mode);
+ if (result == heap->true_value()) {
+ MaybeObject* maybe_elements = dictionary->Shrink(key);
+ FixedArray* new_elements = NULL;
+ if (!maybe_elements->To(&new_elements)) {
+ return maybe_elements;
+ }
+ if (is_arguments) {
+ FixedArray::cast(obj->elements())->set(1, new_elements);
+ } else {
+ obj->set_elements(new_elements);
+ }
+ }
+ if (mode == JSObject::STRICT_DELETION &&
+ result == heap->false_value()) {
+ // In strict mode, attempting to delete a non-configurable property
+ // throws an exception.
+ HandleScope scope(isolate);
+ Handle<Object> holder(obj);
+ Handle<Object> name = isolate->factory()->NewNumberFromUint(key);
+ Handle<Object> args[2] = { name, holder };
+ Handle<Object> error =
+ isolate->factory()->NewTypeError("strict_delete_property",
+ HandleVector(args, 2));
+ return isolate->Throw(*error);
+ }
+ }
+ return heap->true_value();
+ }
+
+ protected:
+ friend class ElementsAccessorBase<DictionaryElementsAccessor,
+ NumberDictionary>;
+
+ virtual MaybeObject* Delete(JSObject* obj,
+ uint32_t key,
+ JSReceiver::DeleteMode mode) {
+ return DeleteCommon(obj, key, mode);
+ }
+
+ static MaybeObject* Get(NumberDictionary* backing_store,
+ uint32_t key,
+ JSObject* obj,
+ Object* receiver) {
+ int entry = backing_store->FindEntry(key);
+ if (entry != NumberDictionary::kNotFound) {
+ Object* element = backing_store->ValueAt(entry);
+ PropertyDetails details = backing_store->DetailsAt(entry);
+ if (details.type() == CALLBACKS) {
+ return obj->GetElementWithCallback(receiver,
+ element,
+ key,
+ obj);
+ } else {
+ return element;
+ }
+ }
+ return obj->GetHeap()->the_hole_value();
+ }
+
+ static uint32_t GetKeyForIndex(NumberDictionary* dict,
+ uint32_t index) {
+ Object* key = dict->KeyAt(index);
+ return Smi::cast(key)->value();
+ }
+};
+
+
+class NonStrictArgumentsElementsAccessor
+ : public ElementsAccessorBase<NonStrictArgumentsElementsAccessor,
+ FixedArray> {
+ protected:
+ friend class ElementsAccessorBase<NonStrictArgumentsElementsAccessor,
+ FixedArray>;
+
+ static MaybeObject* Get(FixedArray* parameter_map,
+ uint32_t key,
+ JSObject* obj,
+ Object* receiver) {
+ Object* probe = GetParameterMapArg(parameter_map, key);
+ if (!probe->IsTheHole()) {
+ Context* context = Context::cast(parameter_map->get(0));
+ int context_index = Smi::cast(probe)->value();
+ ASSERT(!context->get(context_index)->IsTheHole());
+ return context->get(context_index);
+ } else {
+ // Object is not mapped, defer to the arguments.
+ FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
+ return ElementsAccessor::ForArray(arguments)->Get(arguments,
+ key,
+ obj,
+ receiver);
+ }
+ }
+
+  virtual MaybeObject* Delete(JSObject* obj,
+                              uint32_t key,
+                              JSReceiver::DeleteMode mode) {
+ FixedArray* parameter_map = FixedArray::cast(obj->elements());
+ Object* probe = GetParameterMapArg(parameter_map, key);
+ if (!probe->IsTheHole()) {
+ // TODO(kmillikin): We could check if this was the last aliased
+ // parameter, and revert to normal elements in that case. That
+ // would enable GC of the context.
+ parameter_map->set_the_hole(key + 2);
+ } else {
+ FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
+ if (arguments->IsDictionary()) {
+ return DictionaryElementsAccessor::DeleteCommon(obj, key, mode);
+ } else {
+ return FastElementsAccessor::DeleteCommon(obj, key);
+ }
+ }
+ return obj->GetHeap()->true_value();
+ }
+
+ static uint32_t GetCapacity(FixedArray* parameter_map) {
+ FixedArrayBase* arguments = FixedArrayBase::cast(parameter_map->get(1));
+ return Max(static_cast<uint32_t>(parameter_map->length() - 2),
+ ForArray(arguments)->GetCapacity(arguments));
+ }
+
+ static uint32_t GetKeyForIndex(FixedArray* dict,
+ uint32_t index) {
+ return index;
+ }
+
+ static bool HasElementAtIndex(FixedArray* parameter_map,
+ uint32_t index,
+ JSObject* holder,
+ Object* receiver) {
+ Object* probe = GetParameterMapArg(parameter_map, index);
+ if (!probe->IsTheHole()) {
+ return true;
+ } else {
+ FixedArrayBase* arguments = FixedArrayBase::cast(parameter_map->get(1));
+ ElementsAccessor* accessor = ElementsAccessor::ForArray(arguments);
+ return !accessor->Get(arguments, index, holder, receiver)->IsTheHole();
+ }
+ }
+
+ private:
+ static Object* GetParameterMapArg(FixedArray* parameter_map,
+ uint32_t key) {
+ uint32_t length = parameter_map->length();
+    return key < (length - 2)
+ ? parameter_map->get(key + 2)
+ : parameter_map->GetHeap()->the_hole_value();
+ }
+};
+
+
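[Editor's note] The accessor above relies on the parameter-map layout for non-strict arguments objects: slot 0 holds the context, slot 1 the unmapped arguments store, and slot key + 2 either the hole or the context index the parameter is aliased to. A rough model of that lookup with ordinary containers, purely illustrative and not the real V8 representation:

#include <cstdio>
#include <optional>
#include <vector>

// A toy "parameter map" for non-strict arguments objects:
//   context:   the function's context slots
//   arguments: the unmapped backing store
//   mapping:   for each parameter index, either the context slot it aliases
//              (while the parameter is still mapped) or nullopt (the hole).
struct ToyArgumentsElements {
  std::vector<double> context;
  std::vector<double> arguments;
  std::vector<std::optional<int>> mapping;

  double Get(size_t key) const {
    // Mapped parameters read through the context, much like the
    // parameter_map->get(key + 2) probe in the accessor above.
    if (key < mapping.size() && mapping[key].has_value()) {
      return context[*mapping[key]];
    }
    return arguments[key];  // Unmapped: defer to the arguments store.
  }

  void Delete(size_t key) {
    // Deleting a mapped parameter just severs the alias; unmapped keys are
    // left to the regular (fast or dictionary) element handling.
    if (key < mapping.size()) mapping[key] = std::nullopt;
  }
};

int main() {
  ToyArgumentsElements elems;
  elems.context = {3.14, 2.71};
  elems.arguments = {0.0, 1.0, 2.0};
  elems.mapping = {std::optional<int>(0), std::nullopt, std::optional<int>(1)};

  std::printf("%g %g %g\n", elems.Get(0), elems.Get(1), elems.Get(2));
  elems.Delete(0);
  std::printf("%g\n", elems.Get(0));  // falls back to the arguments store
  return 0;
}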
+ElementsAccessor* ElementsAccessor::ForArray(FixedArrayBase* array) {
+ switch (array->map()->instance_type()) {
+ case FIXED_ARRAY_TYPE:
+ if (array->IsDictionary()) {
+ return elements_accessors_[JSObject::DICTIONARY_ELEMENTS];
+ } else {
+ return elements_accessors_[JSObject::FAST_ELEMENTS];
+ }
+ case EXTERNAL_BYTE_ARRAY_TYPE:
+ return elements_accessors_[JSObject::EXTERNAL_BYTE_ELEMENTS];
+ case EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE:
+ return elements_accessors_[JSObject::EXTERNAL_UNSIGNED_BYTE_ELEMENTS];
+ case EXTERNAL_SHORT_ARRAY_TYPE:
+ return elements_accessors_[JSObject::EXTERNAL_SHORT_ELEMENTS];
+ case EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE:
+ return elements_accessors_[JSObject::EXTERNAL_UNSIGNED_SHORT_ELEMENTS];
+ case EXTERNAL_INT_ARRAY_TYPE:
+ return elements_accessors_[JSObject::EXTERNAL_INT_ELEMENTS];
+ case EXTERNAL_UNSIGNED_INT_ARRAY_TYPE:
+ return elements_accessors_[JSObject::EXTERNAL_UNSIGNED_INT_ELEMENTS];
+ case EXTERNAL_FLOAT_ARRAY_TYPE:
+ return elements_accessors_[JSObject::EXTERNAL_FLOAT_ELEMENTS];
+ case EXTERNAL_DOUBLE_ARRAY_TYPE:
+ return elements_accessors_[JSObject::EXTERNAL_DOUBLE_ELEMENTS];
+ case EXTERNAL_PIXEL_ARRAY_TYPE:
+ return elements_accessors_[JSObject::EXTERNAL_PIXEL_ELEMENTS];
+ default:
+ UNREACHABLE();
+ return NULL;
+ }
+}
+
+
+void ElementsAccessor::InitializeOncePerProcess() {
+ static struct ConcreteElementsAccessors {
+ FastElementsAccessor fast_elements_handler;
+ FastDoubleElementsAccessor fast_double_elements_handler;
+ DictionaryElementsAccessor dictionary_elements_handler;
+ NonStrictArgumentsElementsAccessor non_strict_arguments_elements_handler;
+ ExternalByteElementsAccessor byte_elements_handler;
+ ExternalUnsignedByteElementsAccessor unsigned_byte_elements_handler;
+ ExternalShortElementsAccessor short_elements_handler;
+ ExternalUnsignedShortElementsAccessor unsigned_short_elements_handler;
+ ExternalIntElementsAccessor int_elements_handler;
+ ExternalUnsignedIntElementsAccessor unsigned_int_elements_handler;
+ ExternalFloatElementsAccessor float_elements_handler;
+ ExternalDoubleElementsAccessor double_elements_handler;
+ PixelElementsAccessor pixel_elements_handler;
+ } element_accessors;
+
+ static ElementsAccessor* accessor_array[] = {
+ &element_accessors.fast_elements_handler,
+ &element_accessors.fast_double_elements_handler,
+ &element_accessors.dictionary_elements_handler,
+ &element_accessors.non_strict_arguments_elements_handler,
+ &element_accessors.byte_elements_handler,
+ &element_accessors.unsigned_byte_elements_handler,
+ &element_accessors.short_elements_handler,
+ &element_accessors.unsigned_short_elements_handler,
+ &element_accessors.int_elements_handler,
+ &element_accessors.unsigned_int_elements_handler,
+ &element_accessors.float_elements_handler,
+ &element_accessors.double_elements_handler,
+ &element_accessors.pixel_elements_handler
+ };
+
+ elements_accessors_ = accessor_array;
+}
+
+
+} } // namespace v8::internal
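[Editor's note] ForArray and InitializeOncePerProcess above implement a strategy table: one statically allocated accessor per elements kind, reachable through an array indexed by the kind enum. A condensed sketch of the same registration pattern, with invented kinds and handlers rather than V8's:

#include <cassert>
#include <cstdio>

enum ElementsKind { kFast, kDictionary, kKindCount };

// Abstract per-kind handler, in the spirit of ElementsAccessor.
class Handler {
 public:
  virtual ~Handler() {}
  virtual const char* Name() const = 0;
};

class FastHandler : public Handler {
 public:
  virtual const char* Name() const { return "fast"; }
};

class DictionaryHandler : public Handler {
 public:
  virtual const char* Name() const { return "dictionary"; }
};

// Process-wide table of shared handler instances, wired up exactly once.
static Handler** handlers_ = nullptr;

void InitializeOncePerProcess() {
  static FastHandler fast;
  static DictionaryHandler dictionary;
  // The array must be ordered to match the ElementsKind enum values.
  static Handler* table[kKindCount] = { &fast, &dictionary };
  handlers_ = table;
}

Handler* ForKind(ElementsKind kind) {
  assert(kind < kKindCount);
  return handlers_[kind];
}

int main() {
  InitializeOncePerProcess();
  std::printf("%s %s\n", ForKind(kFast)->Name(), ForKind(kDictionary)->Name());
  return 0;
}

As in the original, the handler instances live in static storage, so initialization only has to publish the pointer table.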
diff --git a/src/elements.h b/src/elements.h
new file mode 100644
index 0000000..3eae303
--- /dev/null
+++ b/src/elements.h
@@ -0,0 +1,95 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#ifndef V8_ELEMENTS_H_
+#define V8_ELEMENTS_H_
+
+#include "objects.h"
+
+namespace v8 {
+namespace internal {
+
+// Abstract base class for handlers that can operate on objects with differing
+// ElementsKinds.
+class ElementsAccessor {
+ public:
+ ElementsAccessor() { }
+ virtual ~ElementsAccessor() { }
+ virtual MaybeObject* Get(FixedArrayBase* backing_store,
+ uint32_t key,
+ JSObject* holder,
+ Object* receiver) = 0;
+
+ virtual MaybeObject* Delete(JSObject* holder,
+ uint32_t key,
+ JSReceiver::DeleteMode mode) = 0;
+
+ virtual MaybeObject* AddElementsToFixedArray(FixedArrayBase* from,
+ FixedArray* to,
+ JSObject* holder,
+ Object* receiver) = 0;
+
+ // Returns a shared ElementsAccessor for the specified ElementsKind.
+ static ElementsAccessor* ForKind(JSObject::ElementsKind elements_kind) {
+ ASSERT(elements_kind < JSObject::kElementsKindCount);
+ return elements_accessors_[elements_kind];
+ }
+
+ static ElementsAccessor* ForArray(FixedArrayBase* array);
+
+ static void InitializeOncePerProcess();
+
+ protected:
+ friend class NonStrictArgumentsElementsAccessor;
+
+ virtual uint32_t GetCapacity(FixedArrayBase* backing_store) = 0;
+
+ virtual bool HasElementAtIndex(FixedArrayBase* backing_store,
+ uint32_t index,
+ JSObject* holder,
+ Object* receiver) = 0;
+
+  // Element handlers distinguish between indexes and keys when they
+  // manipulate elements. Indexes refer to elements in terms of their
+  // location in the underlying storage's backing store representation, and
+  // are between 0 and GetCapacity. Keys refer to elements in terms of the
+  // value that would be specified in JavaScript to access the element. In
+  // most implementations, keys are equivalent to indexes, and GetKeyForIndex
+  // returns the same value it is passed. In the NumberDictionary
+  // ElementsAccessor, GetKeyForIndex maps the index to a key using the KeyAt
+  // method on the NumberDictionary.
+ virtual uint32_t GetKeyForIndex(FixedArrayBase* backing_store,
+ uint32_t index) = 0;
+
+ private:
+ static ElementsAccessor** elements_accessors_;
+
+ DISALLOW_COPY_AND_ASSIGN(ElementsAccessor);
+};
+
+} } // namespace v8::internal
+
+#endif // V8_ELEMENTS_H_
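[Editor's note] The index/key distinction documented in ElementsAccessor above is easiest to see with a dictionary-backed store, where the i-th occupied entry (the index) can hold an arbitrary numeric key. A small analogy using std::map; this only mirrors the idea, not NumberDictionary's actual hash-table layout:

#include <cstdint>
#include <cstdio>
#include <iterator>
#include <map>

// For a contiguous store the index into the storage and the JavaScript key
// coincide. For a dictionary store they do not: the i-th occupied entry can
// hold any numeric key, so a separate index -> key mapping is needed.
uint32_t GetKeyForIndex(const std::map<uint32_t, double>& dict,
                        uint32_t index) {
  auto it = dict.begin();
  std::advance(it, index);  // Walk to the index-th occupied entry...
  return it->first;         // ...and report the key stored there.
}

int main() {
  std::map<uint32_t, double> dict;
  dict[5000] = 1.5;  // a[5000] = 1.5 on a sparse array
  dict[7] = 2.5;     // a[7] = 2.5

  // Two occupied entries; their keys are 7 and 5000.
  std::printf("index 0 -> key %u\n", (unsigned) GetKeyForIndex(dict, 0));
  std::printf("index 1 -> key %u\n", (unsigned) GetKeyForIndex(dict, 1));
  return 0;
}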
diff --git a/src/execution.cc b/src/execution.cc
index 6ab73e7..bdbdca8 100644
--- a/src/execution.cc
+++ b/src/execution.cc
@@ -132,7 +132,7 @@
if (*has_pending_exception) {
isolate->ReportPendingMessages();
if (isolate->pending_exception() == Failure::OutOfMemoryException()) {
- if (!isolate->handle_scope_implementer()->ignore_out_of_memory()) {
+ if (!isolate->ignore_out_of_memory()) {
V8::FatalProcessOutOfMemory("JS", true);
}
}
diff --git a/src/extensions/experimental/datetime-format.cc b/src/extensions/experimental/datetime-format.cc
index 7f46302..94a29ac 100644
--- a/src/extensions/experimental/datetime-format.cc
+++ b/src/extensions/experimental/datetime-format.cc
@@ -135,7 +135,7 @@
v8::Handle<v8::Value> DateTimeFormat::GetWeekdays(const v8::Arguments& args) {
icu::SimpleDateFormat* date_format = UnpackDateTimeFormat(args.Holder());
if (!date_format) {
- ThrowUnexpectedObjectError();
+ return ThrowUnexpectedObjectError();
}
const icu::DateFormatSymbols* symbols = date_format->getDateFormatSymbols();
diff --git a/src/extensions/experimental/number-format.cc b/src/extensions/experimental/number-format.cc
index 51e0b95..2932c52 100644
--- a/src/extensions/experimental/number-format.cc
+++ b/src/extensions/experimental/number-format.cc
@@ -36,6 +36,8 @@
#include "unicode/numfmt.h"
#include "unicode/uchar.h"
#include "unicode/ucurr.h"
+#include "unicode/unum.h"
+#include "unicode/uversion.h"
namespace v8 {
namespace internal {
@@ -231,6 +233,8 @@
}
// Generates ICU number format pattern from given skeleton.
+// TODO(cira): Remove once ICU includes equivalent method
+// (see http://bugs.icu-project.org/trac/ticket/8610).
static icu::DecimalFormat* CreateFormatterFromSkeleton(
const icu::Locale& icu_locale,
const icu::UnicodeString& skeleton,
@@ -251,6 +255,7 @@
// Case of non-consecutive U+00A4 is taken care of in i18n.js.
int32_t end_index = skeleton.lastIndexOf(currency_symbol, index);
+#if (U_ICU_VERSION_MAJOR_NUM == 4) && (U_ICU_VERSION_MINOR_NUM <= 6)
icu::NumberFormat::EStyles style;
switch (end_index - index) {
case 0:
@@ -262,6 +267,19 @@
default:
style = icu::NumberFormat::kPluralCurrencyStyle;
}
+#else // ICU version is 4.8 or above (we ignore versions below 4.0).
+ UNumberFormatStyle style;
+ switch (end_index - index) {
+ case 0:
+ style = UNUM_CURRENCY;
+ break;
+ case 1:
+ style = UNUM_CURRENCY_ISO;
+ break;
+ default:
+ style = UNUM_CURRENCY_PLURAL;
+ }
+#endif
base_format = static_cast<icu::DecimalFormat*>(
icu::NumberFormat::createInstance(icu_locale, style, *status));
diff --git a/src/factory.cc b/src/factory.cc
index ac96668..ee5c37b 100644
--- a/src/factory.cc
+++ b/src/factory.cc
@@ -34,6 +34,7 @@
#include "macro-assembler.h"
#include "objects.h"
#include "objects-visiting.h"
+#include "scopeinfo.h"
namespace v8 {
namespace internal {
@@ -84,6 +85,14 @@
}
+Handle<ObjectHashTable> Factory::NewObjectHashTable(int at_least_space_for) {
+ ASSERT(0 <= at_least_space_for);
+ CALL_HEAP_FUNCTION(isolate(),
+ ObjectHashTable::Allocate(at_least_space_for),
+ ObjectHashTable);
+}
+
+
Handle<DescriptorArray> Factory::NewDescriptorArray(int number_of_descriptors) {
ASSERT(0 <= number_of_descriptors);
CALL_HEAP_FUNCTION(isolate(),
@@ -283,6 +292,19 @@
}
+Handle<Context> Factory::NewBlockContext(
+ Handle<JSFunction> function,
+ Handle<Context> previous,
+ Handle<SerializedScopeInfo> scope_info) {
+ CALL_HEAP_FUNCTION(
+ isolate(),
+ isolate()->heap()->AllocateBlockContext(*function,
+ *previous,
+ *scope_info),
+ Context);
+}
+
+
Handle<Struct> Factory::NewStruct(InstanceType type) {
CALL_HEAP_FUNCTION(
isolate(),
@@ -726,6 +748,14 @@
}
+Handle<SerializedScopeInfo> Factory::NewSerializedScopeInfo(int length) {
+ CALL_HEAP_FUNCTION(
+ isolate(),
+ isolate()->heap()->AllocateSerializedScopeInfo(length),
+ SerializedScopeInfo);
+}
+
+
Handle<Code> Factory::NewCode(const CodeDesc& desc,
Code::Flags flags,
Handle<Object> self_ref,
diff --git a/src/factory.h b/src/factory.h
index 19f3827..a69b05b 100644
--- a/src/factory.h
+++ b/src/factory.h
@@ -58,6 +58,8 @@
Handle<StringDictionary> NewStringDictionary(int at_least_space_for);
+ Handle<ObjectHashTable> NewObjectHashTable(int at_least_space_for);
+
Handle<DescriptorArray> NewDescriptorArray(int number_of_descriptors);
Handle<DeoptimizationInputData> NewDeoptimizationInputData(
int deopt_entry_count,
@@ -165,6 +167,11 @@
Handle<Context> previous,
Handle<JSObject> extension);
+ // Create a 'block' context.
+ Handle<Context> NewBlockContext(Handle<JSFunction> function,
+ Handle<Context> previous,
+ Handle<SerializedScopeInfo> scope_info);
+
// Return the Symbol matching the passed in string.
Handle<String> SymbolFromString(Handle<String> value);
@@ -275,6 +282,8 @@
Handle<Context> context,
PretenureFlag pretenure = TENURED);
+ Handle<SerializedScopeInfo> NewSerializedScopeInfo(int length);
+
Handle<Code> NewCode(const CodeDesc& desc,
Code::Flags flags,
Handle<Object> self_reference,
diff --git a/src/flag-definitions.h b/src/flag-definitions.h
index 2db44c3..7df2b0b 100644
--- a/src/flag-definitions.h
+++ b/src/flag-definitions.h
@@ -97,10 +97,14 @@
#define FLAG FLAG_FULL
// Flags for experimental language features.
+DEFINE_bool(harmony_typeof, false, "enable harmony semantics for typeof")
DEFINE_bool(harmony_proxies, false, "enable harmony proxies")
+DEFINE_bool(harmony_weakmaps, false, "enable harmony weak maps")
+DEFINE_bool(harmony_block_scoping, false, "enable harmony block scoping")
// Flags for experimental implementation features.
-DEFINE_bool(unbox_double_arrays, false, "automatically unbox arrays of doubles")
+DEFINE_bool(unbox_double_arrays, true, "automatically unbox arrays of doubles")
+DEFINE_bool(string_slices, false, "use string slices")
// Flags for Crankshaft.
#ifdef V8_TARGET_ARCH_MIPS
@@ -400,6 +404,7 @@
DEFINE_bool(print_builtin_json_ast, false,
"print source AST for builtins as JSON")
DEFINE_string(stop_at, "", "function name where to insert a breakpoint")
+DEFINE_bool(verify_stack_height, false, "verify stack height tracing on ia32")
// compiler.cc
DEFINE_bool(print_builtin_scopes, false, "print scopes for builtins")
diff --git a/src/frames-inl.h b/src/frames-inl.h
index 5951806..7ba79bf 100644
--- a/src/frames-inl.h
+++ b/src/frames-inl.h
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -67,6 +67,7 @@
inline void StackHandler::Iterate(ObjectVisitor* v, Code* holder) const {
+ v->VisitPointer(context_address());
StackFrame::IteratePc(v, pc_address(), holder);
}
@@ -82,6 +83,12 @@
}
+inline Object** StackHandler::context_address() const {
+ const int offset = StackHandlerConstants::kContextOffset;
+ return reinterpret_cast<Object**>(address() + offset);
+}
+
+
inline Address* StackHandler::pc_address() const {
const int offset = StackHandlerConstants::kPCOffset;
return reinterpret_cast<Address*>(address() + offset);
diff --git a/src/frames.h b/src/frames.h
index f542a92..4f94ebc 100644
--- a/src/frames.h
+++ b/src/frames.h
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -114,6 +114,7 @@
// Accessors.
inline State state() const;
+ inline Object** context_address() const;
inline Address* pc_address() const;
DISALLOW_IMPLICIT_CONSTRUCTORS(StackHandler);
diff --git a/src/full-codegen.cc b/src/full-codegen.cc
index 8c2f0d1..ca2026b 100644
--- a/src/full-codegen.cc
+++ b/src/full-codegen.cc
@@ -35,6 +35,7 @@
#include "macro-assembler.h"
#include "prettyprinter.h"
#include "scopes.h"
+#include "scopeinfo.h"
#include "stub-cache.h"
namespace v8 {
@@ -90,8 +91,7 @@
}
-void BreakableStatementChecker::VisitEnterWithContextStatement(
- EnterWithContextStatement* stmt) {
+void BreakableStatementChecker::VisitWithStatement(WithStatement* stmt) {
Visit(stmt->expression());
}
@@ -317,7 +317,6 @@
// field, and then a sequence of entries. Each entry is a pair of AST id
// and code-relative pc offset.
masm()->Align(kIntSize);
- masm()->RecordComment("[ Stack check table");
unsigned offset = masm()->pc_offset();
unsigned length = stack_checks_.length();
__ dd(length);
@@ -325,7 +324,6 @@
__ dd(stack_checks_[i].id);
__ dd(stack_checks_[i].pc_and_state);
}
- masm()->RecordComment("]");
return offset;
}
@@ -437,6 +435,7 @@
void FullCodeGenerator::StackValueContext::Plug(Register reg) const {
__ push(reg);
+ codegen()->increment_stack_height();
}
@@ -450,11 +449,13 @@
void FullCodeGenerator::EffectContext::PlugTOS() const {
__ Drop(1);
+ codegen()->decrement_stack_height();
}
void FullCodeGenerator::AccumulatorValueContext::PlugTOS() const {
__ pop(result_register());
+ codegen()->decrement_stack_height();
}
@@ -465,6 +466,7 @@
void FullCodeGenerator::TestContext::PlugTOS() const {
// For simplicity we always test the accumulator register.
__ pop(result_register());
+ codegen()->decrement_stack_height();
codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
codegen()->DoTest(this);
}
@@ -843,9 +845,24 @@
Breakable nested_statement(this, stmt);
SetStatementPosition(stmt);
+ Scope* saved_scope = scope();
+ if (stmt->block_scope() != NULL) {
+ { Comment cmnt(masm_, "[ Extend block context");
+ scope_ = stmt->block_scope();
+ __ Push(scope_->GetSerializedScopeInfo());
+ PushFunctionArgumentForContextAllocation();
+ __ CallRuntime(Runtime::kPushBlockContext, 2);
+ StoreToFrameField(StandardFrameConstants::kContextOffset,
+ context_register());
+ }
+ { Comment cmnt(masm_, "[ Declarations");
+ VisitDeclarations(scope_->declarations());
+ }
+ }
PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
VisitStatements(stmt->statements());
- __ bind(nested_statement.break_target());
+ scope_ = saved_scope;
+ __ bind(nested_statement.break_label());
PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}
@@ -896,19 +913,26 @@
SetStatementPosition(stmt);
NestedStatement* current = nesting_stack_;
int stack_depth = 0;
+ int context_length = 0;
// When continuing, we clobber the unpredictable value in the accumulator
// with one that's safe for GC. If we hit an exit from the try block of
// try...finally on our way out, we will unconditionally preserve the
// accumulator on the stack.
ClearAccumulator();
while (!current->IsContinueTarget(stmt->target())) {
- stack_depth = current->Exit(stack_depth);
- current = current->outer();
+ current = current->Exit(&stack_depth, &context_length);
}
__ Drop(stack_depth);
+ if (context_length > 0) {
+ while (context_length > 0) {
+ LoadContextField(context_register(), Context::PREVIOUS_INDEX);
+ --context_length;
+ }
+ StoreToFrameField(StandardFrameConstants::kContextOffset,
+ context_register());
+ }
- Iteration* loop = current->AsIteration();
- __ jmp(loop->continue_target());
+ __ jmp(current->AsIteration()->continue_label());
}
@@ -917,19 +941,26 @@
SetStatementPosition(stmt);
NestedStatement* current = nesting_stack_;
int stack_depth = 0;
+ int context_length = 0;
// When breaking, we clobber the unpredictable value in the accumulator
// with one that's safe for GC. If we hit an exit from the try block of
// try...finally on our way out, we will unconditionally preserve the
// accumulator on the stack.
ClearAccumulator();
while (!current->IsBreakTarget(stmt->target())) {
- stack_depth = current->Exit(stack_depth);
- current = current->outer();
+ current = current->Exit(&stack_depth, &context_length);
}
__ Drop(stack_depth);
+ if (context_length > 0) {
+ while (context_length > 0) {
+ LoadContextField(context_register(), Context::PREVIOUS_INDEX);
+ --context_length;
+ }
+ StoreToFrameField(StandardFrameConstants::kContextOffset,
+ context_register());
+ }
- Breakable* target = current->AsBreakable();
- __ jmp(target->break_target());
+ __ jmp(current->AsBreakable()->break_label());
}
@@ -942,9 +973,9 @@
// Exit all nested statements.
NestedStatement* current = nesting_stack_;
int stack_depth = 0;
+ int context_length = 0;
while (current != NULL) {
- stack_depth = current->Exit(stack_depth);
- current = current->outer();
+ current = current->Exit(&stack_depth, &context_length);
}
__ Drop(stack_depth);
@@ -952,14 +983,23 @@
}
-void FullCodeGenerator::VisitEnterWithContextStatement(
- EnterWithContextStatement* stmt) {
- Comment cmnt(masm_, "[ EnterWithContextStatement");
+void FullCodeGenerator::VisitWithStatement(WithStatement* stmt) {
+ Comment cmnt(masm_, "[ WithStatement");
SetStatementPosition(stmt);
VisitForStackValue(stmt->expression());
PushFunctionArgumentForContextAllocation();
__ CallRuntime(Runtime::kPushWithContext, 2);
+ decrement_stack_height();
+ StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
+
+ { WithOrCatch body(this);
+ Visit(stmt->statement());
+ }
+
+ // Pop context.
+ LoadContextField(context_register(), Context::PREVIOUS_INDEX);
+ // Update local stack frame context field.
StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
}
@@ -988,12 +1028,12 @@
// Record the position of the do while condition and make sure it is
// possible to break on the condition.
- __ bind(loop_statement.continue_target());
+ __ bind(loop_statement.continue_label());
PrepareForBailoutForId(stmt->ContinueId(), NO_REGISTERS);
SetExpressionPosition(stmt->cond(), stmt->condition_position());
VisitForControl(stmt->cond(),
&stack_check,
- loop_statement.break_target(),
+ loop_statement.break_label(),
&stack_check);
// Check stack before looping.
@@ -1003,7 +1043,7 @@
__ jmp(&body);
PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
- __ bind(loop_statement.break_target());
+ __ bind(loop_statement.break_label());
decrement_loop_depth();
}
@@ -1024,7 +1064,7 @@
// Emit the statement position here as this is where the while
// statement code starts.
- __ bind(loop_statement.continue_target());
+ __ bind(loop_statement.continue_label());
SetStatementPosition(stmt);
// Check stack before looping.
@@ -1033,11 +1073,11 @@
__ bind(&test);
VisitForControl(stmt->cond(),
&body,
- loop_statement.break_target(),
- loop_statement.break_target());
+ loop_statement.break_label(),
+ loop_statement.break_label());
PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
- __ bind(loop_statement.break_target());
+ __ bind(loop_statement.break_label());
decrement_loop_depth();
}
@@ -1060,7 +1100,7 @@
Visit(stmt->body());
PrepareForBailoutForId(stmt->ContinueId(), NO_REGISTERS);
- __ bind(loop_statement.continue_target());
+ __ bind(loop_statement.continue_label());
SetStatementPosition(stmt);
if (stmt->next() != NULL) {
Visit(stmt->next());
@@ -1077,14 +1117,14 @@
if (stmt->cond() != NULL) {
VisitForControl(stmt->cond(),
&body,
- loop_statement.break_target(),
- loop_statement.break_target());
+ loop_statement.break_label(),
+ loop_statement.break_label());
} else {
__ jmp(&body);
}
PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
- __ bind(loop_statement.break_target());
+ __ bind(loop_statement.break_label());
decrement_loop_depth();
}
@@ -1102,7 +1142,7 @@
// to introduce a new scope to bind the catch variable and to remove
// that scope again afterwards.
- Label try_handler_setup, catch_entry, done;
+ Label try_handler_setup, done;
__ Call(&try_handler_setup);
// Try handler code, exception in result register.
@@ -1119,17 +1159,22 @@
Scope* saved_scope = scope();
scope_ = stmt->scope();
ASSERT(scope_->declarations()->is_empty());
- Visit(stmt->catch_block());
+ { WithOrCatch body(this);
+ Visit(stmt->catch_block());
+ }
scope_ = saved_scope;
__ jmp(&done);
// Try block code. Sets up the exception handler chain.
__ bind(&try_handler_setup);
{
- TryCatch try_block(this, &catch_entry);
+ const int delta = StackHandlerConstants::kSize / kPointerSize;
+ TryCatch try_block(this);
__ PushTryHandler(IN_JAVASCRIPT, TRY_CATCH_HANDLER);
+ increment_stack_height(delta);
Visit(stmt->try_block());
__ PopTryHandler();
+ decrement_stack_height(delta);
}
__ bind(&done);
}
@@ -1161,6 +1206,7 @@
// cooked before GC.
Label finally_entry;
Label try_handler_setup;
+ const int original_stack_height = stack_height();
// Setup the try-handler chain. Use a call to
// Jump to try-handler setup and try-block code. Use call to put try-handler
@@ -1169,9 +1215,9 @@
// Try handler code. Return address of call is pushed on handler stack.
{
// This code is only executed during stack-handler traversal when an
- // exception is thrown. The execption is in the result register, which
+ // exception is thrown. The exception is in the result register, which
// is retained by the finally block.
- // Call the finally block and then rethrow the exception.
+ // Call the finally block and then rethrow the exception if it returns.
__ Call(&finally_entry);
__ push(result_register());
__ CallRuntime(Runtime::kReThrow, 1);
@@ -1182,6 +1228,7 @@
// Finally block implementation.
Finally finally_block(this);
EnterFinallyBlock();
+ set_stack_height(original_stack_height + Finally::kElementCount);
Visit(stmt->finally_block());
ExitFinallyBlock(); // Return to the calling code.
}
@@ -1189,10 +1236,13 @@
__ bind(&try_handler_setup);
{
// Setup try handler (stack pointer registers).
+ const int delta = StackHandlerConstants::kSize / kPointerSize;
TryFinally try_block(this, &finally_entry);
__ PushTryHandler(IN_JAVASCRIPT, TRY_FINALLY_HANDLER);
+ set_stack_height(original_stack_height + delta);
Visit(stmt->try_block());
__ PopTryHandler();
+ set_stack_height(original_stack_height);
}
// Execute the finally block on the way out. Clobber the unpredictable
// value in the accumulator with one that's safe for GC. The finally
@@ -1222,6 +1272,7 @@
__ bind(&true_case);
SetExpressionPosition(expr->then_expression(),
expr->then_expression_position());
+ int start_stack_height = stack_height();
if (context()->IsTest()) {
const TestContext* for_test = TestContext::cast(context());
VisitForControl(expr->then_expression(),
@@ -1235,6 +1286,7 @@
PrepareForBailoutForId(expr->ElseId(), NO_REGISTERS);
__ bind(&false_case);
+ set_stack_height(start_stack_height);
if (context()->IsTest()) ForwardBailoutToChild(expr);
SetExpressionPosition(expr->else_expression(),
expr->else_expression_position());
@@ -1275,26 +1327,23 @@
void FullCodeGenerator::VisitThrow(Throw* expr) {
Comment cmnt(masm_, "[ Throw");
+ // Throw has no effect on the stack height or the current expression context.
+ // Usually the expression context is null, because throw is a statement.
VisitForStackValue(expr->exception());
__ CallRuntime(Runtime::kThrow, 1);
+ decrement_stack_height();
// Never returns here.
}
-int FullCodeGenerator::TryFinally::Exit(int stack_depth) {
+FullCodeGenerator::NestedStatement* FullCodeGenerator::TryCatch::Exit(
+ int* stack_depth,
+ int* context_length) {
// The macros used here must preserve the result register.
- __ Drop(stack_depth);
+ __ Drop(*stack_depth);
__ PopTryHandler();
- __ Call(finally_entry_);
- return 0;
-}
-
-
-int FullCodeGenerator::TryCatch::Exit(int stack_depth) {
- // The macros used here must preserve the result register.
- __ Drop(stack_depth);
- __ PopTryHandler();
- return 0;
+ *stack_depth = 0;
+ return previous_;
}
diff --git a/src/full-codegen.h b/src/full-codegen.h
index 6b174f7..0ed26a1 100644
--- a/src/full-codegen.h
+++ b/src/full-codegen.h
@@ -83,6 +83,7 @@
scope_(NULL),
nesting_stack_(NULL),
loop_depth_(0),
+ stack_height_(0),
context_(NULL),
bailout_entries_(0),
stack_checks_(2), // There's always at least one.
@@ -110,10 +111,7 @@
private:
class Breakable;
class Iteration;
- class TryCatch;
- class TryFinally;
- class Finally;
- class ForIn;
+
class TestContext;
class NestedStatement BASE_EMBEDDED {
@@ -131,134 +129,135 @@
virtual Breakable* AsBreakable() { return NULL; }
virtual Iteration* AsIteration() { return NULL; }
- virtual TryCatch* AsTryCatch() { return NULL; }
- virtual TryFinally* AsTryFinally() { return NULL; }
- virtual Finally* AsFinally() { return NULL; }
- virtual ForIn* AsForIn() { return NULL; }
virtual bool IsContinueTarget(Statement* target) { return false; }
virtual bool IsBreakTarget(Statement* target) { return false; }
- // Generate code to leave the nested statement. This includes
- // cleaning up any stack elements in use and restoring the
- // stack to the expectations of the surrounding statements.
- // Takes a number of stack elements currently on top of the
- // nested statement's stack, and returns a number of stack
- // elements left on top of the surrounding statement's stack.
- // The generated code must preserve the result register (which
- // contains the value in case of a return).
- virtual int Exit(int stack_depth) {
- // Default implementation for the case where there is
- // nothing to clean up.
- return stack_depth;
+ // Notify the statement that we are exiting it via break, continue, or
+ // return and give it a chance to generate cleanup code. Return the
+ // next outer statement in the nesting stack. We accumulate in
+ // *stack_depth the amount to drop the stack and in *context_length the
+ // number of context chain links to unwind as we traverse the nesting
+ // stack from an exit to its target.
+ virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
+ return previous_;
}
- NestedStatement* outer() { return previous_; }
protected:
MacroAssembler* masm() { return codegen_->masm(); }
- private:
FullCodeGenerator* codegen_;
NestedStatement* previous_;
DISALLOW_COPY_AND_ASSIGN(NestedStatement);
};
+ // A breakable statement such as a block.
class Breakable : public NestedStatement {
public:
- Breakable(FullCodeGenerator* codegen,
- BreakableStatement* break_target)
- : NestedStatement(codegen),
- target_(break_target) {}
- virtual ~Breakable() {}
- virtual Breakable* AsBreakable() { return this; }
- virtual bool IsBreakTarget(Statement* statement) {
- return target_ == statement;
+ Breakable(FullCodeGenerator* codegen, BreakableStatement* statement)
+ : NestedStatement(codegen), statement_(statement) {
}
- BreakableStatement* statement() { return target_; }
- Label* break_target() { return &break_target_label_; }
+ virtual ~Breakable() {}
+
+ virtual Breakable* AsBreakable() { return this; }
+ virtual bool IsBreakTarget(Statement* target) {
+ return statement() == target;
+ }
+
+ BreakableStatement* statement() { return statement_; }
+ Label* break_label() { return &break_label_; }
+
private:
- BreakableStatement* target_;
- Label break_target_label_;
- DISALLOW_COPY_AND_ASSIGN(Breakable);
+ BreakableStatement* statement_;
+ Label break_label_;
};
+ // An iteration statement such as a while, for, or do loop.
class Iteration : public Breakable {
public:
- Iteration(FullCodeGenerator* codegen,
- IterationStatement* iteration_statement)
- : Breakable(codegen, iteration_statement) {}
- virtual ~Iteration() {}
- virtual Iteration* AsIteration() { return this; }
- virtual bool IsContinueTarget(Statement* statement) {
- return this->statement() == statement;
+ Iteration(FullCodeGenerator* codegen, IterationStatement* statement)
+ : Breakable(codegen, statement) {
}
- Label* continue_target() { return &continue_target_label_; }
+ virtual ~Iteration() {}
+
+ virtual Iteration* AsIteration() { return this; }
+ virtual bool IsContinueTarget(Statement* target) {
+ return statement() == target;
+ }
+
+ Label* continue_label() { return &continue_label_; }
+
private:
- Label continue_target_label_;
- DISALLOW_COPY_AND_ASSIGN(Iteration);
+ Label continue_label_;
};
- // The environment inside the try block of a try/catch statement.
+ // The try block of a try/catch statement.
class TryCatch : public NestedStatement {
public:
- explicit TryCatch(FullCodeGenerator* codegen, Label* catch_entry)
- : NestedStatement(codegen), catch_entry_(catch_entry) { }
+ explicit TryCatch(FullCodeGenerator* codegen) : NestedStatement(codegen) {
+ }
virtual ~TryCatch() {}
- virtual TryCatch* AsTryCatch() { return this; }
- Label* catch_entry() { return catch_entry_; }
- virtual int Exit(int stack_depth);
- private:
- Label* catch_entry_;
- DISALLOW_COPY_AND_ASSIGN(TryCatch);
+
+ virtual NestedStatement* Exit(int* stack_depth, int* context_length);
};
- // The environment inside the try block of a try/finally statement.
+ // The try block of a try/finally statement.
class TryFinally : public NestedStatement {
public:
- explicit TryFinally(FullCodeGenerator* codegen, Label* finally_entry)
- : NestedStatement(codegen), finally_entry_(finally_entry) { }
+ TryFinally(FullCodeGenerator* codegen, Label* finally_entry)
+ : NestedStatement(codegen), finally_entry_(finally_entry) {
+ }
virtual ~TryFinally() {}
- virtual TryFinally* AsTryFinally() { return this; }
- Label* finally_entry() { return finally_entry_; }
- virtual int Exit(int stack_depth);
+
+ virtual NestedStatement* Exit(int* stack_depth, int* context_length);
+
private:
Label* finally_entry_;
- DISALLOW_COPY_AND_ASSIGN(TryFinally);
};
- // A FinallyEnvironment represents being inside a finally block.
- // Abnormal termination of the finally block needs to clean up
- // the block's parameters from the stack.
+ // The finally block of a try/finally statement.
class Finally : public NestedStatement {
public:
+ static const int kElementCount = 2;
+
explicit Finally(FullCodeGenerator* codegen) : NestedStatement(codegen) { }
virtual ~Finally() {}
- virtual Finally* AsFinally() { return this; }
- virtual int Exit(int stack_depth) {
- return stack_depth + kFinallyStackElementCount;
+
+ virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
+ *stack_depth += kElementCount;
+ return previous_;
}
- private:
- // Number of extra stack slots occupied during a finally block.
- static const int kFinallyStackElementCount = 2;
- DISALLOW_COPY_AND_ASSIGN(Finally);
};
- // A ForInEnvironment represents being inside a for-in loop.
- // Abnormal termination of the for-in block needs to clean up
- // the block's temporary storage from the stack.
+ // The body of a for/in loop.
class ForIn : public Iteration {
public:
- ForIn(FullCodeGenerator* codegen,
- ForInStatement* statement)
- : Iteration(codegen, statement) { }
- virtual ~ForIn() {}
- virtual ForIn* AsForIn() { return this; }
- virtual int Exit(int stack_depth) {
- return stack_depth + kForInStackElementCount;
+ static const int kElementCount = 5;
+
+ ForIn(FullCodeGenerator* codegen, ForInStatement* statement)
+ : Iteration(codegen, statement) {
}
- private:
- static const int kForInStackElementCount = 5;
- DISALLOW_COPY_AND_ASSIGN(ForIn);
+ virtual ~ForIn() {}
+
+ virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
+ *stack_depth += kElementCount;
+ return previous_;
+ }
+ };
+
+
+ // The body of a with or catch.
+ class WithOrCatch : public NestedStatement {
+ public:
+ explicit WithOrCatch(FullCodeGenerator* codegen)
+ : NestedStatement(codegen) {
+ }
+ virtual ~WithOrCatch() {}
+
+ virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
+ ++(*context_length);
+ return previous_;
+ }
};
// The forward bailout stack keeps track of the expressions that can
@@ -519,6 +518,35 @@
loop_depth_--;
}
+#if defined(V8_TARGET_ARCH_IA32)
+ int stack_height() { return stack_height_; }
+ void set_stack_height(int depth) { stack_height_ = depth; }
+ void increment_stack_height() { stack_height_++; }
+ void increment_stack_height(int delta) { stack_height_ += delta; }
+ void decrement_stack_height() {
+ if (FLAG_verify_stack_height) {
+ ASSERT(stack_height_ > 0);
+ }
+ stack_height_--;
+ }
+ void decrement_stack_height(int delta) {
+    stack_height_ -= delta;
+ if (FLAG_verify_stack_height) {
+ ASSERT(stack_height_ >= 0);
+ }
+ }
+ // Call this function only if FLAG_verify_stack_height is true.
+ void verify_stack_height(); // Generates a runtime check of esp - ebp.
+#else
+ int stack_height() { return 0; }
+ void set_stack_height(int depth) {}
+ void increment_stack_height() {}
+ void increment_stack_height(int delta) {}
+ void decrement_stack_height() {}
+ void decrement_stack_height(int delta) {}
+ void verify_stack_height() {}
+#endif // V8_TARGET_ARCH_IA32
+
MacroAssembler* masm() { return masm_; }
class ExpressionContext;
@@ -578,6 +606,10 @@
virtual ~ExpressionContext() {
codegen_->set_new_context(old_);
+ if (FLAG_verify_stack_height) {
+ ASSERT_EQ(expected_stack_height_, codegen()->stack_height());
+ codegen()->verify_stack_height();
+ }
}
Isolate* isolate() const { return codegen_->isolate(); }
@@ -631,6 +663,7 @@
FullCodeGenerator* codegen() const { return codegen_; }
MacroAssembler* masm() const { return masm_; }
MacroAssembler* masm_;
+ int expected_stack_height_; // The expected stack height esp - ebp on exit.
private:
const ExpressionContext* old_;
@@ -640,7 +673,9 @@
class AccumulatorValueContext : public ExpressionContext {
public:
explicit AccumulatorValueContext(FullCodeGenerator* codegen)
- : ExpressionContext(codegen) { }
+ : ExpressionContext(codegen) {
+ expected_stack_height_ = codegen->stack_height();
+ }
virtual void Plug(bool flag) const;
virtual void Plug(Register reg) const;
@@ -661,7 +696,9 @@
class StackValueContext : public ExpressionContext {
public:
explicit StackValueContext(FullCodeGenerator* codegen)
- : ExpressionContext(codegen) { }
+ : ExpressionContext(codegen) {
+ expected_stack_height_ = codegen->stack_height() + 1;
+ }
virtual void Plug(bool flag) const;
virtual void Plug(Register reg) const;
@@ -690,7 +727,9 @@
condition_(condition),
true_label_(true_label),
false_label_(false_label),
- fall_through_(fall_through) { }
+ fall_through_(fall_through) {
+ expected_stack_height_ = codegen->stack_height();
+ }
static const TestContext* cast(const ExpressionContext* context) {
ASSERT(context->IsTest());
@@ -727,7 +766,10 @@
class EffectContext : public ExpressionContext {
public:
explicit EffectContext(FullCodeGenerator* codegen)
- : ExpressionContext(codegen) { }
+ : ExpressionContext(codegen) {
+ expected_stack_height_ = codegen->stack_height();
+ }
+
virtual void Plug(bool flag) const;
virtual void Plug(Register reg) const;
@@ -751,6 +793,7 @@
Label return_label_;
NestedStatement* nesting_stack_;
int loop_depth_;
+ int stack_height_;
const ExpressionContext* context_;
ZoneList<BailoutEntry> bailout_entries_;
ZoneList<BailoutEntry> stack_checks_;
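[Editor's note] The reworked NestedStatement::Exit protocol above threads two accumulators through the walk from an exit point to its break or continue target: how many operand-stack slots to drop and how many context-chain links to unwind. A stripped-down sketch of that walk; the slot and link counts come from the constants above (2 for a finally block, 5 for a for-in body, 1 context link for with/catch), everything else is invented for illustration:

#include <cstdio>

// Each nested construct knows how many operand-stack slots and context-chain
// links it contributes, and Exit() accumulates both while returning the next
// outer statement, as in the rewritten nesting stack.
struct Nested {
  Nested* previous;
  int extra_stack_slots;    // e.g. 2 for a finally block, 5 for a for-in body
  int extra_context_links;  // e.g. 1 for a with or catch body
  bool is_target;

  Nested* Exit(int* stack_depth, int* context_length) {
    *stack_depth += extra_stack_slots;
    *context_length += extra_context_links;
    return previous;
  }
};

int main() {
  // Nesting, outermost first: break target <- for-in body <- with body.
  Nested target = { nullptr, 0, 0, true };
  Nested for_in = { &target, 5, 0, false };
  Nested with   = { &for_in, 0, 1, false };

  int stack_depth = 0;
  int context_length = 0;
  Nested* current = &with;  // Start the walk at the innermost statement.
  while (!current->is_target) {
    current = current->Exit(&stack_depth, &context_length);
  }
  // The exit site must drop 5 stack slots and unwind 1 context link before
  // jumping to the target's break label.
  std::printf("drop %d slots, pop %d contexts\n", stack_depth, context_length);
  return 0;
}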
diff --git a/src/func-name-inferrer.h b/src/func-name-inferrer.h
index bec3a5c..1a57268 100644
--- a/src/func-name-inferrer.h
+++ b/src/func-name-inferrer.h
@@ -70,6 +70,12 @@
}
}
+ void RemoveLastFunction() {
+ if (IsOpen() && !funcs_to_infer_.is_empty()) {
+ funcs_to_infer_.RemoveLast();
+ }
+ }
+
// Infers a function name and leaves names collection state.
void Infer() {
ASSERT(IsOpen());
diff --git a/src/handles.cc b/src/handles.cc
index b03efbd..8c6439b 100644
--- a/src/handles.cc
+++ b/src/handles.cc
@@ -422,48 +422,18 @@
Handle<Object> GetHiddenProperties(Handle<JSObject> obj,
- bool create_if_needed) {
- Isolate* isolate = obj->GetIsolate();
- Object* holder = obj->BypassGlobalProxy();
- if (holder->IsUndefined()) return isolate->factory()->undefined_value();
- obj = Handle<JSObject>(JSObject::cast(holder), isolate);
+ JSObject::HiddenPropertiesFlag flag) {
+ CALL_HEAP_FUNCTION(obj->GetIsolate(),
+ obj->GetHiddenProperties(flag),
+ Object);
+}
- if (obj->HasFastProperties()) {
- // If the object has fast properties, check whether the first slot
- // in the descriptor array matches the hidden symbol. Since the
- // hidden symbols hash code is zero (and no other string has hash
- // code zero) it will always occupy the first entry if present.
- DescriptorArray* descriptors = obj->map()->instance_descriptors();
- if ((descriptors->number_of_descriptors() > 0) &&
- (descriptors->GetKey(0) == isolate->heap()->hidden_symbol()) &&
- descriptors->IsProperty(0)) {
- ASSERT(descriptors->GetType(0) == FIELD);
- return Handle<Object>(obj->FastPropertyAt(descriptors->GetFieldIndex(0)),
- isolate);
- }
- }
- // Only attempt to find the hidden properties in the local object and not
- // in the prototype chain. Note that HasLocalProperty() can cause a GC in
- // the general case in the presence of interceptors.
- if (!obj->HasHiddenPropertiesObject()) {
- // Hidden properties object not found. Allocate a new hidden properties
- // object if requested. Otherwise return the undefined value.
- if (create_if_needed) {
- Handle<Object> hidden_obj =
- isolate->factory()->NewJSObject(isolate->object_function());
-
- // Don't allow leakage of the hidden object through accessors
- // on Object.prototype.
- SetPrototype(Handle<JSObject>::cast(hidden_obj),
- isolate->factory()->null_value());
- CALL_HEAP_FUNCTION(isolate,
- obj->SetHiddenPropertiesObject(*hidden_obj), Object);
- } else {
- return isolate->factory()->undefined_value();
- }
- }
- return Handle<Object>(obj->GetHiddenPropertiesObject(), isolate);
+int GetIdentityHash(Handle<JSObject> obj) {
+ CALL_AND_RETRY(obj->GetIsolate(),
+ obj->GetIdentityHash(JSObject::ALLOW_CREATION),
+ return Smi::cast(__object__)->value(),
+ return 0);
}
@@ -647,15 +617,17 @@
{
AssertNoAllocation no_heap_allocation; // ensure vectors stay valid.
// Dispatch on type of strings.
- if (src->IsAsciiRepresentation()) {
+ String::FlatContent content = src->GetFlatContent();
+ ASSERT(content.IsFlat());
+ if (content.IsAscii()) {
CalculateLineEnds(isolate,
&line_ends,
- src->ToAsciiVector(),
+ content.ToAsciiVector(),
with_last_line);
} else {
CalculateLineEnds(isolate,
&line_ends,
- src->ToUC16Vector(),
+ content.ToUC16Vector(),
with_last_line);
}
}
@@ -913,6 +885,15 @@
}
+Handle<ObjectHashTable> PutIntoObjectHashTable(Handle<ObjectHashTable> table,
+ Handle<JSObject> key,
+ Handle<Object> value) {
+ CALL_HEAP_FUNCTION(table->GetIsolate(),
+ table->Put(*key, *value),
+ ObjectHashTable);
+}
+
+
bool EnsureCompiled(Handle<SharedFunctionInfo> shared,
ClearExceptionFlag flag) {
return shared->is_compiled() || CompileLazyShared(shared, flag);
diff --git a/src/handles.h b/src/handles.h
index 13c6dd6..9bb3b1f 100644
--- a/src/handles.h
+++ b/src/handles.h
@@ -264,9 +264,13 @@
Handle<Object> SetPrototype(Handle<JSObject> obj, Handle<Object> value);
// Return the object's hidden properties object. If the object has no hidden
-// properties and create_if_needed is true, then a new hidden property object
-// will be allocated. Otherwise the Heap::undefined_value is returned.
-Handle<Object> GetHiddenProperties(Handle<JSObject> obj, bool create_if_needed);
+// properties and HiddenPropertiesFlag::ALLOW_CREATION is passed, then a new
+// hidden property object will be allocated. Otherwise Heap::undefined_value
+// is returned.
+Handle<Object> GetHiddenProperties(Handle<JSObject> obj,
+ JSObject::HiddenPropertiesFlag flag);
+
+int GetIdentityHash(Handle<JSObject> obj);
Handle<Object> DeleteElement(Handle<JSObject> obj, uint32_t index);
Handle<Object> DeleteProperty(Handle<JSObject> obj, Handle<String> prop);
@@ -343,6 +347,10 @@
Handle<Object> PreventExtensions(Handle<JSObject> object);
+Handle<ObjectHashTable> PutIntoObjectHashTable(Handle<ObjectHashTable> table,
+ Handle<JSObject> key,
+ Handle<Object> value);
+
// Does lazy compilation of the given function. Returns true on success and
// false if the compilation resulted in a stack overflow.
enum ClearExceptionFlag { KEEP_EXCEPTION, CLEAR_EXCEPTION };
diff --git a/src/heap-inl.h b/src/heap-inl.h
index b0b4fbe..7b666af 100644
--- a/src/heap-inl.h
+++ b/src/heap-inl.h
@@ -142,6 +142,11 @@
}
+MaybeObject* Heap::CopyFixedDoubleArray(FixedDoubleArray* src) {
+ return CopyFixedDoubleArrayWithMap(src, src->map());
+}
+
+
MaybeObject* Heap::AllocateRaw(int size_in_bytes,
AllocationSpace space,
AllocationSpace retry_space) {
@@ -318,10 +323,10 @@
ASSERT(type != JS_GLOBAL_PROPERTY_CELL_TYPE);
if (type < FIRST_NONSTRING_TYPE) {
- // There are three string representations: sequential strings, cons
- // strings, and external strings. Only cons strings contain
- // non-map-word pointers to heap objects.
- return ((type & kStringRepresentationMask) == kConsStringTag)
+ // There are four string representations: sequential strings, external
+ // strings, cons strings, and sliced strings.
+ // Only the latter two contain non-map-word pointers to heap objects.
+ return ((type & kIsIndirectStringMask) == kIsIndirectStringTag)
? OLD_POINTER_SPACE
: OLD_DATA_SPACE;
} else {
diff --git a/src/heap.cc b/src/heap.cc
index 2d27570..279f30b 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -438,7 +438,9 @@
#if defined(DEBUG)
ReportStatisticsAfterGC();
#endif // DEBUG
+#ifdef ENABLE_DEBUGGER_SUPPORT
isolate_->debug()->AfterGarbageCollection();
+#endif // ENABLE_DEBUGGER_SUPPORT
}
@@ -1288,10 +1290,18 @@
&ObjectEvacuationStrategy<POINTER_OBJECT>::
template VisitSpecialized<ConsString::kSize>);
+ table_.Register(kVisitSlicedString,
+ &ObjectEvacuationStrategy<POINTER_OBJECT>::
+ template VisitSpecialized<SlicedString::kSize>);
+
table_.Register(kVisitSharedFunctionInfo,
&ObjectEvacuationStrategy<POINTER_OBJECT>::
template VisitSpecialized<SharedFunctionInfo::kSize>);
+ table_.Register(kVisitJSWeakMap,
+ &ObjectEvacuationStrategy<POINTER_OBJECT>::
+ Visit);
+
table_.Register(kVisitJSRegExp,
&ObjectEvacuationStrategy<POINTER_OBJECT>::
Visit);
@@ -1739,6 +1749,12 @@
set_fixed_cow_array_map(Map::cast(obj));
ASSERT(fixed_array_map() != fixed_cow_array_map());
+ { MaybeObject* maybe_obj =
+ AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel);
+ if (!maybe_obj->ToObject(&obj)) return false;
+ }
+ set_serialized_scope_info_map(Map::cast(obj));
+
{ MaybeObject* maybe_obj = AllocateMap(HEAP_NUMBER_TYPE, HeapNumber::kSize);
if (!maybe_obj->ToObject(&obj)) return false;
}
@@ -1904,6 +1920,12 @@
AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel);
if (!maybe_obj->ToObject(&obj)) return false;
}
+ set_block_context_map(Map::cast(obj));
+
+ { MaybeObject* maybe_obj =
+ AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel);
+ if (!maybe_obj->ToObject(&obj)) return false;
+ }
Map* global_context_map = Map::cast(obj);
global_context_map->set_visitor_id(StaticVisitorBase::kVisitGlobalContext);
set_global_context_map(global_context_map);
@@ -2546,6 +2568,8 @@
// If the resulting string is small make a flat string.
if (length < String::kMinNonFlatLength) {
+ // Note that neither of the two inputs can be a slice because:
+ STATIC_ASSERT(String::kMinNonFlatLength <= SlicedString::kMinLength);
ASSERT(first->IsFlat());
ASSERT(second->IsFlat());
if (is_ascii) {
@@ -2637,24 +2661,69 @@
// Make an attempt to flatten the buffer to reduce access time.
buffer = buffer->TryFlattenGetString();
- Object* result;
- { MaybeObject* maybe_result = buffer->IsAsciiRepresentation()
- ? AllocateRawAsciiString(length, pretenure )
- : AllocateRawTwoByteString(length, pretenure);
- if (!maybe_result->ToObject(&result)) return maybe_result;
- }
- String* string_result = String::cast(result);
- // Copy the characters into the new object.
- if (buffer->IsAsciiRepresentation()) {
- ASSERT(string_result->IsAsciiRepresentation());
- char* dest = SeqAsciiString::cast(string_result)->GetChars();
- String::WriteToFlat(buffer, dest, start, end);
- } else {
- ASSERT(string_result->IsTwoByteRepresentation());
- uc16* dest = SeqTwoByteString::cast(string_result)->GetChars();
- String::WriteToFlat(buffer, dest, start, end);
+ // TODO(1626): For now slicing external strings is not supported. However,
+ // a flat cons string can have an external string as first part in some cases.
+ // Therefore we have to single out this case as well.
+ if (!FLAG_string_slices ||
+ (buffer->IsConsString() &&
+ (!buffer->IsFlat() ||
+ !ConsString::cast(buffer)->first()->IsSeqString())) ||
+ buffer->IsExternalString() ||
+ length < SlicedString::kMinLength ||
+ pretenure == TENURED) {
+ Object* result;
+ { MaybeObject* maybe_result = buffer->IsAsciiRepresentation()
+ ? AllocateRawAsciiString(length, pretenure)
+ : AllocateRawTwoByteString(length, pretenure);
+ if (!maybe_result->ToObject(&result)) return maybe_result;
+ }
+ String* string_result = String::cast(result);
+ // Copy the characters into the new object.
+ if (buffer->IsAsciiRepresentation()) {
+ ASSERT(string_result->IsAsciiRepresentation());
+ char* dest = SeqAsciiString::cast(string_result)->GetChars();
+ String::WriteToFlat(buffer, dest, start, end);
+ } else {
+ ASSERT(string_result->IsTwoByteRepresentation());
+ uc16* dest = SeqTwoByteString::cast(string_result)->GetChars();
+ String::WriteToFlat(buffer, dest, start, end);
+ }
+ return result;
}
+ ASSERT(buffer->IsFlat());
+ ASSERT(!buffer->IsExternalString());
+#if DEBUG
+ buffer->StringVerify();
+#endif
+
+ Object* result;
+ { Map* map = buffer->IsAsciiRepresentation()
+ ? sliced_ascii_string_map()
+ : sliced_string_map();
+ MaybeObject* maybe_result = Allocate(map, NEW_SPACE);
+ if (!maybe_result->ToObject(&result)) return maybe_result;
+ }
+
+ AssertNoAllocation no_gc;
+ SlicedString* sliced_string = SlicedString::cast(result);
+ sliced_string->set_length(length);
+ sliced_string->set_hash_field(String::kEmptyHashField);
+ if (buffer->IsConsString()) {
+ ConsString* cons = ConsString::cast(buffer);
+ ASSERT(cons->second()->length() == 0);
+ sliced_string->set_parent(cons->first());
+ sliced_string->set_offset(start);
+ } else if (buffer->IsSlicedString()) {
+ // Prevent nesting sliced strings.
+ SlicedString* parent_slice = SlicedString::cast(buffer);
+ sliced_string->set_parent(parent_slice->parent());
+ sliced_string->set_offset(start + parent_slice->offset());
+ } else {
+ sliced_string->set_parent(buffer);
+ sliced_string->set_offset(start);
+ }
+ ASSERT(sliced_string->parent()->IsSeqString());
return result;
}
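
The change above stops copying characters for longer substrings and instead allocates a SlicedString that records a parent string and an offset, flattening nesting so a slice never points at another slice. A minimal standalone C++ sketch of that bookkeeping, using illustrative types rather than V8's heap classes:

#include <memory>
#include <string>

// Illustrative stand-ins, not V8's objects.
struct FlatString {
  std::string chars;  // models a sequential (flat) string
};

struct Slice {
  std::shared_ptr<FlatString> parent;  // always flat, never another Slice
  int offset;
  int length;
};

// Slicing a flat string records parent and offset directly; slicing an
// existing slice re-parents onto the underlying flat string and adds the
// offsets, mirroring the "Prevent nesting sliced strings" branch above.
Slice MakeSlice(std::shared_ptr<FlatString> str, int start, int length) {
  return Slice{std::move(str), start, length};
}

Slice MakeSlice(const Slice& s, int start, int length) {
  return Slice{s.parent, s.offset + start, length};
}
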
@@ -3389,17 +3458,22 @@
object_size);
}
- FixedArray* elements = FixedArray::cast(source->elements());
+ FixedArrayBase* elements = FixedArrayBase::cast(source->elements());
FixedArray* properties = FixedArray::cast(source->properties());
// Update elements if necessary.
if (elements->length() > 0) {
Object* elem;
- { MaybeObject* maybe_elem =
- (elements->map() == fixed_cow_array_map()) ?
- elements : CopyFixedArray(elements);
+ { MaybeObject* maybe_elem;
+ if (elements->map() == fixed_cow_array_map()) {
+ maybe_elem = FixedArray::cast(elements);
+ } else if (source->HasFastDoubleElements()) {
+ maybe_elem = CopyFixedDoubleArray(FixedDoubleArray::cast(elements));
+ } else {
+ maybe_elem = CopyFixedArray(FixedArray::cast(elements));
+ }
if (!maybe_elem->ToObject(&elem)) return maybe_elem;
}
- JSObject::cast(clone)->set_elements(FixedArray::cast(elem));
+ JSObject::cast(clone)->set_elements(FixedArrayBase::cast(elem));
}
// Update properties if necessary.
if (properties->length() > 0) {
@@ -3758,6 +3832,23 @@
}
+MaybeObject* Heap::CopyFixedDoubleArrayWithMap(FixedDoubleArray* src,
+ Map* map) {
+ int len = src->length();
+ Object* obj;
+ { MaybeObject* maybe_obj = AllocateRawFixedDoubleArray(len, NOT_TENURED);
+ if (!maybe_obj->ToObject(&obj)) return maybe_obj;
+ }
+ HeapObject* dst = HeapObject::cast(obj);
+ dst->set_map(map);
+ CopyBlock(
+ dst->address() + FixedDoubleArray::kLengthOffset,
+ src->address() + FixedDoubleArray::kLengthOffset,
+ FixedDoubleArray::SizeFor(len) - FixedDoubleArray::kLengthOffset);
+ return obj;
+}
+
+
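
CopyFixedDoubleArrayWithMap above duplicates everything after the length field with one block copy. A rough standalone sketch of the same idea over a plain struct (illustrative only; V8 copies raw heap words, not a std::vector):

#include <cstring>
#include <vector>

struct DoubleArrayModel {
  int length;
  std::vector<double> values;  // stands in for the inline element payload
};

DoubleArrayModel CopyDoubleArray(const DoubleArrayModel& src) {
  DoubleArrayModel dst;
  dst.length = src.length;
  dst.values.resize(src.length);
  if (src.length > 0) {
    // One contiguous copy of the element payload, analogous to CopyBlock()
    // copying FixedDoubleArray::SizeFor(len) minus the header.
    std::memcpy(dst.values.data(), src.values.data(),
                src.length * sizeof(double));
  }
  return dst;
}
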
MaybeObject* Heap::AllocateFixedArray(int length) {
ASSERT(length >= 0);
if (length == 0) return empty_fixed_array();
@@ -3989,6 +4080,36 @@
}
+MaybeObject* Heap::AllocateBlockContext(JSFunction* function,
+ Context* previous,
+ SerializedScopeInfo* scope_info) {
+ Object* result;
+ { MaybeObject* maybe_result =
+ AllocateFixedArrayWithHoles(scope_info->NumberOfContextSlots());
+ if (!maybe_result->ToObject(&result)) return maybe_result;
+ }
+ Context* context = reinterpret_cast<Context*>(result);
+ context->set_map(block_context_map());
+ context->set_closure(function);
+ context->set_previous(previous);
+ context->set_extension(scope_info);
+ context->set_global(previous->global());
+ return context;
+}
+
+
+MaybeObject* Heap::AllocateSerializedScopeInfo(int length) {
+ Object* result;
+ { MaybeObject* maybe_result = AllocateFixedArray(length, TENURED);
+ if (!maybe_result->ToObject(&result)) return maybe_result;
+ }
+ SerializedScopeInfo* scope_info =
+ reinterpret_cast<SerializedScopeInfo*>(result);
+ scope_info->set_map(serialized_scope_info_map());
+ return scope_info;
+}
+
+
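
For orientation, the block context allocated above is an ordinary fixed array whose map marks it as a block context and whose leading slots are filled in as shown. A hedged layout sketch with illustrative field names (V8 addresses these as numbered context slots, not named members):

struct BlockContextLayoutSketch {
  const void* map;        // block_context_map, set after allocation
  const void* closure;    // the JSFunction that owns the block
  const void* previous;   // the enclosing context
  const void* extension;  // the block's SerializedScopeInfo
  const void* global;     // copied from previous->global()
  // ...followed by one slot per block-scoped variable, as reported by
  // SerializedScopeInfo::NumberOfContextSlots().
};
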
MaybeObject* Heap::AllocateStruct(InstanceType type) {
Map* map;
switch (type) {
diff --git a/src/heap.h b/src/heap.h
index 6cd4f84..0f69fab 100644
--- a/src/heap.h
+++ b/src/heap.h
@@ -65,6 +65,7 @@
V(Map, heap_number_map, HeapNumberMap) \
V(Map, global_context_map, GlobalContextMap) \
V(Map, fixed_array_map, FixedArrayMap) \
+ V(Map, serialized_scope_info_map, SerializedScopeInfoMap) \
V(Map, fixed_cow_array_map, FixedCOWArrayMap) \
V(Map, fixed_double_array_map, FixedDoubleArrayMap) \
V(Object, no_interceptor_result_sentinel, NoInterceptorResultSentinel) \
@@ -87,6 +88,8 @@
V(Map, symbol_map, SymbolMap) \
V(Map, cons_string_map, ConsStringMap) \
V(Map, cons_ascii_string_map, ConsAsciiStringMap) \
+ V(Map, sliced_string_map, SlicedStringMap) \
+ V(Map, sliced_ascii_string_map, SlicedAsciiStringMap) \
V(Map, ascii_symbol_map, AsciiSymbolMap) \
V(Map, cons_symbol_map, ConsSymbolMap) \
V(Map, cons_ascii_symbol_map, ConsAsciiSymbolMap) \
@@ -111,6 +114,7 @@
V(Map, function_context_map, FunctionContextMap) \
V(Map, catch_context_map, CatchContextMap) \
V(Map, with_context_map, WithContextMap) \
+ V(Map, block_context_map, BlockContextMap) \
V(Map, code_map, CodeMap) \
V(Map, oddball_map, OddballMap) \
V(Map, global_property_cell_map, GlobalPropertyCellMap) \
@@ -160,6 +164,7 @@
V(length_symbol, "length") \
V(name_symbol, "name") \
V(native_symbol, "native") \
+ V(null_symbol, "null") \
V(number_symbol, "number") \
V(Number_symbol, "Number") \
V(nan_symbol, "NaN") \
@@ -220,7 +225,8 @@
V(closure_symbol, "(closure)") \
V(use_strict, "use strict") \
V(dot_symbol, ".") \
- V(anonymous_function_symbol, "(anonymous function)")
+ V(anonymous_function_symbol, "(anonymous function)") \
+ V(block_scope_symbol, ".block")
// Forward declarations.
class GCTracer;
@@ -483,6 +489,9 @@
// Allocates an empty code cache.
MUST_USE_RESULT MaybeObject* AllocateCodeCache();
+ // Allocates a serialized scope info.
+ MUST_USE_RESULT MaybeObject* AllocateSerializedScopeInfo(int length);
+
// Allocates an empty PolymorphicCodeCache.
MUST_USE_RESULT MaybeObject* AllocatePolymorphicCodeCache();
@@ -617,6 +626,16 @@
// Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
MUST_USE_RESULT MaybeObject* CopyFixedArrayWithMap(FixedArray* src, Map* map);
+ // Make a copy of src and return it. Returns
+ // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
+ MUST_USE_RESULT inline MaybeObject* CopyFixedDoubleArray(
+ FixedDoubleArray* src);
+
+ // Make a copy of src, set the map, and return the copy. Returns
+ // Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
+ MUST_USE_RESULT MaybeObject* CopyFixedDoubleArrayWithMap(
+ FixedDoubleArray* src, Map* map);
+
// Allocates a fixed array initialized with the hole values.
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
// failed.
@@ -658,6 +677,11 @@
Context* previous,
JSObject* extension);
+ // Allocate a block context.
+ MUST_USE_RESULT MaybeObject* AllocateBlockContext(JSFunction* function,
+ Context* previous,
+ SerializedScopeInfo* info);
+
// Allocates a new utility object in the old generation.
MUST_USE_RESULT MaybeObject* AllocateStruct(InstanceType type);
@@ -1636,6 +1660,7 @@
friend class Page;
friend class Isolate;
friend class MarkCompactCollector;
+ friend class StaticMarkingVisitor;
friend class MapCompact;
DISALLOW_COPY_AND_ASSIGN(Heap);
diff --git a/src/hydrogen-instructions.cc b/src/hydrogen-instructions.cc
index d282f37..d3cc8a6 100644
--- a/src/hydrogen-instructions.cc
+++ b/src/hydrogen-instructions.cc
@@ -635,6 +635,13 @@
}
+void HBoundsCheck::PrintDataTo(StringStream* stream) {
+ index()->PrintNameTo(stream);
+ stream->Add(" ");
+ length()->PrintNameTo(stream);
+}
+
+
void HCallConstantFunction::PrintDataTo(StringStream* stream) {
if (IsApplyFunction()) {
stream->Add("optimized apply ");
@@ -771,7 +778,7 @@
void HTypeofIsAndBranch::PrintDataTo(StringStream* stream) {
value()->PrintNameTo(stream);
stream->Add(" == ");
- stream->Add(type_literal_->ToAsciiVector());
+ stream->Add(type_literal_->GetFlatContent().ToAsciiVector());
}
@@ -862,19 +869,25 @@
Range* HValue::InferRange() {
- if (representation().IsTagged()) {
- // Tagged values are always in int32 range when converted to integer,
- // but they can contain -0.
- Range* result = new Range();
- result->set_can_be_minus_zero(true);
- return result;
- } else if (representation().IsNone()) {
- return NULL;
- } else {
- // Untagged integer32 cannot be -0 and we don't compute ranges for
- // untagged doubles.
- return new Range();
+ // Untagged integer32 cannot be -0; all other representations can.
+ Range* result = new Range();
+ result->set_can_be_minus_zero(!representation().IsInteger32());
+ return result;
+}
+
+
+Range* HChange::InferRange() {
+ Range* input_range = value()->range();
+ if (from().IsInteger32() &&
+ to().IsTagged() &&
+ input_range != NULL && input_range->IsInSmiRange()) {
+ set_type(HType::Smi());
}
+ Range* result = (input_range != NULL)
+ ? input_range->Copy()
+ : HValue::InferRange();
+ if (to().IsInteger32()) result->set_can_be_minus_zero(false);
+ return result;
}
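
The range changes above make Copy() preserve the minus-zero bit and force integer32 results to drop it. A small self-contained model of that invariant (not the real Range class):

struct RangeModel {
  int lower;
  int upper;
  bool can_be_minus_zero;

  RangeModel Copy() const {
    // Unlike the old code, the minus-zero bit now travels with the copy.
    return RangeModel{lower, upper, can_be_minus_zero};
  }
};

RangeModel InferInteger32Result(const RangeModel& input) {
  RangeModel result = input.Copy();
  result.can_be_minus_zero = false;  // an untagged int32 cannot represent -0
  return result;
}
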
@@ -1223,6 +1236,7 @@
? left()->range()->Copy()
: new Range();
result->Sar(c->Integer32Value());
+ result->set_can_be_minus_zero(false);
return result;
}
}
@@ -1230,6 +1244,31 @@
}
+Range* HShr::InferRange() {
+ if (right()->IsConstant()) {
+ HConstant* c = HConstant::cast(right());
+ if (c->HasInteger32Value()) {
+ int shift_count = c->Integer32Value() & 0x1f;
+ if (left()->range()->CanBeNegative()) {
+ // Only compute bounds if the result always fits into an int32.
+ return (shift_count >= 1)
+ ? new Range(0, static_cast<uint32_t>(0xffffffff) >> shift_count)
+ : new Range();
+ } else {
+ // For positive inputs we can use the >> operator.
+ Range* result = (left()->range() != NULL)
+ ? left()->range()->Copy()
+ : new Range();
+ result->Sar(c->Integer32Value());
+ result->set_can_be_minus_zero(false);
+ return result;
+ }
+ }
+ }
+ return HValue::InferRange();
+}
+
+
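
HShr::InferRange above only keeps precise bounds when the input cannot be negative; otherwise it falls back to the conservative interval [0, 0xffffffff >> shift] and gives up for a zero shift, where the unsigned result may not fit an int32. A standalone sketch of that case split:

#include <cstdint>
#include <utility>

// Bounds for x >>> shift_count under the same reasoning as the hunk above.
// Purely illustrative.
std::pair<int64_t, int64_t> ShrBounds(int64_t lower, int64_t upper,
                                      int shift_count) {
  shift_count &= 0x1f;
  if (lower < 0) {
    if (shift_count >= 1) {
      // The sign bits are shifted away, so the result is bounded and fits.
      return {0, static_cast<int64_t>(0xffffffffu >> shift_count)};
    }
    return {INT32_MIN, INT32_MAX};  // no useful int32 bound for shift 0
  }
  // Non-negative inputs: logical and arithmetic shift agree.
  return {lower >> shift_count, upper >> shift_count};
}
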
Range* HShl::InferRange() {
if (right()->IsConstant()) {
HConstant* c = HConstant::cast(right());
@@ -1238,6 +1277,7 @@
? left()->range()->Copy()
: new Range();
result->Shl(c->Integer32Value());
+ result->set_can_be_minus_zero(false);
return result;
}
}
@@ -1285,7 +1325,7 @@
HLoadNamedFieldPolymorphic::HLoadNamedFieldPolymorphic(HValue* context,
HValue* object,
- ZoneMapList* types,
+ SmallMapList* types,
Handle<String> name)
: types_(Min(types->length(), kMaxLoadPolymorphism)),
name_(name),
@@ -1349,6 +1389,20 @@
}
+void HLoadNamedFieldPolymorphic::PrintDataTo(StringStream* stream) {
+ object()->PrintNameTo(stream);
+ stream->Add(" .");
+ stream->Add(*String::cast(*name())->ToCString());
+}
+
+
+void HLoadNamedGeneric::PrintDataTo(StringStream* stream) {
+ object()->PrintNameTo(stream);
+ stream->Add(" .");
+ stream->Add(*String::cast(*name())->ToCString());
+}
+
+
void HLoadKeyedFastElement::PrintDataTo(StringStream* stream) {
object()->PrintNameTo(stream);
stream->Add("[");
@@ -1798,11 +1852,6 @@
}
-void HBoundsCheck::Verify() {
- HInstruction::Verify();
-}
-
-
void HCheckSmi::Verify() {
HInstruction::Verify();
ASSERT(HasNoUses());
@@ -1815,18 +1864,6 @@
}
-void HCheckInstanceType::Verify() {
- HInstruction::Verify();
- ASSERT(HasNoUses());
-}
-
-
-void HCheckMap::Verify() {
- HInstruction::Verify();
- ASSERT(HasNoUses());
-}
-
-
void HCheckFunction::Verify() {
HInstruction::Verify();
ASSERT(HasNoUses());
diff --git a/src/hydrogen-instructions.h b/src/hydrogen-instructions.h
index 15186ff..76007d7 100644
--- a/src/hydrogen-instructions.h
+++ b/src/hydrogen-instructions.h
@@ -104,8 +104,7 @@
V(Div) \
V(ElementsKind) \
V(EnterInlined) \
- V(ExternalArrayLength) \
- V(FixedArrayLength) \
+ V(FixedArrayBaseLength) \
V(ForceRepresentation) \
V(FunctionLiteral) \
V(GetCachedArrayIndex) \
@@ -184,6 +183,7 @@
V(InobjectFields) \
V(BackingStoreFields) \
V(ArrayElements) \
+ V(DoubleArrayElements) \
V(SpecializedArrayElements) \
V(GlobalVars) \
V(Maps) \
@@ -227,14 +227,20 @@
Range* next() const { return next_; }
Range* CopyClearLower() const { return new Range(kMinInt, upper_); }
Range* CopyClearUpper() const { return new Range(lower_, kMaxInt); }
- Range* Copy() const { return new Range(lower_, upper_); }
+ Range* Copy() const {
+ Range* result = new Range(lower_, upper_);
+ result->set_can_be_minus_zero(CanBeMinusZero());
+ return result;
+ }
int32_t Mask() const;
void set_can_be_minus_zero(bool b) { can_be_minus_zero_ = b; }
bool CanBeMinusZero() const { return CanBeZero() && can_be_minus_zero_; }
bool CanBeZero() const { return upper_ >= 0 && lower_ <= 0; }
bool CanBeNegative() const { return lower_ < 0; }
bool Includes(int value) const { return lower_ <= value && upper_ >= value; }
- bool IsMostGeneric() const { return lower_ == kMinInt && upper_ == kMaxInt; }
+ bool IsMostGeneric() const {
+ return lower_ == kMinInt && upper_ == kMaxInt && CanBeMinusZero();
+ }
bool IsInSmiRange() const {
return lower_ >= Smi::kMinValue && upper_ <= Smi::kMaxValue;
}
@@ -578,9 +584,9 @@
virtual bool IsConvertibleToInteger() const { return true; }
HType type() const { return type_; }
- void set_type(HType type) {
- ASSERT(HasNoUses());
- type_ = type;
+ void set_type(HType new_type) {
+ ASSERT(new_type.IsSubtypeOf(type_));
+ type_ = new_type;
}
// An operation needs to override this function iff:
@@ -933,8 +939,12 @@
class HBranch: public HUnaryControlInstruction {
public:
- HBranch(HValue* value, HBasicBlock* true_target, HBasicBlock* false_target)
- : HUnaryControlInstruction(value, true_target, false_target) {
+ HBranch(HValue* value,
+ HBasicBlock* true_target,
+ HBasicBlock* false_target,
+ ToBooleanStub::Types expected_input_types = ToBooleanStub::no_types())
+ : HUnaryControlInstruction(value, true_target, false_target),
+ expected_input_types_(expected_input_types) {
ASSERT(true_target != NULL && false_target != NULL);
}
explicit HBranch(HValue* value)
@@ -945,7 +955,14 @@
return Representation::None();
}
+ ToBooleanStub::Types expected_input_types() const {
+ return expected_input_types_;
+ }
+
DECLARE_CONCRETE_INSTRUCTION(Branch)
+
+ private:
+ ToBooleanStub::Types expected_input_types_;
};
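
HBranch now carries the ToBoolean type feedback recorded for its condition, so later code generation can limit itself to checks for the input kinds that were actually observed. A rough model of such a type set as a byte-sized bitset (the bit assignments here are assumptions, not the stub's actual encoding):

#include <cstdint>

enum ObservedToBooleanType {
  kUndefinedSeen  = 1 << 0,
  kBooleanSeen    = 1 << 1,
  kNullSeen       = 1 << 2,
  kSmiSeen        = 1 << 3,
  kSpecObjectSeen = 1 << 4,
  kStringSeen     = 1 << 5,
  kHeapNumberSeen = 1 << 6
};

struct ToBooleanTypesModel {
  uint8_t bits = 0;

  bool IsEmpty() const { return bits == 0; }
  bool Contains(ObservedToBooleanType t) const { return (bits & t) != 0; }
  void Add(ObservedToBooleanType t) { bits |= static_cast<uint8_t>(t); }
  uint8_t ToByte() const { return bits; }  // the byte pushed when patching
};
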
@@ -1089,10 +1106,6 @@
set_representation(to);
SetFlag(kUseGVN);
if (is_truncating) SetFlag(kTruncatingToInt32);
- if (from.IsInteger32() && to.IsTagged() && value->range() != NULL &&
- value->range()->IsInSmiRange()) {
- set_type(HType::Smi());
- }
}
virtual HValue* EnsureAndPropagateNotMinusZero(BitVector* visited);
@@ -1104,6 +1117,8 @@
return from_;
}
+ virtual Range* InferRange();
+
virtual void PrintDataTo(StringStream* stream);
DECLARE_CONCRETE_INSTRUCTION(Change)
@@ -1663,12 +1678,14 @@
};
-class HJSArrayLength: public HUnaryOperation {
+class HJSArrayLength: public HTemplateInstruction<2> {
public:
- explicit HJSArrayLength(HValue* value) : HUnaryOperation(value) {
+ HJSArrayLength(HValue* value, HValue* typecheck) {
// The length of an array is stored as a tagged value in the array
// object. It is guaranteed to be 32 bit integer, but it can be
// represented as either a smi or heap number.
+ SetOperandAt(0, value);
+ SetOperandAt(1, typecheck);
set_representation(Representation::Tagged());
SetFlag(kUseGVN);
SetFlag(kDependsOnArrayLengths);
@@ -1679,6 +1696,8 @@
return Representation::Tagged();
}
+ HValue* value() { return OperandAt(0); }
+
DECLARE_CONCRETE_INSTRUCTION(JSArrayLength)
protected:
@@ -1686,9 +1705,9 @@
};
-class HFixedArrayLength: public HUnaryOperation {
+class HFixedArrayBaseLength: public HUnaryOperation {
public:
- explicit HFixedArrayLength(HValue* value) : HUnaryOperation(value) {
+ explicit HFixedArrayBaseLength(HValue* value) : HUnaryOperation(value) {
set_representation(Representation::Tagged());
SetFlag(kUseGVN);
SetFlag(kDependsOnArrayLengths);
@@ -1698,28 +1717,7 @@
return Representation::Tagged();
}
- DECLARE_CONCRETE_INSTRUCTION(FixedArrayLength)
-
- protected:
- virtual bool DataEquals(HValue* other) { return true; }
-};
-
-
-class HExternalArrayLength: public HUnaryOperation {
- public:
- explicit HExternalArrayLength(HValue* value) : HUnaryOperation(value) {
- set_representation(Representation::Integer32());
- // The result of this instruction is idempotent as long as its inputs don't
- // change. The length of a pixel array cannot change once set, so it's not
- // necessary to introduce a kDependsOnArrayLengths or any other dependency.
- SetFlag(kUseGVN);
- }
-
- virtual Representation RequiredInputRepresentation(int index) const {
- return Representation::Tagged();
- }
-
- DECLARE_CONCRETE_INSTRUCTION(ExternalArrayLength)
+ DECLARE_CONCRETE_INSTRUCTION(FixedArrayBaseLength)
protected:
virtual bool DataEquals(HValue* other) { return true; }
@@ -1894,10 +1892,14 @@
};
-class HCheckMap: public HUnaryOperation {
+class HCheckMap: public HTemplateInstruction<2> {
public:
- HCheckMap(HValue* value, Handle<Map> map)
- : HUnaryOperation(value), map_(map) {
+ HCheckMap(HValue* value, Handle<Map> map, HValue* typecheck = NULL)
+ : map_(map) {
+ SetOperandAt(0, value);
+ // If callers don't depend on a typecheck, they can pass in NULL. In that
+ // case we use a copy of the |value| argument as a dummy value.
+ SetOperandAt(1, typecheck != NULL ? typecheck : value);
set_representation(Representation::Tagged());
SetFlag(kUseGVN);
SetFlag(kDependsOnMaps);
@@ -1909,10 +1911,7 @@
virtual void PrintDataTo(StringStream* stream);
virtual HType CalculateInferredType();
-#ifdef DEBUG
- virtual void Verify();
-#endif
-
+ HValue* value() { return OperandAt(0); }
Handle<Map> map() const { return map_; }
DECLARE_CONCRETE_INSTRUCTION(CheckMap)
@@ -1980,10 +1979,6 @@
return Representation::Tagged();
}
-#ifdef DEBUG
- virtual void Verify();
-#endif
-
virtual HValue* Canonicalize();
bool is_interval_check() const { return check_ <= LAST_INTERVAL_CHECK; }
@@ -2458,9 +2453,7 @@
return Representation::Integer32();
}
-#ifdef DEBUG
- virtual void Verify();
-#endif
+ virtual void PrintDataTo(StringStream* stream);
HValue* index() { return OperandAt(0); }
HValue* length() { return OperandAt(1); }
@@ -3063,6 +3056,7 @@
HShr(HValue* context, HValue* left, HValue* right)
: HBitwiseBinaryOperation(context, left, right) { }
+ virtual Range* InferRange();
virtual HType CalculateInferredType();
DECLARE_CONCRETE_INSTRUCTION(Shr)
@@ -3415,12 +3409,12 @@
public:
HLoadNamedFieldPolymorphic(HValue* context,
HValue* object,
- ZoneMapList* types,
+ SmallMapList* types,
Handle<String> name);
HValue* context() { return OperandAt(0); }
HValue* object() { return OperandAt(1); }
- ZoneMapList* types() { return &types_; }
+ SmallMapList* types() { return &types_; }
Handle<String> name() { return name_; }
bool need_generic() { return need_generic_; }
@@ -3428,6 +3422,8 @@
return Representation::Tagged();
}
+ virtual void PrintDataTo(StringStream* stream);
+
DECLARE_CONCRETE_INSTRUCTION(LoadNamedFieldPolymorphic)
static const int kMaxLoadPolymorphism = 4;
@@ -3436,7 +3432,7 @@
virtual bool DataEquals(HValue* value);
private:
- ZoneMapList types_;
+ SmallMapList types_;
Handle<String> name_;
bool need_generic_;
};
@@ -3461,6 +3457,8 @@
return Representation::Tagged();
}
+ virtual void PrintDataTo(StringStream* stream);
+
DECLARE_CONCRETE_INSTRUCTION(LoadNamedGeneric)
private:
@@ -3527,7 +3525,7 @@
SetOperandAt(0, elements);
SetOperandAt(1, key);
set_representation(Representation::Double());
- SetFlag(kDependsOnArrayElements);
+ SetFlag(kDependsOnDoubleArrayElements);
SetFlag(kUseGVN);
}
@@ -3745,7 +3743,7 @@
SetOperandAt(0, elements);
SetOperandAt(1, key);
SetOperandAt(2, val);
- SetFlag(kChangesArrayElements);
+ SetFlag(kChangesDoubleArrayElements);
}
virtual Representation RequiredInputRepresentation(int index) const {
diff --git a/src/hydrogen.cc b/src/hydrogen.cc
index f105703..5772141 100644
--- a/src/hydrogen.cc
+++ b/src/hydrogen.cc
@@ -736,6 +736,8 @@
HPhase phase("Assign dominators", this);
for (int i = 0; i < blocks_.length(); ++i) {
if (blocks_[i]->IsLoopHeader()) {
+ // Only the first predecessor of a loop header is from outside the loop.
+ // All others are back edges, and thus cannot dominate the loop header.
blocks_[i]->AssignCommonDominator(blocks_[i]->predecessors()->first());
} else {
for (int j = 0; j < blocks_[i]->predecessors()->length(); ++j) {
@@ -743,13 +745,15 @@
}
}
}
+}
- // Propagate flag marking blocks containing unconditional deoptimize.
+// Mark all blocks that are dominated by an unconditional soft deoptimize to
+// prevent code motion across those blocks.
+void HGraph::PropagateDeoptimizingMark() {
+ HPhase phase("Propagate deoptimizing mark", this);
MarkAsDeoptimizingRecursively(entry_block());
}
-
-// Mark all blocks that are dominated by an unconditional deoptimize.
void HGraph::MarkAsDeoptimizingRecursively(HBasicBlock* block) {
for (int i = 0; i < block->dominated_blocks()->length(); ++i) {
HBasicBlock* dominated = block->dominated_blocks()->at(i);
@@ -2182,7 +2186,9 @@
}
HBasicBlock* empty_true = builder->graph()->CreateBasicBlock();
HBasicBlock* empty_false = builder->graph()->CreateBasicBlock();
- HBranch* test = new(zone()) HBranch(value, empty_true, empty_false);
+ unsigned test_id = condition()->test_id();
+ ToBooleanStub::Types expected(builder->oracle()->ToBooleanTypes(test_id));
+ HBranch* test = new(zone()) HBranch(value, empty_true, empty_false, expected);
builder->current_block()->Finish(test);
empty_true->Goto(if_true());
@@ -2317,6 +2323,7 @@
graph()->OrderBlocks();
graph()->AssignDominators();
+ graph()->PropagateDeoptimizingMark();
if (!graph()->CheckConstPhiUses()) {
Bailout("Unsupported phi use of const variable");
return NULL;
@@ -2332,20 +2339,11 @@
HInferRepresentation rep(graph());
rep.Analyze();
- if (FLAG_use_range) {
- HRangeAnalysis rangeAnalysis(graph());
- rangeAnalysis.Analyze();
- }
+ graph()->MarkDeoptimizeOnUndefined();
+ graph()->InsertRepresentationChanges();
graph()->InitializeInferredTypes();
graph()->Canonicalize();
- graph()->MarkDeoptimizeOnUndefined();
- graph()->InsertRepresentationChanges();
- graph()->ComputeMinusZeroChecks();
-
- // Eliminate redundant stack checks on backwards branches.
- HStackCheckEliminator sce(graph());
- sce.Process();
// Perform common subexpression elimination and loop-invariant code motion.
if (FLAG_use_gvn) {
@@ -2354,6 +2352,16 @@
gvn.Analyze();
}
+ if (FLAG_use_range) {
+ HRangeAnalysis rangeAnalysis(graph());
+ rangeAnalysis.Analyze();
+ }
+ graph()->ComputeMinusZeroChecks();
+
+ // Eliminate redundant stack checks on backwards branches.
+ HStackCheckEliminator sce(graph());
+ sce.Process();
+
// Replace the results of check instructions with the original value, if the
// result is used. This is safe now, since we don't do code motion after this
// point. It enables better register allocation since the value produced by
@@ -2495,6 +2503,9 @@
ASSERT(!HasStackOverflow());
ASSERT(current_block() != NULL);
ASSERT(current_block()->HasPredecessor());
+ if (stmt->block_scope() != NULL) {
+ return Bailout("ScopedBlock");
+ }
BreakAndContinueInfo break_info(stmt);
{ BreakAndContinueScope push(&break_info, this);
CHECK_BAILOUT(VisitStatements(stmt->statements()));
@@ -2646,12 +2657,11 @@
}
-void HGraphBuilder::VisitEnterWithContextStatement(
- EnterWithContextStatement* stmt) {
+void HGraphBuilder::VisitWithStatement(WithStatement* stmt) {
ASSERT(!HasStackOverflow());
ASSERT(current_block() != NULL);
ASSERT(current_block()->HasPredecessor());
- return Bailout("EnterWithContextStatement");
+ return Bailout("WithStatement");
}
@@ -3134,6 +3144,8 @@
Variable* variable = expr->AsVariable();
if (variable == NULL) {
return Bailout("reference to rewritten variable");
+ } else if (variable->mode() == Variable::LET) {
+ return Bailout("reference to let variable");
} else if (variable->IsStackAllocated()) {
HValue* value = environment()->Lookup(variable);
if (variable->mode() == Variable::CONST &&
@@ -3311,8 +3323,8 @@
// Load the elements array before the first store.
if (elements == NULL) {
- elements = new(zone()) HLoadElements(literal);
- AddInstruction(elements);
+ elements = new(zone()) HLoadElements(literal);
+ AddInstruction(elements);
}
HValue* key = AddInstruction(
@@ -3408,7 +3420,7 @@
ASSERT(!name.is_null());
LookupResult lookup;
- ZoneMapList* types = expr->GetReceiverTypes();
+ SmallMapList* types = expr->GetReceiverTypes();
bool is_monomorphic = expr->IsMonomorphic() &&
ComputeStoredField(types->first(), name, &lookup);
@@ -3422,7 +3434,7 @@
void HGraphBuilder::HandlePolymorphicStoreNamedField(Assignment* expr,
HValue* object,
HValue* value,
- ZoneMapList* types,
+ SmallMapList* types,
Handle<String> name) {
// TODO(ager): We should recognize when the prototype chains for different
// maps are identical. In that case we can avoid repeatedly generating the
@@ -3513,7 +3525,7 @@
Handle<String> name = Handle<String>::cast(key->handle());
ASSERT(!name.is_null());
- ZoneMapList* types = expr->GetReceiverTypes();
+ SmallMapList* types = expr->GetReceiverTypes();
LookupResult lookup;
if (expr->IsMonomorphic()) {
@@ -3599,8 +3611,9 @@
BinaryOperation* operation = expr->binary_operation();
if (var != NULL) {
- if (var->mode() == Variable::CONST) {
- return Bailout("unsupported const compound assignment");
+ if (var->mode() == Variable::CONST ||
+ var->mode() == Variable::LET) {
+ return Bailout("unsupported let or const compound assignment");
}
CHECK_ALIVE(VisitForValue(operation));
@@ -3743,6 +3756,8 @@
// variables (e.g. initialization inside a loop).
HValue* old_value = environment()->Lookup(var);
AddInstruction(new HUseConst(old_value));
+ } else if (var->mode() == Variable::LET) {
+ return Bailout("unsupported assignment to let");
}
if (proxy->IsArguments()) return Bailout("assignment to arguments");
@@ -3945,13 +3960,17 @@
: BuildLoadKeyedGeneric(object, key);
}
AddInstruction(new(zone()) HCheckNonSmi(object));
- AddInstruction(new(zone()) HCheckMap(object, map));
- HInstruction* elements = new(zone()) HLoadElements(object);
+ HInstruction* mapcheck = AddInstruction(new(zone()) HCheckMap(object, map));
+ HInstruction* elements = AddInstruction(new(zone()) HLoadElements(object));
+ bool fast_double_elements = map->has_fast_double_elements();
+ if (is_store && map->has_fast_elements()) {
+ AddInstruction(new(zone()) HCheckMap(
+ elements, isolate()->factory()->fixed_array_map()));
+ }
HInstruction* length = NULL;
HInstruction* checked_key = NULL;
if (map->has_external_array_elements()) {
- AddInstruction(elements);
- length = AddInstruction(new(zone()) HExternalArrayLength(elements));
+ length = AddInstruction(new(zone()) HFixedArrayBaseLength(elements));
checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length));
HLoadExternalArrayPointer* external_elements =
new(zone()) HLoadExternalArrayPointer(elements);
@@ -3959,25 +3978,13 @@
return BuildExternalArrayElementAccess(external_elements, checked_key,
val, map->elements_kind(), is_store);
}
- bool fast_double_elements = map->has_fast_double_elements();
ASSERT(map->has_fast_elements() || fast_double_elements);
if (map->instance_type() == JS_ARRAY_TYPE) {
- length = AddInstruction(new(zone()) HJSArrayLength(object));
- checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length));
- AddInstruction(elements);
- if (is_store && !fast_double_elements) {
- AddInstruction(new(zone()) HCheckMap(
- elements, isolate()->factory()->fixed_array_map()));
- }
+ length = AddInstruction(new(zone()) HJSArrayLength(object, mapcheck));
} else {
- AddInstruction(elements);
- if (is_store && !fast_double_elements) {
- AddInstruction(new(zone()) HCheckMap(
- elements, isolate()->factory()->fixed_array_map()));
- }
- length = AddInstruction(new(zone()) HFixedArrayLength(elements));
- checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length));
+ length = AddInstruction(new(zone()) HFixedArrayBaseLength(elements));
}
+ checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length));
if (is_store) {
if (fast_double_elements) {
return new(zone()) HStoreKeyedFastDoubleElement(elements,
@@ -4007,7 +4014,7 @@
*has_side_effects = false;
AddInstruction(new(zone()) HCheckNonSmi(object));
AddInstruction(HCheckInstanceType::NewIsSpecObject(object));
- ZoneMapList* maps = prop->GetReceiverTypes();
+ SmallMapList* maps = prop->GetReceiverTypes();
bool todo_external_array = false;
static const int kNumElementTypes = JSObject::kElementsKindCount;
@@ -4029,7 +4036,8 @@
HInstruction* elements_kind_instr =
AddInstruction(new(zone()) HElementsKind(object));
- HInstruction* elements = NULL;
+ HCompareConstantEqAndBranch* elements_kind_branch = NULL;
+ HInstruction* elements = AddInstruction(new(zone()) HLoadElements(object));
HLoadExternalArrayPointer* external_elements = NULL;
HInstruction* checked_key = NULL;
@@ -4045,16 +4053,8 @@
JSObject::LAST_ELEMENTS_KIND);
if (elements_kind == JSObject::FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND
&& todo_external_array) {
- elements = AddInstruction(new(zone()) HLoadElements(object));
- // We need to forcibly prevent some ElementsKind-dependent instructions
- // from being hoisted out of any loops they might occur in, because
- // the current loop-invariant-code-motion algorithm isn't clever enough
- // to deal with them properly.
- // There's some performance to be gained by developing a smarter
- // solution for this.
- elements->ClearFlag(HValue::kUseGVN);
HInstruction* length =
- AddInstruction(new(zone()) HExternalArrayLength(elements));
+ AddInstruction(new(zone()) HFixedArrayBaseLength(elements));
checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length));
external_elements = new(zone()) HLoadExternalArrayPointer(elements);
AddInstruction(external_elements);
@@ -4062,18 +4062,23 @@
if (type_todo[elements_kind]) {
HBasicBlock* if_true = graph()->CreateBasicBlock();
HBasicBlock* if_false = graph()->CreateBasicBlock();
- HCompareConstantEqAndBranch* compare =
- new(zone()) HCompareConstantEqAndBranch(elements_kind_instr,
- elements_kind,
- Token::EQ_STRICT);
- compare->SetSuccessorAt(0, if_true);
- compare->SetSuccessorAt(1, if_false);
- current_block()->Finish(compare);
+ elements_kind_branch = new(zone()) HCompareConstantEqAndBranch(
+ elements_kind_instr, elements_kind, Token::EQ_STRICT);
+ elements_kind_branch->SetSuccessorAt(0, if_true);
+ elements_kind_branch->SetSuccessorAt(1, if_false);
+ current_block()->Finish(elements_kind_branch);
set_current_block(if_true);
HInstruction* access;
if (elements_kind == JSObject::FAST_ELEMENTS ||
elements_kind == JSObject::FAST_DOUBLE_ELEMENTS) {
+ bool fast_double_elements =
+ elements_kind == JSObject::FAST_DOUBLE_ELEMENTS;
+ if (is_store && elements_kind == JSObject::FAST_ELEMENTS) {
+ AddInstruction(new(zone()) HCheckMap(
+ elements, isolate()->factory()->fixed_array_map(),
+ elements_kind_branch));
+ }
HBasicBlock* if_jsarray = graph()->CreateBasicBlock();
HBasicBlock* if_fastobject = graph()->CreateBasicBlock();
HHasInstanceTypeAndBranch* typecheck =
@@ -4083,14 +4088,9 @@
current_block()->Finish(typecheck);
set_current_block(if_jsarray);
- HInstruction* length = new(zone()) HJSArrayLength(object);
+ HInstruction* length = new(zone()) HJSArrayLength(object, typecheck);
AddInstruction(length);
- length->ClearFlag(HValue::kUseGVN);
checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length));
- elements = AddInstruction(new(zone()) HLoadElements(object));
- elements->ClearFlag(HValue::kUseGVN);
- bool fast_double_elements =
- elements_kind == JSObject::FAST_DOUBLE_ELEMENTS;
if (is_store) {
if (fast_double_elements) {
access = AddInstruction(
@@ -4098,8 +4098,6 @@
checked_key,
val));
} else {
- AddInstruction(new(zone()) HCheckMap(
- elements, isolate()->factory()->fixed_array_map()));
access = AddInstruction(
new(zone()) HStoreKeyedFastElement(elements, checked_key, val));
}
@@ -4120,13 +4118,7 @@
if_jsarray->Goto(join);
set_current_block(if_fastobject);
- elements = AddInstruction(new(zone()) HLoadElements(object));
- elements->ClearFlag(HValue::kUseGVN);
- if (is_store && !fast_double_elements) {
- AddInstruction(new(zone()) HCheckMap(
- elements, isolate()->factory()->fixed_array_map()));
- }
- length = AddInstruction(new(zone()) HFixedArrayLength(elements));
+ length = AddInstruction(new(zone()) HFixedArrayBaseLength(elements));
checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length));
if (is_store) {
if (fast_double_elements) {
@@ -4270,8 +4262,9 @@
if (expr->IsArrayLength()) {
HValue* array = Pop();
AddInstruction(new(zone()) HCheckNonSmi(array));
- AddInstruction(HCheckInstanceType::NewIsJSArray(array));
- instr = new(zone()) HJSArrayLength(array);
+ HInstruction* mapcheck =
+ AddInstruction(HCheckInstanceType::NewIsJSArray(array));
+ instr = new(zone()) HJSArrayLength(array, mapcheck);
} else if (expr->IsStringLength()) {
HValue* string = Pop();
@@ -4295,7 +4288,7 @@
} else if (expr->key()->IsPropertyName()) {
Handle<String> name = expr->key()->AsLiteral()->AsPropertyName();
- ZoneMapList* types = expr->GetReceiverTypes();
+ SmallMapList* types = expr->GetReceiverTypes();
HValue* obj = Pop();
if (expr->IsMonomorphic()) {
@@ -4356,7 +4349,7 @@
void HGraphBuilder::HandlePolymorphicCallNamed(Call* expr,
HValue* receiver,
- ZoneMapList* types,
+ SmallMapList* types,
Handle<String> name) {
// TODO(ager): We should recognize when the prototype chains for different
// maps are identical. In that case we can avoid repeatedly generating the
@@ -4886,13 +4879,14 @@
Handle<String> name = prop->key()->AsLiteral()->AsPropertyName();
- ZoneMapList* types = expr->GetReceiverTypes();
+ SmallMapList* types = expr->GetReceiverTypes();
HValue* receiver =
environment()->ExpressionStackAt(expr->arguments()->length());
if (expr->IsMonomorphic()) {
- Handle<Map> receiver_map =
- (types == NULL) ? Handle<Map>::null() : types->first();
+ Handle<Map> receiver_map = (types == NULL || types->is_empty())
+ ? Handle<Map>::null()
+ : types->first();
if (TryInlineBuiltinFunction(expr,
receiver,
receiver_map,
@@ -5109,19 +5103,13 @@
// The subexpression does not have side effects.
return ast_context()->ReturnValue(graph()->GetConstantFalse());
} else if (prop != NULL) {
- if (prop->is_synthetic()) {
- // Result of deleting parameters is false, even when they rewrite
- // to accesses on the arguments object.
- return ast_context()->ReturnValue(graph()->GetConstantFalse());
- } else {
- CHECK_ALIVE(VisitForValue(prop->obj()));
- CHECK_ALIVE(VisitForValue(prop->key()));
- HValue* key = Pop();
- HValue* obj = Pop();
- HValue* context = environment()->LookupContext();
- HDeleteProperty* instr = new(zone()) HDeleteProperty(context, obj, key);
- return ast_context()->ReturnInstruction(instr, expr->id());
- }
+ CHECK_ALIVE(VisitForValue(prop->obj()));
+ CHECK_ALIVE(VisitForValue(prop->key()));
+ HValue* key = Pop();
+ HValue* obj = Pop();
+ HValue* context = environment()->LookupContext();
+ HDeleteProperty* instr = new(zone()) HDeleteProperty(context, obj, key);
+ return ast_context()->ReturnInstruction(instr, expr->id());
} else if (var->is_global()) {
Bailout("delete with global variable");
} else {
@@ -5566,9 +5554,11 @@
// We need an extra block to maintain edge-split form.
HBasicBlock* empty_block = graph()->CreateBasicBlock();
HBasicBlock* eval_right = graph()->CreateBasicBlock();
+ unsigned test_id = expr->left()->test_id();
+ ToBooleanStub::Types expected(oracle()->ToBooleanTypes(test_id));
HBranch* test = is_logical_and
- ? new(zone()) HBranch(Top(), eval_right, empty_block)
- : new(zone()) HBranch(Top(), empty_block, eval_right);
+ ? new(zone()) HBranch(Top(), eval_right, empty_block, expected)
+ : new(zone()) HBranch(Top(), empty_block, eval_right, expected);
current_block()->Finish(test);
set_current_block(eval_right);
@@ -5843,7 +5833,9 @@
void HGraphBuilder::VisitDeclaration(Declaration* decl) {
// We support only declarations that do not require code generation.
Variable* var = decl->proxy()->var();
- if (!var->IsStackAllocated() || decl->fun() != NULL) {
+ if (!var->IsStackAllocated() ||
+ decl->fun() != NULL ||
+ decl->mode() == Variable::LET) {
return Bailout("unsupported declaration");
}
@@ -6255,11 +6247,6 @@
}
-void HGraphBuilder::GenerateIsNativeOrStrictMode(CallRuntime* call) {
- return Bailout("inlined runtime function: IsNativeOrStrictMode");
-}
-
-
#undef CHECK_BAILOUT
#undef CHECK_ALIVE
diff --git a/src/hydrogen.h b/src/hydrogen.h
index ffa8aa7..1066d2c 100644
--- a/src/hydrogen.h
+++ b/src/hydrogen.h
@@ -238,7 +238,7 @@
void OrderBlocks();
void AssignDominators();
void ReplaceCheckedValues();
- void MarkAsDeoptimizingRecursively(HBasicBlock* block);
+ void PropagateDeoptimizingMark();
// Returns false if there are phi-uses of the arguments-object
// which are not supported by the optimizing compiler.
@@ -299,6 +299,7 @@
HConstant* GetConstant(SetOncePointer<HConstant>* pointer,
Object* value);
+ void MarkAsDeoptimizingRecursively(HBasicBlock* block);
void InsertTypeConversions(HInstruction* instr);
void PropagateMinusZeroChecks(HValue* value, BitVector* visited);
void RecursivelyMarkPhiDeoptimizeOnUndefined(HPhi* phi);
@@ -725,6 +726,8 @@
HBasicBlock* second,
int join_id);
+ TypeFeedbackOracle* oracle() const { return function_state()->oracle(); }
+
private:
// Type of a member function that generates inline code for a native function.
typedef void (HGraphBuilder::*InlineFunctionGenerator)(CallRuntime* call);
@@ -753,7 +756,6 @@
CompilationInfo* info() const {
return function_state()->compilation_info();
}
- TypeFeedbackOracle* oracle() const { return function_state()->oracle(); }
AstContext* call_context() const {
return function_state()->call_context();
@@ -901,11 +903,11 @@
void HandlePolymorphicStoreNamedField(Assignment* expr,
HValue* object,
HValue* value,
- ZoneMapList* types,
+ SmallMapList* types,
Handle<String> name);
void HandlePolymorphicCallNamed(Call* expr,
HValue* receiver,
- ZoneMapList* types,
+ SmallMapList* types,
Handle<String> name);
void HandleLiteralCompareTypeof(CompareOperation* compare_expr,
Expression* expr,
diff --git a/src/ia32/assembler-ia32.cc b/src/ia32/assembler-ia32.cc
index 0dc5194..9996474 100644
--- a/src/ia32/assembler-ia32.cc
+++ b/src/ia32/assembler-ia32.cc
@@ -1957,6 +1957,18 @@
}
+void Assembler::roundsd(XMMRegister dst, XMMRegister src, RoundingMode mode) {
+ ASSERT(CpuFeatures::IsEnabled(SSE4_1));
+ EnsureSpace ensure_space(this);
+ EMIT(0x66);
+ EMIT(0x0F);
+ EMIT(0x3A);
+ EMIT(0x0B);
+ emit_sse_operand(dst, src);
+ // Mask precision exception.
+ EMIT(static_cast<byte>(mode) | 0x8);
+}
+
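
The bytes emitted above are the SSE4.1 ROUNDSD encoding (66 0F 3A 0B /r imm8); the immediate's low two bits select the rounding mode and bit 3 suppresses the precision (inexact) exception, which is what "| 0x8" does. A standalone sketch of the same byte sequence for the register-register form:

#include <cstdint>
#include <vector>

enum RoundingModeSketch {
  kNearest = 0x0,
  kDown = 0x1,
  kUp = 0x2,
  kTowardZero = 0x3
};

// Illustrative encoder; dst_reg and src_reg are XMM register numbers 0..7.
void EmitRoundsd(std::vector<uint8_t>* code, int dst_reg, int src_reg,
                 RoundingModeSketch mode) {
  code->push_back(0x66);
  code->push_back(0x0F);
  code->push_back(0x3A);
  code->push_back(0x0B);
  // ModR/M byte for the register-register form: mod=11, reg=dst, rm=src.
  code->push_back(static_cast<uint8_t>(0xC0 | (dst_reg << 3) | src_reg));
  code->push_back(static_cast<uint8_t>(mode) | 0x08);  // mask precision exception
}
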
void Assembler::movmskpd(Register dst, XMMRegister src) {
ASSERT(CpuFeatures::IsEnabled(SSE2));
EnsureSpace ensure_space(this);
diff --git a/src/ia32/assembler-ia32.h b/src/ia32/assembler-ia32.h
index da38e13..c186094 100644
--- a/src/ia32/assembler-ia32.h
+++ b/src/ia32/assembler-ia32.h
@@ -941,6 +941,16 @@
void andpd(XMMRegister dst, XMMRegister src);
void ucomisd(XMMRegister dst, XMMRegister src);
+
+ enum RoundingMode {
+ kRoundToNearest = 0x0,
+ kRoundDown = 0x1,
+ kRoundUp = 0x2,
+ kRoundToZero = 0x3
+ };
+
+ void roundsd(XMMRegister dst, XMMRegister src, RoundingMode mode);
+
void movmskpd(Register dst, XMMRegister src);
void cmpltsd(XMMRegister dst, XMMRegister src);
diff --git a/src/ia32/builtins-ia32.cc b/src/ia32/builtins-ia32.cc
index f8a85de..845a073 100644
--- a/src/ia32/builtins-ia32.cc
+++ b/src/ia32/builtins-ia32.cc
@@ -373,7 +373,7 @@
__ LeaveConstructFrame();
// Remove caller arguments from the stack and return.
- ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
+ STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
__ pop(ecx);
__ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize)); // 1 ~ receiver
__ push(ecx);
@@ -923,7 +923,7 @@
// Fill the FixedArray with the hole value. Inline the code if short.
// Reconsider loop unfolding if kPreallocatedArrayElements gets changed.
static const int kLoopUnfoldLimit = 4;
- ASSERT(kPreallocatedArrayElements <= kLoopUnfoldLimit);
+ STATIC_ASSERT(kPreallocatedArrayElements <= kLoopUnfoldLimit);
if (initial_capacity <= kLoopUnfoldLimit) {
// Use a scratch register here to have only one reloc info when unfolding
// the loop.
@@ -975,7 +975,7 @@
// Allocate the JSArray object together with space for a FixedArray with the
// requested elements.
- ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
+ STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
__ AllocateInNewSpace(JSArray::kSize + FixedArray::kHeaderSize,
times_half_pointer_size, // array_size is a smi.
array_size,
@@ -1100,7 +1100,7 @@
__ bind(&argc_one_or_more);
__ cmp(eax, 1);
__ j(not_equal, &argc_two_or_more);
- ASSERT(kSmiTag == 0);
+ STATIC_ASSERT(kSmiTag == 0);
__ mov(ecx, Operand(esp, (push_count + 1) * kPointerSize));
__ test(ecx, Operand(ecx));
__ j(not_zero, &not_empty_array);
@@ -1155,7 +1155,7 @@
// Handle construction of an array from a list of arguments.
__ bind(&argc_two_or_more);
- ASSERT(kSmiTag == 0);
+ STATIC_ASSERT(kSmiTag == 0);
__ SmiTag(eax); // Convert argc to a smi.
// eax: array_size (smi)
// edi: constructor
@@ -1437,7 +1437,7 @@
// Preserve the number of arguments on the stack. Must preserve eax,
// ebx and ecx because these registers are used when copying the
// arguments and the receiver.
- ASSERT(kSmiTagSize == 1);
+ STATIC_ASSERT(kSmiTagSize == 1);
__ lea(edi, Operand(eax, eax, times_1, kSmiTag));
__ push(edi);
}
@@ -1451,7 +1451,7 @@
__ leave();
// Remove caller arguments from the stack.
- ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
+ STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
__ pop(ecx);
__ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize)); // 1 ~ receiver
__ push(ecx);
diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc
index 71aacf9..85e74b8 100644
--- a/src/ia32/code-stubs-ia32.cc
+++ b/src/ia32/code-stubs-ia32.cc
@@ -236,69 +236,141 @@
}
-// The stub returns zero for false, and a non-zero value for true.
+// The stub expects its argument on the stack and returns its result in tos_:
+// zero for false, and a non-zero value for true.
void ToBooleanStub::Generate(MacroAssembler* masm) {
- Label false_result, true_result, not_string;
+ Label patch;
Factory* factory = masm->isolate()->factory();
+ const Register argument = eax;
const Register map = edx;
- __ mov(eax, Operand(esp, 1 * kPointerSize));
+ if (!types_.IsEmpty()) {
+ __ mov(argument, Operand(esp, 1 * kPointerSize));
+ }
// undefined -> false
- __ cmp(eax, factory->undefined_value());
- __ j(equal, &false_result);
+ CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false);
// Boolean -> its value
- __ cmp(eax, factory->false_value());
- __ j(equal, &false_result);
- __ cmp(eax, factory->true_value());
- __ j(equal, &true_result);
-
- // Smis: 0 -> false, all other -> true
- __ test(eax, Operand(eax));
- __ j(zero, &false_result);
- __ JumpIfSmi(eax, &true_result);
+ CheckOddball(masm, BOOLEAN, Heap::kFalseValueRootIndex, false);
+ CheckOddball(masm, BOOLEAN, Heap::kTrueValueRootIndex, true);
// 'null' -> false.
- __ cmp(eax, factory->null_value());
- __ j(equal, &false_result, Label::kNear);
+ CheckOddball(masm, NULL_TYPE, Heap::kNullValueRootIndex, false);
- // Get the map of the heap object.
- __ mov(map, FieldOperand(eax, HeapObject::kMapOffset));
+ if (types_.Contains(SMI)) {
+ // Smis: 0 -> false, all other -> true
+ Label not_smi;
+ __ JumpIfNotSmi(argument, &not_smi, Label::kNear);
+ // argument contains the correct return value already.
+ if (!tos_.is(argument)) {
+ __ mov(tos_, argument);
+ }
+ __ ret(1 * kPointerSize);
+ __ bind(&not_smi);
+ } else if (types_.NeedsMap()) {
+ // If we need a map later and have a Smi -> patch.
+ __ JumpIfSmi(argument, &patch, Label::kNear);
+ }
- // Undetectable -> false.
- __ test_b(FieldOperand(map, Map::kBitFieldOffset),
- 1 << Map::kIsUndetectable);
- __ j(not_zero, &false_result, Label::kNear);
+ if (types_.NeedsMap()) {
+ __ mov(map, FieldOperand(argument, HeapObject::kMapOffset));
- // JavaScript object -> true.
- __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
- __ j(above_equal, &true_result, Label::kNear);
+ if (types_.CanBeUndetectable()) {
+ __ test_b(FieldOperand(map, Map::kBitFieldOffset),
+ 1 << Map::kIsUndetectable);
+ // Undetectable -> false.
+ Label not_undetectable;
+ __ j(zero, &not_undetectable, Label::kNear);
+ __ Set(tos_, Immediate(0));
+ __ ret(1 * kPointerSize);
+ __ bind(&not_undetectable);
+ }
+ }
- // String value -> false iff empty.
- __ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
- __ j(above_equal, &not_string, Label::kNear);
- __ cmp(FieldOperand(eax, String::kLengthOffset), Immediate(0));
- __ j(zero, &false_result, Label::kNear);
- __ jmp(&true_result, Label::kNear);
+ if (types_.Contains(SPEC_OBJECT)) {
+ // spec object -> true.
+ Label not_js_object;
+ __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
+ __ j(below, &not_js_object, Label::kNear);
+ // argument contains the correct return value already.
+ if (!tos_.is(argument)) {
+ __ Set(tos_, Immediate(1));
+ }
+ __ ret(1 * kPointerSize);
+ __ bind(&not_js_object);
+ }
- __ bind(&not_string);
- // HeapNumber -> false iff +0, -0, or NaN.
- __ cmp(map, factory->heap_number_map());
- __ j(not_equal, &true_result, Label::kNear);
- __ fldz();
- __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
- __ FCmp();
- __ j(zero, &false_result, Label::kNear);
- // Fall through to |true_result|.
+ if (types_.Contains(STRING)) {
+ // String value -> false iff empty.
+ Label not_string;
+ __ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
+ __ j(above_equal, &not_string, Label::kNear);
+ __ mov(tos_, FieldOperand(argument, String::kLengthOffset));
+ __ ret(1 * kPointerSize); // the string length is OK as the return value
+ __ bind(&not_string);
+ }
- // Return 1/0 for true/false in tos_.
- __ bind(&true_result);
- __ mov(tos_, 1);
- __ ret(1 * kPointerSize);
- __ bind(&false_result);
- __ mov(tos_, 0);
- __ ret(1 * kPointerSize);
+ if (types_.Contains(HEAP_NUMBER)) {
+ // heap number -> false iff +0, -0, or NaN.
+ Label not_heap_number, false_result;
+ __ cmp(map, factory->heap_number_map());
+ __ j(not_equal, &not_heap_number, Label::kNear);
+ __ fldz();
+ __ fld_d(FieldOperand(argument, HeapNumber::kValueOffset));
+ __ FCmp();
+ __ j(zero, &false_result, Label::kNear);
+ // argument contains the correct return value already.
+ if (!tos_.is(argument)) {
+ __ Set(tos_, Immediate(1));
+ }
+ __ ret(1 * kPointerSize);
+ __ bind(&false_result);
+ __ Set(tos_, Immediate(0));
+ __ ret(1 * kPointerSize);
+ __ bind(&not_heap_number);
+ }
+
+ __ bind(&patch);
+ GenerateTypeTransition(masm);
+}
+
+
+void ToBooleanStub::CheckOddball(MacroAssembler* masm,
+ Type type,
+ Heap::RootListIndex value,
+ bool result) {
+ const Register argument = eax;
+ if (types_.Contains(type)) {
+ // If we see an expected oddball, return its ToBoolean value in tos_.
+ Label different_value;
+ __ CompareRoot(argument, value);
+ __ j(not_equal, &different_value, Label::kNear);
+ if (!result) {
+ // If we have to return zero, there is no way around clearing tos_.
+ __ Set(tos_, Immediate(0));
+ } else if (!tos_.is(argument)) {
+ // If we have to return non-zero, we can re-use the argument if it is the
+ // same register as the result, because we never see Smi-zero here.
+ __ Set(tos_, Immediate(1));
+ }
+ __ ret(1 * kPointerSize);
+ __ bind(&different_value);
+ }
+}
+
+
+void ToBooleanStub::GenerateTypeTransition(MacroAssembler* masm) {
+ __ pop(ecx); // Get return address, operand is now on top of stack.
+ __ push(Immediate(Smi::FromInt(tos_.code())));
+ __ push(Immediate(Smi::FromInt(types_.ToByte())));
+ __ push(ecx); // Push return address.
+ // Patch the caller to an appropriate specialized stub and return the
+ // operation result to the caller of the stub.
+ __ TailCallExternalReference(
+ ExternalReference(IC_Utility(IC::kToBoolean_Patch), masm->isolate()),
+ 3,
+ 1);
}
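
The rewritten stub checks, in order, only the input kinds recorded in types_ and falls through to GenerateTypeTransition for anything unseen, which pushes the result register's code and the type byte and tail-calls the IC runtime so a broader stub can be patched in. The truthiness rules it implements reduce to the following sketch over an illustrative tagged-value model (undetectable objects, which also evaluate to false, are omitted for brevity):

#include <cmath>
#include <string>
#include <variant>

struct Undefined {};
struct Null {};
struct SpecObject {};  // any JS object or function

using ValueModel =
    std::variant<Undefined, Null, bool, int, std::string, double, SpecObject>;

bool ToBooleanModel(const ValueModel& v) {
  if (std::holds_alternative<Undefined>(v)) return false;
  if (std::holds_alternative<Null>(v)) return false;
  if (const bool* b = std::get_if<bool>(&v)) return *b;
  if (const int* smi = std::get_if<int>(&v)) return *smi != 0;
  if (const std::string* s = std::get_if<std::string>(&v)) return !s->empty();
  if (const double* d = std::get_if<double>(&v)) {
    return *d != 0.0 && !std::isnan(*d);  // +0, -0 and NaN are false
  }
  return true;  // remaining alternative: SpecObject
}
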
@@ -421,10 +493,10 @@
__ cmp(Operand(scratch2), Immediate(non_smi_exponent));
// If we have a match of the int32-but-not-Smi exponent then skip some
// logic.
- __ j(equal, &right_exponent);
+ __ j(equal, &right_exponent, Label::kNear);
// If the exponent is higher than that then go to slow case. This catches
// numbers that don't fit in a signed int32, infinities and NaNs.
- __ j(less, &normal_exponent);
+ __ j(less, &normal_exponent, Label::kNear);
{
// Handle a big exponent. The only reason we have this code is that the
@@ -453,9 +525,9 @@
__ or_(ecx, Operand(scratch2));
// We have the answer in ecx, but we may need to negate it.
__ test(scratch, Operand(scratch));
- __ j(positive, &done);
+ __ j(positive, &done, Label::kNear);
__ neg(ecx);
- __ jmp(&done);
+ __ jmp(&done, Label::kNear);
}
__ bind(&normal_exponent);
@@ -468,7 +540,7 @@
(HeapNumber::kExponentBias + 0) << HeapNumber::kExponentShift;
__ sub(Operand(scratch2), Immediate(zero_exponent));
// ecx already has a Smi zero.
- __ j(less, &done);
+ __ j(less, &done, Label::kNear);
// We have a shifted exponent between 0 and 30 in scratch2.
__ shr(scratch2, HeapNumber::kExponentShift);
@@ -693,7 +765,7 @@
Label slow_allocate_heapnumber, heapnumber_allocated;
__ AllocateHeapNumber(eax, ebx, ecx, &slow_allocate_heapnumber);
- __ jmp(&heapnumber_allocated);
+ __ jmp(&heapnumber_allocated, Label::kNear);
__ bind(&slow_allocate_heapnumber);
__ EnterInternalFrame();
@@ -1370,14 +1442,14 @@
Register right = eax;
// Test if left operand is a string.
- __ JumpIfSmi(left, &call_runtime);
+ __ JumpIfSmi(left, &call_runtime, Label::kNear);
__ CmpObjectType(left, FIRST_NONSTRING_TYPE, ecx);
- __ j(above_equal, &call_runtime);
+ __ j(above_equal, &call_runtime, Label::kNear);
// Test if right operand is a string.
- __ JumpIfSmi(right, &call_runtime);
+ __ JumpIfSmi(right, &call_runtime, Label::kNear);
__ CmpObjectType(right, FIRST_NONSTRING_TYPE, ecx);
- __ j(above_equal, &call_runtime);
+ __ j(above_equal, &call_runtime, Label::kNear);
StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB);
GenerateRegisterArgsPush(masm);
@@ -1491,7 +1563,7 @@
} else {
// Check if result fits in a smi.
__ cmp(eax, 0xc0000000);
- __ j(negative, &non_smi_result);
+ __ j(negative, &non_smi_result, Label::kNear);
}
// Tag smi result and return.
__ SmiTag(eax);
@@ -1705,7 +1777,7 @@
} else {
// Check if result fits in a smi.
__ cmp(eax, 0xc0000000);
- __ j(negative, &non_smi_result);
+ __ j(negative, &non_smi_result, Label::kNear);
}
// Tag smi result and return.
__ SmiTag(eax);
@@ -1904,7 +1976,7 @@
} else {
// Check if result fits in a smi.
__ cmp(eax, 0xc0000000);
- __ j(negative, &non_smi_result);
+ __ j(negative, &non_smi_result, Label::kNear);
}
// Tag smi result and return.
__ SmiTag(eax);
@@ -2379,7 +2451,7 @@
Label load_arg2, done;
// Test if arg1 is a Smi.
- __ JumpIfNotSmi(edx, &arg1_is_object);
+ __ JumpIfNotSmi(edx, &arg1_is_object, Label::kNear);
__ SmiUntag(edx);
__ jmp(&load_arg2);
@@ -2405,7 +2477,7 @@
__ bind(&load_arg2);
// Test if arg2 is a Smi.
- __ JumpIfNotSmi(eax, &arg2_is_object);
+ __ JumpIfNotSmi(eax, &arg2_is_object, Label::kNear);
__ SmiUntag(eax);
__ mov(ecx, eax);
@@ -2795,7 +2867,7 @@
// Check that the key is a smi.
Label slow;
- __ JumpIfNotSmi(edx, &slow);
+ __ JumpIfNotSmi(edx, &slow, Label::kNear);
// Check if the calling frame is an arguments adaptor frame.
Label adaptor;
@@ -2808,7 +2880,7 @@
// through register eax. Use unsigned comparison to get negative
// check for free.
__ cmp(edx, Operand(eax));
- __ j(above_equal, &slow);
+ __ j(above_equal, &slow, Label::kNear);
// Read the argument from the stack and return it.
STATIC_ASSERT(kSmiTagSize == 1);
@@ -2824,7 +2896,7 @@
__ bind(&adaptor);
__ mov(ecx, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
__ cmp(edx, Operand(ecx));
- __ j(above_equal, &slow);
+ __ j(above_equal, &slow, Label::kNear);
// Read the argument from the stack and return it.
STATIC_ASSERT(kSmiTagSize == 1);
@@ -3103,11 +3175,11 @@
__ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
__ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
__ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
- __ j(equal, &adaptor_frame);
+ __ j(equal, &adaptor_frame, Label::kNear);
// Get the length from the frame.
__ mov(ecx, Operand(esp, 1 * kPointerSize));
- __ jmp(&try_allocate);
+ __ jmp(&try_allocate, Label::kNear);
// Patch the arguments.length and the parameters pointer.
__ bind(&adaptor_frame);
@@ -3153,7 +3225,7 @@
// If there are no actual arguments, we're done.
Label done;
__ test(ecx, Operand(ecx));
- __ j(zero, &done);
+ __ j(zero, &done, Label::kNear);
// Get the parameters pointer from the stack.
__ mov(edx, Operand(esp, 2 * kPointerSize));
@@ -3299,6 +3371,8 @@
__ cmp(edx, Operand(eax));
__ j(greater, &runtime);
+ // Reset offset for possibly sliced string.
+ __ Set(edi, Immediate(0));
// ecx: RegExp data (FixedArray)
// Check the representation and encoding of the subject string.
Label seq_ascii_string, seq_two_byte_string, check_code;
@@ -3309,36 +3383,45 @@
__ and_(ebx,
kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
- __ j(zero, &seq_two_byte_string);
+ __ j(zero, &seq_two_byte_string, Label::kNear);
// Any other flat string must be a flat ascii string.
- __ test(Operand(ebx),
+ __ and_(Operand(ebx),
Immediate(kIsNotStringMask | kStringRepresentationMask));
- __ j(zero, &seq_ascii_string);
+ __ j(zero, &seq_ascii_string, Label::kNear);
- // Check for flat cons string.
+ // Check for flat cons string or sliced string.
// A flat cons string is a cons string where the second part is the empty
// string. In that case the subject string is just the first part of the cons
// string. Also in this case the first part of the cons string is known to be
// a sequential string or an external string.
- STATIC_ASSERT(kExternalStringTag != 0);
- STATIC_ASSERT((kConsStringTag & kExternalStringTag) == 0);
- __ test(Operand(ebx),
- Immediate(kIsNotStringMask | kExternalStringTag));
- __ j(not_zero, &runtime);
- // String is a cons string.
- __ mov(edx, FieldOperand(eax, ConsString::kSecondOffset));
- __ cmp(Operand(edx), factory->empty_string());
+ // In the case of a sliced string its offset has to be taken into account.
+ Label cons_string, check_encoding;
+ STATIC_ASSERT(kConsStringTag < kExternalStringTag);
+ STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
+ __ cmp(Operand(ebx), Immediate(kExternalStringTag));
+ __ j(less, &cons_string);
+ __ j(equal, &runtime);
+
+ // String is sliced.
+ __ mov(edi, FieldOperand(eax, SlicedString::kOffsetOffset));
+ __ mov(eax, FieldOperand(eax, SlicedString::kParentOffset));
+ // edi: offset of sliced string, smi-tagged.
+ // eax: parent string.
+ __ jmp(&check_encoding, Label::kNear);
+ // String is a cons string, check whether it is flat.
+ __ bind(&cons_string);
+ __ cmp(FieldOperand(eax, ConsString::kSecondOffset), factory->empty_string());
__ j(not_equal, &runtime);
__ mov(eax, FieldOperand(eax, ConsString::kFirstOffset));
+ __ bind(&check_encoding);
__ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
- // String is a cons string with empty second part.
- // eax: first part of cons string.
- // ebx: map of first part of cons string.
- // Is first part a flat two byte string?
+ // eax: first part of cons string or parent of sliced string.
+ // ebx: map of first part of cons string or map of parent of sliced string.
+ // Is first part of cons or parent of slice a flat two byte string?
__ test_b(FieldOperand(ebx, Map::kInstanceTypeOffset),
kStringRepresentationMask | kStringEncodingMask);
STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0);
- __ j(zero, &seq_two_byte_string);
+ __ j(zero, &seq_two_byte_string, Label::kNear);
// Any other flat string must be ascii.
__ test_b(FieldOperand(ebx, Map::kInstanceTypeOffset),
kStringRepresentationMask);
@@ -3348,14 +3431,14 @@
// eax: subject string (flat ascii)
// ecx: RegExp data (FixedArray)
__ mov(edx, FieldOperand(ecx, JSRegExp::kDataAsciiCodeOffset));
- __ Set(edi, Immediate(1)); // Type is ascii.
- __ jmp(&check_code);
+ __ Set(ecx, Immediate(1)); // Type is ascii.
+ __ jmp(&check_code, Label::kNear);
__ bind(&seq_two_byte_string);
// eax: subject string (flat two byte)
// ecx: RegExp data (FixedArray)
__ mov(edx, FieldOperand(ecx, JSRegExp::kDataUC16CodeOffset));
- __ Set(edi, Immediate(0)); // Type is two byte.
+ __ Set(ecx, Immediate(0)); // Type is two byte.
__ bind(&check_code);
// Check that the irregexp code has been generated for the actual string
@@ -3365,7 +3448,7 @@
// eax: subject string
// edx: code
- // edi: encoding of subject string (1 if ascii, 0 if two_byte);
+ // ecx: encoding of subject string (1 if ascii, 0 if two_byte);
// Load used arguments before starting to push arguments for call to native
// RegExp code to avoid handling changing stack height.
__ mov(ebx, Operand(esp, kPreviousIndexOffset));
@@ -3374,7 +3457,7 @@
// eax: subject string
// ebx: previous index
// edx: code
- // edi: encoding of subject string (1 if ascii 0 if two_byte);
+ // ecx: encoding of subject string (1 if ascii 0 if two_byte);
// All checks done. Now push arguments for native regexp code.
Counters* counters = masm->isolate()->counters();
__ IncrementCounter(counters->regexp_entry_native(), 1);
@@ -3391,23 +3474,47 @@
__ mov(Operand(esp, 6 * kPointerSize), Immediate(1));
// Argument 6: Start (high end) of backtracking stack memory area.
- __ mov(ecx, Operand::StaticVariable(address_of_regexp_stack_memory_address));
- __ add(ecx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
- __ mov(Operand(esp, 5 * kPointerSize), ecx);
+ __ mov(esi, Operand::StaticVariable(address_of_regexp_stack_memory_address));
+ __ add(esi, Operand::StaticVariable(address_of_regexp_stack_memory_size));
+ __ mov(Operand(esp, 5 * kPointerSize), esi);
// Argument 5: static offsets vector buffer.
__ mov(Operand(esp, 4 * kPointerSize),
Immediate(ExternalReference::address_of_static_offsets_vector(
masm->isolate())));
+ // Argument 2: Previous index.
+ __ mov(Operand(esp, 1 * kPointerSize), ebx);
+
+ // Argument 1: Original subject string.
+ // The original subject is in the previous stack frame. Therefore we have to
+ // use ebp, which points exactly to one pointer size below the previous esp.
+ // (Because creating a new stack frame pushes the previous ebp onto the stack
+ // and thereby moves up esp by one kPointerSize.)
+ __ mov(esi, Operand(ebp, kSubjectOffset + kPointerSize));
+ __ mov(Operand(esp, 0 * kPointerSize), esi);
+
+ // esi: original subject string
+ // eax: underlying subject string
+ // ebx: previous index
+ // ecx: encoding of subject string (1 if ascii 0 if two_byte);
+ // edx: code
// Argument 4: End of string data
// Argument 3: Start of string data
- Label setup_two_byte, setup_rest;
- __ test(edi, Operand(edi));
- __ mov(edi, FieldOperand(eax, String::kLengthOffset));
- __ j(zero, &setup_two_byte, Label::kNear);
+ // Prepare start and end index of the input.
+ // Load the length from the original sliced string if that is the case.
+ __ mov(esi, FieldOperand(esi, String::kLengthOffset));
+ __ add(esi, Operand(edi)); // Calculate input end wrt offset.
__ SmiUntag(edi);
- __ lea(ecx, FieldOperand(eax, edi, times_1, SeqAsciiString::kHeaderSize));
+ __ add(ebx, Operand(edi)); // Calculate input start wrt offset.
+
+ // ebx: start index of the input string
+ // esi: end index of the input string
+ Label setup_two_byte, setup_rest;
+ __ test(ecx, Operand(ecx));
+ __ j(zero, &setup_two_byte, Label::kNear);
+ __ SmiUntag(esi);
+ __ lea(ecx, FieldOperand(eax, esi, times_1, SeqAsciiString::kHeaderSize));
__ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4.
__ lea(ecx, FieldOperand(eax, ebx, times_1, SeqAsciiString::kHeaderSize));
__ mov(Operand(esp, 2 * kPointerSize), ecx); // Argument 3.
@@ -3415,20 +3522,14 @@
__ bind(&setup_two_byte);
STATIC_ASSERT(kSmiTag == 0);
- STATIC_ASSERT(kSmiTagSize == 1); // edi is smi (powered by 2).
- __ lea(ecx, FieldOperand(eax, edi, times_1, SeqTwoByteString::kHeaderSize));
+ STATIC_ASSERT(kSmiTagSize == 1); // esi is smi (powered by 2).
+ __ lea(ecx, FieldOperand(eax, esi, times_1, SeqTwoByteString::kHeaderSize));
__ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4.
__ lea(ecx, FieldOperand(eax, ebx, times_2, SeqTwoByteString::kHeaderSize));
__ mov(Operand(esp, 2 * kPointerSize), ecx); // Argument 3.
__ bind(&setup_rest);
- // Argument 2: Previous index.
- __ mov(Operand(esp, 1 * kPointerSize), ebx);
-
- // Argument 1: Subject string.
- __ mov(Operand(esp, 0 * kPointerSize), eax);
-
// Locate the code entry and call it.
__ add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
__ call(Operand(edx));
@@ -3467,7 +3568,7 @@
// by javascript code.
__ cmp(eax, factory->termination_exception());
Label throw_termination_exception;
- __ j(equal, &throw_termination_exception);
+ __ j(equal, &throw_termination_exception, Label::kNear);
// Handle normal exception by following handler chain.
__ Throw(eax);
@@ -3750,16 +3851,16 @@
void CompareStub::Generate(MacroAssembler* masm) {
ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
- Label check_unequal_objects, done;
+ Label check_unequal_objects;
// Compare two smis if required.
if (include_smi_compare_) {
Label non_smi, smi_done;
__ mov(ecx, Operand(edx));
__ or_(ecx, Operand(eax));
- __ JumpIfNotSmi(ecx, &non_smi);
+ __ JumpIfNotSmi(ecx, &non_smi, Label::kNear);
__ sub(edx, Operand(eax)); // Return on the result of the subtraction.
- __ j(no_overflow, &smi_done);
+ __ j(no_overflow, &smi_done, Label::kNear);
__ not_(edx); // Correct sign in case of overflow. edx is never 0 here.
__ bind(&smi_done);
__ mov(eax, edx);
@@ -3881,7 +3982,7 @@
__ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
Immediate(masm->isolate()->factory()->heap_number_map()));
// If heap number, handle it in the slow case.
- __ j(equal, &slow);
+ __ j(equal, &slow, Label::kNear);
// Return non-equal (ebx is not zero)
__ mov(eax, ebx);
__ ret(0);
@@ -3932,7 +4033,7 @@
__ ucomisd(xmm0, xmm1);
// Don't base result on EFLAGS when a NaN is involved.
- __ j(parity_even, &unordered);
+ __ j(parity_even, &unordered, Label::kNear);
// Return a result of -1, 0, or 1, based on EFLAGS.
__ mov(eax, 0); // equal
__ mov(ecx, Immediate(Smi::FromInt(1)));
@@ -3948,12 +4049,12 @@
__ FCmp();
// Don't base result on EFLAGS when a NaN is involved.
- __ j(parity_even, &unordered);
+ __ j(parity_even, &unordered, Label::kNear);
Label below_label, above_label;
// Return a result of -1, 0, or 1, based on EFLAGS.
- __ j(below, &below_label);
- __ j(above, &above_label);
+ __ j(below, &below_label, Label::kNear);
+ __ j(above, &above_label, Label::kNear);
__ Set(eax, Immediate(0));
__ ret(0);
@@ -4268,7 +4369,7 @@
// If the returned exception is RETRY_AFTER_GC continue at retry label
STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0);
__ test(eax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
- __ j(zero, &retry);
+ __ j(zero, &retry, Label::kNear);
// Special handling of out of memory exceptions.
__ cmp(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException()));
@@ -4388,11 +4489,11 @@
ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address,
masm->isolate());
__ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
- __ j(not_equal, &not_outermost_js);
+ __ j(not_equal, &not_outermost_js, Label::kNear);
__ mov(Operand::StaticVariable(js_entry_sp), ebp);
__ push(Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
Label cont;
- __ jmp(&cont);
+ __ jmp(&cont, Label::kNear);
__ bind(&not_outermost_js);
__ push(Immediate(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));
__ bind(&cont);
@@ -4633,26 +4734,26 @@
__ bind(&not_js_object);
// Before null, smi and string value checks, check that the rhs is a function
// as for a non-function rhs an exception needs to be thrown.
- __ JumpIfSmi(function, &slow);
+ __ JumpIfSmi(function, &slow, Label::kNear);
__ CmpObjectType(function, JS_FUNCTION_TYPE, scratch);
- __ j(not_equal, &slow);
+ __ j(not_equal, &slow, Label::kNear);
// Null is not instance of anything.
__ cmp(object, factory->null_value());
- __ j(not_equal, &object_not_null);
+ __ j(not_equal, &object_not_null, Label::kNear);
__ Set(eax, Immediate(Smi::FromInt(1)));
__ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
__ bind(&object_not_null);
// Smi values is not instance of anything.
- __ JumpIfNotSmi(object, &object_not_null_or_smi);
+ __ JumpIfNotSmi(object, &object_not_null_or_smi, Label::kNear);
__ Set(eax, Immediate(Smi::FromInt(1)));
__ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
__ bind(&object_not_null_or_smi);
// String values is not instance of anything.
Condition is_string = masm->IsObjectStringType(object, scratch, scratch);
- __ j(NegateCondition(is_string), &slow);
+ __ j(NegateCondition(is_string), &slow, Label::kNear);
__ Set(eax, Immediate(Smi::FromInt(1)));
__ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
@@ -4739,6 +4840,7 @@
Label flat_string;
Label ascii_string;
Label got_char_code;
+ Label sliced_string;
// If the receiver is a smi trigger the non-string case.
STATIC_ASSERT(kSmiTag == 0);
@@ -4769,31 +4871,45 @@
__ j(zero, &flat_string);
// Handle non-flat strings.
- __ test(result_, Immediate(kIsConsStringMask));
- __ j(zero, &call_runtime_);
+ __ and_(result_, kStringRepresentationMask);
+ STATIC_ASSERT(kConsStringTag < kExternalStringTag);
+ STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
+ __ cmp(result_, kExternalStringTag);
+ __ j(greater, &sliced_string, Label::kNear);
+ __ j(equal, &call_runtime_);
// ConsString.
// Check whether the right hand side is the empty string (i.e. if
// this is really a flat string in a cons string). If that is not
// the case we would rather go to the runtime system now to flatten
// the string.
+ Label assure_seq_string;
__ cmp(FieldOperand(object_, ConsString::kSecondOffset),
Immediate(masm->isolate()->factory()->empty_string()));
__ j(not_equal, &call_runtime_);
// Get the first of the two strings and load its instance type.
__ mov(object_, FieldOperand(object_, ConsString::kFirstOffset));
+ __ jmp(&assure_seq_string, Label::kNear);
+
+ // SlicedString, unpack and add offset.
+ __ bind(&sliced_string);
+ __ add(scratch_, FieldOperand(object_, SlicedString::kOffsetOffset));
+ __ mov(object_, FieldOperand(object_, SlicedString::kParentOffset));
+
+ // Assure that we are dealing with a sequential string. Go to runtime if not.
+ __ bind(&assure_seq_string);
__ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
__ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
- // If the first cons component is also non-flat, then go to runtime.
STATIC_ASSERT(kSeqStringTag == 0);
__ test(result_, Immediate(kStringRepresentationMask));
__ j(not_zero, &call_runtime_);
+ __ jmp(&flat_string, Label::kNear);
// Check for 1-byte or 2-byte string.
__ bind(&flat_string);
STATIC_ASSERT(kAsciiStringTag != 0);
__ test(result_, Immediate(kStringEncodingMask));
- __ j(not_zero, &ascii_string);
+ __ j(not_zero, &ascii_string, Label::kNear);
// 2-byte string.
// Load the 2-byte character code into the result register.
@@ -4801,7 +4917,7 @@
__ movzx_w(result_, FieldOperand(object_,
scratch_, times_1, // Scratch is smi-tagged.
SeqTwoByteString::kHeaderSize));
- __ jmp(&got_char_code);
+ __ jmp(&got_char_code, Label::kNear);
// ASCII string.
// Load the byte into the result register.
@@ -5113,6 +5229,8 @@
__ and_(ecx, kStringRepresentationMask);
__ cmp(ecx, kExternalStringTag);
__ j(equal, &string_add_runtime);
+ // We cannot encounter sliced strings here since:
+ STATIC_ASSERT(SlicedString::kMinLength >= String::kMinNonFlatLength);
// Now check if both strings are ascii strings.
// eax: first string
// ebx: length of resulting flat string as a smi
@@ -5585,7 +5703,83 @@
__ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
__ Set(ecx, Immediate(2));
- __ bind(&result_longer_than_two);
+ if (FLAG_string_slices) {
+ Label copy_routine;
+ // If coming from the make_two_character_string path, the string
+ // is too short to be sliced anyways.
+ STATIC_ASSERT(2 < SlicedString::kMinLength);
+ __ jmp(&copy_routine);
+ __ bind(&result_longer_than_two);
+
+ // eax: string
+ // ebx: instance type
+ // ecx: sub string length
+ // edx: from index (smi)
+ Label allocate_slice, sliced_string, seq_string;
+ __ cmp(ecx, SlicedString::kMinLength);
+ // Short slice. Copy instead of slicing.
+ __ j(less, &copy_routine);
+ STATIC_ASSERT(kSeqStringTag == 0);
+ __ test(ebx, Immediate(kStringRepresentationMask));
+ __ j(zero, &seq_string, Label::kNear);
+ STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
+ STATIC_ASSERT(kIsIndirectStringMask != 0);
+ __ test(ebx, Immediate(kIsIndirectStringMask));
+ // External string. Jump to runtime.
+ __ j(zero, &runtime);
+
+ Factory* factory = masm->isolate()->factory();
+ __ test(ebx, Immediate(kSlicedNotConsMask));
+ __ j(not_zero, &sliced_string, Label::kNear);
+ // Cons string. Check whether it is flat, then fetch first part.
+ __ cmp(FieldOperand(eax, ConsString::kSecondOffset),
+ factory->empty_string());
+ __ j(not_equal, &runtime);
+ __ mov(edi, FieldOperand(eax, ConsString::kFirstOffset));
+ __ jmp(&allocate_slice, Label::kNear);
+
+ __ bind(&sliced_string);
+ // Sliced string. Fetch parent and correct start index by offset.
+ __ add(edx, FieldOperand(eax, SlicedString::kOffsetOffset));
+ __ mov(edi, FieldOperand(eax, SlicedString::kParentOffset));
+ __ jmp(&allocate_slice, Label::kNear);
+
+ __ bind(&seq_string);
+ // Sequential string. Just move string to the right register.
+ __ mov(edi, eax);
+
+ __ bind(&allocate_slice);
+ // edi: underlying subject string
+ // ebx: instance type of original subject string
+ // edx: offset
+ // ecx: length
+ // Allocate new sliced string. At this point we do not reload the instance
+ // type including the string encoding because we simply rely on the info
+ // provided by the original string. It does not matter if the original
+ // string's encoding is wrong because we always have to recheck encoding of
+ // the newly created string's parent anyways due to externalized strings.
+ Label two_byte_slice, set_slice_header;
+ STATIC_ASSERT(kAsciiStringTag != 0);
+ __ test(ebx, Immediate(kAsciiStringTag));
+ __ j(zero, &two_byte_slice, Label::kNear);
+ __ AllocateAsciiSlicedString(eax, ebx, no_reg, &runtime);
+ __ jmp(&set_slice_header, Label::kNear);
+ __ bind(&two_byte_slice);
+ __ AllocateSlicedString(eax, ebx, no_reg, &runtime);
+ __ bind(&set_slice_header);
+ __ mov(FieldOperand(eax, SlicedString::kOffsetOffset), edx);
+ __ SmiTag(ecx);
+ __ mov(FieldOperand(eax, SlicedString::kLengthOffset), ecx);
+ __ mov(FieldOperand(eax, SlicedString::kParentOffset), edi);
+ __ mov(FieldOperand(eax, SlicedString::kHashFieldOffset),
+ Immediate(String::kEmptyHashField));
+ __ jmp(&return_eax);
+
+ __ bind(&copy_routine);
+ } else {
+ __ bind(&result_longer_than_two);
+ }
+
// eax: string
// ebx: instance type
// ecx: result string length
diff --git a/src/ia32/deoptimizer-ia32.cc b/src/ia32/deoptimizer-ia32.cc
index 4ff1bfc..e23f3e9 100644
--- a/src/ia32/deoptimizer-ia32.cc
+++ b/src/ia32/deoptimizer-ia32.cc
@@ -37,7 +37,7 @@
namespace v8 {
namespace internal {
-int Deoptimizer::table_entry_size_ = 10;
+const int Deoptimizer::table_entry_size_ = 10;
int Deoptimizer::patch_size() {
@@ -601,8 +601,6 @@
output_frame->SetContinuation(
reinterpret_cast<uint32_t>(continuation->entry()));
}
-
- if (output_count_ - 1 == frame_index) iterator->Done();
}
diff --git a/src/ia32/disasm-ia32.cc b/src/ia32/disasm-ia32.cc
index 7a59a4f..a936277 100644
--- a/src/ia32/disasm-ia32.cc
+++ b/src/ia32/disasm-ia32.cc
@@ -54,7 +54,7 @@
};
-static ByteMnemonic two_operands_instr[] = {
+static const ByteMnemonic two_operands_instr[] = {
{0x03, "add", REG_OPER_OP_ORDER},
{0x09, "or", OPER_REG_OP_ORDER},
{0x0B, "or", REG_OPER_OP_ORDER},
@@ -79,7 +79,7 @@
};
-static ByteMnemonic zero_operands_instr[] = {
+static const ByteMnemonic zero_operands_instr[] = {
{0xC3, "ret", UNSET_OP_ORDER},
{0xC9, "leave", UNSET_OP_ORDER},
{0x90, "nop", UNSET_OP_ORDER},
@@ -98,14 +98,14 @@
};
-static ByteMnemonic call_jump_instr[] = {
+static const ByteMnemonic call_jump_instr[] = {
{0xE8, "call", UNSET_OP_ORDER},
{0xE9, "jmp", UNSET_OP_ORDER},
{-1, "", UNSET_OP_ORDER}
};
-static ByteMnemonic short_immediate_instr[] = {
+static const ByteMnemonic short_immediate_instr[] = {
{0x05, "add", UNSET_OP_ORDER},
{0x0D, "or", UNSET_OP_ORDER},
{0x15, "adc", UNSET_OP_ORDER},
@@ -117,7 +117,7 @@
};
-static const char* jump_conditional_mnem[] = {
+static const char* const jump_conditional_mnem[] = {
/*0*/ "jo", "jno", "jc", "jnc",
/*4*/ "jz", "jnz", "jna", "ja",
/*8*/ "js", "jns", "jpe", "jpo",
@@ -125,7 +125,7 @@
};
-static const char* set_conditional_mnem[] = {
+static const char* const set_conditional_mnem[] = {
/*0*/ "seto", "setno", "setc", "setnc",
/*4*/ "setz", "setnz", "setna", "seta",
/*8*/ "sets", "setns", "setpe", "setpo",
@@ -133,7 +133,7 @@
};
-static const char* conditional_move_mnem[] = {
+static const char* const conditional_move_mnem[] = {
/*0*/ "cmovo", "cmovno", "cmovc", "cmovnc",
/*4*/ "cmovz", "cmovnz", "cmovna", "cmova",
/*8*/ "cmovs", "cmovns", "cmovpe", "cmovpo",
@@ -169,7 +169,7 @@
InstructionDesc instructions_[256];
void Clear();
void Init();
- void CopyTable(ByteMnemonic bm[], InstructionType type);
+ void CopyTable(const ByteMnemonic bm[], InstructionType type);
void SetTableRange(InstructionType type,
byte start,
byte end,
@@ -208,7 +208,8 @@
}
-void InstructionTable::CopyTable(ByteMnemonic bm[], InstructionType type) {
+void InstructionTable::CopyTable(const ByteMnemonic bm[],
+ InstructionType type) {
for (int i = 0; bm[i].b >= 0; i++) {
InstructionDesc* id = &instructions_[bm[i].b];
id->mnem = bm[i].mnem;
@@ -1140,7 +1141,17 @@
}
} else if (*data == 0x3A) {
data++;
- if (*data == 0x16) {
+ if (*data == 0x0B) {
+ data++;
+ int mod, regop, rm;
+ get_modrm(*data, &mod, &regop, &rm);
+ int8_t imm8 = static_cast<int8_t>(data[1]);
+ AppendToBuffer("roundsd %s,%s,%d",
+ NameOfXMMRegister(regop),
+ NameOfXMMRegister(rm),
+ static_cast<int>(imm8));
+ data += 2;
+ } else if (*data == 0x16) {
data++;
int mod, regop, rm;
get_modrm(*data, &mod, &regop, &rm);
diff --git a/src/ia32/frames-ia32.h b/src/ia32/frames-ia32.h
index bc65ddf..2f1b2a9 100644
--- a/src/ia32/frames-ia32.h
+++ b/src/ia32/frames-ia32.h
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -58,10 +58,11 @@
class StackHandlerConstants : public AllStatic {
public:
- static const int kNextOffset = 0 * kPointerSize;
- static const int kFPOffset = 1 * kPointerSize;
- static const int kStateOffset = 2 * kPointerSize;
- static const int kPCOffset = 3 * kPointerSize;
+ static const int kNextOffset = 0 * kPointerSize;
+ static const int kContextOffset = 1 * kPointerSize;
+ static const int kFPOffset = 2 * kPointerSize;
+ static const int kStateOffset = 3 * kPointerSize;
+ static const int kPCOffset = 4 * kPointerSize;
static const int kSize = kPCOffset + kPointerSize;
};
diff --git a/src/ia32/full-codegen-ia32.cc b/src/ia32/full-codegen-ia32.cc
index 7633856..df2542e 100644
--- a/src/ia32/full-codegen-ia32.cc
+++ b/src/ia32/full-codegen-ia32.cc
@@ -46,7 +46,6 @@
static unsigned GetPropertyId(Property* property) {
- if (property->is_synthetic()) return AstNode::kNoNumber;
return property->id();
}
@@ -166,6 +165,11 @@
}
}
+ set_stack_height(2 + scope()->num_stack_slots());
+ if (FLAG_verify_stack_height) {
+ verify_stack_height();
+ }
+
bool function_in_register = true;
// Possibly allocate a local context.
@@ -358,6 +362,15 @@
}
+void FullCodeGenerator::verify_stack_height() {
+ ASSERT(FLAG_verify_stack_height);
+ __ sub(Operand(ebp), Immediate(kPointerSize * stack_height()));
+ __ cmp(ebp, Operand(esp));
+ __ Assert(equal, "Full codegen stack height not as expected.");
+ __ add(Operand(ebp), Immediate(kPointerSize * stack_height()));
+}
+
+
void FullCodeGenerator::EffectContext::Plug(Slot* slot) const {
}
@@ -372,6 +385,7 @@
MemOperand slot_operand = codegen()->EmitSlotSearch(slot, result_register());
// Memory operands can be pushed directly.
__ push(slot_operand);
+ codegen()->increment_stack_height();
}
@@ -425,6 +439,7 @@
} else {
__ push(Immediate(lit));
}
+ codegen()->increment_stack_height();
}
@@ -462,6 +477,7 @@
Register reg) const {
ASSERT(count > 0);
__ Drop(count);
+ codegen()->decrement_stack_height(count);
}
@@ -471,6 +487,7 @@
ASSERT(count > 0);
__ Drop(count);
__ Move(result_register(), reg);
+ codegen()->decrement_stack_height(count);
}
@@ -479,6 +496,7 @@
ASSERT(count > 0);
if (count > 1) __ Drop(count - 1);
__ mov(Operand(esp, 0), reg);
+ codegen()->decrement_stack_height(count - 1);
}
@@ -490,6 +508,7 @@
__ Move(result_register(), reg);
codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
codegen()->DoTest(this);
+ codegen()->decrement_stack_height(count);
}
@@ -523,6 +542,7 @@
__ bind(materialize_false);
__ push(Immediate(isolate()->factory()->false_value()));
__ bind(&done);
+ codegen()->increment_stack_height();
}
@@ -550,6 +570,7 @@
? isolate()->factory()->true_value()
: isolate()->factory()->false_value();
__ push(Immediate(value));
+ codegen()->increment_stack_height();
}
@@ -572,7 +593,7 @@
Label* fall_through) {
ToBooleanStub stub(result_register());
__ push(result_register());
- __ CallStub(&stub);
+ __ CallStub(&stub, condition->test_id());
__ test(result_register(), Operand(result_register()));
// The stub returns nonzero for true.
Split(not_zero, if_true, if_false, fall_through);
@@ -668,97 +689,73 @@
Comment cmnt(masm_, "[ Declaration");
ASSERT(variable != NULL); // Must have been resolved.
Slot* slot = variable->AsSlot();
- Property* prop = variable->AsProperty();
-
- if (slot != NULL) {
- switch (slot->type()) {
- case Slot::PARAMETER:
- case Slot::LOCAL:
- if (mode == Variable::CONST) {
- __ mov(Operand(ebp, SlotOffset(slot)),
- Immediate(isolate()->factory()->the_hole_value()));
- } else if (function != NULL) {
- VisitForAccumulatorValue(function);
- __ mov(Operand(ebp, SlotOffset(slot)), result_register());
- }
- break;
-
- case Slot::CONTEXT:
- // We bypass the general EmitSlotSearch because we know more about
- // this specific context.
-
- // The variable in the decl always resides in the current function
- // context.
- ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
- if (FLAG_debug_code) {
- // Check that we're not inside a with or catch context.
- __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
- __ cmp(ebx, isolate()->factory()->with_context_map());
- __ Check(not_equal, "Declaration in with context.");
- __ cmp(ebx, isolate()->factory()->catch_context_map());
- __ Check(not_equal, "Declaration in catch context.");
- }
- if (mode == Variable::CONST) {
- __ mov(ContextOperand(esi, slot->index()),
- Immediate(isolate()->factory()->the_hole_value()));
- // No write barrier since the hole value is in old space.
- } else if (function != NULL) {
- VisitForAccumulatorValue(function);
- __ mov(ContextOperand(esi, slot->index()), result_register());
- int offset = Context::SlotOffset(slot->index());
- __ mov(ebx, esi);
- __ RecordWrite(ebx, offset, result_register(), ecx);
- }
- break;
-
- case Slot::LOOKUP: {
- __ push(esi);
- __ push(Immediate(variable->name()));
- // Declaration nodes are always introduced in one of two modes.
- ASSERT(mode == Variable::VAR || mode == Variable::CONST);
- PropertyAttributes attr = (mode == Variable::VAR) ? NONE : READ_ONLY;
- __ push(Immediate(Smi::FromInt(attr)));
- // Push initial value, if any.
- // Note: For variables we must not push an initial value (such as
- // 'undefined') because we may have a (legal) redeclaration and we
- // must not destroy the current value.
- if (mode == Variable::CONST) {
- __ push(Immediate(isolate()->factory()->the_hole_value()));
- } else if (function != NULL) {
- VisitForStackValue(function);
- } else {
- __ push(Immediate(Smi::FromInt(0))); // No initial value!
- }
- __ CallRuntime(Runtime::kDeclareContextSlot, 4);
- break;
+ ASSERT(slot != NULL);
+ switch (slot->type()) {
+ case Slot::PARAMETER:
+ case Slot::LOCAL:
+ if (function != NULL) {
+ VisitForAccumulatorValue(function);
+ __ mov(Operand(ebp, SlotOffset(slot)), result_register());
+ } else if (mode == Variable::CONST || mode == Variable::LET) {
+ __ mov(Operand(ebp, SlotOffset(slot)),
+ Immediate(isolate()->factory()->the_hole_value()));
}
- }
+ break;
- } else if (prop != NULL) {
- // A const declaration aliasing a parameter is an illegal redeclaration.
- ASSERT(mode != Variable::CONST);
- if (function != NULL) {
- // We are declaring a function that rewrites to a property.
- // Use (keyed) IC to set the initial value. We cannot visit the
- // rewrite because it's shared and we risk recording duplicate AST
- // IDs for bailouts from optimized code.
- ASSERT(prop->obj()->AsVariableProxy() != NULL);
- { AccumulatorValueContext for_object(this);
- EmitVariableLoad(prop->obj()->AsVariableProxy());
+ case Slot::CONTEXT:
+ // We bypass the general EmitSlotSearch because we know more about
+ // this specific context.
+
+ // The variable in the decl always resides in the current function
+ // context.
+ ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
+ if (FLAG_debug_code) {
+ // Check that we're not inside a with or catch context.
+ __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
+ __ cmp(ebx, isolate()->factory()->with_context_map());
+ __ Check(not_equal, "Declaration in with context.");
+ __ cmp(ebx, isolate()->factory()->catch_context_map());
+ __ Check(not_equal, "Declaration in catch context.");
}
+ if (function != NULL) {
+ VisitForAccumulatorValue(function);
+ __ mov(ContextOperand(esi, slot->index()), result_register());
+ int offset = Context::SlotOffset(slot->index());
+ __ mov(ebx, esi);
+ __ RecordWrite(ebx, offset, result_register(), ecx);
+ } else if (mode == Variable::CONST || mode == Variable::LET) {
+ __ mov(ContextOperand(esi, slot->index()),
+ Immediate(isolate()->factory()->the_hole_value()));
+ // No write barrier since the hole value is in old space.
+ }
+ break;
- __ push(eax);
- VisitForAccumulatorValue(function);
- __ pop(edx);
-
- ASSERT(prop->key()->AsLiteral() != NULL &&
- prop->key()->AsLiteral()->handle()->IsSmi());
- __ SafeSet(ecx, Immediate(prop->key()->AsLiteral()->handle()));
-
- Handle<Code> ic = is_strict_mode()
- ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
- : isolate()->builtins()->KeyedStoreIC_Initialize();
- __ call(ic);
+ case Slot::LOOKUP: {
+ __ push(esi);
+ __ push(Immediate(variable->name()));
+ // Declaration nodes are always introduced in one of two modes.
+ ASSERT(mode == Variable::VAR ||
+ mode == Variable::CONST ||
+ mode == Variable::LET);
+ PropertyAttributes attr = (mode == Variable::CONST) ? READ_ONLY : NONE;
+ __ push(Immediate(Smi::FromInt(attr)));
+ // Push initial value, if any.
+ // Note: For variables we must not push an initial value (such as
+ // 'undefined') because we may have a (legal) redeclaration and we
+ // must not destroy the current value.
+ increment_stack_height(3);
+ if (function != NULL) {
+ VisitForStackValue(function);
+ } else if (mode == Variable::CONST || mode == Variable::LET) {
+ __ push(Immediate(isolate()->factory()->the_hole_value()));
+ increment_stack_height();
+ } else {
+ __ push(Immediate(Smi::FromInt(0))); // No initial value!
+ increment_stack_height();
+ }
+ __ CallRuntime(Runtime::kDeclareContextSlot, 4);
+ decrement_stack_height(4);
+ break;
}
}
}
@@ -785,6 +782,7 @@
Breakable nested_statement(this, stmt);
SetStatementPosition(stmt);
+ int switch_clause_stack_height = stack_height();
// Keep the switch value on the stack until a case matches.
VisitForStackValue(stmt->tag());
PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
@@ -844,11 +842,12 @@
__ bind(&next_test);
__ Drop(1); // Switch value is no longer needed.
if (default_clause == NULL) {
- __ jmp(nested_statement.break_target());
+ __ jmp(nested_statement.break_label());
} else {
__ jmp(default_clause->body_target());
}
+ set_stack_height(switch_clause_stack_height);
// Compile all the case bodies.
for (int i = 0; i < clauses->length(); i++) {
Comment cmnt(masm_, "[ Case body");
@@ -858,7 +857,7 @@
VisitStatements(clause->statements());
}
- __ bind(nested_statement.break_target());
+ __ bind(nested_statement.break_label());
PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}
@@ -890,6 +889,7 @@
__ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
__ bind(&done_convert);
__ push(eax);
+ increment_stack_height();
// Check cache validity in generated code. This is a fast case for
// the JSObject::IsSimpleEnum cache validity checks. If we cannot
@@ -973,11 +973,13 @@
__ push(eax); // Fixed array length (as smi).
__ push(Immediate(Smi::FromInt(0))); // Initial index.
+ // 1 ~ The object has already been pushed.
+ increment_stack_height(ForIn::kElementCount - 1);
// Generate code for doing the condition check.
__ bind(&loop);
__ mov(eax, Operand(esp, 0 * kPointerSize)); // Get the current index.
__ cmp(eax, Operand(esp, 1 * kPointerSize)); // Compare to the array length.
- __ j(above_equal, loop_statement.break_target());
+ __ j(above_equal, loop_statement.break_label());
// Get the current entry of the array into register ebx.
__ mov(ebx, Operand(esp, 2 * kPointerSize));
@@ -1001,7 +1003,7 @@
__ push(ebx); // Current entry.
__ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
__ test(eax, Operand(eax));
- __ j(equal, loop_statement.continue_target());
+ __ j(equal, loop_statement.continue_label());
__ mov(ebx, Operand(eax));
// Update the 'each' property or variable from the possibly filtered
@@ -1018,16 +1020,17 @@
// Generate code for going to the next element by incrementing the
// index (smi) stored on top of the stack.
- __ bind(loop_statement.continue_target());
+ __ bind(loop_statement.continue_label());
__ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));
EmitStackCheck(stmt);
__ jmp(&loop);
// Remove the pointers stored on the stack.
- __ bind(loop_statement.break_target());
+ __ bind(loop_statement.break_label());
__ add(Operand(esp), Immediate(5 * kPointerSize));
+ decrement_stack_height(ForIn::kElementCount);
// Exit and decrement the loop depth.
__ bind(&exit);
decrement_loop_depth();
@@ -1265,6 +1268,18 @@
__ mov(eax, isolate()->factory()->undefined_value());
__ bind(&done);
context()->Plug(eax);
+ } else if (var->mode() == Variable::LET) {
+ // Let bindings may be the hole value if they have not been initialized.
+ // Throw a type error in this case.
+ Label done;
+ MemOperand slot_operand = EmitSlotSearch(slot, eax);
+ __ mov(eax, slot_operand);
+ __ cmp(eax, isolate()->factory()->the_hole_value());
+ __ j(not_equal, &done, Label::kNear);
+ __ push(Immediate(var->name()));
+ __ CallRuntime(Runtime::kThrowReferenceError, 1);
+ __ bind(&done);
+ context()->Plug(eax);
} else {
context()->Plug(slot);
}
@@ -1363,6 +1378,7 @@
if (!result_saved) {
__ push(eax); // Save result on the stack
result_saved = true;
+ increment_stack_height();
}
switch (property->kind()) {
case ObjectLiteral::Property::MATERIALIZED_LITERAL:
@@ -1387,6 +1403,7 @@
// Fall through.
case ObjectLiteral::Property::PROTOTYPE:
__ push(Operand(esp, 0)); // Duplicate receiver.
+ increment_stack_height();
VisitForStackValue(key);
VisitForStackValue(value);
if (property->emit_store()) {
@@ -1395,16 +1412,20 @@
} else {
__ Drop(3);
}
+ decrement_stack_height(3);
break;
case ObjectLiteral::Property::SETTER:
case ObjectLiteral::Property::GETTER:
__ push(Operand(esp, 0)); // Duplicate receiver.
+ increment_stack_height();
VisitForStackValue(key);
__ push(Immediate(property->kind() == ObjectLiteral::Property::SETTER ?
Smi::FromInt(1) :
Smi::FromInt(0)));
+ increment_stack_height();
VisitForStackValue(value);
__ CallRuntime(Runtime::kDefineAccessor, 4);
+ decrement_stack_height(4);
break;
default: UNREACHABLE();
}
@@ -1467,6 +1488,7 @@
if (!result_saved) {
__ push(eax);
result_saved = true;
+ increment_stack_height();
}
VisitForAccumulatorValue(subexpr);
@@ -1495,7 +1517,9 @@
// Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
// on the left-hand side.
if (!expr->target()->IsValidLeftHandSide()) {
- VisitForEffect(expr->target());
+ ASSERT(expr->target()->AsThrow() != NULL);
+ VisitInCurrentContext(expr->target()); // Throw does not plug the context
+ context()->Plug(eax);
return;
}
@@ -1520,6 +1544,7 @@
// We need the receiver both on the stack and in the accumulator.
VisitForAccumulatorValue(property->obj());
__ push(result_register());
+ increment_stack_height();
} else {
VisitForStackValue(property->obj());
}
@@ -1530,6 +1555,7 @@
VisitForAccumulatorValue(property->key());
__ mov(edx, Operand(esp, 0));
__ push(eax);
+ increment_stack_height();
} else {
VisitForStackValue(property->obj());
VisitForStackValue(property->key());
@@ -1541,7 +1567,8 @@
// For compound assignments we need another deoptimization point after the
// variable/property load.
if (expr->is_compound()) {
- { AccumulatorValueContext context(this);
+ AccumulatorValueContext result_context(this);
+ { AccumulatorValueContext left_operand_context(this);
switch (assign_type) {
case VARIABLE:
EmitVariableLoad(expr->target()->AsVariableProxy());
@@ -1560,13 +1587,13 @@
Token::Value op = expr->binary_op();
__ push(eax); // Left operand goes on the stack.
+ increment_stack_height();
VisitForAccumulatorValue(expr->value());
OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
? OVERWRITE_RIGHT
: NO_OVERWRITE;
SetSourcePosition(expr->position() + 1);
- AccumulatorValueContext context(this);
if (ShouldInlineSmiCase(op)) {
EmitInlineSmiBinaryOp(expr->binary_operation(),
op,
@@ -1630,6 +1657,7 @@
// stack. Right operand is in eax.
Label smi_case, done, stub_call;
__ pop(edx);
+ decrement_stack_height();
__ mov(ecx, eax);
__ or_(eax, Operand(edx));
JumpPatchSite patch_site(masm_);
@@ -1721,6 +1749,7 @@
Token::Value op,
OverwriteMode mode) {
__ pop(edx);
+ decrement_stack_height();
BinaryOpStub stub(op, mode);
JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
__ call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
@@ -1733,7 +1762,9 @@
// Invalid left-hand sides are rewritten to have a 'throw
// ReferenceError' on the left-hand side.
if (!expr->IsValidLeftHandSide()) {
- VisitForEffect(expr);
+ ASSERT(expr->AsThrow() != NULL);
+ VisitInCurrentContext(expr); // Throw does not plug the context
+ context()->Plug(eax);
return;
}
@@ -1757,9 +1788,11 @@
}
case NAMED_PROPERTY: {
__ push(eax); // Preserve value.
+ increment_stack_height();
VisitForAccumulatorValue(prop->obj());
__ mov(edx, eax);
__ pop(eax); // Restore value.
+ decrement_stack_height();
__ mov(ecx, prop->key()->AsLiteral()->handle());
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->StoreIC_Initialize_Strict()
@@ -1769,21 +1802,14 @@
}
case KEYED_PROPERTY: {
__ push(eax); // Preserve value.
- if (prop->is_synthetic()) {
- ASSERT(prop->obj()->AsVariableProxy() != NULL);
- ASSERT(prop->key()->AsLiteral() != NULL);
- { AccumulatorValueContext for_object(this);
- EmitVariableLoad(prop->obj()->AsVariableProxy());
- }
- __ mov(edx, eax);
- __ SafeSet(ecx, Immediate(prop->key()->AsLiteral()->handle()));
- } else {
- VisitForStackValue(prop->obj());
- VisitForAccumulatorValue(prop->key());
- __ mov(ecx, eax);
- __ pop(edx);
- }
+ increment_stack_height();
+ VisitForStackValue(prop->obj());
+ VisitForAccumulatorValue(prop->key());
+ __ mov(ecx, eax);
+ __ pop(edx);
+ decrement_stack_height();
__ pop(eax); // Restore value.
+ decrement_stack_height();
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
: isolate()->builtins()->KeyedStoreIC_Initialize();
@@ -1841,6 +1867,57 @@
}
__ bind(&skip);
+ } else if (var->mode() == Variable::LET && op != Token::INIT_LET) {
+ // Perform the assignment for non-const variables. Const assignments
+ // are simply skipped.
+ Slot* slot = var->AsSlot();
+ switch (slot->type()) {
+ case Slot::PARAMETER:
+ case Slot::LOCAL: {
+ Label assign;
+ // Check for an initialized let binding.
+ __ mov(edx, Operand(ebp, SlotOffset(slot)));
+ __ cmp(edx, isolate()->factory()->the_hole_value());
+ __ j(not_equal, &assign);
+ __ push(Immediate(var->name()));
+ __ CallRuntime(Runtime::kThrowReferenceError, 1);
+ // Perform the assignment.
+ __ bind(&assign);
+ __ mov(Operand(ebp, SlotOffset(slot)), eax);
+ break;
+ }
+
+ case Slot::CONTEXT: {
+ // Let variables may be the hole value if they have not been
+ // initialized. Throw a type error in this case.
+ Label assign;
+ MemOperand target = EmitSlotSearch(slot, ecx);
+ // Check for an initialized let binding.
+ __ mov(edx, target);
+ __ cmp(edx, isolate()->factory()->the_hole_value());
+ __ j(not_equal, &assign, Label::kNear);
+ __ push(Immediate(var->name()));
+ __ CallRuntime(Runtime::kThrowReferenceError, 1);
+ // Perform the assignment.
+ __ bind(&assign);
+ __ mov(target, eax);
+ // The value of the assignment is in eax. RecordWrite clobbers its
+ // register arguments.
+ __ mov(edx, eax);
+ int offset = Context::SlotOffset(slot->index());
+ __ RecordWrite(ecx, offset, edx, ebx);
+ break;
+ }
+
+ case Slot::LOOKUP:
+ // Call the runtime for the assignment.
+ __ push(eax); // Value.
+ __ push(esi); // Context.
+ __ push(Immediate(var->name()));
+ __ push(Immediate(Smi::FromInt(strict_mode_flag())));
+ __ CallRuntime(Runtime::kStoreContextSlot, 4);
+ break;
+ }
} else if (var->mode() != Variable::CONST) {
// Perform the assignment for non-const variables. Const assignments
// are simply skipped.
@@ -1900,6 +1977,7 @@
__ mov(edx, Operand(esp, 0));
} else {
__ pop(edx);
+ decrement_stack_height();
}
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->StoreIC_Initialize_Strict()
@@ -1913,6 +1991,7 @@
__ CallRuntime(Runtime::kToFastProperties, 1);
__ pop(eax);
__ Drop(1);
+ decrement_stack_height();
}
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
context()->Plug(eax);
@@ -1934,10 +2013,12 @@
}
__ pop(ecx);
+ decrement_stack_height();
if (expr->ends_initialization_block()) {
__ mov(edx, Operand(esp, 0)); // Leave receiver on the stack for later.
} else {
__ pop(edx);
+ decrement_stack_height();
}
// Record source code position before IC call.
SetSourcePosition(expr->position());
@@ -1953,6 +2034,7 @@
__ push(edx);
__ CallRuntime(Runtime::kToFastProperties, 1);
__ pop(eax);
+ decrement_stack_height();
}
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
@@ -1972,6 +2054,7 @@
VisitForStackValue(expr->obj());
VisitForAccumulatorValue(expr->key());
__ pop(edx);
+ decrement_stack_height();
EmitKeyedPropertyLoad(expr);
context()->Plug(eax);
}
@@ -1999,6 +2082,7 @@
RecordJSReturnSite(expr);
// Restore context register.
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
+ decrement_stack_height(arg_count + 1);
context()->Plug(eax);
}
@@ -2013,6 +2097,7 @@
__ pop(ecx);
__ push(eax);
__ push(ecx);
+ increment_stack_height();
// Load the arguments.
ZoneList<Expression*>* args = expr->arguments();
@@ -2032,6 +2117,7 @@
RecordJSReturnSite(expr);
// Restore context register.
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
+ decrement_stack_height(arg_count + 1);
context()->DropAndPlug(1, eax); // Drop the key still on the stack.
}
@@ -2053,6 +2139,8 @@
RecordJSReturnSite(expr);
// Restore context register.
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
+
+ decrement_stack_height(arg_count + 1);
context()->DropAndPlug(1, eax);
}
@@ -2100,7 +2188,7 @@
VisitForStackValue(fun);
// Reserved receiver slot.
__ push(Immediate(isolate()->factory()->undefined_value()));
-
+ increment_stack_height();
// Push the arguments.
for (int i = 0; i < arg_count; i++) {
VisitForStackValue(args->at(i));
@@ -2144,10 +2232,12 @@
RecordJSReturnSite(expr);
// Restore context register.
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
+ decrement_stack_height(arg_count + 1); // Function is left on the stack.
context()->DropAndPlug(1, eax);
} else if (var != NULL && !var->is_this() && var->is_global()) {
// Push global object as receiver for the call IC.
__ push(GlobalObjectOperand());
+ increment_stack_height();
EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT);
} else if (var != NULL && var->AsSlot() != NULL &&
var->AsSlot()->type() == Slot::LOOKUP) {
@@ -2170,7 +2260,9 @@
__ push(Immediate(var->name()));
__ CallRuntime(Runtime::kLoadContextSlot, 2);
__ push(eax); // Function.
+ increment_stack_height();
__ push(edx); // Receiver.
+ increment_stack_height();
// If fast case code has been generated, emit code to push the
// function and receiver and have the slow path jump around this
@@ -2179,7 +2271,7 @@
Label call;
__ jmp(&call);
__ bind(&done);
- // Push function.
+ // Push function. Stack height already incremented in slow case above.
__ push(eax);
// The receiver is implicitly the global receiver. Indicate this
// by passing the hole to the call function stub.
@@ -2203,38 +2295,10 @@
EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET);
} else {
// Call to a keyed property.
- // For a synthetic property use keyed load IC followed by function call,
- // for a regular property use EmitKeyedCallWithIC.
- if (prop->is_synthetic()) {
- // Do not visit the object and key subexpressions (they are shared
- // by all occurrences of the same rewritten parameter).
- ASSERT(prop->obj()->AsVariableProxy() != NULL);
- ASSERT(prop->obj()->AsVariableProxy()->var()->AsSlot() != NULL);
- Slot* slot = prop->obj()->AsVariableProxy()->var()->AsSlot();
- MemOperand operand = EmitSlotSearch(slot, edx);
- __ mov(edx, operand);
-
- ASSERT(prop->key()->AsLiteral() != NULL);
- ASSERT(prop->key()->AsLiteral()->handle()->IsSmi());
- __ mov(eax, prop->key()->AsLiteral()->handle());
-
- // Record source code position for IC call.
- SetSourcePosition(prop->position());
-
- Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
- __ call(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
- // Push result (function).
- __ push(eax);
- // Push Global receiver.
- __ mov(ecx, GlobalObjectOperand());
- __ push(FieldOperand(ecx, GlobalObject::kGlobalReceiverOffset));
- EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
- } else {
- { PreservePositionScope scope(masm()->positions_recorder());
- VisitForStackValue(prop->obj());
- }
- EmitKeyedCallWithIC(expr, prop->key());
+ { PreservePositionScope scope(masm()->positions_recorder());
+ VisitForStackValue(prop->obj());
}
+ EmitKeyedCallWithIC(expr, prop->key());
}
} else {
{ PreservePositionScope scope(masm()->positions_recorder());
@@ -2243,6 +2307,7 @@
// Load global receiver object.
__ mov(ebx, GlobalObjectOperand());
__ push(FieldOperand(ebx, GlobalObject::kGlobalReceiverOffset));
+ increment_stack_height();
// Emit function call.
EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
}
@@ -2283,6 +2348,8 @@
Handle<Code> construct_builtin =
isolate()->builtins()->JSConstructCall();
__ call(construct_builtin, RelocInfo::CONSTRUCT_CALL);
+
+ decrement_stack_height(arg_count + 1);
context()->Plug(eax);
}
@@ -2595,6 +2662,7 @@
&if_true, &if_false, &fall_through);
__ pop(ebx);
+ decrement_stack_height();
__ cmp(eax, Operand(ebx));
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(equal, if_true, if_false, fall_through);
@@ -2709,6 +2777,7 @@
VisitForStackValue(args->at(1));
VisitForStackValue(args->at(2));
__ CallRuntime(Runtime::kLog, 2);
+ decrement_stack_height(2);
}
// Finally, we're expected to leave a value on the top of the stack.
__ mov(eax, isolate()->factory()->undefined_value());
@@ -2774,6 +2843,7 @@
VisitForStackValue(args->at(1));
VisitForStackValue(args->at(2));
__ CallStub(&stub);
+ decrement_stack_height(3);
context()->Plug(eax);
}
@@ -2787,6 +2857,7 @@
VisitForStackValue(args->at(2));
VisitForStackValue(args->at(3));
__ CallStub(&stub);
+ decrement_stack_height(4);
context()->Plug(eax);
}
@@ -2821,6 +2892,7 @@
} else {
__ CallRuntime(Runtime::kMath_pow, 2);
}
+ decrement_stack_height(2);
context()->Plug(eax);
}
@@ -2831,6 +2903,7 @@
VisitForStackValue(args->at(0)); // Load the object.
VisitForAccumulatorValue(args->at(1)); // Load the value.
__ pop(ebx); // eax = value. ebx = object.
+ decrement_stack_height();
Label done;
// If the object is a smi, return the value.
@@ -2860,6 +2933,7 @@
NumberToStringStub stub;
__ CallStub(&stub);
+ decrement_stack_height();
context()->Plug(eax);
}
@@ -2894,6 +2968,7 @@
Register result = edx;
__ pop(object);
+ decrement_stack_height();
Label need_conversion;
Label index_out_of_range;
@@ -2942,6 +3017,7 @@
Register result = eax;
__ pop(object);
+ decrement_stack_height();
Label need_conversion;
Label index_out_of_range;
@@ -2986,6 +3062,7 @@
StringAddStub stub(NO_STRING_ADD_FLAGS);
__ CallStub(&stub);
+ decrement_stack_height(2);
context()->Plug(eax);
}
@@ -2998,6 +3075,7 @@
StringCompareStub stub;
__ CallStub(&stub);
+ decrement_stack_height(2);
context()->Plug(eax);
}
@@ -3009,6 +3087,7 @@
ASSERT(args->length() == 1);
VisitForStackValue(args->at(0));
__ CallStub(&stub);
+ decrement_stack_height();
context()->Plug(eax);
}
@@ -3020,6 +3099,7 @@
ASSERT(args->length() == 1);
VisitForStackValue(args->at(0));
__ CallStub(&stub);
+ decrement_stack_height();
context()->Plug(eax);
}
@@ -3031,6 +3111,7 @@
ASSERT(args->length() == 1);
VisitForStackValue(args->at(0));
__ CallStub(&stub);
+ decrement_stack_height();
context()->Plug(eax);
}
@@ -3040,6 +3121,7 @@
ASSERT(args->length() == 1);
VisitForStackValue(args->at(0));
__ CallRuntime(Runtime::kMath_sqrt, 1);
+ decrement_stack_height();
context()->Plug(eax);
}
@@ -3059,6 +3141,7 @@
__ InvokeFunction(edi, count, CALL_FUNCTION,
NullCallWrapper(), CALL_AS_METHOD);
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
+ decrement_stack_height(arg_count + 1);
context()->Plug(eax);
}
@@ -3071,6 +3154,7 @@
VisitForStackValue(args->at(1));
VisitForStackValue(args->at(2));
__ CallStub(&stub);
+ decrement_stack_height(3);
context()->Plug(eax);
}
@@ -3144,6 +3228,7 @@
__ CallRuntime(Runtime::kSwapElements, 3);
__ bind(&done);
+ decrement_stack_height(3);
context()->Plug(eax);
}
@@ -3177,7 +3262,7 @@
Label done, not_found;
// tmp now holds finger offset as a smi.
- ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
+ STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
__ mov(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
__ cmp(key, CodeGenerator::FixedArrayElementOperand(cache, tmp));
__ j(not_equal, &not_found);
@@ -3229,6 +3314,7 @@
__ mov(eax, Immediate(isolate()->factory()->true_value()));
__ bind(&done);
+ decrement_stack_height();
context()->Plug(eax);
}
@@ -3532,43 +3618,11 @@
__ add(Operand(esp), Immediate(3 * kPointerSize));
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
+ decrement_stack_height();
context()->Plug(eax);
}
-void FullCodeGenerator::EmitIsNativeOrStrictMode(ZoneList<Expression*>* args) {
- ASSERT(args->length() == 1);
-
- // Load the function into eax.
- VisitForAccumulatorValue(args->at(0));
-
- // Prepare for the test.
- Label materialize_true, materialize_false;
- Label* if_true = NULL;
- Label* if_false = NULL;
- Label* fall_through = NULL;
- context()->PrepareTest(&materialize_true, &materialize_false,
- &if_true, &if_false, &fall_through);
-
- // Test for strict mode function.
- __ mov(ecx, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
- __ test_b(FieldOperand(ecx, SharedFunctionInfo::kStrictModeByteOffset),
- 1 << SharedFunctionInfo::kStrictModeBitWithinByte);
- __ j(not_equal, if_true);
-
- // Test for native function.
- __ test_b(FieldOperand(ecx, SharedFunctionInfo::kNativeByteOffset),
- 1 << SharedFunctionInfo::kNativeBitWithinByte);
- __ j(not_equal, if_true);
-
- // Not native or strict-mode function.
- __ jmp(if_false);
-
- PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
- context()->Plug(if_true, if_false);
-}
-
-
void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
Handle<String> name = expr->name();
if (name->length() > 0 && name->Get(0) == '_') {
@@ -3584,6 +3638,7 @@
// Prepare for calling JS runtime function.
__ mov(eax, GlobalObjectOperand());
__ push(FieldOperand(eax, GlobalObject::kBuiltinsOffset));
+ increment_stack_height();
}
// Push the arguments ("left-to-right").
@@ -3606,6 +3661,11 @@
// Call the C runtime function.
__ CallRuntime(expr->function(), arg_count);
}
+ decrement_stack_height(arg_count);
+ if (expr->is_jsruntime()) {
+ decrement_stack_height();
+ }
+
context()->Plug(eax);
}
@@ -3618,17 +3678,12 @@
Variable* var = expr->expression()->AsVariableProxy()->AsVariable();
if (prop != NULL) {
- if (prop->is_synthetic()) {
- // Result of deleting parameters is false, even when they rewrite
- // to accesses on the arguments object.
- context()->Plug(false);
- } else {
- VisitForStackValue(prop->obj());
- VisitForStackValue(prop->key());
- __ push(Immediate(Smi::FromInt(strict_mode_flag())));
- __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
- context()->Plug(eax);
- }
+ VisitForStackValue(prop->obj());
+ VisitForStackValue(prop->key());
+ __ push(Immediate(Smi::FromInt(strict_mode_flag())));
+ __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
+ decrement_stack_height(2);
+ context()->Plug(eax);
} else if (var != NULL) {
// Delete of an unqualified identifier is disallowed in strict mode
// but "delete this" is.
@@ -3696,6 +3751,7 @@
VisitForTypeofValue(expr->expression());
}
__ CallRuntime(Runtime::kTypeof, 1);
+ decrement_stack_height();
context()->Plug(eax);
break;
}
@@ -3728,7 +3784,6 @@
void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
const char* comment) {
- // TODO(svenpanne): Allowing format strings in Comment would be nice here...
Comment cmt(masm_, comment);
bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
UnaryOverwriteMode overwrite =
@@ -3750,7 +3805,10 @@
// Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
// as the left-hand side.
if (!expr->expression()->IsValidLeftHandSide()) {
- VisitForEffect(expr->expression());
+ ASSERT(expr->expression()->AsThrow() != NULL);
+ VisitInCurrentContext(expr->expression());
+ // Visiting Throw does not plug the context.
+ context()->Plug(eax);
return;
}
@@ -3775,17 +3833,20 @@
// Reserve space for result of postfix operation.
if (expr->is_postfix() && !context()->IsEffect()) {
__ push(Immediate(Smi::FromInt(0)));
+ increment_stack_height();
}
if (assign_type == NAMED_PROPERTY) {
// Put the object both on the stack and in the accumulator.
VisitForAccumulatorValue(prop->obj());
__ push(eax);
+ increment_stack_height();
EmitNamedPropertyLoad(prop);
} else {
VisitForStackValue(prop->obj());
VisitForAccumulatorValue(prop->key());
__ mov(edx, Operand(esp, 0));
__ push(eax);
+ increment_stack_height();
EmitKeyedPropertyLoad(prop);
}
}
@@ -3816,6 +3877,7 @@
switch (assign_type) {
case VARIABLE:
__ push(eax);
+ increment_stack_height();
break;
case NAMED_PROPERTY:
__ mov(Operand(esp, kPointerSize), eax);
@@ -3889,6 +3951,7 @@
case NAMED_PROPERTY: {
__ mov(ecx, prop->key()->AsLiteral()->handle());
__ pop(edx);
+ decrement_stack_height();
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->StoreIC_Initialize_Strict()
: isolate()->builtins()->StoreIC_Initialize();
@@ -3906,6 +3969,8 @@
case KEYED_PROPERTY: {
__ pop(ecx);
__ pop(edx);
+ decrement_stack_height();
+ decrement_stack_height();
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
: isolate()->builtins()->KeyedStoreIC_Initialize();
@@ -3993,6 +4058,10 @@
__ j(equal, if_true);
__ cmp(eax, isolate()->factory()->false_value());
Split(equal, if_true, if_false, fall_through);
+ } else if (FLAG_harmony_typeof &&
+ check->Equals(isolate()->heap()->null_symbol())) {
+ __ cmp(eax, isolate()->factory()->null_value());
+ Split(equal, if_true, if_false, fall_through);
} else if (check->Equals(isolate()->heap()->undefined_symbol())) {
__ cmp(eax, isolate()->factory()->undefined_value());
__ j(equal, if_true);
@@ -4008,8 +4077,10 @@
Split(above_equal, if_true, if_false, fall_through);
} else if (check->Equals(isolate()->heap()->object_symbol())) {
__ JumpIfSmi(eax, if_false);
- __ cmp(eax, isolate()->factory()->null_value());
- __ j(equal, if_true);
+ if (!FLAG_harmony_typeof) {
+ __ cmp(eax, isolate()->factory()->null_value());
+ __ j(equal, if_true);
+ }
__ CmpObjectType(eax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, edx);
__ j(below, if_false);
__ CmpInstanceType(edx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
@@ -4063,6 +4134,7 @@
case Token::IN:
VisitForStackValue(expr->right());
__ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
+ decrement_stack_height(2);
PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
__ cmp(eax, isolate()->factory()->true_value());
Split(equal, if_true, if_false, fall_through);
@@ -4072,6 +4144,7 @@
VisitForStackValue(expr->right());
InstanceofStub stub(InstanceofStub::kNoFlags);
__ CallStub(&stub);
+ decrement_stack_height(2);
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
__ test(eax, Operand(eax));
// The stub returns 0 for true.
@@ -4113,6 +4186,7 @@
default:
UNREACHABLE();
}
+ decrement_stack_height();
bool inline_smi_code = ShouldInlineSmiCase(op);
JumpPatchSite patch_site(masm_);
@@ -4228,8 +4302,8 @@
ASSERT(!result_register().is(edx));
__ pop(edx);
__ sub(Operand(edx), Immediate(masm_->CodeObject()));
- ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
- ASSERT_EQ(0, kSmiTag);
+ STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
+ STATIC_ASSERT(kSmiTag == 0);
__ SmiTag(edx);
__ push(edx);
// Store result register while executing finally block.
@@ -4250,6 +4324,34 @@
#undef __
+#define __ ACCESS_MASM(masm())
+
+FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
+ int* stack_depth,
+ int* context_length) {
+ // The macros used here must preserve the result register.
+
+ // Because the handler block contains the context of the finally
+ // code, we can restore it directly from there for the finally code
+ // rather than iteratively unwinding contexts via their previous
+ // links.
+ __ Drop(*stack_depth); // Down to the handler block.
+ if (*context_length > 0) {
+ // Restore the context to its dedicated register and the stack.
+ __ mov(esi, Operand(esp, StackHandlerConstants::kContextOffset));
+ __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
+ }
+ __ PopTryHandler();
+ __ call(finally_entry_);
+
+ *stack_depth = 0;
+ *context_length = 0;
+ return previous_;
+}
+
+
+#undef __
+
} } // namespace v8::internal
#endif // V8_TARGET_ARCH_IA32
diff --git a/src/ia32/ic-ia32.cc b/src/ia32/ic-ia32.cc
index 5f143b1..7d3ead2 100644
--- a/src/ia32/ic-ia32.cc
+++ b/src/ia32/ic-ia32.cc
@@ -324,7 +324,7 @@
__ cmp(key, FieldOperand(scratch, FixedArray::kLengthOffset));
__ j(above_equal, out_of_range);
// Fast case: Do the load.
- ASSERT((kPointerSize == 4) && (kSmiTagSize == 1) && (kSmiTag == 0));
+ STATIC_ASSERT((kPointerSize == 4) && (kSmiTagSize == 1) && (kSmiTag == 0));
__ mov(scratch, FieldOperand(scratch, key, times_2, FixedArray::kHeaderSize));
__ cmp(Operand(scratch), Immediate(FACTORY->the_hole_value()));
// In case the loaded value is the_hole we have to consult GetProperty
@@ -358,7 +358,7 @@
__ j(zero, index_string);
// Is the string a symbol?
- ASSERT(kSymbolTag != 0);
+ STATIC_ASSERT(kSymbolTag != 0);
__ test_b(FieldOperand(map, Map::kInstanceTypeOffset), kIsSymbolMask);
__ j(zero, not_symbol);
}
diff --git a/src/ia32/lithium-codegen-ia32.cc b/src/ia32/lithium-codegen-ia32.cc
index 982eddb..32e3074 100644
--- a/src/ia32/lithium-codegen-ia32.cc
+++ b/src/ia32/lithium-codegen-ia32.cc
@@ -1211,17 +1211,11 @@
}
-void LCodeGen::DoFixedArrayLength(LFixedArrayLength* instr) {
+void LCodeGen::DoFixedArrayBaseLength(
+ LFixedArrayBaseLength* instr) {
Register result = ToRegister(instr->result());
Register array = ToRegister(instr->InputAt(0));
- __ mov(result, FieldOperand(array, FixedArray::kLengthOffset));
-}
-
-
-void LCodeGen::DoExternalArrayLength(LExternalArrayLength* instr) {
- Register result = ToRegister(instr->result());
- Register array = ToRegister(instr->InputAt(0));
- __ mov(result, FieldOperand(array, ExternalArray::kLengthOffset));
+ __ mov(result, FieldOperand(array, FixedArrayBase::kLengthOffset));
}
@@ -1393,44 +1387,98 @@
} else {
ASSERT(r.IsTagged());
Register reg = ToRegister(instr->InputAt(0));
- if (instr->hydrogen()->value()->type().IsBoolean()) {
+ HType type = instr->hydrogen()->value()->type();
+ if (type.IsBoolean()) {
__ cmp(reg, factory()->true_value());
EmitBranch(true_block, false_block, equal);
+ } else if (type.IsSmi()) {
+ __ test(reg, Operand(reg));
+ EmitBranch(true_block, false_block, not_equal);
} else {
Label* true_label = chunk_->GetAssemblyLabel(true_block);
Label* false_label = chunk_->GetAssemblyLabel(false_block);
- __ cmp(reg, factory()->undefined_value());
- __ j(equal, false_label);
- __ cmp(reg, factory()->true_value());
- __ j(equal, true_label);
- __ cmp(reg, factory()->false_value());
- __ j(equal, false_label);
- __ test(reg, Operand(reg));
- __ j(equal, false_label);
- __ JumpIfSmi(reg, true_label);
+ ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types();
+ // Avoid deopts in the case where we've never executed this path before.
+ if (expected.IsEmpty()) expected = ToBooleanStub::all_types();
- // Test for double values. Zero is false.
- Label call_stub;
- __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
- factory()->heap_number_map());
- __ j(not_equal, &call_stub, Label::kNear);
- __ fldz();
- __ fld_d(FieldOperand(reg, HeapNumber::kValueOffset));
- __ FCmp();
- __ j(zero, false_label);
- __ jmp(true_label);
+ if (expected.Contains(ToBooleanStub::UNDEFINED)) {
+ // undefined -> false.
+ __ cmp(reg, factory()->undefined_value());
+ __ j(equal, false_label);
+ }
+ if (expected.Contains(ToBooleanStub::BOOLEAN)) {
+ // true -> true.
+ __ cmp(reg, factory()->true_value());
+ __ j(equal, true_label);
+ // false -> false.
+ __ cmp(reg, factory()->false_value());
+ __ j(equal, false_label);
+ }
+ if (expected.Contains(ToBooleanStub::NULL_TYPE)) {
+ // 'null' -> false.
+ __ cmp(reg, factory()->null_value());
+ __ j(equal, false_label);
+ }
- // The conversion stub doesn't cause garbage collections so it's
- // safe to not record a safepoint after the call.
- __ bind(&call_stub);
- ToBooleanStub stub(eax);
- __ pushad();
- __ push(reg);
- __ CallStub(&stub);
- __ test(eax, Operand(eax));
- __ popad();
- EmitBranch(true_block, false_block, not_zero);
+ if (expected.Contains(ToBooleanStub::SMI)) {
+ // Smis: 0 -> false, all other -> true.
+ __ test(reg, Operand(reg));
+ __ j(equal, false_label);
+ __ JumpIfSmi(reg, true_label);
+ } else if (expected.NeedsMap()) {
+ // If we need a map later and have a Smi -> deopt.
+ __ test(reg, Immediate(kSmiTagMask));
+ DeoptimizeIf(zero, instr->environment());
+ }
+
+ Register map = no_reg; // Keep the compiler happy.
+ if (expected.NeedsMap()) {
+ map = ToRegister(instr->TempAt(0));
+ ASSERT(!map.is(reg));
+ __ mov(map, FieldOperand(reg, HeapObject::kMapOffset));
+
+ if (expected.CanBeUndetectable()) {
+ // Undetectable -> false.
+ __ test_b(FieldOperand(map, Map::kBitFieldOffset),
+ 1 << Map::kIsUndetectable);
+ __ j(not_zero, false_label);
+ }
+ }
+
+ if (expected.Contains(ToBooleanStub::SPEC_OBJECT)) {
+ // spec object -> true.
+ __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
+ __ j(above_equal, true_label);
+ }
+
+ if (expected.Contains(ToBooleanStub::STRING)) {
+ // String value -> false iff empty.
+ Label not_string;
+ __ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
+ __ j(above_equal, &not_string, Label::kNear);
+ __ cmp(FieldOperand(reg, String::kLengthOffset), Immediate(0));
+ __ j(not_zero, true_label);
+ __ jmp(false_label);
+ __ bind(&not_string);
+ }
+
+ if (expected.Contains(ToBooleanStub::HEAP_NUMBER)) {
+ // heap number -> false iff +0, -0, or NaN.
+ Label not_heap_number;
+ __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
+ factory()->heap_number_map());
+ __ j(not_equal, &not_heap_number, Label::kNear);
+ __ fldz();
+ __ fld_d(FieldOperand(reg, HeapNumber::kValueOffset));
+ __ FCmp();
+ __ j(zero, false_label);
+ __ jmp(true_label);
+ __ bind(&not_heap_number);
+ }
+
+ // We've seen something for the first time -> deopt.
+ DeoptimizeIf(no_condition, instr->environment());
}
}
}
@@ -2211,16 +2259,13 @@
void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
- Register elements = ToRegister(instr->elements());
- Register key = ToRegister(instr->key());
Register result = ToRegister(instr->result());
- ASSERT(result.is(elements));
// Load the result.
- __ mov(result, FieldOperand(elements,
- key,
- times_pointer_size,
- FixedArray::kHeaderSize));
+ __ mov(result,
+ BuildFastArrayOperand(instr->elements(), instr->key(),
+ JSObject::FAST_ELEMENTS,
+ FixedArray::kHeaderSize - kHeapObjectTag));
// Check for the hole value.
if (instr->hydrogen()->RequiresHoleCheck()) {
@@ -2253,22 +2298,22 @@
Operand LCodeGen::BuildFastArrayOperand(
- LOperand* external_pointer,
+ LOperand* elements_pointer,
LOperand* key,
JSObject::ElementsKind elements_kind,
uint32_t offset) {
- Register external_pointer_reg = ToRegister(external_pointer);
+ Register elements_pointer_reg = ToRegister(elements_pointer);
int shift_size = ElementsKindToShiftSize(elements_kind);
if (key->IsConstantOperand()) {
int constant_value = ToInteger32(LConstantOperand::cast(key));
if (constant_value & 0xF0000000) {
Abort("array index constant value too big");
}
- return Operand(external_pointer_reg,
+ return Operand(elements_pointer_reg,
constant_value * (1 << shift_size) + offset);
} else {
ScaleFactor scale_factor = static_cast<ScaleFactor>(shift_size);
- return Operand(external_pointer_reg, ToRegister(key), scale_factor, offset);
+ return Operand(elements_pointer_reg, ToRegister(key), scale_factor, offset);
}
}
@@ -2665,24 +2710,54 @@
XMMRegister xmm_scratch = xmm0;
Register output_reg = ToRegister(instr->result());
XMMRegister input_reg = ToDoubleRegister(instr->value());
- __ xorps(xmm_scratch, xmm_scratch); // Zero the register.
- __ ucomisd(input_reg, xmm_scratch);
- if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
- DeoptimizeIf(below_equal, instr->environment());
+ if (CpuFeatures::IsSupported(SSE4_1)) {
+ CpuFeatures::Scope scope(SSE4_1);
+ if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
+ // Deoptimize on negative zero.
+ Label non_zero;
+ __ xorps(xmm_scratch, xmm_scratch); // Zero the register.
+ __ ucomisd(input_reg, xmm_scratch);
+ __ j(not_equal, &non_zero, Label::kNear);
+ __ movmskpd(output_reg, input_reg);
+ __ test(output_reg, Immediate(1));
+ DeoptimizeIf(not_zero, instr->environment());
+ __ bind(&non_zero);
+ }
+ __ roundsd(xmm_scratch, input_reg, Assembler::kRoundDown);
+ __ cvttsd2si(output_reg, Operand(xmm_scratch));
+ // Overflow is signalled with minint.
+ __ cmp(output_reg, 0x80000000u);
+ DeoptimizeIf(equal, instr->environment());
} else {
+ Label done;
+ // Deoptimize on negative numbers.
+ __ xorps(xmm_scratch, xmm_scratch); // Zero the register.
+ __ ucomisd(input_reg, xmm_scratch);
DeoptimizeIf(below, instr->environment());
+
+ if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
+ // Check for negative zero.
+ Label positive_sign;
+ __ j(above, &positive_sign, Label::kNear);
+ __ movmskpd(output_reg, input_reg);
+ __ test(output_reg, Immediate(1));
+ DeoptimizeIf(not_zero, instr->environment());
+ __ Set(output_reg, Immediate(0));
+ __ jmp(&done, Label::kNear);
+ __ bind(&positive_sign);
+ }
+
+ // Use truncating instruction (OK because input is positive).
+ __ cvttsd2si(output_reg, Operand(input_reg));
+
+ // Overflow is signalled with minint.
+ __ cmp(output_reg, 0x80000000u);
+ DeoptimizeIf(equal, instr->environment());
+ __ bind(&done);
}
-
- // Use truncating instruction (OK because input is positive).
- __ cvttsd2si(output_reg, Operand(input_reg));
-
- // Overflow is signalled with minint.
- __ cmp(output_reg, 0x80000000u);
- DeoptimizeIf(equal, instr->environment());
}
-
void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
XMMRegister xmm_scratch = xmm0;
Register output_reg = ToRegister(instr->result());
@@ -2692,13 +2767,11 @@
// xmm_scratch = 0.5
ExternalReference one_half = ExternalReference::address_of_one_half();
__ movdbl(xmm_scratch, Operand::StaticVariable(one_half));
-
__ ucomisd(xmm_scratch, input_reg);
__ j(above, &below_half);
// input = input + 0.5
__ addsd(input_reg, xmm_scratch);
-
// Compute Math.floor(value + 0.5).
// Use truncating instruction (OK because input is positive).
__ cvttsd2si(output_reg, Operand(input_reg));
@@ -3017,8 +3090,14 @@
void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
- __ cmp(ToRegister(instr->index()), ToOperand(instr->length()));
- DeoptimizeIf(above_equal, instr->environment());
+ if (instr->index()->IsConstantOperand()) {
+ __ cmp(ToOperand(instr->length()),
+ ToImmediate(LConstantOperand::cast(instr->index())));
+ DeoptimizeIf(below_equal, instr->environment());
+ } else {
+ __ cmp(ToRegister(instr->index()), ToOperand(instr->length()));
+ DeoptimizeIf(above_equal, instr->environment());
+ }
}
@@ -3096,6 +3175,7 @@
void LCodeGen::DoStoreKeyedFastDoubleElement(
LStoreKeyedFastDoubleElement* instr) {
XMMRegister value = ToDoubleRegister(instr->value());
+ Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
Label have_value;
__ ucomisd(value, value);
@@ -3137,95 +3217,79 @@
};
Register string = ToRegister(instr->string());
- Register index = no_reg;
- int const_index = -1;
- if (instr->index()->IsConstantOperand()) {
- const_index = ToInteger32(LConstantOperand::cast(instr->index()));
- STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
- if (!Smi::IsValid(const_index)) {
- // Guaranteed to be out of bounds because of the assert above.
- // So the bounds check that must dominate this instruction must
- // have deoptimized already.
- if (FLAG_debug_code) {
- __ Abort("StringCharCodeAt: out of bounds index.");
- }
- // No code needs to be generated.
- return;
- }
- } else {
- index = ToRegister(instr->index());
- }
+ Register index = ToRegister(instr->index());
Register result = ToRegister(instr->result());
DeferredStringCharCodeAt* deferred =
new DeferredStringCharCodeAt(this, instr);
- Label flat_string, ascii_string, done;
-
// Fetch the instance type of the receiver into result register.
__ mov(result, FieldOperand(string, HeapObject::kMapOffset));
__ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));
- // We need special handling for non-flat strings.
- STATIC_ASSERT(kSeqStringTag == 0);
- __ test(result, Immediate(kStringRepresentationMask));
- __ j(zero, &flat_string, Label::kNear);
+ // We need special handling for indirect strings.
+ Label check_sequential;
+ __ test(result, Immediate(kIsIndirectStringMask));
+ __ j(zero, &check_sequential, Label::kNear);
- // Handle non-flat strings.
- __ test(result, Immediate(kIsConsStringMask));
- __ j(zero, deferred->entry());
+ // Dispatch on the indirect string shape: slice or cons.
+ Label cons_string;
+ __ test(result, Immediate(kSlicedNotConsMask));
+ __ j(zero, &cons_string, Label::kNear);
- // ConsString.
+ // Handle slices.
+ Label indirect_string_loaded;
+ __ mov(result, FieldOperand(string, SlicedString::kOffsetOffset));
+ __ SmiUntag(result);
+ __ add(index, Operand(result));
+ __ mov(string, FieldOperand(string, SlicedString::kParentOffset));
+ __ jmp(&indirect_string_loaded, Label::kNear);
+
+ // Handle conses.
// Check whether the right hand side is the empty string (i.e. if
// this is really a flat string in a cons string). If that is not
// the case we would rather go to the runtime system now to flatten
// the string.
+ __ bind(&cons_string);
__ cmp(FieldOperand(string, ConsString::kSecondOffset),
Immediate(factory()->empty_string()));
__ j(not_equal, deferred->entry());
- // Get the first of the two strings and load its instance type.
__ mov(string, FieldOperand(string, ConsString::kFirstOffset));
+
+ __ bind(&indirect_string_loaded);
__ mov(result, FieldOperand(string, HeapObject::kMapOffset));
__ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));
- // If the first cons component is also non-flat, then go to runtime.
+
+ // Check whether the string is sequential. The only non-sequential
+ // shapes we support have just been unwrapped above.
+ __ bind(&check_sequential);
STATIC_ASSERT(kSeqStringTag == 0);
__ test(result, Immediate(kStringRepresentationMask));
__ j(not_zero, deferred->entry());
- // Check for ASCII or two-byte string.
- __ bind(&flat_string);
+ // Dispatch on the encoding: ASCII or two-byte.
+ Label ascii_string;
STATIC_ASSERT(kAsciiStringTag != 0);
__ test(result, Immediate(kStringEncodingMask));
__ j(not_zero, &ascii_string, Label::kNear);
// Two-byte string.
// Load the two-byte character code into the result register.
+ Label done;
STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
- if (instr->index()->IsConstantOperand()) {
- __ movzx_w(result,
- FieldOperand(string,
- SeqTwoByteString::kHeaderSize +
- (kUC16Size * const_index)));
- } else {
- __ movzx_w(result, FieldOperand(string,
- index,
- times_2,
- SeqTwoByteString::kHeaderSize));
- }
+ __ movzx_w(result, FieldOperand(string,
+ index,
+ times_2,
+ SeqTwoByteString::kHeaderSize));
__ jmp(&done, Label::kNear);
// ASCII string.
// Load the byte into the result register.
__ bind(&ascii_string);
- if (instr->index()->IsConstantOperand()) {
- __ movzx_b(result, FieldOperand(string,
- SeqAsciiString::kHeaderSize + const_index));
- } else {
- __ movzx_b(result, FieldOperand(string,
- index,
- times_1,
- SeqAsciiString::kHeaderSize));
- }
+ __ movzx_b(result, FieldOperand(string,
+ index,
+ times_1,
+ SeqAsciiString::kHeaderSize));
__ bind(&done);
__ bind(deferred->exit());
}
@@ -4108,6 +4172,10 @@
__ cmp(input, factory()->false_value());
final_branch_condition = equal;
+ } else if (FLAG_harmony_typeof && type_name->Equals(heap()->null_symbol())) {
+ __ cmp(input, factory()->null_value());
+ final_branch_condition = equal;
+
} else if (type_name->Equals(heap()->undefined_symbol())) {
__ cmp(input, factory()->undefined_value());
__ j(equal, true_label);
@@ -4126,8 +4194,10 @@
} else if (type_name->Equals(heap()->object_symbol())) {
__ JumpIfSmi(input, false_label);
- __ cmp(input, factory()->null_value());
- __ j(equal, true_label);
+ if (!FLAG_harmony_typeof) {
+ __ cmp(input, factory()->null_value());
+ __ j(equal, true_label);
+ }
__ CmpObjectType(input, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, input);
__ j(below, false_label);
__ CmpInstanceType(input, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
diff --git a/src/ia32/lithium-codegen-ia32.h b/src/ia32/lithium-codegen-ia32.h
index c568bef..d26f245 100644
--- a/src/ia32/lithium-codegen-ia32.h
+++ b/src/ia32/lithium-codegen-ia32.h
@@ -222,7 +222,7 @@
Register ToRegister(int index) const;
XMMRegister ToDoubleRegister(int index) const;
int ToInteger32(LConstantOperand* op) const;
- Operand BuildFastArrayOperand(LOperand* external_pointer,
+ Operand BuildFastArrayOperand(LOperand* elements_pointer,
LOperand* key,
JSObject::ElementsKind elements_kind,
uint32_t offset);
diff --git a/src/ia32/lithium-ia32.cc b/src/ia32/lithium-ia32.cc
index f0615ef..f59ee07 100644
--- a/src/ia32/lithium-ia32.cc
+++ b/src/ia32/lithium-ia32.cc
@@ -1045,7 +1045,13 @@
: instr->SecondSuccessor();
return new LGoto(successor->block_id());
}
- return new LBranch(UseRegisterAtStart(v));
+ ToBooleanStub::Types expected = instr->expected_input_types();
+ // We need a temporary register when we have to access the map *or* we have
+ // no type info yet, in which case we handle all cases (including the ones
+ // involving maps).
+ bool needs_temp = expected.NeedsMap() || expected.IsEmpty();
+ LOperand* temp = needs_temp ? TempRegister() : NULL;
+ return AssignEnvironment(new LBranch(UseRegister(v), temp));
}
@@ -1536,16 +1542,10 @@
}
-LInstruction* LChunkBuilder::DoFixedArrayLength(HFixedArrayLength* instr) {
+LInstruction* LChunkBuilder::DoFixedArrayBaseLength(
+ HFixedArrayBaseLength* instr) {
LOperand* array = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LFixedArrayLength(array));
-}
-
-
-LInstruction* LChunkBuilder::DoExternalArrayLength(
- HExternalArrayLength* instr) {
- LOperand* array = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LExternalArrayLength(array));
+ return DefineAsRegister(new LFixedArrayBaseLength(array));
}
@@ -1563,8 +1563,9 @@
LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
- return AssignEnvironment(new LBoundsCheck(UseRegisterAtStart(instr->index()),
- UseAtStart(instr->length())));
+ return AssignEnvironment(new LBoundsCheck(
+ UseRegisterOrConstantAtStart(instr->index()),
+ UseAtStart(instr->length())));
}
@@ -1885,9 +1886,9 @@
ASSERT(instr->representation().IsTagged());
ASSERT(instr->key()->representation().IsInteger32());
LOperand* obj = UseRegisterAtStart(instr->object());
- LOperand* key = UseRegisterAtStart(instr->key());
+ LOperand* key = UseRegisterOrConstantAtStart(instr->key());
LLoadKeyedFastElement* result = new LLoadKeyedFastElement(obj, key);
- return AssignEnvironment(DefineSameAsFirst(result));
+ return AssignEnvironment(DefineAsRegister(result));
}
@@ -2061,8 +2062,8 @@
LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) {
- LOperand* string = UseRegister(instr->string());
- LOperand* index = UseRegisterOrConstant(instr->index());
+ LOperand* string = UseTempRegister(instr->string());
+ LOperand* index = UseTempRegister(instr->index());
LOperand* context = UseAny(instr->context());
LStringCharCodeAt* result = new LStringCharCodeAt(context, string, index);
return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
diff --git a/src/ia32/lithium-ia32.h b/src/ia32/lithium-ia32.h
index 0ea7c6b..4b407a3 100644
--- a/src/ia32/lithium-ia32.h
+++ b/src/ia32/lithium-ia32.h
@@ -86,8 +86,7 @@
V(DivI) \
V(DoubleToI) \
V(ElementsKind) \
- V(ExternalArrayLength) \
- V(FixedArrayLength) \
+ V(FixedArrayBaseLength) \
V(FunctionLiteral) \
V(GetCachedArrayIndex) \
V(GlobalObject) \
@@ -876,10 +875,11 @@
};
-class LBranch: public LControlInstruction<1, 0> {
+class LBranch: public LControlInstruction<1, 1> {
public:
- explicit LBranch(LOperand* value) {
+ explicit LBranch(LOperand* value, LOperand* temp) {
inputs_[0] = value;
+ temps_[0] = temp;
}
DECLARE_CONCRETE_INSTRUCTION(Branch, "branch")
@@ -921,25 +921,15 @@
};
-class LExternalArrayLength: public LTemplateInstruction<1, 1, 0> {
+class LFixedArrayBaseLength: public LTemplateInstruction<1, 1, 0> {
public:
- explicit LExternalArrayLength(LOperand* value) {
+ explicit LFixedArrayBaseLength(LOperand* value) {
inputs_[0] = value;
}
- DECLARE_CONCRETE_INSTRUCTION(ExternalArrayLength, "external-array-length")
- DECLARE_HYDROGEN_ACCESSOR(ExternalArrayLength)
-};
-
-
-class LFixedArrayLength: public LTemplateInstruction<1, 1, 0> {
- public:
- explicit LFixedArrayLength(LOperand* value) {
- inputs_[0] = value;
- }
-
- DECLARE_CONCRETE_INSTRUCTION(FixedArrayLength, "fixed-array-length")
- DECLARE_HYDROGEN_ACCESSOR(FixedArrayLength)
+ DECLARE_CONCRETE_INSTRUCTION(FixedArrayBaseLength,
+ "fixed-array-base-length")
+ DECLARE_HYDROGEN_ACCESSOR(FixedArrayBaseLength)
};
@@ -2247,14 +2237,18 @@
template<int I, int T>
LInstruction* DefineFixedDouble(LTemplateInstruction<1, I, T>* instr,
XMMRegister reg);
+ // Assigns an environment to an instruction. An instruction which can
+ // deoptimize must have an environment.
LInstruction* AssignEnvironment(LInstruction* instr);
+ // Assigns a pointer map to an instruction. An instruction which can
+ // trigger a GC or a lazy deoptimization must have a pointer map.
LInstruction* AssignPointerMap(LInstruction* instr);
enum CanDeoptimize { CAN_DEOPTIMIZE_EAGERLY, CANNOT_DEOPTIMIZE_EAGERLY };
- // By default we assume that instruction sequences generated for calls
- // cannot deoptimize eagerly and we do not attach environment to this
- // instruction.
+ // Marks a call for the register allocator. Assigns a pointer map to
+ // support GC and lazy deoptimization. Assigns an environment to support
+ // eager deoptimization if CAN_DEOPTIMIZE_EAGERLY.
LInstruction* MarkAsCall(
LInstruction* instr,
HInstruction* hinstr,
diff --git a/src/ia32/macro-assembler-ia32.cc b/src/ia32/macro-assembler-ia32.cc
index 3e037d7..dff174c 100644
--- a/src/ia32/macro-assembler-ia32.cc
+++ b/src/ia32/macro-assembler-ia32.cc
@@ -69,8 +69,8 @@
// Compute number of region covering addr. See Page::GetRegionNumberForAddress
// method for more details.
- and_(addr, Page::kPageAlignmentMask);
shr(addr, Page::kRegionSizeLog2);
+ and_(addr, Page::kPageAlignmentMask >> Page::kRegionSizeLog2);
// Set dirty mark for region.
// Bit tests with a memory operand should be avoided on Intel processors,
@@ -148,7 +148,7 @@
Label done;
// Skip barrier if writing a smi.
- ASSERT_EQ(0, kSmiTag);
+ STATIC_ASSERT(kSmiTag == 0);
JumpIfSmi(value, &done, Label::kNear);
InNewSpace(object, value, equal, &done, Label::kNear);
@@ -166,8 +166,8 @@
// Array access: calculate the destination address in the same manner as
// KeyedStoreIC::GenerateGeneric. Multiply a smi by 2 to get an offset
// into an array of words.
- ASSERT_EQ(1, kSmiTagSize);
- ASSERT_EQ(0, kSmiTag);
+ STATIC_ASSERT(kSmiTagSize == 1);
+ STATIC_ASSERT(kSmiTag == 0);
lea(dst, Operand(object, dst, times_half_pointer_size,
FixedArray::kHeaderSize - kHeapObjectTag));
}
@@ -193,7 +193,7 @@
Label done;
// Skip barrier if writing a smi.
- ASSERT_EQ(0, kSmiTag);
+ STATIC_ASSERT(kSmiTag == 0);
JumpIfSmi(value, &done, Label::kNear);
InNewSpace(object, value, equal, &done);
@@ -263,6 +263,13 @@
}
+void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
+ // see ROOT_ACCESSOR macro in factory.h
+ Handle<Object> value(&isolate()->heap()->roots_address()[index]);
+ cmp(with, value);
+}
+
+
void MacroAssembler::CmpObjectType(Register heap_object,
InstanceType type,
Register map) {
@@ -319,7 +326,7 @@
Register instance_type) {
mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
- ASSERT(kNotStringTag != 0);
+ STATIC_ASSERT(kNotStringTag != 0);
test(instance_type, Immediate(kIsNotStringMask));
return zero;
}
@@ -535,7 +542,12 @@
void MacroAssembler::PushTryHandler(CodeLocation try_location,
HandlerType type) {
// Adjust this code if not the case.
- ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
+ STATIC_ASSERT(StackHandlerConstants::kContextOffset == 1 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kStateOffset == 3 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize);
// The pc (return address) is already on TOS.
if (try_location == IN_JAVASCRIPT) {
if (type == TRY_CATCH_HANDLER) {
@@ -544,6 +556,7 @@
push(Immediate(StackHandler::TRY_FINALLY));
}
push(ebp);
+ push(esi);
} else {
ASSERT(try_location == IN_JS_ENTRY);
// The frame pointer does not point to a JS frame so we save NULL
@@ -551,6 +564,7 @@
// before dereferencing it to restore the context.
push(Immediate(StackHandler::ENTRY));
push(Immediate(0)); // NULL frame pointer.
+ push(Immediate(Smi::FromInt(0))); // No context.
}
// Save the current handler as the next handler.
push(Operand::StaticVariable(ExternalReference(Isolate::k_handler_address,
@@ -563,7 +577,7 @@
void MacroAssembler::PopTryHandler() {
- ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
+ STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
pop(Operand::StaticVariable(ExternalReference(Isolate::k_handler_address,
isolate())));
add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize));
@@ -572,8 +586,12 @@
void MacroAssembler::Throw(Register value) {
// Adjust this code if not the case.
- STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
-
+ STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
+ STATIC_ASSERT(StackHandlerConstants::kContextOffset == 1 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kStateOffset == 3 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize);
// eax must hold the exception.
if (!value.is(eax)) {
mov(eax, value);
@@ -584,24 +602,21 @@
isolate());
mov(esp, Operand::StaticVariable(handler_address));
- // Restore next handler and frame pointer, discard handler state.
- STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
+ // Restore next handler, context, and frame pointer; discard handler state.
pop(Operand::StaticVariable(handler_address));
- STATIC_ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize);
- pop(ebp);
- pop(edx); // Remove state.
+ pop(esi); // Context.
+ pop(ebp); // Frame pointer.
+ pop(edx); // State.
- // Before returning we restore the context from the frame pointer if
- // not NULL. The frame pointer is NULL in the exception handler of
- // a JS entry frame.
- Set(esi, Immediate(0)); // Tentatively set context pointer to NULL.
+ // If the handler is a JS frame, restore the context to the frame.
+ // (edx == ENTRY) == (ebp == 0) == (esi == 0), so we could test any
+ // of them.
Label skip;
- cmp(ebp, 0);
+ cmp(Operand(edx), Immediate(StackHandler::ENTRY));
j(equal, &skip, Label::kNear);
- mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
+ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
bind(&skip);
- STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
ret(0);
}
@@ -609,7 +624,12 @@
void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
Register value) {
// Adjust this code if not the case.
- STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
+ STATIC_ASSERT(StackHandlerConstants::kContextOffset == 1 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kStateOffset == 3 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize);
// eax must hold the exception.
if (!value.is(eax)) {
@@ -635,7 +655,6 @@
bind(&done);
// Set the top handler address to next handler past the current ENTRY handler.
- STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
pop(Operand::StaticVariable(handler_address));
if (type == OUT_OF_MEMORY) {
@@ -653,15 +672,14 @@
mov(Operand::StaticVariable(pending_exception), eax);
}
- // Clear the context pointer.
+ // Discard the context saved in the handler and clear the context pointer.
+ pop(edx);
Set(esi, Immediate(0));
// Restore fp from handler and discard handler state.
- STATIC_ASSERT(StackHandlerConstants::kFPOffset == 1 * kPointerSize);
pop(ebp);
pop(edx); // State.
- STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
ret(0);
}
@@ -1190,6 +1208,42 @@
}
+void MacroAssembler::AllocateSlicedString(Register result,
+ Register scratch1,
+ Register scratch2,
+ Label* gc_required) {
+ // Allocate heap number in new space.
+ AllocateInNewSpace(SlicedString::kSize,
+ result,
+ scratch1,
+ scratch2,
+ gc_required,
+ TAG_OBJECT);
+
+ // Set the map. The other fields are left uninitialized.
+ mov(FieldOperand(result, HeapObject::kMapOffset),
+ Immediate(isolate()->factory()->sliced_string_map()));
+}
+
+
+void MacroAssembler::AllocateAsciiSlicedString(Register result,
+ Register scratch1,
+ Register scratch2,
+ Label* gc_required) {
+ // Allocate heap number in new space.
+ AllocateInNewSpace(SlicedString::kSize,
+ result,
+ scratch1,
+ scratch2,
+ gc_required,
+ TAG_OBJECT);
+
+ // Set the map. The other fields are left uninitialized.
+ mov(FieldOperand(result, HeapObject::kMapOffset),
+ Immediate(isolate()->factory()->sliced_ascii_string_map()));
+}
+
+
// Copy memory, byte-by-byte, from source to destination. Not optimized for
// long or aligned copies. The contents of scratch and length are destroyed.
// Source and destination are incremented by length.
@@ -2148,7 +2202,7 @@
Register scratch2,
Label* failure) {
// Check that both objects are not smis.
- ASSERT_EQ(0, kSmiTag);
+ STATIC_ASSERT(kSmiTag == 0);
mov(scratch1, Operand(object1));
and_(scratch1, Operand(object2));
JumpIfSmi(scratch1, failure);
diff --git a/src/ia32/macro-assembler-ia32.h b/src/ia32/macro-assembler-ia32.h
index dac2273..de9361d 100644
--- a/src/ia32/macro-assembler-ia32.h
+++ b/src/ia32/macro-assembler-ia32.h
@@ -209,6 +209,9 @@
void SafeSet(Register dst, const Immediate& x);
void SafePush(const Immediate& x);
+ // Compare a register against a known root, e.g. undefined, null, true, ...
+ void CompareRoot(Register with, Heap::RootListIndex index);
+
// Compare object type for heap object.
// Incoming register is heap_object and outgoing register is map.
void CmpObjectType(Register heap_object, InstanceType type, Register map);
@@ -272,8 +275,8 @@
// Smi tagging support.
void SmiTag(Register reg) {
- ASSERT(kSmiTag == 0);
- ASSERT(kSmiTagSize == 1);
+ STATIC_ASSERT(kSmiTag == 0);
+ STATIC_ASSERT(kSmiTagSize == 1);
add(reg, Operand(reg));
}
void SmiUntag(Register reg) {
@@ -282,9 +285,9 @@
// Modifies the register even if it does not contain a Smi!
void SmiUntag(Register reg, Label* is_smi) {
- ASSERT(kSmiTagSize == 1);
+ STATIC_ASSERT(kSmiTagSize == 1);
sar(reg, kSmiTagSize);
- ASSERT(kSmiTag == 0);
+ STATIC_ASSERT(kSmiTag == 0);
j(not_carry, is_smi);
}
@@ -443,6 +446,17 @@
Register scratch2,
Label* gc_required);
+ // Allocate a raw sliced string object. Only the map field of the result is
+ // initialized.
+ void AllocateSlicedString(Register result,
+ Register scratch1,
+ Register scratch2,
+ Label* gc_required);
+ void AllocateAsciiSlicedString(Register result,
+ Register scratch1,
+ Register scratch2,
+ Label* gc_required);
+
// Copy memory, byte-by-byte, from source to destination. Not optimized for
// long or aligned copies.
// The contents of index and scratch are destroyed.
diff --git a/src/ia32/regexp-macro-assembler-ia32.cc b/src/ia32/regexp-macro-assembler-ia32.cc
index 8db2e9b..d175d9e 100644
--- a/src/ia32/regexp-macro-assembler-ia32.cc
+++ b/src/ia32/regexp-macro-assembler-ia32.cc
@@ -1065,12 +1065,13 @@
}
// Prepare for possible GC.
- HandleScope handles;
+ HandleScope handles(isolate);
Handle<Code> code_handle(re_code);
Handle<String> subject(frame_entry<String*>(re_frame, kInputString));
+
// Current string.
- bool is_ascii = subject->IsAsciiRepresentation();
+ bool is_ascii = subject->IsAsciiRepresentationUnderneath();
ASSERT(re_code->instruction_start() <= *return_address);
ASSERT(*return_address <=
@@ -1079,7 +1080,7 @@
MaybeObject* result = Execution::HandleStackGuardInterrupt();
if (*code_handle != re_code) { // Return address no longer valid
- int delta = *code_handle - re_code;
+ int delta = code_handle->address() - re_code->address();
// Overwrite the return address on the stack.
*return_address += delta;
}
@@ -1088,8 +1089,20 @@
return EXCEPTION;
}
+ Handle<String> subject_tmp = subject;
+ int slice_offset = 0;
+
+ // Extract the underlying string and the slice offset.
+ if (StringShape(*subject_tmp).IsCons()) {
+ subject_tmp = Handle<String>(ConsString::cast(*subject_tmp)->first());
+ } else if (StringShape(*subject_tmp).IsSliced()) {
+ SlicedString* slice = SlicedString::cast(*subject_tmp);
+ subject_tmp = Handle<String>(slice->parent());
+ slice_offset = slice->offset();
+ }
+
// String might have changed.
- if (subject->IsAsciiRepresentation() != is_ascii) {
+ if (subject_tmp->IsAsciiRepresentation() != is_ascii) {
// If we changed between an ASCII and an UC16 string, the specialized
// code cannot be used, and we need to restart regexp matching from
// scratch (including, potentially, compiling a new version of the code).
@@ -1100,8 +1113,8 @@
// be a sequential or external string with the same content.
// Update the start and end pointers in the stack frame to the current
// location (whether it has actually moved or not).
- ASSERT(StringShape(*subject).IsSequential() ||
- StringShape(*subject).IsExternal());
+ ASSERT(StringShape(*subject_tmp).IsSequential() ||
+ StringShape(*subject_tmp).IsExternal());
// The original start address of the characters to match.
const byte* start_address = frame_entry<const byte*>(re_frame, kInputStart);
@@ -1109,13 +1122,14 @@
// Find the current start address of the same character at the current string
// position.
int start_index = frame_entry<int>(re_frame, kStartIndex);
- const byte* new_address = StringCharacterPosition(*subject, start_index);
+ const byte* new_address = StringCharacterPosition(*subject_tmp,
+ start_index + slice_offset);
if (start_address != new_address) {
// If there is a difference, update the object pointer and start and end
// addresses in the RegExp stack frame to match the new value.
const byte* end_address = frame_entry<const byte* >(re_frame, kInputEnd);
- int byte_length = end_address - start_address;
+ int byte_length = static_cast<int>(end_address - start_address);
frame_entry<const String*>(re_frame, kInputString) = *subject;
frame_entry<const byte*>(re_frame, kInputStart) = new_address;
frame_entry<const byte*>(re_frame, kInputEnd) = new_address + byte_length;
diff --git a/src/ia32/stub-cache-ia32.cc b/src/ia32/stub-cache-ia32.cc
index 9a690d7..621a9bb 100644
--- a/src/ia32/stub-cache-ia32.cc
+++ b/src/ia32/stub-cache-ia32.cc
@@ -273,7 +273,7 @@
// Check that the object is a string.
__ mov(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
__ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
- ASSERT(kNotStringTag != 0);
+ STATIC_ASSERT(kNotStringTag != 0);
__ test(scratch, Immediate(kNotStringTag));
__ j(not_zero, non_string_object);
}
@@ -3400,37 +3400,37 @@
__ JumpIfNotSmi(eax, &miss_force_generic);
// Check that the index is in range.
- __ mov(ecx, eax);
- __ SmiUntag(ecx); // Untag the index.
__ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset));
- __ cmp(ecx, FieldOperand(ebx, ExternalArray::kLengthOffset));
+ __ cmp(eax, FieldOperand(ebx, ExternalArray::kLengthOffset));
// Unsigned comparison catches both negative and too-large values.
__ j(above_equal, &miss_force_generic);
__ mov(ebx, FieldOperand(ebx, ExternalArray::kExternalPointerOffset));
// ebx: base pointer of external storage
switch (elements_kind) {
case JSObject::EXTERNAL_BYTE_ELEMENTS:
- __ movsx_b(eax, Operand(ebx, ecx, times_1, 0));
+ __ SmiUntag(eax); // Untag the index.
+ __ movsx_b(eax, Operand(ebx, eax, times_1, 0));
break;
case JSObject::EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
case JSObject::EXTERNAL_PIXEL_ELEMENTS:
- __ movzx_b(eax, Operand(ebx, ecx, times_1, 0));
+ __ SmiUntag(eax); // Untag the index.
+ __ movzx_b(eax, Operand(ebx, eax, times_1, 0));
break;
case JSObject::EXTERNAL_SHORT_ELEMENTS:
- __ movsx_w(eax, Operand(ebx, ecx, times_2, 0));
+ __ movsx_w(eax, Operand(ebx, eax, times_1, 0));
break;
case JSObject::EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
- __ movzx_w(eax, Operand(ebx, ecx, times_2, 0));
+ __ movzx_w(eax, Operand(ebx, eax, times_1, 0));
break;
case JSObject::EXTERNAL_UNSIGNED_INT_ELEMENTS:
case JSObject::EXTERNAL_INT_ELEMENTS:
- __ mov(ecx, Operand(ebx, ecx, times_4, 0));
+ __ mov(ecx, Operand(ebx, eax, times_2, 0));
break;
case JSObject::EXTERNAL_FLOAT_ELEMENTS:
- __ fld_s(Operand(ebx, ecx, times_4, 0));
+ __ fld_s(Operand(ebx, eax, times_2, 0));
break;
case JSObject::EXTERNAL_DOUBLE_ELEMENTS:
- __ fld_d(Operand(ebx, ecx, times_8, 0));
+ __ fld_d(Operand(ebx, eax, times_4, 0));
break;
default:
UNREACHABLE();
@@ -3556,9 +3556,7 @@
// Check that the index is in range.
__ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
- __ mov(ebx, ecx);
- __ SmiUntag(ebx);
- __ cmp(ebx, FieldOperand(edi, ExternalArray::kLengthOffset));
+ __ cmp(ecx, FieldOperand(edi, ExternalArray::kLengthOffset));
// Unsigned comparison catches both negative and too-large values.
__ j(above_equal, &slow);
@@ -3568,7 +3566,6 @@
// edx: receiver
// ecx: key
// edi: elements array
- // ebx: untagged index
if (elements_kind == JSObject::EXTERNAL_PIXEL_ELEMENTS) {
__ JumpIfNotSmi(eax, &slow);
} else {
@@ -3576,44 +3573,39 @@
}
// smi case
- __ mov(ecx, eax); // Preserve the value in eax. Key is no longer needed.
- __ SmiUntag(ecx);
+ __ mov(ebx, eax); // Preserve the value in eax as the return value.
+ __ SmiUntag(ebx);
__ mov(edi, FieldOperand(edi, ExternalArray::kExternalPointerOffset));
- // ecx: base pointer of external storage
+ // edi: base pointer of external storage
switch (elements_kind) {
case JSObject::EXTERNAL_PIXEL_ELEMENTS:
- { // Clamp the value to [0..255].
- Label done;
- __ test(ecx, Immediate(0xFFFFFF00));
- __ j(zero, &done, Label::kNear);
- __ setcc(negative, ecx); // 1 if negative, 0 if positive.
- __ dec_b(ecx); // 0 if negative, 255 if positive.
- __ bind(&done);
- }
- __ mov_b(Operand(edi, ebx, times_1, 0), ecx);
+ __ ClampUint8(ebx);
+ __ SmiUntag(ecx);
+ __ mov_b(Operand(edi, ecx, times_1, 0), ebx);
break;
case JSObject::EXTERNAL_BYTE_ELEMENTS:
case JSObject::EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
- __ mov_b(Operand(edi, ebx, times_1, 0), ecx);
+ __ SmiUntag(ecx);
+ __ mov_b(Operand(edi, ecx, times_1, 0), ebx);
break;
case JSObject::EXTERNAL_SHORT_ELEMENTS:
case JSObject::EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
- __ mov_w(Operand(edi, ebx, times_2, 0), ecx);
+ __ mov_w(Operand(edi, ecx, times_1, 0), ebx);
break;
case JSObject::EXTERNAL_INT_ELEMENTS:
case JSObject::EXTERNAL_UNSIGNED_INT_ELEMENTS:
- __ mov(Operand(edi, ebx, times_4, 0), ecx);
+ __ mov(Operand(edi, ecx, times_2, 0), ebx);
break;
case JSObject::EXTERNAL_FLOAT_ELEMENTS:
case JSObject::EXTERNAL_DOUBLE_ELEMENTS:
// Need to perform int-to-float conversion.
- __ push(ecx);
+ __ push(ebx);
__ fild_s(Operand(esp, 0));
- __ pop(ecx);
+ __ pop(ebx);
if (elements_kind == JSObject::EXTERNAL_FLOAT_ELEMENTS) {
- __ fstp_s(Operand(edi, ebx, times_4, 0));
+ __ fstp_s(Operand(edi, ecx, times_2, 0));
} else { // elements_kind == JSObject::EXTERNAL_DOUBLE_ELEMENTS.
- __ fstp_d(Operand(edi, ebx, times_8, 0));
+ __ fstp_d(Operand(edi, ecx, times_4, 0));
}
break;
default:
@@ -3629,7 +3621,6 @@
// edx: receiver
// ecx: key
// edi: elements array
- // ebx: untagged index
__ cmp(FieldOperand(eax, HeapObject::kMapOffset),
Immediate(masm->isolate()->factory()->heap_number_map()));
__ j(not_equal, &slow);
@@ -3638,15 +3629,14 @@
// +/-Infinity into integer arrays basically undefined. For more
// reproducible behavior, convert these to zero.
__ mov(edi, FieldOperand(edi, ExternalArray::kExternalPointerOffset));
- // ebx: untagged index
// edi: base pointer of external storage
if (elements_kind == JSObject::EXTERNAL_FLOAT_ELEMENTS) {
__ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
- __ fstp_s(Operand(edi, ebx, times_4, 0));
+ __ fstp_s(Operand(edi, ecx, times_2, 0));
__ ret(0);
} else if (elements_kind == JSObject::EXTERNAL_DOUBLE_ELEMENTS) {
__ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
- __ fstp_d(Operand(edi, ebx, times_8, 0));
+ __ fstp_d(Operand(edi, ecx, times_4, 0));
__ ret(0);
} else {
// Perform float-to-int conversion with truncation (round-to-zero)
@@ -3661,27 +3651,20 @@
elements_kind != JSObject::EXTERNAL_UNSIGNED_INT_ELEMENTS) {
ASSERT(CpuFeatures::IsSupported(SSE2));
CpuFeatures::Scope scope(SSE2);
- __ cvttsd2si(ecx, FieldOperand(eax, HeapNumber::kValueOffset));
+ __ cvttsd2si(ebx, FieldOperand(eax, HeapNumber::kValueOffset));
// ecx: untagged integer value
switch (elements_kind) {
case JSObject::EXTERNAL_PIXEL_ELEMENTS:
- { // Clamp the value to [0..255].
- Label done;
- __ test(ecx, Immediate(0xFFFFFF00));
- __ j(zero, &done, Label::kNear);
- __ setcc(negative, ecx); // 1 if negative, 0 if positive.
- __ dec_b(ecx); // 0 if negative, 255 if positive.
- __ bind(&done);
- }
- __ mov_b(Operand(edi, ebx, times_1, 0), ecx);
- break;
+ __ ClampUint8(ebx);
+ // Fall through.
case JSObject::EXTERNAL_BYTE_ELEMENTS:
case JSObject::EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
- __ mov_b(Operand(edi, ebx, times_1, 0), ecx);
+ __ SmiUntag(ecx);
+ __ mov_b(Operand(edi, ecx, times_1, 0), ebx);
break;
case JSObject::EXTERNAL_SHORT_ELEMENTS:
case JSObject::EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
- __ mov_w(Operand(edi, ebx, times_2, 0), ecx);
+ __ mov_w(Operand(edi, ecx, times_1, 0), ebx);
break;
default:
UNREACHABLE();
@@ -3698,7 +3681,7 @@
__ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
__ sub(Operand(esp), Immediate(2 * kPointerSize));
__ fisttp_d(Operand(esp, 0));
- __ pop(ecx);
+ __ pop(ebx);
__ add(Operand(esp), Immediate(kPointerSize));
} else {
ASSERT(CpuFeatures::IsSupported(SSE2));
@@ -3709,15 +3692,15 @@
// Note: we could do better for signed int arrays.
__ movd(xmm0, FieldOperand(eax, HeapNumber::kValueOffset));
// We will need the key if we have to make the slow runtime call.
- __ push(ecx);
- __ LoadPowerOf2(xmm1, ecx, 31);
- __ pop(ecx);
+ __ push(ebx);
+ __ LoadPowerOf2(xmm1, ebx, 31);
+ __ pop(ebx);
__ ucomisd(xmm1, xmm0);
__ j(above_equal, &slow);
- __ cvttsd2si(ecx, Operand(xmm0));
+ __ cvttsd2si(ebx, Operand(xmm0));
}
- // ecx: untagged integer value
- __ mov(Operand(edi, ebx, times_4, 0), ecx);
+ // ebx: untagged integer value
+ __ mov(Operand(edi, ecx, times_2, 0), ebx);
}
__ ret(0); // Return original value.
}
@@ -3981,10 +3964,12 @@
__ bind(&smi_value);
// Value is a smi. convert to a double and store.
- __ SmiUntag(eax);
- __ push(eax);
+ // Preserve original value.
+ __ mov(edx, eax);
+ __ SmiUntag(edx);
+ __ push(edx);
__ fild_s(Operand(esp, 0));
- __ pop(eax);
+ __ pop(edx);
__ fstp_d(FieldOperand(edi, ecx, times_4, FixedDoubleArray::kHeaderSize));
__ ret(0);
diff --git a/src/ic.cc b/src/ic.cc
index f70f75a..0d0b935 100644
--- a/src/ic.cc
+++ b/src/ic.cc
@@ -88,7 +88,8 @@
// function and the original code.
JSFunction* function = JSFunction::cast(frame->function());
function->PrintName();
- int code_offset = address() - js_code->instruction_start();
+ int code_offset =
+ static_cast<int>(address() - js_code->instruction_start());
PrintF("+%d", code_offset);
} else {
PrintF("<unknown>");
@@ -309,6 +310,7 @@
case Code::UNARY_OP_IC:
case Code::BINARY_OP_IC:
case Code::COMPARE_IC:
+ case Code::TO_BOOLEAN_IC:
// Clearing these is tricky and does not
// make any performance difference.
return;
@@ -842,14 +844,6 @@
}
-#ifdef DEBUG
-#define TRACE_IC_NAMED(msg, name) \
- if (FLAG_trace_ic) PrintF(msg, *(name)->ToCString())
-#else
-#define TRACE_IC_NAMED(msg, name)
-#endif
-
-
MaybeObject* LoadIC::Load(State state,
Handle<Object> object,
Handle<String> name) {
@@ -2506,6 +2500,31 @@
}
+RUNTIME_FUNCTION(MaybeObject*, ToBoolean_Patch) {
+ ASSERT(args.length() == 3);
+
+ HandleScope scope(isolate);
+ Handle<Object> object = args.at<Object>(0);
+ Register tos = Register::from_code(args.smi_at(1));
+ ToBooleanStub::Types old_types(args.smi_at(2));
+
+ ToBooleanStub::Types new_types(old_types);
+ bool to_boolean_value = new_types.Record(object);
+ old_types.TraceTransition(new_types);
+
+ ToBooleanStub stub(tos, new_types);
+ Handle<Code> code = stub.GetCode();
+ ToBooleanIC ic(isolate);
+ ic.patch(*code);
+ return Smi::FromInt(to_boolean_value ? 1 : 0);
+}
+
+
+void ToBooleanIC::patch(Code* code) {
+ set_target(code);
+}
+
+
static const Address IC_utilities[] = {
#define ADDR(name) FUNCTION_ADDR(name),
IC_UTIL_LIST(ADDR)
diff --git a/src/ic.h b/src/ic.h
index 11c2e3a..2236ba3 100644
--- a/src/ic.h
+++ b/src/ic.h
@@ -59,7 +59,8 @@
ICU(StoreInterceptorProperty) \
ICU(UnaryOp_Patch) \
ICU(BinaryOp_Patch) \
- ICU(CompareIC_Miss)
+ ICU(CompareIC_Miss) \
+ ICU(ToBoolean_Patch)
//
// IC is the base class for LoadIC, StoreIC, CallIC, KeyedLoadIC,
// and KeyedStoreIC.
@@ -720,6 +721,15 @@
Token::Value op_;
};
+
+class ToBooleanIC: public IC {
+ public:
+ explicit ToBooleanIC(Isolate* isolate) : IC(NO_EXTRA_FRAME, isolate) { }
+
+ void patch(Code* code);
+};
+
+
// Helper for BinaryOpIC and CompareIC.
void PatchInlinedSmiCode(Address address);
diff --git a/src/interpreter-irregexp.cc b/src/interpreter-irregexp.cc
index 1c6c52c..796a447 100644
--- a/src/interpreter-irregexp.cc
+++ b/src/interpreter-irregexp.cc
@@ -1,4 +1,4 @@
-// Copyright 2008 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -635,8 +635,9 @@
AssertNoAllocation a;
const byte* code_base = code_array->GetDataStartAddress();
uc16 previous_char = '\n';
- if (subject->IsAsciiRepresentation()) {
- Vector<const char> subject_vector = subject->ToAsciiVector();
+ String::FlatContent subject_content = subject->GetFlatContent();
+ if (subject_content.IsAscii()) {
+ Vector<const char> subject_vector = subject_content.ToAsciiVector();
if (start_position != 0) previous_char = subject_vector[start_position - 1];
return RawMatch(isolate,
code_base,
@@ -645,7 +646,8 @@
start_position,
previous_char);
} else {
- Vector<const uc16> subject_vector = subject->ToUC16Vector();
+ ASSERT(subject_content.IsTwoByte());
+ Vector<const uc16> subject_vector = subject_content.ToUC16Vector();
if (start_position != 0) previous_char = subject_vector[start_position - 1];
return RawMatch(isolate,
code_base,
diff --git a/src/isolate.cc b/src/isolate.cc
index 8a30e79..09cbc8a 100644
--- a/src/isolate.cc
+++ b/src/isolate.cc
@@ -76,6 +76,10 @@
ThreadLocalTop::ThreadLocalTop() {
InitializeInternal();
+ // This flag may be set using v8::V8::IgnoreOutOfMemoryException()
+ // before an isolate is initialized. The initialize methods below do
+ // not touch it to preserve its value.
+ ignore_out_of_memory_ = false;
}
@@ -382,7 +386,6 @@
if (Thread::GetThreadLocal(isolate_key_) == NULL) {
Thread::SetThreadLocal(isolate_key_, default_isolate_);
}
- CHECK(default_isolate_->PreInit());
}
@@ -654,6 +657,7 @@
incomplete_message_ = &accumulator;
PrintStack(&accumulator);
accumulator.OutputToStdOut();
+ InitializeLoggingAndCounters();
accumulator.Log();
incomplete_message_ = NULL;
stack_trace_nesting_level_ = 0;
@@ -1331,6 +1335,7 @@
if (list_ == data) list_ = data->next_;
if (data->next_ != NULL) data->next_->prev_ = data->prev_;
if (data->prev_ != NULL) data->prev_->next_ = data->next_;
+ delete data;
}
@@ -1375,11 +1380,15 @@
bootstrapper_(NULL),
runtime_profiler_(NULL),
compilation_cache_(NULL),
- counters_(new Counters()),
+ counters_(NULL),
code_range_(NULL),
+ // Must be initialized early to allow v8::SetResourceConstraints calls.
break_access_(OS::CreateMutex()),
- logger_(new Logger()),
- stats_table_(new StatsTable()),
+ debugger_initialized_(false),
+ // Must be initialized early to allow v8::Debug calls.
+ debugger_access_(OS::CreateMutex()),
+ logger_(NULL),
+ stats_table_(NULL),
stub_cache_(NULL),
deoptimizer_data_(NULL),
capture_stack_trace_for_uncaught_exceptions_(false),
@@ -1510,7 +1519,7 @@
logger_->TearDown();
// The default isolate is re-initializable due to legacy API.
- state_ = PREINITIALIZED;
+ state_ = UNINITIALIZED;
}
}
@@ -1525,6 +1534,12 @@
Isolate::~Isolate() {
TRACE_ISOLATE(destructor);
+ // Has to be called while counters_ are still alive.
+ zone_.DeleteKeptSegment();
+
+ delete[] assembler_spare_buffer_;
+ assembler_spare_buffer_ = NULL;
+
delete unicode_cache_;
unicode_cache_ = NULL;
@@ -1558,6 +1573,8 @@
handle_scope_implementer_ = NULL;
delete break_access_;
break_access_ = NULL;
+ delete debugger_access_;
+ debugger_access_ = NULL;
delete compilation_cache_;
compilation_cache_ = NULL;
@@ -1583,6 +1600,9 @@
delete global_handles_;
global_handles_ = NULL;
+ delete external_reference_table_;
+ external_reference_table_ = NULL;
+
#ifdef ENABLE_DEBUGGER_SUPPORT
delete debugger_;
debugger_ = NULL;
@@ -1592,58 +1612,6 @@
}
-bool Isolate::PreInit() {
- if (state_ != UNINITIALIZED) return true;
-
- TRACE_ISOLATE(preinit);
-
- ASSERT(Isolate::Current() == this);
-#ifdef ENABLE_DEBUGGER_SUPPORT
- debug_ = new Debug(this);
- debugger_ = new Debugger(this);
-#endif
-
- memory_allocator_ = new MemoryAllocator();
- memory_allocator_->isolate_ = this;
- code_range_ = new CodeRange();
- code_range_->isolate_ = this;
-
- // Safe after setting Heap::isolate_, initializing StackGuard and
- // ensuring that Isolate::Current() == this.
- heap_.SetStackLimits();
-
-#ifdef DEBUG
- DisallowAllocationFailure disallow_allocation_failure;
-#endif
-
-#define C(name) isolate_addresses_[Isolate::k_##name] = \
- reinterpret_cast<Address>(name());
- ISOLATE_ADDRESS_LIST(C)
-#undef C
-
- string_tracker_ = new StringTracker();
- string_tracker_->isolate_ = this;
- compilation_cache_ = new CompilationCache(this);
- transcendental_cache_ = new TranscendentalCache();
- keyed_lookup_cache_ = new KeyedLookupCache();
- context_slot_cache_ = new ContextSlotCache();
- descriptor_lookup_cache_ = new DescriptorLookupCache();
- unicode_cache_ = new UnicodeCache();
- pc_to_code_cache_ = new PcToCodeCache(this);
- write_input_buffer_ = new StringInputBuffer();
- global_handles_ = new GlobalHandles(this);
- bootstrapper_ = new Bootstrapper();
- handle_scope_implementer_ = new HandleScopeImplementer(this);
- stub_cache_ = new StubCache(this);
- ast_sentinels_ = new AstSentinels();
- regexp_stack_ = new RegExpStack();
- regexp_stack_->isolate_ = this;
-
- state_ = PREINITIALIZED;
- return true;
-}
-
-
void Isolate::InitializeThreadLocal() {
thread_local_top_.isolate_ = this;
thread_local_top_.Initialize();
@@ -1680,19 +1648,71 @@
}
+void Isolate::InitializeLoggingAndCounters() {
+ if (logger_ == NULL) {
+ logger_ = new Logger;
+ }
+ if (counters_ == NULL) {
+ counters_ = new Counters;
+ }
+}
+
+
+void Isolate::InitializeDebugger() {
+#ifdef ENABLE_DEBUGGER_SUPPORT
+ ScopedLock lock(debugger_access_);
+ if (NoBarrier_Load(&debugger_initialized_)) return;
+ InitializeLoggingAndCounters();
+ debug_ = new Debug(this);
+ debugger_ = new Debugger(this);
+ Release_Store(&debugger_initialized_, true);
+#endif
+}
+
+
bool Isolate::Init(Deserializer* des) {
ASSERT(state_ != INITIALIZED);
-
+ ASSERT(Isolate::Current() == this);
TRACE_ISOLATE(init);
- bool create_heap_objects = des == NULL;
-
#ifdef DEBUG
// The initialization process does not handle memory exhaustion.
DisallowAllocationFailure disallow_allocation_failure;
#endif
- if (state_ == UNINITIALIZED && !PreInit()) return false;
+ InitializeLoggingAndCounters();
+
+ InitializeDebugger();
+
+ memory_allocator_ = new MemoryAllocator(this);
+ code_range_ = new CodeRange(this);
+
+ // Safe after setting Heap::isolate_, initializing StackGuard and
+ // ensuring that Isolate::Current() == this.
+ heap_.SetStackLimits();
+
+#define C(name) isolate_addresses_[Isolate::k_##name] = \
+ reinterpret_cast<Address>(name());
+ ISOLATE_ADDRESS_LIST(C)
+#undef C
+
+ string_tracker_ = new StringTracker();
+ string_tracker_->isolate_ = this;
+ compilation_cache_ = new CompilationCache(this);
+ transcendental_cache_ = new TranscendentalCache();
+ keyed_lookup_cache_ = new KeyedLookupCache();
+ context_slot_cache_ = new ContextSlotCache();
+ descriptor_lookup_cache_ = new DescriptorLookupCache();
+ unicode_cache_ = new UnicodeCache();
+ pc_to_code_cache_ = new PcToCodeCache(this);
+ write_input_buffer_ = new StringInputBuffer();
+ global_handles_ = new GlobalHandles(this);
+ bootstrapper_ = new Bootstrapper();
+ handle_scope_implementer_ = new HandleScopeImplementer(this);
+ stub_cache_ = new StubCache(this);
+ ast_sentinels_ = new AstSentinels();
+ regexp_stack_ = new RegExpStack();
+ regexp_stack_->isolate_ = this;
// Enable logging before setting up the heap
logger_->Setup();
@@ -1715,7 +1735,8 @@
stack_guard_.InitThread(lock);
}
- // Setup the object heap
+ // Setup the object heap.
+ const bool create_heap_objects = (des == NULL);
ASSERT(!heap_.HasBeenSetup());
if (!heap_.Setup(create_heap_objects)) {
V8::SetFatalError();
@@ -1775,6 +1796,16 @@
}
+// Initialized lazily to allow early
+// v8::V8::SetAddHistogramSampleFunction calls.
+StatsTable* Isolate::stats_table() {
+ if (stats_table_ == NULL) {
+ stats_table_ = new StatsTable;
+ }
+ return stats_table_;
+}
+
+
void Isolate::Enter() {
Isolate* current_isolate = NULL;
PerIsolateThreadData* current_data = CurrentPerIsolateThreadData();
@@ -1814,8 +1845,6 @@
SetIsolateThreadLocals(this, data);
- CHECK(PreInit());
-
// In case it's the first time some thread enters the isolate.
set_thread_id(data->thread_id());
}
diff --git a/src/isolate.h b/src/isolate.h
index f2281aa..5bb504d 100644
--- a/src/isolate.h
+++ b/src/isolate.h
@@ -256,6 +256,9 @@
// Call back function to report unsafe JS accesses.
v8::FailedAccessCheckCallback failed_access_check_callback_;
+ // Whether out of memory exceptions should be ignored.
+ bool ignore_out_of_memory_;
+
private:
void InitializeInternal();
@@ -446,6 +449,13 @@
return reinterpret_cast<Isolate*>(Thread::GetThreadLocal(isolate_key_));
}
+ // Usually called by Init(), but can be called early e.g. to allow
+ // testing components that require logging but not the whole
+ // isolate.
+ //
+ // Safe to call more than once.
+ void InitializeLoggingAndCounters();
+
bool Init(Deserializer* des);
bool IsInitialized() { return state_ == INITIALIZED; }
@@ -498,10 +508,12 @@
// switched to non-legacy behavior).
static void EnterDefaultIsolate();
- // Debug.
// Mutex for serializing access to break control structures.
Mutex* break_access() { return break_access_; }
+ // Mutex for serializing access to debugger.
+ Mutex* debugger_access() { return debugger_access_; }
+
Address get_address_from_id(AddressId id);
// Access to top context (where the current function object was created).
@@ -661,6 +673,12 @@
// Tells whether the current context has experienced an out of memory
// exception.
bool is_out_of_memory();
+ bool ignore_out_of_memory() {
+ return thread_local_top_.ignore_out_of_memory_;
+ }
+ void set_ignore_out_of_memory(bool value) {
+ thread_local_top_.ignore_out_of_memory_ = value;
+ }
void PrintCurrentStackTrace(FILE* out);
void PrintStackTrace(FILE* out, char* thread_data);
@@ -769,14 +787,24 @@
#undef GLOBAL_CONTEXT_FIELD_ACCESSOR
Bootstrapper* bootstrapper() { return bootstrapper_; }
- Counters* counters() { return counters_; }
+ Counters* counters() {
+ // Call InitializeLoggingAndCounters() if logging is needed before
+ // the isolate is fully initialized.
+ ASSERT(counters_ != NULL);
+ return counters_;
+ }
CodeRange* code_range() { return code_range_; }
RuntimeProfiler* runtime_profiler() { return runtime_profiler_; }
CompilationCache* compilation_cache() { return compilation_cache_; }
- Logger* logger() { return logger_; }
+ Logger* logger() {
+ // Call InitializeLoggingAndCounters() if logging is needed before
+ // the isolate is fully initialized.
+ ASSERT(logger_ != NULL);
+ return logger_;
+ }
StackGuard* stack_guard() { return &stack_guard_; }
Heap* heap() { return &heap_; }
- StatsTable* stats_table() { return stats_table_; }
+ StatsTable* stats_table();
StubCache* stub_cache() { return stub_cache_; }
DeoptimizerData* deoptimizer_data() { return deoptimizer_data_; }
ThreadLocalTop* thread_local_top() { return &thread_local_top_; }
@@ -877,8 +905,14 @@
void PreallocatedStorageInit(size_t size);
#ifdef ENABLE_DEBUGGER_SUPPORT
- Debugger* debugger() { return debugger_; }
- Debug* debug() { return debug_; }
+ Debugger* debugger() {
+ if (!NoBarrier_Load(&debugger_initialized_)) InitializeDebugger();
+ return debugger_;
+ }
+ Debug* debug() {
+ if (!NoBarrier_Load(&debugger_initialized_)) InitializeDebugger();
+ return debug_;
+ }
#endif
inline bool DebuggerHasBreakPoints();
@@ -1010,8 +1044,6 @@
static Isolate* default_isolate_;
static ThreadDataTable* thread_data_table_;
- bool PreInit();
-
void Deinit();
static void SetIsolateThreadLocals(Isolate* isolate,
@@ -1019,7 +1051,6 @@
enum State {
UNINITIALIZED, // Some components may not have been allocated.
- PREINITIALIZED, // Components have been allocated but not initialized.
INITIALIZED // All components are fully initialized.
};
@@ -1063,6 +1094,8 @@
void PropagatePendingExceptionToExternalTryCatch();
+ void InitializeDebugger();
+
int stack_trace_nesting_level_;
StringStream* incomplete_message_;
// The preallocated memory thread singleton.
@@ -1076,6 +1109,8 @@
Counters* counters_;
CodeRange* code_range_;
Mutex* break_access_;
+ Atomic32 debugger_initialized_;
+ Mutex* debugger_access_;
Heap heap_;
Logger* logger_;
StackGuard stack_guard_;
@@ -1165,6 +1200,7 @@
friend class Simulator;
friend class StackGuard;
friend class ThreadId;
+ friend class TestMemoryAllocatorScope;
friend class v8::Isolate;
friend class v8::Locker;
friend class v8::Unlocker;
diff --git a/src/json-parser.h b/src/json-parser.h
index 0c01461..68eab65 100644
--- a/src/json-parser.h
+++ b/src/json-parser.h
@@ -166,7 +166,8 @@
template <bool seq_ascii>
Handle<Object> JsonParser<seq_ascii>::ParseJson(Handle<String> source) {
isolate_ = source->map()->isolate();
- source_ = Handle<String>(source->TryFlattenGetString());
+ FlattenString(source);
+ source_ = source;
source_length_ = source_->length();
// Optimized fast case where we only have ASCII characters.
diff --git a/src/json.js b/src/json.js
index 6c984a1..8fd410f 100644
--- a/src/json.js
+++ b/src/json.js
@@ -237,7 +237,7 @@
}
}
stack.pop();
- builder.push("]");
+ builder.push("]");
}
diff --git a/src/jsregexp.cc b/src/jsregexp.cc
index bc47df8..4ca83a4 100644
--- a/src/jsregexp.cc
+++ b/src/jsregexp.cc
@@ -212,19 +212,7 @@
RegExpImpl::SetCapture(array, 1, to);
}
- /* template <typename SubjectChar>, typename PatternChar>
-static int ReStringMatch(Vector<const SubjectChar> sub_vector,
- Vector<const PatternChar> pat_vector,
- int start_index) {
- int pattern_length = pat_vector.length();
- if (pattern_length == 0) return start_index;
-
- int subject_length = sub_vector.length();
- if (start_index + pattern_length > subject_length) return -1;
- return SearchString(sub_vector, pat_vector, start_index);
-}
- */
Handle<Object> RegExpImpl::AtomExec(Handle<JSRegExp> re,
Handle<String> subject,
int index,
@@ -236,38 +224,41 @@
if (!subject->IsFlat()) FlattenString(subject);
AssertNoAllocation no_heap_allocation; // ensure vectors stay valid
- // Extract flattened substrings of cons strings before determining asciiness.
- String* seq_sub = *subject;
- if (seq_sub->IsConsString()) seq_sub = ConsString::cast(seq_sub)->first();
String* needle = String::cast(re->DataAt(JSRegExp::kAtomPatternIndex));
int needle_len = needle->length();
+ ASSERT(needle->IsFlat());
if (needle_len != 0) {
- if (index + needle_len > subject->length())
- return isolate->factory()->null_value();
+ if (index + needle_len > subject->length()) {
+ return isolate->factory()->null_value();
+ }
+ String::FlatContent needle_content = needle->GetFlatContent();
+ String::FlatContent subject_content = subject->GetFlatContent();
+ ASSERT(needle_content.IsFlat());
+ ASSERT(subject_content.IsFlat());
// dispatch on type of strings
- index = (needle->IsAsciiRepresentation()
- ? (seq_sub->IsAsciiRepresentation()
+ index = (needle_content.IsAscii()
+ ? (subject_content.IsAscii()
? SearchString(isolate,
- seq_sub->ToAsciiVector(),
- needle->ToAsciiVector(),
+ subject_content.ToAsciiVector(),
+ needle_content.ToAsciiVector(),
index)
: SearchString(isolate,
- seq_sub->ToUC16Vector(),
- needle->ToAsciiVector(),
+ subject_content.ToUC16Vector(),
+ needle_content.ToAsciiVector(),
index))
- : (seq_sub->IsAsciiRepresentation()
+ : (subject_content.IsAscii()
? SearchString(isolate,
- seq_sub->ToAsciiVector(),
- needle->ToUC16Vector(),
+ subject_content.ToAsciiVector(),
+ needle_content.ToUC16Vector(),
index)
: SearchString(isolate,
- seq_sub->ToUC16Vector(),
- needle->ToUC16Vector(),
+ subject_content.ToUC16Vector(),
+ needle_content.ToUC16Vector(),
index)));
- if (index == -1) return FACTORY->null_value();
+ if (index == -1) return isolate->factory()->null_value();
}
ASSERT(last_match_info->HasFastElements());
@@ -355,10 +346,7 @@
JSRegExp::Flags flags = re->GetFlags();
Handle<String> pattern(re->Pattern());
- if (!pattern->IsFlat()) {
- FlattenString(pattern);
- }
-
+ if (!pattern->IsFlat()) FlattenString(pattern);
RegExpCompileData compile_data;
FlatStringReader reader(isolate, pattern);
if (!RegExpParser::ParseRegExp(&reader, flags.is_multiline(),
@@ -442,22 +430,12 @@
int RegExpImpl::IrregexpPrepare(Handle<JSRegExp> regexp,
Handle<String> subject) {
- if (!subject->IsFlat()) {
- FlattenString(subject);
- }
+ if (!subject->IsFlat()) FlattenString(subject);
+
// Check the asciiness of the underlying storage.
- bool is_ascii;
- {
- AssertNoAllocation no_gc;
- String* sequential_string = *subject;
- if (subject->IsConsString()) {
- sequential_string = ConsString::cast(*subject)->first();
- }
- is_ascii = sequential_string->IsAsciiRepresentation();
- }
- if (!EnsureCompiledIrregexp(regexp, is_ascii)) {
- return -1;
- }
+ bool is_ascii = subject->IsAsciiRepresentationUnderneath();
+ if (!EnsureCompiledIrregexp(regexp, is_ascii)) return -1;
+
#ifdef V8_INTERPRETED_REGEXP
// Byte-code regexp needs space allocated for all its registers.
return IrregexpNumberOfRegisters(FixedArray::cast(regexp->data()));
@@ -482,15 +460,11 @@
ASSERT(index <= subject->length());
ASSERT(subject->IsFlat());
- // A flat ASCII string might have a two-byte first part.
- if (subject->IsConsString()) {
- subject = Handle<String>(ConsString::cast(*subject)->first(), isolate);
- }
+ bool is_ascii = subject->IsAsciiRepresentationUnderneath();
#ifndef V8_INTERPRETED_REGEXP
ASSERT(output.length() >= (IrregexpNumberOfCaptures(*irregexp) + 1) * 2);
do {
- bool is_ascii = subject->IsAsciiRepresentation();
EnsureCompiledIrregexp(regexp, is_ascii);
Handle<Code> code(IrregexpNativeCode(*irregexp, is_ascii), isolate);
NativeRegExpMacroAssembler::Result res =
@@ -518,13 +492,13 @@
// being internal and external, and even between being ASCII and UC16,
// but the characters are always the same).
IrregexpPrepare(regexp, subject);
+ is_ascii = subject->IsAsciiRepresentationUnderneath();
} while (true);
UNREACHABLE();
return RE_EXCEPTION;
#else // V8_INTERPRETED_REGEXP
ASSERT(output.length() >= IrregexpNumberOfRegisters(*irregexp));
- bool is_ascii = subject->IsAsciiRepresentation();
// We must have done EnsureCompiledIrregexp, so we can get the number of
// registers.
int* register_vector = output.start();
diff --git a/src/liveobjectlist.cc b/src/liveobjectlist.cc
index e382a06..451a28a 100644
--- a/src/liveobjectlist.cc
+++ b/src/liveobjectlist.cc
@@ -36,11 +36,12 @@
#include "global-handles.h"
#include "heap.h"
#include "inspector.h"
+#include "isolate.h"
#include "list-inl.h"
#include "liveobjectlist-inl.h"
#include "string-stream.h"
-#include "top.h"
#include "v8utils.h"
+#include "v8conversions.h"
namespace v8 {
namespace internal {
@@ -109,7 +110,7 @@
\
v(Context, "meta: Context") \
v(ByteArray, "meta: ByteArray") \
- v(PixelArray, "meta: PixelArray") \
+ v(ExternalPixelArray, "meta: PixelArray") \
v(ExternalArray, "meta: ExternalArray") \
v(FixedArray, "meta: FixedArray") \
v(String, "String") \
@@ -211,8 +212,9 @@
static bool InSpace(AllocationSpace space, HeapObject *heap_obj) {
+ Heap* heap = ISOLATE->heap();
if (space != LO_SPACE) {
- return Heap::InSpace(heap_obj, space);
+ return heap->InSpace(heap_obj, space);
}
// This is an optimization to speed up the check for an object in the LO
@@ -224,11 +226,11 @@
int first_space = static_cast<int>(FIRST_SPACE);
int last_space = static_cast<int>(LO_SPACE);
for (int sp = first_space; sp < last_space; sp++) {
- if (Heap::InSpace(heap_obj, static_cast<AllocationSpace>(sp))) {
+ if (heap->InSpace(heap_obj, static_cast<AllocationSpace>(sp))) {
return false;
}
}
- SLOW_ASSERT(Heap::InSpace(heap_obj, LO_SPACE));
+ SLOW_ASSERT(heap->InSpace(heap_obj, LO_SPACE));
return true;
}
@@ -285,7 +287,7 @@
void LolFilter::InitTypeFilter(Handle<JSObject> filter_obj) {
- Handle<String> type_sym = Factory::LookupAsciiSymbol("type");
+ Handle<String> type_sym = FACTORY->LookupAsciiSymbol("type");
MaybeObject* maybe_result = filter_obj->GetProperty(*type_sym);
Object* type_obj;
if (maybe_result->ToObject(&type_obj)) {
@@ -301,7 +303,7 @@
void LolFilter::InitSpaceFilter(Handle<JSObject> filter_obj) {
- Handle<String> space_sym = Factory::LookupAsciiSymbol("space");
+ Handle<String> space_sym = FACTORY->LookupAsciiSymbol("space");
MaybeObject* maybe_result = filter_obj->GetProperty(*space_sym);
Object* space_obj;
if (maybe_result->ToObject(&space_obj)) {
@@ -317,7 +319,7 @@
void LolFilter::InitPropertyFilter(Handle<JSObject> filter_obj) {
- Handle<String> prop_sym = Factory::LookupAsciiSymbol("prop");
+ Handle<String> prop_sym = FACTORY->LookupAsciiSymbol("prop");
MaybeObject* maybe_result = filter_obj->GetProperty(*prop_sym);
Object* prop_obj;
if (maybe_result->ToObject(&prop_obj)) {
@@ -571,7 +573,9 @@
Handle<JSObject> detail,
Handle<String> desc,
Handle<Object> error) {
- detail = Factory::NewJSObject(Top::object_function());
+ Isolate* isolate = Isolate::Current();
+ Factory* factory = isolate->factory();
+ detail = factory->NewJSObject(isolate->object_function());
if (detail->IsFailure()) {
error = detail;
return false;
@@ -586,7 +590,7 @@
desc_str = buffer;
size = obj->Size();
}
- desc = Factory::NewStringFromAscii(CStrVector(desc_str));
+ desc = factory->NewStringFromAscii(CStrVector(desc_str));
if (desc->IsFailure()) {
error = desc;
return false;
@@ -663,10 +667,13 @@
int index = 0;
int count = 0;
+ Isolate* isolate = Isolate::Current();
+ Factory* factory = isolate->factory();
+
// Prefetch some needed symbols.
- Handle<String> id_sym = Factory::LookupAsciiSymbol("id");
- Handle<String> desc_sym = Factory::LookupAsciiSymbol("desc");
- Handle<String> size_sym = Factory::LookupAsciiSymbol("size");
+ Handle<String> id_sym = factory->LookupAsciiSymbol("id");
+ Handle<String> desc_sym = factory->LookupAsciiSymbol("desc");
+ Handle<String> size_sym = factory->LookupAsciiSymbol("size");
// Fill the array with the lol object details.
Handle<JSObject> detail;
@@ -1089,7 +1096,9 @@
// Captures a current snapshot of all objects in the heap.
MaybeObject* LiveObjectList::Capture() {
- HandleScope scope;
+ Isolate* isolate = Isolate::Current();
+ Factory* factory = isolate->factory();
+ HandleScope scope(isolate);
// Count the number of objects in the heap.
int total_count = CountHeapObjects();
@@ -1139,11 +1148,11 @@
#endif
}
- Handle<String> id_sym = Factory::LookupAsciiSymbol("id");
- Handle<String> count_sym = Factory::LookupAsciiSymbol("count");
- Handle<String> size_sym = Factory::LookupAsciiSymbol("size");
+ Handle<String> id_sym = factory->LookupAsciiSymbol("id");
+ Handle<String> count_sym = factory->LookupAsciiSymbol("count");
+ Handle<String> size_sym = factory->LookupAsciiSymbol("size");
- Handle<JSObject> result = Factory::NewJSObject(Top::object_function());
+ Handle<JSObject> result = factory->NewJSObject(isolate->object_function());
if (result->IsFailure()) return Object::cast(*result);
{ MaybeObject* maybe_result = result->SetProperty(*id_sym,
@@ -1259,7 +1268,10 @@
int start,
int dump_limit,
LolFilter* filter) {
- HandleScope scope;
+ Isolate* isolate = Isolate::Current();
+ Factory* factory = isolate->factory();
+
+ HandleScope scope(isolate);
// Calculate the number of entries of the dump.
int count = -1;
@@ -1277,7 +1289,7 @@
}
// Allocate an array to hold the result.
- Handle<FixedArray> elements_arr = Factory::NewFixedArray(dump_limit);
+ Handle<FixedArray> elements_arr = factory->NewFixedArray(dump_limit);
if (elements_arr->IsFailure()) return Object::cast(*elements_arr);
// Fill in the dump.
@@ -1292,11 +1304,11 @@
MaybeObject* maybe_result;
// Allocate the result body.
- Handle<JSObject> body = Factory::NewJSObject(Top::object_function());
+ Handle<JSObject> body = factory->NewJSObject(isolate->object_function());
if (body->IsFailure()) return Object::cast(*body);
// Set the updated body.count.
- Handle<String> count_sym = Factory::LookupAsciiSymbol("count");
+ Handle<String> count_sym = factory->LookupAsciiSymbol("count");
maybe_result = body->SetProperty(*count_sym,
Smi::FromInt(count),
NONE,
@@ -1305,7 +1317,7 @@
// Set the updated body.size if appropriate.
if (size >= 0) {
- Handle<String> size_sym = Factory::LookupAsciiSymbol("size");
+ Handle<String> size_sym = factory->LookupAsciiSymbol("size");
maybe_result = body->SetProperty(*size_sym,
Smi::FromInt(size),
NONE,
@@ -1314,7 +1326,7 @@
}
// Set body.first_index.
- Handle<String> first_sym = Factory::LookupAsciiSymbol("first_index");
+ Handle<String> first_sym = factory->LookupAsciiSymbol("first_index");
maybe_result = body->SetProperty(*first_sym,
Smi::FromInt(start),
NONE,
@@ -1322,12 +1334,12 @@
if (maybe_result->IsFailure()) return maybe_result;
// Allocate the JSArray of the elements.
- Handle<JSObject> elements = Factory::NewJSObject(Top::array_function());
+ Handle<JSObject> elements = factory->NewJSObject(isolate->array_function());
if (elements->IsFailure()) return Object::cast(*elements);
Handle<JSArray>::cast(elements)->SetContent(*elements_arr);
// Set body.elements.
- Handle<String> elements_sym = Factory::LookupAsciiSymbol("elements");
+ Handle<String> elements_sym = factory->LookupAsciiSymbol("elements");
maybe_result = body->SetProperty(*elements_sym,
*elements,
NONE,
@@ -1381,6 +1393,9 @@
LiveObjectSummary summary(filter);
writer->Write(&summary);
+ Isolate* isolate = Isolate::Current();
+ Factory* factory = isolate->factory();
+
// The result body will look like this:
// body: {
// count: <total_count>,
@@ -1398,21 +1413,21 @@
// }
// Prefetch some needed symbols.
- Handle<String> desc_sym = Factory::LookupAsciiSymbol("desc");
- Handle<String> count_sym = Factory::LookupAsciiSymbol("count");
- Handle<String> size_sym = Factory::LookupAsciiSymbol("size");
- Handle<String> summary_sym = Factory::LookupAsciiSymbol("summary");
+ Handle<String> desc_sym = factory->LookupAsciiSymbol("desc");
+ Handle<String> count_sym = factory->LookupAsciiSymbol("count");
+ Handle<String> size_sym = factory->LookupAsciiSymbol("size");
+ Handle<String> summary_sym = factory->LookupAsciiSymbol("summary");
// Allocate the summary array.
int entries_count = summary.GetNumberOfEntries();
Handle<FixedArray> summary_arr =
- Factory::NewFixedArray(entries_count);
+ factory->NewFixedArray(entries_count);
if (summary_arr->IsFailure()) return Object::cast(*summary_arr);
int idx = 0;
for (int i = 0; i < LiveObjectSummary::kNumberOfEntries; i++) {
// Allocate the summary record.
- Handle<JSObject> detail = Factory::NewJSObject(Top::object_function());
+ Handle<JSObject> detail = factory->NewJSObject(isolate->object_function());
if (detail->IsFailure()) return Object::cast(*detail);
// Fill in the summary record.
@@ -1420,7 +1435,7 @@
int count = summary.Count(type);
if (count) {
const char* desc_cstr = GetObjectTypeDesc(type);
- Handle<String> desc = Factory::LookupAsciiSymbol(desc_cstr);
+ Handle<String> desc = factory->LookupAsciiSymbol(desc_cstr);
int size = summary.Size(type);
maybe_result = detail->SetProperty(*desc_sym,
@@ -1444,12 +1459,13 @@
}
// Wrap the summary fixed array in a JS array.
- Handle<JSObject> summary_obj = Factory::NewJSObject(Top::array_function());
+ Handle<JSObject> summary_obj =
+ factory->NewJSObject(isolate->array_function());
if (summary_obj->IsFailure()) return Object::cast(*summary_obj);
Handle<JSArray>::cast(summary_obj)->SetContent(*summary_arr);
// Create the body object.
- Handle<JSObject> body = Factory::NewJSObject(Top::object_function());
+ Handle<JSObject> body = factory->NewJSObject(isolate->object_function());
if (body->IsFailure()) return Object::cast(*body);
// Fill out the body object.
@@ -1470,9 +1486,9 @@
if (is_tracking_roots) {
int found_root = summary.found_root();
int found_weak_root = summary.found_weak_root();
- Handle<String> root_sym = Factory::LookupAsciiSymbol("found_root");
+ Handle<String> root_sym = factory->LookupAsciiSymbol("found_root");
Handle<String> weak_root_sym =
- Factory::LookupAsciiSymbol("found_weak_root");
+ factory->LookupAsciiSymbol("found_weak_root");
maybe_result = body->SetProperty(*root_sym,
Smi::FromInt(found_root),
NONE,
@@ -1499,7 +1515,10 @@
// Note: only dumps the section starting at start_idx and only up to
// dump_limit entries.
MaybeObject* LiveObjectList::Info(int start_idx, int dump_limit) {
- HandleScope scope;
+ Isolate* isolate = Isolate::Current();
+ Factory* factory = isolate->factory();
+
+ HandleScope scope(isolate);
MaybeObject* maybe_result;
int total_count = LiveObjectList::list_count();
@@ -1519,13 +1538,13 @@
}
// Allocate an array to hold the result.
- Handle<FixedArray> list = Factory::NewFixedArray(dump_count);
+ Handle<FixedArray> list = factory->NewFixedArray(dump_count);
if (list->IsFailure()) return Object::cast(*list);
// Prefetch some needed symbols.
- Handle<String> id_sym = Factory::LookupAsciiSymbol("id");
- Handle<String> count_sym = Factory::LookupAsciiSymbol("count");
- Handle<String> size_sym = Factory::LookupAsciiSymbol("size");
+ Handle<String> id_sym = factory->LookupAsciiSymbol("id");
+ Handle<String> count_sym = factory->LookupAsciiSymbol("count");
+ Handle<String> size_sym = factory->LookupAsciiSymbol("size");
// Fill the array with the lol details.
int idx = 0;
@@ -1543,7 +1562,8 @@
int size;
count = lol->GetTotalObjCountAndSize(&size);
- Handle<JSObject> detail = Factory::NewJSObject(Top::object_function());
+ Handle<JSObject> detail =
+ factory->NewJSObject(isolate->object_function());
if (detail->IsFailure()) return Object::cast(*detail);
maybe_result = detail->SetProperty(*id_sym,
@@ -1568,10 +1588,10 @@
}
// Return the result as a JS array.
- Handle<JSObject> lols = Factory::NewJSObject(Top::array_function());
+ Handle<JSObject> lols = factory->NewJSObject(isolate->array_function());
Handle<JSArray>::cast(lols)->SetContent(*list);
- Handle<JSObject> result = Factory::NewJSObject(Top::object_function());
+ Handle<JSObject> result = factory->NewJSObject(isolate->object_function());
if (result->IsFailure()) return Object::cast(*result);
maybe_result = result->SetProperty(*count_sym,
@@ -1580,14 +1600,14 @@
kNonStrictMode);
if (maybe_result->IsFailure()) return maybe_result;
- Handle<String> first_sym = Factory::LookupAsciiSymbol("first_index");
+ Handle<String> first_sym = factory->LookupAsciiSymbol("first_index");
maybe_result = result->SetProperty(*first_sym,
Smi::FromInt(start_idx),
NONE,
kNonStrictMode);
if (maybe_result->IsFailure()) return maybe_result;
- Handle<String> lists_sym = Factory::LookupAsciiSymbol("lists");
+ Handle<String> lists_sym = factory->LookupAsciiSymbol("lists");
maybe_result = result->SetProperty(*lists_sym,
*lols,
NONE,
@@ -1618,7 +1638,7 @@
if (element != NULL) {
return Object::cast(element->obj_);
}
- return Heap::undefined_value();
+ return HEAP->undefined_value();
}
@@ -1639,8 +1659,11 @@
SmartPointer<char> addr_str =
address->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL);
+ Isolate* isolate = Isolate::Current();
+
// Extract the address value from the string.
- int value = static_cast<int>(StringToInt(*address, 16));
+ int value =
+ static_cast<int>(StringToInt(isolate->unicode_cache(), *address, 16));
Object* obj = reinterpret_cast<Object*>(value);
return Smi::FromInt(GetObjId(obj));
}
@@ -1760,10 +1783,13 @@
Handle<String> desc;
Handle<HeapObject> retainer;
+ Isolate* isolate = Isolate::Current();
+ Factory* factory = isolate->factory();
+
// Prefetch some needed symbols.
- Handle<String> id_sym = Factory::LookupAsciiSymbol("id");
- Handle<String> desc_sym = Factory::LookupAsciiSymbol("desc");
- Handle<String> size_sym = Factory::LookupAsciiSymbol("size");
+ Handle<String> id_sym = factory->LookupAsciiSymbol("id");
+ Handle<String> desc_sym = factory->LookupAsciiSymbol("desc");
+ Handle<String> size_sym = factory->LookupAsciiSymbol("size");
NoHandleAllocation ha;
int count = 0;
@@ -1774,7 +1800,7 @@
// Iterate roots.
LolVisitor lol_visitor(*target, target);
- Heap::IterateStrongRoots(&lol_visitor, VISIT_ALL);
+ isolate->heap()->IterateStrongRoots(&lol_visitor, VISIT_ALL);
if (!AddRootRetainerIfFound(lol_visitor,
filter,
summary,
@@ -1794,7 +1820,7 @@
}
lol_visitor.reset();
- Heap::IterateWeakRoots(&lol_visitor, VISIT_ALL);
+ isolate->heap()->IterateWeakRoots(&lol_visitor, VISIT_ALL);
if (!AddRootRetainerIfFound(lol_visitor,
filter,
summary,
@@ -1903,11 +1929,15 @@
int start,
int dump_limit,
Handle<JSObject> filter_obj) {
- HandleScope scope;
+ Isolate* isolate = Isolate::Current();
+ Factory* factory = isolate->factory();
+ Heap* heap = isolate->heap();
+
+ HandleScope scope(isolate);
// Get the target object.
HeapObject* heap_obj = HeapObject::cast(GetObj(obj_id));
- if (heap_obj == Heap::undefined_value()) {
+ if (heap_obj == heap->undefined_value()) {
return heap_obj;
}
@@ -1915,7 +1945,7 @@
// Get the constructor function for context extension and arguments array.
JSObject* arguments_boilerplate =
- Top::context()->global_context()->arguments_boilerplate();
+ isolate->context()->global_context()->arguments_boilerplate();
JSFunction* arguments_function =
JSFunction::cast(arguments_boilerplate->map()->constructor());
@@ -1937,7 +1967,7 @@
// Set body.id.
Handle<JSObject> body = Handle<JSObject>(JSObject::cast(body_obj));
- Handle<String> id_sym = Factory::LookupAsciiSymbol("id");
+ Handle<String> id_sym = factory->LookupAsciiSymbol("id");
maybe_result = body->SetProperty(*id_sym,
Smi::FromInt(obj_id),
NONE,
@@ -1952,13 +1982,17 @@
Object* LiveObjectList::PrintObj(int obj_id) {
Object* obj = GetObj(obj_id);
if (!obj) {
- return Heap::undefined_value();
+ return HEAP->undefined_value();
}
EmbeddedVector<char, 128> temp_filename;
static int temp_count = 0;
const char* path_prefix = ".";
+ Isolate* isolate = Isolate::Current();
+ Factory* factory = isolate->factory();
+ Heap* heap = isolate->heap();
+
if (FLAG_lol_workdir) {
path_prefix = FLAG_lol_workdir;
}
@@ -1987,13 +2021,13 @@
if (resource->exists() && !resource->is_empty()) {
ASSERT(resource->IsAscii());
Handle<String> dump_string =
- Factory::NewExternalStringFromAscii(resource);
- ExternalStringTable::AddString(*dump_string);
+ factory->NewExternalStringFromAscii(resource);
+ heap->external_string_table()->AddString(*dump_string);
return *dump_string;
} else {
delete resource;
}
- return Heap::undefined_value();
+ return HEAP->undefined_value();
}
@@ -2081,6 +2115,10 @@
FILE* f = OS::FOpen(temp_filename.start(), "w+");
+ Isolate* isolate = Isolate::Current();
+ Factory* factory = isolate->factory();
+ Heap* heap = isolate->heap();
+
// Save the previous verbosity.
bool prev_verbosity = FLAG_use_verbose_printer;
FLAG_use_verbose_printer = false;
@@ -2096,15 +2134,14 @@
// Check for ObjectGroups that references this object.
// TODO(mlam): refactor this to be more modular.
{
- List<ObjectGroup*>* groups = GlobalHandles::ObjectGroups();
+ List<ObjectGroup*>* groups = isolate->global_handles()->object_groups();
for (int i = 0; i < groups->length(); i++) {
ObjectGroup* group = groups->at(i);
if (group == NULL) continue;
bool found_group = false;
- List<Object**>& objects = group->objects_;
- for (int j = 0; j < objects.length(); j++) {
- Object* object = *objects[j];
+ for (size_t j = 0; j < group->length_; j++) {
+ Object* object = *(group->objects_[j]);
HeapObject* hobj = HeapObject::cast(object);
if (obj2 == hobj) {
found_group = true;
@@ -2117,8 +2154,8 @@
"obj %p is a member of object group %p {\n",
reinterpret_cast<void*>(obj2),
reinterpret_cast<void*>(group));
- for (int j = 0; j < objects.length(); j++) {
- Object* object = *objects[j];
+ for (size_t j = 0; j < group->length_; j++) {
+ Object* object = *(group->objects_[j]);
if (!object->IsHeapObject()) continue;
HeapObject* hobj = HeapObject::cast(object);
@@ -2143,12 +2180,12 @@
}
PrintF(f, "path from roots to obj %p\n", reinterpret_cast<void*>(obj2));
- Heap::IterateRoots(&tracer, VISIT_ONLY_STRONG);
+ heap->IterateRoots(&tracer, VISIT_ONLY_STRONG);
found = tracer.found();
if (!found) {
PrintF(f, " No paths found. Checking symbol tables ...\n");
- SymbolTable* symbol_table = Heap::raw_unchecked_symbol_table();
+ SymbolTable* symbol_table = HEAP->raw_unchecked_symbol_table();
tracer.VisitPointers(reinterpret_cast<Object**>(&symbol_table),
reinterpret_cast<Object**>(&symbol_table)+1);
found = tracer.found();
@@ -2161,7 +2198,7 @@
if (!found) {
PrintF(f, " No paths found. Checking weak roots ...\n");
// Check weak refs next.
- GlobalHandles::IterateWeakRoots(&tracer);
+ isolate->global_handles()->IterateWeakRoots(&tracer);
found = tracer.found();
}
@@ -2191,13 +2228,13 @@
if (resource->exists() && !resource->is_empty()) {
ASSERT(resource->IsAscii());
Handle<String> path_string =
- Factory::NewExternalStringFromAscii(resource);
- ExternalStringTable::AddString(*path_string);
+ factory->NewExternalStringFromAscii(resource);
+ heap->external_string_table()->AddString(*path_string);
return *path_string;
} else {
delete resource;
}
- return Heap::undefined_value();
+ return heap->undefined_value();
}
@@ -2210,13 +2247,13 @@
HeapObject* obj1 = NULL;
if (obj_id1 != 0) {
obj1 = HeapObject::cast(GetObj(obj_id1));
- if (obj1 == Heap::undefined_value()) {
+ if (obj1 == HEAP->undefined_value()) {
return obj1;
}
}
HeapObject* obj2 = HeapObject::cast(GetObj(obj_id2));
- if (obj2 == Heap::undefined_value()) {
+ if (obj2 == HEAP->undefined_value()) {
return obj2;
}
@@ -2570,12 +2607,13 @@
void LiveObjectList::VerifyNotInFromSpace() {
OS::Print("VerifyNotInFromSpace() ...\n");
LolIterator it(NULL, last());
+ Heap* heap = ISOLATE->heap();
int i = 0;
for (it.Init(); !it.Done(); it.Next()) {
HeapObject* heap_obj = it.Obj();
- if (Heap::InFromSpace(heap_obj)) {
+ if (heap->InFromSpace(heap_obj)) {
OS::Print(" ERROR: VerifyNotInFromSpace: [%d] obj %p in From space %p\n",
- i++, heap_obj, Heap::new_space()->FromSpaceLow());
+ i++, heap_obj, heap->new_space()->FromSpaceLow());
}
}
}
diff --git a/src/liveobjectlist.h b/src/liveobjectlist.h
index 23e418d..542482d 100644
--- a/src/liveobjectlist.h
+++ b/src/liveobjectlist.h
@@ -237,10 +237,10 @@
// to live new space objects, and not actually keep them alive.
void UpdatePointer(Object** p) {
Object* object = *p;
- if (!Heap::InNewSpace(object)) return;
+ if (!HEAP->InNewSpace(object)) return;
HeapObject* heap_obj = HeapObject::cast(object);
- ASSERT(Heap::InFromSpace(heap_obj));
+ ASSERT(HEAP->InFromSpace(heap_obj));
// We use the first word (where the map pointer usually is) of a heap
// object to record the forwarding pointer. A forwarding pointer can
diff --git a/src/log-utils.cc b/src/log-utils.cc
index 2d1ce23..27e654d 100644
--- a/src/log-utils.cc
+++ b/src/log-utils.cc
@@ -34,7 +34,7 @@
namespace internal {
-const char* Log::kLogToTemporaryFile = "&";
+const char* const Log::kLogToTemporaryFile = "&";
Log::Log(Logger* logger)
@@ -86,8 +86,6 @@
if (open_log_file) {
if (strcmp(FLAG_logfile, "-") == 0) {
OpenStdout();
- } else if (strcmp(FLAG_logfile, "*") == 0) {
- // Does nothing for now. Will be removed.
} else if (strcmp(FLAG_logfile, kLogToTemporaryFile) == 0) {
OpenTemporaryFile();
} else {
diff --git a/src/log-utils.h b/src/log-utils.h
index d336d71..2b20a01 100644
--- a/src/log-utils.h
+++ b/src/log-utils.h
@@ -59,7 +59,7 @@
// This mode is only used in tests, as temporary files are automatically
// deleted on close and thus can't be accessed afterwards.
- static const char* kLogToTemporaryFile;
+ static const char* const kLogToTemporaryFile;
private:
explicit Log(Logger* logger);
diff --git a/src/log.cc b/src/log.cc
index 04fd22e..dedf7e9 100644
--- a/src/log.cc
+++ b/src/log.cc
@@ -1400,6 +1400,7 @@
case Code::UNARY_OP_IC: // fall through
case Code::BINARY_OP_IC: // fall through
case Code::COMPARE_IC: // fall through
+ case Code::TO_BOOLEAN_IC: // fall through
case Code::STUB:
description =
CodeStub::MajorName(CodeStub::GetMajorKey(code_object), true);
diff --git a/src/macros.py b/src/macros.py
index fc08cb1..5ba7ac3 100644
--- a/src/macros.py
+++ b/src/macros.py
@@ -44,7 +44,7 @@
const kApiConstructorOffset = 2;
const kApiPrototypeTemplateOffset = 5;
const kApiParentTemplateOffset = 6;
-const kApiPrototypeAttributesOffset = 15;
+const kApiFlagOffset = 14;
const NO_HINT = 0;
const NUMBER_HINT = 1;
@@ -65,6 +65,7 @@
# For apinatives.js
const kUninitialized = -1;
+const kReadOnlyPrototypeBit = 3; # For FunctionTemplateInfo, matches objects.h
# Note: kDayZeroInJulianDay = ToJulianDay(1970, 0, 1).
const kInvalidDate = 'Invalid Date';
diff --git a/src/mark-compact.cc b/src/mark-compact.cc
index 0bf8286..3e4a617 100644
--- a/src/mark-compact.cc
+++ b/src/mark-compact.cc
@@ -64,13 +64,15 @@
live_bytes_(0),
#endif
heap_(NULL),
- code_flusher_(NULL) { }
+ code_flusher_(NULL),
+ encountered_weak_maps_(NULL) { }
void MarkCompactCollector::CollectGarbage() {
// Make sure that Prepare() has been called. The individual steps below will
// update the state as they proceed.
ASSERT(state_ == PREPARE_GC);
+ ASSERT(encountered_weak_maps_ == Smi::FromInt(0));
// Prepare has selected whether to compact the old generation or not.
// Tell the tracer.
@@ -80,6 +82,8 @@
if (FLAG_collect_maps) ClearNonLiveTransitions();
+ ClearWeakMaps();
+
SweepLargeObjectSpace();
if (IsCompacting()) {
@@ -390,6 +394,10 @@
ConsString::BodyDescriptor,
void>::Visit);
+ table_.Register(kVisitSlicedString,
+ &FixedBodyVisitor<StaticMarkingVisitor,
+ SlicedString::BodyDescriptor,
+ void>::Visit);
table_.Register(kVisitFixedArray,
&FlexibleBodyVisitor<StaticMarkingVisitor,
@@ -407,6 +415,8 @@
table_.Register(kVisitSeqAsciiString, &DataObjectVisitor::Visit);
table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);
+ table_.Register(kVisitJSWeakMap, &VisitJSWeakMap);
+
table_.Register(kVisitOddball,
&FixedBodyVisitor<StaticMarkingVisitor,
Oddball::BodyDescriptor,
@@ -556,6 +566,34 @@
StructBodyDescriptor,
void> StructObjectVisitor;
+ static void VisitJSWeakMap(Map* map, HeapObject* object) {
+ MarkCompactCollector* collector = map->heap()->mark_compact_collector();
+ JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(object);
+
+ // Enqueue weak map in linked list of encountered weak maps.
+ ASSERT(weak_map->next() == Smi::FromInt(0));
+ weak_map->set_next(collector->encountered_weak_maps());
+ collector->set_encountered_weak_maps(weak_map);
+
+ // Skip visiting the backing hash table containing the mappings.
+ int object_size = JSWeakMap::BodyDescriptor::SizeOf(map, object);
+ BodyVisitorBase<StaticMarkingVisitor>::IteratePointers(
+ map->heap(),
+ object,
+ JSWeakMap::BodyDescriptor::kStartOffset,
+ JSWeakMap::kTableOffset);
+ BodyVisitorBase<StaticMarkingVisitor>::IteratePointers(
+ map->heap(),
+ object,
+ JSWeakMap::kTableOffset + kPointerSize,
+ object_size);
+
+ // Mark the backing hash table without pushing it on the marking stack.
+ ASSERT(!weak_map->unchecked_table()->IsMarked());
+ ASSERT(weak_map->unchecked_table()->map()->IsMarked());
+ collector->SetMark(weak_map->unchecked_table());
+ }
+
static void VisitCode(Map* map, HeapObject* object) {
reinterpret_cast<Code*>(object)->CodeIterateBody<StaticMarkingVisitor>(
map->heap());
@@ -1369,20 +1407,26 @@
// marking stack have been marked, or are overflowed in the heap.
void MarkCompactCollector::EmptyMarkingStack() {
while (!marking_stack_.is_empty()) {
- HeapObject* object = marking_stack_.Pop();
- ASSERT(object->IsHeapObject());
- ASSERT(heap()->Contains(object));
- ASSERT(object->IsMarked());
- ASSERT(!object->IsOverflowed());
+ while (!marking_stack_.is_empty()) {
+ HeapObject* object = marking_stack_.Pop();
+ ASSERT(object->IsHeapObject());
+ ASSERT(heap()->Contains(object));
+ ASSERT(object->IsMarked());
+ ASSERT(!object->IsOverflowed());
- // Because the object is marked, we have to recover the original map
- // pointer and use it to mark the object's body.
- MapWord map_word = object->map_word();
- map_word.ClearMark();
- Map* map = map_word.ToMap();
- MarkObject(map);
+ // Because the object is marked, we have to recover the original map
+ // pointer and use it to mark the object's body.
+ MapWord map_word = object->map_word();
+ map_word.ClearMark();
+ Map* map = map_word.ToMap();
+ MarkObject(map);
- StaticMarkingVisitor::IterateBody(map, object);
+ StaticMarkingVisitor::IterateBody(map, object);
+ }
+
+ // Process encountered weak maps, mark objects only reachable by those
+ // weak maps and repeat until fix-point is reached.
+ ProcessWeakMaps();
}
}
@@ -1735,6 +1779,45 @@
}
}
+
+void MarkCompactCollector::ProcessWeakMaps() {
+ Object* weak_map_obj = encountered_weak_maps();
+ while (weak_map_obj != Smi::FromInt(0)) {
+ ASSERT(HeapObject::cast(weak_map_obj)->IsMarked());
+ JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(weak_map_obj);
+ ObjectHashTable* table = weak_map->unchecked_table();
+ for (int i = 0; i < table->Capacity(); i++) {
+ if (HeapObject::cast(table->KeyAt(i))->IsMarked()) {
+ Object* value = table->get(table->EntryToValueIndex(i));
+ StaticMarkingVisitor::MarkObjectByPointer(heap(), &value);
+ table->set_unchecked(heap(),
+ table->EntryToValueIndex(i),
+ value,
+ UPDATE_WRITE_BARRIER);
+ }
+ }
+ weak_map_obj = weak_map->next();
+ }
+}
+
+
+void MarkCompactCollector::ClearWeakMaps() {
+ Object* weak_map_obj = encountered_weak_maps();
+ while (weak_map_obj != Smi::FromInt(0)) {
+ ASSERT(HeapObject::cast(weak_map_obj)->IsMarked());
+ JSWeakMap* weak_map = reinterpret_cast<JSWeakMap*>(weak_map_obj);
+ ObjectHashTable* table = weak_map->unchecked_table();
+ for (int i = 0; i < table->Capacity(); i++) {
+ if (!HeapObject::cast(table->KeyAt(i))->IsMarked()) {
+ table->RemoveEntry(i, heap());
+ }
+ }
+ weak_map_obj = weak_map->next();
+ weak_map->set_next(Smi::FromInt(0));
+ }
+ set_encountered_weak_maps(Smi::FromInt(0));
+}
+
// -------------------------------------------------------------------------
// Phase 2: Encode forwarding addresses.
// When compacting, forwarding addresses for objects in old space and map
diff --git a/src/mark-compact.h b/src/mark-compact.h
index 179edba..9b67c8a 100644
--- a/src/mark-compact.h
+++ b/src/mark-compact.h
@@ -193,6 +193,11 @@
inline bool is_code_flushing_enabled() const { return code_flusher_ != NULL; }
void EnableCodeFlushing(bool enable);
+ inline Object* encountered_weak_maps() { return encountered_weak_maps_; }
+ inline void set_encountered_weak_maps(Object* weak_map) {
+ encountered_weak_maps_ = weak_map;
+ }
+
private:
MarkCompactCollector();
~MarkCompactCollector();
@@ -329,6 +334,16 @@
// We replace them with a null descriptor, with the same key.
void ClearNonLiveTransitions();
+ // Mark all values associated with reachable keys in weak maps encountered
+ // so far. This might push new object or even new weak maps onto the
+ // marking stack.
+ void ProcessWeakMaps();
+
+ // After all reachable objects have been marked those weak map entries
+ // with an unreachable key are removed from all encountered weak maps.
+ // The linked list of all encountered weak maps is destroyed.
+ void ClearWeakMaps();
+
// -----------------------------------------------------------------------
// Phase 2: Sweeping to clear mark bits and free non-live objects for
// a non-compacting collection, or else computing and encoding
@@ -499,6 +514,7 @@
Heap* heap_;
MarkingStack marking_stack_;
CodeFlusher* code_flusher_;
+ Object* encountered_weak_maps_;
friend class Heap;
friend class OverflowedObjectsScanner;
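
Note (illustrative only, not part of the patch): the ProcessWeakMaps()/ClearWeakMaps() phases added above give the experimental weakmap.js library its ephemeron-style behavior — a value is kept alive only while its key is otherwise reachable, and entries with dead keys are dropped at the next full mark-compact. A minimal JavaScript sketch of that observable behavior, assuming a build with the experimental WeakMap support from this merge enabled:

    // Sketch of the semantics the new collector phases provide.
    var wm = new WeakMap();
    var key = {};
    wm.set(key, { payload: new Array(1024) });

    // While 'key' is strongly reachable, ProcessWeakMaps() also marks the
    // value, so the payload survives garbage collection.
    // wm.has(key) -> true

    // Once the last strong reference to the key is dropped, the next full GC
    // runs ClearWeakMaps(), which removes the entry; neither the key nor the
    // value is retained by the map alone.
    key = null;
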
diff --git a/src/messages.js b/src/messages.js
index 0b314a4..845ca07 100644
--- a/src/messages.js
+++ b/src/messages.js
@@ -198,17 +198,19 @@
non_extensible_proto: ["%0", " is not extensible"],
handler_non_object: ["Proxy.", "%0", " called with non-object as handler"],
handler_trap_missing: ["Proxy handler ", "%0", " has no '", "%1", "' trap"],
+ handler_trap_must_be_callable: ["Proxy handler ", "%0", " has non-callable '", "%1", "' trap"],
handler_returned_false: ["Proxy handler ", "%0", " returned false for '", "%1", "' trap"],
handler_returned_undefined: ["Proxy handler ", "%0", " returned undefined for '", "%1", "' trap"],
proxy_prop_not_configurable: ["Trap ", "%1", " of proxy handler ", "%0", " returned non-configurable descriptor for property ", "%2"],
proxy_non_object_prop_names: ["Trap ", "%1", " returned non-object ", "%0"],
proxy_repeated_prop_name: ["Trap ", "%1", " returned repeated property name ", "%2"],
+ invalid_weakmap_key: ["Invalid value used as weak map key"],
// RangeError
invalid_array_length: ["Invalid array length"],
stack_overflow: ["Maximum call stack size exceeded"],
// SyntaxError
unable_to_parse: ["Parse error"],
- duplicate_regexp_flag: ["Duplicate RegExp flag ", "%0"],
+ invalid_regexp_flags: ["Invalid flags supplied to RegExp constructor '", "%0", "'"],
invalid_regexp: ["Invalid RegExp pattern /", "%0", "/"],
illegal_break: ["Illegal break statement"],
illegal_continue: ["Illegal continue statement"],
@@ -248,8 +250,9 @@
strict_cannot_assign: ["Cannot assign to read only '", "%0", "' in strict mode"],
strict_poison_pill: ["'caller', 'callee', and 'arguments' properties may not be accessed on strict mode functions or the arguments objects for calls to them"],
strict_caller: ["Illegal access to a strict mode caller function."],
+ unprotected_let: ["Illegal let declaration in unprotected statement context."],
cant_prevent_ext_external_array_elements: ["Cannot prevent extension of an object with external array elements"],
- redef_external_array_element: ["Cannot redefine a property of an object"]
+ redef_external_array_element: ["Cannot redefine a property of an object with external array elements"],
};
}
var message_type = %MessageGetType(message);
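
Note (illustrative only, not part of the patch): the new message entries above correspond to script-visible errors. A short sketch of the expected behavior, assuming the experimental WeakMap support is enabled and the stricter RegExp flag parsing from this release is in effect:

    // New TypeError text for non-object WeakMap keys ("invalid_weakmap_key").
    var wm = new WeakMap();
    try {
      wm.set(42, "x");          // primitive keys are rejected
    } catch (e) {
      // e.message: "Invalid value used as weak map key"
    }

    // "duplicate_regexp_flag" is replaced by the broader "invalid_regexp_flags".
    try {
      new RegExp("a", "gg");    // a repeated flag is now an invalid-flags error
    } catch (e) {
      // e.message: "Invalid flags supplied to RegExp constructor 'gg'"
    }
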
diff --git a/src/mips/assembler-mips.cc b/src/mips/assembler-mips.cc
index 51642e0..28ac557 100644
--- a/src/mips/assembler-mips.cc
+++ b/src/mips/assembler-mips.cc
@@ -780,10 +780,10 @@
void Assembler::next(Label* L) {
ASSERT(L->is_linked());
int link = target_at(L->pos());
- ASSERT(link > 0 || link == kEndOfChain);
if (link == kEndOfChain) {
L->Unuse();
- } else if (link > 0) {
+ } else {
+ ASSERT(link >= 0);
L->link_to(link);
}
}
diff --git a/src/mips/assembler-mips.h b/src/mips/assembler-mips.h
index a16cd80..e5077be 100644
--- a/src/mips/assembler-mips.h
+++ b/src/mips/assembler-mips.h
@@ -127,38 +127,38 @@
const Register no_reg = { -1 };
-const Register zero_reg = { 0 };
-const Register at = { 1 };
-const Register v0 = { 2 };
-const Register v1 = { 3 };
-const Register a0 = { 4 };
+const Register zero_reg = { 0 }; // Always zero.
+const Register at = { 1 }; // at: Reserved for synthetic instructions.
+const Register v0 = { 2 }; // v0, v1: Used when returning multiple values
+const Register v1 = { 3 }; // from subroutines.
+const Register a0 = { 4 }; // a0 - a4: Used to pass non-FP parameters.
const Register a1 = { 5 };
const Register a2 = { 6 };
const Register a3 = { 7 };
-const Register t0 = { 8 };
-const Register t1 = { 9 };
-const Register t2 = { 10 };
+const Register t0 = { 8 }; // t0 - t9: Can be used without reservation, act
+const Register t1 = { 9 }; // as temporary registers and are allowed to
+const Register t2 = { 10 }; // be destroyed by subroutines.
const Register t3 = { 11 };
const Register t4 = { 12 };
const Register t5 = { 13 };
const Register t6 = { 14 };
const Register t7 = { 15 };
-const Register s0 = { 16 };
-const Register s1 = { 17 };
-const Register s2 = { 18 };
-const Register s3 = { 19 };
-const Register s4 = { 20 };
+const Register s0 = { 16 }; // s0 - s7: Subroutine register variables.
+const Register s1 = { 17 }; // Subroutines that write to these registers
+const Register s2 = { 18 }; // must restore their values before exiting so
+const Register s3 = { 19 }; // that the caller can expect the values to be
+const Register s4 = { 20 }; // preserved.
const Register s5 = { 21 };
const Register s6 = { 22 };
const Register s7 = { 23 };
const Register t8 = { 24 };
const Register t9 = { 25 };
-const Register k0 = { 26 };
-const Register k1 = { 27 };
-const Register gp = { 28 };
-const Register sp = { 29 };
-const Register s8_fp = { 30 };
-const Register ra = { 31 };
+const Register k0 = { 26 }; // k0, k1: Reserved for system calls and
+const Register k1 = { 27 }; // interrupt handlers.
+const Register gp = { 28 }; // gp: Reserved.
+const Register sp = { 29 }; // sp: Stack pointer.
+const Register s8_fp = { 30 }; // fp: Frame pointer.
+const Register ra = { 31 }; // ra: Return address pointer.
int ToNumber(Register reg);
diff --git a/src/mips/code-stubs-mips.cc b/src/mips/code-stubs-mips.cc
index d89d3e5..9385f2f 100644
--- a/src/mips/code-stubs-mips.cc
+++ b/src/mips/code-stubs-mips.cc
@@ -2506,7 +2506,7 @@
CpuFeatures::Scope scope(FPU);
__ mtc1(a2, f0);
if (op_ == Token::SHR) {
- __ Cvt_d_uw(f0, f0);
+ __ Cvt_d_uw(f0, f0, f22);
} else {
__ cvt_d_w(f0, f0);
}
@@ -2920,7 +2920,7 @@
} else {
// The result must be interpreted as an unsigned 32-bit integer.
__ mtc1(a2, double_scratch);
- __ Cvt_d_uw(double_scratch, double_scratch);
+ __ Cvt_d_uw(double_scratch, double_scratch, single_scratch);
}
// Store the result.
@@ -3693,10 +3693,10 @@
// args
// Save callee saved registers on the stack.
- __ MultiPush((kCalleeSaved | ra.bit()) & ~sp.bit());
+ __ MultiPush(kCalleeSaved | ra.bit());
// Load argv in s0 register.
- __ lw(s0, MemOperand(sp, kNumCalleeSaved * kPointerSize +
+ __ lw(s0, MemOperand(sp, (kNumCalleeSaved + 1) * kPointerSize +
StandardFrameConstants::kCArgsSlotsSize));
// We build an EntryFrame.
@@ -3830,7 +3830,7 @@
__ addiu(sp, sp, -EntryFrameConstants::kCallerFPOffset);
// Restore callee saved registers from the stack.
- __ MultiPop((kCalleeSaved | ra.bit()) & ~sp.bit());
+ __ MultiPop(kCalleeSaved | ra.bit());
// Return.
__ Jump(ra);
}
@@ -4517,6 +4517,9 @@
__ Addu(a2, a2, Operand(RegExpImpl::kLastMatchOverhead));
__ sra(at, a0, kSmiTagSize); // Untag length for comparison.
__ Branch(&runtime, gt, a2, Operand(at));
+
+ // Reset offset for possibly sliced string.
+ __ mov(t0, zero_reg);
// subject: Subject string
// regexp_data: RegExp data (FixedArray)
// Check the representation and encoding of the subject string.
@@ -4524,29 +4527,41 @@
__ lw(a0, FieldMemOperand(subject, HeapObject::kMapOffset));
__ lbu(a0, FieldMemOperand(a0, Map::kInstanceTypeOffset));
// First check for flat string.
- __ And(at, a0, Operand(kIsNotStringMask | kStringRepresentationMask));
+ __ And(a1, a0, Operand(kIsNotStringMask | kStringRepresentationMask));
STATIC_ASSERT((kStringTag | kSeqStringTag) == 0);
- __ Branch(&seq_string, eq, at, Operand(zero_reg));
+ __ Branch(&seq_string, eq, a1, Operand(zero_reg));
// subject: Subject string
// a0: instance type if Subject string
// regexp_data: RegExp data (FixedArray)
- // Check for flat cons string.
+ // Check for flat cons string or sliced string.
// A flat cons string is a cons string where the second part is the empty
// string. In that case the subject string is just the first part of the cons
// string. Also in this case the first part of the cons string is known to be
// a sequential string or an external string.
- STATIC_ASSERT(kExternalStringTag != 0);
- STATIC_ASSERT((kConsStringTag & kExternalStringTag) == 0);
- __ And(at, a0, Operand(kIsNotStringMask | kExternalStringTag));
- __ Branch(&runtime, ne, at, Operand(zero_reg));
+ // In the case of a sliced string its offset has to be taken into account.
+ Label cons_string, check_encoding;
+ STATIC_ASSERT(kConsStringTag < kExternalStringTag);
+ STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
+ __ Branch(&cons_string, lt, a1, Operand(kExternalStringTag));
+ __ Branch(&runtime, eq, a1, Operand(kExternalStringTag));
+
+ // String is sliced.
+ __ lw(t0, FieldMemOperand(subject, SlicedString::kOffsetOffset));
+ __ sra(t0, t0, kSmiTagSize);
+ __ lw(subject, FieldMemOperand(subject, SlicedString::kParentOffset));
+ // t5: offset of sliced string, smi-tagged.
+ __ jmp(&check_encoding);
+ // String is a cons string, check whether it is flat.
+ __ bind(&cons_string);
__ lw(a0, FieldMemOperand(subject, ConsString::kSecondOffset));
__ LoadRoot(a1, Heap::kEmptyStringRootIndex);
__ Branch(&runtime, ne, a0, Operand(a1));
__ lw(subject, FieldMemOperand(subject, ConsString::kFirstOffset));
+ // Is first part of cons or parent of slice a flat string?
+ __ bind(&check_encoding);
__ lw(a0, FieldMemOperand(subject, HeapObject::kMapOffset));
__ lbu(a0, FieldMemOperand(a0, Map::kInstanceTypeOffset));
- // Is first part a flat string?
STATIC_ASSERT(kSeqStringTag == 0);
__ And(at, a0, Operand(kStringRepresentationMask));
__ Branch(&runtime, ne, at, Operand(zero_reg));
@@ -4562,8 +4577,8 @@
__ And(a0, a0, Operand(kStringEncodingMask)); // Non-zero for ascii.
__ lw(t9, FieldMemOperand(regexp_data, JSRegExp::kDataAsciiCodeOffset));
  __ sra(a3, a0, 2);  // a3 is 1 for ascii, 0 for UC16 (used below).
- __ lw(t0, FieldMemOperand(regexp_data, JSRegExp::kDataUC16CodeOffset));
- __ movz(t9, t0, a0); // If UC16 (a0 is 0), replace t9 w/kDataUC16CodeOffset.
+ __ lw(t1, FieldMemOperand(regexp_data, JSRegExp::kDataUC16CodeOffset));
+ __ movz(t9, t1, a0); // If UC16 (a0 is 0), replace t9 w/kDataUC16CodeOffset.
// Check that the irregexp code has been generated for the actual string
// encoding. If it has, the field contains a code object otherwise it contains
@@ -4630,23 +4645,32 @@
// For arguments 4 and 3 get string length, calculate start of string data
// and calculate the shift of the index (0 for ASCII and 1 for two byte).
- __ lw(a0, FieldMemOperand(subject, String::kLengthOffset));
- __ sra(a0, a0, kSmiTagSize);
STATIC_ASSERT(SeqAsciiString::kHeaderSize == SeqTwoByteString::kHeaderSize);
- __ Addu(t0, subject, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
+ __ Addu(t2, subject, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
__ Xor(a3, a3, Operand(1)); // 1 for 2-byte str, 0 for 1-byte.
- // Argument 4 (a3): End of string data
- // Argument 3 (a2): Start of string data
+ // Load the length from the original subject string from the previous stack
+ // frame. Therefore we have to use fp, which points exactly to two pointer
+ // sizes below the previous sp. (Because creating a new stack frame pushes
+ // the previous fp onto the stack and moves up sp by 2 * kPointerSize.)
+ __ lw(a0, MemOperand(fp, kSubjectOffset + 2 * kPointerSize));
+ // If slice offset is not 0, load the length from the original sliced string.
+ // Argument 4, a3: End of string data
+ // Argument 3, a2: Start of string data
+ // Prepare start and end index of the input.
+ __ sllv(t1, t0, a3);
+ __ addu(t0, t2, t1);
__ sllv(t1, a1, a3);
__ addu(a2, t0, t1);
- __ sllv(t1, a0, a3);
- __ addu(a3, t0, t1);
+ __ lw(t2, FieldMemOperand(a0, String::kLengthOffset));
+ __ sra(t2, t2, kSmiTagSize);
+ __ sllv(t1, t2, a3);
+ __ addu(a3, t0, t1);
// Argument 2 (a1): Previous index.
// Already there
// Argument 1 (a0): Subject string.
- __ mov(a0, subject);
+ // Already there
// Locate the code entry and call it.
__ Addu(t9, t9, Operand(Code::kHeaderSize - kHeapObjectTag));
@@ -4663,11 +4687,14 @@
// Check the result.
Label success;
- __ Branch(&success, eq, v0, Operand(NativeRegExpMacroAssembler::SUCCESS));
+ __ Branch(&success, eq,
+ subject, Operand(NativeRegExpMacroAssembler::SUCCESS));
Label failure;
- __ Branch(&failure, eq, v0, Operand(NativeRegExpMacroAssembler::FAILURE));
+ __ Branch(&failure, eq,
+ subject, Operand(NativeRegExpMacroAssembler::FAILURE));
// If not exception it can only be retry. Handle that in the runtime system.
- __ Branch(&runtime, ne, v0, Operand(NativeRegExpMacroAssembler::EXCEPTION));
+ __ Branch(&runtime, ne,
+ subject, Operand(NativeRegExpMacroAssembler::EXCEPTION));
// Result must now be exception. If there is no pending exception already a
// stack overflow (on the backtrack stack) was detected in RegExp code but
// haven't created the exception yet. Handle that in the runtime system.
@@ -4678,16 +4705,16 @@
__ li(a2, Operand(ExternalReference(Isolate::k_pending_exception_address,
masm->isolate())));
__ lw(v0, MemOperand(a2, 0));
- __ Branch(&runtime, eq, v0, Operand(a1));
+ __ Branch(&runtime, eq, subject, Operand(a1));
__ sw(a1, MemOperand(a2, 0)); // Clear pending exception.
// Check if the exception is a termination. If so, throw as uncatchable.
__ LoadRoot(a0, Heap::kTerminationExceptionRootIndex);
Label termination_exception;
- __ Branch(&termination_exception, eq, v0, Operand(a0));
+ __ Branch(&termination_exception, eq, subject, Operand(a0));
- __ Throw(a0); // Expects thrown value in v0.
+ __ Throw(subject); // Expects thrown value in v0.
__ bind(&termination_exception);
__ ThrowUncatchable(TERMINATION, v0); // Expects thrown value in v0.
@@ -4963,6 +4990,7 @@
Label flat_string;
Label ascii_string;
Label got_char_code;
+ Label sliced_string;
ASSERT(!t0.is(scratch_));
ASSERT(!t0.is(index_));
@@ -4996,23 +5024,37 @@
__ Branch(&flat_string, eq, t0, Operand(zero_reg));
// Handle non-flat strings.
- __ And(t0, result_, Operand(kIsConsStringMask));
- __ Branch(&call_runtime_, eq, t0, Operand(zero_reg));
+ __ And(result_, result_, Operand(kStringRepresentationMask));
+ STATIC_ASSERT(kConsStringTag < kExternalStringTag);
+ STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
+ __ Branch(&sliced_string, gt, result_, Operand(kExternalStringTag));
+ __ Branch(&call_runtime_, eq, result_, Operand(kExternalStringTag));
// ConsString.
// Check whether the right hand side is the empty string (i.e. if
// this is really a flat string in a cons string). If that is not
// the case we would rather go to the runtime system now to flatten
// the string.
+ Label assure_seq_string;
__ lw(result_, FieldMemOperand(object_, ConsString::kSecondOffset));
__ LoadRoot(t0, Heap::kEmptyStringRootIndex);
__ Branch(&call_runtime_, ne, result_, Operand(t0));
// Get the first of the two strings and load its instance type.
__ lw(object_, FieldMemOperand(object_, ConsString::kFirstOffset));
+ __ jmp(&assure_seq_string);
+
+ // SlicedString, unpack and add offset.
+ __ bind(&sliced_string);
+ __ lw(result_, FieldMemOperand(object_, SlicedString::kOffsetOffset));
+ __ addu(scratch_, scratch_, result_);
+ __ lw(object_, FieldMemOperand(object_, SlicedString::kParentOffset));
+
+ // Assure that we are dealing with a sequential string. Go to runtime if not.
+ __ bind(&assure_seq_string);
__ lw(result_, FieldMemOperand(object_, HeapObject::kMapOffset));
__ lbu(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset));
- // If the first cons component is also non-flat, then go to runtime.
+ // Check that parent is not an external string. Go to runtime otherwise.
STATIC_ASSERT(kSeqStringTag == 0);
__ And(t0, result_, Operand(kStringRepresentationMask));
@@ -5583,6 +5625,11 @@
Register to = t2;
Register from = t3;
+ if (FLAG_string_slices) {
+ __ nop(); // Jumping as first instruction would crash the code generation.
+ __ jmp(&sub_string_runtime);
+ }
+
// Check bounds and smi-ness.
__ lw(to, MemOperand(sp, kToOffset));
__ lw(from, MemOperand(sp, kFromOffset));
diff --git a/src/mips/deoptimizer-mips.cc b/src/mips/deoptimizer-mips.cc
index 9a19aba..18b6231 100644
--- a/src/mips/deoptimizer-mips.cc
+++ b/src/mips/deoptimizer-mips.cc
@@ -39,7 +39,7 @@
namespace internal {
-int Deoptimizer::table_entry_size_ = 10;
+const int Deoptimizer::table_entry_size_ = 10;
int Deoptimizer::patch_size() {
diff --git a/src/mips/frames-mips.h b/src/mips/frames-mips.h
index 2e720fb..1899843 100644
--- a/src/mips/frames-mips.h
+++ b/src/mips/frames-mips.h
@@ -59,10 +59,10 @@
// Saved temporaries.
1 << 16 | 1 << 17 | 1 << 18 | 1 << 19 |
1 << 20 | 1 << 21 | 1 << 22 | 1 << 23 |
- // gp, sp, fp.
- 1 << 28 | 1 << 29 | 1 << 30;
+ // fp.
+ 1 << 30;
-static const int kNumCalleeSaved = 11;
+static const int kNumCalleeSaved = 9;
// Number of registers for which space is reserved in safepoints. Must be a
@@ -121,10 +121,11 @@
class StackHandlerConstants : public AllStatic {
public:
- static const int kNextOffset = 0 * kPointerSize;
- static const int kStateOffset = 1 * kPointerSize;
- static const int kFPOffset = 2 * kPointerSize;
- static const int kPCOffset = 3 * kPointerSize;
+ static const int kNextOffset = 0 * kPointerSize;
+ static const int kStateOffset = 1 * kPointerSize;
+ static const int kContextOffset = 2 * kPointerSize;
+ static const int kFPOffset = 3 * kPointerSize;
+ static const int kPCOffset = 4 * kPointerSize;
static const int kSize = kPCOffset + kPointerSize;
};
diff --git a/src/mips/full-codegen-mips.cc b/src/mips/full-codegen-mips.cc
index 7834273..d3f8922 100644
--- a/src/mips/full-codegen-mips.cc
+++ b/src/mips/full-codegen-mips.cc
@@ -55,7 +55,6 @@
static unsigned GetPropertyId(Property* property) {
- if (property->is_synthetic()) return AstNode::kNoNumber;
return property->id();
}
@@ -697,109 +696,77 @@
Comment cmnt(masm_, "[ Declaration");
ASSERT(variable != NULL); // Must have been resolved.
Slot* slot = variable->AsSlot();
- Property* prop = variable->AsProperty();
-
- if (slot != NULL) {
- switch (slot->type()) {
- case Slot::PARAMETER:
- case Slot::LOCAL:
- if (mode == Variable::CONST) {
- __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
- __ sw(t0, MemOperand(fp, SlotOffset(slot)));
- } else if (function != NULL) {
- VisitForAccumulatorValue(function);
- __ sw(result_register(), MemOperand(fp, SlotOffset(slot)));
- }
- break;
-
- case Slot::CONTEXT:
- // We bypass the general EmitSlotSearch because we know more about
- // this specific context.
-
- // The variable in the decl always resides in the current function
- // context.
- ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
- if (FLAG_debug_code) {
- // Check that we're not inside a with or catch context.
- __ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
- __ LoadRoot(t0, Heap::kWithContextMapRootIndex);
- __ Check(ne, "Declaration in with context.",
- a1, Operand(t0));
- __ LoadRoot(t0, Heap::kCatchContextMapRootIndex);
- __ Check(ne, "Declaration in catch context.",
- a1, Operand(t0));
- }
- if (mode == Variable::CONST) {
- __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
- __ sw(at, ContextOperand(cp, slot->index()));
- // No write barrier since the_hole_value is in old space.
- } else if (function != NULL) {
- VisitForAccumulatorValue(function);
- __ sw(result_register(), ContextOperand(cp, slot->index()));
- int offset = Context::SlotOffset(slot->index());
- // We know that we have written a function, which is not a smi.
- __ mov(a1, cp);
- __ RecordWrite(a1, Operand(offset), a2, result_register());
- }
- break;
-
- case Slot::LOOKUP: {
- __ li(a2, Operand(variable->name()));
- // Declaration nodes are always introduced in one of two modes.
- ASSERT(mode == Variable::VAR ||
- mode == Variable::CONST);
- PropertyAttributes attr =
- (mode == Variable::VAR) ? NONE : READ_ONLY;
- __ li(a1, Operand(Smi::FromInt(attr)));
- // Push initial value, if any.
- // Note: For variables we must not push an initial value (such as
- // 'undefined') because we may have a (legal) redeclaration and we
- // must not destroy the current value.
- if (mode == Variable::CONST) {
- __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
- __ Push(cp, a2, a1, a0);
- } else if (function != NULL) {
- __ Push(cp, a2, a1);
- // Push initial value for function declaration.
- VisitForStackValue(function);
- } else {
- ASSERT(Smi::FromInt(0) == 0);
- // No initial value!
- __ mov(a0, zero_reg); // Operand(Smi::FromInt(0)));
- __ Push(cp, a2, a1, a0);
- }
- __ CallRuntime(Runtime::kDeclareContextSlot, 4);
- break;
+ ASSERT(slot != NULL);
+ switch (slot->type()) {
+ case Slot::PARAMETER:
+ case Slot::LOCAL:
+ if (mode == Variable::CONST) {
+ __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
+ __ sw(t0, MemOperand(fp, SlotOffset(slot)));
+ } else if (function != NULL) {
+ VisitForAccumulatorValue(function);
+ __ sw(result_register(), MemOperand(fp, SlotOffset(slot)));
}
- }
+ break;
- } else if (prop != NULL) {
- // A const declaration aliasing a parameter is an illegal redeclaration.
- ASSERT(mode != Variable::CONST);
- if (function != NULL) {
- // We are declaring a function that rewrites to a property.
- // Use (keyed) IC to set the initial value. We cannot visit the
- // rewrite because it's shared and we risk recording duplicate AST
- // IDs for bailouts from optimized code.
- ASSERT(prop->obj()->AsVariableProxy() != NULL);
- { AccumulatorValueContext for_object(this);
- EmitVariableLoad(prop->obj()->AsVariableProxy());
+ case Slot::CONTEXT:
+ // We bypass the general EmitSlotSearch because we know more about
+ // this specific context.
+
+ // The variable in the decl always resides in the current function
+ // context.
+ ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
+ if (FLAG_debug_code) {
+ // Check that we're not inside a with or catch context.
+ __ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
+ __ LoadRoot(t0, Heap::kWithContextMapRootIndex);
+ __ Check(ne, "Declaration in with context.",
+ a1, Operand(t0));
+ __ LoadRoot(t0, Heap::kCatchContextMapRootIndex);
+ __ Check(ne, "Declaration in catch context.",
+ a1, Operand(t0));
}
+ if (mode == Variable::CONST) {
+ __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
+ __ sw(at, ContextOperand(cp, slot->index()));
+ // No write barrier since the_hole_value is in old space.
+ } else if (function != NULL) {
+ VisitForAccumulatorValue(function);
+ __ sw(result_register(), ContextOperand(cp, slot->index()));
+ int offset = Context::SlotOffset(slot->index());
+ // We know that we have written a function, which is not a smi.
+ __ mov(a1, cp);
+ __ RecordWrite(a1, Operand(offset), a2, result_register());
+ }
+ break;
- __ push(result_register());
- VisitForAccumulatorValue(function);
- __ mov(a0, result_register());
- __ pop(a2);
-
- ASSERT(prop->key()->AsLiteral() != NULL &&
- prop->key()->AsLiteral()->handle()->IsSmi());
- __ li(a1, Operand(prop->key()->AsLiteral()->handle()));
-
- Handle<Code> ic = is_strict_mode()
- ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
- : isolate()->builtins()->KeyedStoreIC_Initialize();
- __ Call(ic);
- // Value in v0 is ignored (declarations are statements).
+ case Slot::LOOKUP: {
+ __ li(a2, Operand(variable->name()));
+ // Declaration nodes are always introduced in one of two modes.
+ ASSERT(mode == Variable::VAR ||
+ mode == Variable::CONST ||
+ mode == Variable::LET);
+ PropertyAttributes attr = (mode == Variable::CONST) ? READ_ONLY : NONE;
+ __ li(a1, Operand(Smi::FromInt(attr)));
+ // Push initial value, if any.
+ // Note: For variables we must not push an initial value (such as
+ // 'undefined') because we may have a (legal) redeclaration and we
+ // must not destroy the current value.
+ if (mode == Variable::CONST) {
+ __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
+ __ Push(cp, a2, a1, a0);
+ } else if (function != NULL) {
+ __ Push(cp, a2, a1);
+ // Push initial value for function declaration.
+ VisitForStackValue(function);
+ } else {
+ ASSERT(Smi::FromInt(0) == 0);
+ // No initial value!
+ __ mov(a0, zero_reg); // Operand(Smi::FromInt(0)));
+ __ Push(cp, a2, a1, a0);
+ }
+ __ CallRuntime(Runtime::kDeclareContextSlot, 4);
+ break;
}
}
}
@@ -886,7 +853,7 @@
__ bind(&next_test);
__ Drop(1); // Switch value is no longer needed.
if (default_clause == NULL) {
- __ Branch(nested_statement.break_target());
+ __ Branch(nested_statement.break_label());
} else {
__ Branch(default_clause->body_target());
}
@@ -900,7 +867,7 @@
VisitStatements(clause->statements());
}
- __ bind(nested_statement.break_target());
+ __ bind(nested_statement.break_label());
PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}
@@ -1026,7 +993,7 @@
// Load the current count to a0, load the length to a1.
__ lw(a0, MemOperand(sp, 0 * kPointerSize));
__ lw(a1, MemOperand(sp, 1 * kPointerSize));
- __ Branch(loop_statement.break_target(), hs, a0, Operand(a1));
+ __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));
// Get the current entry of the array into register a3.
__ lw(a2, MemOperand(sp, 2 * kPointerSize));
@@ -1053,7 +1020,7 @@
__ push(a3); // Current entry.
__ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
__ mov(a3, result_register());
- __ Branch(loop_statement.continue_target(), eq, a3, Operand(zero_reg));
+ __ Branch(loop_statement.continue_label(), eq, a3, Operand(zero_reg));
// Update the 'each' property or variable from the possibly filtered
// entry in register a3.
@@ -1069,7 +1036,7 @@
// Generate code for the going to the next element by incrementing
// the index (smi) stored on top of the stack.
- __ bind(loop_statement.continue_target());
+ __ bind(loop_statement.continue_label());
__ pop(a0);
__ Addu(a0, a0, Operand(Smi::FromInt(1)));
__ push(a0);
@@ -1078,7 +1045,7 @@
__ Branch(&loop);
// Remove the pointers stored on the stack.
- __ bind(loop_statement.break_target());
+ __ bind(loop_statement.break_label());
__ Drop(5);
// Exit and decrement the loop depth.
@@ -1533,9 +1500,7 @@
// Update the write barrier for the array store with v0 as the scratch
// register.
- __ li(a2, Operand(offset));
- // TODO(PJ): double check this RecordWrite call.
- __ RecordWrite(a1, a2, result_register());
+ __ RecordWrite(a1, Operand(offset), a2, result_register());
PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
}
@@ -2286,36 +2251,10 @@
EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET);
} else {
// Call to a keyed property.
- // For a synthetic property use keyed load IC followed by function call,
- // for a regular property use EmitKeyedCallWithIC.
- if (prop->is_synthetic()) {
- // Do not visit the object and key subexpressions (they are shared
- // by all occurrences of the same rewritten parameter).
- ASSERT(prop->obj()->AsVariableProxy() != NULL);
- ASSERT(prop->obj()->AsVariableProxy()->var()->AsSlot() != NULL);
- Slot* slot = prop->obj()->AsVariableProxy()->var()->AsSlot();
- MemOperand operand = EmitSlotSearch(slot, a1);
- __ lw(a1, operand);
-
- ASSERT(prop->key()->AsLiteral() != NULL);
- ASSERT(prop->key()->AsLiteral()->handle()->IsSmi());
- __ li(a0, Operand(prop->key()->AsLiteral()->handle()));
-
- // Record source code position for IC call.
- SetSourcePosition(prop->position());
-
- Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
- __ Call(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
- __ lw(a1, GlobalObjectOperand());
- __ lw(a1, FieldMemOperand(a1, GlobalObject::kGlobalReceiverOffset));
- __ Push(v0, a1); // Function, receiver.
- EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
- } else {
- { PreservePositionScope scope(masm()->positions_recorder());
- VisitForStackValue(prop->obj());
- }
- EmitKeyedCallWithIC(expr, prop->key());
+ { PreservePositionScope scope(masm()->positions_recorder());
+ VisitForStackValue(prop->obj());
}
+ EmitKeyedCallWithIC(expr, prop->key());
}
} else {
{ PreservePositionScope scope(masm()->positions_recorder());
@@ -2761,7 +2700,7 @@
// Objects with a non-function constructor have class 'Object'.
__ bind(&non_function_constructor);
- __ LoadRoot(v0, Heap::kfunction_class_symbolRootIndex);
+ __ LoadRoot(v0, Heap::kObject_symbolRootIndex);
__ jmp(&done);
// Non-JS objects have class null.
@@ -3602,39 +3541,6 @@
}
-void FullCodeGenerator::EmitIsNativeOrStrictMode(ZoneList<Expression*>* args) {
- ASSERT(args->length() == 1);
-
- // Load the function into v0.
- VisitForAccumulatorValue(args->at(0));
-
- // Prepare for the test.
- Label materialize_true, materialize_false;
- Label* if_true = NULL;
- Label* if_false = NULL;
- Label* fall_through = NULL;
- context()->PrepareTest(&materialize_true, &materialize_false,
- &if_true, &if_false, &fall_through);
-
- // Test for strict mode function.
- __ lw(a1, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
- __ lw(a1, FieldMemOperand(a1, SharedFunctionInfo::kCompilerHintsOffset));
- __ And(at, a1, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
- kSmiTagSize)));
- __ Branch(if_true, ne, at, Operand(zero_reg));
-
- // Test for native function.
- __ And(at, a1, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
- __ Branch(if_true, ne, at, Operand(zero_reg));
-
- // Not native or strict-mode function.
- __ Branch(if_false);
-
- PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
- context()->Plug(if_true, if_false);
-}
-
-
void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
Handle<String> name = expr->name();
if (name->length() > 0 && name->Get(0) == '_') {
@@ -3686,18 +3592,12 @@
Variable* var = expr->expression()->AsVariableProxy()->AsVariable();
if (prop != NULL) {
- if (prop->is_synthetic()) {
- // Result of deleting parameters is false, even when they rewrite
- // to accesses on the arguments object.
- context()->Plug(false);
- } else {
- VisitForStackValue(prop->obj());
- VisitForStackValue(prop->key());
- __ li(a1, Operand(Smi::FromInt(strict_mode_flag())));
- __ push(a1);
- __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
- context()->Plug(v0);
- }
+ VisitForStackValue(prop->obj());
+ VisitForStackValue(prop->key());
+ __ li(a1, Operand(Smi::FromInt(strict_mode_flag())));
+ __ push(a1);
+ __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
+ context()->Plug(v0);
} else if (var != NULL) {
// Delete of an unqualified identifier is disallowed in strict mode
// but "delete this" is.
@@ -4052,6 +3952,10 @@
__ Branch(if_true, eq, v0, Operand(at));
__ LoadRoot(at, Heap::kFalseValueRootIndex);
Split(eq, v0, Operand(at), if_true, if_false, fall_through);
+ } else if (FLAG_harmony_typeof &&
+ check->Equals(isolate()->heap()->null_symbol())) {
+ __ LoadRoot(at, Heap::kNullValueRootIndex);
+ Split(eq, v0, Operand(at), if_true, if_false, fall_through);
} else if (check->Equals(isolate()->heap()->undefined_symbol())) {
__ LoadRoot(at, Heap::kUndefinedValueRootIndex);
__ Branch(if_true, eq, v0, Operand(at));
@@ -4069,8 +3973,10 @@
} else if (check->Equals(isolate()->heap()->object_symbol())) {
__ JumpIfSmi(v0, if_false);
- __ LoadRoot(at, Heap::kNullValueRootIndex);
- __ Branch(if_true, eq, v0, Operand(at));
+ if (!FLAG_harmony_typeof) {
+ __ LoadRoot(at, Heap::kNullValueRootIndex);
+ __ Branch(if_true, eq, v0, Operand(at));
+ }
// Check for JS objects => true.
__ GetObjectType(v0, v0, a1);
__ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
@@ -4313,6 +4219,34 @@
#undef __
+#define __ ACCESS_MASM(masm())
+
+FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
+ int* stack_depth,
+ int* context_length) {
+ // The macros used here must preserve the result register.
+
+ // Because the handler block contains the context of the finally
+ // code, we can restore it directly from there for the finally code
+ // rather than iteratively unwinding contexts via their previous
+ // links.
+ __ Drop(*stack_depth); // Down to the handler block.
+ if (*context_length > 0) {
+ // Restore the context to its dedicated register and the stack.
+ __ lw(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
+ __ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
+ }
+ __ PopTryHandler();
+ __ Call(finally_entry_);
+
+ *stack_depth = 0;
+ *context_length = 0;
+ return previous_;
+}
+
+
+#undef __
+
} } // namespace v8::internal
#endif // V8_TARGET_ARCH_MIPS
diff --git a/src/mips/ic-mips.cc b/src/mips/ic-mips.cc
index da39962..85cb916 100644
--- a/src/mips/ic-mips.cc
+++ b/src/mips/ic-mips.cc
@@ -885,8 +885,8 @@
MemOperand unmapped_location =
GenerateUnmappedArgumentsLookup(masm, a0, a2, a3, &slow);
__ lw(a2, unmapped_location);
- __ Branch(&slow, eq, a2, Operand(a3));
__ LoadRoot(a3, Heap::kTheHoleValueRootIndex);
+ __ Branch(&slow, eq, a2, Operand(a3));
__ mov(v0, a2);
__ Ret();
__ bind(&slow);
diff --git a/src/mips/macro-assembler-mips.cc b/src/mips/macro-assembler-mips.cc
index 5e8d676..c7f727b 100644
--- a/src/mips/macro-assembler-mips.cc
+++ b/src/mips/macro-assembler-mips.cc
@@ -757,15 +757,20 @@
uint16_t pos,
uint16_t size) {
ASSERT(pos < 32);
- ASSERT(pos + size < 32);
+ ASSERT(pos + size < 33);
if (mips32r2) {
ext_(rt, rs, pos, size);
} else {
// Move rs to rt and shift it left then right to get the
// desired bitfield on the right side and zeroes on the left.
- sll(rt, rs, 32 - (pos + size));
- srl(rt, rt, 32 - size);
+ int shift_left = 32 - (pos + size);
+ sll(rt, rs, shift_left); // Acts as a move if shift_left == 0.
+
+ int shift_right = 32 - size;
+ if (shift_right > 0) {
+ srl(rt, rt, shift_right);
+ }
}
}
@@ -807,28 +812,32 @@
}
-void MacroAssembler::Cvt_d_uw(FPURegister fd, FPURegister fs) {
- // Move the data from fs to t4.
- mfc1(t4, fs);
- return Cvt_d_uw(fd, t4);
+void MacroAssembler::Cvt_d_uw(FPURegister fd,
+ FPURegister fs,
+ FPURegister scratch) {
+ // Move the data from fs to t8.
+ mfc1(t8, fs);
+ Cvt_d_uw(fd, t8, scratch);
}
-void MacroAssembler::Cvt_d_uw(FPURegister fd, Register rs) {
+void MacroAssembler::Cvt_d_uw(FPURegister fd,
+ Register rs,
+ FPURegister scratch) {
// Convert rs to a FP value in fd (and fd + 1).
// We do this by converting rs minus the MSB to avoid sign conversion,
- // then adding 2^31-1 and 1 to the result.
+ // then adding 2^31 to the result (if needed).
- ASSERT(!fd.is(f20));
+ ASSERT(!fd.is(scratch));
ASSERT(!rs.is(t9));
- ASSERT(!rs.is(t8));
+ ASSERT(!rs.is(at));
- // Save rs's MSB to t8.
- And(t8, rs, 0x80000000);
+ // Save rs's MSB to t9.
+ Ext(t9, rs, 31, 1);
// Remove rs's MSB.
- And(t9, rs, 0x7FFFFFFF);
- // Move t9 to fd.
- mtc1(t9, fd);
+ Ext(at, rs, 0, 31);
+ // Move the result to fd.
+ mtc1(at, fd);
// Convert fd to a real FP value.
cvt_d_w(fd, fd);
@@ -837,41 +846,39 @@
// If rs's MSB was 0, it's done.
// Otherwise we need to add that to the FP register.
- Branch(&conversion_done, eq, t8, Operand(zero_reg));
+ Branch(&conversion_done, eq, t9, Operand(zero_reg));
- // First load 2^31 - 1 into f20.
- Or(t9, zero_reg, 0x7FFFFFFF);
- mtc1(t9, f20);
+ // Load 2^31 into f20 as its float representation.
+ li(at, 0x41E00000);
+ mtc1(at, FPURegister::from_code(scratch.code() + 1));
+ mtc1(zero_reg, scratch);
+ // Add it to fd.
+ add_d(fd, fd, scratch);
- // Convert it to FP and add it to fd.
- cvt_d_w(f20, f20);
- add_d(fd, fd, f20);
- // Now add 1.
- Or(t9, zero_reg, 1);
- mtc1(t9, f20);
-
- cvt_d_w(f20, f20);
- add_d(fd, fd, f20);
bind(&conversion_done);
}
-void MacroAssembler::Trunc_uw_d(FPURegister fd, FPURegister fs) {
- Trunc_uw_d(fs, t4);
- mtc1(t4, fd);
+void MacroAssembler::Trunc_uw_d(FPURegister fd,
+ FPURegister fs,
+ FPURegister scratch) {
+ Trunc_uw_d(fs, t8, scratch);
+ mtc1(t8, fd);
}
-void MacroAssembler::Trunc_uw_d(FPURegister fd, Register rs) {
- ASSERT(!fd.is(f22));
- ASSERT(!rs.is(t8));
+void MacroAssembler::Trunc_uw_d(FPURegister fd,
+ Register rs,
+ FPURegister scratch) {
+ ASSERT(!fd.is(scratch));
+ ASSERT(!rs.is(at));
- // Load 2^31 into f22.
- Or(t8, zero_reg, 0x80000000);
- Cvt_d_uw(f22, t8);
-
- // Test if f22 > fd.
- c(OLT, D, fd, f22);
+ // Load 2^31 into scratch as its float representation.
+ li(at, 0x41E00000);
+ mtc1(at, FPURegister::from_code(scratch.code() + 1));
+ mtc1(zero_reg, scratch);
+ // Test if scratch > fd.
+ c(OLT, D, fd, scratch);
Label simple_convert;
// If fd < 2^31 we can convert it normally.
@@ -879,18 +886,17 @@
// First we subtract 2^31 from fd, then trunc it to rs
// and add 2^31 to rs.
-
- sub_d(f22, fd, f22);
- trunc_w_d(f22, f22);
- mfc1(rs, f22);
- or_(rs, rs, t8);
+ sub_d(scratch, fd, scratch);
+ trunc_w_d(scratch, scratch);
+ mfc1(rs, scratch);
+ Or(rs, rs, 1 << 31);
Label done;
Branch(&done);
// Simple conversion.
bind(&simple_convert);
- trunc_w_d(f22, fd);
- mfc1(rs, f22);
+ trunc_w_d(scratch, fd);
+ mfc1(rs, scratch);
bind(&done);
}
@@ -1551,12 +1557,14 @@
b(offset);
break;
case eq:
+ ASSERT(!scratch.is(rs));
r2 = scratch;
li(r2, rt);
offset = shifted_branch_offset(L, false);
beq(rs, r2, offset);
break;
case ne:
+ ASSERT(!scratch.is(rs));
r2 = scratch;
li(r2, rt);
offset = shifted_branch_offset(L, false);
@@ -1568,6 +1576,7 @@
offset = shifted_branch_offset(L, false);
bgtz(rs, offset);
} else {
+ ASSERT(!scratch.is(rs));
r2 = scratch;
li(r2, rt);
slt(scratch, r2, rs);
@@ -1584,6 +1593,7 @@
offset = shifted_branch_offset(L, false);
beq(scratch, zero_reg, offset);
} else {
+ ASSERT(!scratch.is(rs));
r2 = scratch;
li(r2, rt);
slt(scratch, rs, r2);
@@ -1600,6 +1610,7 @@
offset = shifted_branch_offset(L, false);
bne(scratch, zero_reg, offset);
} else {
+ ASSERT(!scratch.is(rs));
r2 = scratch;
li(r2, rt);
slt(scratch, rs, r2);
@@ -1612,6 +1623,7 @@
offset = shifted_branch_offset(L, false);
blez(rs, offset);
} else {
+ ASSERT(!scratch.is(rs));
r2 = scratch;
li(r2, rt);
slt(scratch, r2, rs);
@@ -1625,6 +1637,7 @@
offset = shifted_branch_offset(L, false);
bgtz(rs, offset);
} else {
+ ASSERT(!scratch.is(rs));
r2 = scratch;
li(r2, rt);
sltu(scratch, r2, rs);
@@ -1641,6 +1654,7 @@
offset = shifted_branch_offset(L, false);
beq(scratch, zero_reg, offset);
} else {
+ ASSERT(!scratch.is(rs));
r2 = scratch;
li(r2, rt);
sltu(scratch, rs, r2);
@@ -1657,6 +1671,7 @@
offset = shifted_branch_offset(L, false);
bne(scratch, zero_reg, offset);
} else {
+ ASSERT(!scratch.is(rs));
r2 = scratch;
li(r2, rt);
sltu(scratch, rs, r2);
@@ -1669,6 +1684,7 @@
offset = shifted_branch_offset(L, false);
b(offset);
} else {
+ ASSERT(!scratch.is(rs));
r2 = scratch;
li(r2, rt);
sltu(scratch, r2, rs);
@@ -2244,7 +2260,13 @@
void MacroAssembler::PushTryHandler(CodeLocation try_location,
HandlerType type) {
// Adjust this code if not the case.
- ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kContextOffset == 2 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kFPOffset == 3 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize);
+
// The return address is passed in register ra.
if (try_location == IN_JAVASCRIPT) {
if (type == TRY_CATCH_HANDLER) {
@@ -2252,19 +2274,16 @@
} else {
li(t0, Operand(StackHandler::TRY_FINALLY));
}
- ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize
- && StackHandlerConstants::kFPOffset == 2 * kPointerSize
- && StackHandlerConstants::kPCOffset == 3 * kPointerSize
- && StackHandlerConstants::kNextOffset == 0 * kPointerSize);
// Save the current handler as the next handler.
li(t2, Operand(ExternalReference(Isolate::k_handler_address, isolate())));
lw(t1, MemOperand(t2));
addiu(sp, sp, -StackHandlerConstants::kSize);
- sw(ra, MemOperand(sp, 12));
- sw(fp, MemOperand(sp, 8));
- sw(t0, MemOperand(sp, 4));
- sw(t1, MemOperand(sp, 0));
+ sw(ra, MemOperand(sp, StackHandlerConstants::kPCOffset));
+ sw(fp, MemOperand(sp, StackHandlerConstants::kFPOffset));
+ sw(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
+ sw(t0, MemOperand(sp, StackHandlerConstants::kStateOffset));
+ sw(t1, MemOperand(sp, StackHandlerConstants::kNextOffset));
// Link this handler as the new current one.
sw(sp, MemOperand(t2));
@@ -2272,11 +2291,6 @@
} else {
// Must preserve a0-a3, and s0 (argv).
ASSERT(try_location == IN_JS_ENTRY);
- ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize
- && StackHandlerConstants::kFPOffset == 2 * kPointerSize
- && StackHandlerConstants::kPCOffset == 3 * kPointerSize
- && StackHandlerConstants::kNextOffset == 0 * kPointerSize);
-
// The frame pointer does not point to a JS frame so we save NULL
// for fp. We expect the code throwing an exception to check fp
// before dereferencing it to restore the context.
@@ -2286,11 +2300,14 @@
li(t2, Operand(ExternalReference(Isolate::k_handler_address, isolate())));
lw(t1, MemOperand(t2));
+ ASSERT(Smi::FromInt(0) == 0); // Used for no context.
+
addiu(sp, sp, -StackHandlerConstants::kSize);
- sw(ra, MemOperand(sp, 12));
- sw(zero_reg, MemOperand(sp, 8));
- sw(t0, MemOperand(sp, 4));
- sw(t1, MemOperand(sp, 0));
+ sw(ra, MemOperand(sp, StackHandlerConstants::kPCOffset));
+ sw(zero_reg, MemOperand(sp, StackHandlerConstants::kFPOffset));
+ sw(zero_reg, MemOperand(sp, StackHandlerConstants::kContextOffset));
+ sw(t0, MemOperand(sp, StackHandlerConstants::kStateOffset));
+ sw(t1, MemOperand(sp, StackHandlerConstants::kNextOffset));
// Link this handler as the new current one.
sw(sp, MemOperand(t2));
@@ -2299,7 +2316,7 @@
void MacroAssembler::PopTryHandler() {
- ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
+ STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
pop(a1);
Addu(sp, sp, Operand(StackHandlerConstants::kSize - kPointerSize));
li(at, Operand(ExternalReference(Isolate::k_handler_address, isolate())));
@@ -2312,28 +2329,31 @@
Move(v0, value);
// Adjust this code if not the case.
- STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kContextOffset == 2 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kFPOffset == 3 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize);
// Drop the sp to the top of the handler.
li(a3, Operand(ExternalReference(Isolate::k_handler_address,
- isolate())));
+ isolate())));
lw(sp, MemOperand(a3));
- // Restore the next handler and frame pointer, discard handler state.
- STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
+ // Restore the next handler.
pop(a2);
sw(a2, MemOperand(a3));
- STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize);
- MultiPop(a3.bit() | fp.bit());
- // Before returning we restore the context from the frame pointer if
- // not NULL. The frame pointer is NULL in the exception handler of a
- // JS entry frame.
- // Set cp to NULL if fp is NULL.
+ // Restore context and frame pointer, discard state (a3).
+ MultiPop(a3.bit() | cp.bit() | fp.bit());
+
+ // If the handler is a JS frame, restore the context to the frame.
+ // (a3 == ENTRY) == (fp == 0) == (cp == 0), so we could test any
+ // of them.
Label done;
- Branch(USE_DELAY_SLOT, &done, eq, fp, Operand(zero_reg));
- mov(cp, zero_reg); // In branch delay slot.
- lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
+ Branch(&done, eq, fp, Operand(zero_reg));
+ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
bind(&done);
#ifdef DEBUG
@@ -2355,7 +2375,6 @@
}
#endif
- STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
pop(t9); // 2 instructions: lw, add sp.
Jump(t9); // 2 instructions: jr, nop (in delay slot).
@@ -2370,7 +2389,12 @@
void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
Register value) {
// Adjust this code if not the case.
- STATIC_ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kStateOffset == 1 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kContextOffset == 2 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kFPOffset == 3 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize);
// v0 is expected to hold the exception.
Move(v0, value);
@@ -2393,7 +2417,6 @@
bind(&done);
// Set the top handler address to next handler past the current ENTRY handler.
- STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
pop(a2);
sw(a2, MemOperand(a3));
@@ -2415,20 +2438,12 @@
// Stack layout at this point. See also StackHandlerConstants.
// sp -> state (ENTRY)
+ // cp
// fp
// ra
- // Discard handler state (a2 is not used) and restore frame pointer.
- STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize);
- MultiPop(a2.bit() | fp.bit()); // a2: discarded state.
- // Before returning we restore the context from the frame pointer if
- // not NULL. The frame pointer is NULL in the exception handler of a
- // JS entry frame.
- Label cp_null;
- Branch(USE_DELAY_SLOT, &cp_null, eq, fp, Operand(zero_reg));
- mov(cp, zero_reg); // In the branch delay slot.
- lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
- bind(&cp_null);
+ // Restore context and frame pointer, discard state (r2).
+ MultiPop(a2.bit() | cp.bit() | fp.bit());
#ifdef DEBUG
// When emitting debug_code, set ra as return address for the jump.
@@ -2448,7 +2463,6 @@
addiu(ra, ra, kOffsetRaBytes);
}
#endif
- STATIC_ASSERT(StackHandlerConstants::kPCOffset == 3 * kPointerSize);
pop(t9); // 2 instructions: lw, add sp.
Jump(t9); // 2 instructions: jr, nop (in delay slot).
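
Two details in the macro-assembler changes above are worth unpacking. First, the try-handler frame grows from four to five words so the context can be restored directly: the asserted layout is next (+0), state (+4), context (+8), fp (+12), pc (+16) with 4-byte pointers on MIPS32. Second, Cvt_d_uw and Trunc_uw_d now materialize the double 2^31 by writing the raw bit pattern 0x41E00000 into the high half of the scratch FPU pair (FPURegister::from_code(scratch.code() + 1)) and zero into the low half, instead of converting an integer. A minimal host-side sketch, not part of the patch, confirming that constant:

#include <cstdint>
#include <cstdio>
#include <cstring>

int main() {
  // 2^31 as an IEEE-754 double: sign 0, exponent 31 + 1023 = 0x41E, zero mantissa.
  double two_pow_31 = 2147483648.0;
  uint64_t bits;
  std::memcpy(&bits, &two_pow_31, sizeof(bits));
  std::printf("high word = 0x%08X, low word = 0x%08X\n",
              static_cast<uint32_t>(bits >> 32),  // 0x41E00000
              static_cast<uint32_t>(bits));       // 0x00000000
  return 0;
}

Loading the two words directly avoids the extra cvt_d_w conversions and the "+1" fix-up that the removed code needed when it built the value from 2^31 - 1.
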
diff --git a/src/mips/macro-assembler-mips.h b/src/mips/macro-assembler-mips.h
index 4994516..0fcf6f1 100644
--- a/src/mips/macro-assembler-mips.h
+++ b/src/mips/macro-assembler-mips.h
@@ -524,12 +524,12 @@
void Ext(Register rt, Register rs, uint16_t pos, uint16_t size);
// Convert unsigned word to double.
- void Cvt_d_uw(FPURegister fd, FPURegister fs);
- void Cvt_d_uw(FPURegister fd, Register rs);
+ void Cvt_d_uw(FPURegister fd, FPURegister fs, FPURegister scratch);
+ void Cvt_d_uw(FPURegister fd, Register rs, FPURegister scratch);
// Convert double to unsigned word.
- void Trunc_uw_d(FPURegister fd, FPURegister fs);
- void Trunc_uw_d(FPURegister fd, Register rs);
+ void Trunc_uw_d(FPURegister fd, FPURegister fs, FPURegister scratch);
+ void Trunc_uw_d(FPURegister fd, Register rs, FPURegister scratch);
// Convert the HeapNumber pointed to by source to a 32bits signed integer
// dest. If the HeapNumber does not fit into a 32bits signed integer branch
diff --git a/src/mips/regexp-macro-assembler-mips.cc b/src/mips/regexp-macro-assembler-mips.cc
index 9935ef9..63e836f 100644
--- a/src/mips/regexp-macro-assembler-mips.cc
+++ b/src/mips/regexp-macro-assembler-mips.cc
@@ -1036,12 +1036,12 @@
}
// Prepare for possible GC.
- HandleScope handles;
+ HandleScope handles(isolate);
Handle<Code> code_handle(re_code);
Handle<String> subject(frame_entry<String*>(re_frame, kInputString));
// Current string.
- bool is_ascii = subject->IsAsciiRepresentation();
+ bool is_ascii = subject->IsAsciiRepresentationUnderneath();
ASSERT(re_code->instruction_start() <= *return_address);
ASSERT(*return_address <=
@@ -1050,7 +1050,7 @@
MaybeObject* result = Execution::HandleStackGuardInterrupt();
if (*code_handle != re_code) { // Return address no longer valid.
- int delta = *code_handle - re_code;
+ int delta = code_handle->address() - re_code->address();
// Overwrite the return address on the stack.
*return_address += delta;
}
@@ -1059,8 +1059,20 @@
return EXCEPTION;
}
+ Handle<String> subject_tmp = subject;
+ int slice_offset = 0;
+
+ // Extract the underlying string and the slice offset.
+ if (StringShape(*subject_tmp).IsCons()) {
+ subject_tmp = Handle<String>(ConsString::cast(*subject_tmp)->first());
+ } else if (StringShape(*subject_tmp).IsSliced()) {
+ SlicedString* slice = SlicedString::cast(*subject_tmp);
+ subject_tmp = Handle<String>(slice->parent());
+ slice_offset = slice->offset();
+ }
+
// String might have changed.
- if (subject->IsAsciiRepresentation() != is_ascii) {
+ if (subject_tmp->IsAsciiRepresentation() != is_ascii) {
// If we changed between an ASCII and an UC16 string, the specialized
// code cannot be used, and we need to restart regexp matching from
// scratch (including, potentially, compiling a new version of the code).
@@ -1071,8 +1083,8 @@
// be a sequential or external string with the same content.
// Update the start and end pointers in the stack frame to the current
// location (whether it has actually moved or not).
- ASSERT(StringShape(*subject).IsSequential() ||
- StringShape(*subject).IsExternal());
+ ASSERT(StringShape(*subject_tmp).IsSequential() ||
+ StringShape(*subject_tmp).IsExternal());
// The original start address of the characters to match.
const byte* start_address = frame_entry<const byte*>(re_frame, kInputStart);
@@ -1080,13 +1092,14 @@
// Find the current start address of the same character at the current string
// position.
int start_index = frame_entry<int>(re_frame, kStartIndex);
- const byte* new_address = StringCharacterPosition(*subject, start_index);
+ const byte* new_address = StringCharacterPosition(*subject_tmp,
+ start_index + slice_offset);
if (start_address != new_address) {
// If there is a difference, update the object pointer and start and end
// addresses in the RegExp stack frame to match the new value.
const byte* end_address = frame_entry<const byte* >(re_frame, kInputEnd);
- int byte_length = end_address - start_address;
+ int byte_length = static_cast<int>(end_address - start_address);
frame_entry<const String*>(re_frame, kInputString) = *subject;
frame_entry<const byte*>(re_frame, kInputStart) = new_address;
frame_entry<const byte*>(re_frame, kInputEnd) = new_address + byte_length;
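
The GC-interrupt path above now copes with cons and sliced subjects: before comparing encodings and recomputing the character start address, it unwraps the subject to the string that actually owns the characters and remembers the slice offset, which is then added to start_index. A simplified, self-contained model of that unwrap step (hypothetical stand-in types, not V8's):

#include <cassert>

// Hypothetical stand-in for ConsString/SlicedString; only the fields the
// unwrap step needs.
struct Str {
  bool is_cons;
  bool is_sliced;
  Str* first;    // flat cons: characters live in first()
  Str* parent;   // slice: characters live in parent()
  int offset;    // slice: start of the view within parent
};

// Returns the string holding the characters plus the extra offset to apply.
static Str* Underlying(Str* subject, int* slice_offset) {
  *slice_offset = 0;
  if (subject->is_cons) return subject->first;
  if (subject->is_sliced) {
    *slice_offset = subject->offset;
    return subject->parent;
  }
  return subject;  // already sequential or external
}

int main() {
  Str parent = {false, false, nullptr, nullptr, 0};
  Str slice = {false, true, nullptr, &parent, 7};
  int off = 0;
  assert(Underlying(&slice, &off) == &parent && off == 7);
  return 0;
}
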
diff --git a/src/mips/simulator-mips.cc b/src/mips/simulator-mips.cc
index 30e12e7..3b38695 100644
--- a/src/mips/simulator-mips.cc
+++ b/src/mips/simulator-mips.cc
@@ -1409,20 +1409,11 @@
int32_t arg1 = get_register(a1);
int32_t arg2 = get_register(a2);
int32_t arg3 = get_register(a3);
- int32_t arg4 = 0;
- int32_t arg5 = 0;
- // Need to check if sp is valid before assigning arg4, arg5.
- // This is a fix for cctest test-api/CatchStackOverflow which causes
- // the stack to overflow. For some reason arm doesn't need this
- // stack check here.
int32_t* stack_pointer = reinterpret_cast<int32_t*>(get_register(sp));
- int32_t* stack = reinterpret_cast<int32_t*>(stack_);
- if (stack_pointer >= stack && stack_pointer < stack + stack_size_ - 5) {
- // Args 4 and 5 are on the stack after the reserved space for args 0..3.
- arg4 = stack_pointer[4];
- arg5 = stack_pointer[5];
- }
+ // Args 4 and 5 are on the stack after the reserved space for args 0..3.
+ int32_t arg4 = stack_pointer[4];
+ int32_t arg5 = stack_pointer[5];
bool fp_call =
(redirection->type() == ExternalReference::BUILTIN_FP_FP_CALL) ||
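
The simulator change above reads the 5th and 6th C arguments unconditionally from the stack. Under the MIPS O32 calling convention the first four arguments travel in a0-a3, but the caller still reserves four stack slots for them, so the next arguments occupy slots 4 and 5 relative to sp. A trivial offset check, for illustration only:

#include <cstdio>

int main() {
  const int kPointerSize = 4;  // 32-bit MIPS
  for (int slot = 4; slot <= 5; ++slot) {
    // Matches arg4/arg5 above: sp + 16 and sp + 20 bytes.
    std::printf("arg%d: sp + %d bytes\n", slot, slot * kPointerSize);
  }
  return 0;
}
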
diff --git a/src/mips/stub-cache-mips.cc b/src/mips/stub-cache-mips.cc
index f1ffe9b..c17a658 100644
--- a/src/mips/stub-cache-mips.cc
+++ b/src/mips/stub-cache-mips.cc
@@ -3494,7 +3494,7 @@
__ lw(t1, FieldMemOperand(a3, ExternalArray::kLengthOffset));
__ sra(t2, key, kSmiTagSize);
// Unsigned comparison catches both negative and too-large values.
- __ Branch(&miss_force_generic, Uless, t1, Operand(t2));
+ __ Branch(&miss_force_generic, Ugreater_equal, key, Operand(t1));
__ lw(a3, FieldMemOperand(a3, ExternalArray::kExternalPointerOffset));
// a3: base pointer of external storage
@@ -3638,7 +3638,7 @@
// __ mtc1(zero_reg, f1); // MS 32-bits are all zero.
// __ cvt_d_l(f0, f0); // Use 64 bit conv to get correct unsigned 32-bit.
- __ Cvt_d_uw(f0, value);
+ __ Cvt_d_uw(f0, value, f22);
__ sdc1(f0, MemOperand(v0, HeapNumber::kValueOffset - kHeapObjectTag));
@@ -3822,16 +3822,16 @@
// This stub is meant to be tail-jumped to, the receiver must already
// have been verified by the caller to not be a smi.
- __ lw(a3, FieldMemOperand(receiver, JSObject::kElementsOffset));
-
- // Check that the key is a smi.
+ // Check that the key is a smi.
__ JumpIfNotSmi(key, &miss_force_generic);
+ __ lw(a3, FieldMemOperand(receiver, JSObject::kElementsOffset));
+
// Check that the index is in range.
__ SmiUntag(t0, key);
__ lw(t1, FieldMemOperand(a3, ExternalArray::kLengthOffset));
// Unsigned comparison catches both negative and too-large values.
- __ Branch(&miss_force_generic, Ugreater_equal, t0, Operand(t1));
+ __ Branch(&miss_force_generic, Ugreater_equal, key, Operand(t1));
// Handle both smis and HeapNumbers in the fast path. Go to the
// runtime for all other kinds of values.
@@ -4428,7 +4428,8 @@
__ sw(mantissa_reg, FieldMemOperand(scratch, FixedDoubleArray::kHeaderSize));
uint32_t offset = FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32);
__ sw(exponent_reg, FieldMemOperand(scratch, offset));
- __ Ret();
+ __ Ret(USE_DELAY_SLOT);
+ __ mov(v0, value_reg); // In delay slot.
__ bind(&maybe_nan);
// Could be NaN or Infinity. If fraction is not zero, it's NaN, otherwise
@@ -4459,11 +4460,18 @@
} else {
destination = FloatingPointHelper::kCoreRegisters;
}
- __ SmiUntag(value_reg, value_reg);
+
+ Register untagged_value = receiver_reg;
+ __ SmiUntag(untagged_value, value_reg);
FloatingPointHelper::ConvertIntToDouble(
- masm, value_reg, destination,
- f0, mantissa_reg, exponent_reg, // These are: double_dst, dst1, dst2.
- scratch4, f2); // These are: scratch2, single_scratch.
+ masm,
+ untagged_value,
+ destination,
+ f0,
+ mantissa_reg,
+ exponent_reg,
+ scratch4,
+ f2);
if (destination == FloatingPointHelper::kFPURegisters) {
CpuFeatures::Scope scope(FPU);
__ sdc1(f0, MemOperand(scratch, 0));
@@ -4471,7 +4479,8 @@
__ sw(mantissa_reg, MemOperand(scratch, 0));
__ sw(exponent_reg, MemOperand(scratch, Register::kSizeInBytes));
}
- __ Ret();
+ __ Ret(USE_DELAY_SLOT);
+ __ mov(v0, value_reg); // In delay slot.
// Handle store cache miss, replacing the ic with the generic stub.
__ bind(&miss_force_generic);
diff --git a/src/mirror-debugger.js b/src/mirror-debugger.js
index bad0800..e3f3c48 100644
--- a/src/mirror-debugger.js
+++ b/src/mirror-debugger.js
@@ -195,7 +195,8 @@
Local: 1,
With: 2,
Closure: 3,
- Catch: 4 };
+ Catch: 4,
+ Block: 5 };
// Mirror hierarchy:
diff --git a/src/mksnapshot.cc b/src/mksnapshot.cc
index c5ce12f..4f5fe96 100644
--- a/src/mksnapshot.cc
+++ b/src/mksnapshot.cc
@@ -40,8 +40,6 @@
#include "serialize.h"
#include "list.h"
-// use explicit namespace to avoid clashing with types in namespace v8
-namespace i = v8::internal;
using namespace v8;
static const unsigned int kMaxCounters = 256;
diff --git a/src/objects-debug.cc b/src/objects-debug.cc
index 2963231..4da360b 100644
--- a/src/objects-debug.cc
+++ b/src/objects-debug.cc
@@ -153,6 +153,9 @@
case JS_ARRAY_TYPE:
JSArray::cast(this)->JSArrayVerify();
break;
+ case JS_WEAK_MAP_TYPE:
+ JSWeakMap::cast(this)->JSWeakMapVerify();
+ break;
case JS_REGEXP_TYPE:
JSRegExp::cast(this)->JSRegExpVerify();
break;
@@ -313,7 +316,7 @@
void FixedDoubleArray::FixedDoubleArrayVerify() {
for (int i = 0; i < length(); i++) {
if (!is_the_hole(i)) {
- double value = get(i);
+ double value = get_scalar(i);
ASSERT(!isnan(value) ||
(BitCast<uint64_t>(value) ==
BitCast<uint64_t>(canonical_not_the_hole_nan_as_double())));
@@ -349,6 +352,31 @@
if (IsSymbol()) {
CHECK(!HEAP->InNewSpace(this));
}
+ if (IsConsString()) {
+ ConsString::cast(this)->ConsStringVerify();
+ } else if (IsSlicedString()) {
+ SlicedString::cast(this)->SlicedStringVerify();
+ }
+}
+
+
+void ConsString::ConsStringVerify() {
+ CHECK(this->first()->IsString());
+ CHECK(this->second() == GetHeap()->empty_string() ||
+ this->second()->IsString());
+ CHECK(this->length() >= String::kMinNonFlatLength);
+ if (this->IsFlat()) {
+ // A flat cons can only be created by String::SlowTryFlatten.
+ // Afterwards, the first part may be externalized.
+ CHECK(this->first()->IsSeqString() || this->first()->IsExternalString());
+ }
+}
+
+
+void SlicedString::SlicedStringVerify() {
+ CHECK(!this->parent()->IsConsString());
+ CHECK(!this->parent()->IsSlicedString());
+ CHECK(this->length() >= SlicedString::kMinLength);
}
@@ -453,6 +481,14 @@
}
+void JSWeakMap::JSWeakMapVerify() {
+ CHECK(IsJSWeakMap());
+ JSObjectVerify();
+ VerifyHeapPointer(table());
+ ASSERT(table()->IsHashTable());
+}
+
+
void JSRegExp::JSRegExpVerify() {
JSObjectVerify();
ASSERT(data()->IsUndefined() || data()->IsFixedArray());
diff --git a/src/objects-inl.h b/src/objects-inl.h
index 70f6267..ff3be03 100644
--- a/src/objects-inl.h
+++ b/src/objects-inl.h
@@ -35,6 +35,7 @@
#ifndef V8_OBJECTS_INL_H_
#define V8_OBJECTS_INL_H_
+#include "elements.h"
#include "objects.h"
#include "contexts.h"
#include "conversions-inl.h"
@@ -158,23 +159,33 @@
}
+bool Object::IsSpecObject() {
+ return Object::IsHeapObject()
+ && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE;
+}
+
+
bool Object::IsSymbol() {
if (!this->IsHeapObject()) return false;
uint32_t type = HeapObject::cast(this)->map()->instance_type();
// Because the symbol tag is non-zero and no non-string types have the
// symbol bit set we can test for symbols with a very simple test
// operation.
- ASSERT(kSymbolTag != 0);
+ STATIC_ASSERT(kSymbolTag != 0);
ASSERT(kNotStringTag + kIsSymbolMask > LAST_TYPE);
return (type & kIsSymbolMask) != 0;
}
bool Object::IsConsString() {
- if (!this->IsHeapObject()) return false;
- uint32_t type = HeapObject::cast(this)->map()->instance_type();
- return (type & (kIsNotStringMask | kStringRepresentationMask)) ==
- (kStringTag | kConsStringTag);
+ if (!IsString()) return false;
+ return StringShape(String::cast(this)).IsCons();
+}
+
+
+bool Object::IsSlicedString() {
+ if (!IsString()) return false;
+ return StringShape(String::cast(this)).IsSliced();
}
@@ -245,7 +256,7 @@
bool StringShape::IsSymbol() {
ASSERT(valid());
- ASSERT(kSymbolTag != 0);
+ STATIC_ASSERT(kSymbolTag != 0);
return (type_ & kIsSymbolMask) != 0;
}
@@ -262,6 +273,38 @@
}
+bool String::IsAsciiRepresentationUnderneath() {
+ uint32_t type = map()->instance_type();
+ STATIC_ASSERT(kIsIndirectStringTag != 0);
+ STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
+ ASSERT(IsFlat());
+ switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
+ case kAsciiStringTag:
+ return true;
+ case kTwoByteStringTag:
+ return false;
+ default: // Cons or sliced string. Need to go deeper.
+ return GetUnderlying()->IsAsciiRepresentation();
+ }
+}
+
+
+bool String::IsTwoByteRepresentationUnderneath() {
+ uint32_t type = map()->instance_type();
+ STATIC_ASSERT(kIsIndirectStringTag != 0);
+ STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
+ ASSERT(IsFlat());
+ switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
+ case kAsciiStringTag:
+ return false;
+ case kTwoByteStringTag:
+ return true;
+ default: // Cons or sliced string. Need to go deeper.
+ return GetUnderlying()->IsTwoByteRepresentation();
+ }
+}
+
+
bool String::HasOnlyAsciiChars() {
uint32_t type = map()->instance_type();
return (type & kStringEncodingMask) == kAsciiStringTag ||
@@ -274,6 +317,16 @@
}
+bool StringShape::IsSliced() {
+ return (type_ & kStringRepresentationMask) == kSlicedStringTag;
+}
+
+
+bool StringShape::IsIndirect() {
+ return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
+}
+
+
bool StringShape::IsExternal() {
return (type_ & kStringRepresentationMask) == kExternalStringTag;
}
@@ -290,6 +343,11 @@
}
+uint32_t StringShape::encoding_tag() {
+ return type_ & kStringEncodingMask;
+}
+
+
uint32_t StringShape::full_representation_tag() {
return (type_ & (kStringRepresentationMask | kStringEncodingMask));
}
@@ -474,6 +532,12 @@
}
+bool Object::IsJSWeakMap() {
+ return Object::IsJSObject() &&
+ HeapObject::cast(this)->map()->instance_type() == JS_WEAK_MAP_TYPE;
+}
+
+
bool Object::IsJSContextExtensionObject() {
return IsHeapObject()
&& (HeapObject::cast(this)->map()->instance_type() ==
@@ -539,7 +603,8 @@
return (map == heap->function_context_map() ||
map == heap->catch_context_map() ||
map == heap->with_context_map() ||
- map == heap->global_context_map());
+ map == heap->global_context_map() ||
+ map == heap->block_context_map());
}
return false;
}
@@ -552,6 +617,13 @@
}
+bool Object::IsSerializedScopeInfo() {
+ return Object::IsHeapObject() &&
+ HeapObject::cast(this)->map() ==
+ HeapObject::cast(this)->GetHeap()->serialized_scope_info_map();
+}
+
+
bool Object::IsJSFunction() {
return Object::IsHeapObject()
&& HeapObject::cast(this)->map()->instance_type() == JS_FUNCTION_TYPE;
@@ -1322,17 +1394,19 @@
ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)
-HeapObject* JSObject::elements() {
+FixedArrayBase* JSObject::elements() {
Object* array = READ_FIELD(this, kElementsOffset);
ASSERT(array->HasValidElements());
- return reinterpret_cast<HeapObject*>(array);
+ return static_cast<FixedArrayBase*>(array);
}
-void JSObject::set_elements(HeapObject* value, WriteBarrierMode mode) {
+void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
ASSERT(map()->has_fast_elements() ==
(value->map() == GetHeap()->fixed_array_map() ||
value->map() == GetHeap()->fixed_cow_array_map()));
+ ASSERT(map()->has_fast_double_elements() ==
+ value->IsFixedDoubleArray());
ASSERT(value->HasValidElements());
WRITE_FIELD(this, kElementsOffset, value);
CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, mode);
@@ -1408,6 +1482,8 @@
return JSValue::kSize;
case JS_ARRAY_TYPE:
return JSValue::kSize;
+ case JS_WEAK_MAP_TYPE:
+ return JSWeakMap::kSize;
case JS_REGEXP_TYPE:
return JSValue::kSize;
case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
@@ -1595,6 +1671,7 @@
void FixedArray::set(int index, Smi* value) {
ASSERT(map() != HEAP->fixed_cow_array_map());
+ ASSERT(index >= 0 && index < this->length());
ASSERT(reinterpret_cast<Object*>(value)->IsSmi());
int offset = kHeaderSize + index * kPointerSize;
WRITE_FIELD(this, offset, value);
@@ -1627,7 +1704,7 @@
}
-double FixedDoubleArray::get(int index) {
+double FixedDoubleArray::get_scalar(int index) {
ASSERT(map() != HEAP->fixed_cow_array_map() &&
map() != HEAP->fixed_array_map());
ASSERT(index >= 0 && index < this->length());
@@ -1637,6 +1714,15 @@
}
+MaybeObject* FixedDoubleArray::get(int index) {
+ if (is_the_hole(index)) {
+ return GetHeap()->the_hole_value();
+ } else {
+ return GetHeap()->NumberFromDouble(get_scalar(index));
+ }
+}
+
+
void FixedDoubleArray::set(int index, double value) {
ASSERT(map() != HEAP->fixed_cow_array_map() &&
map() != HEAP->fixed_array_map());
@@ -1663,9 +1749,19 @@
void FixedDoubleArray::Initialize(FixedDoubleArray* from) {
int old_length = from->length();
ASSERT(old_length < length());
- OS::MemCopy(FIELD_ADDR(this, kHeaderSize),
- FIELD_ADDR(from, kHeaderSize),
- old_length * kDoubleSize);
+ if (old_length * kDoubleSize >= OS::kMinComplexMemCopy) {
+ OS::MemCopy(FIELD_ADDR(this, kHeaderSize),
+ FIELD_ADDR(from, kHeaderSize),
+ old_length * kDoubleSize);
+ } else {
+ for (int i = 0; i < old_length; ++i) {
+ if (from->is_the_hole(i)) {
+ set_the_hole(i);
+ } else {
+ set(i, from->get_scalar(i));
+ }
+ }
+ }
int offset = kHeaderSize + old_length * kDoubleSize;
for (int current = from->length(); current < length(); ++current) {
WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
@@ -1961,6 +2057,17 @@
template<typename Shape, typename Key>
+int HashTable<Shape, Key>::ComputeCapacity(int at_least_space_for) {
+ const int kMinCapacity = 32;
+ int capacity = RoundUpToPowerOf2(at_least_space_for * 2);
+ if (capacity < kMinCapacity) {
+ capacity = kMinCapacity; // Guarantee min capacity.
+ }
+ return capacity;
+}
+
+
+template<typename Shape, typename Key>
int HashTable<Shape, Key>::FindEntry(Key key) {
return FindEntry(GetIsolate(), key);
}
@@ -2024,6 +2131,7 @@
CAST_ACCESSOR(SeqString)
CAST_ACCESSOR(SeqAsciiString)
CAST_ACCESSOR(SeqTwoByteString)
+CAST_ACCESSOR(SlicedString)
CAST_ACCESSOR(ConsString)
CAST_ACCESSOR(ExternalString)
CAST_ACCESSOR(ExternalAsciiString)
@@ -2047,6 +2155,7 @@
CAST_ACCESSOR(JSRegExp)
CAST_ACCESSOR(JSProxy)
CAST_ACCESSOR(JSFunctionProxy)
+CAST_ACCESSOR(JSWeakMap)
CAST_ACCESSOR(Foreign)
CAST_ACCESSOR(ByteArray)
CAST_ACCESSOR(ExternalArray)
@@ -2075,12 +2184,6 @@
SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
-SMI_ACCESSORS(ByteArray, length, kLengthOffset)
-
-// TODO(1493): Investigate if it's possible to s/INT/SMI/ here (and
-// subsequently unify H{Fixed,External}ArrayLength).
-INT_ACCESSORS(ExternalArray, length, kLengthOffset)
-
SMI_ACCESSORS(String, length, kLengthOffset)
@@ -2110,7 +2213,7 @@
MaybeObject* String::TryFlatten(PretenureFlag pretenure) {
if (!StringShape(this).IsCons()) return this;
ConsString* cons = ConsString::cast(this);
- if (cons->second()->length() == 0) return cons->first();
+ if (cons->IsFlat()) return cons->first();
return SlowTryFlatten(pretenure);
}
@@ -2118,10 +2221,8 @@
String* String::TryFlattenGetString(PretenureFlag pretenure) {
MaybeObject* flat = TryFlatten(pretenure);
Object* successfully_flattened;
- if (flat->ToObject(&successfully_flattened)) {
- return String::cast(successfully_flattened);
- }
- return this;
+ if (!flat->ToObject(&successfully_flattened)) return this;
+ return String::cast(successfully_flattened);
}
@@ -2139,6 +2240,9 @@
return ExternalAsciiString::cast(this)->ExternalAsciiStringGet(index);
case kExternalStringTag | kTwoByteStringTag:
return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
+ case kSlicedStringTag | kAsciiStringTag:
+ case kSlicedStringTag | kTwoByteStringTag:
+ return SlicedString::cast(this)->SlicedStringGet(index);
default:
break;
}
@@ -2159,15 +2263,19 @@
bool String::IsFlat() {
- switch (StringShape(this).representation_tag()) {
- case kConsStringTag: {
- String* second = ConsString::cast(this)->second();
- // Only flattened strings have second part empty.
- return second->length() == 0;
- }
- default:
- return true;
- }
+ if (!StringShape(this).IsCons()) return true;
+ return ConsString::cast(this)->second()->length() == 0;
+}
+
+
+String* String::GetUnderlying() {
+ // Giving direct access to underlying string only makes sense if the
+ // wrapping string is already flattened.
+ ASSERT(this->IsFlat());
+ ASSERT(StringShape(this).IsIndirect());
+ STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
+ const int kUnderlyingOffset = SlicedString::kParentOffset;
+ return String::cast(READ_FIELD(this, kUnderlyingOffset));
}
@@ -2226,6 +2334,20 @@
}
+String* SlicedString::parent() {
+ return String::cast(READ_FIELD(this, kParentOffset));
+}
+
+
+void SlicedString::set_parent(String* parent) {
+ ASSERT(parent->IsSeqString());
+ WRITE_FIELD(this, kParentOffset, parent);
+}
+
+
+SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
+
+
String* ConsString::first() {
return String::cast(READ_FIELD(this, kFirstOffset));
}
@@ -2350,13 +2472,18 @@
}
-uint8_t ExternalPixelArray::get(int index) {
+uint8_t ExternalPixelArray::get_scalar(int index) {
ASSERT((index >= 0) && (index < this->length()));
uint8_t* ptr = external_pixel_pointer();
return ptr[index];
}
+MaybeObject* ExternalPixelArray::get(int index) {
+ return Smi::FromInt(static_cast<int>(get_scalar(index)));
+}
+
+
void ExternalPixelArray::set(int index, uint8_t value) {
ASSERT((index >= 0) && (index < this->length()));
uint8_t* ptr = external_pixel_pointer();
@@ -2376,13 +2503,18 @@
}
-int8_t ExternalByteArray::get(int index) {
+int8_t ExternalByteArray::get_scalar(int index) {
ASSERT((index >= 0) && (index < this->length()));
int8_t* ptr = static_cast<int8_t*>(external_pointer());
return ptr[index];
}
+MaybeObject* ExternalByteArray::get(int index) {
+ return Smi::FromInt(static_cast<int>(get_scalar(index)));
+}
+
+
void ExternalByteArray::set(int index, int8_t value) {
ASSERT((index >= 0) && (index < this->length()));
int8_t* ptr = static_cast<int8_t*>(external_pointer());
@@ -2390,13 +2522,18 @@
}
-uint8_t ExternalUnsignedByteArray::get(int index) {
+uint8_t ExternalUnsignedByteArray::get_scalar(int index) {
ASSERT((index >= 0) && (index < this->length()));
uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
return ptr[index];
}
+MaybeObject* ExternalUnsignedByteArray::get(int index) {
+ return Smi::FromInt(static_cast<int>(get_scalar(index)));
+}
+
+
void ExternalUnsignedByteArray::set(int index, uint8_t value) {
ASSERT((index >= 0) && (index < this->length()));
uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
@@ -2404,13 +2541,18 @@
}
-int16_t ExternalShortArray::get(int index) {
+int16_t ExternalShortArray::get_scalar(int index) {
ASSERT((index >= 0) && (index < this->length()));
int16_t* ptr = static_cast<int16_t*>(external_pointer());
return ptr[index];
}
+MaybeObject* ExternalShortArray::get(int index) {
+ return Smi::FromInt(static_cast<int>(get_scalar(index)));
+}
+
+
void ExternalShortArray::set(int index, int16_t value) {
ASSERT((index >= 0) && (index < this->length()));
int16_t* ptr = static_cast<int16_t*>(external_pointer());
@@ -2418,13 +2560,18 @@
}
-uint16_t ExternalUnsignedShortArray::get(int index) {
+uint16_t ExternalUnsignedShortArray::get_scalar(int index) {
ASSERT((index >= 0) && (index < this->length()));
uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
return ptr[index];
}
+MaybeObject* ExternalUnsignedShortArray::get(int index) {
+ return Smi::FromInt(static_cast<int>(get_scalar(index)));
+}
+
+
void ExternalUnsignedShortArray::set(int index, uint16_t value) {
ASSERT((index >= 0) && (index < this->length()));
uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
@@ -2432,13 +2579,18 @@
}
-int32_t ExternalIntArray::get(int index) {
+int32_t ExternalIntArray::get_scalar(int index) {
ASSERT((index >= 0) && (index < this->length()));
int32_t* ptr = static_cast<int32_t*>(external_pointer());
return ptr[index];
}
+MaybeObject* ExternalIntArray::get(int index) {
+ return GetHeap()->NumberFromInt32(get_scalar(index));
+}
+
+
void ExternalIntArray::set(int index, int32_t value) {
ASSERT((index >= 0) && (index < this->length()));
int32_t* ptr = static_cast<int32_t*>(external_pointer());
@@ -2446,13 +2598,18 @@
}
-uint32_t ExternalUnsignedIntArray::get(int index) {
+uint32_t ExternalUnsignedIntArray::get_scalar(int index) {
ASSERT((index >= 0) && (index < this->length()));
uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
return ptr[index];
}
+MaybeObject* ExternalUnsignedIntArray::get(int index) {
+ return GetHeap()->NumberFromUint32(get_scalar(index));
+}
+
+
void ExternalUnsignedIntArray::set(int index, uint32_t value) {
ASSERT((index >= 0) && (index < this->length()));
uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
@@ -2460,13 +2617,18 @@
}
-float ExternalFloatArray::get(int index) {
+float ExternalFloatArray::get_scalar(int index) {
ASSERT((index >= 0) && (index < this->length()));
float* ptr = static_cast<float*>(external_pointer());
return ptr[index];
}
+MaybeObject* ExternalFloatArray::get(int index) {
+ return GetHeap()->NumberFromDouble(get_scalar(index));
+}
+
+
void ExternalFloatArray::set(int index, float value) {
ASSERT((index >= 0) && (index < this->length()));
float* ptr = static_cast<float*>(external_pointer());
@@ -2474,13 +2636,18 @@
}
-double ExternalDoubleArray::get(int index) {
+double ExternalDoubleArray::get_scalar(int index) {
ASSERT((index >= 0) && (index < this->length()));
double* ptr = static_cast<double*>(external_pointer());
return ptr[index];
}
+MaybeObject* ExternalDoubleArray::get(int index) {
+ return GetHeap()->NumberFromDouble(get_scalar(index));
+}
+
+
void ExternalDoubleArray::set(int index, double value) {
ASSERT((index >= 0) && (index < this->length()));
double* ptr = static_cast<double*>(external_pointer());
@@ -2757,7 +2924,8 @@
ASSERT(kind() == STUB ||
kind() == UNARY_OP_IC ||
kind() == BINARY_OP_IC ||
- kind() == COMPARE_IC);
+ kind() == COMPARE_IC ||
+ kind() == TO_BOOLEAN_IC);
return READ_BYTE_FIELD(this, kStubMajorKeyOffset);
}
@@ -2766,7 +2934,8 @@
ASSERT(kind() == STUB ||
kind() == UNARY_OP_IC ||
kind() == BINARY_OP_IC ||
- kind() == COMPARE_IC);
+ kind() == COMPARE_IC ||
+ kind() == TO_BOOLEAN_IC);
ASSERT(0 <= major && major < 256);
WRITE_BYTE_FIELD(this, kStubMajorKeyOffset, major);
}
@@ -2908,6 +3077,17 @@
}
+byte Code::to_boolean_state() {
+ ASSERT(is_to_boolean_ic_stub());
+ return READ_BYTE_FIELD(this, kToBooleanTypeOffset);
+}
+
+
+void Code::set_to_boolean_state(byte value) {
+ ASSERT(is_to_boolean_ic_stub());
+ WRITE_BYTE_FIELD(this, kToBooleanTypeOffset, value);
+}
+
bool Code::is_inline_cache_stub() {
Kind kind = this->kind();
return kind >= FIRST_IC_KIND && kind <= LAST_IC_KIND;
@@ -3249,8 +3429,6 @@
ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
kAccessCheckInfoOffset)
ACCESSORS(FunctionTemplateInfo, flag, Smi, kFlagOffset)
-ACCESSORS(FunctionTemplateInfo, prototype_attributes, Smi,
- kPrototypeAttributesOffset)
ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
@@ -3305,6 +3483,8 @@
BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
kNeedsAccessCheckBit)
+BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
+ kReadOnlyPrototypeBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
kIsExpressionBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
@@ -3743,6 +3923,15 @@
ACCESSORS(JSProxy, padding, Object, kPaddingOffset)
+ACCESSORS(JSWeakMap, table, ObjectHashTable, kTableOffset)
+ACCESSORS_GCSAFE(JSWeakMap, next, Object, kNextOffset)
+
+
+ObjectHashTable* JSWeakMap::unchecked_table() {
+ return reinterpret_cast<ObjectHashTable*>(READ_FIELD(this, kTableOffset));
+}
+
+
Address Foreign::address() {
return AddressFrom<Address>(READ_INTPTR_FIELD(this, kAddressOffset));
}
@@ -3928,6 +4117,11 @@
}
+ElementsAccessor* JSObject::GetElementsAccessor() {
+ return ElementsAccessor::ForKind(GetElementsKind());
+}
+
+
bool JSObject::HasFastElements() {
return GetElementsKind() == FAST_ELEMENTS;
}
@@ -4201,6 +4395,11 @@
}
+bool JSObject::HasHiddenProperties() {
+ return !GetHiddenProperties(OMIT_CREATION)->ToObjectChecked()->IsUndefined();
+}
+
+
bool JSObject::HasElement(uint32_t index) {
return HasElementWithReceiver(this, index);
}
@@ -4316,6 +4515,36 @@
}
+bool ObjectHashTableShape::IsMatch(JSObject* key, Object* other) {
+ return key == JSObject::cast(other);
+}
+
+
+uint32_t ObjectHashTableShape::Hash(JSObject* key) {
+ MaybeObject* maybe_hash = key->GetIdentityHash(JSObject::OMIT_CREATION);
+ ASSERT(!maybe_hash->IsFailure());
+ return Smi::cast(maybe_hash->ToObjectUnchecked())->value();
+}
+
+
+uint32_t ObjectHashTableShape::HashForObject(JSObject* key, Object* other) {
+ MaybeObject* maybe_hash = JSObject::cast(other)->GetIdentityHash(
+ JSObject::OMIT_CREATION);
+ ASSERT(!maybe_hash->IsFailure());
+ return Smi::cast(maybe_hash->ToObjectUnchecked())->value();
+}
+
+
+MaybeObject* ObjectHashTableShape::AsObject(JSObject* key) {
+ return key;
+}
+
+
+void ObjectHashTable::RemoveEntry(int entry) {
+ RemoveEntry(entry, GetHeap());
+}
+
+
void Map::ClearCodeCache(Heap* heap) {
// No write barrier is needed since empty_fixed_array is not in new space.
// Please note this function is used during marking:
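
Among the objects-inl.h additions, HashTable::ComputeCapacity centralizes the sizing rule: double the requested space, round up to a power of two, and never go below 32 entries. The same arithmetic as a standalone sketch (RoundUpToPowerOf2 here is a stand-in for the V8 utility of that name, not the library code itself):

#include <cstdint>
#include <cstdio>

// Stand-in: smallest power of two >= x, for x >= 1.
static uint32_t RoundUpToPowerOf2(uint32_t x) {
  x -= 1;
  x |= x >> 1;  x |= x >> 2;  x |= x >> 4;  x |= x >> 8;  x |= x >> 16;
  return x + 1;
}

static int ComputeCapacity(int at_least_space_for) {
  const int kMinCapacity = 32;
  int capacity = static_cast<int>(RoundUpToPowerOf2(at_least_space_for * 2));
  return capacity < kMinCapacity ? kMinCapacity : capacity;
}

int main() {
  std::printf("%d %d %d\n",
              ComputeCapacity(4),     // 32 (clamped to the minimum)
              ComputeCapacity(20),    // 64
              ComputeCapacity(100));  // 256
  return 0;
}

Asking for space for 20 entries yields a capacity of 64, while any request up to 16 entries lands on the 32-entry minimum.
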
diff --git a/src/objects-printer.cc b/src/objects-printer.cc
index 158789e..3573572 100644
--- a/src/objects-printer.cc
+++ b/src/objects-printer.cc
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -151,6 +151,9 @@
case JS_PROXY_TYPE:
JSProxy::cast(this)->JSProxyPrint(out);
break;
+ case JS_WEAK_MAP_TYPE:
+ JSWeakMap::cast(this)->JSWeakMapPrint(out);
+ break;
case FOREIGN_TYPE:
Foreign::cast(this)->ForeignPrint(out);
break;
@@ -282,17 +285,30 @@
}
break;
}
+ case FAST_DOUBLE_ELEMENTS: {
+ // Print in array notation for non-sparse arrays.
+ FixedDoubleArray* p = FixedDoubleArray::cast(elements());
+ for (int i = 0; i < p->length(); i++) {
+ if (p->is_the_hole(i)) {
+ PrintF(out, " %d: <the hole>", i);
+ } else {
+ PrintF(out, " %d: %g", i, p->get_scalar(i));
+ }
+ PrintF(out, "\n");
+ }
+ break;
+ }
case EXTERNAL_PIXEL_ELEMENTS: {
ExternalPixelArray* p = ExternalPixelArray::cast(elements());
for (int i = 0; i < p->length(); i++) {
- PrintF(out, " %d: %d\n", i, p->get(i));
+ PrintF(out, " %d: %d\n", i, p->get_scalar(i));
}
break;
}
case EXTERNAL_BYTE_ELEMENTS: {
ExternalByteArray* p = ExternalByteArray::cast(elements());
for (int i = 0; i < p->length(); i++) {
- PrintF(out, " %d: %d\n", i, static_cast<int>(p->get(i)));
+ PrintF(out, " %d: %d\n", i, static_cast<int>(p->get_scalar(i)));
}
break;
}
@@ -300,14 +316,14 @@
ExternalUnsignedByteArray* p =
ExternalUnsignedByteArray::cast(elements());
for (int i = 0; i < p->length(); i++) {
- PrintF(out, " %d: %d\n", i, static_cast<int>(p->get(i)));
+ PrintF(out, " %d: %d\n", i, static_cast<int>(p->get_scalar(i)));
}
break;
}
case EXTERNAL_SHORT_ELEMENTS: {
ExternalShortArray* p = ExternalShortArray::cast(elements());
for (int i = 0; i < p->length(); i++) {
- PrintF(out, " %d: %d\n", i, static_cast<int>(p->get(i)));
+ PrintF(out, " %d: %d\n", i, static_cast<int>(p->get_scalar(i)));
}
break;
}
@@ -315,14 +331,14 @@
ExternalUnsignedShortArray* p =
ExternalUnsignedShortArray::cast(elements());
for (int i = 0; i < p->length(); i++) {
- PrintF(out, " %d: %d\n", i, static_cast<int>(p->get(i)));
+ PrintF(out, " %d: %d\n", i, static_cast<int>(p->get_scalar(i)));
}
break;
}
case EXTERNAL_INT_ELEMENTS: {
ExternalIntArray* p = ExternalIntArray::cast(elements());
for (int i = 0; i < p->length(); i++) {
- PrintF(out, " %d: %d\n", i, static_cast<int>(p->get(i)));
+ PrintF(out, " %d: %d\n", i, static_cast<int>(p->get_scalar(i)));
}
break;
}
@@ -330,21 +346,21 @@
ExternalUnsignedIntArray* p =
ExternalUnsignedIntArray::cast(elements());
for (int i = 0; i < p->length(); i++) {
- PrintF(out, " %d: %d\n", i, static_cast<int>(p->get(i)));
+ PrintF(out, " %d: %d\n", i, static_cast<int>(p->get_scalar(i)));
}
break;
}
case EXTERNAL_FLOAT_ELEMENTS: {
ExternalFloatArray* p = ExternalFloatArray::cast(elements());
for (int i = 0; i < p->length(); i++) {
- PrintF(out, " %d: %f\n", i, p->get(i));
+ PrintF(out, " %d: %f\n", i, p->get_scalar(i));
}
break;
}
case EXTERNAL_DOUBLE_ELEMENTS: {
ExternalDoubleArray* p = ExternalDoubleArray::cast(elements());
for (int i = 0; i < p->length(); i++) {
- PrintF(out, " %d: %f\n", i, p->get(i));
+ PrintF(out, " %d: %f\n", i, p->get_scalar(i));
}
break;
}
@@ -360,9 +376,6 @@
}
break;
}
- default:
- UNREACHABLE();
- break;
}
}
@@ -421,6 +434,7 @@
case CODE_TYPE: return "CODE";
case JS_ARRAY_TYPE: return "JS_ARRAY";
case JS_PROXY_TYPE: return "JS_PROXY";
+ case JS_WEAK_MAP_TYPE: return "JS_WEAK_MAP";
case JS_REGEXP_TYPE: return "JS_REGEXP";
case JS_VALUE_TYPE: return "JS_VALUE";
case JS_GLOBAL_OBJECT_TYPE: return "JS_GLOBAL_OBJECT";
@@ -550,6 +564,21 @@
}
+// This method is only meant to be called from gdb for debugging purposes.
+// Since the string can also be in two-byte encoding, non-ascii characters
+// will be ignored in the output.
+char* String::ToAsciiArray() {
+ // Static so that subsequent calls frees previously allocated space.
+ // This also means that previous results will be overwritten.
+ static char* buffer = NULL;
+ if (buffer != NULL) free(buffer);
+ buffer = new char[length()+1];
+ WriteToFlat(this, buffer, 0, length());
+ buffer[length()] = 0;
+ return buffer;
+}
+
+
void JSProxy::JSProxyPrint(FILE* out) {
HeapObject::PrintHeader(out, "JSProxy");
PrintF(out, " - map = 0x%p\n", reinterpret_cast<void*>(map()));
@@ -559,6 +588,16 @@
}
+void JSWeakMap::JSWeakMapPrint(FILE* out) {
+ HeapObject::PrintHeader(out, "JSWeakMap");
+ PrintF(out, " - map = 0x%p\n", reinterpret_cast<void*>(map()));
+ PrintF(out, " - number of elements = %d\n", table()->NumberOfElements());
+ PrintF(out, " - table = ");
+ table()->ShortPrint(out);
+ PrintF(out, "\n");
+}
+
+
void JSFunction::JSFunctionPrint(FILE* out) {
HeapObject::PrintHeader(out, "Function");
PrintF(out, " - map = 0x%p\n", reinterpret_cast<void*>(map()));
diff --git a/src/objects-visiting.cc b/src/objects-visiting.cc
index 4cd795e..bde9e83 100644
--- a/src/objects-visiting.cc
+++ b/src/objects-visiting.cc
@@ -58,6 +58,9 @@
return kVisitConsString;
}
+ case kSlicedStringTag:
+ return kVisitSlicedString;
+
case kExternalStringTag:
return GetVisitorIdForSize(kVisitDataObject,
kVisitDataObjectGeneric,
@@ -88,6 +91,9 @@
case JS_GLOBAL_PROPERTY_CELL_TYPE:
return kVisitPropertyCell;
+ case JS_WEAK_MAP_TYPE:
+ return kVisitJSWeakMap;
+
case JS_REGEXP_TYPE:
return kVisitJSRegExp;
diff --git a/src/objects-visiting.h b/src/objects-visiting.h
index cc64763..4ce1bd0 100644
--- a/src/objects-visiting.h
+++ b/src/objects-visiting.h
@@ -115,12 +115,14 @@
kVisitStructGeneric,
kVisitConsString,
+ kVisitSlicedString,
kVisitOddball,
kVisitCode,
kVisitMap,
kVisitPropertyCell,
kVisitSharedFunctionInfo,
kVisitJSFunction,
+ kVisitJSWeakMap,
kVisitJSRegExp,
kVisitorIdCount,
@@ -298,6 +300,11 @@
ConsString::BodyDescriptor,
int>::Visit);
+ table_.Register(kVisitSlicedString,
+ &FixedBodyVisitor<StaticVisitor,
+ SlicedString::BodyDescriptor,
+ int>::Visit);
+
table_.Register(kVisitFixedArray,
&FlexibleBodyVisitor<StaticVisitor,
FixedArray::BodyDescriptor,
@@ -317,7 +324,9 @@
SharedFunctionInfo::BodyDescriptor,
int>::Visit);
- table_.Register(kVisitJSRegExp, &VisitJSRegExp);
+ table_.Register(kVisitJSWeakMap, &VisitJSObject);
+
+ table_.Register(kVisitJSRegExp, &VisitJSObject);
table_.Register(kVisitSeqAsciiString, &VisitSeqAsciiString);
@@ -356,15 +365,15 @@
return FixedDoubleArray::SizeFor(length);
}
+ static inline int VisitJSObject(Map* map, HeapObject* object) {
+ return JSObjectVisitor::Visit(map, object);
+ }
+
static inline int VisitSeqAsciiString(Map* map, HeapObject* object) {
return SeqAsciiString::cast(object)->
SeqAsciiStringSize(map->instance_type());
}
- static inline int VisitJSRegExp(Map* map, HeapObject* object) {
- return JSObjectVisitor::Visit(map, object);
- }
-
static inline int VisitSeqTwoByteString(Map* map, HeapObject* object) {
return SeqTwoByteString::cast(object)->
SeqTwoByteStringSize(map->instance_type());
diff --git a/src/objects.cc b/src/objects.cc
index 1ab5dd2..0660dba 100644
--- a/src/objects.cc
+++ b/src/objects.cc
@@ -33,6 +33,7 @@
#include "codegen.h"
#include "debug.h"
#include "deoptimizer.h"
+#include "elements.h"
#include "execution.h"
#include "full-codegen.h"
#include "hydrogen.h"
@@ -602,36 +603,69 @@
MaybeObject* Object::GetElementWithReceiver(Object* receiver, uint32_t index) {
- Object* holder = NULL;
- if (IsSmi()) {
- Context* global_context = Isolate::Current()->context()->global_context();
- holder = global_context->number_function()->instance_prototype();
- } else {
- HeapObject* heap_object = HeapObject::cast(this);
+ Heap* heap = IsSmi()
+ ? Isolate::Current()->heap()
+ : HeapObject::cast(this)->GetHeap();
+ Object* holder = this;
- if (heap_object->IsJSObject()) {
- return JSObject::cast(this)->GetElementWithReceiver(receiver, index);
- }
- Heap* heap = heap_object->GetHeap();
- Isolate* isolate = heap->isolate();
-
- Context* global_context = isolate->context()->global_context();
- if (heap_object->IsString()) {
- holder = global_context->string_function()->instance_prototype();
- } else if (heap_object->IsHeapNumber()) {
+ // Iterate up the prototype chain until an element is found or the null
+ // prototype is encountered.
+ for (holder = this;
+ holder != heap->null_value();
+ holder = holder->GetPrototype()) {
+ if (holder->IsSmi()) {
+ Context* global_context = Isolate::Current()->context()->global_context();
holder = global_context->number_function()->instance_prototype();
- } else if (heap_object->IsBoolean()) {
- holder = global_context->boolean_function()->instance_prototype();
- } else if (heap_object->IsJSProxy()) {
- return heap->undefined_value(); // For now...
} else {
- // Undefined and null have no indexed properties.
- ASSERT(heap_object->IsUndefined() || heap_object->IsNull());
- return heap->undefined_value();
+ HeapObject* heap_object = HeapObject::cast(holder);
+ if (!heap_object->IsJSObject()) {
+ Isolate* isolate = heap->isolate();
+ Context* global_context = isolate->context()->global_context();
+ if (heap_object->IsString()) {
+ holder = global_context->string_function()->instance_prototype();
+ } else if (heap_object->IsHeapNumber()) {
+ holder = global_context->number_function()->instance_prototype();
+ } else if (heap_object->IsBoolean()) {
+ holder = global_context->boolean_function()->instance_prototype();
+ } else if (heap_object->IsJSProxy()) {
+ return heap->undefined_value(); // For now...
+ } else {
+ // Undefined and null have no indexed properties.
+ ASSERT(heap_object->IsUndefined() || heap_object->IsNull());
+ return heap->undefined_value();
+ }
+ }
+ }
+
+ // Inline the case for JSObjects. Doing so significantly improves the
+ // performance of fetching elements where checking the prototype chain is
+ // necessary.
+ JSObject* js_object = JSObject::cast(holder);
+
+ // Check access rights if needed.
+ if (js_object->IsAccessCheckNeeded()) {
+ Isolate* isolate = heap->isolate();
+ if (!isolate->MayIndexedAccess(js_object, index, v8::ACCESS_GET)) {
+ isolate->ReportFailedAccessCheck(js_object, v8::ACCESS_GET);
+ return heap->undefined_value();
+ }
+ }
+
+ if (js_object->HasIndexedInterceptor()) {
+ return js_object->GetElementWithInterceptor(receiver, index);
+ }
+
+ if (js_object->elements() != heap->empty_fixed_array()) {
+ MaybeObject* result = js_object->GetElementsAccessor()->Get(
+ js_object->elements(),
+ index,
+ js_object,
+ receiver);
+ if (result != heap->the_hole_value()) return result;
}
}
- return JSObject::cast(holder)->GetElementWithReceiver(receiver, index);
+ return heap->undefined_value();
}
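
The rewrite above folds what used to be a recursive call into JSObject::GetElementWithReceiver into a single loop: walk the prototype chain, map primitives onto their wrapper prototypes, and probe each JSObject's own elements directly via its ElementsAccessor. A minimal standalone sketch of that loop shape, using toy types (ToyObject and kNotFound are illustrative, not V8's):

#include <cstdint>
#include <unordered_map>

struct ToyObject {
  std::unordered_map<uint32_t, int> elements;  // own indexed properties
  ToyObject* prototype = nullptr;              // nullptr stands in for null
};

const int kNotFound = -1;  // the real code returns the undefined value instead

int GetElementWithReceiver(ToyObject* receiver, uint32_t index) {
  // Iterate up the prototype chain until an element is found or the end of
  // the chain is reached, mirroring the loop above.
  for (ToyObject* holder = receiver; holder != nullptr;
       holder = holder->prototype) {
    auto it = holder->elements.find(index);
    if (it != holder->elements.end()) return it->second;
  }
  return kNotFound;
}
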
@@ -962,6 +996,11 @@
accumulator->Add("<JS array[%u]>", static_cast<uint32_t>(length));
break;
}
+ case JS_WEAK_MAP_TYPE: {
+ int elements = JSWeakMap::cast(this)->table()->NumberOfElements();
+ accumulator->Add("<JS WeakMap[%d]>", elements);
+ break;
+ }
case JS_REGEXP_TYPE: {
accumulator->Add("<JS RegExp>");
break;
@@ -1168,6 +1207,9 @@
case kConsStringTag:
ConsString::BodyDescriptor::IterateBody(this, v);
break;
+ case kSlicedStringTag:
+ SlicedString::BodyDescriptor::IterateBody(this, v);
+ break;
case kExternalStringTag:
if ((type & kStringEncodingMask) == kAsciiStringTag) {
reinterpret_cast<ExternalAsciiString*>(this)->
@@ -1191,6 +1233,7 @@
case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
case JS_VALUE_TYPE:
case JS_ARRAY_TYPE:
+ case JS_WEAK_MAP_TYPE:
case JS_REGEXP_TYPE:
case JS_GLOBAL_PROXY_TYPE:
case JS_GLOBAL_OBJECT_TYPE:
@@ -2278,7 +2321,8 @@
if (has_exception) return Failure::Exception();
Object* bool_result = result->ToBoolean();
- if (mode == STRICT_DELETION && bool_result == GetHeap()->false_value()) {
+ if (mode == STRICT_DELETION &&
+ bool_result == isolate->heap()->false_value()) {
Handle<Object> args[] = { handler, trap_name };
Handle<Object> error = isolate->factory()->NewTypeError(
"handler_failed", HandleVector(args, ARRAY_SIZE(args)));
@@ -2330,7 +2374,7 @@
Handle<JSProxy> self(this);
isolate->factory()->BecomeJSObject(self);
- ASSERT(IsJSObject());
+ ASSERT(self->IsJSObject());
// TODO(rossberg): recognize function proxies.
}
@@ -2470,6 +2514,9 @@
// callback setter removed. The two lines looking up the LookupResult
// result are also added. If one of the functions is changed, the other
// should be.
+// Note that this method cannot be used to set the prototype of a function,
+// because ConvertDescriptorToField(), which is called in "case CALLBACKS:",
+// doesn't handle function prototypes correctly.
MaybeObject* JSObject::SetLocalPropertyIgnoreAttributes(
String* name,
Object* value,
@@ -2895,9 +2942,12 @@
int length = IsJSArray()
? Smi::cast(JSArray::cast(this)->length())->value()
: array->length();
+ int old_capacity = 0;
+ int used_elements = 0;
+ GetElementsCapacityAndUsage(&old_capacity, &used_elements);
NumberDictionary* dictionary = NULL;
{ Object* object;
- MaybeObject* maybe = NumberDictionary::Allocate(length);
+ MaybeObject* maybe = NumberDictionary::Allocate(used_elements);
if (!maybe->ToObject(&object)) return maybe;
dictionary = NumberDictionary::cast(object);
}
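
Allocating the NumberDictionary for used_elements rather than for length matters for sparse arrays: an array whose length is, say, 10000 but which holds only a handful of defined entries now gets a dictionary sized for those few entries instead of one pre-sized for the full length.
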
@@ -2916,7 +2966,7 @@
// exceed the capacity of new space, and we would fail repeatedly
// trying to convert the FixedDoubleArray.
MaybeObject* maybe_value_object =
- GetHeap()->AllocateHeapNumber(double_array->get(i), TENURED);
+ GetHeap()->AllocateHeapNumber(double_array->get_scalar(i), TENURED);
if (!maybe_value_object->ToObject(&value)) return maybe_value_object;
}
} else {
@@ -2960,6 +3010,98 @@
}
+MaybeObject* JSObject::GetHiddenProperties(HiddenPropertiesFlag flag) {
+ Isolate* isolate = GetIsolate();
+ Heap* heap = isolate->heap();
+ Object* holder = BypassGlobalProxy();
+ if (holder->IsUndefined()) return heap->undefined_value();
+ JSObject* obj = JSObject::cast(holder);
+ if (obj->HasFastProperties()) {
+ // If the object has fast properties, check whether the first slot
+ // in the descriptor array matches the hidden symbol. Since the
+    // hidden symbol's hash code is zero (and no other string has hash
+    // code zero), it will always occupy the first entry if present.
+ DescriptorArray* descriptors = obj->map()->instance_descriptors();
+ if ((descriptors->number_of_descriptors() > 0) &&
+ (descriptors->GetKey(0) == heap->hidden_symbol()) &&
+ descriptors->IsProperty(0)) {
+ ASSERT(descriptors->GetType(0) == FIELD);
+ return obj->FastPropertyAt(descriptors->GetFieldIndex(0));
+ }
+ }
+
+ // Only attempt to find the hidden properties in the local object and not
+ // in the prototype chain.
+ if (!obj->HasHiddenPropertiesObject()) {
+ // Hidden properties object not found. Allocate a new hidden properties
+ // object if requested. Otherwise return the undefined value.
+ if (flag == ALLOW_CREATION) {
+ Object* hidden_obj;
+ { MaybeObject* maybe_obj = heap->AllocateJSObject(
+ isolate->context()->global_context()->object_function());
+ if (!maybe_obj->ToObject(&hidden_obj)) return maybe_obj;
+ }
+ // Don't allow leakage of the hidden object through accessors
+ // on Object.prototype.
+ {
+ MaybeObject* maybe_obj =
+ JSObject::cast(hidden_obj)->SetPrototype(heap->null_value(), false);
+ if (maybe_obj->IsFailure()) return maybe_obj;
+ }
+ return obj->SetHiddenPropertiesObject(hidden_obj);
+ } else {
+ return heap->undefined_value();
+ }
+ }
+ return obj->GetHiddenPropertiesObject();
+}
+
+
+MaybeObject* JSObject::GetIdentityHash(HiddenPropertiesFlag flag) {
+ Isolate* isolate = GetIsolate();
+ Object* hidden_props_obj;
+ { MaybeObject* maybe_obj = GetHiddenProperties(flag);
+ if (!maybe_obj->ToObject(&hidden_props_obj)) return maybe_obj;
+ }
+ if (!hidden_props_obj->IsJSObject()) {
+    // We failed to create hidden properties. That happens for a
+    // detached global proxy.
+ ASSERT(hidden_props_obj->IsUndefined());
+ return Smi::FromInt(0);
+ }
+ JSObject* hidden_props = JSObject::cast(hidden_props_obj);
+ String* hash_symbol = isolate->heap()->identity_hash_symbol();
+ {
+ // Note that HasLocalProperty() can cause a GC in the general case in the
+ // presence of interceptors.
+ AssertNoAllocation no_alloc;
+ if (hidden_props->HasLocalProperty(hash_symbol)) {
+ MaybeObject* hash = hidden_props->GetProperty(hash_symbol);
+ return Smi::cast(hash->ToObjectChecked());
+ }
+ }
+
+ int hash_value;
+ int attempts = 0;
+ do {
+ // Generate a random 32-bit hash value but limit range to fit
+ // within a smi.
+ hash_value = V8::Random(isolate) & Smi::kMaxValue;
+ attempts++;
+ } while (hash_value == 0 && attempts < 30);
+ hash_value = hash_value != 0 ? hash_value : 1; // never return 0
+
+ Smi* hash = Smi::FromInt(hash_value);
+ { MaybeObject* result = hidden_props->SetLocalPropertyIgnoreAttributes(
+ hash_symbol,
+ hash,
+ static_cast<PropertyAttributes>(None));
+ if (result->IsFailure()) return result;
+ }
+ return hash;
+}
+
+
MaybeObject* JSObject::DeletePropertyPostInterceptor(String* name,
DeleteMode mode) {
// Check local property, ignore interceptor.
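
The GetIdentityHash() added in the hunk above draws a random value, limits it to the positive Smi range, retries a bounded number of times if it comes out zero, and falls back to 1 so that 0 is never handed out. A standalone sketch of just that generation step; the 30-attempt limit mirrors the code above, while the 31-bit Smi payload and the use of <random> are assumptions made for illustration:

#include <cstdint>
#include <random>

int32_t GenerateIdentityHash() {
  static std::mt19937 rng{std::random_device{}()};
  const int32_t kSmiMaxValue = (1 << 30) - 1;  // assumed 31-bit Smi payload
  int32_t hash_value = 0;
  int attempts = 0;
  do {
    // Random value limited to the Smi range, as in the loop above.
    hash_value = static_cast<int32_t>(rng()) & kSmiMaxValue;
    attempts++;
  } while (hash_value == 0 && attempts < 30);
  return hash_value != 0 ? hash_value : 1;  // never return 0
}
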
@@ -3009,48 +3151,6 @@
}
-MaybeObject* JSObject::DeleteElementPostInterceptor(uint32_t index,
- DeleteMode mode) {
- ASSERT(!HasExternalArrayElements());
- switch (GetElementsKind()) {
- case FAST_ELEMENTS: {
- Object* obj;
- { MaybeObject* maybe_obj = EnsureWritableFastElements();
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
- }
- uint32_t length = IsJSArray() ?
- static_cast<uint32_t>(Smi::cast(JSArray::cast(this)->length())->value()) :
- static_cast<uint32_t>(FixedArray::cast(elements())->length());
- if (index < length) {
- FixedArray::cast(elements())->set_the_hole(index);
- }
- break;
- }
- case DICTIONARY_ELEMENTS: {
- NumberDictionary* dictionary = element_dictionary();
- int entry = dictionary->FindEntry(index);
- if (entry != NumberDictionary::kNotFound) {
- Object* deleted = dictionary->DeleteProperty(entry, mode);
- if (deleted == GetHeap()->true_value()) {
- MaybeObject* maybe_elements = dictionary->Shrink(index);
- FixedArray* new_elements = NULL;
- if (!maybe_elements->To(&new_elements)) {
- return maybe_elements;
- }
- set_elements(new_elements);
- }
- return deleted;
- }
- break;
- }
- default:
- UNREACHABLE();
- break;
- }
- return GetHeap()->true_value();
-}
-
-
MaybeObject* JSObject::DeleteElementWithInterceptor(uint32_t index) {
Isolate* isolate = GetIsolate();
Heap* heap = isolate->heap();
@@ -3078,100 +3178,15 @@
ASSERT(result->IsBoolean());
return *v8::Utils::OpenHandle(*result);
}
- MaybeObject* raw_result =
- this_handle->DeleteElementPostInterceptor(index, NORMAL_DELETION);
+ MaybeObject* raw_result = this_handle->GetElementsAccessor()->Delete(
+ *this_handle,
+ index,
+ NORMAL_DELETION);
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
return raw_result;
}
-MaybeObject* JSObject::DeleteFastElement(uint32_t index) {
- ASSERT(HasFastElements() || HasFastArgumentsElements());
- Heap* heap = GetHeap();
- FixedArray* backing_store = FixedArray::cast(elements());
- if (backing_store->map() == heap->non_strict_arguments_elements_map()) {
- backing_store = FixedArray::cast(backing_store->get(1));
- } else {
- Object* writable;
- MaybeObject* maybe = EnsureWritableFastElements();
- if (!maybe->ToObject(&writable)) return maybe;
- backing_store = FixedArray::cast(writable);
- }
- uint32_t length = static_cast<uint32_t>(
- IsJSArray()
- ? Smi::cast(JSArray::cast(this)->length())->value()
- : backing_store->length());
- if (index < length) {
- backing_store->set_the_hole(index);
- // If an old space backing store is larger than a certain size and
- // has too few used values, normalize it.
- // To avoid doing the check on every delete we require at least
- // one adjacent hole to the value being deleted.
- Object* hole = heap->the_hole_value();
- const int kMinLengthForSparsenessCheck = 64;
- if (backing_store->length() >= kMinLengthForSparsenessCheck &&
- !heap->InNewSpace(backing_store) &&
- ((index > 0 && backing_store->get(index - 1) == hole) ||
- (index + 1 < length && backing_store->get(index + 1) == hole))) {
- int num_used = 0;
- for (int i = 0; i < backing_store->length(); ++i) {
- if (backing_store->get(i) != hole) ++num_used;
- // Bail out early if more than 1/4 is used.
- if (4 * num_used > backing_store->length()) break;
- }
- if (4 * num_used <= backing_store->length()) {
- MaybeObject* result = NormalizeElements();
- if (result->IsFailure()) return result;
- }
- }
- }
- return heap->true_value();
-}
-
-
-MaybeObject* JSObject::DeleteDictionaryElement(uint32_t index,
- DeleteMode mode) {
- Isolate* isolate = GetIsolate();
- Heap* heap = isolate->heap();
- FixedArray* backing_store = FixedArray::cast(elements());
- bool is_arguments =
- (GetElementsKind() == JSObject::NON_STRICT_ARGUMENTS_ELEMENTS);
- if (is_arguments) {
- backing_store = FixedArray::cast(backing_store->get(1));
- }
- NumberDictionary* dictionary = NumberDictionary::cast(backing_store);
- int entry = dictionary->FindEntry(index);
- if (entry != NumberDictionary::kNotFound) {
- Object* result = dictionary->DeleteProperty(entry, mode);
- if (result == heap->true_value()) {
- MaybeObject* maybe_elements = dictionary->Shrink(index);
- FixedArray* new_elements = NULL;
- if (!maybe_elements->To(&new_elements)) {
- return maybe_elements;
- }
- if (is_arguments) {
- FixedArray::cast(elements())->set(1, new_elements);
- } else {
- set_elements(new_elements);
- }
- }
- if (mode == STRICT_DELETION && result == heap->false_value()) {
- // In strict mode, attempting to delete a non-configurable property
- // throws an exception.
- HandleScope scope(isolate);
- Handle<Object> holder(this);
- Handle<Object> name = isolate->factory()->NewNumberFromUint(index);
- Handle<Object> args[2] = { name, holder };
- Handle<Object> error =
- isolate->factory()->NewTypeError("strict_delete_property",
- HandleVector(args, 2));
- return isolate->Throw(*error);
- }
- }
- return heap->true_value();
-}
-
-
MaybeObject* JSObject::DeleteElement(uint32_t index, DeleteMode mode) {
Isolate* isolate = GetIsolate();
// Check access rights if needed.
@@ -3190,62 +3205,13 @@
if (HasIndexedInterceptor()) {
// Skip interceptor if forcing deletion.
- return (mode == FORCE_DELETION)
- ? DeleteElementPostInterceptor(index, FORCE_DELETION)
- : DeleteElementWithInterceptor(index);
+ if (mode != FORCE_DELETION) {
+ return DeleteElementWithInterceptor(index);
+ }
+ mode = JSReceiver::FORCE_DELETION;
}
- switch (GetElementsKind()) {
- case FAST_ELEMENTS:
- return DeleteFastElement(index);
-
- case DICTIONARY_ELEMENTS:
- return DeleteDictionaryElement(index, mode);
-
- case FAST_DOUBLE_ELEMENTS: {
- int length = IsJSArray()
- ? Smi::cast(JSArray::cast(this)->length())->value()
- : FixedDoubleArray::cast(elements())->length();
- if (index < static_cast<uint32_t>(length)) {
- FixedDoubleArray::cast(elements())->set_the_hole(index);
- }
- break;
- }
- case EXTERNAL_PIXEL_ELEMENTS:
- case EXTERNAL_BYTE_ELEMENTS:
- case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
- case EXTERNAL_SHORT_ELEMENTS:
- case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
- case EXTERNAL_INT_ELEMENTS:
- case EXTERNAL_UNSIGNED_INT_ELEMENTS:
- case EXTERNAL_FLOAT_ELEMENTS:
- case EXTERNAL_DOUBLE_ELEMENTS:
- // Pixel and external array elements cannot be deleted. Just
- // silently ignore here.
- break;
-
- case NON_STRICT_ARGUMENTS_ELEMENTS: {
- FixedArray* parameter_map = FixedArray::cast(elements());
- uint32_t length = parameter_map->length();
- Object* probe =
- index < (length - 2) ? parameter_map->get(index + 2) : NULL;
- if (probe != NULL && !probe->IsTheHole()) {
- // TODO(kmillikin): We could check if this was the last aliased
- // parameter, and revert to normal elements in that case. That
- // would enable GC of the context.
- parameter_map->set_the_hole(index + 2);
- } else {
- FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
- if (arguments->IsDictionary()) {
- return DeleteDictionaryElement(index, mode);
- } else {
- return DeleteFastElement(index);
- }
- }
- break;
- }
- }
- return isolate->heap()->true_value();
+ return GetElementsAccessor()->Delete(this, index, mode);
}
@@ -3666,6 +3632,7 @@
if (is_element) {
switch (GetElementsKind()) {
case FAST_ELEMENTS:
+ case FAST_DOUBLE_ELEMENTS:
break;
case EXTERNAL_PIXEL_ELEMENTS:
case EXTERNAL_BYTE_ELEMENTS:
@@ -3676,7 +3643,6 @@
case EXTERNAL_UNSIGNED_INT_ELEMENTS:
case EXTERNAL_FLOAT_ELEMENTS:
case EXTERNAL_DOUBLE_ELEMENTS:
- case FAST_DOUBLE_ELEMENTS:
// Ignore getters and setters on pixel and external array
// elements.
return heap->undefined_value();
@@ -3915,6 +3881,7 @@
// Accessors overwrite previous callbacks (cf. with getters/setters).
switch (GetElementsKind()) {
case FAST_ELEMENTS:
+ case FAST_DOUBLE_ELEMENTS:
break;
case EXTERNAL_PIXEL_ELEMENTS:
case EXTERNAL_BYTE_ELEMENTS:
@@ -3925,7 +3892,6 @@
case EXTERNAL_UNSIGNED_INT_ELEMENTS:
case EXTERNAL_FLOAT_ELEMENTS:
case EXTERNAL_DOUBLE_ELEMENTS:
- case FAST_DOUBLE_ELEMENTS:
// Ignore getters and setters on pixel and external array
// elements.
return isolate->heap()->undefined_value();
@@ -4521,20 +4487,6 @@
}
-static bool HasKey(FixedArray* array, Object* key) {
- int len0 = array->length();
- for (int i = 0; i < len0; i++) {
- Object* element = array->get(i);
- if (element->IsSmi() && key->IsSmi() && (element == key)) return true;
- if (element->IsString() &&
- key->IsString() && String::cast(element)->Equals(String::cast(key))) {
- return true;
- }
- }
- return false;
-}
-
-
MaybeObject* PolymorphicCodeCache::Update(MapList* maps,
Code::Flags flags,
Code* code) {
@@ -4694,102 +4646,37 @@
MaybeObject* FixedArray::AddKeysFromJSArray(JSArray* array) {
- ASSERT(!array->HasExternalArrayElements());
- switch (array->GetElementsKind()) {
- case JSObject::FAST_ELEMENTS:
- return UnionOfKeys(FixedArray::cast(array->elements()));
- case JSObject::DICTIONARY_ELEMENTS: {
- NumberDictionary* dict = array->element_dictionary();
- int size = dict->NumberOfElements();
-
- // Allocate a temporary fixed array.
- Object* object;
- { MaybeObject* maybe_object = GetHeap()->AllocateFixedArray(size);
- if (!maybe_object->ToObject(&object)) return maybe_object;
- }
- FixedArray* key_array = FixedArray::cast(object);
-
- int capacity = dict->Capacity();
- int pos = 0;
- // Copy the elements from the JSArray to the temporary fixed array.
- for (int i = 0; i < capacity; i++) {
- if (dict->IsKey(dict->KeyAt(i))) {
- key_array->set(pos++, dict->ValueAt(i));
- }
- }
- // Compute the union of this and the temporary fixed array.
- return UnionOfKeys(key_array);
+ ElementsAccessor* accessor = array->GetElementsAccessor();
+ MaybeObject* maybe_result =
+ accessor->AddElementsToFixedArray(array->elements(), this, array, array);
+ FixedArray* result;
+ if (!maybe_result->To<FixedArray>(&result)) return maybe_result;
+#ifdef DEBUG
+ if (FLAG_enable_slow_asserts) {
+ for (int i = 0; i < result->length(); i++) {
+ Object* current = result->get(i);
+ ASSERT(current->IsNumber() || current->IsString());
}
- case JSObject::NON_STRICT_ARGUMENTS_ELEMENTS:
- UNIMPLEMENTED();
- break;
- case JSObject::EXTERNAL_BYTE_ELEMENTS:
- case JSObject::EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
- case JSObject::EXTERNAL_SHORT_ELEMENTS:
- case JSObject::EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
- case JSObject::EXTERNAL_INT_ELEMENTS:
- case JSObject::EXTERNAL_UNSIGNED_INT_ELEMENTS:
- case JSObject::EXTERNAL_FLOAT_ELEMENTS:
- case JSObject::EXTERNAL_DOUBLE_ELEMENTS:
- case JSObject::EXTERNAL_PIXEL_ELEMENTS:
- case JSObject::FAST_DOUBLE_ELEMENTS:
- break;
}
- UNREACHABLE();
- return GetHeap()->null_value(); // Failure case needs to "return" a value.
+#endif
+ return result;
}
MaybeObject* FixedArray::UnionOfKeys(FixedArray* other) {
- int len0 = length();
+ ElementsAccessor* accessor = ElementsAccessor::ForArray(other);
+ MaybeObject* maybe_result =
+ accessor->AddElementsToFixedArray(other, this, NULL, NULL);
+ FixedArray* result;
+ if (!maybe_result->To<FixedArray>(&result)) return maybe_result;
#ifdef DEBUG
if (FLAG_enable_slow_asserts) {
- for (int i = 0; i < len0; i++) {
- ASSERT(get(i)->IsString() || get(i)->IsNumber());
+ for (int i = 0; i < result->length(); i++) {
+ Object* current = result->get(i);
+ ASSERT(current->IsNumber() || current->IsString());
}
}
#endif
- int len1 = other->length();
- // Optimize if 'other' is empty.
- // We cannot optimize if 'this' is empty, as other may have holes
- // or non keys.
- if (len1 == 0) return this;
-
- // Compute how many elements are not in this.
- int extra = 0;
- for (int y = 0; y < len1; y++) {
- Object* value = other->get(y);
- if (!value->IsTheHole() && !HasKey(this, value)) extra++;
- }
-
- if (extra == 0) return this;
-
- // Allocate the result
- Object* obj;
- { MaybeObject* maybe_obj = GetHeap()->AllocateFixedArray(len0 + extra);
- if (!maybe_obj->ToObject(&obj)) return maybe_obj;
- }
- // Fill in the content
- AssertNoAllocation no_gc;
- FixedArray* result = FixedArray::cast(obj);
- WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc);
- for (int i = 0; i < len0; i++) {
- Object* e = get(i);
- ASSERT(e->IsString() || e->IsNumber());
- result->set(i, e, mode);
- }
- // Fill in the extra keys.
- int index = 0;
- for (int y = 0; y < len1; y++) {
- Object* value = other->get(y);
- if (!value->IsTheHole() && !HasKey(this, value)) {
- Object* e = other->get(y);
- ASSERT(e->IsString() || e->IsNumber());
- result->set(len0 + index, e, mode);
- index++;
- }
- }
- ASSERT(extra == index);
return result;
}
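
Both AddKeysFromJSArray() and UnionOfKeys() now defer to ElementsAccessor::AddElementsToFixedArray(), which appends to the receiver's keys those elements of the other array that are neither holes nor already present. A standalone sketch of that union semantics over plain containers, mirroring the open-coded version removed above (the string key type and the use of std::optional for holes are illustrative only):

#include <optional>
#include <set>
#include <string>
#include <vector>

using Key = std::string;

std::vector<Key> UnionOfKeys(const std::vector<Key>& base,
                             const std::vector<std::optional<Key>>& other) {
  std::vector<Key> result = base;
  // Keys already present in the base array are skipped, as HasKey() did.
  const std::set<Key> base_keys(base.begin(), base.end());
  for (const std::optional<Key>& maybe_key : other) {
    if (!maybe_key) continue;                       // skip holes
    if (base_keys.count(*maybe_key) != 0) continue;
    result.push_back(*maybe_key);
  }
  return result;
}
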
@@ -5171,55 +5058,45 @@
}
-Vector<const char> String::ToAsciiVector() {
- ASSERT(IsAsciiRepresentation());
- ASSERT(IsFlat());
-
- int offset = 0;
+String::FlatContent String::GetFlatContent() {
int length = this->length();
- StringRepresentationTag string_tag = StringShape(this).representation_tag();
+ StringShape shape(this);
String* string = this;
- if (string_tag == kConsStringTag) {
- ConsString* cons = ConsString::cast(string);
- ASSERT(cons->second()->length() == 0);
- string = cons->first();
- string_tag = StringShape(string).representation_tag();
- }
- if (string_tag == kSeqStringTag) {
- SeqAsciiString* seq = SeqAsciiString::cast(string);
- char* start = seq->GetChars();
- return Vector<const char>(start + offset, length);
- }
- ASSERT(string_tag == kExternalStringTag);
- ExternalAsciiString* ext = ExternalAsciiString::cast(string);
- const char* start = ext->resource()->data();
- return Vector<const char>(start + offset, length);
-}
-
-
-Vector<const uc16> String::ToUC16Vector() {
- ASSERT(IsTwoByteRepresentation());
- ASSERT(IsFlat());
-
int offset = 0;
- int length = this->length();
- StringRepresentationTag string_tag = StringShape(this).representation_tag();
- String* string = this;
- if (string_tag == kConsStringTag) {
+ if (shape.representation_tag() == kConsStringTag) {
ConsString* cons = ConsString::cast(string);
- ASSERT(cons->second()->length() == 0);
+ if (cons->second()->length() != 0) {
+ return FlatContent();
+ }
string = cons->first();
- string_tag = StringShape(string).representation_tag();
+ shape = StringShape(string);
}
- if (string_tag == kSeqStringTag) {
- SeqTwoByteString* seq = SeqTwoByteString::cast(string);
- return Vector<const uc16>(seq->GetChars() + offset, length);
+ if (shape.representation_tag() == kSlicedStringTag) {
+ SlicedString* slice = SlicedString::cast(string);
+ offset = slice->offset();
+ string = slice->parent();
+ shape = StringShape(string);
+ ASSERT(shape.representation_tag() != kConsStringTag &&
+ shape.representation_tag() != kSlicedStringTag);
}
- ASSERT(string_tag == kExternalStringTag);
- ExternalTwoByteString* ext = ExternalTwoByteString::cast(string);
- const uc16* start =
- reinterpret_cast<const uc16*>(ext->resource()->data());
- return Vector<const uc16>(start + offset, length);
+ if (shape.encoding_tag() == kAsciiStringTag) {
+ const char* start;
+ if (shape.representation_tag() == kSeqStringTag) {
+ start = SeqAsciiString::cast(string)->GetChars();
+ } else {
+ start = ExternalAsciiString::cast(string)->resource()->data();
+ }
+ return FlatContent(Vector<const char>(start + offset, length));
+ } else {
+ ASSERT(shape.encoding_tag() == kTwoByteStringTag);
+ const uc16* start;
+ if (shape.representation_tag() == kSeqStringTag) {
+ start = SeqTwoByteString::cast(string)->GetChars();
+ } else {
+ start = ExternalTwoByteString::cast(string)->resource()->data();
+ }
+ return FlatContent(Vector<const uc16>(start + offset, length));
+ }
}
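
String::GetFlatContent() above replaces the ToAsciiVector()/ToUC16Vector() pair with a single call whose result records whether the string was actually flat and, if so, which encoding it has; cons strings with a non-empty second half come back as non-flat, and sliced strings are resolved to an offset into their parent. A toy model of that result shape and of the IsFlat()/IsAscii() caller pattern used in the hunks below (the struct layout is illustrative, not V8's FlatContent):

#include <cstdint>
#include <string>
#include <vector>

struct ToyFlatContent {
  enum State { NON_FLAT, ASCII, TWO_BYTE };
  State state = NON_FLAT;
  std::string ascii;            // valid only when state == ASCII
  std::vector<uint16_t> utf16;  // valid only when state == TWO_BYTE

  bool IsFlat() const { return state != NON_FLAT; }
  bool IsAscii() const { return state == ASCII; }
  bool IsTwoByte() const { return state == TWO_BYTE; }
};

// Typical caller: one lookup, then branch on the encoding.
size_t FlatLength(const ToyFlatContent& content) {
  if (!content.IsFlat()) return 0;  // callers must handle the non-flat case
  return content.IsAscii() ? content.ascii.size() : content.utf16.size();
}
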
@@ -5290,13 +5167,17 @@
const uc16* String::GetTwoByteData(unsigned start) {
- ASSERT(!IsAsciiRepresentation());
+ ASSERT(!IsAsciiRepresentationUnderneath());
switch (StringShape(this).representation_tag()) {
case kSeqStringTag:
return SeqTwoByteString::cast(this)->SeqTwoByteStringGetData(start);
case kExternalStringTag:
return ExternalTwoByteString::cast(this)->
ExternalTwoByteStringGetData(start);
+ case kSlicedStringTag: {
+ SlicedString* slice = SlicedString::cast(this);
+ return slice->parent()->GetTwoByteData(start + slice->offset());
+ }
case kConsStringTag:
UNREACHABLE();
return NULL;
@@ -5587,6 +5468,10 @@
max_chars);
return rbb->util_buffer;
}
+ case kSlicedStringTag:
+ return SlicedString::cast(input)->SlicedStringReadBlock(rbb,
+ offset_ptr,
+ max_chars);
default:
break;
}
@@ -5669,11 +5554,13 @@
if (str_ == NULL) return;
Handle<String> str(str_);
ASSERT(str->IsFlat());
- is_ascii_ = str->IsAsciiRepresentation();
+ String::FlatContent content = str->GetFlatContent();
+ ASSERT(content.IsFlat());
+ is_ascii_ = content.IsAscii();
if (is_ascii_) {
- start_ = str->ToAsciiVector().start();
+ start_ = content.ToAsciiVector().start();
} else {
- start_ = str->ToUC16Vector().start();
+ start_ = content.ToUC16Vector().start();
}
}
@@ -5728,6 +5615,11 @@
max_chars);
}
return;
+ case kSlicedStringTag:
+ SlicedString::cast(input)->SlicedStringReadBlockIntoBuffer(rbb,
+ offset_ptr,
+ max_chars);
+ return;
default:
break;
}
@@ -5862,6 +5754,31 @@
}
+uint16_t SlicedString::SlicedStringGet(int index) {
+ return parent()->Get(offset() + index);
+}
+
+
+const unibrow::byte* SlicedString::SlicedStringReadBlock(
+ ReadBlockBuffer* buffer, unsigned* offset_ptr, unsigned chars) {
+ unsigned offset = this->offset();
+ *offset_ptr += offset;
+ const unibrow::byte* answer = String::ReadBlock(String::cast(parent()),
+ buffer, offset_ptr, chars);
+ *offset_ptr -= offset;
+ return answer;
+}
+
+
+void SlicedString::SlicedStringReadBlockIntoBuffer(
+ ReadBlockBuffer* buffer, unsigned* offset_ptr, unsigned chars) {
+ unsigned offset = this->offset();
+ *offset_ptr += offset;
+ String::ReadBlockIntoBuffer(String::cast(parent()),
+ buffer, offset_ptr, chars);
+ *offset_ptr -= offset;
+}
+
template <typename sinkchar>
void String::WriteToFlat(String* src,
sinkchar* sink,
@@ -5929,6 +5846,13 @@
}
break;
}
+ case kAsciiStringTag | kSlicedStringTag:
+ case kTwoByteStringTag | kSlicedStringTag: {
+ SlicedString* slice = SlicedString::cast(source);
+ unsigned offset = slice->offset();
+ WriteToFlat(slice->parent(), sink, from + offset, to + offset);
+ return;
+ }
}
}
}
@@ -5993,12 +5917,13 @@
static inline bool CompareStringContentsPartial(Isolate* isolate,
IteratorA* ia,
String* b) {
- if (b->IsFlat()) {
- if (b->IsAsciiRepresentation()) {
- VectorIterator<char> ib(b->ToAsciiVector());
+ String::FlatContent content = b->GetFlatContent();
+ if (content.IsFlat()) {
+ if (content.IsAscii()) {
+ VectorIterator<char> ib(content.ToAsciiVector());
return CompareStringContents(ia, &ib);
} else {
- VectorIterator<uc16> ib(b->ToUC16Vector());
+ VectorIterator<uc16> ib(content.ToUC16Vector());
return CompareStringContents(ia, &ib);
}
} else {
@@ -6037,16 +5962,18 @@
}
Isolate* isolate = GetIsolate();
- if (lhs->IsFlat()) {
- if (lhs->IsAsciiRepresentation()) {
- Vector<const char> vec1 = lhs->ToAsciiVector();
- if (rhs->IsFlat()) {
- if (rhs->IsAsciiRepresentation()) {
- Vector<const char> vec2 = rhs->ToAsciiVector();
+ String::FlatContent lhs_content = lhs->GetFlatContent();
+ String::FlatContent rhs_content = rhs->GetFlatContent();
+ if (lhs_content.IsFlat()) {
+ if (lhs_content.IsAscii()) {
+ Vector<const char> vec1 = lhs_content.ToAsciiVector();
+ if (rhs_content.IsFlat()) {
+ if (rhs_content.IsAscii()) {
+ Vector<const char> vec2 = rhs_content.ToAsciiVector();
return CompareRawStringContents(vec1, vec2);
} else {
VectorIterator<char> buf1(vec1);
- VectorIterator<uc16> ib(rhs->ToUC16Vector());
+ VectorIterator<uc16> ib(rhs_content.ToUC16Vector());
return CompareStringContents(&buf1, &ib);
}
} else {
@@ -6056,14 +5983,14 @@
isolate->objects_string_compare_buffer_b());
}
} else {
- Vector<const uc16> vec1 = lhs->ToUC16Vector();
- if (rhs->IsFlat()) {
- if (rhs->IsAsciiRepresentation()) {
+ Vector<const uc16> vec1 = lhs_content.ToUC16Vector();
+ if (rhs_content.IsFlat()) {
+ if (rhs_content.IsAscii()) {
VectorIterator<uc16> buf1(vec1);
- VectorIterator<char> ib(rhs->ToAsciiVector());
+ VectorIterator<char> ib(rhs_content.ToAsciiVector());
return CompareStringContents(&buf1, &ib);
} else {
- Vector<const uc16> vec2(rhs->ToUC16Vector());
+ Vector<const uc16> vec2(rhs_content.ToUC16Vector());
return CompareRawStringContents(vec1, vec2);
}
} else {
@@ -6116,8 +6043,10 @@
bool String::IsAsciiEqualTo(Vector<const char> str) {
int slen = length();
if (str.length() != slen) return false;
- if (IsFlat() && IsAsciiRepresentation()) {
- return CompareChars(ToAsciiVector().start(), str.start(), slen) == 0;
+ FlatContent content = GetFlatContent();
+ if (content.IsAscii()) {
+ return CompareChars(content.ToAsciiVector().start(),
+ str.start(), slen) == 0;
}
for (int i = 0; i < slen; i++) {
if (Get(i) != static_cast<uint16_t>(str[i])) return false;
@@ -6129,8 +6058,9 @@
bool String::IsTwoByteEqualTo(Vector<const uc16> str) {
int slen = length();
if (str.length() != slen) return false;
- if (IsFlat() && IsTwoByteRepresentation()) {
- return CompareChars(ToUC16Vector().start(), str.start(), slen) == 0;
+ FlatContent content = GetFlatContent();
+ if (content.IsTwoByte()) {
+ return CompareChars(content.ToUC16Vector().start(), str.start(), slen) == 0;
}
for (int i = 0; i < slen; i++) {
if (Get(i) != str[i]) return false;
@@ -7060,126 +6990,99 @@
PrintF(out, "Deoptimization Input Data (deopt points = %d)\n", deopt_count);
if (0 == deopt_count) return;
- PrintF(out, "%6s %6s %6s %12s\n", "index", "ast id", "argc", "commands");
+ PrintF(out, "%6s %6s %6s %12s\n", "index", "ast id", "argc",
+ FLAG_print_code_verbose ? "commands" : "");
for (int i = 0; i < deopt_count; i++) {
- int command_count = 0;
PrintF(out, "%6d %6d %6d",
i, AstId(i)->value(), ArgumentsStackHeight(i)->value());
+
+ if (!FLAG_print_code_verbose) {
+ PrintF(out, "\n");
+ continue;
+ }
+ // Print details of the frame translation.
int translation_index = TranslationIndex(i)->value();
TranslationIterator iterator(TranslationByteArray(), translation_index);
Translation::Opcode opcode =
static_cast<Translation::Opcode>(iterator.Next());
ASSERT(Translation::BEGIN == opcode);
int frame_count = iterator.Next();
- if (FLAG_print_code_verbose) {
- PrintF(out, " %s {count=%d}\n", Translation::StringFor(opcode),
- frame_count);
- }
+ PrintF(out, " %s {count=%d}\n", Translation::StringFor(opcode),
+ frame_count);
- for (int i = 0; i < frame_count; ++i) {
- opcode = static_cast<Translation::Opcode>(iterator.Next());
- ASSERT(Translation::FRAME == opcode);
- int ast_id = iterator.Next();
- int function_id = iterator.Next();
- JSFunction* function =
- JSFunction::cast(LiteralArray()->get(function_id));
- unsigned height = iterator.Next();
- if (FLAG_print_code_verbose) {
- PrintF(out, "%24s %s {ast_id=%d, function=",
- "", Translation::StringFor(opcode), ast_id);
- function->PrintName(out);
- PrintF(out, ", height=%u}\n", height);
+ while (iterator.HasNext() &&
+ Translation::BEGIN !=
+ (opcode = static_cast<Translation::Opcode>(iterator.Next()))) {
+ PrintF(out, "%24s %s ", "", Translation::StringFor(opcode));
+
+ switch (opcode) {
+ case Translation::BEGIN:
+ UNREACHABLE();
+ break;
+
+ case Translation::FRAME: {
+ int ast_id = iterator.Next();
+ int function_id = iterator.Next();
+ JSFunction* function =
+ JSFunction::cast(LiteralArray()->get(function_id));
+ unsigned height = iterator.Next();
+ PrintF(out, "{ast_id=%d, \nfunction=", ast_id);
+ function->PrintName(out);
+ PrintF(out, ", height=%u}", height);
+ break;
+ }
+
+ case Translation::DUPLICATE:
+ break;
+
+ case Translation::REGISTER: {
+ int reg_code = iterator.Next();
+ PrintF(out, "{input=%s}", converter.NameOfCPURegister(reg_code));
+ break;
+ }
+
+ case Translation::INT32_REGISTER: {
+ int reg_code = iterator.Next();
+ PrintF(out, "{input=%s}", converter.NameOfCPURegister(reg_code));
+ break;
+ }
+
+ case Translation::DOUBLE_REGISTER: {
+ int reg_code = iterator.Next();
+ PrintF(out, "{input=%s}",
+ DoubleRegister::AllocationIndexToString(reg_code));
+ break;
+ }
+
+ case Translation::STACK_SLOT: {
+ int input_slot_index = iterator.Next();
+ PrintF(out, "{input=%d}", input_slot_index);
+ break;
+ }
+
+ case Translation::INT32_STACK_SLOT: {
+ int input_slot_index = iterator.Next();
+ PrintF(out, "{input=%d}", input_slot_index);
+ break;
+ }
+
+ case Translation::DOUBLE_STACK_SLOT: {
+ int input_slot_index = iterator.Next();
+ PrintF(out, "{input=%d}", input_slot_index);
+ break;
+ }
+
+ case Translation::LITERAL: {
+ unsigned literal_index = iterator.Next();
+ PrintF(out, "{literal_id=%u}", literal_index);
+ break;
+ }
+
+ case Translation::ARGUMENTS_OBJECT:
+ break;
}
-
- // Size of translation is height plus all incoming arguments including
- // receiver.
- int size = height + function->shared()->formal_parameter_count() + 1;
- command_count += size;
- for (int j = 0; j < size; ++j) {
- opcode = static_cast<Translation::Opcode>(iterator.Next());
- if (FLAG_print_code_verbose) {
- PrintF(out, "%24s %s ", "", Translation::StringFor(opcode));
- }
-
- if (opcode == Translation::DUPLICATE) {
- opcode = static_cast<Translation::Opcode>(iterator.Next());
- if (FLAG_print_code_verbose) {
- PrintF(out, "%s ", Translation::StringFor(opcode));
- }
- --j; // Two commands share the same frame index.
- }
-
- switch (opcode) {
- case Translation::BEGIN:
- case Translation::FRAME:
- case Translation::DUPLICATE:
- UNREACHABLE();
- break;
-
- case Translation::REGISTER: {
- int reg_code = iterator.Next();
- if (FLAG_print_code_verbose) {
- PrintF(out, "{input=%s}", converter.NameOfCPURegister(reg_code));
- }
- break;
- }
-
- case Translation::INT32_REGISTER: {
- int reg_code = iterator.Next();
- if (FLAG_print_code_verbose) {
- PrintF(out, "{input=%s}", converter.NameOfCPURegister(reg_code));
- }
- break;
- }
-
- case Translation::DOUBLE_REGISTER: {
- int reg_code = iterator.Next();
- if (FLAG_print_code_verbose) {
- PrintF(out, "{input=%s}",
- DoubleRegister::AllocationIndexToString(reg_code));
- }
- break;
- }
-
- case Translation::STACK_SLOT: {
- int input_slot_index = iterator.Next();
- if (FLAG_print_code_verbose) {
- PrintF(out, "{input=%d}", input_slot_index);
- }
- break;
- }
-
- case Translation::INT32_STACK_SLOT: {
- int input_slot_index = iterator.Next();
- if (FLAG_print_code_verbose) {
- PrintF(out, "{input=%d}", input_slot_index);
- }
- break;
- }
-
- case Translation::DOUBLE_STACK_SLOT: {
- int input_slot_index = iterator.Next();
- if (FLAG_print_code_verbose) {
- PrintF(out, "{input=%d}", input_slot_index);
- }
- break;
- }
-
- case Translation::LITERAL: {
- unsigned literal_index = iterator.Next();
- if (FLAG_print_code_verbose) {
- PrintF(out, "{literal_id=%u}", literal_index);
- }
- break;
- }
-
- case Translation::ARGUMENTS_OBJECT:
- break;
- }
- if (FLAG_print_code_verbose) PrintF(out, "\n");
- }
+ PrintF(out, "\n");
}
- if (!FLAG_print_code_verbose) PrintF(out, " %12d\n", command_count);
}
}
@@ -7217,6 +7120,7 @@
case UNARY_OP_IC: return "UNARY_OP_IC";
case BINARY_OP_IC: return "BINARY_OP_IC";
case COMPARE_IC: return "COMPARE_IC";
+ case TO_BOOLEAN_IC: return "TO_BOOLEAN_IC";
}
UNREACHABLE();
return NULL;
@@ -7455,7 +7359,8 @@
// exceed the capacity of new space, and we would fail repeatedly
// trying to convert the FixedDoubleArray.
MaybeObject* maybe_value_object =
- GetHeap()->AllocateHeapNumber(old_elements->get(i), TENURED);
+ GetHeap()->AllocateHeapNumber(old_elements->get_scalar(i),
+ TENURED);
if (!maybe_value_object->ToObject(&obj)) return maybe_value_object;
// Force write barrier. It's not worth trying to exploit
// elems->GetWriteBarrierMode(), since it requires an
@@ -7549,9 +7454,10 @@
switch (GetElementsKind()) {
case FAST_ELEMENTS: {
+ case FAST_DOUBLE_ELEMENTS:
// Make sure we never try to shrink dense arrays into sparse arrays.
- ASSERT(static_cast<uint32_t>(FixedArray::cast(elements())->length()) <=
- new_length);
+ ASSERT(static_cast<uint32_t>(
+ FixedArrayBase::cast(elements())->length()) <= new_length);
MaybeObject* result = NormalizeElements();
if (result->IsFailure()) return result;
@@ -7580,7 +7486,6 @@
case EXTERNAL_FLOAT_ELEMENTS:
case EXTERNAL_DOUBLE_ELEMENTS:
case EXTERNAL_PIXEL_ELEMENTS:
- case FAST_DOUBLE_ELEMENTS:
UNREACHABLE();
break;
}
@@ -7695,8 +7600,7 @@
}
int min = NewElementsCapacity(old_capacity);
int new_capacity = value > min ? value : min;
- if (new_capacity <= kMaxFastElementsLength ||
- !ShouldConvertToSlowElements(new_capacity)) {
+ if (!ShouldConvertToSlowElements(new_capacity)) {
MaybeObject* result;
if (GetElementsKind() == FAST_ELEMENTS) {
result = SetFastElementsCapacityAndLength(new_capacity, value);
@@ -7922,6 +7826,17 @@
}
break;
}
+ case FAST_DOUBLE_ELEMENTS: {
+ uint32_t length = IsJSArray() ?
+ static_cast<uint32_t>
+ (Smi::cast(JSArray::cast(this)->length())->value()) :
+ static_cast<uint32_t>(FixedDoubleArray::cast(elements())->length());
+ if ((index < length) &&
+ !FixedDoubleArray::cast(elements())->is_the_hole(index)) {
+ return true;
+ }
+ break;
+ }
case EXTERNAL_PIXEL_ELEMENTS: {
ExternalPixelArray* pixels = ExternalPixelArray::cast(elements());
if (index < static_cast<uint32_t>(pixels->length())) {
@@ -7936,8 +7851,7 @@
case EXTERNAL_INT_ELEMENTS:
case EXTERNAL_UNSIGNED_INT_ELEMENTS:
case EXTERNAL_FLOAT_ELEMENTS:
- case EXTERNAL_DOUBLE_ELEMENTS:
- case FAST_DOUBLE_ELEMENTS: {
+ case EXTERNAL_DOUBLE_ELEMENTS: {
ExternalArray* array = ExternalArray::cast(elements());
if (index < static_cast<uint32_t>(array->length())) {
return true;
@@ -8048,6 +7962,17 @@
}
break;
}
+ case FAST_DOUBLE_ELEMENTS: {
+ uint32_t length = IsJSArray() ?
+ static_cast<uint32_t>
+ (Smi::cast(JSArray::cast(this)->length())->value()) :
+ static_cast<uint32_t>(FixedDoubleArray::cast(elements())->length());
+ if ((index < length) &&
+ !FixedDoubleArray::cast(elements())->is_the_hole(index)) {
+ return FAST_ELEMENT;
+ }
+ break;
+ }
case EXTERNAL_PIXEL_ELEMENTS: {
ExternalPixelArray* pixels = ExternalPixelArray::cast(elements());
if (index < static_cast<uint32_t>(pixels->length())) return FAST_ELEMENT;
@@ -8065,9 +7990,6 @@
if (index < static_cast<uint32_t>(array->length())) return FAST_ELEMENT;
break;
}
- case FAST_DOUBLE_ELEMENTS:
- UNREACHABLE();
- break;
case DICTIONARY_ELEMENTS: {
if (element_dictionary()->FindEntry(index) !=
NumberDictionary::kNotFound) {
@@ -8434,8 +8356,7 @@
if ((index - length) < kMaxGap) {
// Try allocating extra space.
int new_capacity = NewElementsCapacity(index + 1);
- if (new_capacity <= kMaxFastElementsLength ||
- !ShouldConvertToSlowElements(new_capacity)) {
+ if (!ShouldConvertToSlowElements(new_capacity)) {
ASSERT(static_cast<uint32_t>(new_capacity) > index);
Object* new_elements;
MaybeObject* maybe =
@@ -8515,14 +8436,14 @@
return isolate->Throw(*error);
}
}
- Object* new_dictionary;
+ FixedArrayBase* new_dictionary;
MaybeObject* maybe = dictionary->AtNumberPut(index, value);
- if (!maybe->ToObject(&new_dictionary)) return maybe;
+ if (!maybe->To<FixedArrayBase>(&new_dictionary)) return maybe;
if (dictionary != NumberDictionary::cast(new_dictionary)) {
if (is_arguments) {
elements->set(1, new_dictionary);
} else {
- set_elements(HeapObject::cast(new_dictionary));
+ set_elements(new_dictionary);
}
dictionary = NumberDictionary::cast(new_dictionary);
}
@@ -8543,7 +8464,7 @@
} else {
new_length = dictionary->max_number_key() + 1;
}
- MaybeObject* result = ShouldConvertToFastDoubleElements()
+ MaybeObject* result = CanConvertToFastDoubleElements()
? SetFastDoubleElementsCapacityAndLength(new_length, new_length)
: SetFastElementsCapacityAndLength(new_length, new_length);
if (result->IsFailure()) return result;
@@ -8617,8 +8538,7 @@
if ((index - elms_length) < kMaxGap) {
// Try allocating extra space.
int new_capacity = NewElementsCapacity(index+1);
- if (new_capacity <= kMaxFastElementsLength ||
- !ShouldConvertToSlowElements(new_capacity)) {
+ if (!ShouldConvertToSlowElements(new_capacity)) {
ASSERT(static_cast<uint32_t>(new_capacity) > index);
Object* obj;
{ MaybeObject* maybe_obj =
@@ -8783,71 +8703,6 @@
}
-MaybeObject* JSObject::GetElementPostInterceptor(Object* receiver,
- uint32_t index) {
- // Get element works for both JSObject and JSArray since
- // JSArray::length cannot change.
- switch (GetElementsKind()) {
- case FAST_ELEMENTS: {
- FixedArray* elms = FixedArray::cast(elements());
- if (index < static_cast<uint32_t>(elms->length())) {
- Object* value = elms->get(index);
- if (!value->IsTheHole()) return value;
- }
- break;
- }
- case FAST_DOUBLE_ELEMENTS: {
- FixedDoubleArray* elms = FixedDoubleArray::cast(elements());
- if (index < static_cast<uint32_t>(elms->length())) {
- if (!elms->is_the_hole(index)) {
- return GetHeap()->NumberFromDouble(elms->get(index));
- }
- }
- break;
- }
- case EXTERNAL_PIXEL_ELEMENTS:
- case EXTERNAL_BYTE_ELEMENTS:
- case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
- case EXTERNAL_SHORT_ELEMENTS:
- case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
- case EXTERNAL_INT_ELEMENTS:
- case EXTERNAL_UNSIGNED_INT_ELEMENTS:
- case EXTERNAL_FLOAT_ELEMENTS:
- case EXTERNAL_DOUBLE_ELEMENTS: {
- MaybeObject* maybe_value = GetExternalElement(index);
- Object* value;
- if (!maybe_value->ToObject(&value)) return maybe_value;
- if (!value->IsUndefined()) return value;
- break;
- }
- case DICTIONARY_ELEMENTS: {
- NumberDictionary* dictionary = element_dictionary();
- int entry = dictionary->FindEntry(index);
- if (entry != NumberDictionary::kNotFound) {
- Object* element = dictionary->ValueAt(entry);
- PropertyDetails details = dictionary->DetailsAt(entry);
- if (details.type() == CALLBACKS) {
- return GetElementWithCallback(receiver,
- element,
- index,
- this);
- }
- return element;
- }
- break;
- }
- case NON_STRICT_ARGUMENTS_ELEMENTS:
- UNIMPLEMENTED();
- break;
- }
-
- // Continue searching via the prototype chain.
- Object* pt = GetPrototype();
- if (pt->IsNull()) return GetHeap()->undefined_value();
- return pt->GetElementWithReceiver(receiver, index);
-}
-
-
MaybeObject* JSObject::GetElementWithInterceptor(Object* receiver,
uint32_t index) {
Isolate* isolate = GetIsolate();
@@ -8875,218 +8730,33 @@
if (!result.IsEmpty()) return *v8::Utils::OpenHandle(*result);
}
- MaybeObject* raw_result =
- holder_handle->GetElementPostInterceptor(*this_handle, index);
+ Heap* heap = holder_handle->GetHeap();
+ ElementsAccessor* handler = holder_handle->GetElementsAccessor();
+ MaybeObject* raw_result = handler->Get(holder_handle->elements(),
+ index,
+ *holder_handle,
+ *this_handle);
+ if (raw_result != heap->the_hole_value()) return raw_result;
+
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
- return raw_result;
-}
-
-MaybeObject* JSObject::GetElementWithReceiver(Object* receiver,
- uint32_t index) {
- // Check access rights if needed.
- if (IsAccessCheckNeeded()) {
- Heap* heap = GetHeap();
- if (!heap->isolate()->MayIndexedAccess(this, index, v8::ACCESS_GET)) {
- heap->isolate()->ReportFailedAccessCheck(this, v8::ACCESS_GET);
- return heap->undefined_value();
- }
- }
-
- if (HasIndexedInterceptor()) {
- return GetElementWithInterceptor(receiver, index);
- }
-
- // Get element works for both JSObject and JSArray since
- // JSArray::length cannot change.
- switch (GetElementsKind()) {
- case FAST_ELEMENTS: {
- FixedArray* elms = FixedArray::cast(elements());
- if (index < static_cast<uint32_t>(elms->length())) {
- Object* value = elms->get(index);
- if (!value->IsTheHole()) return value;
- }
- break;
- }
- case FAST_DOUBLE_ELEMENTS: {
- FixedDoubleArray* elms = FixedDoubleArray::cast(elements());
- if (index < static_cast<uint32_t>(elms->length())) {
- if (!elms->is_the_hole(index)) {
- double double_value = elms->get(index);
- return GetHeap()->NumberFromDouble(double_value);
- }
- }
- break;
- }
- case EXTERNAL_PIXEL_ELEMENTS:
- case EXTERNAL_BYTE_ELEMENTS:
- case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
- case EXTERNAL_SHORT_ELEMENTS:
- case EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
- case EXTERNAL_INT_ELEMENTS:
- case EXTERNAL_UNSIGNED_INT_ELEMENTS:
- case EXTERNAL_FLOAT_ELEMENTS:
- case EXTERNAL_DOUBLE_ELEMENTS: {
- MaybeObject* maybe_value = GetExternalElement(index);
- Object* value;
- if (!maybe_value->ToObject(&value)) return maybe_value;
- if (!value->IsUndefined()) return value;
- break;
- }
- case DICTIONARY_ELEMENTS: {
- NumberDictionary* dictionary = element_dictionary();
- int entry = dictionary->FindEntry(index);
- if (entry != NumberDictionary::kNotFound) {
- Object* element = dictionary->ValueAt(entry);
- PropertyDetails details = dictionary->DetailsAt(entry);
- if (details.type() == CALLBACKS) {
- return GetElementWithCallback(receiver,
- element,
- index,
- this);
- }
- return element;
- }
- break;
- }
- case NON_STRICT_ARGUMENTS_ELEMENTS: {
- FixedArray* parameter_map = FixedArray::cast(elements());
- uint32_t length = parameter_map->length();
- Object* probe =
- (index < length - 2) ? parameter_map->get(index + 2) : NULL;
- if (probe != NULL && !probe->IsTheHole()) {
- Context* context = Context::cast(parameter_map->get(0));
- int context_index = Smi::cast(probe)->value();
- ASSERT(!context->get(context_index)->IsTheHole());
- return context->get(context_index);
- } else {
- // Object is not mapped, defer to the arguments.
- FixedArray* arguments = FixedArray::cast(parameter_map->get(1));
- if (arguments->IsDictionary()) {
- NumberDictionary* dictionary = NumberDictionary::cast(arguments);
- int entry = dictionary->FindEntry(index);
- if (entry != NumberDictionary::kNotFound) {
- Object* element = dictionary->ValueAt(entry);
- PropertyDetails details = dictionary->DetailsAt(entry);
- if (details.type() == CALLBACKS) {
- return GetElementWithCallback(receiver,
- element,
- index,
- this);
- }
- return element;
- }
- } else if (index < static_cast<uint32_t>(arguments->length())) {
- Object* value = arguments->get(index);
- if (!value->IsTheHole()) return value;
- }
- }
- break;
- }
- }
-
- Object* pt = GetPrototype();
- Heap* heap = GetHeap();
+ Object* pt = holder_handle->GetPrototype();
if (pt == heap->null_value()) return heap->undefined_value();
- return pt->GetElementWithReceiver(receiver, index);
-}
-
-
-MaybeObject* JSObject::GetExternalElement(uint32_t index) {
- // Get element works for both JSObject and JSArray since
- // JSArray::length cannot change.
- switch (GetElementsKind()) {
- case EXTERNAL_PIXEL_ELEMENTS: {
- ExternalPixelArray* pixels = ExternalPixelArray::cast(elements());
- if (index < static_cast<uint32_t>(pixels->length())) {
- uint8_t value = pixels->get(index);
- return Smi::FromInt(value);
- }
- break;
- }
- case EXTERNAL_BYTE_ELEMENTS: {
- ExternalByteArray* array = ExternalByteArray::cast(elements());
- if (index < static_cast<uint32_t>(array->length())) {
- int8_t value = array->get(index);
- return Smi::FromInt(value);
- }
- break;
- }
- case EXTERNAL_UNSIGNED_BYTE_ELEMENTS: {
- ExternalUnsignedByteArray* array =
- ExternalUnsignedByteArray::cast(elements());
- if (index < static_cast<uint32_t>(array->length())) {
- uint8_t value = array->get(index);
- return Smi::FromInt(value);
- }
- break;
- }
- case EXTERNAL_SHORT_ELEMENTS: {
- ExternalShortArray* array = ExternalShortArray::cast(elements());
- if (index < static_cast<uint32_t>(array->length())) {
- int16_t value = array->get(index);
- return Smi::FromInt(value);
- }
- break;
- }
- case EXTERNAL_UNSIGNED_SHORT_ELEMENTS: {
- ExternalUnsignedShortArray* array =
- ExternalUnsignedShortArray::cast(elements());
- if (index < static_cast<uint32_t>(array->length())) {
- uint16_t value = array->get(index);
- return Smi::FromInt(value);
- }
- break;
- }
- case EXTERNAL_INT_ELEMENTS: {
- ExternalIntArray* array = ExternalIntArray::cast(elements());
- if (index < static_cast<uint32_t>(array->length())) {
- int32_t value = array->get(index);
- return GetHeap()->NumberFromInt32(value);
- }
- break;
- }
- case EXTERNAL_UNSIGNED_INT_ELEMENTS: {
- ExternalUnsignedIntArray* array =
- ExternalUnsignedIntArray::cast(elements());
- if (index < static_cast<uint32_t>(array->length())) {
- uint32_t value = array->get(index);
- return GetHeap()->NumberFromUint32(value);
- }
- break;
- }
- case EXTERNAL_FLOAT_ELEMENTS: {
- ExternalFloatArray* array = ExternalFloatArray::cast(elements());
- if (index < static_cast<uint32_t>(array->length())) {
- float value = array->get(index);
- return GetHeap()->AllocateHeapNumber(value);
- }
- break;
- }
- case EXTERNAL_DOUBLE_ELEMENTS: {
- ExternalDoubleArray* array = ExternalDoubleArray::cast(elements());
- if (index < static_cast<uint32_t>(array->length())) {
- double value = array->get(index);
- return GetHeap()->AllocateHeapNumber(value);
- }
- break;
- }
- case FAST_DOUBLE_ELEMENTS:
- case FAST_ELEMENTS:
- case DICTIONARY_ELEMENTS:
- UNREACHABLE();
- break;
- case NON_STRICT_ARGUMENTS_ELEMENTS:
- UNIMPLEMENTED();
- break;
- }
- return GetHeap()->undefined_value();
+ return pt->GetElementWithReceiver(*this_handle, index);
}
bool JSObject::HasDenseElements() {
int capacity = 0;
- int number_of_elements = 0;
+ int used = 0;
+ GetElementsCapacityAndUsage(&capacity, &used);
+ return (capacity == 0) || (used > (capacity / 2));
+}
+
+
+void JSObject::GetElementsCapacityAndUsage(int* capacity, int* used) {
+ *capacity = 0;
+ *used = 0;
FixedArrayBase* backing_store_base = FixedArrayBase::cast(elements());
FixedArray* backing_store = NULL;
@@ -9097,34 +8767,33 @@
backing_store = FixedArray::cast(backing_store_base);
if (backing_store->IsDictionary()) {
NumberDictionary* dictionary = NumberDictionary::cast(backing_store);
- capacity = dictionary->Capacity();
- number_of_elements = dictionary->NumberOfElements();
+ *capacity = dictionary->Capacity();
+ *used = dictionary->NumberOfElements();
break;
}
// Fall through.
case FAST_ELEMENTS:
backing_store = FixedArray::cast(backing_store_base);
- capacity = backing_store->length();
- for (int i = 0; i < capacity; ++i) {
- if (!backing_store->get(i)->IsTheHole()) ++number_of_elements;
+ *capacity = backing_store->length();
+ for (int i = 0; i < *capacity; ++i) {
+ if (!backing_store->get(i)->IsTheHole()) ++(*used);
}
break;
case DICTIONARY_ELEMENTS: {
NumberDictionary* dictionary =
NumberDictionary::cast(FixedArray::cast(elements()));
- capacity = dictionary->Capacity();
- number_of_elements = dictionary->NumberOfElements();
+ *capacity = dictionary->Capacity();
+ *used = dictionary->NumberOfElements();
break;
}
case FAST_DOUBLE_ELEMENTS: {
FixedDoubleArray* elms = FixedDoubleArray::cast(elements());
- capacity = elms->length();
- for (int i = 0; i < capacity; i++) {
- if (!elms->is_the_hole(i)) number_of_elements++;
+ *capacity = elms->length();
+ for (int i = 0; i < *capacity; i++) {
+ if (!elms->is_the_hole(i)) ++(*used);
}
break;
}
- case EXTERNAL_PIXEL_ELEMENTS:
case EXTERNAL_BYTE_ELEMENTS:
case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
case EXTERNAL_SHORT_ELEMENTS:
@@ -9132,30 +8801,34 @@
case EXTERNAL_INT_ELEMENTS:
case EXTERNAL_UNSIGNED_INT_ELEMENTS:
case EXTERNAL_FLOAT_ELEMENTS:
- case EXTERNAL_DOUBLE_ELEMENTS: {
- return true;
- }
+ case EXTERNAL_DOUBLE_ELEMENTS:
+ case EXTERNAL_PIXEL_ELEMENTS:
+ // External arrays are considered 100% used.
+ ExternalArray* external_array = ExternalArray::cast(elements());
+ *capacity = external_array->length();
+ *used = external_array->length();
+ break;
}
- return (capacity == 0) || (number_of_elements > (capacity / 2));
}
bool JSObject::ShouldConvertToSlowElements(int new_capacity) {
- // Keep the array in fast case if the current backing storage is
- // almost filled and if the new capacity is no more than twice the
- // old capacity.
- int elements_length = 0;
- if (elements()->map() == GetHeap()->non_strict_arguments_elements_map()) {
- FixedArray* backing_store = FixedArray::cast(elements());
- elements_length = FixedArray::cast(backing_store->get(1))->length();
- } else if (HasFastElements()) {
- elements_length = FixedArray::cast(elements())->length();
- } else if (HasFastDoubleElements()) {
- elements_length = FixedDoubleArray::cast(elements())->length();
- } else {
- UNREACHABLE();
+ STATIC_ASSERT(kMaxUncheckedOldFastElementsLength <=
+ kMaxUncheckedFastElementsLength);
+ if (new_capacity <= kMaxUncheckedOldFastElementsLength ||
+ (new_capacity <= kMaxUncheckedFastElementsLength &&
+ GetHeap()->InNewSpace(this))) {
+ return false;
}
- return !HasDenseElements() || ((new_capacity / 2) > elements_length);
+ // If the fast-case backing storage takes up roughly three times as
+ // much space (in machine words) as a dictionary backing storage
+ // would, the object should have slow elements.
+ int old_capacity = 0;
+ int used_elements = 0;
+ GetElementsCapacityAndUsage(&old_capacity, &used_elements);
+ int dictionary_size = NumberDictionary::ComputeCapacity(used_elements) *
+ NumberDictionary::kEntrySize;
+ return 3 * dictionary_size <= new_capacity;
}
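
The replacement heuristic above keeps small backing stores fast unconditionally and, past the unchecked thresholds, abandons the fast store once it would cost roughly three times the machine words a dictionary would. A standalone sketch of that decision; the two threshold defaults are assumptions, ComputeCapacity() is modelled on the old HashTable::Allocate() code removed later in this patch (at least 32, otherwise twice the requested size rounded up to a power of two), and kEntrySize == 3 assumes a key/value/details triple per NumberDictionary entry:

#include <algorithm>
#include <cstdint>

uint32_t RoundUpToPowerOf2(uint32_t x) {
  uint32_t result = 1;
  while (result < x) result <<= 1;
  return result;
}

int ComputeCapacity(int at_least_space_for) {
  return static_cast<int>(
      std::max<uint32_t>(32, RoundUpToPowerOf2(2u * at_least_space_for)));
}

bool ShouldConvertToSlowElements(int new_capacity, int used_elements,
                                 bool in_new_space,
                                 int max_unchecked_old = 500,     // assumption
                                 int max_unchecked_new = 5000) {  // assumption
  if (new_capacity <= max_unchecked_old) return false;
  if (new_capacity <= max_unchecked_new && in_new_space) return false;
  int dictionary_size = ComputeCapacity(used_elements) * 3;  // kEntrySize == 3
  return 3 * dictionary_size <= new_capacity;
}

Under those assumed constants, an old-space object with 4 used elements asking for a new capacity of 600 gets dictionary_size = 32 * 3 = 96, and 3 * 96 = 288 <= 600, so it is normalized to dictionary elements.
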
@@ -9178,20 +8851,21 @@
// dictionary, we cannot go back to fast case.
if (dictionary->requires_slow_elements()) return false;
// If the dictionary backing storage takes up roughly half as much
- // space as a fast-case backing storage would the array should have
- // fast elements.
- uint32_t length = 0;
+ // space (in machine words) as a fast-case backing storage would,
+ // the object should have fast elements.
+ uint32_t array_size = 0;
if (IsJSArray()) {
- CHECK(JSArray::cast(this)->length()->ToArrayIndex(&length));
+ CHECK(JSArray::cast(this)->length()->ToArrayIndex(&array_size));
} else {
- length = dictionary->max_number_key();
+ array_size = dictionary->max_number_key();
}
- return static_cast<uint32_t>(dictionary->Capacity()) >=
- (length / (2 * NumberDictionary::kEntrySize));
+ uint32_t dictionary_size = static_cast<uint32_t>(dictionary->Capacity()) *
+ NumberDictionary::kEntrySize;
+ return 2 * dictionary_size >= array_size;
}
-bool JSObject::ShouldConvertToFastDoubleElements() {
+bool JSObject::CanConvertToFastDoubleElements() {
if (FLAG_unbox_double_arrays) {
ASSERT(HasDictionaryElements());
NumberDictionary* dictionary = NumberDictionary::cast(elements());
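
Measured in the same machine-word units, ShouldConvertToFastElements() above goes the other way: assuming kEntrySize == 3, a dictionary with capacity 16 occupies 48 words, so an array whose length (or max key + 1) is at most 2 * 48 = 96 is considered cheap enough to switch back to fast elements.
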
@@ -9381,6 +9055,15 @@
return (index < length) &&
!FixedArray::cast(elements())->get(index)->IsTheHole();
}
+ case FAST_DOUBLE_ELEMENTS: {
+ uint32_t length = IsJSArray() ?
+ static_cast<uint32_t>(
+ Smi::cast(JSArray::cast(this)->length())->value()) :
+ static_cast<uint32_t>(FixedDoubleArray::cast(elements())->length());
+ return (index < length) &&
+ !FixedDoubleArray::cast(elements())->is_the_hole(index);
+ break;
+ }
case EXTERNAL_PIXEL_ELEMENTS: {
ExternalPixelArray* pixels = ExternalPixelArray::cast(elements());
return index < static_cast<uint32_t>(pixels->length());
@@ -9396,9 +9079,6 @@
ExternalArray* array = ExternalArray::cast(elements());
return index < static_cast<uint32_t>(array->length());
}
- case FAST_DOUBLE_ELEMENTS:
- UNREACHABLE();
- break;
case DICTIONARY_ELEMENTS: {
return element_dictionary()->FindEntry(index)
!= NumberDictionary::kNotFound;
@@ -10118,11 +9798,8 @@
template<typename Shape, typename Key>
MaybeObject* HashTable<Shape, Key>::Allocate(int at_least_space_for,
PretenureFlag pretenure) {
- const int kMinCapacity = 32;
- int capacity = RoundUpToPowerOf2(at_least_space_for * 2);
- if (capacity < kMinCapacity) {
- capacity = kMinCapacity; // Guarantee min capacity.
- } else if (capacity > HashTable::kMaxCapacity) {
+ int capacity = ComputeCapacity(at_least_space_for);
+ if (capacity > HashTable::kMaxCapacity) {
return Failure::OutOfMemoryException();
}
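The minimum-capacity and power-of-two logic removed here presumably moved into the new HashTable::ComputeCapacity helper declared later in objects.h; its body is not shown in this patch, so the following is only a sketch reconstructed from the removed lines.

// Assumed behavior, mirroring the removed inline code: at least twice the
// requested element count, rounded up to a power of two, never below 32.
static int ComputeCapacitySketch(int at_least_space_for) {
  const int kMinCapacity = 32;
  int capacity = 1;
  while (capacity < at_least_space_for * 2) capacity <<= 1;
  return capacity < kMinCapacity ? kMinCapacity : capacity;
}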
@@ -10290,6 +9967,8 @@
template class HashTable<MapCacheShape, HashTableKey*>;
+template class HashTable<ObjectHashTableShape, JSObject*>;
+
template class Dictionary<StringDictionaryShape, String*>;
template class Dictionary<NumberDictionaryShape, uint32_t>;
@@ -10504,19 +10183,19 @@
} else if (HasExternalArrayElements()) {
// External arrays cannot have holes or undefined elements.
return Smi::FromInt(ExternalArray::cast(elements())->length());
- } else {
+ } else if (!HasFastDoubleElements()) {
Object* obj;
{ MaybeObject* maybe_obj = EnsureWritableFastElements();
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
}
}
- ASSERT(HasFastElements());
+ ASSERT(HasFastElements() || HasFastDoubleElements());
// Collect holes at the end, undefined before that and the rest at the
// start, and return the number of non-hole, non-undefined values.
- FixedArray* elements = FixedArray::cast(this->elements());
- uint32_t elements_length = static_cast<uint32_t>(elements->length());
+ FixedArrayBase* elements_base = FixedArrayBase::cast(this->elements());
+ uint32_t elements_length = static_cast<uint32_t>(elements_base->length());
if (limit > elements_length) {
limit = elements_length ;
}
@@ -10535,47 +10214,78 @@
result_double = HeapNumber::cast(new_double);
}
- AssertNoAllocation no_alloc;
-
- // Split elements into defined, undefined and the_hole, in that order.
- // Only count locations for undefined and the hole, and fill them afterwards.
- WriteBarrierMode write_barrier = elements->GetWriteBarrierMode(no_alloc);
- unsigned int undefs = limit;
- unsigned int holes = limit;
- // Assume most arrays contain no holes and undefined values, so minimize the
- // number of stores of non-undefined, non-the-hole values.
- for (unsigned int i = 0; i < undefs; i++) {
- Object* current = elements->get(i);
- if (current->IsTheHole()) {
- holes--;
- undefs--;
- } else if (current->IsUndefined()) {
- undefs--;
- } else {
- continue;
+ uint32_t result = 0;
+ if (elements_base->map() == heap->fixed_double_array_map()) {
+ FixedDoubleArray* elements = FixedDoubleArray::cast(elements_base);
+ // Split elements into defined and the_hole, in that order.
+ unsigned int holes = limit;
+    // Assume most arrays contain no holes, so minimize the number of
+    // stores of non-the-hole values.
+ for (unsigned int i = 0; i < holes; i++) {
+ if (elements->is_the_hole(i)) {
+ holes--;
+ } else {
+ continue;
+ }
+ // Position i needs to be filled.
+ while (holes > i) {
+ if (elements->is_the_hole(holes)) {
+ holes--;
+ } else {
+ elements->set(i, elements->get_scalar(holes));
+ break;
+ }
+ }
}
- // Position i needs to be filled.
- while (undefs > i) {
- current = elements->get(undefs);
+ result = holes;
+ while (holes < limit) {
+ elements->set_the_hole(holes);
+ holes++;
+ }
+ } else {
+ FixedArray* elements = FixedArray::cast(elements_base);
+ AssertNoAllocation no_alloc;
+
+ // Split elements into defined, undefined and the_hole, in that order. Only
+ // count locations for undefined and the hole, and fill them afterwards.
+ WriteBarrierMode write_barrier = elements->GetWriteBarrierMode(no_alloc);
+ unsigned int undefs = limit;
+ unsigned int holes = limit;
+ // Assume most arrays contain no holes and undefined values, so minimize the
+ // number of stores of non-undefined, non-the-hole values.
+ for (unsigned int i = 0; i < undefs; i++) {
+ Object* current = elements->get(i);
if (current->IsTheHole()) {
holes--;
undefs--;
} else if (current->IsUndefined()) {
undefs--;
} else {
- elements->set(i, current, write_barrier);
- break;
+ continue;
+ }
+ // Position i needs to be filled.
+ while (undefs > i) {
+ current = elements->get(undefs);
+ if (current->IsTheHole()) {
+ holes--;
+ undefs--;
+ } else if (current->IsUndefined()) {
+ undefs--;
+ } else {
+ elements->set(i, current, write_barrier);
+ break;
+ }
}
}
- }
- uint32_t result = undefs;
- while (undefs < holes) {
- elements->set_undefined(undefs);
- undefs++;
- }
- while (holes < limit) {
- elements->set_the_hole(holes);
- holes++;
+ result = undefs;
+ while (undefs < holes) {
+ elements->set_undefined(undefs);
+ undefs++;
+ }
+ while (holes < limit) {
+ elements->set_the_hole(holes);
+ holes++;
+ }
}
if (result <= static_cast<uint32_t>(Smi::kMaxValue)) {
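The new FixedDoubleArray branch above uses the same in-place two-pointer compaction as the FixedArray branch, only without an undefined bucket. A self-contained sketch of that compaction over a plain array, with NaN standing in for the hole marker (purely illustrative):

#include <cmath>
#include <cstddef>

// Moves defined values to the front and holes (NaN here) to the back,
// returning the number of defined values -- the 'result' computed above.
static size_t CompactDoubles(double* data, size_t limit) {
  size_t holes = limit;
  for (size_t i = 0; i < holes; i++) {
    if (!std::isnan(data[i])) continue;  // position i already holds a value
    holes--;                             // data[i] is a hole to be filled
    while (holes > i) {
      if (std::isnan(data[holes])) {
        holes--;                         // skip holes at the tail
      } else {
        data[i] = data[holes];           // pull the last defined value forward
        break;
      }
    }
  }
  size_t result = holes;
  for (size_t j = holes; j < limit; j++) data[j] = NAN;  // trailing holes
  return result;
}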
@@ -11603,6 +11313,63 @@
}
+Object* ObjectHashTable::Lookup(JSObject* key) {
+ // If the object does not have an identity hash, it was never used as a key.
+ MaybeObject* maybe_hash = key->GetIdentityHash(JSObject::OMIT_CREATION);
+ if (maybe_hash->IsFailure()) return GetHeap()->undefined_value();
+ int entry = FindEntry(key);
+ if (entry == kNotFound) return GetHeap()->undefined_value();
+ return get(EntryToIndex(entry) + 1);
+}
+
+
+MaybeObject* ObjectHashTable::Put(JSObject* key, Object* value) {
+ // Make sure the key object has an identity hash code.
+ int hash;
+ { MaybeObject* maybe_hash = key->GetIdentityHash(JSObject::ALLOW_CREATION);
+ if (maybe_hash->IsFailure()) return maybe_hash;
+ hash = Smi::cast(maybe_hash->ToObjectUnchecked())->value();
+ }
+ int entry = FindEntry(key);
+
+ // Check whether to perform removal operation.
+ if (value->IsUndefined()) {
+ if (entry == kNotFound) return this;
+ RemoveEntry(entry);
+ return Shrink(key);
+ }
+
+ // Key is already in table, just overwrite value.
+ if (entry != kNotFound) {
+ set(EntryToIndex(entry) + 1, value);
+ return this;
+ }
+
+ // Check whether the hash table should be extended.
+ Object* obj;
+ { MaybeObject* maybe_obj = EnsureCapacity(1, key);
+ if (!maybe_obj->ToObject(&obj)) return maybe_obj;
+ }
+ ObjectHashTable* table = ObjectHashTable::cast(obj);
+ table->AddEntry(table->FindInsertionEntry(hash), key, value);
+ return table;
+}
+
+
+void ObjectHashTable::AddEntry(int entry, JSObject* key, Object* value) {
+ set(EntryToIndex(entry), key);
+ set(EntryToIndex(entry) + 1, value);
+ ElementAdded();
+}
+
+
+void ObjectHashTable::RemoveEntry(int entry, Heap* heap) {
+ set_null(heap, EntryToIndex(entry));
+ set_null(heap, EntryToIndex(entry) + 1);
+ ElementRemoved();
+}
+
+
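A sketch of how a caller is expected to drive the new table. The PutChecked helper is hypothetical (not part of the patch); the points to note are that Put() may return a different, resized table and that storing the undefined value removes the entry.

// Hypothetical helper: unwrap the (possibly resized) table returned by
// Put(), or return NULL if the underlying allocation failed.
static ObjectHashTable* PutChecked(ObjectHashTable* table,
                                   JSObject* key,
                                   Object* value) {
  Object* obj;
  MaybeObject* maybe = table->Put(key, value);
  if (!maybe->ToObject(&obj)) return NULL;   // allocation failure
  return ObjectHashTable::cast(obj);
}

// Usage pattern (error handling elided):
//   table = PutChecked(table, key, value);          // insert or overwrite
//   Object* found = table->Lookup(key);             // value or undefined
//   table = PutChecked(table, key,                  // undefined removes the
//                      heap->undefined_value());    // entry and may shrink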
#ifdef ENABLE_DEBUGGER_SUPPORT
// Check if there is a break point at this code position.
bool DebugInfo::HasBreakPoint(int code_position) {
@@ -11829,7 +11596,7 @@
Handle<Object> break_point_object) {
// No break point.
if (break_point_info->break_point_objects()->IsUndefined()) return false;
- // Single beak point.
+ // Single break point.
if (!break_point_info->break_point_objects()->IsFixedArray()) {
return break_point_info->break_point_objects() == *break_point_object;
}
@@ -11848,7 +11615,7 @@
int BreakPointInfo::GetBreakPointCount() {
// No break point.
if (break_point_objects()->IsUndefined()) return 0;
- // Single beak point.
+ // Single break point.
if (!break_point_objects()->IsFixedArray()) return 1;
// Multiple break points.
return FixedArray::cast(break_point_objects())->length();
diff --git a/src/objects.h b/src/objects.h
index 2aa6b4a..53ba981 100644
--- a/src/objects.h
+++ b/src/objects.h
@@ -51,6 +51,7 @@
// - JSReceiver (suitable for property access)
// - JSObject
// - JSArray
+// - JSWeakMap
// - JSRegExp
// - JSFunction
// - GlobalObject
@@ -61,31 +62,34 @@
// - JSMessageObject
// - JSProxy
// - JSFunctionProxy
-// - ByteArray
-// - ExternalArray
-// - ExternalPixelArray
-// - ExternalByteArray
-// - ExternalUnsignedByteArray
-// - ExternalShortArray
-// - ExternalUnsignedShortArray
-// - ExternalIntArray
-// - ExternalUnsignedIntArray
-// - ExternalFloatArray
-// - FixedArray
-// - DescriptorArray
-// - HashTable
-// - Dictionary
-// - SymbolTable
-// - CompilationCacheTable
-// - CodeCacheHashTable
-// - MapCache
-// - Context
-// - JSFunctionResultCache
-// - SerializedScopeInfo
+// - FixedArrayBase
+// - ByteArray
+// - FixedArray
+// - DescriptorArray
+// - HashTable
+// - Dictionary
+// - SymbolTable
+// - CompilationCacheTable
+// - CodeCacheHashTable
+// - MapCache
+// - Context
+// - JSFunctionResultCache
+// - SerializedScopeInfo
+// - FixedDoubleArray
+// - ExternalArray
+// - ExternalPixelArray
+// - ExternalByteArray
+// - ExternalUnsignedByteArray
+// - ExternalShortArray
+// - ExternalUnsignedShortArray
+// - ExternalIntArray
+// - ExternalUnsignedIntArray
+// - ExternalFloatArray
// - String
// - SeqString
// - SeqAsciiString
// - SeqTwoByteString
+// - SlicedString
// - ConsString
// - ExternalString
// - ExternalAsciiString
@@ -280,6 +284,7 @@
V(ASCII_STRING_TYPE) \
V(CONS_STRING_TYPE) \
V(CONS_ASCII_STRING_TYPE) \
+ V(SLICED_STRING_TYPE) \
V(EXTERNAL_STRING_TYPE) \
V(EXTERNAL_STRING_WITH_ASCII_DATA_TYPE) \
V(EXTERNAL_ASCII_STRING_TYPE) \
@@ -319,6 +324,7 @@
V(POLYMORPHIC_CODE_CACHE_TYPE) \
\
V(FIXED_ARRAY_TYPE) \
+ V(FIXED_DOUBLE_ARRAY_TYPE) \
V(SHARED_FUNCTION_INFO_TYPE) \
\
V(JS_MESSAGE_OBJECT_TYPE) \
@@ -331,6 +337,7 @@
V(JS_GLOBAL_PROXY_TYPE) \
V(JS_ARRAY_TYPE) \
V(JS_PROXY_TYPE) \
+ V(JS_WEAK_MAP_TYPE) \
V(JS_REGEXP_TYPE) \
\
V(JS_FUNCTION_TYPE) \
@@ -396,6 +403,14 @@
ConsString::kSize, \
cons_ascii_string, \
ConsAsciiString) \
+ V(SLICED_STRING_TYPE, \
+ SlicedString::kSize, \
+ sliced_string, \
+ SlicedString) \
+ V(SLICED_ASCII_STRING_TYPE, \
+ SlicedString::kSize, \
+ sliced_ascii_string, \
+ SlicedAsciiString) \
V(EXTERNAL_STRING_TYPE, \
ExternalTwoByteString::kSize, \
external_string, \
@@ -469,9 +484,22 @@
enum StringRepresentationTag {
kSeqStringTag = 0x0,
kConsStringTag = 0x1,
- kExternalStringTag = 0x2
+ kExternalStringTag = 0x2,
+ kSlicedStringTag = 0x3
};
-const uint32_t kIsConsStringMask = 0x1;
+const uint32_t kIsIndirectStringMask = 0x1;
+const uint32_t kIsIndirectStringTag = 0x1;
+STATIC_ASSERT((kSeqStringTag & kIsIndirectStringMask) == 0);
+STATIC_ASSERT((kExternalStringTag & kIsIndirectStringMask) == 0);
+STATIC_ASSERT(
+ (kConsStringTag & kIsIndirectStringMask) == kIsIndirectStringTag);
+STATIC_ASSERT(
+ (kSlicedStringTag & kIsIndirectStringMask) == kIsIndirectStringTag);
+
+// Use this mask to distinguish between cons and slice only after making
+// sure that the string is one of the two (an indirect string).
+const uint32_t kSlicedNotConsMask = kSlicedStringTag & ~kConsStringTag;
+STATIC_ASSERT(IS_POWER_OF_TWO(kSlicedNotConsMask) && kSlicedNotConsMask != 0);
// If bit 7 is clear, then bit 3 indicates whether this two-byte
// string actually contains ascii data.
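A small standalone sketch of how the new tag bits are meant to be read, using local copies of the constants defined above; the helper name is illustrative.

#include <stdint.h>

static const uint32_t kSeqTag = 0x0, kConsTag = 0x1;
static const uint32_t kExternalTag = 0x2, kSlicedTag = 0x3;
static const uint32_t kIsIndirectMask = 0x1;                       // cons or sliced
static const uint32_t kSlicedNotConsBit = kSlicedTag & ~kConsTag;  // == 0x2

enum ReprSketch { SEQ, CONS, EXTERNAL, SLICED };

// Classify a string representation from the low two instance-type bits.
static ReprSketch Classify(uint32_t instance_type) {
  uint32_t tag = instance_type & 0x3;      // representation tag bits
  if ((tag & kIsIndirectMask) == 0) {      // direct: sequential or external
    return (tag == kExternalTag) ? EXTERNAL : SEQ;
  }
  // Indirect: the extra bit distinguishes sliced (0x3) from cons (0x1).
  return (tag & kSlicedNotConsBit) != 0 ? SLICED : CONS;
}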
@@ -506,6 +534,8 @@
ASCII_STRING_TYPE = kAsciiStringTag | kSeqStringTag,
CONS_STRING_TYPE = kTwoByteStringTag | kConsStringTag,
CONS_ASCII_STRING_TYPE = kAsciiStringTag | kConsStringTag,
+ SLICED_STRING_TYPE = kTwoByteStringTag | kSlicedStringTag,
+ SLICED_ASCII_STRING_TYPE = kAsciiStringTag | kSlicedStringTag,
EXTERNAL_STRING_TYPE = kTwoByteStringTag | kExternalStringTag,
EXTERNAL_STRING_WITH_ASCII_DATA_TYPE =
kTwoByteStringTag | kExternalStringTag | kAsciiDataHintTag,
@@ -568,6 +598,7 @@
JS_GLOBAL_PROXY_TYPE,
JS_ARRAY_TYPE,
JS_PROXY_TYPE,
+ JS_WEAK_MAP_TYPE,
JS_REGEXP_TYPE, // LAST_NONCALLABLE_SPEC_OBJECT_TYPE
@@ -630,8 +661,11 @@
WriteBarrierMode mode = UPDATE_WRITE_BARRIER); \
-class StringStream;
+class DictionaryElementsAccessor;
+class ElementsAccessor;
+class FixedArrayBase;
class ObjectVisitor;
+class StringStream;
struct ValueInfo : public Malloced {
ValueInfo() : type(FIRST_TYPE), ptr(NULL), str(NULL), number(0) { }
@@ -709,6 +743,7 @@
V(SeqString) \
V(ExternalString) \
V(ConsString) \
+ V(SlicedString) \
V(ExternalTwoByteString) \
V(ExternalAsciiString) \
V(SeqTwoByteString) \
@@ -736,6 +771,7 @@
V(FixedDoubleArray) \
V(Context) \
V(GlobalContext) \
+ V(SerializedScopeInfo) \
V(JSFunction) \
V(Code) \
V(Oddball) \
@@ -748,6 +784,7 @@
V(JSArray) \
V(JSProxy) \
V(JSFunctionProxy) \
+ V(JSWeakMap) \
V(JSRegExp) \
V(HashTable) \
V(Dictionary) \
@@ -790,6 +827,8 @@
STRUCT_LIST(DECLARE_STRUCT_PREDICATE)
#undef DECLARE_STRUCT_PREDICATE
+ INLINE(bool IsSpecObject());
+
// Oddball testing.
INLINE(bool IsUndefined());
INLINE(bool IsNull());
@@ -1482,10 +1521,11 @@
// In the slow mode the elements is either a NumberDictionary, an
// ExternalArray, or a FixedArray parameter map for a (non-strict)
// arguments object.
- DECL_ACCESSORS(elements, HeapObject)
+ DECL_ACCESSORS(elements, FixedArrayBase)
inline void initialize_elements();
MUST_USE_RESULT inline MaybeObject* ResetElements();
inline ElementsKind GetElementsKind();
+ inline ElementsAccessor* GetElementsAccessor();
inline bool HasFastElements();
inline bool HasFastDoubleElements();
inline bool HasDictionaryElements();
@@ -1636,6 +1676,23 @@
MUST_USE_RESULT inline MaybeObject* SetHiddenPropertiesObject(
Object* hidden_obj);
+ // Indicates whether the hidden properties object should be created.
+ enum HiddenPropertiesFlag { ALLOW_CREATION, OMIT_CREATION };
+
+ // Retrieves the hidden properties object.
+ //
+ // The undefined value might be returned in case no hidden properties object
+ // is present and creation was omitted.
+ inline bool HasHiddenProperties();
+ MUST_USE_RESULT MaybeObject* GetHiddenProperties(HiddenPropertiesFlag flag);
+
+ // Retrieves a permanent object identity hash code.
+ //
+ // The identity hash is stored as a hidden property. The undefined value might
+ // be returned in case no hidden properties object is present and creation was
+ // omitted.
+ MUST_USE_RESULT MaybeObject* GetIdentityHash(HiddenPropertiesFlag flag);
+
MUST_USE_RESULT MaybeObject* DeleteProperty(String* name, DeleteMode mode);
MUST_USE_RESULT MaybeObject* DeleteElement(uint32_t index, DeleteMode mode);
@@ -1652,7 +1709,7 @@
bool ShouldConvertToFastElements();
// Returns true if the elements of JSObject contains only values that can be
// represented in a FixedDoubleArray.
- bool ShouldConvertToFastDoubleElements();
+ bool CanConvertToFastDoubleElements();
// Tells whether the index'th element is present.
inline bool HasElement(uint32_t index);
@@ -1711,14 +1768,8 @@
// Returns the index'th element.
// The undefined object if index is out of bounds.
- MaybeObject* GetElementWithReceiver(Object* receiver, uint32_t index);
MaybeObject* GetElementWithInterceptor(Object* receiver, uint32_t index);
- // Get external element value at index if there is one and undefined
- // otherwise. Can return a failure if allocation of a heap number
- // failed.
- MaybeObject* GetExternalElement(uint32_t index);
-
// Replace the elements' backing store with fast elements of the given
// capacity. Update the length for JSArrays. Returns the new backing
// store.
@@ -1946,8 +1997,21 @@
// Also maximal value of JSArray's length property.
static const uint32_t kMaxElementCount = 0xffffffffu;
+ // Constants for heuristics controlling conversion of fast elements
+ // to slow elements.
+
+ // Maximal gap that can be introduced by adding an element beyond
+ // the current elements length.
static const uint32_t kMaxGap = 1024;
- static const int kMaxFastElementsLength = 5000;
+
+ // Maximal length of fast elements array that won't be checked for
+ // being dense enough on expansion.
+ static const int kMaxUncheckedFastElementsLength = 5000;
+
+  // Same as above, but for old arrays. This limit is stricter; we don't
+  // want to be wasteful with long-lived objects.
+ static const int kMaxUncheckedOldFastElementsLength = 500;
+
static const int kInitialMaxFastElementArray = 100000;
static const int kMaxFastProperties = 12;
static const int kMaxInstanceSize = 255 * kPointerSize;
@@ -1969,6 +2033,8 @@
};
private:
+ friend class DictionaryElementsAccessor;
+
MUST_USE_RESULT MaybeObject* GetElementWithCallback(Object* receiver,
Object* structure,
uint32_t index,
@@ -1989,14 +2055,10 @@
StrictModeFlag strict_mode,
bool check_prototype);
- MaybeObject* GetElementPostInterceptor(Object* receiver, uint32_t index);
-
MUST_USE_RESULT MaybeObject* DeletePropertyPostInterceptor(String* name,
DeleteMode mode);
MUST_USE_RESULT MaybeObject* DeletePropertyWithInterceptor(String* name);
- MUST_USE_RESULT MaybeObject* DeleteElementPostInterceptor(uint32_t index,
- DeleteMode mode);
MUST_USE_RESULT MaybeObject* DeleteElementWithInterceptor(uint32_t index);
MUST_USE_RESULT MaybeObject* DeleteFastElement(uint32_t index);
@@ -2013,6 +2075,9 @@
// Returns true if most of the elements backing storage is used.
bool HasDenseElements();
+ // Gets the current elements capacity and the number of used elements.
+ void GetElementsCapacityAndUsage(int* capacity, int* used);
+
bool CanSetCallback(String* name);
MUST_USE_RESULT MaybeObject* SetElementCallback(
uint32_t index,
@@ -2049,6 +2114,8 @@
};
+class FixedDoubleArray;
+
// FixedArray describes fixed-sized arrays with element type Object*.
class FixedArray: public FixedArrayBase {
public:
@@ -2056,6 +2123,7 @@
inline Object* get(int index);
// Setter that uses write barrier.
inline void set(int index, Object* value);
+ inline bool is_the_hole(int index);
// Setter that doesn't need write barrier).
inline void set(int index, Smi* value);
@@ -2157,7 +2225,8 @@
inline void Initialize(NumberDictionary* from);
// Setter and getter for elements.
- inline double get(int index);
+ inline double get_scalar(int index);
+ inline MaybeObject* get(int index);
inline void set(int index, double value);
inline void set_the_hole(int index);
@@ -2484,6 +2553,10 @@
int at_least_space_for,
PretenureFlag pretenure = NOT_TENURED);
+ // Computes the required capacity for a table holding the given
+ // number of elements. May be more than HashTable::kMaxCapacity.
+ static int ComputeCapacity(int at_least_space_for);
+
// Returns the key at entry.
Object* KeyAt(int entry) { return get(EntryToIndex(entry)); }
@@ -2906,6 +2979,48 @@
};
+class ObjectHashTableShape {
+ public:
+ static inline bool IsMatch(JSObject* key, Object* other);
+ static inline uint32_t Hash(JSObject* key);
+ static inline uint32_t HashForObject(JSObject* key, Object* object);
+ MUST_USE_RESULT static inline MaybeObject* AsObject(JSObject* key);
+ static const int kPrefixSize = 0;
+ static const int kEntrySize = 2;
+};
+
+
+// ObjectHashTable maps keys that are JavaScript objects to object values by
+// using the identity hash of the key for hashing purposes.
+class ObjectHashTable: public HashTable<ObjectHashTableShape, JSObject*> {
+ public:
+ static inline ObjectHashTable* cast(Object* obj) {
+ ASSERT(obj->IsHashTable());
+ return reinterpret_cast<ObjectHashTable*>(obj);
+ }
+
+ // Looks up the value associated with the given key. The undefined value is
+ // returned in case the key is not present.
+ Object* Lookup(JSObject* key);
+
+ // Adds (or overwrites) the value associated with the given key. Mapping a
+ // key to the undefined value causes removal of the whole entry.
+ MUST_USE_RESULT MaybeObject* Put(JSObject* key, Object* value);
+
+ private:
+ friend class MarkCompactCollector;
+
+ void AddEntry(int entry, JSObject* key, Object* value);
+ void RemoveEntry(int entry, Heap* heap);
+ inline void RemoveEntry(int entry);
+
+ // Returns the index to the value of an entry.
+ static inline int EntryToValueIndex(int entry) {
+ return EntryToIndex(entry) + 1;
+ }
+};
+
+
// JSFunctionResultCache caches results of some JSFunction invocation.
// It is a fixed array with fixed structure:
// [0]: factory function
@@ -2968,12 +3083,8 @@
// ByteArray represents fixed sized byte arrays. Used by the outside world,
// such as PCRE, and also by the memory allocator and garbage collector to
// fill in free blocks in the heap.
-class ByteArray: public HeapObject {
+class ByteArray: public FixedArrayBase {
public:
- // [length]: length of the array.
- inline int length();
- inline void set_length(int value);
-
// Setter and getter.
inline byte get(int index);
inline void set(int index, byte value);
@@ -3018,10 +3129,6 @@
#endif
// Layout description.
- // Length is smi tagged when it is stored.
- static const int kLengthOffset = HeapObject::kHeaderSize;
- static const int kHeaderSize = kLengthOffset + kPointerSize;
-
static const int kAlignedSize = OBJECT_POINTER_ALIGN(kHeaderSize);
// Maximal memory consumption for a single ByteArray.
@@ -3045,11 +3152,10 @@
// Out-of-range values passed to the setter are converted via a C
// cast, not clamping. Out-of-range indices cause exceptions to be
// raised rather than being silently ignored.
-class ExternalArray: public HeapObject {
+class ExternalArray: public FixedArrayBase {
public:
- // [length]: length of the array.
- inline int length();
- inline void set_length(int value);
+
+ inline bool is_the_hole(int index) { return false; }
// [external_pointer]: The pointer to the external memory area backing this
// external array.
@@ -3062,9 +3168,8 @@
static const int kMaxLength = 0x3fffffff;
// ExternalArray headers are not quadword aligned.
- static const int kLengthOffset = HeapObject::kHeaderSize;
static const int kExternalPointerOffset =
- POINTER_SIZE_ALIGN(kLengthOffset + kIntSize);
+ POINTER_SIZE_ALIGN(FixedArrayBase::kLengthOffset + kPointerSize);
static const int kHeaderSize = kExternalPointerOffset + kPointerSize;
static const int kAlignedSize = OBJECT_POINTER_ALIGN(kHeaderSize);
@@ -3086,7 +3191,8 @@
inline uint8_t* external_pixel_pointer();
// Setter and getter.
- inline uint8_t get(int index);
+ inline uint8_t get_scalar(int index);
+ inline MaybeObject* get(int index);
inline void set(int index, uint8_t value);
// This accessor applies the correct conversion from Smi, HeapNumber and
@@ -3114,7 +3220,8 @@
class ExternalByteArray: public ExternalArray {
public:
// Setter and getter.
- inline int8_t get(int index);
+ inline int8_t get_scalar(int index);
+ inline MaybeObject* get(int index);
inline void set(int index, int8_t value);
// This accessor applies the correct conversion from Smi, HeapNumber
@@ -3142,7 +3249,8 @@
class ExternalUnsignedByteArray: public ExternalArray {
public:
// Setter and getter.
- inline uint8_t get(int index);
+ inline uint8_t get_scalar(int index);
+ inline MaybeObject* get(int index);
inline void set(int index, uint8_t value);
// This accessor applies the correct conversion from Smi, HeapNumber
@@ -3170,7 +3278,8 @@
class ExternalShortArray: public ExternalArray {
public:
// Setter and getter.
- inline int16_t get(int index);
+ inline int16_t get_scalar(int index);
+ inline MaybeObject* get(int index);
inline void set(int index, int16_t value);
// This accessor applies the correct conversion from Smi, HeapNumber
@@ -3198,7 +3307,8 @@
class ExternalUnsignedShortArray: public ExternalArray {
public:
// Setter and getter.
- inline uint16_t get(int index);
+ inline uint16_t get_scalar(int index);
+ inline MaybeObject* get(int index);
inline void set(int index, uint16_t value);
// This accessor applies the correct conversion from Smi, HeapNumber
@@ -3226,7 +3336,8 @@
class ExternalIntArray: public ExternalArray {
public:
// Setter and getter.
- inline int32_t get(int index);
+ inline int32_t get_scalar(int index);
+ inline MaybeObject* get(int index);
inline void set(int index, int32_t value);
// This accessor applies the correct conversion from Smi, HeapNumber
@@ -3254,7 +3365,8 @@
class ExternalUnsignedIntArray: public ExternalArray {
public:
// Setter and getter.
- inline uint32_t get(int index);
+ inline uint32_t get_scalar(int index);
+ inline MaybeObject* get(int index);
inline void set(int index, uint32_t value);
// This accessor applies the correct conversion from Smi, HeapNumber
@@ -3282,7 +3394,8 @@
class ExternalFloatArray: public ExternalArray {
public:
// Setter and getter.
- inline float get(int index);
+ inline float get_scalar(int index);
+ inline MaybeObject* get(int index);
inline void set(int index, float value);
// This accessor applies the correct conversion from Smi, HeapNumber
@@ -3310,7 +3423,8 @@
class ExternalDoubleArray: public ExternalArray {
public:
// Setter and getter.
- inline double get(int index);
+ inline double get_scalar(int index);
+ inline MaybeObject* get(int index);
inline void set(int index, double value);
// This accessor applies the correct conversion from Smi, HeapNumber
@@ -3480,13 +3594,14 @@
UNARY_OP_IC,
BINARY_OP_IC,
COMPARE_IC,
+ TO_BOOLEAN_IC,
// No more than 16 kinds. The value currently encoded in four bits in
// Flags.
// Pseudo-kinds.
REGEXP = BUILTIN,
FIRST_IC_KIND = LOAD_IC,
- LAST_IC_KIND = COMPARE_IC
+ LAST_IC_KIND = TO_BOOLEAN_IC
};
enum {
@@ -3552,13 +3667,10 @@
inline bool is_keyed_store_stub() { return kind() == KEYED_STORE_IC; }
inline bool is_call_stub() { return kind() == CALL_IC; }
inline bool is_keyed_call_stub() { return kind() == KEYED_CALL_IC; }
- inline bool is_unary_op_stub() {
- return kind() == UNARY_OP_IC;
- }
- inline bool is_binary_op_stub() {
- return kind() == BINARY_OP_IC;
- }
+ inline bool is_unary_op_stub() { return kind() == UNARY_OP_IC; }
+ inline bool is_binary_op_stub() { return kind() == BINARY_OP_IC; }
inline bool is_compare_ic_stub() { return kind() == COMPARE_IC; }
+ inline bool is_to_boolean_ic_stub() { return kind() == TO_BOOLEAN_IC; }
// [major_key]: For kind STUB or BINARY_OP_IC, the major key.
inline int major_key();
@@ -3600,21 +3712,24 @@
inline CheckType check_type();
inline void set_check_type(CheckType value);
- // [type-recording unary op type]: For all UNARY_OP_IC.
+ // [type-recording unary op type]: For kind UNARY_OP_IC.
inline byte unary_op_type();
inline void set_unary_op_type(byte value);
- // [type-recording binary op type]: For all TYPE_RECORDING_BINARY_OP_IC.
+ // [type-recording binary op type]: For kind BINARY_OP_IC.
inline byte binary_op_type();
inline void set_binary_op_type(byte value);
inline byte binary_op_result_type();
inline void set_binary_op_result_type(byte value);
- // [compare state]: For kind compare IC stubs, tells what state the
- // stub is in.
+ // [compare state]: For kind COMPARE_IC, tells what state the stub is in.
inline byte compare_state();
inline void set_compare_state(byte value);
+  // [to_boolean_state]: For kind TO_BOOLEAN_IC, tells what state the stub is in.
+ inline byte to_boolean_state();
+ inline void set_to_boolean_state(byte value);
+
// Get the safepoint entry for the given pc.
SafepointEntry GetSafepointEntry(Address pc);
@@ -3756,9 +3871,10 @@
static const int kStackSlotsOffset = kKindSpecificFlagsOffset;
static const int kCheckTypeOffset = kKindSpecificFlagsOffset;
- static const int kCompareStateOffset = kStubMajorKeyOffset + 1;
static const int kUnaryOpTypeOffset = kStubMajorKeyOffset + 1;
static const int kBinaryOpTypeOffset = kStubMajorKeyOffset + 1;
+ static const int kCompareStateOffset = kStubMajorKeyOffset + 1;
+ static const int kToBooleanTypeOffset = kStubMajorKeyOffset + 1;
static const int kHasDeoptimizationSupportOffset = kOptimizableOffset + 1;
static const int kBinaryOpReturnTypeOffset = kBinaryOpTypeOffset + 1;
@@ -5693,12 +5809,15 @@
inline bool IsSequential();
inline bool IsExternal();
inline bool IsCons();
+ inline bool IsSliced();
+ inline bool IsIndirect();
inline bool IsExternalAscii();
inline bool IsExternalTwoByte();
inline bool IsSequentialAscii();
inline bool IsSequentialTwoByte();
inline bool IsSymbol();
inline StringRepresentationTag representation_tag();
+ inline uint32_t encoding_tag();
inline uint32_t full_representation_tag();
inline uint32_t size_tag();
#ifdef DEBUG
@@ -5730,6 +5849,51 @@
// All string values have a length field.
class String: public HeapObject {
public:
+ // Representation of the flat content of a String.
+ // A non-flat string doesn't have flat content.
+ // A flat string has content that's encoded as a sequence of either
+ // ASCII chars or two-byte UC16.
+ // Returned by String::GetFlatContent().
+ class FlatContent {
+ public:
+ // Returns true if the string is flat and this structure contains content.
+ bool IsFlat() { return state_ != NON_FLAT; }
+ // Returns true if the structure contains ASCII content.
+ bool IsAscii() { return state_ == ASCII; }
+ // Returns true if the structure contains two-byte content.
+ bool IsTwoByte() { return state_ == TWO_BYTE; }
+
+ // Return the ASCII content of the string. Only use if IsAscii() returns
+ // true.
+ Vector<const char> ToAsciiVector() {
+ ASSERT_EQ(ASCII, state_);
+ return Vector<const char>::cast(buffer_);
+ }
+ // Return the two-byte content of the string. Only use if IsTwoByte()
+ // returns true.
+ Vector<const uc16> ToUC16Vector() {
+ ASSERT_EQ(TWO_BYTE, state_);
+ return Vector<const uc16>::cast(buffer_);
+ }
+
+ private:
+ enum State { NON_FLAT, ASCII, TWO_BYTE };
+
+ // Constructors only used by String::GetFlatContent().
+ explicit FlatContent(Vector<const char> chars)
+ : buffer_(Vector<const byte>::cast(chars)),
+ state_(ASCII) { }
+ explicit FlatContent(Vector<const uc16> chars)
+ : buffer_(Vector<const byte>::cast(chars)),
+ state_(TWO_BYTE) { }
+ FlatContent() : buffer_(), state_(NON_FLAT) { }
+
+ Vector<const byte> buffer_;
+ State state_;
+
+ friend class String;
+ };
+
// Get and set the length of the string.
inline int length();
inline void set_length(int value);
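A sketch of the calling pattern the new FlatContent structure enables, assuming v8::internal code operating on a string that has already been flattened and no allocation between GetFlatContent() and the vector uses; the helper itself is illustrative.

// Sum the character values of a flat string without re-dispatching on the
// concrete representation for every character.
static int SumChars(String* str) {
  String::FlatContent content = str->GetFlatContent();
  if (!content.IsFlat()) return -1;        // caller should flatten first
  int sum = 0;
  if (content.IsAscii()) {
    Vector<const char> chars = content.ToAsciiVector();
    for (int i = 0; i < chars.length(); i++) sum += chars[i];
  } else {
    Vector<const uc16> chars = content.ToUC16Vector();
    for (int i = 0; i < chars.length(); i++) sum += chars[i];
  }
  return sum;
}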
@@ -5738,14 +5902,19 @@
inline uint32_t hash_field();
inline void set_hash_field(uint32_t value);
+ // Returns whether this string has only ASCII chars, i.e. all of them can
+ // be ASCII encoded. This might be the case even if the string is
+ // two-byte. Such strings may appear when the embedder prefers
+ // two-byte external representations even for ASCII data.
inline bool IsAsciiRepresentation();
inline bool IsTwoByteRepresentation();
- // Returns whether this string has ascii chars, i.e. all of them can
- // be ascii encoded. This might be the case even if the string is
- // two-byte. Such strings may appear when the embedder prefers
- // two-byte external representations even for ascii data.
- //
+ // Cons and slices have an encoding flag that may not represent the actual
+ // encoding of the underlying string. This is taken into account here.
+ // Requires: this->IsFlat()
+ inline bool IsAsciiRepresentationUnderneath();
+ inline bool IsTwoByteRepresentationUnderneath();
+
// NOTE: this should be considered only a hint. False negatives are
// possible.
inline bool HasOnlyAsciiChars();
@@ -5778,8 +5947,16 @@
// string.
inline String* TryFlattenGetString(PretenureFlag pretenure = NOT_TENURED);
- Vector<const char> ToAsciiVector();
- Vector<const uc16> ToUC16Vector();
+  // Tries to return the content of a flat string as a structure holding
+  // either a flat vector of char or of uc16.
+  // If the string isn't flat (and therefore has no flat content), the
+  // returned structure reports so and provides no vector of either kind.
+ FlatContent GetFlatContent();
+
+ // Returns the parent of a sliced string or first part of a flat cons string.
+ // Requires: StringShape(this).IsIndirect() && this->IsFlat()
+ inline String* GetUnderlying();
// Mark the string as an undetectable object. It only applies to
// ascii and two byte string types.
@@ -5860,6 +6037,8 @@
StringPrint(stdout);
}
void StringPrint(FILE* out);
+
+ char* ToAsciiArray();
#endif
#ifdef DEBUG
void StringVerify();
@@ -6207,11 +6386,69 @@
typedef FixedBodyDescriptor<kFirstOffset, kSecondOffset + kPointerSize, kSize>
BodyDescriptor;
+#ifdef DEBUG
+ void ConsStringVerify();
+#endif
+
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(ConsString);
};
+// The Sliced String class describes strings that are substrings of another
+// sequential string. The motivation is to save time and memory when creating
+// a substring. A Sliced String is described as a pointer to the parent,
+// the offset from the start of the parent string and the length. Using
+// a Sliced String therefore requires unpacking of the parent string and
+// adding the offset to the start address. Substrings of a SlicedString are
+// not nested, since the double indirection is simplified when creating
+// such a substring.
+// Currently missing features are:
+// - handling externalized parent strings
+// - external strings as parent
+// - truncating a sliced string so an otherwise unneeded parent can be GC'ed.
+class SlicedString: public String {
+ public:
+
+ inline String* parent();
+ inline void set_parent(String* parent);
+ inline int offset();
+ inline void set_offset(int offset);
+
+ // Dispatched behavior.
+ uint16_t SlicedStringGet(int index);
+
+ // Casting.
+ static inline SlicedString* cast(Object* obj);
+
+ // Layout description.
+ static const int kParentOffset = POINTER_SIZE_ALIGN(String::kSize);
+ static const int kOffsetOffset = kParentOffset + kPointerSize;
+ static const int kSize = kOffsetOffset + kPointerSize;
+
+ // Support for StringInputBuffer
+ inline const unibrow::byte* SlicedStringReadBlock(ReadBlockBuffer* buffer,
+ unsigned* offset_ptr,
+ unsigned chars);
+ inline void SlicedStringReadBlockIntoBuffer(ReadBlockBuffer* buffer,
+ unsigned* offset_ptr,
+ unsigned chars);
+ // Minimum length for a sliced string.
+ static const int kMinLength = 13;
+
+ typedef FixedBodyDescriptor<kParentOffset,
+ kOffsetOffset + kPointerSize, kSize>
+ BodyDescriptor;
+
+#ifdef DEBUG
+ void SlicedStringVerify();
+#endif
+
+ private:
+ DISALLOW_IMPLICIT_CONSTRUCTORS(SlicedString);
+};
+
+
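Conceptually, reading a character of a SlicedString is one extra indirection plus an offset; a standalone model of that lookup follows (the real SlicedStringGet must also cope with the parent's own representation).

#include <stdint.h>

// Minimal model of a slice: flat parent characters, offset, and length.
struct SliceSketch {
  const uint16_t* parent_chars;
  int offset;
  int length;
};

// No characters are copied when the substring is created; a read simply
// adds the slice offset before indexing into the parent.
static uint16_t SliceGet(const SliceSketch& slice, int index) {
  return slice.parent_chars[slice.offset + index];
}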
// The ExternalString class describes string values that are backed by
// a string resource that lies outside the V8 heap. ExternalStrings
// consist of the length field common to all strings, a pointer to the
@@ -6551,6 +6788,40 @@
};
+// The JSWeakMap describes ECMAScript Harmony weak maps.
+class JSWeakMap: public JSObject {
+ public:
+ // [table]: the backing hash table mapping keys to values.
+ DECL_ACCESSORS(table, ObjectHashTable)
+
+ // [next]: linked list of encountered weak maps during GC.
+ DECL_ACCESSORS(next, Object)
+
+ // Unchecked accessors to be used during GC.
+ inline ObjectHashTable* unchecked_table();
+
+ // Casting.
+ static inline JSWeakMap* cast(Object* obj);
+
+#ifdef OBJECT_PRINT
+ inline void JSWeakMapPrint() {
+ JSWeakMapPrint(stdout);
+ }
+ void JSWeakMapPrint(FILE* out);
+#endif
+#ifdef DEBUG
+ void JSWeakMapVerify();
+#endif
+
+ static const int kTableOffset = JSObject::kHeaderSize;
+ static const int kNextOffset = kTableOffset + kPointerSize;
+ static const int kSize = kNextOffset + kPointerSize;
+
+ private:
+ DISALLOW_IMPLICIT_CONSTRUCTORS(JSWeakMap);
+};
+
+
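The [next] field lets the collector thread every weak map it encounters into a singly linked list. A sketch of walking such a list, with the terminating sentinel left collector-defined; the visitor is hypothetical.

// Walks the GC's list of encountered weak maps and applies a callback.
static void VisitWeakMaps(Object* head,
                          Object* sentinel,
                          void (*visit)(JSWeakMap*)) {
  for (Object* current = head; current != sentinel; ) {
    JSWeakMap* weak_map = JSWeakMap::cast(current);
    visit(weak_map);
    current = weak_map->next();   // follow the link set up during marking
  }
}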
// Foreign describes objects pointing from JavaScript to C structures.
// Since they cannot contain references to JS HeapObjects they can be
// placed in old_data_space.
@@ -6850,7 +7121,6 @@
DECL_ACCESSORS(instance_call_handler, Object)
DECL_ACCESSORS(access_check_info, Object)
DECL_ACCESSORS(flag, Smi)
- DECL_ACCESSORS(prototype_attributes, Smi)
// Following properties use flag bits.
DECL_BOOLEAN_ACCESSORS(hidden_prototype)
@@ -6858,6 +7128,7 @@
// If the bit is set, object instances created by this function
// requires access check.
DECL_BOOLEAN_ACCESSORS(needs_access_check)
+ DECL_BOOLEAN_ACCESSORS(read_only_prototype)
static inline FunctionTemplateInfo* cast(Object* obj);
@@ -6890,14 +7161,14 @@
static const int kAccessCheckInfoOffset =
kInstanceCallHandlerOffset + kPointerSize;
static const int kFlagOffset = kAccessCheckInfoOffset + kPointerSize;
- static const int kPrototypeAttributesOffset = kFlagOffset + kPointerSize;
- static const int kSize = kPrototypeAttributesOffset + kPointerSize;
+ static const int kSize = kFlagOffset + kPointerSize;
private:
// Bit position in the flag, from least significant bit position.
static const int kHiddenPrototypeBit = 0;
static const int kUndetectableBit = 1;
static const int kNeedsAccessCheckBit = 2;
+ static const int kReadOnlyPrototypeBit = 3;
DISALLOW_IMPLICIT_CONSTRUCTORS(FunctionTemplateInfo);
};
diff --git a/src/parser.cc b/src/parser.cc
index ece0cfe..f8c7c41 100644
--- a/src/parser.cc
+++ b/src/parser.cc
@@ -584,7 +584,8 @@
pre_data_(pre_data),
fni_(NULL),
stack_overflow_(false),
- parenthesized_function_(false) {
+ parenthesized_function_(false),
+ harmony_block_scoping_(false) {
AstNode::ResetIds();
}
@@ -809,6 +810,10 @@
isolate()->Throw(*result, &location);
}
+void Parser::SetHarmonyBlockScoping(bool block_scoping) {
+ scanner().SetHarmonyBlockScoping(block_scoping);
+ harmony_block_scoping_ = block_scoping;
+}
// Base class containing common code for the different finder classes used by
// the parser.
@@ -1106,6 +1111,25 @@
};
+Statement* Parser::ParseSourceElement(ZoneStringList* labels,
+ bool* ok) {
+ if (peek() == Token::FUNCTION) {
+ // FunctionDeclaration is only allowed in the context of SourceElements
+ // (Ecma 262 5th Edition, clause 14):
+ // SourceElement:
+ // Statement
+ // FunctionDeclaration
+    // A common language extension is to allow a function declaration in
+    // place of any statement. This extension is disabled in strict mode.
+ return ParseFunctionDeclaration(ok);
+ } else if (peek() == Token::LET) {
+ return ParseVariableStatement(kSourceElement, ok);
+ } else {
+ return ParseStatement(labels, ok);
+ }
+}
+
+
void* Parser::ParseSourceElements(ZoneList<Statement*>* processor,
int end_token,
bool* ok) {
@@ -1129,21 +1153,7 @@
}
Scanner::Location token_loc = scanner().peek_location();
-
- Statement* stat;
- if (peek() == Token::FUNCTION) {
- // FunctionDeclaration is only allowed in the context of SourceElements
- // (Ecma 262 5th Edition, clause 14):
- // SourceElement:
- // Statement
- // FunctionDeclaration
- // Common language extension is to allow function declaration in place
- // of any statement. This language extension is disabled in strict mode.
- stat = ParseFunctionDeclaration(CHECK_OK);
- } else {
- stat = ParseStatement(NULL, CHECK_OK);
- }
-
+ Statement* stat = ParseSourceElement(NULL, CHECK_OK);
if (stat == NULL || stat->IsEmpty()) {
directive_prologue = false; // End of directive prologue.
continue;
@@ -1231,7 +1241,7 @@
case Token::CONST: // fall through
case Token::VAR:
- stmt = ParseVariableStatement(ok);
+ stmt = ParseVariableStatement(kStatement, ok);
break;
case Token::SEMICOLON:
@@ -1326,9 +1336,9 @@
bool resolve,
bool* ok) {
Variable* var = NULL;
- // If we are inside a function, a declaration of a variable
- // is a truly local variable, and the scope of the variable
- // is always the function scope.
+ // If we are inside a function, a declaration of a var/const variable is a
+ // truly local variable, and the scope of the variable is always the function
+ // scope.
// If a function scope exists, then we can statically declare this
// variable and also set its mode. In any case, a Declaration node
@@ -1338,24 +1348,28 @@
// to the calling function context.
// Similarly, strict mode eval scope does not leak variable declarations to
// the caller's scope so we declare all locals, too.
- Scope* declaration_scope = top_scope_->DeclarationScope();
+
+ Scope* declaration_scope = mode == Variable::LET ? top_scope_
+ : top_scope_->DeclarationScope();
if (declaration_scope->is_function_scope() ||
- declaration_scope->is_strict_mode_eval_scope()) {
+ declaration_scope->is_strict_mode_eval_scope() ||
+ declaration_scope->is_block_scope()) {
// Declare the variable in the function scope.
var = declaration_scope->LocalLookup(name);
if (var == NULL) {
// Declare the name.
var = declaration_scope->DeclareLocal(name, mode);
} else {
- // The name was declared before; check for conflicting
- // re-declarations. If the previous declaration was a const or the
- // current declaration is a const then we have a conflict. There is
- // similar code in runtime.cc in the Declare functions.
- if ((mode == Variable::CONST) || (var->mode() == Variable::CONST)) {
- // We only have vars and consts in declarations.
+ // The name was declared before; check for conflicting re-declarations.
+ // We have a conflict if either of the declarations is not a var. There
+ // is similar code in runtime.cc in the Declare functions.
+ if ((mode != Variable::VAR) || (var->mode() != Variable::VAR)) {
+ // We only have vars, consts and lets in declarations.
ASSERT(var->mode() == Variable::VAR ||
- var->mode() == Variable::CONST);
- const char* type = (var->mode() == Variable::VAR) ? "var" : "const";
+ var->mode() == Variable::CONST ||
+ var->mode() == Variable::LET);
+ const char* type = (var->mode() == Variable::VAR) ? "var" :
+ (var->mode() == Variable::CONST) ? "const" : "let";
Handle<String> type_string =
isolate()->factory()->NewStringFromUtf8(CStrVector(type), TENURED);
Expression* expression =
@@ -1498,12 +1512,15 @@
// Even if we're not at the top-level of the global or a function
// scope, we treat is as such and introduce the function with it's
// initial value upon entering the corresponding scope.
- Declare(name, Variable::VAR, fun, true, CHECK_OK);
+ Variable::Mode mode = harmony_block_scoping_ ? Variable::LET : Variable::VAR;
+ Declare(name, mode, fun, true, CHECK_OK);
return EmptyStatement();
}
Block* Parser::ParseBlock(ZoneStringList* labels, bool* ok) {
+ if (harmony_block_scoping_) return ParseScopedBlock(labels, ok);
+
// Block ::
// '{' Statement* '}'
@@ -1527,12 +1544,65 @@
}
-Block* Parser::ParseVariableStatement(bool* ok) {
+Block* Parser::ParseScopedBlock(ZoneStringList* labels, bool* ok) {
+ // Construct block expecting 16 statements.
+ Block* body = new(zone()) Block(isolate(), labels, 16, false);
+ Scope* saved_scope = top_scope_;
+ Scope* block_scope = NewScope(top_scope_,
+ Scope::BLOCK_SCOPE,
+ inside_with());
+ body->set_block_scope(block_scope);
+ block_scope->DeclareLocal(isolate()->factory()->block_scope_symbol(),
+ Variable::VAR);
+ if (top_scope_->is_strict_mode()) {
+ block_scope->EnableStrictMode();
+ }
+ top_scope_ = block_scope;
+
+ // Parse the statements and collect escaping labels.
+ TargetCollector collector;
+ Target target(&this->target_stack_, &collector);
+ Expect(Token::LBRACE, CHECK_OK);
+ {
+ Target target_body(&this->target_stack_, body);
+ InitializationBlockFinder block_finder(top_scope_, target_stack_);
+
+ while (peek() != Token::RBRACE) {
+ Statement* stat = ParseSourceElement(NULL, CHECK_OK);
+ if (stat && !stat->IsEmpty()) {
+ body->AddStatement(stat);
+ block_finder.Update(stat);
+ }
+ }
+ }
+ Expect(Token::RBRACE, CHECK_OK);
+
+ // Create exit block.
+ Block* exit = new(zone()) Block(isolate(), NULL, 1, false);
+ exit->AddStatement(new(zone()) ExitContextStatement());
+
+ // Create a try-finally statement.
+ TryFinallyStatement* try_finally =
+ new(zone()) TryFinallyStatement(body, exit);
+ try_finally->set_escaping_targets(collector.targets());
+ top_scope_ = saved_scope;
+
+ // Create a result block.
+ Block* result = new(zone()) Block(isolate(), NULL, 1, false);
+ result->AddStatement(try_finally);
+ return result;
+}
+
+
+Block* Parser::ParseVariableStatement(VariableDeclarationContext var_context,
+ bool* ok) {
// VariableStatement ::
// VariableDeclarations ';'
Handle<String> ignore;
- Block* result = ParseVariableDeclarations(true, &ignore, CHECK_OK);
+ Block* result = ParseVariableDeclarations(var_context,
+ &ignore,
+ CHECK_OK);
ExpectSemicolon(CHECK_OK);
return result;
}
@@ -1549,33 +1619,54 @@
// *var is untouched; in particular, it is the caller's responsibility
// to initialize it properly. This mechanism is used for the parsing
// of 'for-in' loops.
-Block* Parser::ParseVariableDeclarations(bool accept_IN,
+Block* Parser::ParseVariableDeclarations(VariableDeclarationContext var_context,
Handle<String>* out,
bool* ok) {
// VariableDeclarations ::
// ('var' | 'const') (Identifier ('=' AssignmentExpression)?)+[',']
Variable::Mode mode = Variable::VAR;
+ // True if the binding needs initialization. 'let' and 'const' declared
+ // bindings are created uninitialized by their declaration nodes and
+ // need initialization. 'var' declared bindings are always initialized
+ // immediately by their declaration nodes.
+ bool needs_init = false;
bool is_const = false;
- Scope* declaration_scope = top_scope_->DeclarationScope();
+ Token::Value init_op = Token::INIT_VAR;
if (peek() == Token::VAR) {
Consume(Token::VAR);
} else if (peek() == Token::CONST) {
Consume(Token::CONST);
- if (declaration_scope->is_strict_mode()) {
+ if (top_scope_->is_strict_mode()) {
ReportMessage("strict_const", Vector<const char*>::empty());
*ok = false;
return NULL;
}
mode = Variable::CONST;
is_const = true;
+ needs_init = true;
+ init_op = Token::INIT_CONST;
+ } else if (peek() == Token::LET) {
+ Consume(Token::LET);
+ if (var_context != kSourceElement &&
+ var_context != kForStatement) {
+ ASSERT(var_context == kStatement);
+ ReportMessage("unprotected_let", Vector<const char*>::empty());
+ *ok = false;
+ return NULL;
+ }
+ mode = Variable::LET;
+ needs_init = true;
+ init_op = Token::INIT_LET;
} else {
UNREACHABLE(); // by current callers
}
- // The scope of a variable/const declared anywhere inside a function
+ Scope* declaration_scope = mode == Variable::LET
+ ? top_scope_ : top_scope_->DeclarationScope();
+ // The scope of a var/const declared variable anywhere inside a function
// is the entire function (ECMA-262, 3rd, 10.1.3, and 12.2). Thus we can
- // transform a source-level variable/const declaration into a (Function)
+ // transform a source-level var/const declaration into a (Function)
// Scope declaration, and rewrite the source-level initialization into an
// assignment statement. We use a block to collect multiple assignments.
//
@@ -1659,18 +1750,19 @@
if (peek() == Token::ASSIGN) {
Expect(Token::ASSIGN, CHECK_OK);
position = scanner().location().beg_pos;
- value = ParseAssignmentExpression(accept_IN, CHECK_OK);
+ value = ParseAssignmentExpression(var_context != kForStatement, CHECK_OK);
// Don't infer if it is "a = function(){...}();"-like expression.
if (fni_ != NULL &&
value->AsCall() == NULL &&
value->AsCallNew() == NULL) {
fni_->Infer();
+ } else {
+ fni_->RemoveLastFunction();
}
}
- // Make sure that 'const c' actually initializes 'c' to undefined
- // even though it seems like a stupid thing to do.
- if (value == NULL && is_const) {
+ // Make sure that 'const x' and 'let x' initialize 'x' to undefined.
+ if (value == NULL && needs_init) {
value = GetLiteralUndefined();
}
@@ -1758,12 +1850,11 @@
// for constant lookups is always the function context, while it is
// the top context for variables). Sigh...
if (value != NULL) {
- Token::Value op = (is_const ? Token::INIT_CONST : Token::INIT_VAR);
bool in_with = is_const ? false : inside_with();
VariableProxy* proxy =
initialization_scope->NewUnresolved(name, in_with);
Assignment* assignment =
- new(zone()) Assignment(isolate(), op, proxy, value, position);
+ new(zone()) Assignment(isolate(), init_op, proxy, value, position);
if (block) {
block->AddStatement(new(zone()) ExpressionStatement(assignment));
}
@@ -1973,41 +2064,6 @@
}
-Block* Parser::WithHelper(Expression* obj, ZoneStringList* labels, bool* ok) {
- // Parse the statement and collect escaping labels.
- TargetCollector collector;
- Statement* stat;
- { Target target(&this->target_stack_, &collector);
- with_nesting_level_++;
- top_scope_->DeclarationScope()->RecordWithStatement();
- stat = ParseStatement(labels, CHECK_OK);
- with_nesting_level_--;
- }
- // Create resulting block with two statements.
- // 1: Evaluate the with expression.
- // 2: The try-finally block evaluating the body.
- Block* result = new(zone()) Block(isolate(), NULL, 2, false);
-
- if (result != NULL) {
- result->AddStatement(new(zone()) EnterWithContextStatement(obj));
-
- // Create body block.
- Block* body = new(zone()) Block(isolate(), NULL, 1, false);
- body->AddStatement(stat);
-
- // Create exit block.
- Block* exit = new(zone()) Block(isolate(), NULL, 1, false);
- exit->AddStatement(new(zone()) ExitContextStatement());
-
- // Return a try-finally statement.
- TryFinallyStatement* wrapper = new(zone()) TryFinallyStatement(body, exit);
- wrapper->set_escaping_targets(collector.targets());
- result->AddStatement(wrapper);
- }
- return result;
-}
-
-
Statement* Parser::ParseWithStatement(ZoneStringList* labels, bool* ok) {
// WithStatement ::
// 'with' '(' Expression ')' Statement
@@ -2024,7 +2080,11 @@
Expression* expr = ParseExpression(true, CHECK_OK);
Expect(Token::RPAREN, CHECK_OK);
- return WithHelper(expr, labels, CHECK_OK);
+ ++with_nesting_level_;
+ top_scope_->DeclarationScope()->RecordWithStatement();
+ Statement* stmt = ParseStatement(labels, CHECK_OK);
+ --with_nesting_level_;
+ return new(zone()) WithStatement(expr, stmt);
}
@@ -2159,39 +2219,22 @@
Expect(Token::RPAREN, CHECK_OK);
if (peek() == Token::LBRACE) {
- // Rewrite the catch body B to a single statement block
- // { try B finally { PopContext }}.
- Block* inner_body;
- // We need to collect escapes from the body for both the inner
- // try/finally used to pop the catch context and any possible outer
- // try/finally.
- TargetCollector inner_collector;
- { Target target(&this->target_stack_, &catch_collector);
- { Target target(&this->target_stack_, &inner_collector);
- catch_scope = NewScope(top_scope_, Scope::CATCH_SCOPE, inside_with());
- if (top_scope_->is_strict_mode()) {
- catch_scope->EnableStrictMode();
- }
- catch_variable = catch_scope->DeclareLocal(name, Variable::VAR);
-
- Scope* saved_scope = top_scope_;
- top_scope_ = catch_scope;
- inner_body = ParseBlock(NULL, CHECK_OK);
- top_scope_ = saved_scope;
- }
+ // Rewrite the catch body { B } to a block:
+ // { { B } ExitContext; }.
+ Target target(&this->target_stack_, &catch_collector);
+ catch_scope = NewScope(top_scope_, Scope::CATCH_SCOPE, inside_with());
+ if (top_scope_->is_strict_mode()) {
+ catch_scope->EnableStrictMode();
}
+ catch_variable = catch_scope->DeclareLocal(name, Variable::VAR);
+ catch_block = new(zone()) Block(isolate(), NULL, 2, false);
- // Create exit block.
- Block* inner_finally = new(zone()) Block(isolate(), NULL, 1, false);
- inner_finally->AddStatement(new(zone()) ExitContextStatement());
-
- // Create a try/finally statement.
- TryFinallyStatement* inner_try_finally =
- new(zone()) TryFinallyStatement(inner_body, inner_finally);
- inner_try_finally->set_escaping_targets(inner_collector.targets());
-
- catch_block = new(zone()) Block(isolate(), NULL, 1, false);
- catch_block->AddStatement(inner_try_finally);
+ Scope* saved_scope = top_scope_;
+ top_scope_ = catch_scope;
+ Block* catch_body = ParseBlock(NULL, CHECK_OK);
+ top_scope_ = saved_scope;
+ catch_block->AddStatement(catch_body);
+ catch_block->AddStatement(new(zone()) ExitContextStatement());
} else {
Expect(Token::LBRACE, CHECK_OK);
}
@@ -2307,7 +2350,7 @@
if (peek() == Token::VAR || peek() == Token::CONST) {
Handle<String> name;
Block* variable_statement =
- ParseVariableDeclarations(false, &name, CHECK_OK);
+ ParseVariableDeclarations(kForStatement, &name, CHECK_OK);
if (peek() == Token::IN && !name.is_null()) {
VariableProxy* each = top_scope_->NewUnresolved(name, inside_with());
@@ -2465,6 +2508,8 @@
|| op == Token::ASSIGN)
&& (right->AsCall() == NULL && right->AsCallNew() == NULL)) {
fni_->Infer();
+ } else {
+ fni_->RemoveLastFunction();
}
fni_->Leave();
}
@@ -2777,7 +2822,7 @@
Handle<String> name = callee->name();
Variable* var = top_scope_->Lookup(name);
if (var == NULL) {
- top_scope_->RecordEvalCall();
+ top_scope_->DeclarationScope()->RecordEvalCall();
}
}
result = NewCall(result, args, pos);
@@ -3666,8 +3711,11 @@
}
int num_parameters = 0;
- // Function declarations are hoisted.
- Scope* scope = (type == FunctionLiteral::DECLARATION)
+ // Function declarations are function scoped in normal mode, so they are
+ // hoisted. In harmony block scoping mode they are block scoped, so they
+ // are not hoisted.
+ Scope* scope = (type == FunctionLiteral::DECLARATION &&
+ !harmony_block_scoping_)
? NewScope(top_scope_->DeclarationScope(), Scope::FUNCTION_SCOPE, false)
: NewScope(top_scope_, Scope::FUNCTION_SCOPE, inside_with());
ZoneList<Statement*>* body = new(zone()) ZoneList<Statement*>(8);
@@ -3969,7 +4017,7 @@
}
-// Parses and identifier that is valid for the current scope, in particular it
+// Parses an identifier that is valid for the current scope, in particular it
// fails on strict mode future reserved keywords in a strict scope.
Handle<String> Parser::ParseIdentifier(bool* ok) {
if (top_scope_->is_strict_mode()) {
@@ -5050,9 +5098,11 @@
// Create a Scanner for the preparser to use as input, and preparse the source.
static ScriptDataImpl* DoPreParse(UC16CharacterStream* source,
bool allow_lazy,
- ParserRecorder* recorder) {
+ ParserRecorder* recorder,
+ bool harmony_block_scoping) {
Isolate* isolate = Isolate::Current();
JavaScriptScanner scanner(isolate->unicode_cache());
+ scanner.SetHarmonyBlockScoping(harmony_block_scoping);
scanner.Initialize(source);
intptr_t stack_limit = isolate->stack_guard()->real_climit();
if (!preparser::PreParser::PreParseProgram(&scanner,
@@ -5073,7 +5123,8 @@
// Preparse, but only collect data that is immediately useful,
// even if the preparser data is only used once.
ScriptDataImpl* ParserApi::PartialPreParse(UC16CharacterStream* source,
- v8::Extension* extension) {
+ v8::Extension* extension,
+ bool harmony_block_scoping) {
bool allow_lazy = FLAG_lazy && (extension == NULL);
if (!allow_lazy) {
// Partial preparsing is only about lazily compiled functions.
@@ -5081,16 +5132,17 @@
return NULL;
}
PartialParserRecorder recorder;
- return DoPreParse(source, allow_lazy, &recorder);
+ return DoPreParse(source, allow_lazy, &recorder, harmony_block_scoping);
}
ScriptDataImpl* ParserApi::PreParse(UC16CharacterStream* source,
- v8::Extension* extension) {
+ v8::Extension* extension,
+ bool harmony_block_scoping) {
Handle<Script> no_script;
bool allow_lazy = FLAG_lazy && (extension == NULL);
CompleteParserRecorder recorder;
- return DoPreParse(source, allow_lazy, &recorder);
+ return DoPreParse(source, allow_lazy, &recorder, harmony_block_scoping);
}
@@ -5120,18 +5172,25 @@
ASSERT(info->function() == NULL);
FunctionLiteral* result = NULL;
Handle<Script> script = info->script();
+ bool harmony_block_scoping = !info->is_native() &&
+ FLAG_harmony_block_scoping;
if (info->is_lazy()) {
bool allow_natives_syntax =
FLAG_allow_natives_syntax ||
info->is_native();
Parser parser(script, allow_natives_syntax, NULL, NULL);
+ parser.SetHarmonyBlockScoping(harmony_block_scoping);
result = parser.ParseLazy(info);
} else {
// Whether we allow %identifier(..) syntax.
bool allow_natives_syntax =
info->is_native() || FLAG_allow_natives_syntax;
ScriptDataImpl* pre_data = info->pre_parse_data();
- Parser parser(script, allow_natives_syntax, info->extension(), pre_data);
+ Parser parser(script,
+ allow_natives_syntax,
+ info->extension(),
+ pre_data);
+ parser.SetHarmonyBlockScoping(harmony_block_scoping);
if (pre_data != NULL && pre_data->has_error()) {
Scanner::Location loc = pre_data->MessageLocation();
const char* message = pre_data->BuildMessage();
@@ -5150,7 +5209,6 @@
info->StrictMode());
}
}
-
info->SetFunction(result);
return (result != NULL);
}
diff --git a/src/parser.h b/src/parser.h
index 535b639..686dac8 100644
--- a/src/parser.h
+++ b/src/parser.h
@@ -164,12 +164,14 @@
// Generic preparser generating full preparse data.
static ScriptDataImpl* PreParse(UC16CharacterStream* source,
- v8::Extension* extension);
+ v8::Extension* extension,
+ bool harmony_block_scoping);
// Preparser that only does preprocessing that makes sense if only used
// immediately after.
static ScriptDataImpl* PartialPreParse(UC16CharacterStream* source,
- v8::Extension* extension);
+ v8::Extension* extension,
+ bool harmony_block_scoping);
};
// ----------------------------------------------------------------------------
@@ -435,6 +437,7 @@
void ReportMessageAt(Scanner::Location loc,
const char* message,
Vector<Handle<String> > args);
+ void SetHarmonyBlockScoping(bool block_scoping);
private:
// Limit on number of function parameters is chosen arbitrarily.
@@ -451,6 +454,12 @@
PARSE_EAGERLY
};
+ enum VariableDeclarationContext {
+ kSourceElement,
+ kStatement,
+ kForStatement
+ };
+
Isolate* isolate() { return isolate_; }
Zone* zone() { return isolate_->zone(); }
@@ -479,12 +488,15 @@
// for failure at the call sites.
void* ParseSourceElements(ZoneList<Statement*>* processor,
int end_token, bool* ok);
+ Statement* ParseSourceElement(ZoneStringList* labels, bool* ok);
Statement* ParseStatement(ZoneStringList* labels, bool* ok);
Statement* ParseFunctionDeclaration(bool* ok);
Statement* ParseNativeDeclaration(bool* ok);
Block* ParseBlock(ZoneStringList* labels, bool* ok);
- Block* ParseVariableStatement(bool* ok);
- Block* ParseVariableDeclarations(bool accept_IN,
+ Block* ParseScopedBlock(ZoneStringList* labels, bool* ok);
+ Block* ParseVariableStatement(VariableDeclarationContext var_context,
+ bool* ok);
+ Block* ParseVariableDeclarations(VariableDeclarationContext var_context,
Handle<String>* out,
bool* ok);
Statement* ParseExpressionOrLabelledStatement(ZoneStringList* labels,
@@ -493,7 +505,6 @@
Statement* ParseContinueStatement(bool* ok);
Statement* ParseBreakStatement(ZoneStringList* labels, bool* ok);
Statement* ParseReturnStatement(bool* ok);
- Block* WithHelper(Expression* obj, ZoneStringList* labels, bool* ok);
Statement* ParseWithStatement(ZoneStringList* labels, bool* ok);
CaseClause* ParseCaseClause(bool* default_seen_ptr, bool* ok);
SwitchStatement* ParseSwitchStatement(ZoneStringList* labels, bool* ok);
@@ -715,6 +726,7 @@
// Heuristically that means that the function will be called immediately,
// so never lazily compile it.
bool parenthesized_function_;
+ bool harmony_block_scoping_;
friend class LexicalScope;
};
diff --git a/src/platform-cygwin.cc b/src/platform-cygwin.cc
index 5f283c3..85a5e4f 100644
--- a/src/platform-cygwin.cc
+++ b/src/platform-cygwin.cc
@@ -166,6 +166,18 @@
}
+void OS::ProtectCode(void* address, const size_t size) {
+ DWORD old_protect;
+ VirtualProtect(address, size, PAGE_EXECUTE_READ, &old_protect);
+}
+
+
+void OS::Guard(void* address, const size_t size) {
+ DWORD oldprotect;
+ VirtualProtect(address, size, PAGE_READONLY | PAGE_GUARD, &oldprotect);
+}
+
+
void OS::Sleep(int milliseconds) {
unsigned int ms = static_cast<unsigned int>(milliseconds);
usleep(1000 * ms);
diff --git a/src/platform-linux.cc b/src/platform-linux.cc
index 37330be..362bf47 100644
--- a/src/platform-linux.cc
+++ b/src/platform-linux.cc
@@ -676,9 +676,11 @@
// This is also initialized by the first argument to pthread_create() but we
// don't know which thread will run first (the original thread or the new
// one) so we initialize it here too.
+#ifdef PR_SET_NAME
prctl(PR_SET_NAME,
reinterpret_cast<unsigned long>(thread->name()), // NOLINT
0, 0, 0);
+#endif
thread->data()->thread_ = pthread_self();
ASSERT(thread->data()->thread_ != kNoThread);
thread->Run();
diff --git a/src/platform-posix.cc b/src/platform-posix.cc
index 1ea53c8..52cf029 100644
--- a/src/platform-posix.cc
+++ b/src/platform-posix.cc
@@ -70,6 +70,7 @@
}
+#ifndef __CYGWIN__
// Get rid of writable permission on code allocations.
void OS::ProtectCode(void* address, const size_t size) {
mprotect(address, size, PROT_READ | PROT_EXEC);
@@ -80,6 +81,7 @@
void OS::Guard(void* address, const size_t size) {
mprotect(address, size, PROT_NONE);
}
+#endif // __CYGWIN__
// ----------------------------------------------------------------------------
diff --git a/src/preparser-api.cc b/src/preparser-api.cc
index e0ab500..80656d5 100644
--- a/src/preparser-api.cc
+++ b/src/preparser-api.cc
@@ -28,6 +28,7 @@
#include "../include/v8-preparser.h"
#include "globals.h"
+#include "flags.h"
#include "checks.h"
#include "allocation.h"
#include "utils.h"
diff --git a/src/preparser.cc b/src/preparser.cc
index c741b46..1a3dd73 100644
--- a/src/preparser.cc
+++ b/src/preparser.cc
@@ -56,8 +56,6 @@
// That means that contextual checks (like a label being declared where
// it is used) are generally omitted.
-namespace i = ::v8::internal;
-
void PreParser::ReportUnexpectedToken(i::Token::Value token) {
// We don't report stack overflows here, to avoid increasing the
// stack depth even further. Instead we report it after parsing is
@@ -114,6 +112,16 @@
#undef DUMMY
+PreParser::Statement PreParser::ParseSourceElement(bool* ok) {
+ switch (peek()) {
+ case i::Token::LET:
+ return ParseVariableStatement(kSourceElement, ok);
+ default:
+ return ParseStatement(ok);
+ }
+}
+
+
PreParser::SourceElements PreParser::ParseSourceElements(int end_token,
bool* ok) {
// SourceElements ::
@@ -121,7 +129,7 @@
bool allow_directive_prologue = true;
while (peek() != end_token) {
- Statement statement = ParseStatement(CHECK_OK);
+ Statement statement = ParseSourceElement(CHECK_OK);
if (allow_directive_prologue) {
if (statement.IsUseStrictLiteral()) {
set_strict_mode();
@@ -174,7 +182,7 @@
case i::Token::CONST:
case i::Token::VAR:
- return ParseVariableStatement(ok);
+ return ParseVariableStatement(kStatement, ok);
case i::Token::SEMICOLON:
Next();
@@ -260,7 +268,7 @@
Expect(i::Token::LBRACE, CHECK_OK);
while (peek() != i::Token::RBRACE) {
i::Scanner::Location start_location = scanner_->peek_location();
- Statement statement = ParseStatement(CHECK_OK);
+ Statement statement = ParseSourceElement(CHECK_OK);
i::Scanner::Location end_location = scanner_->location();
if (strict_mode() && statement.IsFunctionDeclaration()) {
ReportMessageAt(start_location.beg_pos, end_location.end_pos,
@@ -274,11 +282,15 @@
}
-PreParser::Statement PreParser::ParseVariableStatement(bool* ok) {
+PreParser::Statement PreParser::ParseVariableStatement(
+ VariableDeclarationContext var_context,
+ bool* ok) {
// VariableStatement ::
// VariableDeclarations ';'
- Statement result = ParseVariableDeclarations(true, NULL, CHECK_OK);
+ Statement result = ParseVariableDeclarations(var_context,
+ NULL,
+ CHECK_OK);
ExpectSemicolon(CHECK_OK);
return result;
}
@@ -289,9 +301,10 @@
// *var is untouched; in particular, it is the caller's responsibility
// to initialize it properly. This mechanism is also used for the parsing
// of 'for-in' loops.
-PreParser::Statement PreParser::ParseVariableDeclarations(bool accept_IN,
- int* num_decl,
- bool* ok) {
+PreParser::Statement PreParser::ParseVariableDeclarations(
+ VariableDeclarationContext var_context,
+ int* num_decl,
+ bool* ok) {
// VariableDeclarations ::
// ('var' | 'const') (Identifier ('=' AssignmentExpression)?)+[',']
@@ -306,13 +319,25 @@
return Statement::Default();
}
Consume(i::Token::CONST);
+ } else if (peek() == i::Token::LET) {
+ if (var_context != kSourceElement &&
+ var_context != kForStatement) {
+ i::Scanner::Location location = scanner_->peek_location();
+ ReportMessageAt(location.beg_pos, location.end_pos,
+ "unprotected_let", NULL);
+ *ok = false;
+ return Statement::Default();
+ }
+ Consume(i::Token::LET);
} else {
*ok = false;
return Statement::Default();
}
- // The scope of a variable/const declared anywhere inside a function
- // is the entire function (ECMA-262, 3rd, 10.1.3, and 12.2). .
+ // The scope of a var/const declared variable anywhere inside a function
+ // is the entire function (ECMA-262, 3rd, 10.1.3, and 12.2). The scope
+ // of a let declared variable is the scope of the immediately enclosing
+ // block.
int nvars = 0; // the number of variables declared
do {
// Parse variable name.
@@ -328,7 +353,7 @@
nvars++;
if (peek() == i::Token::ASSIGN) {
Expect(i::Token::ASSIGN, CHECK_OK);
- ParseAssignmentExpression(accept_IN, CHECK_OK);
+ ParseAssignmentExpression(var_context != kForStatement, CHECK_OK);
}
} while (peek() == i::Token::COMMA);
@@ -537,9 +562,10 @@
Expect(i::Token::FOR, CHECK_OK);
Expect(i::Token::LPAREN, CHECK_OK);
if (peek() != i::Token::SEMICOLON) {
- if (peek() == i::Token::VAR || peek() == i::Token::CONST) {
+ if (peek() == i::Token::VAR || peek() == i::Token::CONST ||
+ peek() == i::Token::LET) {
int decl_count;
- ParseVariableDeclarations(false, &decl_count, CHECK_OK);
+ ParseVariableDeclarations(kForStatement, &decl_count, CHECK_OK);
if (peek() == i::Token::IN && decl_count == 1) {
Expect(i::Token::IN, CHECK_OK);
ParseExpression(true, CHECK_OK);
diff --git a/src/preparser.h b/src/preparser.h
index 3d72c97..cd0a530 100644
--- a/src/preparser.h
+++ b/src/preparser.h
@@ -77,6 +77,12 @@
kFunctionScope
};
+ enum VariableDeclarationContext {
+ kSourceElement,
+ kStatement,
+ kForStatement
+ };
+
class Expression;
class Identifier {
@@ -344,7 +350,8 @@
strict_mode_violation_type_(NULL),
stack_overflow_(false),
allow_lazy_(true),
- parenthesized_function_(false) { }
+ parenthesized_function_(false),
+ harmony_block_scoping_(scanner->HarmonyBlockScoping()) { }
// Preparse the program. Only called in PreParseProgram after creating
// the instance.
@@ -377,12 +384,16 @@
// which is set to false if parsing failed; it is unchanged otherwise.
// By making the 'exception handling' explicit, we are forced to check
// for failure at the call sites.
+ Statement ParseSourceElement(bool* ok);
SourceElements ParseSourceElements(int end_token, bool* ok);
Statement ParseStatement(bool* ok);
Statement ParseFunctionDeclaration(bool* ok);
Statement ParseBlock(bool* ok);
- Statement ParseVariableStatement(bool* ok);
- Statement ParseVariableDeclarations(bool accept_IN, int* num_decl, bool* ok);
+ Statement ParseVariableStatement(VariableDeclarationContext var_context,
+ bool* ok);
+ Statement ParseVariableDeclarations(VariableDeclarationContext var_context,
+ int* num_decl,
+ bool* ok);
Statement ParseExpressionOrLabelledStatement(bool* ok);
Statement ParseIfStatement(bool* ok);
Statement ParseContinueStatement(bool* ok);
@@ -496,6 +507,7 @@
bool stack_overflow_;
bool allow_lazy_;
bool parenthesized_function_;
+ bool harmony_block_scoping_;
};
} } // v8::preparser
diff --git a/src/prettyprinter.cc b/src/prettyprinter.cc
index f18b320..b034293 100644
--- a/src/prettyprinter.cc
+++ b/src/prettyprinter.cc
@@ -123,11 +123,11 @@
}
-void PrettyPrinter::VisitEnterWithContextStatement(
- EnterWithContextStatement* node) {
- Print("<enter with context> (");
+void PrettyPrinter::VisitWithStatement(WithStatement* node) {
+ Print("with (");
Visit(node->expression());
Print(") ");
+ Visit(node->statement());
}
@@ -798,9 +798,10 @@
}
-void AstPrinter::VisitEnterWithContextStatement(
- EnterWithContextStatement* node) {
- PrintIndentedVisit("ENTER WITH CONTEXT", node->expression());
+void AstPrinter::VisitWithStatement(WithStatement* node) {
+ IndentedScope indent(this, "WITH");
+ PrintIndentedVisit("OBJECT", node->expression());
+ PrintIndentedVisit("BODY", node->statement());
}
@@ -1194,10 +1195,10 @@
}
-void JsonAstBuilder::VisitEnterWithContextStatement(
- EnterWithContextStatement* stmt) {
- TagScope tag(this, "EnterWithContextStatement");
+void JsonAstBuilder::VisitWithStatement(WithStatement* stmt) {
+ TagScope tag(this, "WithStatement");
Visit(stmt->expression());
+ Visit(stmt->statement());
}
diff --git a/src/profile-generator.cc b/src/profile-generator.cc
index 07426f2..5a95445 100644
--- a/src/profile-generator.cc
+++ b/src/profile-generator.cc
@@ -983,6 +983,11 @@
}
+Handle<HeapObject> HeapEntry::GetHeapObject() {
+ return snapshot_->collection()->FindHeapObjectById(id());
+}
+
+
template<class Visitor>
void HeapEntry::ApplyAndPaintAllReachable(Visitor* visitor) {
List<HeapEntry*> list(10);
@@ -1195,12 +1200,9 @@
int children_count,
int retainers_count) {
ASSERT(raw_entries_ == NULL);
- raw_entries_ = NewArray<char>(
- HeapEntry::EntriesSize(entries_count, children_count, retainers_count));
-#ifdef DEBUG
raw_entries_size_ =
HeapEntry::EntriesSize(entries_count, children_count, retainers_count);
-#endif
+ raw_entries_ = NewArray<char>(raw_entries_size_);
}
@@ -1346,8 +1348,8 @@
void HeapObjectsMap::SnapshotGenerationFinished() {
- initial_fill_mode_ = false;
- RemoveDeadEntries();
+ initial_fill_mode_ = false;
+ RemoveDeadEntries();
}
@@ -1493,6 +1495,24 @@
}
+Handle<HeapObject> HeapSnapshotsCollection::FindHeapObjectById(uint64_t id) {
+ AssertNoAllocation no_allocation;
+ HeapObject* object = NULL;
+ HeapIterator iterator(HeapIterator::kFilterUnreachable);
+ // Make sure that object with the given id is still reachable.
+ for (HeapObject* obj = iterator.next();
+ obj != NULL;
+ obj = iterator.next()) {
+ if (ids_.FindObject(obj->address()) == id) {
+ ASSERT(object == NULL);
+ object = obj;
+ // Can't break -- kFilterUnreachable requires full heap traversal.
+ }
+ }
+ return object != NULL ? Handle<HeapObject>(object) : Handle<HeapObject>();
+}
+
+
HeapEntry *const HeapEntriesMap::kHeapEntryPlaceholder =
reinterpret_cast<HeapEntry*>(1);
@@ -1663,7 +1683,7 @@
} else if (object->IsJSGlobalObject()) {
const char* tag = objects_tags_.GetTag(object);
const char* name = collection_->names()->GetName(
- GetConstructorNameForHeapProfile(JSObject::cast(object)));
+ GetConstructorName(JSObject::cast(object)));
if (tag != NULL) {
name = collection_->names()->GetFormatted("%s / %s", name, tag);
}
@@ -1691,8 +1711,7 @@
return AddEntry(object,
HeapEntry::kObject,
collection_->names()->GetName(
- GetConstructorNameForHeapProfile(
- JSObject::cast(object))),
+ GetConstructorName(JSObject::cast(object))),
children_count,
retainers_count);
} else if (object->IsString()) {
@@ -2101,6 +2120,31 @@
}
+String* V8HeapExplorer::GetConstructorName(JSObject* object) {
+ if (object->IsJSFunction()) return HEAP->closure_symbol();
+ String* constructor_name = object->constructor_name();
+ if (constructor_name == HEAP->Object_symbol()) {
+ // Look up an immediate "constructor" property, if it is a function,
+ // return its name. This is for instances of binding objects, which
+ // have prototype constructor type "Object".
+ Object* constructor_prop = NULL;
+ LookupResult result;
+ object->LocalLookupRealNamedProperty(HEAP->constructor_symbol(), &result);
+ if (result.IsProperty()) {
+ constructor_prop = result.GetLazyValue();
+ }
+ if (constructor_prop->IsJSFunction()) {
+ Object* maybe_name = JSFunction::cast(constructor_prop)->shared()->name();
+ if (maybe_name->IsString()) {
+ String* name = String::cast(maybe_name);
+ if (name->length() > 0) return name;
+ }
+ }
+ }
+ return object->constructor_name();
+}
+
+
HeapEntry* V8HeapExplorer::GetEntry(Object* obj) {
if (!obj->IsHeapObject()) return NULL;
return filler_->FindOrAddEntry(obj, this);
@@ -2960,10 +3004,19 @@
bool aborted_;
};
+const int HeapSnapshotJSONSerializer::kMaxSerializableSnapshotRawSize =
+ 256 * MB;
+
void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) {
ASSERT(writer_ == NULL);
writer_ = new OutputStreamWriter(stream);
+ HeapSnapshot* original_snapshot = NULL;
+ if (snapshot_->raw_entries_size() >= kMaxSerializableSnapshotRawSize) {
+ // The snapshot is too big. Serialize a fake snapshot.
+ original_snapshot = snapshot_;
+ snapshot_ = CreateFakeSnapshot();
+ }
// Since nodes graph is cyclic, we need the first pass to enumerate
// them. Strings can be serialized in one pass.
EnumerateNodes();
@@ -2971,6 +3024,26 @@
delete writer_;
writer_ = NULL;
+
+ if (original_snapshot != NULL) {
+ delete snapshot_;
+ snapshot_ = original_snapshot;
+ }
+}
+
+
+HeapSnapshot* HeapSnapshotJSONSerializer::CreateFakeSnapshot() {
+ HeapSnapshot* result = new HeapSnapshot(snapshot_->collection(),
+ HeapSnapshot::kFull,
+ snapshot_->title(),
+ snapshot_->uid());
+ result->AllocateEntries(2, 1, 0);
+ HeapEntry* root = result->AddRootEntry(1);
+ HeapEntry* message = result->AddEntry(
+ HeapEntry::kString, "The snapshot is too big", 0, 4, 0, 0);
+ root->SetUnidirElementReference(0, 1, message);
+ result->SetDominatorsToSelf();
+ return result;
}
@@ -3250,10 +3323,4 @@
sorted_entries->Sort(SortUsingEntryValue);
}
-
-String* GetConstructorNameForHeapProfile(JSObject* object) {
- if (object->IsJSFunction()) return HEAP->closure_symbol();
- return object->constructor_name();
-}
-
} } // namespace v8::internal
diff --git a/src/profile-generator.h b/src/profile-generator.h
index d1c2b38..fbb6fab 100644
--- a/src/profile-generator.h
+++ b/src/profile-generator.h
@@ -584,6 +584,8 @@
void Print(int max_depth, int indent);
+ Handle<HeapObject> GetHeapObject();
+
static int EntriesSize(int entries_count,
int children_count,
int retainers_count);
@@ -654,6 +656,7 @@
HeapEntry* gc_roots() { return gc_roots_entry_; }
HeapEntry* natives_root() { return natives_root_entry_; }
List<HeapEntry*>* entries() { return &entries_; }
+ int raw_entries_size() { return raw_entries_size_; }
void AllocateEntries(
int entries_count, int children_count, int retainers_count);
@@ -689,9 +692,7 @@
char* raw_entries_;
List<HeapEntry*> entries_;
bool entries_sorted_;
-#ifdef DEBUG
int raw_entries_size_;
-#endif
friend class HeapSnapshotTester;
@@ -763,6 +764,7 @@
TokenEnumerator* token_enumerator() { return token_enumerator_; }
uint64_t GetObjectId(Address addr) { return ids_.FindObject(addr); }
+ Handle<HeapObject> FindHeapObjectById(uint64_t id);
void ObjectMoveEvent(Address from, Address to) { ids_.MoveObject(from, to); }
private:
@@ -921,6 +923,8 @@
bool IterateAndExtractReferences(SnapshotFillerInterface* filler);
void TagGlobalObjects();
+ static String* GetConstructorName(JSObject* object);
+
static HeapObject* const kInternalRootObject;
private:
@@ -1095,6 +1099,7 @@
}
void EnumerateNodes();
+ HeapSnapshot* CreateFakeSnapshot();
int GetNodeId(HeapEntry* entry);
int GetStringId(const char* s);
void SerializeEdge(HeapGraphEdge* edge);
@@ -1106,6 +1111,8 @@
void SerializeStrings();
void SortHashMap(HashMap* map, List<HashMap::Entry*>* sorted_entries);
+ static const int kMaxSerializableSnapshotRawSize;
+
HeapSnapshot* snapshot_;
HashMap nodes_;
HashMap strings_;
@@ -1119,9 +1126,6 @@
DISALLOW_COPY_AND_ASSIGN(HeapSnapshotJSONSerializer);
};
-
-String* GetConstructorNameForHeapProfile(JSObject* object);
-
} } // namespace v8::internal
#endif // V8_PROFILE_GENERATOR_H_
diff --git a/src/proxy.js b/src/proxy.js
index 27524bd..2839159 100644
--- a/src/proxy.js
+++ b/src/proxy.js
@@ -136,6 +136,10 @@
return !!this.getPropertyDescriptor(name)
}
+function DerivedHasOwnTrap(name) {
+ return !!this.getOwnPropertyDescriptor(name)
+}
+
function DerivedKeysTrap() {
var names = this.getOwnPropertyNames()
var enumerableNames = []
diff --git a/src/regexp-macro-assembler.cc b/src/regexp-macro-assembler.cc
index 5578243..f91ea93 100644
--- a/src/regexp-macro-assembler.cc
+++ b/src/regexp-macro-assembler.cc
@@ -120,27 +120,31 @@
String* subject_ptr = *subject;
// Character offsets into string.
int start_offset = previous_index;
- int end_offset = subject_ptr->length();
+ int char_length = subject_ptr->length() - start_offset;
+ int slice_offset = 0;
- // The string has been flattened, so it it is a cons string it contains the
+ // The string has been flattened, so if it is a cons string it contains the
// full string in the first part.
if (StringShape(subject_ptr).IsCons()) {
ASSERT_EQ(0, ConsString::cast(subject_ptr)->second()->length());
subject_ptr = ConsString::cast(subject_ptr)->first();
+ } else if (StringShape(subject_ptr).IsSliced()) {
+ SlicedString* slice = SlicedString::cast(subject_ptr);
+ subject_ptr = slice->parent();
+ slice_offset = slice->offset();
}
// Ensure that an underlying string has the same ascii-ness.
bool is_ascii = subject_ptr->IsAsciiRepresentation();
ASSERT(subject_ptr->IsExternalString() || subject_ptr->IsSeqString());
// String is now either Sequential or External
int char_size_shift = is_ascii ? 0 : 1;
- int char_length = end_offset - start_offset;
const byte* input_start =
- StringCharacterPosition(subject_ptr, start_offset);
+ StringCharacterPosition(subject_ptr, start_offset + slice_offset);
int byte_length = char_length << char_size_shift;
const byte* input_end = input_start + byte_length;
Result res = Execute(*regexp_code,
- subject_ptr,
+ *subject,
start_offset,
input_start,
input_end,
@@ -152,7 +156,7 @@
NativeRegExpMacroAssembler::Result NativeRegExpMacroAssembler::Execute(
Code* code,
- String* input,
+ String* input, // This needs to be the unpacked (sliced, cons) string.
int start_offset,
const byte* input_start,
const byte* input_end,
diff --git a/src/regexp.js b/src/regexp.js
index 7b851a3..a7f42d5 100644
--- a/src/regexp.js
+++ b/src/regexp.js
@@ -50,24 +50,29 @@
var global = false;
var ignoreCase = false;
var multiline = false;
-
for (var i = 0; i < flags.length; i++) {
var c = %_CallFunction(flags, i, StringCharAt);
switch (c) {
case 'g':
- // Allow duplicate flags to be consistent with JSC and others.
+ if (global) {
+ throw MakeSyntaxError("invalid_regexp_flags", [flags]);
+ }
global = true;
break;
case 'i':
+ if (ignoreCase) {
+ throw MakeSyntaxError("invalid_regexp_flags", [flags]);
+ }
ignoreCase = true;
break;
case 'm':
+ if (multiline) {
+ throw MakeSyntaxError("invalid_regexp_flags", [flags]);
+ }
multiline = true;
break;
default:
- // Ignore flags that have no meaning to be consistent with
- // JSC.
- break;
+ throw MakeSyntaxError("invalid_regexp_flags", [flags]);
}
}
diff --git a/src/rewriter.cc b/src/rewriter.cc
index e8ca5b9..ad6ce05 100644
--- a/src/rewriter.cc
+++ b/src/rewriter.cc
@@ -197,13 +197,17 @@
}
+void Processor::VisitWithStatement(WithStatement* node) {
+ bool set_after_body = is_set_;
+ Visit(node->statement());
+ is_set_ = is_set_ && set_after_body;
+}
+
+
// Do nothing:
void Processor::VisitDeclaration(Declaration* node) {}
void Processor::VisitEmptyStatement(EmptyStatement* node) {}
void Processor::VisitReturnStatement(ReturnStatement* node) {}
-void Processor::VisitEnterWithContextStatement(
- EnterWithContextStatement* node) {
-}
void Processor::VisitExitContextStatement(ExitContextStatement* node) {}
void Processor::VisitDebuggerStatement(DebuggerStatement* node) {}
diff --git a/src/runtime-profiler.h b/src/runtime-profiler.h
index 3f3ab07..15c2097 100644
--- a/src/runtime-profiler.h
+++ b/src/runtime-profiler.h
@@ -94,12 +94,6 @@
private:
static const int kSamplerWindowSize = 16;
- static const int kStateWindowSize = 128;
-
- enum SamplerState {
- IN_NON_JS_STATE = 0,
- IN_JS_STATE = 1
- };
static void HandleWakeUp(Isolate* isolate);
diff --git a/src/runtime.cc b/src/runtime.cc
index 8f14565..50f9ce1 100644
--- a/src/runtime.cc
+++ b/src/runtime.cc
@@ -219,8 +219,20 @@
}
break;
}
- default:
- UNREACHABLE();
+ case JSObject::NON_STRICT_ARGUMENTS_ELEMENTS:
+ UNIMPLEMENTED();
+ break;
+ case JSObject::EXTERNAL_PIXEL_ELEMENTS:
+ case JSObject::EXTERNAL_BYTE_ELEMENTS:
+ case JSObject::EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
+ case JSObject::EXTERNAL_SHORT_ELEMENTS:
+ case JSObject::EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
+ case JSObject::EXTERNAL_INT_ELEMENTS:
+ case JSObject::EXTERNAL_UNSIGNED_INT_ELEMENTS:
+ case JSObject::EXTERNAL_FLOAT_ELEMENTS:
+ case JSObject::EXTERNAL_DOUBLE_ELEMENTS:
+ case JSObject::FAST_DOUBLE_ELEMENTS:
+ // No contained objects, nothing to do.
break;
}
return copy;
@@ -618,7 +630,44 @@
ASSERT(args.length() == 1);
CONVERT_CHECKED(JSProxy, proxy, args[0]);
proxy->Fix();
- return proxy;
+ return isolate->heap()->undefined_value();
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_WeakMapInitialize) {
+ HandleScope scope(isolate);
+ ASSERT(args.length() == 1);
+ CONVERT_ARG_CHECKED(JSWeakMap, weakmap, 0);
+ ASSERT(weakmap->map()->inobject_properties() == 0);
+ Handle<ObjectHashTable> table = isolate->factory()->NewObjectHashTable(0);
+ weakmap->set_table(*table);
+ weakmap->set_next(Smi::FromInt(0));
+ return *weakmap;
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_WeakMapGet) {
+ NoHandleAllocation ha;
+ ASSERT(args.length() == 2);
+ CONVERT_ARG_CHECKED(JSWeakMap, weakmap, 0);
+ // TODO(mstarzinger): Currently we cannot use JSProxy objects as keys
+ // because they cannot be cast to JSObject to get an identity hash code.
+ CONVERT_ARG_CHECKED(JSObject, key, 1);
+ return weakmap->table()->Lookup(*key);
+}
+
+
+RUNTIME_FUNCTION(MaybeObject*, Runtime_WeakMapSet) {
+ HandleScope scope(isolate);
+ ASSERT(args.length() == 3);
+ CONVERT_ARG_CHECKED(JSWeakMap, weakmap, 0);
+ // TODO(mstarzinger): See Runtime_WeakMapGet above.
+ CONVERT_ARG_CHECKED(JSObject, key, 1);
+ Handle<Object> value(args[2]);
+ Handle<ObjectHashTable> table(weakmap->table());
+ Handle<ObjectHashTable> new_table = PutIntoObjectHashTable(table, key, value);
+ weakmap->set_table(*new_table);
+ return *value;
}
@@ -1257,8 +1306,9 @@
int index;
PropertyAttributes attributes;
ContextLookupFlags flags = DONT_FOLLOW_CHAINS;
+ BindingFlags binding_flags;
Handle<Object> holder =
- context->Lookup(name, flags, &index, &attributes);
+ context->Lookup(name, flags, &index, &attributes, &binding_flags);
if (attributes != ABSENT) {
// The name was declared before; check for conflicting
@@ -1545,8 +1595,9 @@
int index;
PropertyAttributes attributes;
ContextLookupFlags flags = FOLLOW_CHAINS;
+ BindingFlags binding_flags;
Handle<Object> holder =
- context->Lookup(name, flags, &index, &attributes);
+ context->Lookup(name, flags, &index, &attributes, &binding_flags);
// In most situations, the property introduced by the const
// declaration should be present in the context extension object.
@@ -1673,7 +1724,9 @@
RUNTIME_FUNCTION(MaybeObject*, Runtime_RegExpConstructResult) {
ASSERT(args.length() == 3);
CONVERT_SMI_ARG_CHECKED(elements_count, 0);
- if (elements_count > JSArray::kMaxFastElementsLength) {
+ if (elements_count < 0 ||
+ elements_count > FixedArray::kMaxLength ||
+ !Smi::IsValid(elements_count)) {
return isolate->ThrowIllegalOperation();
}
Object* new_object;
@@ -1815,10 +1868,19 @@
}
-RUNTIME_FUNCTION(MaybeObject*, Runtime_GetGlobalReceiver) {
- // Returns a real global receiver, not one of builtins object.
+RUNTIME_FUNCTION(MaybeObject*, Runtime_GetDefaultReceiver) {
+ NoHandleAllocation handle_free;
+ ASSERT(args.length() == 1);
+ CONVERT_CHECKED(JSFunction, function, args[0]);
+ SharedFunctionInfo* shared = function->shared();
+ if (shared->native() || shared->strict_mode()) {
+ return isolate->heap()->undefined_value();
+ }
+ // Returns undefined for strict or native functions, or
+ // the associated global receiver for "normal" functions.
+
Context* global_context =
- isolate->context()->global()->global_context();
+ function->context()->global()->global_context();
return global_context->global()->global_receiver();
}
@@ -1978,6 +2040,24 @@
}
+// Creates a local, readonly, property called length with the correct
+// length (when read by the user). This effectively overwrites the
+// interceptor used to normally provide the length.
+RUNTIME_FUNCTION(MaybeObject*, Runtime_BoundFunctionSetLength) {
+ NoHandleAllocation ha;
+ ASSERT(args.length() == 2);
+ CONVERT_CHECKED(JSFunction, fun, args[0]);
+ CONVERT_CHECKED(Smi, length, args[1]);
+ MaybeObject* maybe_name =
+ isolate->heap()->AllocateStringFromAscii(CStrVector("length"));
+ String* name;
+ if (!maybe_name->To(&name)) return maybe_name;
+ PropertyAttributes attr =
+ static_cast<PropertyAttributes>(DONT_DELETE | DONT_ENUM | READ_ONLY);
+ return fun->AddProperty(name, length, attr, kNonStrictMode);
+}
+
+
RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionSetPrototype) {
NoHandleAllocation ha;
ASSERT(args.length() == 2);
@@ -1993,6 +2073,61 @@
}
+RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionSetReadOnlyPrototype) {
+ NoHandleAllocation ha;
+ RUNTIME_ASSERT(args.length() == 1);
+ CONVERT_CHECKED(JSFunction, function, args[0]);
+
+ MaybeObject* maybe_name =
+ isolate->heap()->AllocateStringFromAscii(CStrVector("prototype"));
+ String* name;
+ if (!maybe_name->To(&name)) return maybe_name;
+
+ if (function->HasFastProperties()) {
+ // Construct a new field descriptor with updated attributes.
+ DescriptorArray* instance_desc = function->map()->instance_descriptors();
+ int index = instance_desc->Search(name);
+ ASSERT(index != DescriptorArray::kNotFound);
+ PropertyDetails details(instance_desc->GetDetails(index));
+ CallbacksDescriptor new_desc(name,
+ instance_desc->GetValue(index),
+ static_cast<PropertyAttributes>(details.attributes() | READ_ONLY),
+ details.index());
+ // Construct a new field descriptors array containing the new descriptor.
+ Object* descriptors_unchecked;
+ { MaybeObject* maybe_descriptors_unchecked =
+ instance_desc->CopyInsert(&new_desc, REMOVE_TRANSITIONS);
+ if (!maybe_descriptors_unchecked->ToObject(&descriptors_unchecked)) {
+ return maybe_descriptors_unchecked;
+ }
+ }
+ DescriptorArray* new_descriptors =
+ DescriptorArray::cast(descriptors_unchecked);
+ // Create a new map featuring the new field descriptors array.
+ Object* map_unchecked;
+ { MaybeObject* maybe_map_unchecked = function->map()->CopyDropDescriptors();
+ if (!maybe_map_unchecked->ToObject(&map_unchecked)) {
+ return maybe_map_unchecked;
+ }
+ }
+ Map* new_map = Map::cast(map_unchecked);
+ new_map->set_instance_descriptors(new_descriptors);
+ function->set_map(new_map);
+ } else { // Dictionary properties.
+ // Directly manipulate the property details.
+ int entry = function->property_dictionary()->FindEntry(name);
+ ASSERT(entry != StringDictionary::kNotFound);
+ PropertyDetails details = function->property_dictionary()->DetailsAt(entry);
+ PropertyDetails new_details(
+ static_cast<PropertyAttributes>(details.attributes() | READ_ONLY),
+ details.type(),
+ details.index());
+ function->property_dictionary()->DetailsAtPut(entry, new_details);
+ }
+ return function;
+}
+
+
RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionIsAPIFunction) {
NoHandleAllocation ha;
ASSERT(args.length() == 1);
@@ -2549,21 +2684,22 @@
void CompiledReplacement::Compile(Handle<String> replacement,
int capture_count,
int subject_length) {
- ASSERT(replacement->IsFlat());
- if (replacement->IsAsciiRepresentation()) {
+ {
AssertNoAllocation no_alloc;
- ParseReplacementPattern(&parts_,
- replacement->ToAsciiVector(),
- capture_count,
- subject_length);
- } else {
- ASSERT(replacement->IsTwoByteRepresentation());
- AssertNoAllocation no_alloc;
-
- ParseReplacementPattern(&parts_,
- replacement->ToUC16Vector(),
- capture_count,
- subject_length);
+ String::FlatContent content = replacement->GetFlatContent();
+ ASSERT(content.IsFlat());
+ if (content.IsAscii()) {
+ ParseReplacementPattern(&parts_,
+ content.ToAsciiVector(),
+ capture_count,
+ subject_length);
+ } else {
+ ASSERT(content.IsTwoByte());
+ ParseReplacementPattern(&parts_,
+ content.ToUC16Vector(),
+ capture_count,
+ subject_length);
+ }
}
Isolate* isolate = replacement->GetIsolate();
// Find substrings of replacement string and create them as String objects.
@@ -2935,34 +3071,32 @@
AssertNoAllocation no_heap_allocation; // ensure vectors stay valid
// Extract flattened substrings of cons strings before determining asciiness.
- String* seq_sub = *sub;
- if (seq_sub->IsConsString()) seq_sub = ConsString::cast(seq_sub)->first();
- String* seq_pat = *pat;
- if (seq_pat->IsConsString()) seq_pat = ConsString::cast(seq_pat)->first();
+ String::FlatContent seq_sub = sub->GetFlatContent();
+ String::FlatContent seq_pat = pat->GetFlatContent();
// dispatch on type of strings
- if (seq_pat->IsAsciiRepresentation()) {
- Vector<const char> pat_vector = seq_pat->ToAsciiVector();
- if (seq_sub->IsAsciiRepresentation()) {
+ if (seq_pat.IsAscii()) {
+ Vector<const char> pat_vector = seq_pat.ToAsciiVector();
+ if (seq_sub.IsAscii()) {
return SearchString(isolate,
- seq_sub->ToAsciiVector(),
+ seq_sub.ToAsciiVector(),
pat_vector,
start_index);
}
return SearchString(isolate,
- seq_sub->ToUC16Vector(),
+ seq_sub.ToUC16Vector(),
pat_vector,
start_index);
}
- Vector<const uc16> pat_vector = seq_pat->ToUC16Vector();
- if (seq_sub->IsAsciiRepresentation()) {
+ Vector<const uc16> pat_vector = seq_pat.ToUC16Vector();
+ if (seq_sub.IsAscii()) {
return SearchString(isolate,
- seq_sub->ToAsciiVector(),
+ seq_sub.ToAsciiVector(),
pat_vector,
start_index);
}
return SearchString(isolate,
- seq_sub->ToUC16Vector(),
+ seq_sub.ToUC16Vector(),
pat_vector,
start_index);
}
@@ -3045,29 +3179,31 @@
if (!sub->IsFlat()) FlattenString(sub);
if (!pat->IsFlat()) FlattenString(pat);
+ int position = -1;
AssertNoAllocation no_heap_allocation; // ensure vectors stay valid
- int position = -1;
+ String::FlatContent sub_content = sub->GetFlatContent();
+ String::FlatContent pat_content = pat->GetFlatContent();
- if (pat->IsAsciiRepresentation()) {
- Vector<const char> pat_vector = pat->ToAsciiVector();
- if (sub->IsAsciiRepresentation()) {
- position = StringMatchBackwards(sub->ToAsciiVector(),
+ if (pat_content.IsAscii()) {
+ Vector<const char> pat_vector = pat_content.ToAsciiVector();
+ if (sub_content.IsAscii()) {
+ position = StringMatchBackwards(sub_content.ToAsciiVector(),
pat_vector,
start_index);
} else {
- position = StringMatchBackwards(sub->ToUC16Vector(),
+ position = StringMatchBackwards(sub_content.ToUC16Vector(),
pat_vector,
start_index);
}
} else {
- Vector<const uc16> pat_vector = pat->ToUC16Vector();
- if (sub->IsAsciiRepresentation()) {
- position = StringMatchBackwards(sub->ToAsciiVector(),
+ Vector<const uc16> pat_vector = pat_content.ToUC16Vector();
+ if (sub_content.IsAscii()) {
+ position = StringMatchBackwards(sub_content.ToAsciiVector(),
pat_vector,
start_index);
} else {
- position = StringMatchBackwards(sub->ToUC16Vector(),
+ position = StringMatchBackwards(sub_content.ToUC16Vector(),
pat_vector,
start_index);
}
@@ -3285,36 +3421,38 @@
for (;;) { // Break when search complete.
builder->EnsureCapacity(kMaxBuilderEntriesPerRegExpMatch);
AssertNoAllocation no_gc;
- if (subject->IsAsciiRepresentation()) {
- Vector<const char> subject_vector = subject->ToAsciiVector();
- if (pattern->IsAsciiRepresentation()) {
+ String::FlatContent subject_content = subject->GetFlatContent();
+ String::FlatContent pattern_content = pattern->GetFlatContent();
+ if (subject_content.IsAscii()) {
+ Vector<const char> subject_vector = subject_content.ToAsciiVector();
+ if (pattern_content.IsAscii()) {
if (SearchStringMultiple(isolate,
subject_vector,
- pattern->ToAsciiVector(),
+ pattern_content.ToAsciiVector(),
*pattern,
builder,
&match_pos)) break;
} else {
if (SearchStringMultiple(isolate,
subject_vector,
- pattern->ToUC16Vector(),
+ pattern_content.ToUC16Vector(),
*pattern,
builder,
&match_pos)) break;
}
} else {
- Vector<const uc16> subject_vector = subject->ToUC16Vector();
- if (pattern->IsAsciiRepresentation()) {
+ Vector<const uc16> subject_vector = subject_content.ToUC16Vector();
+ if (pattern_content.IsAscii()) {
if (SearchStringMultiple(isolate,
subject_vector,
- pattern->ToAsciiVector(),
+ pattern_content.ToAsciiVector(),
*pattern,
builder,
&match_pos)) break;
} else {
if (SearchStringMultiple(isolate,
subject_vector,
- pattern->ToUC16Vector(),
+ pattern_content.ToUC16Vector(),
*pattern,
builder,
&match_pos)) break;
@@ -3547,7 +3685,7 @@
HandleScope handles(isolate);
CONVERT_ARG_CHECKED(String, subject, 1);
- if (!subject->IsFlat()) { FlattenString(subject); }
+ if (!subject->IsFlat()) FlattenString(subject);
CONVERT_ARG_CHECKED(JSRegExp, regexp, 0);
CONVERT_ARG_CHECKED(JSArray, last_match_info, 2);
CONVERT_ARG_CHECKED(JSArray, result_array, 3);
@@ -4498,7 +4636,7 @@
for (int i = 0; i < length; i++) {
jsproto->GetLocalPropertyNames(*names, next_copy_index);
next_copy_index += local_property_count[i];
- if (!GetHiddenProperties(jsproto, false)->IsUndefined()) {
+ if (jsproto->HasHiddenProperties()) {
proto_with_hidden_properties++;
}
if (i < length - 1) {
@@ -4750,7 +4888,9 @@
return isolate->heap()->boolean_symbol();
}
if (heap_obj->IsNull()) {
- return isolate->heap()->object_symbol();
+ return FLAG_harmony_typeof
+ ? isolate->heap()->null_symbol()
+ : isolate->heap()->object_symbol();
}
ASSERT(heap_obj->IsUndefined());
return isolate->heap()->undefined_symbol();
@@ -5311,12 +5451,14 @@
str = String::cast(flat);
ASSERT(str->IsFlat());
}
- if (str->IsTwoByteRepresentation()) {
+ String::FlatContent flat = str->GetFlatContent();
+ ASSERT(flat.IsFlat());
+ if (flat.IsTwoByte()) {
return QuoteJsonString<uc16, SeqTwoByteString, false>(isolate,
- str->ToUC16Vector());
+ flat.ToUC16Vector());
} else {
return QuoteJsonString<char, SeqAsciiString, false>(isolate,
- str->ToAsciiVector());
+ flat.ToAsciiVector());
}
}
@@ -5333,12 +5475,13 @@
str = String::cast(flat);
ASSERT(str->IsFlat());
}
- if (str->IsTwoByteRepresentation()) {
+ String::FlatContent flat = str->GetFlatContent();
+ if (flat.IsTwoByte()) {
return QuoteJsonString<uc16, SeqTwoByteString, true>(isolate,
- str->ToUC16Vector());
+ flat.ToUC16Vector());
} else {
return QuoteJsonString<char, SeqAsciiString, true>(isolate,
- str->ToAsciiVector());
+ flat.ToAsciiVector());
}
}
@@ -5373,14 +5516,16 @@
for (int i = 0; i < length; i++) {
if (i != 0) *(write_cursor++) = ',';
String* str = String::cast(array->get(i));
- if (str->IsTwoByteRepresentation()) {
+ String::FlatContent content = str->GetFlatContent();
+ ASSERT(content.IsFlat());
+ if (content.IsTwoByte()) {
write_cursor = WriteQuoteJsonString<Char, uc16>(isolate,
write_cursor,
- str->ToUC16Vector());
+ content.ToUC16Vector());
} else {
write_cursor = WriteQuoteJsonString<Char, char>(isolate,
write_cursor,
- str->ToAsciiVector());
+ content.ToAsciiVector());
}
}
*(write_cursor++) = ']';
@@ -5859,11 +6004,15 @@
// No allocation block.
{
- AssertNoAllocation nogc;
- if (subject->IsAsciiRepresentation()) {
- Vector<const char> subject_vector = subject->ToAsciiVector();
- if (pattern->IsAsciiRepresentation()) {
- Vector<const char> pattern_vector = pattern->ToAsciiVector();
+ AssertNoAllocation no_gc;
+ String::FlatContent subject_content = subject->GetFlatContent();
+ String::FlatContent pattern_content = pattern->GetFlatContent();
+ ASSERT(subject_content.IsFlat());
+ ASSERT(pattern_content.IsFlat());
+ if (subject_content.IsAscii()) {
+ Vector<const char> subject_vector = subject_content.ToAsciiVector();
+ if (pattern_content.IsAscii()) {
+ Vector<const char> pattern_vector = pattern_content.ToAsciiVector();
if (pattern_vector.length() == 1) {
FindAsciiStringIndices(subject_vector,
pattern_vector[0],
@@ -5879,22 +6028,22 @@
} else {
FindStringIndices(isolate,
subject_vector,
- pattern->ToUC16Vector(),
+ pattern_content.ToUC16Vector(),
&indices,
limit);
}
} else {
- Vector<const uc16> subject_vector = subject->ToUC16Vector();
+ Vector<const uc16> subject_vector = subject_content.ToUC16Vector();
if (pattern->IsAsciiRepresentation()) {
FindStringIndices(isolate,
subject_vector,
- pattern->ToAsciiVector(),
+ pattern_content.ToAsciiVector(),
&indices,
limit);
} else {
FindStringIndices(isolate,
subject_vector,
- pattern->ToUC16Vector(),
+ pattern_content.ToUC16Vector(),
&indices,
limit);
}
@@ -5943,7 +6092,7 @@
const char* chars,
FixedArray* elements,
int length) {
- AssertNoAllocation nogc;
+ AssertNoAllocation no_gc;
FixedArray* ascii_cache = heap->single_character_string_cache();
Object* undefined = heap->undefined_value();
int i;
@@ -5976,36 +6125,39 @@
CONVERT_ARG_CHECKED(String, s, 0);
CONVERT_NUMBER_CHECKED(uint32_t, limit, Uint32, args[1]);
- s->TryFlatten();
+ s = FlattenGetString(s);
const int length = static_cast<int>(Min<uint32_t>(s->length(), limit));
Handle<FixedArray> elements;
+ int position = 0;
if (s->IsFlat() && s->IsAsciiRepresentation()) {
+ // Try using cached chars where possible.
Object* obj;
{ MaybeObject* maybe_obj =
isolate->heap()->AllocateUninitializedFixedArray(length);
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
}
elements = Handle<FixedArray>(FixedArray::cast(obj), isolate);
-
- Vector<const char> chars = s->ToAsciiVector();
- // Note, this will initialize all elements (not only the prefix)
- // to prevent GC from seeing partially initialized array.
- int num_copied_from_cache = CopyCachedAsciiCharsToArray(isolate->heap(),
- chars.start(),
- *elements,
- length);
-
- for (int i = num_copied_from_cache; i < length; ++i) {
- Handle<Object> str = LookupSingleCharacterStringFromCode(chars[i]);
- elements->set(i, *str);
+ String::FlatContent content = s->GetFlatContent();
+ if (content.IsAscii()) {
+ Vector<const char> chars = content.ToAsciiVector();
+ // Note, this will initialize all elements (not only the prefix)
+ // to prevent GC from seeing partially initialized array.
+ position = CopyCachedAsciiCharsToArray(isolate->heap(),
+ chars.start(),
+ *elements,
+ length);
+ } else {
+ MemsetPointer(elements->data_start(),
+ isolate->heap()->undefined_value(),
+ length);
}
} else {
elements = isolate->factory()->NewFixedArray(length);
- for (int i = 0; i < length; ++i) {
- Handle<Object> str = LookupSingleCharacterStringFromCode(s->Get(i));
- elements->set(i, *str);
- }
+ }
+ for (int i = position; i < length; ++i) {
+ Handle<Object> str = LookupSingleCharacterStringFromCode(s->Get(i));
+ elements->set(i, *str);
}
#ifdef DEBUG
@@ -6509,7 +6661,7 @@
// Find total length of join result.
int string_length = 0;
- bool is_ascii = true;
+ bool is_ascii = separator->IsAsciiRepresentation();
int max_string_length = SeqAsciiString::kMaxLength;
bool overflow = false;
CONVERT_NUMBER_CHECKED(int, elements_length,
@@ -6816,22 +6968,24 @@
equal_prefix_result = Smi::FromInt(LESS);
}
int r;
- if (x->IsAsciiRepresentation()) {
- Vector<const char> x_chars = x->ToAsciiVector();
- if (y->IsAsciiRepresentation()) {
- Vector<const char> y_chars = y->ToAsciiVector();
+ String::FlatContent x_content = x->GetFlatContent();
+ String::FlatContent y_content = y->GetFlatContent();
+ if (x_content.IsAscii()) {
+ Vector<const char> x_chars = x_content.ToAsciiVector();
+ if (y_content.IsAscii()) {
+ Vector<const char> y_chars = y_content.ToAsciiVector();
r = CompareChars(x_chars.start(), y_chars.start(), prefix_length);
} else {
- Vector<const uc16> y_chars = y->ToUC16Vector();
+ Vector<const uc16> y_chars = y_content.ToUC16Vector();
r = CompareChars(x_chars.start(), y_chars.start(), prefix_length);
}
} else {
- Vector<const uc16> x_chars = x->ToUC16Vector();
- if (y->IsAsciiRepresentation()) {
- Vector<const char> y_chars = y->ToAsciiVector();
+ Vector<const uc16> x_chars = x_content.ToUC16Vector();
+ if (y_content.IsAscii()) {
+ Vector<const char> y_chars = y_content.ToAsciiVector();
r = CompareChars(x_chars.start(), y_chars.start(), prefix_length);
} else {
- Vector<const uc16> y_chars = y->ToUC16Vector();
+ Vector<const uc16> y_chars = y_content.ToUC16Vector();
r = CompareChars(x_chars.start(), y_chars.start(), prefix_length);
}
}
@@ -8211,6 +8365,30 @@
}
+RUNTIME_FUNCTION(MaybeObject*, Runtime_PushBlockContext) {
+ NoHandleAllocation ha;
+ ASSERT(args.length() == 2);
+ SerializedScopeInfo* scope_info = SerializedScopeInfo::cast(args[0]);
+ JSFunction* function;
+ if (args[1]->IsSmi()) {
+ // A smi sentinel indicates a context nested inside global code rather
+ // than some function. There is a canonical empty function that can be
+ // gotten from the global context.
+ function = isolate->context()->global_context()->closure();
+ } else {
+ function = JSFunction::cast(args[1]);
+ }
+ Context* context;
+ MaybeObject* maybe_context =
+ isolate->heap()->AllocateBlockContext(function,
+ isolate->context(),
+ scope_info);
+ if (!maybe_context->To(&context)) return maybe_context;
+ isolate->set_context(context);
+ return context;
+}
+
+
RUNTIME_FUNCTION(MaybeObject*, Runtime_DeleteContextSlot) {
HandleScope scope(isolate);
ASSERT(args.length() == 2);
@@ -8221,7 +8399,12 @@
int index;
PropertyAttributes attributes;
ContextLookupFlags flags = FOLLOW_CHAINS;
- Handle<Object> holder = context->Lookup(name, flags, &index, &attributes);
+ BindingFlags binding_flags;
+ Handle<Object> holder = context->Lookup(name,
+ flags,
+ &index,
+ &attributes,
+ &binding_flags);
// If the slot was not found the result is true.
if (holder.is_null()) {
@@ -8323,7 +8506,12 @@
int index;
PropertyAttributes attributes;
ContextLookupFlags flags = FOLLOW_CHAINS;
- Handle<Object> holder = context->Lookup(name, flags, &index, &attributes);
+ BindingFlags binding_flags;
+ Handle<Object> holder = context->Lookup(name,
+ flags,
+ &index,
+ &attributes,
+ &binding_flags);
// If the index is non-negative, the slot has been found in a local
// variable or a parameter. Read it from the context object or the
@@ -8339,7 +8527,17 @@
MaybeObject* value = (holder->IsContext())
? Context::cast(*holder)->get(index)
: JSObject::cast(*holder)->GetElement(index);
- return MakePair(Unhole(isolate->heap(), value, attributes), *receiver);
+ // Check for uninitialized bindings.
+ if (holder->IsContext() &&
+ binding_flags == MUTABLE_CHECK_INITIALIZED &&
+ value->IsTheHole()) {
+ Handle<Object> reference_error =
+ isolate->factory()->NewReferenceError("not_defined",
+ HandleVector(&name, 1));
+ return MakePair(isolate->Throw(*reference_error), NULL);
+ } else {
+ return MakePair(Unhole(isolate->heap(), value, attributes), *receiver);
+ }
}
// If the holder is found, we read the property from it.
@@ -8405,14 +8603,27 @@
int index;
PropertyAttributes attributes;
ContextLookupFlags flags = FOLLOW_CHAINS;
- Handle<Object> holder = context->Lookup(name, flags, &index, &attributes);
+ BindingFlags binding_flags;
+ Handle<Object> holder = context->Lookup(name,
+ flags,
+ &index,
+ &attributes,
+ &binding_flags);
if (index >= 0) {
if (holder->IsContext()) {
+ Handle<Context> context = Handle<Context>::cast(holder);
+ if (binding_flags == MUTABLE_CHECK_INITIALIZED &&
+ context->get(index)->IsTheHole()) {
+ Handle<Object> error =
+ isolate->factory()->NewReferenceError("not_defined",
+ HandleVector(&name, 1));
+ return isolate->Throw(*error);
+ }
// Ignore if read_only variable.
if ((attributes & READ_ONLY) == 0) {
// Context is a fixed array and set cannot fail.
- Context::cast(*holder)->set(index, *value);
+ context->set(index, *value);
} else if (strict_mode == kStrictMode) {
// Setting read only property in strict mode.
Handle<Object> error =
@@ -8688,13 +8899,14 @@
FixedArray* output_array = FixedArray::cast(output->elements());
RUNTIME_ASSERT(output_array->length() >= DateParser::OUTPUT_SIZE);
bool result;
- if (str->IsAsciiRepresentation()) {
- result = DateParser::Parse(str->ToAsciiVector(),
+ String::FlatContent str_content = str->GetFlatContent();
+ if (str_content.IsAscii()) {
+ result = DateParser::Parse(str_content.ToAsciiVector(),
output_array,
isolate->unicode_cache());
} else {
- ASSERT(str->IsTwoByteRepresentation());
- result = DateParser::Parse(str->ToUC16Vector(),
+ ASSERT(str_content.IsTwoByte());
+ result = DateParser::Parse(str_content.ToUC16Vector(),
output_array,
isolate->unicode_cache());
}
@@ -8863,10 +9075,13 @@
// it is bound in the global context.
int index = -1;
PropertyAttributes attributes = ABSENT;
+ BindingFlags binding_flags;
while (true) {
receiver = context->Lookup(isolate->factory()->eval_symbol(),
FOLLOW_PROTOTYPE_CHAIN,
- &index, &attributes);
+ &index,
+ &attributes,
+ &binding_flags);
// Stop search when eval is found or when the global context is
// reached.
if (attributes != ABSENT || context->IsGlobalContext()) break;
@@ -9166,13 +9381,13 @@
if (elements_are_guaranteed_smis) {
for (uint32_t j = 0; j < len; j++) {
HandleScope loop_scope;
- Handle<Smi> e(Smi::FromInt(static_cast<int>(array->get(j))));
+ Handle<Smi> e(Smi::FromInt(static_cast<int>(array->get_scalar(j))));
visitor->visit(j, e);
}
} else {
for (uint32_t j = 0; j < len; j++) {
HandleScope loop_scope;
- int64_t val = static_cast<int64_t>(array->get(j));
+ int64_t val = static_cast<int64_t>(array->get_scalar(j));
if (Smi::IsValid(static_cast<intptr_t>(val))) {
Handle<Smi> e(Smi::FromInt(static_cast<int>(val)));
visitor->visit(j, e);
@@ -9186,7 +9401,7 @@
} else {
for (uint32_t j = 0; j < len; j++) {
HandleScope loop_scope(isolate);
- Handle<Object> e = isolate->factory()->NewNumber(array->get(j));
+ Handle<Object> e = isolate->factory()->NewNumber(array->get_scalar(j));
visitor->visit(j, e);
}
}
@@ -9372,7 +9587,7 @@
Handle<ExternalPixelArray> pixels(ExternalPixelArray::cast(
receiver->elements()));
for (uint32_t j = 0; j < length; j++) {
- Handle<Smi> e(Smi::FromInt(pixels->get(j)));
+ Handle<Smi> e(Smi::FromInt(pixels->get_scalar(j)));
visitor->visit(j, e);
}
break;
@@ -9551,11 +9766,14 @@
ASSERT(args.length() == 2);
CONVERT_CHECKED(JSArray, from, args[0]);
CONVERT_CHECKED(JSArray, to, args[1]);
- HeapObject* new_elements = from->elements();
+ FixedArrayBase* new_elements = from->elements();
MaybeObject* maybe_new_map;
if (new_elements->map() == isolate->heap()->fixed_array_map() ||
new_elements->map() == isolate->heap()->fixed_cow_array_map()) {
maybe_new_map = to->map()->GetFastElementsMap();
+ } else if (new_elements->map() ==
+ isolate->heap()->fixed_double_array_map()) {
+ maybe_new_map = to->map()->GetFastDoubleElementsMap();
} else {
maybe_new_map = to->map()->GetSlowElementsMap();
}
@@ -9643,12 +9861,13 @@
}
return *isolate->factory()->NewJSArrayWithElements(keys);
} else {
- ASSERT(array->HasFastElements());
+ ASSERT(array->HasFastElements() || array->HasFastDoubleElements());
Handle<FixedArray> single_interval = isolate->factory()->NewFixedArray(2);
// -1 means start of array.
single_interval->set(0, Smi::FromInt(-1));
+ FixedArrayBase* elements = FixedArrayBase::cast(array->elements());
uint32_t actual_length =
- static_cast<uint32_t>(FixedArray::cast(array->elements())->length());
+ static_cast<uint32_t>(elements->length());
uint32_t min_length = actual_length < length ? actual_length : length;
Handle<Object> length_object =
isolate->factory()->NewNumber(static_cast<double>(min_length));
@@ -10532,6 +10751,34 @@
}
+// Create a plain JSObject which materializes the block scope for the specified
+// block context.
+static Handle<JSObject> MaterializeBlockScope(
+ Isolate* isolate,
+ Handle<Context> context) {
+ ASSERT(context->IsBlockContext());
+ Handle<SerializedScopeInfo> serialized_scope_info(
+ SerializedScopeInfo::cast(context->extension()));
+ ScopeInfo<> scope_info(*serialized_scope_info);
+
+ // Allocate and initialize a JSObject with all the arguments, stack locals
+ // heap locals and extension properties of the debugged function.
+ Handle<JSObject> block_scope =
+ isolate->factory()->NewJSObject(isolate->object_function());
+
+ // Fill all context locals.
+ if (scope_info.number_of_context_slots() > Context::MIN_CONTEXT_SLOTS) {
+ if (!CopyContextLocalsToScopeObject(isolate,
+ serialized_scope_info, scope_info,
+ context, block_scope)) {
+ return Handle<JSObject>();
+ }
+ }
+
+ return block_scope;
+}
+
+
// Iterate over the actual scopes visible from a stack frame. All scopes are
// backed by an actual context except the local scope, which is inserted
// "artifically" in the context chain.
@@ -10542,7 +10789,8 @@
ScopeTypeLocal,
ScopeTypeWith,
ScopeTypeClosure,
- ScopeTypeCatch
+ ScopeTypeCatch,
+ ScopeTypeBlock
};
ScopeIterator(Isolate* isolate,
@@ -10568,8 +10816,10 @@
} else if (context_->IsFunctionContext()) {
at_local_ = true;
} else if (context_->closure() != *function_) {
- // The context_ is a with or catch block from the outer function.
- ASSERT(context_->IsWithContext() || context_->IsCatchContext());
+ // The context_ is a block or with or catch block from the outer function.
+ ASSERT(context_->IsWithContext() ||
+ context_->IsCatchContext() ||
+ context_->IsBlockContext());
at_local_ = true;
}
}
@@ -10624,6 +10874,9 @@
if (context_->IsCatchContext()) {
return ScopeTypeCatch;
}
+ if (context_->IsBlockContext()) {
+ return ScopeTypeBlock;
+ }
ASSERT(context_->IsWithContext());
return ScopeTypeWith;
}
@@ -10644,6 +10897,8 @@
case ScopeIterator::ScopeTypeClosure:
// Materialize the content of the closure scope into a JSObject.
return MaterializeClosure(isolate_, CurrentContext());
+ case ScopeIterator::ScopeTypeBlock:
+ return MaterializeBlockScope(isolate_, CurrentContext());
}
UNREACHABLE();
return Handle<JSObject>();
@@ -11200,7 +11455,18 @@
new_previous,
name,
thrown_object);
+ } else if (current->IsBlockContext()) {
+ Handle<SerializedScopeInfo> scope_info(
+ SerializedScopeInfo::cast(current->extension()));
+ new_current =
+ isolate->factory()->NewBlockContext(function, new_previous, scope_info);
+ // Copy context slots.
+ int num_context_slots = scope_info->NumberOfContextSlots();
+ for (int i = Context::MIN_CONTEXT_SLOTS; i < num_context_slots; ++i) {
+ new_current->set(i, current->get(i));
+ }
} else {
+ ASSERT(current->IsWithContext());
Handle<JSObject> extension(JSObject::cast(current->extension()));
new_current =
isolate->factory()->NewWithContext(function, new_previous, extension);
@@ -11338,7 +11604,11 @@
context->set_extension(*local_scope);
// Copy any with contexts present and chain them in front of this context.
Handle<Context> frame_context(Context::cast(frame->context()));
- Handle<Context> function_context(frame_context->declaration_context());
+ Handle<Context> function_context;
+ // Get the function's context if it has one.
+ if (scope_info->HasHeapAllocatedLocals()) {
+ function_context = Handle<Context>(frame_context->declaration_context());
+ }
context = CopyWithContextChain(isolate, go_between, frame_context, context);
if (additional_context->IsJSObject()) {
@@ -12619,7 +12889,9 @@
ASSERT(args.length() == 2);
CONVERT_CHECKED(String, format, args[0]);
CONVERT_CHECKED(JSArray, elms, args[1]);
- Vector<const char> chars = format->ToAsciiVector();
+ String::FlatContent format_content = format->GetFlatContent();
+ RUNTIME_ASSERT(format_content.IsAscii());
+ Vector<const char> chars = format_content.ToAsciiVector();
LOGGER->LogRuntime(chars, elms);
return isolate->heap()->undefined_value();
}
diff --git a/src/runtime.h b/src/runtime.h
index 9a2cf1d..91a19df 100644
--- a/src/runtime.h
+++ b/src/runtime.h
@@ -65,7 +65,7 @@
F(ToSlowProperties, 1, 1) \
F(FinishArrayPrototypeSetup, 1, 1) \
F(SpecialArrayFunctions, 1, 1) \
- F(GetGlobalReceiver, 0, 1) \
+ F(GetDefaultReceiver, 1, 1) \
\
F(GetPrototype, 1, 1) \
F(IsInPrototypeChain, 2, 1) \
@@ -209,7 +209,9 @@
/* Reflection */ \
F(FunctionSetInstanceClassName, 2, 1) \
F(FunctionSetLength, 2, 1) \
+ F(BoundFunctionSetLength, 2, 1) \
F(FunctionSetPrototype, 2, 1) \
+ F(FunctionSetReadOnlyPrototype, 1, 1) \
F(FunctionGetName, 1, 1) \
F(FunctionSetName, 2, 1) \
F(FunctionNameShouldPrintAsAnonymous, 1, 1) \
@@ -288,6 +290,11 @@
F(GetHandler, 1, 1) \
F(Fix, 1, 1) \
\
+ /* Harmony weakmaps */ \
+ F(WeakMapInitialize, 1, 1) \
+ F(WeakMapGet, 2, 1) \
+ F(WeakMapSet, 3, 1) \
+ \
/* Statements */ \
F(NewClosure, 3, 1) \
F(NewObject, 1, 1) \
@@ -303,6 +310,7 @@
F(NewFunctionContext, 1, 1) \
F(PushWithContext, 2, 1) \
F(PushCatchContext, 3, 1) \
+ F(PushBlockContext, 2, 1) \
F(DeleteContextSlot, 2, 1) \
F(LoadContextSlot, 2, 2) \
F(LoadContextSlotNoReferenceError, 2, 2) \
@@ -483,8 +491,7 @@
F(IsRegExpEquivalent, 2, 1) \
F(HasCachedArrayIndex, 1, 1) \
F(GetCachedArrayIndex, 1, 1) \
- F(FastAsciiArrayJoin, 2, 1) \
- F(IsNativeOrStrictMode, 1, 1)
+ F(FastAsciiArrayJoin, 2, 1)
// ----------------------------------------------------------------------------
diff --git a/src/scanner-base.cc b/src/scanner-base.cc
index 16f8db5..62eee1a 100644
--- a/src/scanner-base.cc
+++ b/src/scanner-base.cc
@@ -41,12 +41,12 @@
: unicode_cache_(unicode_cache) { }
-uc32 Scanner::ScanHexEscape(uc32 c, int length) {
- ASSERT(length <= 4); // prevent overflow
+uc32 Scanner::ScanHexNumber(int expected_length) {
+ ASSERT(expected_length <= 4); // prevent overflow
- uc32 digits[4];
+ uc32 digits[4] = { 0, 0, 0, 0 };
uc32 x = 0;
- for (int i = 0; i < length; i++) {
+ for (int i = 0; i < expected_length; i++) {
digits[i] = c0_;
int d = HexValue(c0_);
if (d < 0) {
@@ -54,12 +54,11 @@
// should be illegal, but other JS VMs just return the
// non-escaped version of the original character.
- // Push back digits read, except the last one (in c0_).
+ // Push back digits that we have advanced past.
for (int j = i-1; j >= 0; j--) {
PushBack(digits[j]);
}
- // Notice: No handling of error - treat it as "\u"->"u".
- return c;
+ return -1;
}
x = x * 16 + d;
Advance();
@@ -74,7 +73,9 @@
// JavaScriptScanner
JavaScriptScanner::JavaScriptScanner(UnicodeCache* scanner_contants)
- : Scanner(scanner_contants), octal_pos_(Location::invalid()) { }
+ : Scanner(scanner_contants),
+ octal_pos_(Location::invalid()),
+ harmony_block_scoping_(false) { }
void JavaScriptScanner::Initialize(UC16CharacterStream* source) {
@@ -89,10 +90,158 @@
Scan();
}
+
+// Ensure that tokens can be stored in a byte.
+STATIC_ASSERT(Token::NUM_TOKENS <= 0x100);
+
+// Table of one-character tokens, by character (0x00..0x7f only).
+static const byte one_char_tokens[] = {
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::LPAREN, // 0x28
+ Token::RPAREN, // 0x29
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::COMMA, // 0x2c
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::COLON, // 0x3a
+ Token::SEMICOLON, // 0x3b
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::CONDITIONAL, // 0x3f
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::LBRACK, // 0x5b
+ Token::ILLEGAL,
+ Token::RBRACK, // 0x5d
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::ILLEGAL,
+ Token::LBRACE, // 0x7b
+ Token::ILLEGAL,
+ Token::RBRACE, // 0x7d
+ Token::BIT_NOT, // 0x7e
+ Token::ILLEGAL
+};
+
+
Token::Value JavaScriptScanner::Next() {
current_ = next_;
has_line_terminator_before_next_ = false;
has_multiline_comment_before_next_ = false;
+ if (static_cast<unsigned>(c0_) <= 0x7f) {
+ Token::Value token = static_cast<Token::Value>(one_char_tokens[c0_]);
+ if (token != Token::ILLEGAL) {
+ int pos = source_pos();
+ next_.token = token;
+ next_.location.beg_pos = pos;
+ next_.location.end_pos = pos + 1;
+ Advance();
+ return current_.token;
+ }
+ }
Scan();
return current_.token;
}
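The fast path above dispatches single-character tokens through the 128-entry one_char_tokens table before falling back to Scan(). A self-contained sketch of the same dispatch, with a made-up token enum and only a handful of entries filled in:

    // Sketch only: MiniToken and the table contents are invented for
    // illustration; the real table covers every ASCII code point.
    #include <cassert>
    #include <cstdint>

    enum MiniToken : uint8_t { kIllegal, kLParen, kRParen, kComma, kSemicolon };

    static MiniToken one_char_tokens[128];  // zero-initialized to kIllegal

    static void InitTable() {
      one_char_tokens['('] = kLParen;
      one_char_tokens[')'] = kRParen;
      one_char_tokens[','] = kComma;
      one_char_tokens[';'] = kSemicolon;
    }

    static MiniToken NextToken(int c0) {
      if (static_cast<unsigned>(c0) <= 0x7f) {  // ASCII only
        MiniToken t = one_char_tokens[c0];
        if (t != kIllegal) return t;            // fast path: one table load
      }
      return kIllegal;  // stands in for the full Scan() slow path
    }

    int main() {
      InitTable();
      assert(NextToken('(') == kLParen);
      assert(NextToken(';') == kSemicolon);
      assert(NextToken('a') == kIllegal);  // identifiers take the slow path
      return 0;
    }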
@@ -171,7 +320,7 @@
Advance();
while (c0_ >= 0) {
- char ch = c0_;
+ uc32 ch = c0_;
Advance();
if (unicode_cache_->IsLineTerminator(ch)) {
// Following ECMA-262, section 7.4, a comment containing
@@ -490,9 +639,17 @@
case 'n' : c = '\n'; break;
case 'r' : c = '\r'; break;
case 't' : c = '\t'; break;
- case 'u' : c = ScanHexEscape(c, 4); break;
+ case 'u' : {
+ c = ScanHexNumber(4);
+ if (c < 0) c = 'u';
+ break;
+ }
case 'v' : c = '\v'; break;
- case 'x' : c = ScanHexEscape(c, 2); break;
+ case 'x' : {
+ c = ScanHexNumber(2);
+ if (c < 0) c = 'x';
+ break;
+ }
case '0' : // fall through
case '1' : // fall through
case '2' : // fall through
@@ -652,25 +809,133 @@
uc32 JavaScriptScanner::ScanIdentifierUnicodeEscape() {
Advance();
- if (c0_ != 'u') return unibrow::Utf8::kBadChar;
+ if (c0_ != 'u') return -1;
Advance();
- uc32 c = ScanHexEscape('u', 4);
- // We do not allow a unicode escape sequence to start another
- // unicode escape sequence.
- if (c == '\\') return unibrow::Utf8::kBadChar;
- return c;
+ uc32 result = ScanHexNumber(4);
+ if (result < 0) PushBack('u');
+ return result;
+}
+
+
+// ----------------------------------------------------------------------------
+// Keyword Matcher
+
+#define KEYWORDS(KEYWORD_GROUP, KEYWORD) \
+ KEYWORD_GROUP('b') \
+ KEYWORD("break", Token::BREAK) \
+ KEYWORD_GROUP('c') \
+ KEYWORD("case", Token::CASE) \
+ KEYWORD("catch", Token::CATCH) \
+ KEYWORD("class", Token::FUTURE_RESERVED_WORD) \
+ KEYWORD("const", Token::CONST) \
+ KEYWORD("continue", Token::CONTINUE) \
+ KEYWORD_GROUP('d') \
+ KEYWORD("debugger", Token::DEBUGGER) \
+ KEYWORD("default", Token::DEFAULT) \
+ KEYWORD("delete", Token::DELETE) \
+ KEYWORD("do", Token::DO) \
+ KEYWORD_GROUP('e') \
+ KEYWORD("else", Token::ELSE) \
+ KEYWORD("enum", Token::FUTURE_RESERVED_WORD) \
+ KEYWORD("export", Token::FUTURE_RESERVED_WORD) \
+ KEYWORD("extends", Token::FUTURE_RESERVED_WORD) \
+ KEYWORD_GROUP('f') \
+ KEYWORD("false", Token::FALSE_LITERAL) \
+ KEYWORD("finally", Token::FINALLY) \
+ KEYWORD("for", Token::FOR) \
+ KEYWORD("function", Token::FUNCTION) \
+ KEYWORD_GROUP('i') \
+ KEYWORD("if", Token::IF) \
+ KEYWORD("implements", Token::FUTURE_STRICT_RESERVED_WORD) \
+ KEYWORD("import", Token::FUTURE_RESERVED_WORD) \
+ KEYWORD("in", Token::IN) \
+ KEYWORD("instanceof", Token::INSTANCEOF) \
+ KEYWORD("interface", Token::FUTURE_STRICT_RESERVED_WORD) \
+ KEYWORD_GROUP('l') \
+ KEYWORD("let", harmony_block_scoping \
+ ? Token::LET : Token::FUTURE_STRICT_RESERVED_WORD) \
+ KEYWORD_GROUP('n') \
+ KEYWORD("new", Token::NEW) \
+ KEYWORD("null", Token::NULL_LITERAL) \
+ KEYWORD_GROUP('p') \
+ KEYWORD("package", Token::FUTURE_STRICT_RESERVED_WORD) \
+ KEYWORD("private", Token::FUTURE_STRICT_RESERVED_WORD) \
+ KEYWORD("protected", Token::FUTURE_STRICT_RESERVED_WORD) \
+ KEYWORD("public", Token::FUTURE_STRICT_RESERVED_WORD) \
+ KEYWORD_GROUP('r') \
+ KEYWORD("return", Token::RETURN) \
+ KEYWORD_GROUP('s') \
+ KEYWORD("static", Token::FUTURE_STRICT_RESERVED_WORD) \
+ KEYWORD("super", Token::FUTURE_RESERVED_WORD) \
+ KEYWORD("switch", Token::SWITCH) \
+ KEYWORD_GROUP('t') \
+ KEYWORD("this", Token::THIS) \
+ KEYWORD("throw", Token::THROW) \
+ KEYWORD("true", Token::TRUE_LITERAL) \
+ KEYWORD("try", Token::TRY) \
+ KEYWORD("typeof", Token::TYPEOF) \
+ KEYWORD_GROUP('v') \
+ KEYWORD("var", Token::VAR) \
+ KEYWORD("void", Token::VOID) \
+ KEYWORD_GROUP('w') \
+ KEYWORD("while", Token::WHILE) \
+ KEYWORD("with", Token::WITH) \
+ KEYWORD_GROUP('y') \
+ KEYWORD("yield", Token::FUTURE_STRICT_RESERVED_WORD)
+
+
+static Token::Value KeywordOrIdentifierToken(const char* input,
+ int input_length,
+ bool harmony_block_scoping) {
+ ASSERT(input_length >= 1);
+ const int kMinLength = 2;
+ const int kMaxLength = 10;
+ if (input_length < kMinLength || input_length > kMaxLength) {
+ return Token::IDENTIFIER;
+ }
+ switch (input[0]) {
+ default:
+#define KEYWORD_GROUP_CASE(ch) \
+ break; \
+ case ch:
+#define KEYWORD(keyword, token) \
+ { \
+ /* 'keyword' is a char array, so sizeof(keyword) is */ \
+ /* strlen(keyword) plus 1 for the NUL char. */ \
+ const int keyword_length = sizeof(keyword) - 1; \
+ STATIC_ASSERT(keyword_length >= kMinLength); \
+ STATIC_ASSERT(keyword_length <= kMaxLength); \
+ if (input_length == keyword_length && \
+ input[1] == keyword[1] && \
+ (keyword_length <= 2 || input[2] == keyword[2]) && \
+ (keyword_length <= 3 || input[3] == keyword[3]) && \
+ (keyword_length <= 4 || input[4] == keyword[4]) && \
+ (keyword_length <= 5 || input[5] == keyword[5]) && \
+ (keyword_length <= 6 || input[6] == keyword[6]) && \
+ (keyword_length <= 7 || input[7] == keyword[7]) && \
+ (keyword_length <= 8 || input[8] == keyword[8]) && \
+ (keyword_length <= 9 || input[9] == keyword[9])) { \
+ return token; \
+ } \
+ }
+ KEYWORDS(KEYWORD_GROUP_CASE, KEYWORD)
+ }
+ return Token::IDENTIFIER;
}
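KeywordOrIdentifierToken replaces the old incremental KeywordMatcher with a single switch on the first character plus compile-time length checks (sizeof on the keyword literal). A standalone sketch of the idea, with my own token names and std::memcmp standing in for the unrolled per-character compares:

    // Sketch only: MiniToken and MINI_KEYWORDS are invented; the real list
    // covers every JavaScript keyword.
    #include <cassert>
    #include <cstring>

    enum MiniToken { kIdentifier, kBreak, kCase, kCatch, kConst };

    #define MINI_KEYWORDS(KEYWORD_GROUP, KEYWORD) \
      KEYWORD_GROUP('b')                          \
      KEYWORD("break", kBreak)                    \
      KEYWORD_GROUP('c')                          \
      KEYWORD("case", kCase)                      \
      KEYWORD("catch", kCatch)                    \
      KEYWORD("const", kConst)

    static MiniToken KeywordOrIdentifier(const char* input, int input_length) {
      switch (input[0]) {
        default:
    #define KEYWORD_GROUP_CASE(ch) \
          break;                   \
        case ch:
    #define KEYWORD(keyword, token)                                    \
          /* sizeof(keyword) - 1 is the literal's length, known at */  \
          /* compile time. */                                          \
          if (input_length == static_cast<int>(sizeof(keyword)) - 1 && \
              std::memcmp(input, keyword, input_length) == 0) {        \
            return token;                                              \
          }
          MINI_KEYWORDS(KEYWORD_GROUP_CASE, KEYWORD)
    #undef KEYWORD
    #undef KEYWORD_GROUP_CASE
      }
      return kIdentifier;
    }

    int main() {
      assert(KeywordOrIdentifier("catch", 5) == kCatch);
      assert(KeywordOrIdentifier("car", 3) == kIdentifier);     // same group, no match
      assert(KeywordOrIdentifier("delete", 6) == kIdentifier);  // group not listed here
      return 0;
    }

The payoff is that keyword recognition happens once, on the completed ASCII literal, instead of being threaded through every character of every identifier scan.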
Token::Value JavaScriptScanner::ScanIdentifierOrKeyword() {
ASSERT(unicode_cache_->IsIdentifierStart(c0_));
LiteralScope literal(this);
- KeywordMatcher keyword_match;
// Scan identifier start character.
if (c0_ == '\\') {
uc32 c = ScanIdentifierUnicodeEscape();
// Only allow legal identifier start characters.
- if (!unicode_cache_->IsIdentifierStart(c)) return Token::ILLEGAL;
+ if (c < 0 ||
+ c == '\\' || // No recursive escapes.
+ !unicode_cache_->IsIdentifierStart(c)) {
+ return Token::ILLEGAL;
+ }
AddLiteralChar(c);
return ScanIdentifierSuffix(&literal);
}
@@ -678,9 +943,6 @@
uc32 first_char = c0_;
Advance();
AddLiteralChar(first_char);
- if (!keyword_match.AddChar(first_char)) {
- return ScanIdentifierSuffix(&literal);
- }
// Scan the rest of the identifier characters.
while (unicode_cache_->IsIdentifierPart(c0_)) {
@@ -688,14 +950,22 @@
uc32 next_char = c0_;
Advance();
AddLiteralChar(next_char);
- if (keyword_match.AddChar(next_char)) continue;
+ continue;
}
- // Fallthrough if no loner able to complete keyword.
+ // Fallthrough if no longer able to complete keyword.
return ScanIdentifierSuffix(&literal);
}
+
literal.Complete();
- return keyword_match.token();
+ if (next_.literal_chars->is_ascii()) {
+ Vector<const char> chars = next_.literal_chars->ascii_literal();
+ return KeywordOrIdentifierToken(chars.start(),
+ chars.length(),
+ harmony_block_scoping_);
+ }
+
+ return Token::IDENTIFIER;
}
@@ -705,7 +975,11 @@
if (c0_ == '\\') {
uc32 c = ScanIdentifierUnicodeEscape();
// Only allow legal identifier part characters.
- if (!unicode_cache_->IsIdentifierPart(c)) return Token::ILLEGAL;
+ if (c < 0 ||
+ c == '\\' ||
+ !unicode_cache_->IsIdentifierPart(c)) {
+ return Token::ILLEGAL;
+ }
AddLiteralChar(c);
} else {
AddLiteralChar(c0_);
@@ -731,8 +1005,9 @@
// the scanner should pass uninterpreted bodies to the RegExp
// constructor.
LiteralScope literal(this);
- if (seen_equal)
+ if (seen_equal) {
AddLiteralChar('=');
+ }
while (c0_ != '/' || in_character_class) {
if (unicode_cache_->IsLineTerminator(c0_) || c0_ < 0) return false;
@@ -764,20 +1039,47 @@
}
+bool JavaScriptScanner::ScanLiteralUnicodeEscape() {
+ ASSERT(c0_ == '\\');
+ uc32 chars_read[6] = {'\\', 'u', 0, 0, 0, 0};
+ Advance();
+ int i = 1;
+ if (c0_ == 'u') {
+ i++;
+ while (i < 6) {
+ Advance();
+ if (!IsHexDigit(c0_)) break;
+ chars_read[i] = c0_;
+ i++;
+ }
+ }
+ if (i < 6) {
+ // Incomplete escape. Undo all advances and return false.
+ while (i > 0) {
+ i--;
+ PushBack(chars_read[i]);
+ }
+ return false;
+ }
+ // Complete escape. Add all chars to current literal buffer.
+ for (int i = 0; i < 6; i++) {
+ AddLiteralChar(chars_read[i]);
+ }
+ return true;
+}
+
+
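ScanLiteralUnicodeEscape is all-or-nothing: either the full \uXXXX sequence is appended to the literal uninterpreted, or every character read is pushed back so ScanRegExpFlags can stop cleanly. A simplified standalone sketch, where an index into a string plays the role of the character stream and its push-back:

    // Sketch only: operates on a plain string and returns the number of
    // characters consumed (0 on failure) instead of using PushBack().
    #include <cassert>
    #include <cctype>
    #include <string>

    static int ScanLiteralUnicodeEscape(const std::string& src, size_t pos,
                                        std::string* literal) {
      // Expect "\uXXXX" starting at pos.
      if (pos + 6 > src.size() || src[pos] != '\\' || src[pos + 1] != 'u') {
        return 0;  // incomplete or not an escape: consume nothing
      }
      for (int i = 2; i < 6; i++) {
        if (!std::isxdigit(static_cast<unsigned char>(src[pos + i]))) return 0;
      }
      literal->append(src, pos, 6);  // keep the escape uninterpreted
      return 6;
    }

    int main() {
      std::string lit;
      assert(ScanLiteralUnicodeEscape("\\u0067", 0, &lit) == 6);
      assert(lit == "\\u0067");
      assert(ScanLiteralUnicodeEscape("\\u06", 0, &lit) == 0);  // too short
      return 0;
    }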
bool JavaScriptScanner::ScanRegExpFlags() {
// Scan regular expression flags.
LiteralScope literal(this);
while (unicode_cache_->IsIdentifierPart(c0_)) {
- if (c0_ == '\\') {
- uc32 c = ScanIdentifierUnicodeEscape();
- if (c != static_cast<uc32>(unibrow::Utf8::kBadChar)) {
- // We allow any escaped character, unlike the restriction on
- // IdentifierPart when it is used to build an IdentifierName.
- AddLiteralChar(c);
- continue;
+ if (c0_ != '\\') {
+ AddLiteralCharAdvance();
+ } else {
+ if (!ScanLiteralUnicodeEscape()) {
+ break;
}
}
- AddLiteralCharAdvance();
}
literal.Complete();
@@ -785,182 +1087,4 @@
return true;
}
-// ----------------------------------------------------------------------------
-// Keyword Matcher
-
-KeywordMatcher::FirstState KeywordMatcher::first_states_[] = {
- { "break", KEYWORD_PREFIX, Token::BREAK },
- { NULL, C, Token::ILLEGAL },
- { NULL, D, Token::ILLEGAL },
- { NULL, E, Token::ILLEGAL },
- { NULL, F, Token::ILLEGAL },
- { NULL, UNMATCHABLE, Token::ILLEGAL },
- { NULL, UNMATCHABLE, Token::ILLEGAL },
- { NULL, I, Token::ILLEGAL },
- { NULL, UNMATCHABLE, Token::ILLEGAL },
- { NULL, UNMATCHABLE, Token::ILLEGAL },
- { "let", KEYWORD_PREFIX, Token::FUTURE_STRICT_RESERVED_WORD },
- { NULL, UNMATCHABLE, Token::ILLEGAL },
- { NULL, N, Token::ILLEGAL },
- { NULL, UNMATCHABLE, Token::ILLEGAL },
- { NULL, P, Token::ILLEGAL },
- { NULL, UNMATCHABLE, Token::ILLEGAL },
- { "return", KEYWORD_PREFIX, Token::RETURN },
- { NULL, S, Token::ILLEGAL },
- { NULL, T, Token::ILLEGAL },
- { NULL, UNMATCHABLE, Token::ILLEGAL },
- { NULL, V, Token::ILLEGAL },
- { NULL, W, Token::ILLEGAL },
- { NULL, UNMATCHABLE, Token::ILLEGAL },
- { "yield", KEYWORD_PREFIX, Token::FUTURE_STRICT_RESERVED_WORD }
-};
-
-
-void KeywordMatcher::Step(unibrow::uchar input) {
- switch (state_) {
- case INITIAL: {
- // matching the first character is the only state with significant fanout.
- // Match only lower-case letters in range 'b'..'y'.
- unsigned int offset = input - kFirstCharRangeMin;
- if (offset < kFirstCharRangeLength) {
- state_ = first_states_[offset].state;
- if (state_ == KEYWORD_PREFIX) {
- keyword_ = first_states_[offset].keyword;
- counter_ = 1;
- keyword_token_ = first_states_[offset].token;
- }
- return;
- }
- break;
- }
- case KEYWORD_PREFIX:
- if (static_cast<unibrow::uchar>(keyword_[counter_]) == input) {
- counter_++;
- if (keyword_[counter_] == '\0') {
- state_ = KEYWORD_MATCHED;
- token_ = keyword_token_;
- }
- return;
- }
- break;
- case KEYWORD_MATCHED:
- token_ = Token::IDENTIFIER;
- break;
- case C:
- if (MatchState(input, 'a', CA)) return;
- if (MatchKeywordStart(input, "class", 1,
- Token::FUTURE_RESERVED_WORD)) return;
- if (MatchState(input, 'o', CO)) return;
- break;
- case CA:
- if (MatchKeywordStart(input, "case", 2, Token::CASE)) return;
- if (MatchKeywordStart(input, "catch", 2, Token::CATCH)) return;
- break;
- case CO:
- if (MatchState(input, 'n', CON)) return;
- break;
- case CON:
- if (MatchKeywordStart(input, "const", 3, Token::CONST)) return;
- if (MatchKeywordStart(input, "continue", 3, Token::CONTINUE)) return;
- break;
- case D:
- if (MatchState(input, 'e', DE)) return;
- if (MatchKeyword(input, 'o', KEYWORD_MATCHED, Token::DO)) return;
- break;
- case DE:
- if (MatchKeywordStart(input, "debugger", 2, Token::DEBUGGER)) return;
- if (MatchKeywordStart(input, "default", 2, Token::DEFAULT)) return;
- if (MatchKeywordStart(input, "delete", 2, Token::DELETE)) return;
- break;
- case E:
- if (MatchKeywordStart(input, "else", 1, Token::ELSE)) return;
- if (MatchKeywordStart(input, "enum", 1,
- Token::FUTURE_RESERVED_WORD)) return;
- if (MatchState(input, 'x', EX)) return;
- break;
- case EX:
- if (MatchKeywordStart(input, "export", 2,
- Token::FUTURE_RESERVED_WORD)) return;
- if (MatchKeywordStart(input, "extends", 2,
- Token::FUTURE_RESERVED_WORD)) return;
- break;
- case F:
- if (MatchKeywordStart(input, "false", 1, Token::FALSE_LITERAL)) return;
- if (MatchKeywordStart(input, "finally", 1, Token::FINALLY)) return;
- if (MatchKeywordStart(input, "for", 1, Token::FOR)) return;
- if (MatchKeywordStart(input, "function", 1, Token::FUNCTION)) return;
- break;
- case I:
- if (MatchKeyword(input, 'f', KEYWORD_MATCHED, Token::IF)) return;
- if (MatchState(input, 'm', IM)) return;
- if (MatchKeyword(input, 'n', IN, Token::IN)) return;
- break;
- case IM:
- if (MatchState(input, 'p', IMP)) return;
- break;
- case IMP:
- if (MatchKeywordStart(input, "implements", 3,
- Token::FUTURE_STRICT_RESERVED_WORD )) return;
- if (MatchKeywordStart(input, "import", 3,
- Token::FUTURE_RESERVED_WORD)) return;
- break;
- case IN:
- token_ = Token::IDENTIFIER;
- if (MatchKeywordStart(input, "interface", 2,
- Token::FUTURE_STRICT_RESERVED_WORD)) return;
- if (MatchKeywordStart(input, "instanceof", 2, Token::INSTANCEOF)) return;
- break;
- case N:
- if (MatchKeywordStart(input, "new", 1, Token::NEW)) return;
- if (MatchKeywordStart(input, "null", 1, Token::NULL_LITERAL)) return;
- break;
- case P:
- if (MatchKeywordStart(input, "package", 1,
- Token::FUTURE_STRICT_RESERVED_WORD)) return;
- if (MatchState(input, 'r', PR)) return;
- if (MatchKeywordStart(input, "public", 1,
- Token::FUTURE_STRICT_RESERVED_WORD)) return;
- break;
- case PR:
- if (MatchKeywordStart(input, "private", 2,
- Token::FUTURE_STRICT_RESERVED_WORD)) return;
- if (MatchKeywordStart(input, "protected", 2,
- Token::FUTURE_STRICT_RESERVED_WORD)) return;
- break;
- case S:
- if (MatchKeywordStart(input, "static", 1,
- Token::FUTURE_STRICT_RESERVED_WORD)) return;
- if (MatchKeywordStart(input, "super", 1,
- Token::FUTURE_RESERVED_WORD)) return;
- if (MatchKeywordStart(input, "switch", 1,
- Token::SWITCH)) return;
- break;
- case T:
- if (MatchState(input, 'h', TH)) return;
- if (MatchState(input, 'r', TR)) return;
- if (MatchKeywordStart(input, "typeof", 1, Token::TYPEOF)) return;
- break;
- case TH:
- if (MatchKeywordStart(input, "this", 2, Token::THIS)) return;
- if (MatchKeywordStart(input, "throw", 2, Token::THROW)) return;
- break;
- case TR:
- if (MatchKeywordStart(input, "true", 2, Token::TRUE_LITERAL)) return;
- if (MatchKeyword(input, 'y', KEYWORD_MATCHED, Token::TRY)) return;
- break;
- case V:
- if (MatchKeywordStart(input, "var", 1, Token::VAR)) return;
- if (MatchKeywordStart(input, "void", 1, Token::VOID)) return;
- break;
- case W:
- if (MatchKeywordStart(input, "while", 1, Token::WHILE)) return;
- if (MatchKeywordStart(input, "with", 1, Token::WITH)) return;
- break;
- case UNMATCHABLE:
- break;
- }
- // On fallthrough, it's a failure.
- state_ = UNMATCHABLE;
-}
-
} } // namespace v8::internal
diff --git a/src/scanner-base.h b/src/scanner-base.h
index 3d67d4e..d68d240 100644
--- a/src/scanner-base.h
+++ b/src/scanner-base.h
@@ -362,7 +362,7 @@
// Call this after setting source_ to the input.
void Init() {
// Set c0_ (one character ahead)
- ASSERT(kCharacterLookaheadBufferSize == 1);
+ STATIC_ASSERT(kCharacterLookaheadBufferSize == 1);
Advance();
// Initialize current_ to not refer to a literal.
current_.literal_chars = NULL;
@@ -419,7 +419,7 @@
}
}
- uc32 ScanHexEscape(uc32 c, int length);
+ uc32 ScanHexNumber(int expected_length);
// Return the current source position.
int source_pos() {
@@ -507,6 +507,14 @@
// tokens, which is what it is used for.
void SeekForward(int pos);
+ bool HarmonyBlockScoping() const {
+ return harmony_block_scoping_;
+ }
+ void SetHarmonyBlockScoping(bool block_scoping) {
+ harmony_block_scoping_ = block_scoping;
+ }
+
+
protected:
bool SkipWhiteSpace();
Token::Value SkipSingleLineComment();
@@ -529,6 +537,10 @@
// Decodes a unicode escape-sequence which is part of an identifier.
// If the escape sequence cannot be decoded the result is kBadChar.
uc32 ScanIdentifierUnicodeEscape();
+ // Recognizes a unicode escape-sequence and adds its characters,

+ // uninterpreted, to the current literal. Used for parsing RegExp
+ // flags.
+ bool ScanLiteralUnicodeEscape();
// Start position of the octal literal last scanned.
Location octal_pos_;
@@ -540,153 +552,11 @@
// Whether there is a multi-line comment that contains a
// line-terminator after the current token, and before the next.
bool has_multiline_comment_before_next_;
+ // Whether we scan 'let' as a keyword for harmony block scoped
+ // let bindings.
+ bool harmony_block_scoping_;
};
-
-// ----------------------------------------------------------------------------
-// Keyword matching state machine.
-
-class KeywordMatcher {
-// Incrementally recognize keywords.
-//
-// We distinguish between normal future reserved words and words that are
-// considered to be future reserved words only in strict mode as required by
-// ECMA-262 7.6.1.2.
-//
-// Recognized as keywords:
-// break, case, catch, const*, continue, debugger, default, delete, do,
-// else, finally, false, for, function, if, in, instanceof, new, null,
-// return, switch, this, throw, true, try, typeof, var, void, while, with.
-//
-// Recognized as Future Reserved Keywords:
-// class, enum, export, extends, import, super.
-//
-// Recognized as Future Reserved Keywords (strict mode only):
-// implements, interface, let, package, private, protected, public,
-// static, yield.
-//
-// *: Actually a "future reserved keyword". It's the only one we are
-// recognizing outside of ES5 strict mode, the remaining are allowed
-// as identifiers.
-//
- public:
- KeywordMatcher()
- : state_(INITIAL),
- token_(Token::IDENTIFIER),
- keyword_(NULL),
- counter_(0),
- keyword_token_(Token::ILLEGAL) {}
-
- Token::Value token() { return token_; }
-
- inline bool AddChar(unibrow::uchar input) {
- if (state_ != UNMATCHABLE) {
- Step(input);
- }
- return state_ != UNMATCHABLE;
- }
-
- void Fail() {
- token_ = Token::IDENTIFIER;
- state_ = UNMATCHABLE;
- }
-
- private:
- enum State {
- UNMATCHABLE,
- INITIAL,
- KEYWORD_PREFIX,
- KEYWORD_MATCHED,
- C,
- CA,
- CO,
- CON,
- D,
- DE,
- E,
- EX,
- F,
- I,
- IM,
- IMP,
- IN,
- N,
- P,
- PR,
- S,
- T,
- TH,
- TR,
- V,
- W
- };
-
- struct FirstState {
- const char* keyword;
- State state;
- Token::Value token;
- };
-
- // Range of possible first characters of a keyword.
- static const unsigned int kFirstCharRangeMin = 'b';
- static const unsigned int kFirstCharRangeMax = 'y';
- static const unsigned int kFirstCharRangeLength =
- kFirstCharRangeMax - kFirstCharRangeMin + 1;
- // State map for first keyword character range.
- static FirstState first_states_[kFirstCharRangeLength];
-
- // If input equals keyword's character at position, continue matching keyword
- // from that position.
- inline bool MatchKeywordStart(unibrow::uchar input,
- const char* keyword,
- int position,
- Token::Value token_if_match) {
- if (input != static_cast<unibrow::uchar>(keyword[position])) {
- return false;
- }
- state_ = KEYWORD_PREFIX;
- this->keyword_ = keyword;
- this->counter_ = position + 1;
- this->keyword_token_ = token_if_match;
- return true;
- }
-
- // If input equals match character, transition to new state and return true.
- inline bool MatchState(unibrow::uchar input, char match, State new_state) {
- if (input != static_cast<unibrow::uchar>(match)) {
- return false;
- }
- state_ = new_state;
- return true;
- }
-
- inline bool MatchKeyword(unibrow::uchar input,
- char match,
- State new_state,
- Token::Value keyword_token) {
- if (input != static_cast<unibrow::uchar>(match)) {
- return false;
- }
- state_ = new_state;
- token_ = keyword_token;
- return true;
- }
-
- void Step(unibrow::uchar input);
-
- // Current state.
- State state_;
- // Token for currently added characters.
- Token::Value token_;
-
- // Matching a specific keyword string (there is only one possible valid
- // keyword with the current prefix).
- const char* keyword_;
- int counter_;
- Token::Value keyword_token_;
-};
-
-
} } // namespace v8::internal
#endif // V8_SCANNER_BASE_H_
diff --git a/src/scanner.cc b/src/scanner.cc
old mode 100755
new mode 100644
diff --git a/src/scopeinfo.cc b/src/scopeinfo.cc
index 3e18368..0eacc83 100644
--- a/src/scopeinfo.cc
+++ b/src/scopeinfo.cc
@@ -313,7 +313,7 @@
stack_slots_.length();
Handle<SerializedScopeInfo> data(
- SerializedScopeInfo::cast(*FACTORY->NewFixedArray(length, TENURED)));
+ SerializedScopeInfo::cast(*FACTORY->NewSerializedScopeInfo(length)));
AssertNoAllocation nogc;
Object** p0 = data->data_start();
diff --git a/src/scopeinfo.h b/src/scopeinfo.h
index 86c33f6..1c61f11 100644
--- a/src/scopeinfo.h
+++ b/src/scopeinfo.h
@@ -107,7 +107,7 @@
public :
static SerializedScopeInfo* cast(Object* object) {
- ASSERT(object->IsFixedArray());
+ ASSERT(object->IsSerializedScopeInfo());
return reinterpret_cast<SerializedScopeInfo*>(object);
}
diff --git a/src/scopes.cc b/src/scopes.cc
index 390a0b6..a76492e 100644
--- a/src/scopes.cc
+++ b/src/scopes.cc
@@ -146,7 +146,9 @@
}
-Scope::Scope(Scope* inner_scope, Handle<SerializedScopeInfo> scope_info)
+Scope::Scope(Scope* inner_scope,
+ Type type,
+ Handle<SerializedScopeInfo> scope_info)
: isolate_(Isolate::Current()),
inner_scopes_(4),
variables_(),
@@ -156,7 +158,7 @@
decls_(4),
already_resolved_(true) {
ASSERT(!scope_info.is_null());
- SetDefaults(FUNCTION_SCOPE, NULL, scope_info);
+ SetDefaults(type, NULL, scope_info);
if (scope_info->HasHeapAllocatedLocals()) {
num_heap_slots_ = scope_info_->NumberOfContextSlots();
}
@@ -232,8 +234,13 @@
if (context->IsFunctionContext()) {
SerializedScopeInfo* scope_info =
context->closure()->shared()->scope_info();
- current_scope =
- new Scope(current_scope, Handle<SerializedScopeInfo>(scope_info));
+ current_scope = new Scope(current_scope, FUNCTION_SCOPE,
+ Handle<SerializedScopeInfo>(scope_info));
+ } else if (context->IsBlockContext()) {
+ SerializedScopeInfo* scope_info =
+ SerializedScopeInfo::cast(context->extension());
+ current_scope = new Scope(current_scope, BLOCK_SCOPE,
+ Handle<SerializedScopeInfo>(scope_info));
} else {
ASSERT(context->IsCatchContext());
String* name = String::cast(context->extension());
@@ -294,10 +301,13 @@
// instead load them directly from the stack. Currently, the only
// such parameter is 'this' which is passed on the stack when
// invoking scripts
- if (is_catch_scope()) {
+ if (is_catch_scope() || is_block_scope()) {
ASSERT(outer_scope() != NULL);
receiver_ = outer_scope()->receiver();
} else {
+ ASSERT(is_function_scope() ||
+ is_global_scope() ||
+ is_eval_scope());
Variable* var =
variables_.Declare(this,
isolate_->factory()->this_symbol(),
@@ -387,7 +397,9 @@
// This function handles VAR and CONST modes. DYNAMIC variables are
// introduces during variable allocation, INTERNAL variables are allocated
// explicitly, and TEMPORARY variables are allocated via NewTemporary().
- ASSERT(mode == Variable::VAR || mode == Variable::CONST);
+ ASSERT(mode == Variable::VAR ||
+ mode == Variable::CONST ||
+ mode == Variable::LET);
++num_var_or_const_;
return variables_.Declare(this, name, mode, true, Variable::NORMAL);
}
@@ -559,13 +571,22 @@
Scope* Scope::DeclarationScope() {
Scope* scope = this;
- while (scope->is_catch_scope()) {
+ while (scope->is_catch_scope() ||
+ scope->is_block_scope()) {
scope = scope->outer_scope();
}
return scope;
}
+Handle<SerializedScopeInfo> Scope::GetSerializedScopeInfo() {
+ if (scope_info_.is_null()) {
+ scope_info_ = SerializedScopeInfo::Create(this);
+ }
+ return scope_info_;
+}
+
+
#ifdef DEBUG
static const char* Header(Scope::Type type) {
switch (type) {
@@ -573,6 +594,7 @@
case Scope::FUNCTION_SCOPE: return "function";
case Scope::GLOBAL_SCOPE: return "global";
case Scope::CATCH_SCOPE: return "catch";
+ case Scope::BLOCK_SCOPE: return "block";
}
UNREACHABLE();
return NULL;
@@ -600,7 +622,9 @@
PrintF("%s, ", printer->Print(var->rewrite()));
if (var->is_accessed_from_inner_scope()) PrintF(", ");
}
- if (var->is_accessed_from_inner_scope()) PrintF("inner scope access");
+ if (var->is_accessed_from_inner_scope()) {
+ PrintF("inner scope access");
+ }
PrintF("\n");
}
}
@@ -721,7 +745,7 @@
// another variable that is introduced dynamically via an 'eval' call
// or a 'with' statement).
Variable* Scope::LookupRecursive(Handle<String> name,
- bool inner_lookup,
+ bool from_inner_scope,
Variable** invalidated_local) {
// If we find a variable, but the current scope calls 'eval', the found
// variable may not be the correct one (the 'eval' may introduce a
@@ -737,7 +761,7 @@
// (Even if there is an 'eval' in this scope which introduces the
// same variable again, the resulting variable remains the same.
// Note that enclosing 'with' statements are handled at the call site.)
- if (!inner_lookup)
+ if (!from_inner_scope)
return var;
} else {
@@ -770,7 +794,7 @@
ASSERT(var != NULL);
// If this is a lookup from an inner scope, mark the variable.
- if (inner_lookup) {
+ if (from_inner_scope) {
var->MarkAsAccessedFromInnerScope();
}
@@ -926,7 +950,8 @@
scope_calls_eval_ ||
inner_scope_calls_eval_ ||
scope_contains_with_ ||
- is_catch_scope())) {
+ is_catch_scope() ||
+ is_block_scope())) {
var->set_is_used(true);
}
// Global variables do not need to be allocated.
@@ -943,7 +968,7 @@
// Exceptions: temporary variables are never allocated in a context;
// catch-bound variables are always allocated in a context.
if (var->mode() == Variable::TEMPORARY) return false;
- if (is_catch_scope()) return true;
+ if (is_catch_scope() || is_block_scope()) return true;
return var->is_accessed_from_inner_scope() ||
scope_calls_eval_ ||
inner_scope_calls_eval_ ||
diff --git a/src/scopes.h b/src/scopes.h
index e76fb50..c2c4179 100644
--- a/src/scopes.h
+++ b/src/scopes.h
@@ -93,7 +93,8 @@
EVAL_SCOPE, // The top-level scope for an eval source.
FUNCTION_SCOPE, // The top-level scope for a function.
GLOBAL_SCOPE, // The top-level scope for a program or a top-level eval.
- CATCH_SCOPE // The scope introduced by catch.
+ CATCH_SCOPE, // The scope introduced by catch.
+ BLOCK_SCOPE // The scope introduced by a new block.
};
Scope(Scope* outer_scope, Type type);
@@ -204,6 +205,7 @@
bool is_function_scope() const { return type_ == FUNCTION_SCOPE; }
bool is_global_scope() const { return type_ == GLOBAL_SCOPE; }
bool is_catch_scope() const { return type_ == CATCH_SCOPE; }
+ bool is_block_scope() const { return type_ == BLOCK_SCOPE; }
bool is_strict_mode() const { return strict_mode_; }
bool is_strict_mode_eval_scope() const {
return is_eval_scope() && is_strict_mode();
@@ -294,6 +296,8 @@
// where var declarations will be hoisted to in the implementation.
Scope* DeclarationScope();
+ Handle<SerializedScopeInfo> GetSerializedScopeInfo();
+
// ---------------------------------------------------------------------------
// Strict mode support.
bool IsDeclared(Handle<String> name) {
@@ -357,11 +361,17 @@
// Illegal redeclaration.
Expression* illegal_redecl_;
- // Scope-specific information.
- bool scope_inside_with_; // this scope is inside a 'with' of some outer scope
- bool scope_contains_with_; // this scope contains a 'with' statement
- bool scope_calls_eval_; // this scope contains an 'eval' call
- bool strict_mode_; // this scope is a strict mode scope
+ // Scope-specific information computed during parsing.
+ //
+ // This scope is inside a 'with' of some outer scope.
+ bool scope_inside_with_;
+ // This scope contains a 'with' statement.
+ bool scope_contains_with_;
+ // This scope or a nested catch scope or with scope contains an 'eval' call. At
+ // the 'eval' call site this scope is the declaration scope.
+ bool scope_calls_eval_;
+ // This scope is a strict mode scope.
+ bool strict_mode_;
// Computed via PropagateScopeInfo.
bool outer_scope_calls_eval_;
@@ -391,7 +401,7 @@
// Variable resolution.
Variable* LookupRecursive(Handle<String> name,
- bool inner_lookup,
+ bool from_inner_function,
Variable** invalidated_local);
void ResolveVariable(Scope* global_scope,
Handle<Context> context,
@@ -419,8 +429,8 @@
void AllocateVariablesRecursively();
private:
- // Construct a function scope based on the scope info.
- Scope(Scope* inner_scope, Handle<SerializedScopeInfo> scope_info);
+ // Construct a function or block scope based on the scope info.
+ Scope(Scope* inner_scope, Type type, Handle<SerializedScopeInfo> scope_info);
// Construct a catch scope with a binding for the name.
Scope(Scope* inner_scope, Handle<String> catch_variable_name);
diff --git a/src/serialize.cc b/src/serialize.cc
index 8cde580..094ad20 100644
--- a/src/serialize.cc
+++ b/src/serialize.cc
@@ -62,57 +62,15 @@
}
-// ExternalReferenceTable is a helper class that defines the relationship
-// between external references and their encodings. It is used to build
-// hashmaps in ExternalReferenceEncoder and ExternalReferenceDecoder.
-class ExternalReferenceTable {
- public:
- static ExternalReferenceTable* instance(Isolate* isolate) {
- ExternalReferenceTable* external_reference_table =
- isolate->external_reference_table();
- if (external_reference_table == NULL) {
- external_reference_table = new ExternalReferenceTable(isolate);
- isolate->set_external_reference_table(external_reference_table);
- }
- return external_reference_table;
+ExternalReferenceTable* ExternalReferenceTable::instance(Isolate* isolate) {
+ ExternalReferenceTable* external_reference_table =
+ isolate->external_reference_table();
+ if (external_reference_table == NULL) {
+ external_reference_table = new ExternalReferenceTable(isolate);
+ isolate->set_external_reference_table(external_reference_table);
}
-
- int size() const { return refs_.length(); }
-
- Address address(int i) { return refs_[i].address; }
-
- uint32_t code(int i) { return refs_[i].code; }
-
- const char* name(int i) { return refs_[i].name; }
-
- int max_id(int code) { return max_id_[code]; }
-
- private:
- explicit ExternalReferenceTable(Isolate* isolate) : refs_(64) {
- PopulateTable(isolate);
- }
- ~ExternalReferenceTable() { }
-
- struct ExternalReferenceEntry {
- Address address;
- uint32_t code;
- const char* name;
- };
-
- void PopulateTable(Isolate* isolate);
-
- // For a few types of references, we can get their address from their id.
- void AddFromId(TypeCode type,
- uint16_t id,
- const char* name,
- Isolate* isolate);
-
- // For other types of references, the caller will figure out the address.
- void Add(Address address, TypeCode type, uint16_t id, const char* name);
-
- List<ExternalReferenceEntry> refs_;
- int max_id_[kTypeCodeCount];
-};
+ return external_reference_table;
+}
void ExternalReferenceTable::AddFromId(TypeCode type,
diff --git a/src/serialize.h b/src/serialize.h
index d83722d..66d6fb5 100644
--- a/src/serialize.h
+++ b/src/serialize.h
@@ -60,6 +60,52 @@
const int kDebugIdShift = kDebugRegisterBits;
+// ExternalReferenceTable is a helper class that defines the relationship
+// between external references and their encodings. It is used to build
+// hashmaps in ExternalReferenceEncoder and ExternalReferenceDecoder.
+class ExternalReferenceTable {
+ public:
+ static ExternalReferenceTable* instance(Isolate* isolate);
+
+ ~ExternalReferenceTable() { }
+
+ int size() const { return refs_.length(); }
+
+ Address address(int i) { return refs_[i].address; }
+
+ uint32_t code(int i) { return refs_[i].code; }
+
+ const char* name(int i) { return refs_[i].name; }
+
+ int max_id(int code) { return max_id_[code]; }
+
+ private:
+ explicit ExternalReferenceTable(Isolate* isolate) : refs_(64) {
+ PopulateTable(isolate);
+ }
+
+ struct ExternalReferenceEntry {
+ Address address;
+ uint32_t code;
+ const char* name;
+ };
+
+ void PopulateTable(Isolate* isolate);
+
+ // For a few types of references, we can get their address from their id.
+ void AddFromId(TypeCode type,
+ uint16_t id,
+ const char* name,
+ Isolate* isolate);
+
+ // For other types of references, the caller will figure out the address.
+ void Add(Address address, TypeCode type, uint16_t id, const char* name);
+
+ List<ExternalReferenceEntry> refs_;
+ int max_id_[kTypeCodeCount];
+};
+
+
class ExternalReferenceEncoder {
public:
ExternalReferenceEncoder();
@@ -544,6 +590,7 @@
ASSERT(!o->IsScript());
return o->IsString() || o->IsSharedFunctionInfo() ||
o->IsHeapNumber() || o->IsCode() ||
+ o->IsSerializedScopeInfo() ||
o->map() == HEAP->fixed_cow_array_map();
}
diff --git a/src/small-pointer-list.h b/src/small-pointer-list.h
index 6291d9e..6c5ce89 100644
--- a/src/small-pointer-list.h
+++ b/src/small-pointer-list.h
@@ -44,6 +44,31 @@
public:
SmallPointerList() : data_(kEmptyTag) {}
+ explicit SmallPointerList(int capacity) : data_(kEmptyTag) {
+ Reserve(capacity);
+ }
+
+ void Reserve(int capacity) {
+ if (capacity < 2) return;
+ if ((data_ & kTagMask) == kListTag) {
+ if (list()->capacity() >= capacity) return;
+ int old_length = list()->length();
+ list()->AddBlock(NULL, capacity - list()->capacity());
+ list()->Rewind(old_length);
+ return;
+ }
+ PointerList* list = new PointerList(capacity);
+ if ((data_ & kTagMask) == kSingletonTag) {
+ list->Add(single_value());
+ }
+ ASSERT(IsAligned(reinterpret_cast<intptr_t>(list), kPointerAlignment));
+ data_ = reinterpret_cast<intptr_t>(list) | kListTag;
+ }
+
+ void Clear() {
+ data_ = kEmptyTag;
+ }
+
bool is_empty() const { return length() == 0; }
int length() const {
diff --git a/src/spaces-inl.h b/src/spaces-inl.h
index 0eb827d..35d7224 100644
--- a/src/spaces-inl.h
+++ b/src/spaces-inl.h
@@ -295,13 +295,13 @@
SetPageFlag(IS_NORMAL_PAGE, !is_large_object_page);
}
-bool Page::IsPageExecutable() {
- return GetPageFlag(IS_EXECUTABLE);
+Executability Page::PageExecutability() {
+ return GetPageFlag(IS_EXECUTABLE) ? EXECUTABLE : NOT_EXECUTABLE;
}
-void Page::SetIsPageExecutable(bool is_page_executable) {
- SetPageFlag(IS_EXECUTABLE, is_page_executable);
+void Page::SetPageExecutability(Executability executable) {
+ SetPageFlag(IS_EXECUTABLE, executable == EXECUTABLE);
}
@@ -435,23 +435,6 @@
// -----------------------------------------------------------------------------
-// LargeObjectChunk
-
-Address LargeObjectChunk::GetStartAddress() {
- // Round the chunk address up to the nearest page-aligned address
- // and return the heap object in that page.
- Page* page = Page::FromAddress(RoundUp(address(), Page::kPageSize));
- return page->ObjectAreaStart();
-}
-
-
-void LargeObjectChunk::Free(Executability executable) {
- Isolate* isolate =
- Page::FromAddress(RoundUp(address(), Page::kPageSize))->heap_->isolate();
- isolate->memory_allocator()->FreeRawMemory(address(), size(), executable);
-}
-
-// -----------------------------------------------------------------------------
// NewSpace
MaybeObject* NewSpace::AllocateRawInternal(int size_in_bytes,
diff --git a/src/spaces.cc b/src/spaces.cc
index 0f80496..97c6d2a 100644
--- a/src/spaces.cc
+++ b/src/spaces.cc
@@ -148,12 +148,12 @@
// CodeRange
-CodeRange::CodeRange()
- : code_range_(NULL),
+CodeRange::CodeRange(Isolate* isolate)
+ : isolate_(isolate),
+ code_range_(NULL),
free_list_(0),
allocation_list_(0),
- current_allocation_block_index_(0),
- isolate_(NULL) {
+ current_allocation_block_index_(0) {
}
@@ -279,8 +279,9 @@
const int kEstimatedNumberOfChunks = 270;
-MemoryAllocator::MemoryAllocator()
- : capacity_(0),
+MemoryAllocator::MemoryAllocator(Isolate* isolate)
+ : isolate_(isolate),
+ capacity_(0),
capacity_executable_(0),
size_(0),
size_executable_(0),
@@ -288,8 +289,7 @@
chunks_(kEstimatedNumberOfChunks),
free_chunk_ids_(kEstimatedNumberOfChunks),
max_nof_chunks_(0),
- top_(0),
- isolate_(NULL) {
+ top_(0) {
}
@@ -1542,6 +1542,7 @@
CASE(UNARY_OP_IC);
CASE(BINARY_OP_IC);
CASE(COMPARE_IC);
+ CASE(TO_BOOLEAN_IC);
}
}
@@ -2721,12 +2722,26 @@
LargeObjectChunk* chunk = reinterpret_cast<LargeObjectChunk*>(mem);
chunk->size_ = size;
- Page* page = Page::FromAddress(RoundUp(chunk->address(), Page::kPageSize));
- page->heap_ = isolate->heap();
+ chunk->GetPage()->heap_ = isolate->heap();
return chunk;
}
+void LargeObjectChunk::Free(Executability executable) {
+ size_t guard_size = (executable == EXECUTABLE) ? Page::kPageSize : 0;
+ ObjectSpace space =
+ (executable == EXECUTABLE) ? kObjectSpaceCodeSpace : kObjectSpaceLoSpace;
+ // Do not access instance fields after FreeRawMemory!
+ Address my_address = address();
+ size_t my_size = size();
+ Isolate* isolate = GetPage()->heap_->isolate();
+ MemoryAllocator* a = isolate->memory_allocator();
+ a->FreeRawMemory(my_address - guard_size, my_size + guard_size, executable);
+ a->PerformAllocationCallback(space, kAllocationActionFree, my_size);
+ LOG(isolate, DeleteEvent("LargeObjectChunk", my_address));
+}
+
+
int LargeObjectChunk::ChunkSizeFor(int size_in_bytes) {
int os_alignment = static_cast<int>(OS::AllocateAlignment());
if (os_alignment < Page::kPageSize) {
@@ -2759,25 +2774,9 @@
while (first_chunk_ != NULL) {
LargeObjectChunk* chunk = first_chunk_;
first_chunk_ = first_chunk_->next();
- LOG(heap()->isolate(), DeleteEvent("LargeObjectChunk", chunk->address()));
- Page* page = Page::FromAddress(RoundUp(chunk->address(), Page::kPageSize));
- Executability executable =
- page->IsPageExecutable() ? EXECUTABLE : NOT_EXECUTABLE;
- ObjectSpace space = kObjectSpaceLoSpace;
- if (executable == EXECUTABLE) space = kObjectSpaceCodeSpace;
- size_t size = chunk->size();
- size_t guard_size = (executable == EXECUTABLE) ? Page::kPageSize : 0;
- heap()->isolate()->memory_allocator()->FreeRawMemory(
- chunk->address() - guard_size,
- size + guard_size,
- executable);
- heap()->isolate()->memory_allocator()->PerformAllocationCallback(
- space, kAllocationActionFree, size);
+ chunk->Free(chunk->GetPage()->PageExecutability());
}
-
- size_ = 0;
- page_count_ = 0;
- objects_size_ = 0;
+ Setup();
}
@@ -2805,14 +2804,14 @@
first_chunk_ = chunk;
// Initialize page header.
- Page* page = Page::FromAddress(RoundUp(chunk->address(), Page::kPageSize));
+ Page* page = chunk->GetPage();
Address object_address = page->ObjectAreaStart();
// Clear the low order bit of the second word in the page to flag it as a
// large object page. If the chunk_size happened to be written there, its
// low order bit should already be clear.
page->SetIsLargeObjectPage(true);
- page->SetIsPageExecutable(executable);
+ page->SetPageExecutability(executable);
page->SetRegionMarks(Page::kAllRegionsCleanMarks);
return HeapObject::FromAddress(object_address);
}
@@ -2943,14 +2942,8 @@
previous = current;
current = current->next();
} else {
- Page* page = Page::FromAddress(RoundUp(current->address(),
- Page::kPageSize));
- Executability executable =
- page->IsPageExecutable() ? EXECUTABLE : NOT_EXECUTABLE;
- Address chunk_address = current->address();
- size_t chunk_size = current->size();
-
// Cut the chunk out from the chunk list.
+ LargeObjectChunk* current_chunk = current;
current = current->next();
if (previous == NULL) {
first_chunk_ = current;
@@ -2963,22 +2956,10 @@
object, heap()->isolate());
LiveObjectList::ProcessNonLive(object);
- size_ -= static_cast<int>(chunk_size);
+ size_ -= static_cast<int>(current_chunk->size());
objects_size_ -= object->Size();
page_count_--;
- ObjectSpace space = kObjectSpaceLoSpace;
- size_t guard_size = 0;
- if (executable == EXECUTABLE) {
- space = kObjectSpaceCodeSpace;
- guard_size = Page::kPageSize;
- }
- heap()->isolate()->memory_allocator()->FreeRawMemory(
- chunk_address - guard_size,
- chunk_size + guard_size,
- executable);
- heap()->isolate()->memory_allocator()->PerformAllocationCallback(
- space, kAllocationActionFree, size_);
- LOG(heap()->isolate(), DeleteEvent("LargeObjectChunk", chunk_address));
+ current_chunk->Free(current_chunk->GetPage()->PageExecutability());
}
}
}
diff --git a/src/spaces.h b/src/spaces.h
index ac5d998..908cd30 100644
--- a/src/spaces.h
+++ b/src/spaces.h
@@ -200,9 +200,9 @@
inline void SetIsLargeObjectPage(bool is_large_object_page);
- inline bool IsPageExecutable();
+ inline Executability PageExecutability();
- inline void SetIsPageExecutable(bool is_page_executable);
+ inline void SetPageExecutability(Executability executable);
// Returns the offset of a given address to this page.
INLINE(int Offset(Address a)) {
@@ -408,6 +408,9 @@
// manages a range of virtual memory.
class CodeRange {
public:
+ explicit CodeRange(Isolate* isolate);
+ ~CodeRange() { TearDown(); }
+
// Reserves a range of virtual memory, but does not commit any of it.
// Can only be called once, at heap initialization time.
// Returns false on failure.
@@ -417,9 +420,9 @@
// manage it.
void TearDown();
- bool exists() { return code_range_ != NULL; }
+ bool exists() { return this != NULL && code_range_ != NULL; }
bool contains(Address address) {
- if (code_range_ == NULL) return false;
+ if (this == NULL || code_range_ == NULL) return false;
Address start = static_cast<Address>(code_range_->address());
return start <= address && address < start + code_range_->size();
}
@@ -432,7 +435,7 @@
void FreeRawMemory(void* buf, size_t length);
private:
- CodeRange();
+ Isolate* isolate_;
// The reserved range of virtual memory that all code objects are put in.
VirtualMemory* code_range_;
@@ -466,10 +469,6 @@
static int CompareFreeBlockAddress(const FreeBlock* left,
const FreeBlock* right);
- friend class Isolate;
-
- Isolate* isolate_;
-
DISALLOW_COPY_AND_ASSIGN(CodeRange);
};
@@ -500,6 +499,8 @@
class MemoryAllocator {
public:
+ explicit MemoryAllocator(Isolate* isolate);
+
// Initializes its internal bookkeeping structures.
// Max capacity of the total space and executable memory limit.
bool Setup(intptr_t max_capacity, intptr_t capacity_executable);
@@ -657,10 +658,10 @@
#endif
private:
- MemoryAllocator();
-
static const int kChunkSize = kPagesPerChunk * Page::kPageSize;
+ Isolate* isolate_;
+
// Maximum space size in bytes.
intptr_t capacity_;
// Maximum subset of capacity_ that can be executable
@@ -753,10 +754,6 @@
Page* prev,
Page** last_page_in_use);
- friend class Isolate;
-
- Isolate* isolate_;
-
DISALLOW_COPY_AND_ASSIGN(MemoryAllocator);
};
@@ -2147,7 +2144,7 @@
static LargeObjectChunk* New(int size_in_bytes, Executability executable);
// Free the memory associated with the chunk.
- inline void Free(Executability executable);
+ void Free(Executability executable);
// Interpret a raw address as a large object chunk.
static LargeObjectChunk* FromAddress(Address address) {
@@ -2157,13 +2154,17 @@
// Returns the address of this chunk.
Address address() { return reinterpret_cast<Address>(this); }
+ Page* GetPage() {
+ return Page::FromAddress(RoundUp(address(), Page::kPageSize));
+ }
+
// Accessors for the fields of the chunk.
LargeObjectChunk* next() { return next_; }
void set_next(LargeObjectChunk* chunk) { next_ = chunk; }
size_t size() { return size_ & ~Page::kPageFlagMask; }
// Compute the start address in the chunk.
- inline Address GetStartAddress();
+ Address GetStartAddress() { return GetPage()->ObjectAreaStart(); }
// Returns the object in this chunk.
HeapObject* GetObject() { return HeapObject::FromAddress(GetStartAddress()); }
diff --git a/src/string.js b/src/string.js
index 600b131..a70eead 100644
--- a/src/string.js
+++ b/src/string.js
@@ -149,7 +149,7 @@
position = 0;
}
if (position + patLength < subLength) {
- index = position
+ index = position;
}
}
}
@@ -170,7 +170,7 @@
["String.prototype.localeCompare"]);
}
if (%_ArgumentsLength() === 0) return 0;
- return %StringLocaleCompare(TO_STRING_INLINE(this),
+ return %StringLocaleCompare(TO_STRING_INLINE(this),
TO_STRING_INLINE(other));
}
@@ -251,8 +251,7 @@
// Compute the string to replace with.
if (IS_FUNCTION(replace)) {
- var receiver =
- %_IsNativeOrStrictMode(replace) ? void 0 : %GetGlobalReceiver();
+ var receiver = %GetDefaultReceiver(replace);
builder.add(%_CallFunction(receiver,
search,
start,
@@ -420,8 +419,7 @@
if (NUMBER_OF_CAPTURES(lastMatchInfo) == 2) {
var match_start = 0;
var override = new InternalArray(null, 0, subject);
- var receiver =
- %_IsNativeOrStrictMode(replace) ? void 0 : %GetGlobalReceiver();
+ var receiver = %GetDefaultReceiver(replace);
while (i < len) {
var elem = res[i];
if (%_IsSmi(elem)) {
@@ -478,8 +476,7 @@
// No captures, only the match, which is always valid.
var s = SubString(subject, index, endOfMatch);
// Don't call directly to avoid exposing the built-in global object.
- var receiver =
- %_IsNativeOrStrictMode(replace) ? void 0 : %GetGlobalReceiver();
+ var receiver = %GetDefaultReceiver(replace);
replacement =
%_CallFunction(receiver, s, index, subject, replace);
} else {
diff --git a/src/stub-cache.cc b/src/stub-cache.cc
index 79cd7a0..13b0b63 100644
--- a/src/stub-cache.cc
+++ b/src/stub-cache.cc
@@ -29,6 +29,7 @@
#include "api.h"
#include "arguments.h"
+#include "ast.h"
#include "code-stubs.h"
#include "gdb-jit.h"
#include "ic-inl.h"
@@ -1161,7 +1162,7 @@
}
-void StubCache::CollectMatchingMaps(ZoneMapList* types,
+void StubCache::CollectMatchingMaps(SmallMapList* types,
String* name,
Code::Flags flags) {
for (int i = 0; i < kPrimaryTableSize; i++) {
diff --git a/src/stub-cache.h b/src/stub-cache.h
index ffe4241..dd06a1c 100644
--- a/src/stub-cache.h
+++ b/src/stub-cache.h
@@ -46,8 +46,10 @@
// invalidate the cache whenever a prototype map is changed. The stub
// validates the map chain as in the mono-morphic case.
+class SmallMapList;
class StubCache;
+
class SCTableReference {
public:
Address address() const { return address_; }
@@ -294,7 +296,7 @@
void Clear();
// Collect all maps that match the name and flags.
- void CollectMatchingMaps(ZoneMapList* types,
+ void CollectMatchingMaps(SmallMapList* types,
String* name,
Code::Flags flags);
@@ -355,7 +357,7 @@
// shift are equal. Shifting down the length field to get the
// hash code would effectively throw away two bits of the hash
// code.
- ASSERT(kHeapObjectTagSize == String::kHashShift);
+ STATIC_ASSERT(kHeapObjectTagSize == String::kHashShift);
// Compute the hash of the name (use entire hash field).
ASSERT(name->HasHashCode());
uint32_t field = name->hash_field();
diff --git a/src/token.cc b/src/token.cc
index feca7be..7ba7ed3 100644
--- a/src/token.cc
+++ b/src/token.cc
@@ -33,21 +33,21 @@
#define T(name, string, precedence) #name,
const char* const Token::name_[NUM_TOKENS] = {
- TOKEN_LIST(T, T, IGNORE_TOKEN)
+ TOKEN_LIST(T, T)
};
#undef T
#define T(name, string, precedence) string,
const char* const Token::string_[NUM_TOKENS] = {
- TOKEN_LIST(T, T, IGNORE_TOKEN)
+ TOKEN_LIST(T, T)
};
#undef T
#define T(name, string, precedence) precedence,
const int8_t Token::precedence_[NUM_TOKENS] = {
- TOKEN_LIST(T, T, IGNORE_TOKEN)
+ TOKEN_LIST(T, T)
};
#undef T
@@ -55,7 +55,7 @@
#define KT(a, b, c) 'T',
#define KK(a, b, c) 'K',
const char Token::token_type[] = {
- TOKEN_LIST(KT, KK, IGNORE_TOKEN)
+ TOKEN_LIST(KT, KK)
};
#undef KT
#undef KK
diff --git a/src/token.h b/src/token.h
index 77333bc..eb825c1 100644
--- a/src/token.h
+++ b/src/token.h
@@ -41,7 +41,6 @@
//
// T: Non-keyword tokens
// K: Keyword tokens
-// F: Future (reserved) keyword tokens
// IGNORE_TOKEN is a convenience macro that can be supplied as
// an argument (at any position) for a TOKEN_LIST call. It does
@@ -49,7 +48,7 @@
#define IGNORE_TOKEN(name, string, precedence)
-#define TOKEN_LIST(T, K, F) \
+#define TOKEN_LIST(T, K) \
/* End of source indicator. */ \
T(EOS, "EOS", 0) \
\
@@ -72,6 +71,7 @@
/* this block of enum values being contiguous and sorted in the */ \
/* same order! */ \
T(INIT_VAR, "=init_var", 2) /* AST-use only. */ \
+ T(INIT_LET, "=init_let", 2) /* AST-use only. */ \
T(INIT_CONST, "=init_const", 2) /* AST-use only. */ \
T(ASSIGN, "=", 2) \
T(ASSIGN_BIT_OR, "|=", 2) \
@@ -169,6 +169,7 @@
T(FUTURE_RESERVED_WORD, NULL, 0) \
T(FUTURE_STRICT_RESERVED_WORD, NULL, 0) \
K(CONST, "const", 0) \
+ K(LET, "let", 0) \
\
/* Illegal token - not able to scan. */ \
T(ILLEGAL, "ILLEGAL", 0) \
@@ -182,7 +183,7 @@
// All token values.
#define T(name, string, precedence) name,
enum Value {
- TOKEN_LIST(T, T, IGNORE_TOKEN)
+ TOKEN_LIST(T, T)
NUM_TOKENS
};
#undef T
diff --git a/src/type-info.cc b/src/type-info.cc
index defb1ae..bdf7bc3 100644
--- a/src/type-info.cc
+++ b/src/type-info.cc
@@ -159,23 +159,26 @@
}
-ZoneMapList* TypeFeedbackOracle::LoadReceiverTypes(Property* expr,
- Handle<String> name) {
+void TypeFeedbackOracle::LoadReceiverTypes(Property* expr,
+ Handle<String> name,
+ SmallMapList* types) {
Code::Flags flags = Code::ComputeMonomorphicFlags(Code::LOAD_IC, NORMAL);
- return CollectReceiverTypes(expr->id(), name, flags);
+ CollectReceiverTypes(expr->id(), name, flags, types);
}
-ZoneMapList* TypeFeedbackOracle::StoreReceiverTypes(Assignment* expr,
- Handle<String> name) {
+void TypeFeedbackOracle::StoreReceiverTypes(Assignment* expr,
+ Handle<String> name,
+ SmallMapList* types) {
Code::Flags flags = Code::ComputeMonomorphicFlags(Code::STORE_IC, NORMAL);
- return CollectReceiverTypes(expr->id(), name, flags);
+ CollectReceiverTypes(expr->id(), name, flags, types);
}
-ZoneMapList* TypeFeedbackOracle::CallReceiverTypes(Call* expr,
- Handle<String> name,
- CallKind call_kind) {
+void TypeFeedbackOracle::CallReceiverTypes(Call* expr,
+ Handle<String> name,
+ CallKind call_kind,
+ SmallMapList* types) {
int arity = expr->arguments()->length();
// Note: Currently we do not take string extra ic data into account
@@ -189,7 +192,7 @@
OWN_MAP,
NOT_IN_LOOP,
arity);
- return CollectReceiverTypes(expr->id(), name, flags);
+ CollectReceiverTypes(expr->id(), name, flags, types);
}
@@ -391,36 +394,30 @@
}
-ZoneMapList* TypeFeedbackOracle::CollectReceiverTypes(unsigned ast_id,
- Handle<String> name,
- Code::Flags flags) {
+void TypeFeedbackOracle::CollectReceiverTypes(unsigned ast_id,
+ Handle<String> name,
+ Code::Flags flags,
+ SmallMapList* types) {
Isolate* isolate = Isolate::Current();
Handle<Object> object = GetInfo(ast_id);
- if (object->IsUndefined() || object->IsSmi()) return NULL;
+ if (object->IsUndefined() || object->IsSmi()) return;
if (*object == isolate->builtins()->builtin(Builtins::kStoreIC_GlobalProxy)) {
// TODO(fschneider): We could collect the maps and signal that
// we need a generic store (or load) here.
ASSERT(Handle<Code>::cast(object)->ic_state() == MEGAMORPHIC);
- return NULL;
} else if (object->IsMap()) {
- ZoneMapList* types = new ZoneMapList(1);
types->Add(Handle<Map>::cast(object));
- return types;
} else if (Handle<Code>::cast(object)->ic_state() == MEGAMORPHIC) {
- ZoneMapList* types = new ZoneMapList(4);
+ types->Reserve(4);
ASSERT(object->IsCode());
isolate->stub_cache()->CollectMatchingMaps(types, *name, flags);
- return types->length() > 0 ? types : NULL;
- } else {
- return NULL;
}
}
-void TypeFeedbackOracle::CollectKeyedReceiverTypes(
- unsigned ast_id,
- ZoneMapList* types) {
+void TypeFeedbackOracle::CollectKeyedReceiverTypes(unsigned ast_id,
+ SmallMapList* types) {
Handle<Object> object = GetInfo(ast_id);
if (!object->IsCode()) return;
Handle<Code> code = Handle<Code>::cast(object);
@@ -439,6 +436,12 @@
}
+byte TypeFeedbackOracle::ToBooleanTypes(unsigned ast_id) {
+ Handle<Object> object = GetInfo(ast_id);
+ return object->IsCode() ? Handle<Code>::cast(object)->to_boolean_state() : 0;
+}
+
+
// Things are a bit tricky here: The iterator for the RelocInfos and the infos
// themselves are not GC-safe, so we first get all infos, then we create the
// dictionary (possibly triggering GC), and finally we relocate the collected
@@ -523,6 +526,7 @@
case Code::UNARY_OP_IC:
case Code::BINARY_OP_IC:
case Code::COMPARE_IC:
+ case Code::TO_BOOLEAN_IC:
SetInfo(ast_id, target);
break;
diff --git a/src/type-info.h b/src/type-info.h
index 0a8c935..448e4c9 100644
--- a/src/type-info.h
+++ b/src/type-info.h
@@ -203,14 +203,16 @@
// Forward declarations.
class Assignment;
-class UnaryOperation;
class BinaryOperation;
class Call;
-class CompareOperation;
-class CountOperation;
-class CompilationInfo;
-class Property;
class CaseClause;
+class CompareOperation;
+class CompilationInfo;
+class CountOperation;
+class Property;
+class SmallMapList;
+class UnaryOperation;
+
class TypeFeedbackOracle BASE_EMBEDDED {
public:
@@ -225,19 +227,29 @@
Handle<Map> LoadMonomorphicReceiverType(Property* expr);
Handle<Map> StoreMonomorphicReceiverType(Expression* expr);
- ZoneMapList* LoadReceiverTypes(Property* expr, Handle<String> name);
- ZoneMapList* StoreReceiverTypes(Assignment* expr, Handle<String> name);
- ZoneMapList* CallReceiverTypes(Call* expr,
- Handle<String> name,
- CallKind call_kind);
+ void LoadReceiverTypes(Property* expr,
+ Handle<String> name,
+ SmallMapList* types);
+ void StoreReceiverTypes(Assignment* expr,
+ Handle<String> name,
+ SmallMapList* types);
+ void CallReceiverTypes(Call* expr,
+ Handle<String> name,
+ CallKind call_kind,
+ SmallMapList* types);
void CollectKeyedReceiverTypes(unsigned ast_id,
- ZoneMapList* types);
+ SmallMapList* types);
CheckType GetCallCheckType(Call* expr);
Handle<JSObject> GetPrototypeForPrimitiveCheck(CheckType check);
bool LoadIsBuiltin(Property* expr, Builtins::Name id);
+ // TODO(1571) We can't use ToBooleanStub::Types as the return value because
+ // of various cycles in our headers. Death to tons of implementations in
+ // headers!! :-P
+ byte ToBooleanTypes(unsigned ast_id);
+
// Get type information for arithmetic operations and compares.
TypeInfo UnaryType(UnaryOperation* expr);
TypeInfo BinaryType(BinaryOperation* expr);
@@ -247,9 +259,10 @@
TypeInfo IncrementType(CountOperation* expr);
private:
- ZoneMapList* CollectReceiverTypes(unsigned ast_id,
- Handle<String> name,
- Code::Flags flags);
+ void CollectReceiverTypes(unsigned ast_id,
+ Handle<String> name,
+ Code::Flags flags,
+ SmallMapList* types);
void SetInfo(unsigned ast_id, Object* target);
diff --git a/src/utils.h b/src/utils.h
index ecdf1c7..785bc43 100644
--- a/src/utils.h
+++ b/src/utils.h
@@ -30,6 +30,7 @@
#include <stdlib.h>
#include <string.h>
+#include <climits>
#include "globals.h"
#include "checks.h"
@@ -885,6 +886,30 @@
DISALLOW_IMPLICIT_CONSTRUCTORS(SimpleStringBuilder);
};
+
+// A poor man's version of STL's bitset: A bit set of enums E (without explicit
+// values), fitting into an integral type T.
+template <class E, class T = int>
+class EnumSet {
+ public:
+ explicit EnumSet(T bits = 0) : bits_(bits) {}
+ bool IsEmpty() const { return bits_ == 0; }
+ bool Contains(E element) const { return (bits_ & Mask(element)) != 0; }
+ void Add(E element) { bits_ |= Mask(element); }
+ void Remove(E element) { bits_ &= ~Mask(element); }
+ T ToIntegral() const { return bits_; }
+
+ private:
+ T Mask(E element) const {
+ // The strange typing in ASSERT is necessary to avoid stupid warnings, see:
+ // http://gcc.gnu.org/bugzilla/show_bug.cgi?id=43680
+ ASSERT(element < static_cast<int>(sizeof(T) * CHAR_BIT));
+ return 1 << element;
+ }
+
+ T bits_;
+};
+
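EnumSet gives a typed Add/Remove/Contains/ToIntegral interface over a plain integer bit mask. A small usage sketch with an invented flag enum (the class is restated so the example compiles on its own):

    // Sketch only: ScannerFlag is made up; any small enum without explicit
    // values works the same way.
    #include <cassert>

    template <class E, class T = int>
    class EnumSet {
     public:
      explicit EnumSet(T bits = 0) : bits_(bits) {}
      bool IsEmpty() const { return bits_ == 0; }
      bool Contains(E element) const { return (bits_ & Mask(element)) != 0; }
      void Add(E element) { bits_ |= Mask(element); }
      void Remove(E element) { bits_ &= ~Mask(element); }
      T ToIntegral() const { return bits_; }
     private:
      T Mask(E element) const { return T(1) << element; }
      T bits_;
    };

    enum ScannerFlag { kHarmonyScoping, kStrictMode, kRegExpLiteral };

    int main() {
      EnumSet<ScannerFlag> flags;
      flags.Add(kStrictMode);
      flags.Add(kRegExpLiteral);
      flags.Remove(kRegExpLiteral);
      assert(flags.Contains(kStrictMode));
      assert(!flags.Contains(kHarmonyScoping));
      assert(flags.ToIntegral() == (1 << kStrictMode));  // cheap to persist or compare
      return 0;
    }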
} } // namespace v8::internal
#endif // V8_UTILS_H_
diff --git a/src/v8.cc b/src/v8.cc
index 36f835f..1e9b5dc 100644
--- a/src/v8.cc
+++ b/src/v8.cc
@@ -28,6 +28,7 @@
#include "v8.h"
#include "isolate.h"
+#include "elements.h"
#include "bootstrapper.h"
#include "debug.h"
#include "deoptimizer.h"
@@ -212,6 +213,8 @@
// Peephole optimization might interfere with deoptimization.
FLAG_peephole_optimization = !use_crankshaft_;
+
+ ElementsAccessor::InitializeOncePerProcess();
}
} } // namespace v8::internal
diff --git a/src/v8natives.js b/src/v8natives.js
index ff87804..982e18e 100644
--- a/src/v8natives.js
+++ b/src/v8natives.js
@@ -106,13 +106,16 @@
// Truncate number.
return string | 0;
}
+ string = TO_STRING_INLINE(string);
radix = radix | 0;
} else {
+ // The spec says ToString should be evaluated before ToInt32.
+ string = TO_STRING_INLINE(string);
radix = TO_INT32(radix);
if (!(radix == 0 || (2 <= radix && radix <= 36)))
return $NaN;
}
- string = TO_STRING_INLINE(string);
+
if (%_HasCachedArrayIndex(string) &&
(radix == 0 || radix == 10)) {
return %_GetCachedArrayIndex(string);
@@ -209,7 +212,7 @@
if (IS_UNDEFINED(this) && !IS_UNDETECTABLE(this)) {
return '[object Undefined]';
}
- if (IS_NULL(this)) return '[object Null]';
+ if (IS_NULL(this)) return '[object Null]';
return "[object " + %_ClassOf(ToObject(this)) + "]";
}
@@ -232,6 +235,10 @@
// ECMA-262 - 15.2.4.5
function ObjectHasOwnProperty(V) {
+ if (%IsJSProxy(this)) {
+ var handler = %GetHandler(this);
+ return CallTrap1(handler, "hasOwn", DerivedHasOwnTrap, TO_STRING_INLINE(V));
+ }
return %HasLocalProperty(TO_OBJECT_INLINE(this), TO_STRING_INLINE(V));
}
@@ -249,7 +256,12 @@
// ECMA-262 - 15.2.4.6
function ObjectPropertyIsEnumerable(V) {
- return %IsPropertyEnumerable(ToObject(this), ToString(V));
+ var P = ToString(V);
+ if (%IsJSProxy(this)) {
+ var desc = GetOwnProperty(this, P);
+ return IS_UNDEFINED(desc) ? false : desc.isEnumerable();
+ }
+ return %IsPropertyEnumerable(ToObject(this), P);
}
@@ -310,9 +322,7 @@
throw MakeTypeError("obj_ctor_property_non_object", ["keys"]);
if (%IsJSProxy(obj)) {
var handler = %GetHandler(obj);
- var keys = handler.keys;
- if (IS_UNDEFINED(keys)) keys = DerivedKeysTrap;
- var names = %_CallFunction(handler, keys);
+ var names = CallTrap0(handler, "keys", DerivedKeysTrap);
return ToStringArray(names);
}
return %LocalKeys(obj);
@@ -601,16 +611,42 @@
}
+// For Harmony proxies.
+function GetTrap(handler, name, defaultTrap) {
+ var trap = handler[name];
+ if (IS_UNDEFINED(trap)) {
+ if (IS_UNDEFINED(defaultTrap)) {
+ throw MakeTypeError("handler_trap_missing", [handler, name]);
+ }
+ trap = defaultTrap;
+ } else if (!IS_FUNCTION(trap)) {
+ throw MakeTypeError("handler_trap_must_be_callable", [handler, name]);
+ }
+ return trap;
+}
+
+
+function CallTrap0(handler, name, defaultTrap) {
+ return %_CallFunction(handler, GetTrap(handler, name, defaultTrap));
+}
+
+
+function CallTrap1(handler, name, defaultTrap, x) {
+ return %_CallFunction(handler, x, GetTrap(handler, name, defaultTrap));
+}
+
+
+function CallTrap2(handler, name, defaultTrap, x, y) {
+ return %_CallFunction(handler, x, y, GetTrap(handler, name, defaultTrap));
+}
+
+
// ES5 section 8.12.1.
-function GetOwnProperty(obj, p) {
+function GetOwnProperty(obj, v) {
+ var p = ToString(v);
if (%IsJSProxy(obj)) {
var handler = %GetHandler(obj);
- var getOwnProperty = handler.getOwnPropertyDescriptor;
- if (IS_UNDEFINED(getOwnProperty)) {
- throw MakeTypeError("handler_trap_missing",
- [handler, "getOwnPropertyDescriptor"]);
- }
- var descriptor = %_CallFunction(handler, p, getOwnProperty);
+ var descriptor = CallTrap1(handler, "getOwnPropertyDescriptor", void 0, p);
if (IS_UNDEFINED(descriptor)) return descriptor;
var desc = ToCompletePropertyDescriptor(descriptor);
if (!desc.isConfigurable()) {
@@ -623,7 +659,7 @@
// GetOwnProperty returns an array indexed by the constants
// defined in macros.py.
// If p is not a property on obj undefined is returned.
- var props = %GetOwnProperty(ToObject(obj), ToString(p));
+ var props = %GetOwnProperty(ToObject(obj), ToString(v));
// A false value here means that access checks failed.
if (props === false) return void 0;
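
The new GetTrap/CallTrap* helpers centralize the trap lookup used by the proxy paths above: an absent trap falls back to a derived default when one is supplied, an absent trap with no default raises handler_trap_missing, and a non-callable trap raises handler_trap_must_be_callable. A plain-JavaScript sketch of that decision logic (getTrap here is an illustrative stand-in, not an exported API):

  function getTrap(handler, name, defaultTrap) {
    var trap = handler[name];
    if (trap === undefined) {
      if (defaultTrap === undefined) {
        // corresponds to MakeTypeError("handler_trap_missing", ...)
        throw new TypeError("Proxy handler has no '" + name + "' trap");
      }
      return defaultTrap;
    }
    if (typeof trap !== "function") {
      // corresponds to MakeTypeError("handler_trap_must_be_callable", ...)
      throw new TypeError("Trap '" + name + "' is not callable");
    }
    return trap;
  }

  // "keys" may fall back to a derived default, while traps requested with
  // no default (e.g. "getOwnPropertyDescriptor" above) must be present.
  var handler = {};
  getTrap(handler, "keys", function () { return []; });   // returns the default
  // getTrap(handler, "getOwnPropertyDescriptor");         // throws TypeError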
@@ -635,11 +671,7 @@
// Harmony proxies.
function DefineProxyProperty(obj, p, attributes, should_throw) {
var handler = %GetHandler(obj);
- var defineProperty = handler.defineProperty;
- if (IS_UNDEFINED(defineProperty)) {
- throw MakeTypeError("handler_trap_missing", [handler, "defineProperty"]);
- }
- var result = %_CallFunction(handler, p, attributes, defineProperty);
+ var result = CallTrap2(handler, "defineProperty", void 0, p, attributes);
if (!ToBoolean(result)) {
if (should_throw) {
throw MakeTypeError("handler_returned_false",
@@ -868,12 +900,7 @@
// Special handling for proxies.
if (%IsJSProxy(obj)) {
var handler = %GetHandler(obj);
- var getOwnPropertyNames = handler.getOwnPropertyNames;
- if (IS_UNDEFINED(getOwnPropertyNames)) {
- throw MakeTypeError("handler_trap_missing",
- [handler, "getOwnPropertyNames"]);
- }
- var names = %_CallFunction(handler, getOwnPropertyNames);
+ var names = CallTrap0(handler, "getOwnPropertyNames", void 0);
return ToStringArray(names, "getOwnPropertyNames");
}
@@ -977,24 +1004,27 @@
}
+function GetOwnEnumerablePropertyNames(properties) {
+ var names = new InternalArray();
+ for (var key in properties) {
+ if (%HasLocalProperty(properties, key)) {
+ names.push(key);
+ }
+ }
+ return names;
+}
+
+
// ES5 section 15.2.3.7.
function ObjectDefineProperties(obj, properties) {
if (!IS_SPEC_OBJECT(obj))
throw MakeTypeError("obj_ctor_property_non_object", ["defineProperties"]);
var props = ToObject(properties);
- var key_values = [];
- for (var key in props) {
- if (%HasLocalProperty(props, key)) {
- key_values.push(key);
- var value = props[key];
- var desc = ToPropertyDescriptor(value);
- key_values.push(desc);
- }
- }
- for (var i = 0; i < key_values.length; i += 2) {
- var key = key_values[i];
- var desc = key_values[i + 1];
- DefineOwnProperty(obj, key, desc, true);
+ var names = GetOwnEnumerablePropertyNames(props);
+ for (var i = 0; i < names.length; i++) {
+ var name = names[i];
+ var desc = ToPropertyDescriptor(props[name]);
+ DefineOwnProperty(obj, name, desc, true);
}
return obj;
}
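
Splitting out GetOwnEnumerablePropertyNames makes the collection step explicit: ObjectDefineProperties only honours the properties object's own enumerable keys, so inherited descriptors are ignored. For example:

  var proto = { inherited: { value: 1 } };
  var props = Object.create(proto);
  props.own = { value: 42, enumerable: true };

  var o = Object.defineProperties({}, props);
  // o.own === 42
  // o.hasOwnProperty("inherited") === false   (inherited key is skipped)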
@@ -1003,11 +1033,7 @@
// Harmony proxies.
function ProxyFix(obj) {
var handler = %GetHandler(obj);
- var fix = handler.fix;
- if (IS_UNDEFINED(fix)) {
- throw MakeTypeError("handler_trap_missing", [handler, "fix"]);
- }
- var props = %_CallFunction(handler, fix);
+ var props = CallTrap0(handler, "fix", void 0);
if (IS_UNDEFINED(props)) {
throw MakeTypeError("handler_returned_undefined", [handler, "fix"]);
}
@@ -1466,9 +1492,9 @@
// Set the correct length.
var length = (this.length - argc_bound) > 0 ? this.length - argc_bound : 0;
- %FunctionSetLength(result, length);
%FunctionRemovePrototype(result);
%FunctionSetBound(result);
+ %BoundFunctionSetLength(result, length);
return result;
}
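
The reordered calls mark the function as bound before its length is written through the new %BoundFunctionSetLength runtime entry; the observable result is unchanged: a bound function's length is the target's length minus the number of pre-bound arguments, never below zero.

  function f(a, b, c) {}
  // 3 formal parameters, 1 bound argument:
  f.bind(null, 1).length;          // 2
  // More bound arguments than parameters clamps at zero:
  f.bind(null, 1, 2, 3, 4).length; // 0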
diff --git a/src/v8threads.cc b/src/v8threads.cc
index c9a8bb6..3881d66 100644
--- a/src/v8threads.cc
+++ b/src/v8threads.cc
@@ -94,6 +94,11 @@
}
+bool Locker::IsActive() {
+ return active_;
+}
+
+
Locker::~Locker() {
ASSERT(isolate_->thread_manager()->IsLockedByCurrentThread());
if (has_lock_) {
@@ -300,7 +305,9 @@
ThreadManager::~ThreadManager() {
- // TODO(isolates): Destroy mutexes.
+ delete mutex_;
+ delete free_anchor_;
+ delete in_use_anchor_;
}
diff --git a/src/variables.cc b/src/variables.cc
index 67150ea..e82e674 100644
--- a/src/variables.cc
+++ b/src/variables.cc
@@ -41,6 +41,7 @@
switch (mode) {
case VAR: return "VAR";
case CONST: return "CONST";
+ case LET: return "LET";
case DYNAMIC: return "DYNAMIC";
case DYNAMIC_GLOBAL: return "DYNAMIC_GLOBAL";
case DYNAMIC_LOCAL: return "DYNAMIC_LOCAL";
diff --git a/src/variables.h b/src/variables.h
index a9c06d1..2095555 100644
--- a/src/variables.h
+++ b/src/variables.h
@@ -46,6 +46,8 @@
CONST, // declared via 'const' declarations
+ LET, // declared via 'let' declarations
+
// Variables introduced by the compiler:
DYNAMIC, // always require dynamic lookup (we don't know
// the declaration)
@@ -99,6 +101,7 @@
return is_accessed_from_inner_scope_;
}
void MarkAsAccessedFromInnerScope() {
+ ASSERT(mode_ != TEMPORARY);
is_accessed_from_inner_scope_ = true;
}
bool is_used() { return is_used_; }
diff --git a/src/version.cc b/src/version.cc
index fcf6f88..1a7c751 100644
--- a/src/version.cc
+++ b/src/version.cc
@@ -33,9 +33,9 @@
// NOTE these macros are used by the SCons build script so their names
// cannot be changed without changing the SCons build script.
#define MAJOR_VERSION 3
-#define MINOR_VERSION 4
-#define BUILD_NUMBER 14
-#define PATCH_LEVEL 35
+#define MINOR_VERSION 5
+#define BUILD_NUMBER 10
+#define PATCH_LEVEL 24
// Use 1 for candidates and 0 otherwise.
// (Boolean macro values are not supported by all preprocessors.)
#define IS_CANDIDATE_VERSION 0
diff --git a/src/weakmap.js b/src/weakmap.js
new file mode 100644
index 0000000..3d261e5
--- /dev/null
+++ b/src/weakmap.js
@@ -0,0 +1,103 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+// This file relies on the fact that the following declaration has been made
+// in runtime.js:
+// const $Object = global.Object;
+const $WeakMap = global.WeakMap;
+
+// -------------------------------------------------------------------
+
+function WeakMapConstructor() {
+ if (%_IsConstructCall()) {
+ %WeakMapInitialize(this);
+ } else {
+ return new $WeakMap();
+ }
+}
+
+
+function WeakMapGet(key) {
+ if (!IS_SPEC_OBJECT(key)) {
+ throw %MakeTypeError('invalid_weakmap_key', [this, key]);
+ }
+ return %WeakMapGet(this, key);
+}
+
+
+function WeakMapSet(key, value) {
+ if (!IS_SPEC_OBJECT(key)) {
+ throw %MakeTypeError('invalid_weakmap_key', [this, key]);
+ }
+ return %WeakMapSet(this, key, value);
+}
+
+
+function WeakMapHas(key) {
+ if (!IS_SPEC_OBJECT(key)) {
+ throw %MakeTypeError('invalid_weakmap_key', [this, key]);
+ }
+ return !IS_UNDEFINED(%WeakMapGet(this, key));
+}
+
+
+function WeakMapDelete(key) {
+ if (!IS_SPEC_OBJECT(key)) {
+ throw %MakeTypeError('invalid_weakmap_key', [this, key]);
+ }
+ if (!IS_UNDEFINED(%WeakMapGet(this, key))) {
+ %WeakMapSet(this, key, void 0);
+ return true;
+ } else {
+ return false;
+ }
+}
+
+// -------------------------------------------------------------------
+
+function SetupWeakMap() {
+ // Set up the WeakMap constructor function.
+ %SetCode($WeakMap, WeakMapConstructor);
+
+ // Set up the WeakMap prototype object.
+ %FunctionSetPrototype($WeakMap, new $WeakMap());
+
+ // Set up the constructor property on the WeakMap prototype object.
+ %SetProperty($WeakMap.prototype, "constructor", $WeakMap, DONT_ENUM);
+
+ // Set up the non-enumerable functions on the WeakMap prototype object.
+ InstallFunctionsOnHiddenPrototype($WeakMap.prototype, DONT_ENUM, $Array(
+ "get", WeakMapGet,
+ "set", WeakMapSet,
+ "has", WeakMapHas,
+ "delete", WeakMapDelete
+ ));
+}
+
+
+SetupWeakMap();
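
Taken together, these natives give WeakMap the usual object-keyed surface: get, set, has and delete, with non-object keys rejected up front. A short usage sketch (this assumes a build in which the experimental WeakMap natives are enabled):

  var wm  = new WeakMap();
  var key = {};

  wm.set(key, "payload");
  wm.has(key);        // true
  wm.get(key);        // "payload"
  wm.delete(key);     // true; a second delete of the same key returns false

  // Primitive keys fail the IS_SPEC_OBJECT check above:
  // wm.set("str", 1) throws a TypeError ("invalid_weakmap_key").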
diff --git a/src/x64/builtins-x64.cc b/src/x64/builtins-x64.cc
index 0763989..7c6f7e3 100644
--- a/src/x64/builtins-x64.cc
+++ b/src/x64/builtins-x64.cc
@@ -139,7 +139,7 @@
// rdi: constructor
__ movq(rax, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a NULL and a Smi
- ASSERT(kSmiTag == 0);
+ STATIC_ASSERT(kSmiTag == 0);
__ JumpIfSmi(rax, &rt_call);
// rdi: constructor
// rax: initial map (if proven valid below)
@@ -1283,7 +1283,7 @@
// Initial map for the builtin Array functions should be maps.
__ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a NULL and a Smi.
- ASSERT(kSmiTag == 0);
+ STATIC_ASSERT(kSmiTag == 0);
Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
__ Check(not_smi, "Unexpected initial map for Array function");
__ CmpObjectType(rbx, MAP_TYPE, rcx);
@@ -1317,7 +1317,7 @@
// Initial map for the builtin Array function should be a map.
__ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a NULL and a Smi.
- ASSERT(kSmiTag == 0);
+ STATIC_ASSERT(kSmiTag == 0);
Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
__ Check(not_smi, "Unexpected initial map for Array function");
__ CmpObjectType(rbx, MAP_TYPE, rcx);
diff --git a/src/x64/code-stubs-x64.cc b/src/x64/code-stubs-x64.cc
index 1a6efcb..9237a0a 100644
--- a/src/x64/code-stubs-x64.cc
+++ b/src/x64/code-stubs-x64.cc
@@ -230,68 +230,139 @@
}
-// The stub returns zero for false, and a non-zero value for true.
+// The stub expects its argument on the stack and returns its result in tos_:
+// zero for false, and a non-zero value for true.
void ToBooleanStub::Generate(MacroAssembler* masm) {
- Label false_result, true_result, not_string;
+ Label patch;
+ const Register argument = rax;
const Register map = rdx;
- __ movq(rax, Operand(rsp, 1 * kPointerSize));
+ if (!types_.IsEmpty()) {
+ __ movq(argument, Operand(rsp, 1 * kPointerSize));
+ }
// undefined -> false
- __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
- __ j(equal, &false_result);
+ CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false);
// Boolean -> its value
- __ CompareRoot(rax, Heap::kFalseValueRootIndex);
- __ j(equal, &false_result);
- __ CompareRoot(rax, Heap::kTrueValueRootIndex);
- __ j(equal, &true_result);
-
- // Smis: 0 -> false, all other -> true
- __ Cmp(rax, Smi::FromInt(0));
- __ j(equal, &false_result);
- __ JumpIfSmi(rax, &true_result);
+ CheckOddball(masm, BOOLEAN, Heap::kFalseValueRootIndex, false);
+ CheckOddball(masm, BOOLEAN, Heap::kTrueValueRootIndex, true);
// 'null' -> false.
- __ CompareRoot(rax, Heap::kNullValueRootIndex);
- __ j(equal, &false_result, Label::kNear);
+ CheckOddball(masm, NULL_TYPE, Heap::kNullValueRootIndex, false);
- // Get the map of the heap object.
- __ movq(map, FieldOperand(rax, HeapObject::kMapOffset));
+ if (types_.Contains(SMI)) {
+ // Smis: 0 -> false, all other -> true
+ Label not_smi;
+ __ JumpIfNotSmi(argument, &not_smi, Label::kNear);
+ // argument contains the correct return value already
+ if (!tos_.is(argument)) {
+ __ movq(tos_, argument);
+ }
+ __ ret(1 * kPointerSize);
+ __ bind(&not_smi);
+ } else if (types_.NeedsMap()) {
+ // If we need a map later and have a Smi -> patch.
+ __ JumpIfSmi(argument, &patch, Label::kNear);
+ }
- // Undetectable -> false.
- __ testb(FieldOperand(map, Map::kBitFieldOffset),
- Immediate(1 << Map::kIsUndetectable));
- __ j(not_zero, &false_result, Label::kNear);
+ if (types_.NeedsMap()) {
+ __ movq(map, FieldOperand(argument, HeapObject::kMapOffset));
- // JavaScript object -> true.
- __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
- __ j(above_equal, &true_result, Label::kNear);
+ if (types_.CanBeUndetectable()) {
+ __ testb(FieldOperand(map, Map::kBitFieldOffset),
+ Immediate(1 << Map::kIsUndetectable));
+ // Undetectable -> false.
+ Label not_undetectable;
+ __ j(zero, &not_undetectable, Label::kNear);
+ __ Set(tos_, 0);
+ __ ret(1 * kPointerSize);
+ __ bind(&not_undetectable);
+ }
+ }
- // String value -> false iff empty.
- __ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
- __ j(above_equal, &not_string, Label::kNear);
- __ cmpq(FieldOperand(rax, String::kLengthOffset), Immediate(0));
- __ j(zero, &false_result, Label::kNear);
- __ jmp(&true_result, Label::kNear);
+ if (types_.Contains(SPEC_OBJECT)) {
+ // spec object -> true.
+ Label not_js_object;
+ __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
+ __ j(below, &not_js_object, Label::kNear);
+ // argument contains the correct return value already.
+ if (!tos_.is(argument)) {
+ __ Set(tos_, 1);
+ }
+ __ ret(1 * kPointerSize);
+ __ bind(&not_js_object);
+ }
- __ bind(&not_string);
- // HeapNumber -> false iff +0, -0, or NaN.
- // These three cases set the zero flag when compared to zero using ucomisd.
- __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
- __ j(not_equal, &true_result, Label::kNear);
- __ xorps(xmm0, xmm0);
- __ ucomisd(xmm0, FieldOperand(rax, HeapNumber::kValueOffset));
- __ j(zero, &false_result, Label::kNear);
- // Fall through to |true_result|.
+ if (types_.Contains(STRING)) {
+ // String value -> false iff empty.
+ Label not_string;
+ __ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
+ __ j(above_equal, &not_string, Label::kNear);
+ __ movq(tos_, FieldOperand(argument, String::kLengthOffset));
+ __ ret(1 * kPointerSize); // the string length is OK as the return value
+ __ bind(&not_string);
+ }
- // Return 1/0 for true/false in tos_.
- __ bind(&true_result);
- __ Set(tos_, 1);
- __ ret(1 * kPointerSize);
- __ bind(&false_result);
- __ Set(tos_, 0);
- __ ret(1 * kPointerSize);
+ if (types_.Contains(HEAP_NUMBER)) {
+ // heap number -> false iff +0, -0, or NaN.
+ Label not_heap_number, false_result;
+ __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
+ __ j(not_equal, &not_heap_number, Label::kNear);
+ __ xorps(xmm0, xmm0);
+ __ ucomisd(xmm0, FieldOperand(argument, HeapNumber::kValueOffset));
+ __ j(zero, &false_result, Label::kNear);
+ // argument contains the correct return value already.
+ if (!tos_.is(argument)) {
+ __ Set(tos_, 1);
+ }
+ __ ret(1 * kPointerSize);
+ __ bind(&false_result);
+ __ Set(tos_, 0);
+ __ ret(1 * kPointerSize);
+ __ bind(&not_heap_number);
+ }
+
+ __ bind(&patch);
+ GenerateTypeTransition(masm);
+}
+
+
+void ToBooleanStub::CheckOddball(MacroAssembler* masm,
+ Type type,
+ Heap::RootListIndex value,
+ bool result) {
+ const Register argument = rax;
+ if (types_.Contains(type)) {
+ // If we see an expected oddball, return its ToBoolean value tos_.
+ Label different_value;
+ __ CompareRoot(argument, value);
+ __ j(not_equal, &different_value, Label::kNear);
+ if (!result) {
+ // If we have to return zero, there is no way around clearing tos_.
+ __ Set(tos_, 0);
+ } else if (!tos_.is(argument)) {
+ // If we have to return non-zero, we can re-use the argument if it is the
+ // same register as the result, because we never see Smi-zero here.
+ __ Set(tos_, 1);
+ }
+ __ ret(1 * kPointerSize);
+ __ bind(&different_value);
+ }
+}
+
+
+void ToBooleanStub::GenerateTypeTransition(MacroAssembler* masm) {
+ __ pop(rcx); // Get return address, operand is now on top of stack.
+ __ Push(Smi::FromInt(tos_.code()));
+ __ Push(Smi::FromInt(types_.ToByte()));
+ __ push(rcx); // Push return address.
+ // Patch the caller to an appropriate specialized stub and return the
+ // operation result to the caller of the stub.
+ __ TailCallExternalReference(
+ ExternalReference(IC_Utility(IC::kToBoolean_Patch), masm->isolate()),
+ 3,
+ 1);
}
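
The rewritten stub dispatches only on the input types recorded in types_ (a ToBooleanStub::Types bit set) and patches itself via GenerateTypeTransition when it meets a type it has not seen, but the truth table it implements is just the standard ToBoolean semantics:

  // Values the stub classifies as false:
  Boolean(undefined);   // false  (UNDEFINED oddball)
  Boolean(null);        // false  (NULL_TYPE oddball)
  Boolean(false);       // false  (BOOLEAN oddball)
  Boolean(0);           // false  (Smi zero)
  Boolean("");          // false  (STRING of length 0)
  Boolean(NaN);         // false  (HEAP_NUMBER: +0, -0 and NaN)

  // Everything else is true:
  Boolean({});          // true   (SPEC_OBJECT)
  Boolean("0");         // true   (non-empty string)
  Boolean(3.14);        // true   (non-zero, non-NaN heap number)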
@@ -2303,7 +2374,6 @@
__ testq(kScratchRegister, kScratchRegister);
__ j(zero, &runtime);
-
// Check that the first argument is a JSRegExp object.
__ movq(rax, Operand(rsp, kJSRegExpOffset));
__ JumpIfSmi(rax, &runtime);
@@ -2374,10 +2444,14 @@
__ cmpl(rdx, rdi);
__ j(greater, &runtime);
+ // Reset offset for possibly sliced string.
+ __ Set(r14, 0);
// rax: RegExp data (FixedArray)
// Check the representation and encoding of the subject string.
Label seq_ascii_string, seq_two_byte_string, check_code;
__ movq(rdi, Operand(rsp, kSubjectOffset));
+ // Make a copy of the original subject string.
+ __ movq(r15, rdi);
__ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
__ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
// First check for flat two byte string.
@@ -2386,28 +2460,40 @@
STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
__ j(zero, &seq_two_byte_string, Label::kNear);
// Any other flat string must be a flat ascii string.
- __ testb(rbx, Immediate(kIsNotStringMask | kStringRepresentationMask));
+ __ andb(rbx, Immediate(kIsNotStringMask | kStringRepresentationMask));
__ j(zero, &seq_ascii_string, Label::kNear);
- // Check for flat cons string.
+ // Check for flat cons string or sliced string.
// A flat cons string is a cons string where the second part is the empty
// string. In that case the subject string is just the first part of the cons
// string. Also in this case the first part of the cons string is known to be
// a sequential string or an external string.
- STATIC_ASSERT(kExternalStringTag !=0);
- STATIC_ASSERT((kConsStringTag & kExternalStringTag) == 0);
- __ testb(rbx, Immediate(kIsNotStringMask | kExternalStringTag));
- __ j(not_zero, &runtime);
- // String is a cons string.
+ // In the case of a sliced string its offset has to be taken into account.
+ Label cons_string, check_encoding;
+ STATIC_ASSERT(kConsStringTag < kExternalStringTag);
+ STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
+ __ cmpq(rbx, Immediate(kExternalStringTag));
+ __ j(less, &cons_string, Label::kNear);
+ __ j(equal, &runtime);
+
+ // String is sliced.
+ __ SmiToInteger32(r14, FieldOperand(rdi, SlicedString::kOffsetOffset));
+ __ movq(rdi, FieldOperand(rdi, SlicedString::kParentOffset));
+ // r14: slice offset
+ // r15: original subject string
+ // rdi: parent string
+ __ jmp(&check_encoding, Label::kNear);
+ // String is a cons string, check whether it is flat.
+ __ bind(&cons_string);
__ CompareRoot(FieldOperand(rdi, ConsString::kSecondOffset),
Heap::kEmptyStringRootIndex);
__ j(not_equal, &runtime);
__ movq(rdi, FieldOperand(rdi, ConsString::kFirstOffset));
+ // rdi: first part of cons string or parent of sliced string.
+ // rbx: map of first part of cons string or map of parent of sliced string.
+ // Is first part of cons or parent of slice a flat two byte string?
+ __ bind(&check_encoding);
__ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
- // String is a cons string with empty second part.
- // rdi: first part of cons string.
- // rbx: map of first part of cons string.
- // Is first part a flat two byte string?
__ testb(FieldOperand(rbx, Map::kInstanceTypeOffset),
Immediate(kStringRepresentationMask | kStringEncodingMask));
STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0);
@@ -2504,33 +2590,40 @@
// rbx: previous index
// rcx: encoding of subject string (1 if ascii 0 if two_byte);
// r11: code
+ // r14: slice offset
+ // r15: original subject string
- // Argument 4: End of string data
- // Argument 3: Start of string data
- Label setup_two_byte, setup_rest;
- __ testb(rcx, rcx); // Last use of rcx as encoding of subject string.
- __ j(zero, &setup_two_byte, Label::kNear);
- __ SmiToInteger32(rcx, FieldOperand(rdi, String::kLengthOffset));
- __ lea(arg4, FieldOperand(rdi, rcx, times_1, SeqAsciiString::kHeaderSize));
- __ lea(arg3, FieldOperand(rdi, rbx, times_1, SeqAsciiString::kHeaderSize));
- __ jmp(&setup_rest, Label::kNear);
- __ bind(&setup_two_byte);
- __ SmiToInteger32(rcx, FieldOperand(rdi, String::kLengthOffset));
- __ lea(arg4, FieldOperand(rdi, rcx, times_2, SeqTwoByteString::kHeaderSize));
- __ lea(arg3, FieldOperand(rdi, rbx, times_2, SeqTwoByteString::kHeaderSize));
-
- __ bind(&setup_rest);
// Argument 2: Previous index.
__ movq(arg2, rbx);
- // Argument 1: Subject string.
-#ifdef _WIN64
- __ movq(arg1, rdi);
-#else
- // Already there in AMD64 calling convention.
- ASSERT(arg1.is(rdi));
- USE(arg1);
-#endif
+ // Argument 4: End of string data
+ // Argument 3: Start of string data
+ Label setup_two_byte, setup_rest, got_length, length_not_from_slice;
+ // Prepare start and end index of the input.
+ // Load the length from the original sliced string if that is the case.
+ __ addq(rbx, r14);
+ __ SmiToInteger32(arg3, FieldOperand(r15, String::kLengthOffset));
+ __ addq(r14, arg3); // Using arg3 as scratch.
+
+ // rbx: start index of the input
+ // r14: end index of the input
+ // r15: original subject string
+ __ testb(rcx, rcx); // Last use of rcx as encoding of subject string.
+ __ j(zero, &setup_two_byte, Label::kNear);
+ __ lea(arg4, FieldOperand(rdi, r14, times_1, SeqAsciiString::kHeaderSize));
+ __ lea(arg3, FieldOperand(rdi, rbx, times_1, SeqAsciiString::kHeaderSize));
+ __ jmp(&setup_rest, Label::kNear);
+ __ bind(&setup_two_byte);
+ __ lea(arg4, FieldOperand(rdi, r14, times_2, SeqTwoByteString::kHeaderSize));
+ __ lea(arg3, FieldOperand(rdi, rbx, times_2, SeqTwoByteString::kHeaderSize));
+ __ bind(&setup_rest);
+
+ // Argument 1: Original subject string.
+ // The original subject is in the previous stack frame. Therefore we have to
+ // use rbp, which points exactly to one pointer size below the previous rsp.
+ // (Because creating a new stack frame pushes the previous rbp onto the stack
+ // and thereby moves up rsp by one kPointerSize.)
+ __ movq(arg1, r15);
// Locate the code entry and call it.
__ addq(r11, Immediate(Code::kHeaderSize - kHeapObjectTag));
@@ -3780,6 +3873,7 @@
Label flat_string;
Label ascii_string;
Label got_char_code;
+ Label sliced_string;
// If the receiver is a smi trigger the non-string case.
__ JumpIfSmi(object_, receiver_not_string_);
@@ -3808,25 +3902,39 @@
__ j(zero, &flat_string);
// Handle non-flat strings.
- __ testb(result_, Immediate(kIsConsStringMask));
- __ j(zero, &call_runtime_);
+ __ and_(result_, Immediate(kStringRepresentationMask));
+ STATIC_ASSERT(kConsStringTag < kExternalStringTag);
+ STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
+ __ cmpb(result_, Immediate(kExternalStringTag));
+ __ j(greater, &sliced_string);
+ __ j(equal, &call_runtime_);
// ConsString.
// Check whether the right hand side is the empty string (i.e. if
// this is really a flat string in a cons string). If that is not
// the case we would rather go to the runtime system now to flatten
// the string.
+ Label assure_seq_string;
__ CompareRoot(FieldOperand(object_, ConsString::kSecondOffset),
Heap::kEmptyStringRootIndex);
__ j(not_equal, &call_runtime_);
// Get the first of the two strings and load its instance type.
__ movq(object_, FieldOperand(object_, ConsString::kFirstOffset));
+ __ jmp(&assure_seq_string, Label::kNear);
+
+ // SlicedString, unpack and add offset.
+ __ bind(&sliced_string);
+ __ addq(scratch_, FieldOperand(object_, SlicedString::kOffsetOffset));
+ __ movq(object_, FieldOperand(object_, SlicedString::kParentOffset));
+
+ __ bind(&assure_seq_string);
__ movq(result_, FieldOperand(object_, HeapObject::kMapOffset));
__ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
// If the first cons component is also non-flat, then go to runtime.
STATIC_ASSERT(kSeqStringTag == 0);
__ testb(result_, Immediate(kStringRepresentationMask));
__ j(not_zero, &call_runtime_);
+ __ jmp(&flat_string);
// Check for 1-byte or 2-byte string.
__ bind(&flat_string);
@@ -4137,6 +4245,8 @@
__ and_(rcx, Immediate(kStringRepresentationMask));
__ cmpl(rcx, Immediate(kExternalStringTag));
__ j(equal, &string_add_runtime);
+ // We cannot encounter sliced strings here since:
+ STATIC_ASSERT(SlicedString::kMinLength >= String::kMinNonFlatLength);
// Now check if both strings are ascii strings.
// rax: first string
// rbx: length of resulting flat string
@@ -4529,6 +4639,9 @@
void SubStringStub::Generate(MacroAssembler* masm) {
Label runtime;
+ if (FLAG_string_slices) {
+ __ jmp(&runtime);
+ }
// Stack frame on entry.
// rsp[0]: return address
// rsp[8]: to
diff --git a/src/x64/deoptimizer-x64.cc b/src/x64/deoptimizer-x64.cc
index 2a31f28..b52e659 100644
--- a/src/x64/deoptimizer-x64.cc
+++ b/src/x64/deoptimizer-x64.cc
@@ -38,7 +38,7 @@
namespace internal {
-int Deoptimizer::table_entry_size_ = 10;
+const int Deoptimizer::table_entry_size_ = 10;
int Deoptimizer::patch_size() {
@@ -605,8 +605,6 @@
output_frame->SetContinuation(
reinterpret_cast<intptr_t>(continuation->entry()));
}
-
- if (output_count_ - 1 == frame_index) iterator->Done();
}
diff --git a/src/x64/disasm-x64.cc b/src/x64/disasm-x64.cc
index 14c95bc..1b8871f 100644
--- a/src/x64/disasm-x64.cc
+++ b/src/x64/disasm-x64.cc
@@ -58,7 +58,7 @@
};
-static ByteMnemonic two_operands_instr[] = {
+static const ByteMnemonic two_operands_instr[] = {
{ 0x00, BYTE_OPER_REG_OP_ORDER, "add" },
{ 0x01, OPER_REG_OP_ORDER, "add" },
{ 0x02, BYTE_REG_OPER_OP_ORDER, "add" },
@@ -105,7 +105,7 @@
};
-static ByteMnemonic zero_operands_instr[] = {
+static const ByteMnemonic zero_operands_instr[] = {
{ 0xC3, UNSET_OP_ORDER, "ret" },
{ 0xC9, UNSET_OP_ORDER, "leave" },
{ 0xF4, UNSET_OP_ORDER, "hlt" },
@@ -125,14 +125,14 @@
};
-static ByteMnemonic call_jump_instr[] = {
+static const ByteMnemonic call_jump_instr[] = {
{ 0xE8, UNSET_OP_ORDER, "call" },
{ 0xE9, UNSET_OP_ORDER, "jmp" },
{ -1, UNSET_OP_ORDER, "" }
};
-static ByteMnemonic short_immediate_instr[] = {
+static const ByteMnemonic short_immediate_instr[] = {
{ 0x05, UNSET_OP_ORDER, "add" },
{ 0x0D, UNSET_OP_ORDER, "or" },
{ 0x15, UNSET_OP_ORDER, "adc" },
@@ -145,7 +145,7 @@
};
-static const char* conditional_code_suffix[] = {
+static const char* const conditional_code_suffix[] = {
"o", "no", "c", "nc", "z", "nz", "na", "a",
"s", "ns", "pe", "po", "l", "ge", "le", "g"
};
@@ -193,7 +193,7 @@
InstructionDesc instructions_[256];
void Clear();
void Init();
- void CopyTable(ByteMnemonic bm[], InstructionType type);
+ void CopyTable(const ByteMnemonic bm[], InstructionType type);
void SetTableRange(InstructionType type, byte start, byte end, bool byte_size,
const char* mnem);
void AddJumpConditionalShort();
@@ -228,7 +228,8 @@
}
-void InstructionTable::CopyTable(ByteMnemonic bm[], InstructionType type) {
+void InstructionTable::CopyTable(const ByteMnemonic bm[],
+ InstructionType type) {
for (int i = 0; bm[i].b >= 0; i++) {
InstructionDesc* id = &instructions_[bm[i].b];
id->mnem = bm[i].mnem;
diff --git a/src/x64/frames-x64.h b/src/x64/frames-x64.h
index b14267c..7012c76 100644
--- a/src/x64/frames-x64.h
+++ b/src/x64/frames-x64.h
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -50,12 +50,13 @@
class StackHandlerConstants : public AllStatic {
public:
- static const int kNextOffset = 0 * kPointerSize;
- static const int kFPOffset = 1 * kPointerSize;
- static const int kStateOffset = 2 * kPointerSize;
- static const int kPCOffset = 3 * kPointerSize;
+ static const int kNextOffset = 0 * kPointerSize;
+ static const int kContextOffset = 1 * kPointerSize;
+ static const int kFPOffset = 2 * kPointerSize;
+ static const int kStateOffset = 3 * kPointerSize;
+ static const int kPCOffset = 4 * kPointerSize;
- static const int kSize = 4 * kPointerSize;
+ static const int kSize = kPCOffset + kPointerSize;
};
diff --git a/src/x64/full-codegen-x64.cc b/src/x64/full-codegen-x64.cc
index 23c2bf8..eca349e 100644
--- a/src/x64/full-codegen-x64.cc
+++ b/src/x64/full-codegen-x64.cc
@@ -45,7 +45,6 @@
static unsigned GetPropertyId(Property* property) {
- if (property->is_synthetic()) return AstNode::kNoNumber;
return property->id();
}
@@ -665,95 +664,69 @@
Comment cmnt(masm_, "[ Declaration");
ASSERT(variable != NULL); // Must have been resolved.
Slot* slot = variable->AsSlot();
- Property* prop = variable->AsProperty();
-
- if (slot != NULL) {
- switch (slot->type()) {
- case Slot::PARAMETER:
- case Slot::LOCAL:
- if (mode == Variable::CONST) {
- __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
- __ movq(Operand(rbp, SlotOffset(slot)), kScratchRegister);
- } else if (function != NULL) {
- VisitForAccumulatorValue(function);
- __ movq(Operand(rbp, SlotOffset(slot)), result_register());
- }
- break;
-
- case Slot::CONTEXT:
- // We bypass the general EmitSlotSearch because we know more about
- // this specific context.
-
- // The variable in the decl always resides in the current function
- // context.
- ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
- if (FLAG_debug_code) {
- // Check that we're not inside a with or catch context.
- __ movq(rbx, FieldOperand(rsi, HeapObject::kMapOffset));
- __ CompareRoot(rbx, Heap::kWithContextMapRootIndex);
- __ Check(not_equal, "Declaration in with context.");
- __ CompareRoot(rbx, Heap::kCatchContextMapRootIndex);
- __ Check(not_equal, "Declaration in catch context.");
- }
- if (mode == Variable::CONST) {
- __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
- __ movq(ContextOperand(rsi, slot->index()), kScratchRegister);
- // No write barrier since the hole value is in old space.
- } else if (function != NULL) {
- VisitForAccumulatorValue(function);
- __ movq(ContextOperand(rsi, slot->index()), result_register());
- int offset = Context::SlotOffset(slot->index());
- __ movq(rbx, rsi);
- __ RecordWrite(rbx, offset, result_register(), rcx);
- }
- break;
-
- case Slot::LOOKUP: {
- __ push(rsi);
- __ Push(variable->name());
- // Declaration nodes are always introduced in one of two modes.
- ASSERT(mode == Variable::VAR || mode == Variable::CONST);
- PropertyAttributes attr = (mode == Variable::VAR) ? NONE : READ_ONLY;
- __ Push(Smi::FromInt(attr));
- // Push initial value, if any.
- // Note: For variables we must not push an initial value (such as
- // 'undefined') because we may have a (legal) redeclaration and we
- // must not destroy the current value.
- if (mode == Variable::CONST) {
- __ PushRoot(Heap::kTheHoleValueRootIndex);
- } else if (function != NULL) {
- VisitForStackValue(function);
- } else {
- __ Push(Smi::FromInt(0)); // no initial value!
- }
- __ CallRuntime(Runtime::kDeclareContextSlot, 4);
- break;
+ ASSERT(slot != NULL);
+ switch (slot->type()) {
+ case Slot::PARAMETER:
+ case Slot::LOCAL:
+ if (function != NULL) {
+ VisitForAccumulatorValue(function);
+ __ movq(Operand(rbp, SlotOffset(slot)), result_register());
+ } else if (mode == Variable::CONST || mode == Variable::LET) {
+ __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
+ __ movq(Operand(rbp, SlotOffset(slot)), kScratchRegister);
}
- }
+ break;
- } else if (prop != NULL) {
- // A const declaration aliasing a parameter is an illegal redeclaration.
- ASSERT(mode != Variable::CONST);
- if (function != NULL) {
- // We are declaring a function that rewrites to a property.
- // Use (keyed) IC to set the initial value. We cannot visit the
- // rewrite because it's shared and we risk recording duplicate AST
- // IDs for bailouts from optimized code.
- ASSERT(prop->obj()->AsVariableProxy() != NULL);
- { AccumulatorValueContext for_object(this);
- EmitVariableLoad(prop->obj()->AsVariableProxy());
+ case Slot::CONTEXT:
+ // We bypass the general EmitSlotSearch because we know more about
+ // this specific context.
+
+ // The variable in the decl always resides in the current function
+ // context.
+ ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
+ if (FLAG_debug_code) {
+ // Check that we're not inside a with or catch context.
+ __ movq(rbx, FieldOperand(rsi, HeapObject::kMapOffset));
+ __ CompareRoot(rbx, Heap::kWithContextMapRootIndex);
+ __ Check(not_equal, "Declaration in with context.");
+ __ CompareRoot(rbx, Heap::kCatchContextMapRootIndex);
+ __ Check(not_equal, "Declaration in catch context.");
}
- __ push(rax);
- VisitForAccumulatorValue(function);
- __ pop(rdx);
- ASSERT(prop->key()->AsLiteral() != NULL &&
- prop->key()->AsLiteral()->handle()->IsSmi());
- __ Move(rcx, prop->key()->AsLiteral()->handle());
+ if (function != NULL) {
+ VisitForAccumulatorValue(function);
+ __ movq(ContextOperand(rsi, slot->index()), result_register());
+ int offset = Context::SlotOffset(slot->index());
+ __ movq(rbx, rsi);
+ __ RecordWrite(rbx, offset, result_register(), rcx);
+ } else if (mode == Variable::CONST || mode == Variable::LET) {
+ __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
+ __ movq(ContextOperand(rsi, slot->index()), kScratchRegister);
+ // No write barrier since the hole value is in old space.
+ }
+ break;
- Handle<Code> ic = is_strict_mode()
- ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
- : isolate()->builtins()->KeyedStoreIC_Initialize();
- __ call(ic);
+ case Slot::LOOKUP: {
+ __ push(rsi);
+ __ Push(variable->name());
+ // Declaration nodes are always introduced in one of two modes.
+ ASSERT(mode == Variable::VAR ||
+ mode == Variable::CONST ||
+ mode == Variable::LET);
+ PropertyAttributes attr = (mode == Variable::CONST) ? READ_ONLY : NONE;
+ __ Push(Smi::FromInt(attr));
+ // Push initial value, if any.
+ // Note: For variables we must not push an initial value (such as
+ // 'undefined') because we may have a (legal) redeclaration and we
+ // must not destroy the current value.
+ if (function != NULL) {
+ VisitForStackValue(function);
+ } else if (mode == Variable::CONST || mode == Variable::LET) {
+ __ PushRoot(Heap::kTheHoleValueRootIndex);
+ } else {
+ __ Push(Smi::FromInt(0)); // no initial value!
+ }
+ __ CallRuntime(Runtime::kDeclareContextSlot, 4);
+ break;
}
}
}
@@ -840,7 +813,7 @@
__ bind(&next_test);
__ Drop(1); // Switch value is no longer needed.
if (default_clause == NULL) {
- __ jmp(nested_statement.break_target());
+ __ jmp(nested_statement.break_label());
} else {
__ jmp(default_clause->body_target());
}
@@ -854,7 +827,7 @@
VisitStatements(clause->statements());
}
- __ bind(nested_statement.break_target());
+ __ bind(nested_statement.break_label());
PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}
@@ -980,7 +953,7 @@
__ bind(&loop);
__ movq(rax, Operand(rsp, 0 * kPointerSize)); // Get the current index.
__ cmpq(rax, Operand(rsp, 1 * kPointerSize)); // Compare to the array length.
- __ j(above_equal, loop_statement.break_target());
+ __ j(above_equal, loop_statement.break_label());
// Get the current entry of the array into register rbx.
__ movq(rbx, Operand(rsp, 2 * kPointerSize));
@@ -1008,7 +981,7 @@
__ push(rbx); // Current entry.
__ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
__ Cmp(rax, Smi::FromInt(0));
- __ j(equal, loop_statement.continue_target());
+ __ j(equal, loop_statement.continue_label());
__ movq(rbx, rax);
// Update the 'each' property or variable from the possibly filtered
@@ -1025,14 +998,14 @@
// Generate code for going to the next element by incrementing the
// index (smi) stored on top of the stack.
- __ bind(loop_statement.continue_target());
+ __ bind(loop_statement.continue_label());
__ SmiAddConstant(Operand(rsp, 0 * kPointerSize), Smi::FromInt(1));
EmitStackCheck(stmt);
__ jmp(&loop);
// Remove the pointers stored on the stack.
- __ bind(loop_statement.break_target());
+ __ bind(loop_statement.break_label());
__ addq(rsp, Immediate(5 * kPointerSize));
// Exit and decrement the loop depth.
@@ -1273,6 +1246,18 @@
__ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
__ bind(&done);
context()->Plug(rax);
+ } else if (var->mode() == Variable::LET) {
+ // Let bindings may be the hole value if they have not been initialized.
+ // Throw a type error in this case.
+ Label done;
+ MemOperand slot_operand = EmitSlotSearch(slot, rax);
+ __ movq(rax, slot_operand);
+ __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
+ __ j(not_equal, &done, Label::kNear);
+ __ Push(var->name());
+ __ CallRuntime(Runtime::kThrowReferenceError, 1);
+ __ bind(&done);
+ context()->Plug(rax);
} else {
context()->Plug(slot);
}
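
Loading a LET slot now checks for the hole value and throws a ReferenceError when the binding has not been initialized yet. A sketch of the intended behaviour, assuming a build with block scoping / let support enabled:

  {
    try {
      var y = x;    // the slot for 'x' still holds the hole -> ReferenceError
    } catch (e) {
      // e is a ReferenceError
    }
    let x = 1;      // after initialization the read succeeds: x === 1
  }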
@@ -1802,6 +1787,57 @@
}
__ bind(&skip);
+ } else if (var->mode() == Variable::LET && op != Token::INIT_LET) {
+ // Perform the assignment for non-const variables. Const assignments
+ // are simply skipped.
+ Slot* slot = var->AsSlot();
+ switch (slot->type()) {
+ case Slot::PARAMETER:
+ case Slot::LOCAL: {
+ Label assign;
+ // Check for an initialized let binding.
+ __ movq(rdx, Operand(rbp, SlotOffset(slot)));
+ __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
+ __ j(not_equal, &assign);
+ __ Push(var->name());
+ __ CallRuntime(Runtime::kThrowReferenceError, 1);
+ // Perform the assignment.
+ __ bind(&assign);
+ __ movq(Operand(rbp, SlotOffset(slot)), rax);
+ break;
+ }
+
+ case Slot::CONTEXT: {
+ // Let variables may be the hole value if they have not been
+ // initialized. Throw a type error in this case.
+ Label assign;
+ MemOperand target = EmitSlotSearch(slot, rcx);
+ // Check for an initialized let binding.
+ __ movq(rdx, target);
+ __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
+ __ j(not_equal, &assign, Label::kNear);
+ __ Push(var->name());
+ __ CallRuntime(Runtime::kThrowReferenceError, 1);
+ // Perform the assignment.
+ __ bind(&assign);
+ __ movq(target, rax);
+ // The value of the assignment is in rax. RecordWrite clobbers its
+ // register arguments.
+ __ movq(rdx, rax);
+ int offset = Context::SlotOffset(slot->index());
+ __ RecordWrite(rcx, offset, rdx, rbx);
+ break;
+ }
+
+ case Slot::LOOKUP:
+ // Call the runtime for the assignment.
+ __ push(rax); // Value.
+ __ push(rsi); // Context.
+ __ Push(var->name());
+ __ Push(Smi::FromInt(strict_mode_flag()));
+ __ CallRuntime(Runtime::kStoreContextSlot, 4);
+ break;
+ }
} else if (var->mode() != Variable::CONST) {
// Perform the assignment for non-const variables. Const assignments
// are simply skipped.
@@ -2167,38 +2203,10 @@
EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET);
} else {
// Call to a keyed property.
- // For a synthetic property use keyed load IC followed by function call,
- // for a regular property use EmitKeyedCallWithIC.
- if (prop->is_synthetic()) {
- // Do not visit the object and key subexpressions (they are shared
- // by all occurrences of the same rewritten parameter).
- ASSERT(prop->obj()->AsVariableProxy() != NULL);
- ASSERT(prop->obj()->AsVariableProxy()->var()->AsSlot() != NULL);
- Slot* slot = prop->obj()->AsVariableProxy()->var()->AsSlot();
- MemOperand operand = EmitSlotSearch(slot, rdx);
- __ movq(rdx, operand);
-
- ASSERT(prop->key()->AsLiteral() != NULL);
- ASSERT(prop->key()->AsLiteral()->handle()->IsSmi());
- __ Move(rax, prop->key()->AsLiteral()->handle());
-
- // Record source code position for IC call.
- SetSourcePosition(prop->position());
-
- Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
- __ call(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
- // Push result (function).
- __ push(rax);
- // Push Global receiver.
- __ movq(rcx, GlobalObjectOperand());
- __ push(FieldOperand(rcx, GlobalObject::kGlobalReceiverOffset));
- EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
- } else {
- { PreservePositionScope scope(masm()->positions_recorder());
- VisitForStackValue(prop->obj());
- }
- EmitKeyedCallWithIC(expr, prop->key());
+ { PreservePositionScope scope(masm()->positions_recorder());
+ VisitForStackValue(prop->obj());
}
+ EmitKeyedCallWithIC(expr, prop->key());
}
} else {
{ PreservePositionScope scope(masm()->positions_recorder());
@@ -3130,7 +3138,7 @@
Label done, not_found;
// tmp now holds finger offset as a smi.
- ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
+ STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
__ movq(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
SmiIndex index =
__ SmiToIndex(kScratchRegister, tmp, kPointerSizeLog2);
@@ -3516,39 +3524,6 @@
}
-void FullCodeGenerator::EmitIsNativeOrStrictMode(ZoneList<Expression*>* args) {
- ASSERT(args->length() == 1);
-
- // Load the function into rax.
- VisitForAccumulatorValue(args->at(0));
-
- // Prepare for the test.
- Label materialize_true, materialize_false;
- Label* if_true = NULL;
- Label* if_false = NULL;
- Label* fall_through = NULL;
- context()->PrepareTest(&materialize_true, &materialize_false,
- &if_true, &if_false, &fall_through);
-
- // Test for strict mode function.
- __ movq(rdx, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
- __ testb(FieldOperand(rdx, SharedFunctionInfo::kStrictModeByteOffset),
- Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
- __ j(not_equal, if_true);
-
- // Test for native function.
- __ testb(FieldOperand(rdx, SharedFunctionInfo::kNativeByteOffset),
- Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
- __ j(not_equal, if_true);
-
- // Not native or strict-mode function.
- __ jmp(if_false);
-
- PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
- context()->Plug(if_true, if_false);
-}
-
-
void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
Handle<String> name = expr->name();
if (name->length() > 0 && name->Get(0) == '_') {
@@ -3597,17 +3572,11 @@
Variable* var = expr->expression()->AsVariableProxy()->AsVariable();
if (prop != NULL) {
- if (prop->is_synthetic()) {
- // Result of deleting parameters is false, even when they rewrite
- // to accesses on the arguments object.
- context()->Plug(false);
- } else {
- VisitForStackValue(prop->obj());
- VisitForStackValue(prop->key());
- __ Push(Smi::FromInt(strict_mode_flag()));
- __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
- context()->Plug(rax);
- }
+ VisitForStackValue(prop->obj());
+ VisitForStackValue(prop->key());
+ __ Push(Smi::FromInt(strict_mode_flag()));
+ __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
+ context()->Plug(rax);
} else if (var != NULL) {
// Delete of an unqualified identifier is disallowed in strict mode
// but "delete this" is.
@@ -3971,6 +3940,10 @@
__ j(equal, if_true);
__ CompareRoot(rax, Heap::kFalseValueRootIndex);
Split(equal, if_true, if_false, fall_through);
+ } else if (FLAG_harmony_typeof &&
+ check->Equals(isolate()->heap()->null_symbol())) {
+ __ CompareRoot(rax, Heap::kNullValueRootIndex);
+ Split(equal, if_true, if_false, fall_through);
} else if (check->Equals(isolate()->heap()->undefined_symbol())) {
__ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
__ j(equal, if_true);
@@ -3987,8 +3960,10 @@
Split(above_equal, if_true, if_false, fall_through);
} else if (check->Equals(isolate()->heap()->object_symbol())) {
__ JumpIfSmi(rax, if_false);
- __ CompareRoot(rax, Heap::kNullValueRootIndex);
- __ j(equal, if_true);
+ if (!FLAG_harmony_typeof) {
+ __ CompareRoot(rax, Heap::kNullValueRootIndex);
+ __ j(equal, if_true);
+ }
__ CmpObjectType(rax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, rdx);
__ j(below, if_false);
__ CmpInstanceType(rdx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
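
Both the typeof comparison and the "object" case are now guarded by FLAG_harmony_typeof: with the flag on, typeof null answers "null" instead of being folded into "object"; with the flag off, the historical behaviour is preserved. Sketch (the command-line spelling of the flag is assumed to be --harmony-typeof):

  // Default behaviour (flag off):
  typeof null;    // "object"

  // With harmony typeof semantics enabled:
  // typeof null; // "null"
  // typeof {};   // still "object"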
@@ -4231,6 +4206,33 @@
#undef __
+#define __ ACCESS_MASM(masm())
+
+FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
+ int* stack_depth,
+ int* context_length) {
+ // The macros used here must preserve the result register.
+
+ // Because the handler block contains the context of the finally
+ // code, we can restore it directly from there for the finally code
+ // rather than iteratively unwinding contexts via their previous
+ // links.
+ __ Drop(*stack_depth); // Down to the handler block.
+ if (*context_length > 0) {
+ // Restore the context to its dedicated register and the stack.
+ __ movq(rsi, Operand(rsp, StackHandlerConstants::kContextOffset));
+ __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
+ }
+ __ PopTryHandler();
+ __ call(finally_entry_);
+
+ *stack_depth = 0;
+ *context_length = 0;
+ return previous_;
+}
+
+
+#undef __
} } // namespace v8::internal
diff --git a/src/x64/ic-x64.cc b/src/x64/ic-x64.cc
index 339d2c1..990c171 100644
--- a/src/x64/ic-x64.cc
+++ b/src/x64/ic-x64.cc
@@ -378,7 +378,7 @@
__ j(zero, index_string); // The value in hash is used at jump target.
// Is the string a symbol?
- ASSERT(kSymbolTag != 0);
+ STATIC_ASSERT(kSymbolTag != 0);
__ testb(FieldOperand(map, Map::kInstanceTypeOffset),
Immediate(kIsSymbolMask));
__ j(zero, not_symbol);
diff --git a/src/x64/lithium-codegen-x64.cc b/src/x64/lithium-codegen-x64.cc
index e505058..c182413 100644
--- a/src/x64/lithium-codegen-x64.cc
+++ b/src/x64/lithium-codegen-x64.cc
@@ -1216,17 +1216,10 @@
}
-void LCodeGen::DoFixedArrayLength(LFixedArrayLength* instr) {
+void LCodeGen::DoFixedArrayBaseLength(LFixedArrayBaseLength* instr) {
Register result = ToRegister(instr->result());
Register array = ToRegister(instr->InputAt(0));
- __ movq(result, FieldOperand(array, FixedArray::kLengthOffset));
-}
-
-
-void LCodeGen::DoExternalArrayLength(LExternalArrayLength* instr) {
- Register result = ToRegister(instr->result());
- Register array = ToRegister(instr->InputAt(0));
- __ movl(result, FieldOperand(array, ExternalPixelArray::kLengthOffset));
+ __ movq(result, FieldOperand(array, FixedArrayBase::kLengthOffset));
}
@@ -1402,39 +1395,83 @@
Label* true_label = chunk_->GetAssemblyLabel(true_block);
Label* false_label = chunk_->GetAssemblyLabel(false_block);
- __ CompareRoot(reg, Heap::kUndefinedValueRootIndex);
- __ j(equal, false_label);
- __ CompareRoot(reg, Heap::kTrueValueRootIndex);
- __ j(equal, true_label);
- __ CompareRoot(reg, Heap::kFalseValueRootIndex);
- __ j(equal, false_label);
- __ Cmp(reg, Smi::FromInt(0));
- __ j(equal, false_label);
- __ JumpIfSmi(reg, true_label);
+ ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types();
+ // Avoid deopts in the case where we've never executed this path before.
+ if (expected.IsEmpty()) expected = ToBooleanStub::all_types();
- // Test for double values. Plus/minus zero and NaN are false.
- Label call_stub;
- __ CompareRoot(FieldOperand(reg, HeapObject::kMapOffset),
- Heap::kHeapNumberMapRootIndex);
- __ j(not_equal, &call_stub, Label::kNear);
+ if (expected.Contains(ToBooleanStub::UNDEFINED)) {
+ // undefined -> false.
+ __ CompareRoot(reg, Heap::kUndefinedValueRootIndex);
+ __ j(equal, false_label);
+ }
+ if (expected.Contains(ToBooleanStub::BOOLEAN)) {
+ // true -> true.
+ __ CompareRoot(reg, Heap::kTrueValueRootIndex);
+ __ j(equal, true_label);
+ // false -> false.
+ __ CompareRoot(reg, Heap::kFalseValueRootIndex);
+ __ j(equal, false_label);
+ }
+ if (expected.Contains(ToBooleanStub::NULL_TYPE)) {
+ // 'null' -> false.
+ __ CompareRoot(reg, Heap::kNullValueRootIndex);
+ __ j(equal, false_label);
+ }
- // HeapNumber => false iff +0, -0, or NaN. These three cases set the
- // zero flag when compared to zero using ucomisd.
- __ xorps(xmm0, xmm0);
- __ ucomisd(xmm0, FieldOperand(reg, HeapNumber::kValueOffset));
- __ j(zero, false_label);
- __ jmp(true_label);
+ if (expected.Contains(ToBooleanStub::SMI)) {
+ // Smis: 0 -> false, all other -> true.
+ __ Cmp(reg, Smi::FromInt(0));
+ __ j(equal, false_label);
+ __ JumpIfSmi(reg, true_label);
+ } else if (expected.NeedsMap()) {
+ // If we need a map later and have a Smi -> deopt.
+ __ testb(reg, Immediate(kSmiTagMask));
+ DeoptimizeIf(zero, instr->environment());
+ }
- // The conversion stub doesn't cause garbage collections so it's
- // safe to not record a safepoint after the call.
- __ bind(&call_stub);
- ToBooleanStub stub(rax);
- __ Pushad();
- __ push(reg);
- __ CallStub(&stub);
- __ testq(rax, rax);
- __ Popad();
- EmitBranch(true_block, false_block, not_zero);
+ const Register map = kScratchRegister;
+ if (expected.NeedsMap()) {
+ __ movq(map, FieldOperand(reg, HeapObject::kMapOffset));
+
+ if (expected.CanBeUndetectable()) {
+ // Undetectable -> false.
+ __ testb(FieldOperand(map, Map::kBitFieldOffset),
+ Immediate(1 << Map::kIsUndetectable));
+ __ j(not_zero, false_label);
+ }
+ }
+
+ if (expected.Contains(ToBooleanStub::SPEC_OBJECT)) {
+ // spec object -> true.
+ __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
+ __ j(above_equal, true_label);
+ }
+
+ if (expected.Contains(ToBooleanStub::STRING)) {
+ // String value -> false iff empty.
+ Label not_string;
+ __ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
+ __ j(above_equal, &not_string, Label::kNear);
+ __ cmpq(FieldOperand(reg, String::kLengthOffset), Immediate(0));
+ __ j(not_zero, true_label);
+ __ jmp(false_label);
+ __ bind(&not_string);
+ }
+
+ if (expected.Contains(ToBooleanStub::HEAP_NUMBER)) {
+ // heap number -> false iff +0, -0, or NaN.
+ Label not_heap_number;
+ __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
+ __ j(not_equal, &not_heap_number, Label::kNear);
+ __ xorps(xmm0, xmm0);
+ __ ucomisd(xmm0, FieldOperand(reg, HeapNumber::kValueOffset));
+ __ j(zero, false_label);
+ __ jmp(true_label);
+ __ bind(&not_heap_number);
+ }
+
+ // We've seen something for the first time -> deopt.
+ DeoptimizeIf(no_condition, instr->environment());
}
}
}
@@ -2225,16 +2262,13 @@
void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
- Register elements = ToRegister(instr->elements());
- Register key = ToRegister(instr->key());
Register result = ToRegister(instr->result());
- ASSERT(result.is(elements));
// Load the result.
- __ movq(result, FieldOperand(elements,
- key,
- times_pointer_size,
- FixedArray::kHeaderSize));
+ __ movq(result,
+ BuildFastArrayOperand(instr->elements(), instr->key(),
+ JSObject::FAST_ELEMENTS,
+ FixedArray::kHeaderSize - kHeapObjectTag));
// Check for the hole value.
if (instr->hydrogen()->RequiresHoleCheck()) {
@@ -2268,22 +2302,22 @@
Operand LCodeGen::BuildFastArrayOperand(
- LOperand* external_pointer,
+ LOperand* elements_pointer,
LOperand* key,
JSObject::ElementsKind elements_kind,
uint32_t offset) {
- Register external_pointer_reg = ToRegister(external_pointer);
+ Register elements_pointer_reg = ToRegister(elements_pointer);
int shift_size = ElementsKindToShiftSize(elements_kind);
if (key->IsConstantOperand()) {
int constant_value = ToInteger32(LConstantOperand::cast(key));
if (constant_value & 0xF0000000) {
Abort("array index constant value too big");
}
- return Operand(external_pointer_reg,
+ return Operand(elements_pointer_reg,
constant_value * (1 << shift_size) + offset);
} else {
ScaleFactor scale_factor = static_cast<ScaleFactor>(shift_size);
- return Operand(external_pointer_reg, ToRegister(key),
+ return Operand(elements_pointer_reg, ToRegister(key),
scale_factor, offset);
}
}
@@ -2679,6 +2713,7 @@
XMMRegister xmm_scratch = xmm0;
Register output_reg = ToRegister(instr->result());
XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
+ Label done;
if (CpuFeatures::IsSupported(SSE4_1)) {
CpuFeatures::Scope scope(SSE4_1);
@@ -2693,13 +2728,20 @@
__ cmpl(output_reg, Immediate(0x80000000));
DeoptimizeIf(equal, instr->environment());
} else {
+ // Deoptimize on negative inputs.
__ xorps(xmm_scratch, xmm_scratch); // Zero the register.
__ ucomisd(input_reg, xmm_scratch);
-
+ DeoptimizeIf(below, instr->environment());
if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
- DeoptimizeIf(below_equal, instr->environment());
- } else {
- DeoptimizeIf(below, instr->environment());
+ // Check for negative zero.
+ Label positive_sign;
+ __ j(above, &positive_sign, Label::kNear);
+ __ movmskpd(output_reg, input_reg);
+ __ testq(output_reg, Immediate(1));
+ DeoptimizeIf(not_zero, instr->environment());
+ __ Set(output_reg, 0);
+ __ jmp(&done);
+ __ bind(&positive_sign);
}
// Use truncating instruction (OK because input is positive).
@@ -2709,6 +2751,7 @@
__ cmpl(output_reg, Immediate(0x80000000));
DeoptimizeIf(equal, instr->environment());
}
+ __ bind(&done);
}
@@ -3057,12 +3100,22 @@
void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
- if (instr->length()->IsRegister()) {
- __ cmpq(ToRegister(instr->index()), ToRegister(instr->length()));
+ if (instr->index()->IsConstantOperand()) {
+ if (instr->length()->IsRegister()) {
+ __ cmpq(ToRegister(instr->length()),
+ Immediate(ToInteger32(LConstantOperand::cast(instr->index()))));
+ } else {
+ __ cmpq(ToOperand(instr->length()),
+ Immediate(ToInteger32(LConstantOperand::cast(instr->index()))));
+ }
} else {
- __ cmpq(ToRegister(instr->index()), ToOperand(instr->length()));
+ if (instr->length()->IsRegister()) {
+ __ cmpq(ToRegister(instr->length()), ToRegister(instr->index()));
+ } else {
+ __ cmpq(ToOperand(instr->length()), ToRegister(instr->index()));
+ }
}
- DeoptimizeIf(above_equal, instr->environment());
+ DeoptimizeIf(below_equal, instr->environment());
}
@@ -3105,8 +3158,6 @@
__ ucomisd(value, value);
__ j(parity_odd, &have_value); // NaN.
- ExternalReference canonical_nan_reference =
- ExternalReference::address_of_canonical_non_hole_nan();
__ Set(kScratchRegister, BitCast<uint64_t>(
FixedDoubleArray::canonical_not_the_hole_nan_as_double()));
__ movq(value, kScratchRegister);
@@ -3149,95 +3200,78 @@
};
Register string = ToRegister(instr->string());
- Register index = no_reg;
- int const_index = -1;
- if (instr->index()->IsConstantOperand()) {
- const_index = ToInteger32(LConstantOperand::cast(instr->index()));
- STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue);
- if (!Smi::IsValid(const_index)) {
- // Guaranteed to be out of bounds because of the assert above.
- // So the bounds check that must dominate this instruction must
- // have deoptimized already.
- if (FLAG_debug_code) {
- __ Abort("StringCharCodeAt: out of bounds index.");
- }
- // No code needs to be generated.
- return;
- }
- } else {
- index = ToRegister(instr->index());
- }
+ Register index = ToRegister(instr->index());
Register result = ToRegister(instr->result());
DeferredStringCharCodeAt* deferred =
new DeferredStringCharCodeAt(this, instr);
- Label flat_string, ascii_string, done;
-
// Fetch the instance type of the receiver into result register.
__ movq(result, FieldOperand(string, HeapObject::kMapOffset));
__ movzxbl(result, FieldOperand(result, Map::kInstanceTypeOffset));
- // We need special handling for non-sequential strings.
- STATIC_ASSERT(kSeqStringTag == 0);
- __ testb(result, Immediate(kStringRepresentationMask));
- __ j(zero, &flat_string, Label::kNear);
+ // We need special handling for indirect strings.
+ Label check_sequential;
+ __ testb(result, Immediate(kIsIndirectStringMask));
+ __ j(zero, &check_sequential, Label::kNear);
- // Handle cons strings and go to deferred code for the rest.
- __ testb(result, Immediate(kIsConsStringMask));
- __ j(zero, deferred->entry());
+ // Dispatch on the indirect string shape: slice or cons.
+ Label cons_string;
+ __ testb(result, Immediate(kSlicedNotConsMask));
+ __ j(zero, &cons_string, Label::kNear);
- // ConsString.
+ // Handle slices.
+ Label indirect_string_loaded;
+ __ SmiToInteger32(result, FieldOperand(string, SlicedString::kOffsetOffset));
+ __ addq(index, result);
+ __ movq(string, FieldOperand(string, SlicedString::kParentOffset));
+ __ jmp(&indirect_string_loaded, Label::kNear);
+
+ // Handle conses.
// Check whether the right hand side is the empty string (i.e. if
// this is really a flat string in a cons string). If that is not
// the case we would rather go to the runtime system now to flatten
// the string.
+ __ bind(&cons_string);
__ CompareRoot(FieldOperand(string, ConsString::kSecondOffset),
Heap::kEmptyStringRootIndex);
__ j(not_equal, deferred->entry());
- // Get the first of the two strings and load its instance type.
__ movq(string, FieldOperand(string, ConsString::kFirstOffset));
+
+ __ bind(&indirect_string_loaded);
__ movq(result, FieldOperand(string, HeapObject::kMapOffset));
__ movzxbl(result, FieldOperand(result, Map::kInstanceTypeOffset));
- // If the first cons component is also non-flat, then go to runtime.
+
+ // Check whether the string is sequential. The only non-sequential
+ // shapes we support have just been unwrapped above.
+ __ bind(&check_sequential);
STATIC_ASSERT(kSeqStringTag == 0);
__ testb(result, Immediate(kStringRepresentationMask));
__ j(not_zero, deferred->entry());
- // Check for ASCII or two-byte string.
- __ bind(&flat_string);
+ // Dispatch on the encoding: ASCII or two-byte.
+ Label ascii_string;
STATIC_ASSERT(kAsciiStringTag != 0);
__ testb(result, Immediate(kStringEncodingMask));
__ j(not_zero, &ascii_string, Label::kNear);
// Two-byte string.
// Load the two-byte character code into the result register.
+ Label done;
STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
- if (instr->index()->IsConstantOperand()) {
- __ movzxwl(result,
- FieldOperand(string,
- SeqTwoByteString::kHeaderSize +
- (kUC16Size * const_index)));
- } else {
- __ movzxwl(result, FieldOperand(string,
- index,
- times_2,
- SeqTwoByteString::kHeaderSize));
- }
+ __ movzxwl(result, FieldOperand(string,
+ index,
+ times_2,
+ SeqTwoByteString::kHeaderSize));
__ jmp(&done, Label::kNear);
// ASCII string.
// Load the byte into the result register.
__ bind(&ascii_string);
- if (instr->index()->IsConstantOperand()) {
- __ movzxbl(result, FieldOperand(string,
- SeqAsciiString::kHeaderSize + const_index));
- } else {
- __ movzxbl(result, FieldOperand(string,
- index,
- times_1,
- SeqAsciiString::kHeaderSize));
- }
+ __ movzxbl(result, FieldOperand(string,
+ index,
+ times_1,
+ SeqAsciiString::kHeaderSize));
__ bind(&done);
__ bind(deferred->exit());
}
@@ -3930,6 +3964,10 @@
__ CompareRoot(input, Heap::kFalseValueRootIndex);
final_branch_condition = equal;
+ } else if (FLAG_harmony_typeof && type_name->Equals(heap()->null_symbol())) {
+ __ CompareRoot(input, Heap::kNullValueRootIndex);
+ final_branch_condition = equal;
+
} else if (type_name->Equals(heap()->undefined_symbol())) {
__ CompareRoot(input, Heap::kUndefinedValueRootIndex);
__ j(equal, true_label);
@@ -3947,8 +3985,10 @@
} else if (type_name->Equals(heap()->object_symbol())) {
__ JumpIfSmi(input, false_label);
- __ CompareRoot(input, Heap::kNullValueRootIndex);
- __ j(equal, true_label);
+ if (!FLAG_harmony_typeof) {
+ __ CompareRoot(input, Heap::kNullValueRootIndex);
+ __ j(equal, true_label);
+ }
__ CmpObjectType(input, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, input);
__ j(below, false_label);
__ CmpInstanceType(input, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
diff --git a/src/x64/lithium-codegen-x64.h b/src/x64/lithium-codegen-x64.h
index d7c72b5..0622e9d 100644
--- a/src/x64/lithium-codegen-x64.h
+++ b/src/x64/lithium-codegen-x64.h
@@ -216,7 +216,7 @@
Register ToRegister(int index) const;
XMMRegister ToDoubleRegister(int index) const;
Operand BuildFastArrayOperand(
- LOperand* external_pointer,
+ LOperand* elements_pointer,
LOperand* key,
JSObject::ElementsKind elements_kind,
uint32_t offset);
diff --git a/src/x64/lithium-x64.cc b/src/x64/lithium-x64.cc
index 00b906d..9dc925d 100644
--- a/src/x64/lithium-x64.cc
+++ b/src/x64/lithium-x64.cc
@@ -1040,7 +1040,7 @@
: instr->SecondSuccessor();
return new LGoto(successor->block_id());
}
- return new LBranch(UseRegisterAtStart(v));
+ return AssignEnvironment(new LBranch(UseRegister(v)));
}
@@ -1502,16 +1502,10 @@
}
-LInstruction* LChunkBuilder::DoFixedArrayLength(HFixedArrayLength* instr) {
+LInstruction* LChunkBuilder::DoFixedArrayBaseLength(
+ HFixedArrayBaseLength* instr) {
LOperand* array = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LFixedArrayLength(array));
-}
-
-
-LInstruction* LChunkBuilder::DoExternalArrayLength(
- HExternalArrayLength* instr) {
- LOperand* array = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new LExternalArrayLength(array));
+ return DefineAsRegister(new LFixedArrayBaseLength(array));
}
@@ -1529,8 +1523,9 @@
LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
- return AssignEnvironment(new LBoundsCheck(UseRegisterAtStart(instr->index()),
- Use(instr->length())));
+ return AssignEnvironment(new LBoundsCheck(
+ UseRegisterOrConstantAtStart(instr->index()),
+ Use(instr->length())));
}
@@ -1829,9 +1824,9 @@
ASSERT(instr->representation().IsTagged());
ASSERT(instr->key()->representation().IsInteger32());
LOperand* obj = UseRegisterAtStart(instr->object());
- LOperand* key = UseRegisterAtStart(instr->key());
+ LOperand* key = UseRegisterOrConstantAtStart(instr->key());
LLoadKeyedFastElement* result = new LLoadKeyedFastElement(obj, key);
- return AssignEnvironment(DefineSameAsFirst(result));
+ return AssignEnvironment(DefineAsRegister(result));
}
@@ -1993,8 +1988,8 @@
LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) {
- LOperand* string = UseRegister(instr->string());
- LOperand* index = UseRegisterOrConstant(instr->index());
+ LOperand* string = UseTempRegister(instr->string());
+ LOperand* index = UseTempRegister(instr->index());
LStringCharCodeAt* result = new LStringCharCodeAt(string, index);
return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
}
diff --git a/src/x64/lithium-x64.h b/src/x64/lithium-x64.h
index 18a036f..05b6637 100644
--- a/src/x64/lithium-x64.h
+++ b/src/x64/lithium-x64.h
@@ -92,8 +92,7 @@
V(DivI) \
V(DoubleToI) \
V(ElementsKind) \
- V(ExternalArrayLength) \
- V(FixedArrayLength) \
+ V(FixedArrayBaseLength) \
V(FunctionLiteral) \
V(GetCachedArrayIndex) \
V(GlobalObject) \
@@ -913,25 +912,15 @@
};
-class LExternalArrayLength: public LTemplateInstruction<1, 1, 0> {
+class LFixedArrayBaseLength: public LTemplateInstruction<1, 1, 0> {
public:
- explicit LExternalArrayLength(LOperand* value) {
+ explicit LFixedArrayBaseLength(LOperand* value) {
inputs_[0] = value;
}
- DECLARE_CONCRETE_INSTRUCTION(ExternalArrayLength, "external-array-length")
- DECLARE_HYDROGEN_ACCESSOR(ExternalArrayLength)
-};
-
-
-class LFixedArrayLength: public LTemplateInstruction<1, 1, 0> {
- public:
- explicit LFixedArrayLength(LOperand* value) {
- inputs_[0] = value;
- }
-
- DECLARE_CONCRETE_INSTRUCTION(FixedArrayLength, "fixed-array-length")
- DECLARE_HYDROGEN_ACCESSOR(FixedArrayLength)
+ DECLARE_CONCRETE_INSTRUCTION(FixedArrayBaseLength,
+ "fixed-array-base-length")
+ DECLARE_HYDROGEN_ACCESSOR(FixedArrayBaseLength)
};
@@ -2135,14 +2124,18 @@
template<int I, int T>
LInstruction* DefineFixedDouble(LTemplateInstruction<1, I, T>* instr,
XMMRegister reg);
+ // Assigns an environment to an instruction. An instruction which can
+ // deoptimize must have an environment.
LInstruction* AssignEnvironment(LInstruction* instr);
+ // Assigns a pointer map to an instruction. An instruction which can
+ // trigger a GC or a lazy deoptimization must have a pointer map.
LInstruction* AssignPointerMap(LInstruction* instr);
enum CanDeoptimize { CAN_DEOPTIMIZE_EAGERLY, CANNOT_DEOPTIMIZE_EAGERLY };
- // By default we assume that instruction sequences generated for calls
- // cannot deoptimize eagerly and we do not attach environment to this
- // instruction.
+ // Marks a call for the register allocator. Assigns a pointer map to
+ // support GC and lazy deoptimization. Assigns an environment to support
+ // eager deoptimization if CAN_DEOPTIMIZE_EAGERLY.
LInstruction* MarkAsCall(
LInstruction* instr,
HInstruction* hinstr,
diff --git a/src/x64/macro-assembler-x64.cc b/src/x64/macro-assembler-x64.cc
index 2b15553..b51d531 100644
--- a/src/x64/macro-assembler-x64.cc
+++ b/src/x64/macro-assembler-x64.cc
@@ -923,7 +923,7 @@
void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
- ASSERT_EQ(0, kSmiTag);
+ STATIC_ASSERT(kSmiTag == 0);
if (!dst.is(src)) {
movl(dst, src);
}
@@ -961,7 +961,7 @@
void MacroAssembler::SmiToInteger32(Register dst, Register src) {
- ASSERT_EQ(0, kSmiTag);
+ STATIC_ASSERT(kSmiTag == 0);
if (!dst.is(src)) {
movq(dst, src);
}
@@ -975,7 +975,7 @@
void MacroAssembler::SmiToInteger64(Register dst, Register src) {
- ASSERT_EQ(0, kSmiTag);
+ STATIC_ASSERT(kSmiTag == 0);
if (!dst.is(src)) {
movq(dst, src);
}
@@ -1111,21 +1111,21 @@
Condition MacroAssembler::CheckSmi(Register src) {
- ASSERT_EQ(0, kSmiTag);
+ STATIC_ASSERT(kSmiTag == 0);
testb(src, Immediate(kSmiTagMask));
return zero;
}
Condition MacroAssembler::CheckSmi(const Operand& src) {
- ASSERT_EQ(0, kSmiTag);
+ STATIC_ASSERT(kSmiTag == 0);
testb(src, Immediate(kSmiTagMask));
return zero;
}
Condition MacroAssembler::CheckNonNegativeSmi(Register src) {
- ASSERT_EQ(0, kSmiTag);
+ STATIC_ASSERT(kSmiTag == 0);
// Test that both bits of the mask 0x8000000000000001 are zero.
movq(kScratchRegister, src);
rol(kScratchRegister, Immediate(1));
@@ -1138,7 +1138,7 @@
if (first.is(second)) {
return CheckSmi(first);
}
- ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3);
+ STATIC_ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3);
leal(kScratchRegister, Operand(first, second, times_1, 0));
testb(kScratchRegister, Immediate(0x03));
return zero;
@@ -1294,7 +1294,7 @@
Label::Distance near_jump) {
// Does not assume that src is a smi.
ASSERT_EQ(static_cast<int>(1), static_cast<int>(kSmiTagMask));
- ASSERT_EQ(0, kSmiTag);
+ STATIC_ASSERT(kSmiTag == 0);
ASSERT(!dst.is(kScratchRegister));
ASSERT(!src.is(kScratchRegister));
@@ -1998,7 +1998,7 @@
Check(not_both_smis, "Both registers were smis in SelectNonSmi.");
}
#endif
- ASSERT_EQ(0, kSmiTag);
+ STATIC_ASSERT(kSmiTag == 0);
ASSERT_EQ(0, Smi::FromInt(0));
movl(kScratchRegister, Immediate(kSmiTagMask));
and_(kScratchRegister, src1);
@@ -2387,18 +2387,15 @@
void MacroAssembler::PushTryHandler(CodeLocation try_location,
HandlerType type) {
// Adjust this code if not the case.
- ASSERT(StackHandlerConstants::kSize == 4 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kContextOffset == 1 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kStateOffset == 3 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize);
// The pc (return address) is already on TOS. This code pushes state,
- // frame pointer and current handler. Check that they are expected
- // next on the stack, in that order.
- ASSERT_EQ(StackHandlerConstants::kStateOffset,
- StackHandlerConstants::kPCOffset - kPointerSize);
- ASSERT_EQ(StackHandlerConstants::kFPOffset,
- StackHandlerConstants::kStateOffset - kPointerSize);
- ASSERT_EQ(StackHandlerConstants::kNextOffset,
- StackHandlerConstants::kFPOffset - kPointerSize);
-
+ // frame pointer, context, and current handler.
if (try_location == IN_JAVASCRIPT) {
if (type == TRY_CATCH_HANDLER) {
push(Immediate(StackHandler::TRY_CATCH));
@@ -2406,6 +2403,7 @@
push(Immediate(StackHandler::TRY_FINALLY));
}
push(rbp);
+ push(rsi);
} else {
ASSERT(try_location == IN_JS_ENTRY);
// The frame pointer does not point to a JS frame so we save NULL
@@ -2413,6 +2411,7 @@
// before dereferencing it to restore the context.
push(Immediate(StackHandler::ENTRY));
push(Immediate(0)); // NULL frame pointer.
+ Push(Smi::FromInt(0)); // No context.
}
// Save the current handler.
Operand handler_operand =
@@ -2435,12 +2434,13 @@
void MacroAssembler::Throw(Register value) {
- // Check that stack should contain next handler, frame pointer, state and
- // return address in that order.
- STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize ==
- StackHandlerConstants::kStateOffset);
- STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize ==
- StackHandlerConstants::kPCOffset);
+ // Adjust this code if not the case.
+ STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kContextOffset == 1 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kStateOffset == 3 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize);
// Keep thrown value in rax.
if (!value.is(rax)) {
movq(rax, value);
@@ -2451,23 +2451,32 @@
movq(rsp, handler_operand);
// get next in chain
pop(handler_operand);
- pop(rbp); // pop frame pointer
- pop(rdx); // remove state
+ pop(rsi); // Context.
+ pop(rbp); // Frame pointer.
+ pop(rdx); // State.
- // Before returning we restore the context from the frame pointer if not NULL.
- // The frame pointer is NULL in the exception handler of a JS entry frame.
- Set(rsi, 0); // Tentatively set context pointer to NULL
+ // If the handler is a JS frame, restore the context to the frame.
+ // (rdx == ENTRY) == (rbp == 0) == (rsi == 0), so we could test any
+ // of them.
Label skip;
- cmpq(rbp, Immediate(0));
+ cmpq(rdx, Immediate(StackHandler::ENTRY));
j(equal, &skip, Label::kNear);
- movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
+ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
bind(&skip);
+
ret(0);
}
void MacroAssembler::ThrowUncatchable(UncatchableExceptionType type,
Register value) {
+ // Adjust this code if not the case.
+ STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kContextOffset == 1 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kFPOffset == 2 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kStateOffset == 3 * kPointerSize);
+ STATIC_ASSERT(StackHandlerConstants::kPCOffset == 4 * kPointerSize);
// Keep thrown value in rax.
if (!value.is(rax)) {
movq(rax, value);
@@ -2507,19 +2516,13 @@
Store(pending_exception, rax);
}
- // Clear the context pointer.
+ // Discard the context saved in the handler and clear the context pointer.
+ pop(rdx);
Set(rsi, 0);
- // Restore registers from handler.
- STATIC_ASSERT(StackHandlerConstants::kNextOffset + kPointerSize ==
- StackHandlerConstants::kFPOffset);
- pop(rbp); // FP
- STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize ==
- StackHandlerConstants::kStateOffset);
- pop(rdx); // State
+ pop(rbp); // Restore frame pointer.
+ pop(rdx); // Discard state.
- STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize ==
- StackHandlerConstants::kPCOffset);
ret(0);
}
@@ -2696,7 +2699,7 @@
Register instance_type) {
movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
- ASSERT(kNotStringTag != 0);
+ STATIC_ASSERT(kNotStringTag != 0);
testb(instance_type, Immediate(kIsNotStringMask));
return zero;
}
diff --git a/src/x64/regexp-macro-assembler-x64.cc b/src/x64/regexp-macro-assembler-x64.cc
index 395466e..a782bd7 100644
--- a/src/x64/regexp-macro-assembler-x64.cc
+++ b/src/x64/regexp-macro-assembler-x64.cc
@@ -1170,12 +1170,13 @@
}
// Prepare for possible GC.
- HandleScope handles;
+ HandleScope handles(isolate);
Handle<Code> code_handle(re_code);
Handle<String> subject(frame_entry<String*>(re_frame, kInputString));
+
// Current string.
- bool is_ascii = subject->IsAsciiRepresentation();
+ bool is_ascii = subject->IsAsciiRepresentationUnderneath();
ASSERT(re_code->instruction_start() <= *return_address);
ASSERT(*return_address <=
@@ -1184,7 +1185,7 @@
MaybeObject* result = Execution::HandleStackGuardInterrupt();
if (*code_handle != re_code) { // Return address no longer valid
- intptr_t delta = *code_handle - re_code;
+ intptr_t delta = code_handle->address() - re_code->address();
// Overwrite the return address on the stack.
*return_address += delta;
}
@@ -1193,8 +1194,20 @@
return EXCEPTION;
}
+ Handle<String> subject_tmp = subject;
+ int slice_offset = 0;
+
+ // Extract the underlying string and the slice offset.
+ if (StringShape(*subject_tmp).IsCons()) {
+ subject_tmp = Handle<String>(ConsString::cast(*subject_tmp)->first());
+ } else if (StringShape(*subject_tmp).IsSliced()) {
+ SlicedString* slice = SlicedString::cast(*subject_tmp);
+ subject_tmp = Handle<String>(slice->parent());
+ slice_offset = slice->offset();
+ }
+
// String might have changed.
- if (subject->IsAsciiRepresentation() != is_ascii) {
+ if (subject_tmp->IsAsciiRepresentation() != is_ascii) {
// If we changed between an ASCII and an UC16 string, the specialized
// code cannot be used, and we need to restart regexp matching from
// scratch (including, potentially, compiling a new version of the code).
@@ -1205,8 +1218,8 @@
// be a sequential or external string with the same content.
// Update the start and end pointers in the stack frame to the current
// location (whether it has actually moved or not).
- ASSERT(StringShape(*subject).IsSequential() ||
- StringShape(*subject).IsExternal());
+ ASSERT(StringShape(*subject_tmp).IsSequential() ||
+ StringShape(*subject_tmp).IsExternal());
// The original start address of the characters to match.
const byte* start_address = frame_entry<const byte*>(re_frame, kInputStart);
@@ -1214,7 +1227,8 @@
// Find the current start address of the same character at the current string
// position.
int start_index = frame_entry<int>(re_frame, kStartIndex);
- const byte* new_address = StringCharacterPosition(*subject, start_index);
+ const byte* new_address = StringCharacterPosition(*subject_tmp,
+ start_index + slice_offset);
if (start_address != new_address) {
// If there is a difference, update the object pointer and start and end
diff --git a/src/x64/stub-cache-x64.cc b/src/x64/stub-cache-x64.cc
index b8e5f22..5ea7257 100644
--- a/src/x64/stub-cache-x64.cc
+++ b/src/x64/stub-cache-x64.cc
@@ -258,7 +258,7 @@
// Check that the object is a string.
__ movq(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
__ movzxbq(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
- ASSERT(kNotStringTag != 0);
+ STATIC_ASSERT(kNotStringTag != 0);
__ testl(scratch, Immediate(kNotStringTag));
__ j(not_zero, non_string_object);
}
@@ -3070,7 +3070,7 @@
// Load the initial map and verify that it is in fact a map.
__ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a NULL and a Smi.
- ASSERT(kSmiTag == 0);
+ STATIC_ASSERT(kSmiTag == 0);
__ JumpIfSmi(rbx, &generic_stub_call);
__ CmpObjectType(rbx, MAP_TYPE, rcx);
__ j(not_equal, &generic_stub_call);
@@ -3244,7 +3244,7 @@
// Check that the index is in range.
__ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
__ SmiToInteger32(rcx, rax);
- __ cmpl(rcx, FieldOperand(rbx, ExternalArray::kLengthOffset));
+ __ cmpq(rax, FieldOperand(rbx, ExternalArray::kLengthOffset));
// Unsigned comparison catches both negative and too-large values.
__ j(above_equal, &miss_force_generic);
@@ -3379,7 +3379,7 @@
// Check that the index is in range.
__ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));
__ SmiToInteger32(rdi, rcx); // Untag the index.
- __ cmpl(rdi, FieldOperand(rbx, ExternalArray::kLengthOffset));
+ __ cmpq(rcx, FieldOperand(rbx, ExternalArray::kLengthOffset));
// Unsigned comparison catches both negative and too-large values.
__ j(above_equal, &miss_force_generic);
@@ -3752,10 +3752,11 @@
__ bind(&smi_value);
// Value is a smi. convert to a double and store.
- __ SmiToInteger32(rax, rax);
- __ push(rax);
+ // Preserve original value.
+ __ SmiToInteger32(rdx, rax);
+ __ push(rdx);
__ fild_s(Operand(rsp, 0));
- __ pop(rax);
+ __ pop(rdx);
__ SmiToInteger32(rcx, rcx);
__ fstp_d(FieldOperand(rdi, rcx, times_8, FixedDoubleArray::kHeaderSize));
__ ret(0);
diff --git a/src/zone.cc b/src/zone.cc
index 42ce8c5..7574778 100644
--- a/src/zone.cc
+++ b/src/zone.cc
@@ -34,24 +34,6 @@
namespace internal {
-Zone::Zone()
- : zone_excess_limit_(256 * MB),
- segment_bytes_allocated_(0),
- position_(0),
- limit_(0),
- scope_nesting_(0),
- segment_head_(NULL) {
-}
-unsigned Zone::allocation_size_ = 0;
-
-
-ZoneScope::~ZoneScope() {
- ASSERT_EQ(Isolate::Current(), isolate_);
- if (ShouldDeleteOnExit()) isolate_->zone()->DeleteAll();
- isolate_->zone()->scope_nesting_--;
-}
-
-
// Segments represent chunks of memory: They have starting address
// (encoded in the this pointer) and a size in bytes. Segments are
// chained together forming a LIFO structure with the newest segment
@@ -60,6 +42,11 @@
class Segment {
public:
+ void Initialize(Segment* next, int size) {
+ next_ = next;
+ size_ = size;
+ }
+
Segment* next() const { return next_; }
void clear_next() { next_ = NULL; }
@@ -77,19 +64,33 @@
Segment* next_;
int size_;
-
- friend class Zone;
};
+Zone::Zone()
+ : zone_excess_limit_(256 * MB),
+ segment_bytes_allocated_(0),
+ position_(0),
+ limit_(0),
+ scope_nesting_(0),
+ segment_head_(NULL) {
+}
+unsigned Zone::allocation_size_ = 0;
+
+ZoneScope::~ZoneScope() {
+ ASSERT_EQ(Isolate::Current(), isolate_);
+ if (ShouldDeleteOnExit()) isolate_->zone()->DeleteAll();
+ isolate_->zone()->scope_nesting_--;
+}
+
+
// Creates a new segment, sets it size, and pushes it to the front
// of the segment chain. Returns the new segment.
Segment* Zone::NewSegment(int size) {
Segment* result = reinterpret_cast<Segment*>(Malloced::New(size));
adjust_segment_bytes_allocated(size);
if (result != NULL) {
- result->next_ = segment_head_;
- result->size_ = size;
+ result->Initialize(segment_head_, size);
segment_head_ = result;
}
return result;
@@ -155,6 +156,14 @@
}
+void Zone::DeleteKeptSegment() {
+ if (segment_head_ != NULL) {
+ DeleteSegment(segment_head_, segment_head_->size());
+ segment_head_ = NULL;
+ }
+}
+
+
Address Zone::NewExpand(int size) {
// Make sure the requested size is already properly aligned and that
// there isn't enough room in the Zone to satisfy the request.
diff --git a/src/zone.h b/src/zone.h
index abb53ad..f60ac0d 100644
--- a/src/zone.h
+++ b/src/zone.h
@@ -65,9 +65,13 @@
template <typename T>
inline T* NewArray(int length);
- // Delete all objects and free all memory allocated in the Zone.
+ // Deletes all objects and free all memory allocated in the Zone. Keeps one
+ // small (size <= kMaximumKeptSegmentSize) segment around if it finds one.
void DeleteAll();
+ // Deletes the last small segment kept around by DeleteAll().
+ void DeleteKeptSegment();
+
// Returns true if more memory has been allocated in zones than
// the limit allows.
inline bool excess_allocation();
@@ -148,6 +152,7 @@
// ZoneObjects should never be deleted individually; use
// Zone::DeleteAll() to delete all zone objects in one go.
void operator delete(void*, size_t) { UNREACHABLE(); }
+ void operator delete(void* pointer, Zone* zone) { UNREACHABLE(); }
};
@@ -193,13 +198,12 @@
: List<T, ZoneListAllocationPolicy>(other.length()) {
AddAll(other);
}
+
+ void operator delete(void* pointer) { UNREACHABLE(); }
+ void operator delete(void* pointer, Zone* zone) { UNREACHABLE(); }
};
-// Introduce a convenience type for zone lists of map handles.
-typedef ZoneList<Handle<Map> > ZoneMapList;
-
-
// ZoneScopes keep track of the current parsing and compilation
// nesting and cleans up generated ASTs in the Zone when exiting the
// outer-most scope.
diff --git a/test/benchmarks/testcfg.py b/test/benchmarks/testcfg.py
index 51d8520..ab9d40f 100644
--- a/test/benchmarks/testcfg.py
+++ b/test/benchmarks/testcfg.py
@@ -91,7 +91,7 @@
return [test]
def GetBuildRequirements(self):
- return ['sample', 'sample=shell']
+ return ['d8']
def GetTestStatus(self, sections, defs):
pass
diff --git a/test/cctest/SConscript b/test/cctest/SConscript
index b0a7166..621d8ec 100644
--- a/test/cctest/SConscript
+++ b/test/cctest/SConscript
@@ -65,6 +65,7 @@
'test-debug.cc',
'test-decls.cc',
'test-deoptimization.cc',
+ 'test-dictionary.cc',
'test-diy-fp.cc',
'test-double.cc',
'test-dtoa.cc',
@@ -95,7 +96,8 @@
'test-threads.cc',
'test-unbound-queue.cc',
'test-utils.cc',
- 'test-version.cc'
+ 'test-version.cc',
+ 'test-weakmaps.cc'
],
'arch:arm': [
'test-assembler-arm.cc',
diff --git a/test/cctest/cctest.gyp b/test/cctest/cctest.gyp
index 9cbcb9c..c0b5316 100644
--- a/test/cctest/cctest.gyp
+++ b/test/cctest/cctest.gyp
@@ -26,10 +26,10 @@
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
{
+ 'includes': ['../../build/common.gypi'],
'variables': {
'generated_file': '<(SHARED_INTERMEDIATE_DIR)/resources.cc',
},
- 'includes': [ '../../build/v8-features.gypi' ],
'targets': [
{
'target_name': 'cctest',
@@ -61,6 +61,7 @@
'test-debug.cc',
'test-decls.cc',
'test-deoptimization.cc',
+ 'test-dictionary.cc',
'test-diy-fp.cc',
'test-double.cc',
'test-dtoa.cc',
@@ -78,6 +79,7 @@
'test-log.cc',
'test-mark-compact.cc',
'test-parsing.cc',
+ 'test-platform-tls.cc',
'test-profile-generator.cc',
'test-regexp.cc',
'test-reloc-info.cc',
diff --git a/test/cctest/cctest.status b/test/cctest/cctest.status
index 6e7824f..78f3756 100644
--- a/test/cctest/cctest.status
+++ b/test/cctest/cctest.status
@@ -38,6 +38,9 @@
test-serialize/TestThatAlwaysFails: FAIL
test-serialize/DependentTestThatAlwaysFails: FAIL
+# We do not yet shrink weak maps after they have been emptied by the GC
+test-weakmaps/Shrinking: FAIL
+
##############################################################################
[ $arch == arm ]
diff --git a/test/cctest/test-accessors.cc b/test/cctest/test-accessors.cc
index 028f82f..d95536d 100644
--- a/test/cctest/test-accessors.cc
+++ b/test/cctest/test-accessors.cc
@@ -44,8 +44,6 @@
using ::v8::AccessorInfo;
using ::v8::Extension;
-namespace i = ::v8::internal;
-
static v8::Handle<Value> handle_property(Local<String> name,
const AccessorInfo&) {
ApiTestFuzzer::Fuzz();
diff --git a/test/cctest/test-alloc.cc b/test/cctest/test-alloc.cc
index 4d9c218..9767192 100644
--- a/test/cctest/test-alloc.cc
+++ b/test/cctest/test-alloc.cc
@@ -186,7 +186,9 @@
TEST(CodeRange) {
const int code_range_size = 16*MB;
OS::Setup();
- Isolate::Current()->code_range()->Setup(code_range_size);
+ Isolate::Current()->InitializeLoggingAndCounters();
+ CodeRange* code_range = new CodeRange(Isolate::Current());
+ code_range->Setup(code_range_size);
int current_allocated = 0;
int total_allocated = 0;
List<Block> blocks(1000);
@@ -198,8 +200,7 @@
size_t requested = (Page::kPageSize << (Pseudorandom() % 6)) +
Pseudorandom() % 5000 + 1;
size_t allocated = 0;
- void* base = Isolate::Current()->code_range()->
- AllocateRawMemory(requested, &allocated);
+ void* base = code_range->AllocateRawMemory(requested, &allocated);
CHECK(base != NULL);
blocks.Add(Block(base, static_cast<int>(allocated)));
current_allocated += static_cast<int>(allocated);
@@ -207,8 +208,7 @@
} else {
// Free a block.
int index = Pseudorandom() % blocks.length();
- Isolate::Current()->code_range()->FreeRawMemory(
- blocks[index].base, blocks[index].size);
+ code_range->FreeRawMemory(blocks[index].base, blocks[index].size);
current_allocated -= blocks[index].size;
if (index < blocks.length() - 1) {
blocks[index] = blocks.RemoveLast();
@@ -218,5 +218,6 @@
}
}
- Isolate::Current()->code_range()->TearDown();
+ code_range->TearDown();
+ delete code_range;
}
diff --git a/test/cctest/test-api.cc b/test/cctest/test-api.cc
index 5d197be..3d40a73 100644
--- a/test/cctest/test-api.cc
+++ b/test/cctest/test-api.cc
@@ -72,8 +72,6 @@
using ::v8::V8;
using ::v8::Value;
-namespace i = ::i;
-
static void ExpectString(const char* code, const char* expected) {
Local<Value> result = CompileRun(code);
@@ -331,16 +329,14 @@
class TestResource: public String::ExternalStringResource {
public:
- static int dispose_count;
-
- explicit TestResource(uint16_t* data)
- : data_(data), length_(0) {
+ explicit TestResource(uint16_t* data, int* counter = NULL)
+ : data_(data), length_(0), counter_(counter) {
while (data[length_]) ++length_;
}
~TestResource() {
i::DeleteArray(data_);
- ++dispose_count;
+ if (counter_ != NULL) ++*counter_;
}
const uint16_t* data() const {
@@ -353,23 +349,18 @@
private:
uint16_t* data_;
size_t length_;
+ int* counter_;
};
-int TestResource::dispose_count = 0;
-
-
class TestAsciiResource: public String::ExternalAsciiStringResource {
public:
- static int dispose_count;
-
- explicit TestAsciiResource(const char* data)
- : data_(data),
- length_(strlen(data)) { }
+ explicit TestAsciiResource(const char* data, int* counter = NULL)
+ : data_(data), length_(strlen(data)), counter_(counter) { }
~TestAsciiResource() {
i::DeleteArray(data_);
- ++dispose_count;
+ if (counter_ != NULL) ++*counter_;
}
const char* data() const {
@@ -382,20 +373,18 @@
private:
const char* data_;
size_t length_;
+ int* counter_;
};
-int TestAsciiResource::dispose_count = 0;
-
-
THREADED_TEST(ScriptUsingStringResource) {
- TestResource::dispose_count = 0;
+ int dispose_count = 0;
const char* c_source = "1 + 2 * 3";
uint16_t* two_byte_source = AsciiToTwoByteString(c_source);
{
v8::HandleScope scope;
LocalContext env;
- TestResource* resource = new TestResource(two_byte_source);
+ TestResource* resource = new TestResource(two_byte_source, &dispose_count);
Local<String> source = String::NewExternal(resource);
Local<Script> script = Script::Compile(source);
Local<Value> value = script->Run();
@@ -405,37 +394,38 @@
CHECK_EQ(resource,
static_cast<TestResource*>(source->GetExternalStringResource()));
HEAP->CollectAllGarbage(false);
- CHECK_EQ(0, TestResource::dispose_count);
+ CHECK_EQ(0, dispose_count);
}
v8::internal::Isolate::Current()->compilation_cache()->Clear();
HEAP->CollectAllGarbage(false);
- CHECK_EQ(1, TestResource::dispose_count);
+ CHECK_EQ(1, dispose_count);
}
THREADED_TEST(ScriptUsingAsciiStringResource) {
- TestAsciiResource::dispose_count = 0;
+ int dispose_count = 0;
const char* c_source = "1 + 2 * 3";
{
v8::HandleScope scope;
LocalContext env;
Local<String> source =
- String::NewExternal(new TestAsciiResource(i::StrDup(c_source)));
+ String::NewExternal(new TestAsciiResource(i::StrDup(c_source),
+ &dispose_count));
Local<Script> script = Script::Compile(source);
Local<Value> value = script->Run();
CHECK(value->IsNumber());
CHECK_EQ(7, value->Int32Value());
HEAP->CollectAllGarbage(false);
- CHECK_EQ(0, TestAsciiResource::dispose_count);
+ CHECK_EQ(0, dispose_count);
}
i::Isolate::Current()->compilation_cache()->Clear();
HEAP->CollectAllGarbage(false);
- CHECK_EQ(1, TestAsciiResource::dispose_count);
+ CHECK_EQ(1, dispose_count);
}
THREADED_TEST(ScriptMakingExternalString) {
- TestResource::dispose_count = 0;
+ int dispose_count = 0;
uint16_t* two_byte_source = AsciiToTwoByteString("1 + 2 * 3");
{
v8::HandleScope scope;
@@ -444,23 +434,24 @@
// Trigger GCs so that the newly allocated string moves to old gen.
HEAP->CollectGarbage(i::NEW_SPACE); // in survivor space now
HEAP->CollectGarbage(i::NEW_SPACE); // in old gen now
- bool success = source->MakeExternal(new TestResource(two_byte_source));
+ bool success = source->MakeExternal(new TestResource(two_byte_source,
+ &dispose_count));
CHECK(success);
Local<Script> script = Script::Compile(source);
Local<Value> value = script->Run();
CHECK(value->IsNumber());
CHECK_EQ(7, value->Int32Value());
HEAP->CollectAllGarbage(false);
- CHECK_EQ(0, TestResource::dispose_count);
+ CHECK_EQ(0, dispose_count);
}
i::Isolate::Current()->compilation_cache()->Clear();
HEAP->CollectAllGarbage(false);
- CHECK_EQ(1, TestResource::dispose_count);
+ CHECK_EQ(1, dispose_count);
}
THREADED_TEST(ScriptMakingExternalAsciiString) {
- TestAsciiResource::dispose_count = 0;
+ int dispose_count = 0;
const char* c_source = "1 + 2 * 3";
{
v8::HandleScope scope;
@@ -470,18 +461,18 @@
HEAP->CollectGarbage(i::NEW_SPACE); // in survivor space now
HEAP->CollectGarbage(i::NEW_SPACE); // in old gen now
bool success = source->MakeExternal(
- new TestAsciiResource(i::StrDup(c_source)));
+ new TestAsciiResource(i::StrDup(c_source), &dispose_count));
CHECK(success);
Local<Script> script = Script::Compile(source);
Local<Value> value = script->Run();
CHECK(value->IsNumber());
CHECK_EQ(7, value->Int32Value());
HEAP->CollectAllGarbage(false);
- CHECK_EQ(0, TestAsciiResource::dispose_count);
+ CHECK_EQ(0, dispose_count);
}
i::Isolate::Current()->compilation_cache()->Clear();
HEAP->CollectAllGarbage(false);
- CHECK_EQ(1, TestAsciiResource::dispose_count);
+ CHECK_EQ(1, dispose_count);
}
@@ -605,49 +596,52 @@
THREADED_TEST(ScavengeExternalString) {
- TestResource::dispose_count = 0;
+ int dispose_count = 0;
bool in_new_space = false;
{
v8::HandleScope scope;
uint16_t* two_byte_string = AsciiToTwoByteString("test string");
Local<String> string =
- String::NewExternal(new TestResource(two_byte_string));
+ String::NewExternal(new TestResource(two_byte_string,
+ &dispose_count));
i::Handle<i::String> istring = v8::Utils::OpenHandle(*string);
HEAP->CollectGarbage(i::NEW_SPACE);
in_new_space = HEAP->InNewSpace(*istring);
CHECK(in_new_space || HEAP->old_data_space()->Contains(*istring));
- CHECK_EQ(0, TestResource::dispose_count);
+ CHECK_EQ(0, dispose_count);
}
HEAP->CollectGarbage(in_new_space ? i::NEW_SPACE : i::OLD_DATA_SPACE);
- CHECK_EQ(1, TestResource::dispose_count);
+ CHECK_EQ(1, dispose_count);
}
THREADED_TEST(ScavengeExternalAsciiString) {
- TestAsciiResource::dispose_count = 0;
+ int dispose_count = 0;
bool in_new_space = false;
{
v8::HandleScope scope;
const char* one_byte_string = "test string";
Local<String> string = String::NewExternal(
- new TestAsciiResource(i::StrDup(one_byte_string)));
+ new TestAsciiResource(i::StrDup(one_byte_string), &dispose_count));
i::Handle<i::String> istring = v8::Utils::OpenHandle(*string);
HEAP->CollectGarbage(i::NEW_SPACE);
in_new_space = HEAP->InNewSpace(*istring);
CHECK(in_new_space || HEAP->old_data_space()->Contains(*istring));
- CHECK_EQ(0, TestAsciiResource::dispose_count);
+ CHECK_EQ(0, dispose_count);
}
HEAP->CollectGarbage(in_new_space ? i::NEW_SPACE : i::OLD_DATA_SPACE);
- CHECK_EQ(1, TestAsciiResource::dispose_count);
+ CHECK_EQ(1, dispose_count);
}
class TestAsciiResourceWithDisposeControl: public TestAsciiResource {
public:
+ // Only used by non-threaded tests, so it can use static fields.
static int dispose_calls;
+ static int dispose_count;
TestAsciiResourceWithDisposeControl(const char* data, bool dispose)
- : TestAsciiResource(data),
+ : TestAsciiResource(data, &dispose_count),
dispose_(dispose) { }
void Dispose() {
@@ -659,6 +653,7 @@
};
+int TestAsciiResourceWithDisposeControl::dispose_count = 0;
int TestAsciiResourceWithDisposeControl::dispose_calls = 0;
@@ -666,7 +661,7 @@
const char* c_source = "1 + 2 * 3";
// Use a stack allocated external string resource allocated object.
- TestAsciiResource::dispose_count = 0;
+ TestAsciiResourceWithDisposeControl::dispose_count = 0;
TestAsciiResourceWithDisposeControl::dispose_calls = 0;
TestAsciiResourceWithDisposeControl res_stack(i::StrDup(c_source), false);
{
@@ -678,15 +673,15 @@
CHECK(value->IsNumber());
CHECK_EQ(7, value->Int32Value());
HEAP->CollectAllGarbage(false);
- CHECK_EQ(0, TestAsciiResource::dispose_count);
+ CHECK_EQ(0, TestAsciiResourceWithDisposeControl::dispose_count);
}
i::Isolate::Current()->compilation_cache()->Clear();
HEAP->CollectAllGarbage(false);
CHECK_EQ(1, TestAsciiResourceWithDisposeControl::dispose_calls);
- CHECK_EQ(0, TestAsciiResource::dispose_count);
+ CHECK_EQ(0, TestAsciiResourceWithDisposeControl::dispose_count);
// Use a heap allocated external string resource allocated object.
- TestAsciiResource::dispose_count = 0;
+ TestAsciiResourceWithDisposeControl::dispose_count = 0;
TestAsciiResourceWithDisposeControl::dispose_calls = 0;
TestAsciiResource* res_heap =
new TestAsciiResourceWithDisposeControl(i::StrDup(c_source), true);
@@ -699,12 +694,12 @@
CHECK(value->IsNumber());
CHECK_EQ(7, value->Int32Value());
HEAP->CollectAllGarbage(false);
- CHECK_EQ(0, TestAsciiResource::dispose_count);
+ CHECK_EQ(0, TestAsciiResourceWithDisposeControl::dispose_count);
}
i::Isolate::Current()->compilation_cache()->Clear();
HEAP->CollectAllGarbage(false);
CHECK_EQ(1, TestAsciiResourceWithDisposeControl::dispose_calls);
- CHECK_EQ(1, TestAsciiResource::dispose_count);
+ CHECK_EQ(1, TestAsciiResourceWithDisposeControl::dispose_count);
}
@@ -828,7 +823,7 @@
static v8::Handle<v8::Value> callback(const v8::Arguments& args) {
void* ptr = v8::External::Unwrap(args.Data());
CHECK_EQ(expected_ptr, ptr);
- return v8::Boolean::New(true);
+ return v8::True();
}
@@ -2642,7 +2637,7 @@
v8::Handle<Value> CCatcher(const v8::Arguments& args) {
- if (args.Length() < 1) return v8::Boolean::New(false);
+ if (args.Length() < 1) return v8::False();
v8::HandleScope scope;
v8::TryCatch try_catch;
Local<Value> result = v8::Script::Compile(args[0]->ToString())->Run();
@@ -3588,6 +3583,114 @@
}
+static v8::Handle<Value> UnboxedDoubleIndexedPropertyGetter(
+ uint32_t index,
+ const AccessorInfo& info) {
+ ApiTestFuzzer::Fuzz();
+ if (index < 25) {
+ return v8::Handle<Value>(v8_num(index));
+ }
+ return v8::Handle<Value>();
+}
+
+
+static v8::Handle<Value> UnboxedDoubleIndexedPropertySetter(
+ uint32_t index,
+ Local<Value> value,
+ const AccessorInfo& info) {
+ ApiTestFuzzer::Fuzz();
+ if (index < 25) {
+ return v8::Handle<Value>(v8_num(index));
+ }
+ return v8::Handle<Value>();
+}
+
+
+Handle<v8::Array> UnboxedDoubleIndexedPropertyEnumerator(
+ const AccessorInfo& info) {
+ // Force the list of returned keys to be stored in a FastDoubleArray.
+ Local<Script> indexed_property_names_script = Script::Compile(v8_str(
+ "keys = new Array(); keys[125000] = 1;"
+ "for(i = 0; i < 80000; i++) { keys[i] = i; };"
+ "keys.length = 25; keys;"));
+ Local<Value> result = indexed_property_names_script->Run();
+ return Local<v8::Array>(::v8::Array::Cast(*result));
+}
+
+
+// Make sure that the interceptor code in the runtime properly handles
+// merging property name lists for double-array-backed arrays.
+THREADED_TEST(IndexedInterceptorUnboxedDoubleWithIndexedAccessor) {
+ v8::HandleScope scope;
+ Local<ObjectTemplate> templ = ObjectTemplate::New();
+ templ->SetIndexedPropertyHandler(UnboxedDoubleIndexedPropertyGetter,
+ UnboxedDoubleIndexedPropertySetter,
+ 0,
+ 0,
+ UnboxedDoubleIndexedPropertyEnumerator);
+ LocalContext context;
+ context->Global()->Set(v8_str("obj"), templ->NewInstance());
+ // When obj is created, force it to be stored in a FastDoubleArray.
+ Local<Script> create_unboxed_double_script = Script::Compile(v8_str(
+ "obj[125000] = 1; for(i = 0; i < 80000; i+=2) { obj[i] = i; } "
+ "key_count = 0; "
+ "for (x in obj) {key_count++;};"
+ "obj;"));
+ Local<Value> result = create_unboxed_double_script->Run();
+ CHECK(result->ToObject()->HasRealIndexedProperty(2000));
+ Local<Script> key_count_check = Script::Compile(v8_str(
+ "key_count;"));
+ result = key_count_check->Run();
+ CHECK_EQ(v8_num(40013), result);
+}
+
+
+Handle<v8::Array> NonStrictArgsIndexedPropertyEnumerator(
+ const AccessorInfo& info) {
+ // Force the list of returned keys to be stored in an Arguments object.
+ Local<Script> indexed_property_names_script = Script::Compile(v8_str(
+ "function f(w,x) {"
+ " return arguments;"
+ "}"
+ "keys = f(0, 1, 2, 3);"
+ "keys;"));
+ Local<Value> result = indexed_property_names_script->Run();
+ return Local<v8::Array>(static_cast<v8::Array*>(::v8::Object::Cast(*result)));
+}
+
+
+static v8::Handle<Value> NonStrictIndexedPropertyGetter(
+ uint32_t index,
+ const AccessorInfo& info) {
+ ApiTestFuzzer::Fuzz();
+ if (index < 4) {
+ return v8::Handle<Value>(v8_num(index));
+ }
+ return v8::Handle<Value>();
+}
+
+
+// Make sure that the interceptor code in the runtime properly handles
+// merging property name lists for non-string arguments arrays.
+THREADED_TEST(IndexedInterceptorNonStrictArgsWithIndexedAccessor) {
+ v8::HandleScope scope;
+ Local<ObjectTemplate> templ = ObjectTemplate::New();
+ templ->SetIndexedPropertyHandler(NonStrictIndexedPropertyGetter,
+ 0,
+ 0,
+ 0,
+ NonStrictArgsIndexedPropertyEnumerator);
+ LocalContext context;
+ context->Global()->Set(v8_str("obj"), templ->NewInstance());
+ Local<Script> create_args_script =
+ Script::Compile(v8_str(
+ "var key_count = 0;"
+ "for (x in obj) {key_count++;} key_count;"));
+ Local<Value> result = create_args_script->Run();
+ CHECK_EQ(v8_num(4), result);
+}
+
+
static v8::Handle<Value> IdentityIndexedPropertyGetter(
uint32_t index,
const AccessorInfo& info) {
@@ -5221,6 +5324,40 @@
CHECK_EQ(0, strncmp("d\1", buf, 2));
uint16_t answer7[] = {'d', 0x101};
CHECK_EQ(0, StrNCmp16(answer7, wbuf, 2));
+
+ memset(wbuf, 0x1, sizeof(wbuf));
+ wbuf[5] = 'X';
+ len = str->Write(wbuf, 0, 6, String::NO_NULL_TERMINATION);
+ CHECK_EQ(5, len);
+ CHECK_EQ('X', wbuf[5]);
+ uint16_t answer8a[] = {'a', 'b', 'c', 'd', 'e'};
+ uint16_t answer8b[] = {'a', 'b', 'c', 'd', 'e', '\0'};
+ CHECK_EQ(0, StrNCmp16(answer8a, wbuf, 5));
+ CHECK_NE(0, StrCmp16(answer8b, wbuf));
+ wbuf[5] = '\0';
+ CHECK_EQ(0, StrCmp16(answer8b, wbuf));
+
+ memset(buf, 0x1, sizeof(buf));
+ buf[5] = 'X';
+ len = str->WriteAscii(buf, 0, 6, String::NO_NULL_TERMINATION);
+ CHECK_EQ(5, len);
+ CHECK_EQ('X', buf[5]);
+ CHECK_EQ(0, strncmp("abcde", buf, 5));
+ CHECK_NE(0, strcmp("abcde", buf));
+ buf[5] = '\0';
+ CHECK_EQ(0, strcmp("abcde", buf));
+
+ memset(utf8buf, 0x1, sizeof(utf8buf));
+ utf8buf[8] = 'X';
+ len = str2->WriteUtf8(utf8buf, sizeof(utf8buf), &charlen,
+ String::NO_NULL_TERMINATION);
+ CHECK_EQ(8, len);
+ CHECK_EQ('X', utf8buf[8]);
+ CHECK_EQ(5, charlen);
+ CHECK_EQ(0, strncmp(utf8buf, "abc\303\260\342\230\203", 8));
+ CHECK_NE(0, strcmp(utf8buf, "abc\303\260\342\230\203"));
+ utf8buf[8] = '\0';
+ CHECK_EQ(0, strcmp(utf8buf, "abc\303\260\342\230\203"));
}
@@ -7021,53 +7158,34 @@
}
-THREADED_TEST(SetPrototypeProperties) {
+THREADED_TEST(FunctionReadOnlyPrototype) {
v8::HandleScope handle_scope;
LocalContext context;
Local<v8::FunctionTemplate> t1 = v8::FunctionTemplate::New();
- t1->SetPrototypeAttributes(v8::DontDelete);
+ t1->PrototypeTemplate()->Set(v8_str("x"), v8::Integer::New(42));
+ t1->ReadOnlyPrototype();
context->Global()->Set(v8_str("func1"), t1->GetFunction());
+ // Configured value of ReadOnly flag.
CHECK(CompileRun(
"(function() {"
" descriptor = Object.getOwnPropertyDescriptor(func1, 'prototype');"
- " return (descriptor['writable'] == true) &&"
- " (descriptor['enumerable'] == true) &&"
- " (descriptor['configurable'] == false);"
+ " return (descriptor['writable'] == false);"
"})()")->BooleanValue());
+ CHECK_EQ(42, CompileRun("func1.prototype.x")->Int32Value());
+ CHECK_EQ(42,
+ CompileRun("func1.prototype = {}; func1.prototype.x")->Int32Value());
Local<v8::FunctionTemplate> t2 = v8::FunctionTemplate::New();
- t2->SetPrototypeAttributes(v8::DontEnum);
+ t2->PrototypeTemplate()->Set(v8_str("x"), v8::Integer::New(42));
context->Global()->Set(v8_str("func2"), t2->GetFunction());
+ // Default value of ReadOnly flag.
CHECK(CompileRun(
"(function() {"
" descriptor = Object.getOwnPropertyDescriptor(func2, 'prototype');"
- " return (descriptor['writable'] == true) &&"
- " (descriptor['enumerable'] == false) &&"
- " (descriptor['configurable'] == true);"
+ " return (descriptor['writable'] == true);"
"})()")->BooleanValue());
-
- Local<v8::FunctionTemplate> t3 = v8::FunctionTemplate::New();
- t3->SetPrototypeAttributes(v8::ReadOnly);
- context->Global()->Set(v8_str("func3"), t3->GetFunction());
- CHECK(CompileRun(
- "(function() {"
- " descriptor = Object.getOwnPropertyDescriptor(func3, 'prototype');"
- " return (descriptor['writable'] == false) &&"
- " (descriptor['enumerable'] == true) &&"
- " (descriptor['configurable'] == true);"
- "})()")->BooleanValue());
-
- Local<v8::FunctionTemplate> t4 = v8::FunctionTemplate::New();
- t4->SetPrototypeAttributes(v8::ReadOnly | v8::DontEnum | v8::DontDelete);
- context->Global()->Set(v8_str("func4"), t4->GetFunction());
- CHECK(CompileRun(
- "(function() {"
- " descriptor = Object.getOwnPropertyDescriptor(func4, 'prototype');"
- " return (descriptor['writable'] == false) &&"
- " (descriptor['enumerable'] == false) &&"
- " (descriptor['configurable'] == false);"
- "})()")->BooleanValue());
+ CHECK_EQ(42, CompileRun("func2.prototype.x")->Int32Value());
}
@@ -7206,7 +7324,7 @@
CHECK(value->IsBoolean());
CHECK_EQ(true, value->BooleanValue());
- Handle<Value> args3[] = { v8::Boolean::New(true) };
+ Handle<Value> args3[] = { v8::True() };
Local<Value> value_obj3 = instance->CallAsConstructor(1, args3);
CHECK(value_obj3->IsObject());
Local<Object> object3 = Local<Object>::Cast(value_obj3);
@@ -9477,10 +9595,7 @@
static v8::Handle<Value> IsConstructHandler(const v8::Arguments& args) {
ApiTestFuzzer::Fuzz();
- if (args.IsConstructCall()) {
- return v8::Boolean::New(true);
- }
- return v8::Boolean::New(false);
+ return v8::Boolean::New(args.IsConstructCall());
}
@@ -11646,7 +11761,7 @@
}
HEAP->CollectAllGarbage(false); // Force GC to trigger verification.
for (int i = 0; i < kElementCount; i++) {
- CHECK_EQ(i % 256, pixels->get(i));
+ CHECK_EQ(i % 256, pixels->get_scalar(i));
CHECK_EQ(i % 256, pixel_data[i]);
}
@@ -12119,7 +12234,8 @@
}
HEAP->CollectAllGarbage(false); // Force GC to trigger verification.
for (int i = 0; i < kElementCount; i++) {
- CHECK_EQ(static_cast<int64_t>(i), static_cast<int64_t>(array->get(i)));
+ CHECK_EQ(static_cast<int64_t>(i),
+ static_cast<int64_t>(array->get_scalar(i)));
CHECK_EQ(static_cast<int64_t>(i), static_cast<int64_t>(array_data[i]));
}
@@ -13636,6 +13752,9 @@
"str2;";
Local<Value> result = CompileRun(init_code);
+ Local<Value> indexof = CompileRun("str2.indexOf('els')");
+ Local<Value> lastindexof = CompileRun("str2.lastIndexOf('dab')");
+
CHECK(result->IsString());
i::Handle<i::String> string = v8::Utils::OpenHandle(String::Cast(*result));
int length = string->length();
@@ -13701,6 +13820,10 @@
ExpectString("str2.charAt(2);", "e");
+ ExpectObject("str2.indexOf('els');", indexof);
+
+ ExpectObject("str2.lastIndexOf('dab');", lastindexof);
+
reresult = CompileRun("str2.charCodeAt(2);");
CHECK_EQ(static_cast<int32_t>('e'), reresult->Int32Value());
}
@@ -14354,34 +14477,34 @@
v8::Handle<v8::RegExp> re = v8::RegExp::New(v8_str("foo"), v8::RegExp::kNone);
CHECK(re->IsRegExp());
CHECK(re->GetSource()->Equals(v8_str("foo")));
- CHECK_EQ(re->GetFlags(), v8::RegExp::kNone);
+ CHECK_EQ(v8::RegExp::kNone, re->GetFlags());
re = v8::RegExp::New(v8_str("bar"),
static_cast<v8::RegExp::Flags>(v8::RegExp::kIgnoreCase |
v8::RegExp::kGlobal));
CHECK(re->IsRegExp());
CHECK(re->GetSource()->Equals(v8_str("bar")));
- CHECK_EQ(static_cast<int>(re->GetFlags()),
- v8::RegExp::kIgnoreCase | v8::RegExp::kGlobal);
+ CHECK_EQ(v8::RegExp::kIgnoreCase | v8::RegExp::kGlobal,
+ static_cast<int>(re->GetFlags()));
re = v8::RegExp::New(v8_str("baz"),
static_cast<v8::RegExp::Flags>(v8::RegExp::kIgnoreCase |
v8::RegExp::kMultiline));
CHECK(re->IsRegExp());
CHECK(re->GetSource()->Equals(v8_str("baz")));
- CHECK_EQ(static_cast<int>(re->GetFlags()),
- v8::RegExp::kIgnoreCase | v8::RegExp::kMultiline);
+ CHECK_EQ(v8::RegExp::kIgnoreCase | v8::RegExp::kMultiline,
+ static_cast<int>(re->GetFlags()));
re = CompileRun("/quux/").As<v8::RegExp>();
CHECK(re->IsRegExp());
CHECK(re->GetSource()->Equals(v8_str("quux")));
- CHECK_EQ(re->GetFlags(), v8::RegExp::kNone);
+ CHECK_EQ(v8::RegExp::kNone, re->GetFlags());
re = CompileRun("/quux/gm").As<v8::RegExp>();
CHECK(re->IsRegExp());
CHECK(re->GetSource()->Equals(v8_str("quux")));
- CHECK_EQ(static_cast<int>(re->GetFlags()),
- v8::RegExp::kGlobal | v8::RegExp::kMultiline);
+ CHECK_EQ(v8::RegExp::kGlobal | v8::RegExp::kMultiline,
+ static_cast<int>(re->GetFlags()));
// Override the RegExp constructor and check the API constructor
// still works.
@@ -14390,15 +14513,15 @@
re = v8::RegExp::New(v8_str("foobar"), v8::RegExp::kNone);
CHECK(re->IsRegExp());
CHECK(re->GetSource()->Equals(v8_str("foobar")));
- CHECK_EQ(re->GetFlags(), v8::RegExp::kNone);
+ CHECK_EQ(v8::RegExp::kNone, re->GetFlags());
re = v8::RegExp::New(v8_str("foobarbaz"),
static_cast<v8::RegExp::Flags>(v8::RegExp::kIgnoreCase |
v8::RegExp::kMultiline));
CHECK(re->IsRegExp());
CHECK(re->GetSource()->Equals(v8_str("foobarbaz")));
- CHECK_EQ(static_cast<int>(re->GetFlags()),
- v8::RegExp::kIgnoreCase | v8::RegExp::kMultiline);
+ CHECK_EQ(v8::RegExp::kIgnoreCase | v8::RegExp::kMultiline,
+ static_cast<int>(re->GetFlags()));
context->Global()->Set(v8_str("re"), re);
ExpectTrue("re.test('FoobarbaZ')");
@@ -14579,6 +14702,24 @@
}
+THREADED_TEST(CreationContextOfJsFunction) {
+ HandleScope handle_scope;
+ Persistent<Context> context = Context::New();
+ InstallContextId(context, 1);
+
+ Local<Object> function;
+ {
+ Context::Scope scope(context);
+ function = CompileRun("function foo() {}; foo").As<Object>();
+ }
+
+ CHECK(function->CreationContext() == context);
+ CheckContextId(function, 1);
+
+ context.Dispose();
+}
+
+
Handle<Value> HasOwnPropertyIndexedPropertyGetter(uint32_t index,
const AccessorInfo& info) {
if (index == 42) return v8_str("yes");
@@ -14930,3 +15071,107 @@
context.Dispose();
}
+
+
+static void TestReceiver(Local<Value> expected_result,
+ Local<Value> expected_receiver,
+ const char* code) {
+ Local<Value> result = CompileRun(code);
+ CHECK(result->IsObject());
+ CHECK(expected_receiver->Equals(result->ToObject()->Get(1)));
+ CHECK(expected_result->Equals(result->ToObject()->Get(0)));
+}
+
+
+THREADED_TEST(ForeignFunctionReceiver) {
+ HandleScope scope;
+
+ // Create two contexts with different "id" properties ('i' and 'o').
+ // Call a function both from its own context and from the foreign
+ // context, and see what "this" is bound to (returning both "this"
+ // and "this.id" for comparison).
+
+ Persistent<Context> foreign_context = v8::Context::New();
+ foreign_context->Enter();
+ Local<Value> foreign_function =
+ CompileRun("function func() { return { 0: this.id, "
+ " 1: this, "
+ " toString: function() { "
+ " return this[0];"
+ " }"
+ " };"
+ "}"
+ "var id = 'i';"
+ "func;");
+ CHECK(foreign_function->IsFunction());
+ foreign_context->Exit();
+
+ LocalContext context;
+
+ Local<String> password = v8_str("Password");
+ // Don't get hit by security checks when accessing foreign_context's
+ // global receiver (aka. global proxy).
+ context->SetSecurityToken(password);
+ foreign_context->SetSecurityToken(password);
+
+ Local<String> i = v8_str("i");
+ Local<String> o = v8_str("o");
+ Local<String> id = v8_str("id");
+
+ CompileRun("function ownfunc() { return { 0: this.id, "
+ " 1: this, "
+ " toString: function() { "
+ " return this[0];"
+ " }"
+ " };"
+ "}"
+ "var id = 'o';"
+ "ownfunc");
+ context->Global()->Set(v8_str("func"), foreign_function);
+
+ // Sanity check the contexts.
+ CHECK(i->Equals(foreign_context->Global()->Get(id)));
+ CHECK(o->Equals(context->Global()->Get(id)));
+
+ // Checking local function's receiver.
+ // Calling function using its call/apply methods.
+ TestReceiver(o, context->Global(), "ownfunc.call()");
+ TestReceiver(o, context->Global(), "ownfunc.apply()");
+ // Making calls through built-in functions.
+ TestReceiver(o, context->Global(), "[1].map(ownfunc)[0]");
+ CHECK(o->Equals(CompileRun("'abcbd'.replace(/b/,ownfunc)[1]")));
+ CHECK(o->Equals(CompileRun("'abcbd'.replace(/b/g,ownfunc)[1]")));
+ CHECK(o->Equals(CompileRun("'abcbd'.replace(/b/g,ownfunc)[3]")));
+ // Calling with environment record as base.
+ TestReceiver(o, context->Global(), "ownfunc()");
+ // Calling with no base.
+ TestReceiver(o, context->Global(), "(1,ownfunc)()");
+
+ // Checking foreign function return value.
+ // Calling function using its call/apply methods.
+ TestReceiver(i, foreign_context->Global(), "func.call()");
+ TestReceiver(i, foreign_context->Global(), "func.apply()");
+ // Calling function using another context's call/apply methods.
+ TestReceiver(i, foreign_context->Global(),
+ "Function.prototype.call.call(func)");
+ TestReceiver(i, foreign_context->Global(),
+ "Function.prototype.call.apply(func)");
+ TestReceiver(i, foreign_context->Global(),
+ "Function.prototype.apply.call(func)");
+ TestReceiver(i, foreign_context->Global(),
+ "Function.prototype.apply.apply(func)");
+ // Making calls through built-in functions.
+ TestReceiver(i, foreign_context->Global(), "[1].map(func)[0]");
+ // ToString(func()) is func()[0], i.e., the returned this.id.
+ CHECK(i->Equals(CompileRun("'abcbd'.replace(/b/,func)[1]")));
+ CHECK(i->Equals(CompileRun("'abcbd'.replace(/b/g,func)[1]")));
+ CHECK(i->Equals(CompileRun("'abcbd'.replace(/b/g,func)[3]")));
+
+ // TODO(1547): Make the following also return "i".
+ // Calling with environment record as base.
+ TestReceiver(o, context->Global(), "func()");
+ // Calling with no base.
+ TestReceiver(o, context->Global(), "(1,func)()");
+
+ foreign_context.Dispose();
+}
diff --git a/test/cctest/test-assembler-arm.cc b/test/cctest/test-assembler-arm.cc
index 1703203..ecbf956 100644
--- a/test/cctest/test-assembler-arm.cc
+++ b/test/cctest/test-assembler-arm.cc
@@ -1010,4 +1010,18 @@
CHECK_EQ(0xffffffff, i.d);
}
+
+TEST(12) {
+ // Test chaining of label usages within instructions (issue 1644).
+ InitializeVM();
+ v8::HandleScope scope;
+ Assembler assm(Isolate::Current(), NULL, 0);
+
+ Label target;
+ __ b(eq, &target);
+ __ b(ne, &target);
+ __ bind(&target);
+ __ nop();
+}
+
#undef __
diff --git a/test/cctest/test-assembler-ia32.cc b/test/cctest/test-assembler-ia32.cc
index e9d799b..839b7f5 100644
--- a/test/cctest/test-assembler-ia32.cc
+++ b/test/cctest/test-assembler-ia32.cc
@@ -394,4 +394,18 @@
CHECK_EQ(kNaN, f(OS::nan_value(), 1.1));
}
+
+TEST(AssemblerIa3210) {
+ // Test chaining of label usages within instructions (issue 1644).
+ InitializeVM();
+ v8::HandleScope scope;
+ Assembler assm(Isolate::Current(), NULL, 0);
+
+ Label target;
+ __ j(equal, &target);
+ __ j(not_equal, &target);
+ __ bind(&target);
+ __ nop();
+}
+
#undef __
diff --git a/test/cctest/test-assembler-mips.cc b/test/cctest/test-assembler-mips.cc
index 8ac89f6..ca11a2a 100644
--- a/test/cctest/test-assembler-mips.cc
+++ b/test/cctest/test-assembler-mips.cc
@@ -1083,17 +1083,17 @@
CpuFeatures::Scope scope(FPU);
__ sw(t0, MemOperand(a0, OFFSET_OF(T, cvt_small_in)));
- __ Cvt_d_uw(f10, t0);
+ __ Cvt_d_uw(f10, t0, f22);
__ sdc1(f10, MemOperand(a0, OFFSET_OF(T, cvt_small_out)));
- __ Trunc_uw_d(f10, f10);
+ __ Trunc_uw_d(f10, f10, f22);
__ swc1(f10, MemOperand(a0, OFFSET_OF(T, trunc_small_out)));
__ sw(t0, MemOperand(a0, OFFSET_OF(T, cvt_big_in)));
- __ Cvt_d_uw(f8, t0);
+ __ Cvt_d_uw(f8, t0, f22);
__ sdc1(f8, MemOperand(a0, OFFSET_OF(T, cvt_big_out)));
- __ Trunc_uw_d(f8, f8);
+ __ Trunc_uw_d(f8, f8, f22);
__ swc1(f8, MemOperand(a0, OFFSET_OF(T, trunc_big_out)));
__ jr(ra);
@@ -1259,4 +1259,18 @@
}
}
+
+TEST(MIPS15) {
+ // Test chaining of label usages within instructions (issue 1644).
+ InitializeVM();
+ v8::HandleScope scope;
+ Assembler assm(Isolate::Current(), NULL, 0);
+
+ Label target;
+ __ beq(v0, v1, &target);
+ __ bne(v0, v1, &target);
+ __ bind(&target);
+ __ nop();
+}
+
#undef __
diff --git a/test/cctest/test-assembler-x64.cc b/test/cctest/test-assembler-x64.cc
index ea70f54..28f7c9b 100644
--- a/test/cctest/test-assembler-x64.cc
+++ b/test/cctest/test-assembler-x64.cc
@@ -46,6 +46,7 @@
using v8::internal::byte;
using v8::internal::greater;
using v8::internal::less_equal;
+using v8::internal::equal;
using v8::internal::not_equal;
using v8::internal::r13;
using v8::internal::r15;
@@ -345,4 +346,17 @@
}
}
+
+TEST(AssemblerX64LabelChaining) {
+ // Test chaining of label usages within instructions (issue 1644).
+ v8::HandleScope scope;
+ Assembler assm(Isolate::Current(), NULL, 0);
+
+ Label target;
+ __ j(equal, &target);
+ __ j(not_equal, &target);
+ __ bind(&target);
+ __ nop();
+}
+
#undef __
diff --git a/test/cctest/test-ast.cc b/test/cctest/test-ast.cc
index 786a54a..2aa7207 100644
--- a/test/cctest/test-ast.cc
+++ b/test/cctest/test-ast.cc
@@ -56,14 +56,3 @@
CHECK_EQ(0, list->length());
delete list;
}
-
-
-TEST(DeleteEmpty) {
- {
- List<int>* list = new List<int>(0);
- delete list;
- }
- {
- List<int> list(0);
- }
-}
diff --git a/test/cctest/test-circular-queue.cc b/test/cctest/test-circular-queue.cc
index c4e5c4c..2861b1f 100644
--- a/test/cctest/test-circular-queue.cc
+++ b/test/cctest/test-circular-queue.cc
@@ -6,8 +6,6 @@
#include "circular-queue-inl.h"
#include "cctest.h"
-namespace i = v8::internal;
-
using i::SamplingCircularQueue;
diff --git a/test/cctest/test-cpu-profiler.cc b/test/cctest/test-cpu-profiler.cc
index 9ff2a17..81c487d 100644
--- a/test/cctest/test-cpu-profiler.cc
+++ b/test/cctest/test-cpu-profiler.cc
@@ -7,8 +7,6 @@
#include "cctest.h"
#include "../include/v8-profiler.h"
-namespace i = v8::internal;
-
using i::CodeEntry;
using i::CpuProfile;
using i::CpuProfiler;
diff --git a/test/cctest/test-debug.cc b/test/cctest/test-debug.cc
index 73b84b7..b7962de 100644
--- a/test/cctest/test-debug.cc
+++ b/test/cctest/test-debug.cc
@@ -2174,7 +2174,7 @@
f = v8::Local<v8::Function>::Cast(env->Global()->Get(v8::String::New("f")));
g = v8::Local<v8::Function>::Cast(env->Global()->Get(v8::String::New("g")));
- // Chesk that a break point was hit when the script was run.
+ // Check that a break point was hit when the script was run.
CHECK_EQ(1, break_point_hit_count);
CHECK_EQ(0, StrLength(last_function_hit));
@@ -5844,6 +5844,7 @@
TEST(DebuggerAgent) {
+ v8::V8::Initialize();
i::Debugger* debugger = i::Isolate::Current()->debugger();
 // Make sure these ports are not used by other tests to allow tests to run in
// parallel.
diff --git a/test/cctest/test-dictionary.cc b/test/cctest/test-dictionary.cc
new file mode 100644
index 0000000..15a854b
--- /dev/null
+++ b/test/cctest/test-dictionary.cc
@@ -0,0 +1,85 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#include "v8.h"
+
+#include "api.h"
+#include "debug.h"
+#include "execution.h"
+#include "factory.h"
+#include "macro-assembler.h"
+#include "objects.h"
+#include "global-handles.h"
+#include "cctest.h"
+
+using namespace v8::internal;
+
+TEST(ObjectHashTable) {
+ v8::HandleScope scope;
+ LocalContext context;
+ Handle<ObjectHashTable> table = FACTORY->NewObjectHashTable(23);
+ Handle<JSObject> a = FACTORY->NewJSArray(7);
+ Handle<JSObject> b = FACTORY->NewJSArray(11);
+ table = PutIntoObjectHashTable(table, a, b);
+ CHECK_EQ(table->NumberOfElements(), 1);
+ CHECK_EQ(table->Lookup(*a), *b);
+ CHECK_EQ(table->Lookup(*b), HEAP->undefined_value());
+
+ // Keys still have to be valid after objects were moved.
+ HEAP->CollectGarbage(NEW_SPACE);
+ CHECK_EQ(table->NumberOfElements(), 1);
+ CHECK_EQ(table->Lookup(*a), *b);
+ CHECK_EQ(table->Lookup(*b), HEAP->undefined_value());
+
+ // Keys that are overwritten should not change number of elements.
+ table = PutIntoObjectHashTable(table, a, FACTORY->NewJSArray(13));
+ CHECK_EQ(table->NumberOfElements(), 1);
+ CHECK_NE(table->Lookup(*a), *b);
+
+ // Keys mapped to undefined should be removed permanently.
+ table = PutIntoObjectHashTable(table, a, FACTORY->undefined_value());
+ CHECK_EQ(table->NumberOfElements(), 0);
+ CHECK_EQ(table->NumberOfDeletedElements(), 1);
+ CHECK_EQ(table->Lookup(*a), HEAP->undefined_value());
+
+ // Keys should map back to their respective values.
+ for (int i = 0; i < 100; i++) {
+ Handle<JSObject> key = FACTORY->NewJSArray(7);
+ Handle<JSObject> value = FACTORY->NewJSArray(11);
+ table = PutIntoObjectHashTable(table, key, value);
+ CHECK_EQ(table->NumberOfElements(), i + 1);
+ CHECK_NE(table->FindEntry(*key), ObjectHashTable::kNotFound);
+ CHECK_EQ(table->Lookup(*key), *value);
+ }
+
+ // Keys never added to the map should not be found.
+ for (int i = 0; i < 1000; i++) {
+ Handle<JSObject> o = FACTORY->NewJSArray(100);
+ CHECK_EQ(table->FindEntry(*o), ObjectHashTable::kNotFound);
+ CHECK_EQ(table->Lookup(*o), HEAP->undefined_value());
+ }
+}
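
The NumberOfElements() / NumberOfDeletedElements() checks in the new test reflect a common hash-table scheme in which a removed entry is tombstoned rather than erased: the live count drops, the deleted count grows, and a later insertion may reuse the slot. A rough, self-contained sketch of that bookkeeping (illustrative only, not V8's ObjectHashTable):

    // Removing a key leaves a tombstone behind, so the element count drops
    // while the deleted count grows, and a later Put() can reuse the slot.
    #include <cassert>
    #include <vector>

    struct MiniTable {
      enum State { kEmpty, kLive, kDeleted };
      struct Entry { State state; int key; int value; };
      std::vector<Entry> entries;
      int live;
      int deleted;

      explicit MiniTable(int capacity)
          : entries(capacity, Entry{kEmpty, 0, 0}), live(0), deleted(0) {}

      void Put(int key, int value) {
        for (Entry& e : entries) {  // update an existing live key
          if (e.state == kLive && e.key == key) { e.value = value; return; }
        }
        for (Entry& e : entries) {  // otherwise take an empty or deleted slot
          if (e.state != kLive) {
            if (e.state == kDeleted) deleted--;
            e.state = kLive; e.key = key; e.value = value;
            live++;
            return;
          }
        }
      }

      void Remove(int key) {
        for (Entry& e : entries) {
          if (e.state == kLive && e.key == key) {
            e.state = kDeleted;  // tombstone, not a real erase
            live--;
            deleted++;
            return;
          }
        }
      }
    };

    int main() {
      MiniTable table(8);
      table.Put(1, 23);
      assert(table.live == 1 && table.deleted == 0);
      table.Remove(1);  // analogous to mapping the key to undefined above
      assert(table.live == 0 && table.deleted == 1);
      return 0;
    }
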
diff --git a/test/cctest/test-func-name-inference.cc b/test/cctest/test-func-name-inference.cc
index 4d993af..bb930c8 100644
--- a/test/cctest/test-func-name-inference.cc
+++ b/test/cctest/test-func-name-inference.cc
@@ -361,3 +361,42 @@
// Can't infer the function name statically.
CheckFunctionName(script, "return 1", "obj.(anonymous function)");
}
+
+
+TEST(GlobalAssignmentAndCall) {
+ InitializeVM();
+ v8::HandleScope scope;
+
+ v8::Handle<v8::Script> script = Compile(
+ "var Foo = function() {\n"
+ " return 1;\n"
+ "}();\n"
+ "var Baz = Bar = function() {\n"
+ " return 2;\n"
+ "}");
+ // The inferred name is empty, because this is an assignment of a result.
+ CheckFunctionName(script, "return 1", "");
+ // See MultipleAssignments test.
+ CheckFunctionName(script, "return 2", "Bar");
+}
+
+
+TEST(AssignmentAndCall) {
+ InitializeVM();
+ v8::HandleScope scope;
+
+ v8::Handle<v8::Script> script = Compile(
+ "(function Enclosing() {\n"
+ " var Foo;\n"
+ " Foo = function() {\n"
+ " return 1;\n"
+ " }();\n"
+ " var Baz = Bar = function() {\n"
+ " return 2;\n"
+ " }\n"
+ "})();");
+ // The inferred name is empty, because this is an assignment of a result.
+ CheckFunctionName(script, "return 1", "");
+ // See MultipleAssignments test.
+ CheckFunctionName(script, "return 2", "Enclosing.Bar");
+}
diff --git a/test/cctest/test-heap-profiler.cc b/test/cctest/test-heap-profiler.cc
index 8675a01..143a23c 100644
--- a/test/cctest/test-heap-profiler.cc
+++ b/test/cctest/test-heap-profiler.cc
@@ -10,8 +10,6 @@
#include "utils-inl.h"
#include "../include/v8-profiler.h"
-namespace i = v8::internal;
-
namespace {
class NamedEntriesDetector {
@@ -87,7 +85,7 @@
"var b2_1 = new B2(a2), b2_2 = new B2(a2);\n"
"var c2 = new C2(a2);");
const v8::HeapSnapshot* snapshot_env2 =
- v8::HeapProfiler::TakeSnapshot(v8::String::New("env2"));
+ v8::HeapProfiler::TakeSnapshot(v8_str("env2"));
i::HeapSnapshot* i_snapshot_env2 =
const_cast<i::HeapSnapshot*>(
reinterpret_cast<const i::HeapSnapshot*>(snapshot_env2));
@@ -126,7 +124,7 @@
"x = new X(new X(), new X());\n"
"(function() { x.a.a = x.b; })();");
const v8::HeapSnapshot* snapshot =
- v8::HeapProfiler::TakeSnapshot(v8::String::New("sizes"));
+ v8::HeapProfiler::TakeSnapshot(v8_str("sizes"));
const v8::HeapGraphNode* global = GetGlobalObject(snapshot);
const v8::HeapGraphNode* x =
GetProperty(global, v8::HeapGraphEdge::kShortcut, "x");
@@ -157,7 +155,7 @@
"function A() { }\n"
"a = new A;");
const v8::HeapSnapshot* snapshot =
- v8::HeapProfiler::TakeSnapshot(v8::String::New("children"));
+ v8::HeapProfiler::TakeSnapshot(v8_str("children"));
const v8::HeapGraphNode* global = GetGlobalObject(snapshot);
for (int i = 0, count = global->GetChildrenCount(); i < count; ++i) {
const v8::HeapGraphEdge* prop = global->GetChild(i);
@@ -183,7 +181,7 @@
"var anonymous = (function() { return function() { return 0; } })();\n"
"compiled(1)");
const v8::HeapSnapshot* snapshot =
- v8::HeapProfiler::TakeSnapshot(v8::String::New("code"));
+ v8::HeapProfiler::TakeSnapshot(v8_str("code"));
const v8::HeapGraphNode* global = GetGlobalObject(snapshot);
const v8::HeapGraphNode* compiled =
@@ -245,7 +243,7 @@
"a = 1; // a is Smi\n"
"b = 2.5; // b is HeapNumber");
const v8::HeapSnapshot* snapshot =
- v8::HeapProfiler::TakeSnapshot(v8::String::New("numbers"));
+ v8::HeapProfiler::TakeSnapshot(v8_str("numbers"));
const v8::HeapGraphNode* global = GetGlobalObject(snapshot);
CHECK_EQ(NULL, GetProperty(global, v8::HeapGraphEdge::kShortcut, "a"));
const v8::HeapGraphNode* b =
@@ -267,7 +265,7 @@
global->SetInternalField(0, v8_num(17));
global->SetInternalField(1, obj);
const v8::HeapSnapshot* snapshot =
- v8::HeapProfiler::TakeSnapshot(v8::String::New("internals"));
+ v8::HeapProfiler::TakeSnapshot(v8_str("internals"));
const v8::HeapGraphNode* global_node = GetGlobalObject(snapshot);
 // The first reference will not be present, because it's a Smi.
CHECK_EQ(NULL, GetProperty(global_node, v8::HeapGraphEdge::kInternal, "0"));
@@ -294,12 +292,12 @@
"var a = new A();\n"
"var b = new B(a);");
const v8::HeapSnapshot* snapshot1 =
- v8::HeapProfiler::TakeSnapshot(v8::String::New("s1"));
+ v8::HeapProfiler::TakeSnapshot(v8_str("s1"));
HEAP->CollectAllGarbage(true); // Enforce compaction.
const v8::HeapSnapshot* snapshot2 =
- v8::HeapProfiler::TakeSnapshot(v8::String::New("s2"));
+ v8::HeapProfiler::TakeSnapshot(v8_str("s2"));
const v8::HeapGraphNode* global1 = GetGlobalObject(snapshot1);
const v8::HeapGraphNode* global2 = GetGlobalObject(snapshot2);
@@ -344,7 +342,7 @@
v8::HandleScope scope;
LocalContext env;
const v8::HeapSnapshot* snapshot =
- v8::HeapProfiler::TakeSnapshot(v8::String::New("s"));
+ v8::HeapProfiler::TakeSnapshot(v8_str("s"));
const v8::HeapGraphNode* root1 = snapshot->GetRoot();
const_cast<i::HeapSnapshot*>(reinterpret_cast<const i::HeapSnapshot*>(
snapshot))->GetSortedEntriesList();
@@ -382,7 +380,7 @@
"})();");
const v8::HeapSnapshot* snapshot =
- v8::HeapProfiler::TakeSnapshot(v8::String::New("dominators"));
+ v8::HeapProfiler::TakeSnapshot(v8_str("dominators"));
const v8::HeapGraphNode* global = GetGlobalObject(snapshot);
CHECK_NE(NULL, global);
@@ -465,7 +463,7 @@
"var a = new A(" STRING_LITERAL_FOR_TEST ");\n"
"var b = new B(a);");
const v8::HeapSnapshot* snapshot =
- v8::HeapProfiler::TakeSnapshot(v8::String::New("json"));
+ v8::HeapProfiler::TakeSnapshot(v8_str("json"));
TestJSONStream stream;
snapshot->Serialize(&stream, v8::HeapSnapshot::kJSON);
CHECK_GT(stream.size(), 0);
@@ -476,17 +474,17 @@
// Verify that snapshot string is valid JSON.
AsciiResource json_res(json);
v8::Local<v8::String> json_string = v8::String::NewExternal(&json_res);
- env->Global()->Set(v8::String::New("json_snapshot"), json_string);
+ env->Global()->Set(v8_str("json_snapshot"), json_string);
v8::Local<v8::Value> snapshot_parse_result = CompileRun(
"var parsed = JSON.parse(json_snapshot); true;");
CHECK(!snapshot_parse_result.IsEmpty());
// Verify that snapshot object has required fields.
v8::Local<v8::Object> parsed_snapshot =
- env->Global()->Get(v8::String::New("parsed"))->ToObject();
- CHECK(parsed_snapshot->Has(v8::String::New("snapshot")));
- CHECK(parsed_snapshot->Has(v8::String::New("nodes")));
- CHECK(parsed_snapshot->Has(v8::String::New("strings")));
+ env->Global()->Get(v8_str("parsed"))->ToObject();
+ CHECK(parsed_snapshot->Has(v8_str("snapshot")));
+ CHECK(parsed_snapshot->Has(v8_str("nodes")));
+ CHECK(parsed_snapshot->Has(v8_str("strings")));
// Get node and edge "member" offsets.
v8::Local<v8::Value> meta_analysis_result = CompileRun(
@@ -538,12 +536,12 @@
int string_obj_pos =
static_cast<int>(string_obj_pos_val->ToNumber()->Value());
v8::Local<v8::Object> nodes_array =
- parsed_snapshot->Get(v8::String::New("nodes"))->ToObject();
+ parsed_snapshot->Get(v8_str("nodes"))->ToObject();
int string_index = static_cast<int>(
nodes_array->Get(string_obj_pos + 1)->ToNumber()->Value());
CHECK_GT(string_index, 0);
v8::Local<v8::Object> strings_array =
- parsed_snapshot->Get(v8::String::New("strings"))->ToObject();
+ parsed_snapshot->Get(v8_str("strings"))->ToObject();
v8::Local<v8::String> string = strings_array->Get(string_index)->ToString();
v8::Local<v8::String> ref_string =
CompileRun(STRING_LITERAL_FOR_TEST)->ToString();
@@ -557,7 +555,7 @@
v8::HandleScope scope;
LocalContext env;
const v8::HeapSnapshot* snapshot =
- v8::HeapProfiler::TakeSnapshot(v8::String::New("abort"));
+ v8::HeapProfiler::TakeSnapshot(v8_str("abort"));
TestJSONStream stream(5);
snapshot->Serialize(&stream, v8::HeapSnapshot::kJSON);
CHECK_GT(stream.size(), 0);
@@ -570,7 +568,7 @@
LocalContext env;
const v8::HeapSnapshot* snapshot =
- v8::HeapProfiler::TakeSnapshot(v8::String::New("id"));
+ v8::HeapProfiler::TakeSnapshot(v8_str("id"));
const v8::HeapGraphNode* root = snapshot->GetRoot();
CHECK_EQ(root, snapshot->GetNodeById(root->GetId()));
for (int i = 0, count = root->GetChildrenCount(); i < count; ++i) {
@@ -611,7 +609,7 @@
const int snapshots_count = v8::HeapProfiler::GetSnapshotsCount();
TestActivityControl aborting_control(3);
const v8::HeapSnapshot* no_snapshot =
- v8::HeapProfiler::TakeSnapshot(v8::String::New("abort"),
+ v8::HeapProfiler::TakeSnapshot(v8_str("abort"),
v8::HeapSnapshot::kFull,
&aborting_control);
CHECK_EQ(NULL, no_snapshot);
@@ -620,7 +618,7 @@
TestActivityControl control(-1); // Don't abort.
const v8::HeapSnapshot* snapshot =
- v8::HeapProfiler::TakeSnapshot(v8::String::New("full"),
+ v8::HeapProfiler::TakeSnapshot(v8_str("full"),
v8::HeapSnapshot::kFull,
&control);
CHECK_NE(NULL, snapshot);
@@ -730,7 +728,7 @@
p_CCC.SetWrapperClassId(2);
CHECK_EQ(0, TestRetainedObjectInfo::instances.length());
const v8::HeapSnapshot* snapshot =
- v8::HeapProfiler::TakeSnapshot(v8::String::New("retained"));
+ v8::HeapProfiler::TakeSnapshot(v8_str("retained"));
CHECK_EQ(3, TestRetainedObjectInfo::instances.length());
for (int i = 0; i < TestRetainedObjectInfo::instances.length(); ++i) {
@@ -774,12 +772,12 @@
CHECK_EQ(0, v8::HeapProfiler::GetSnapshotsCount());
v8::HeapProfiler::DeleteAllSnapshots();
CHECK_EQ(0, v8::HeapProfiler::GetSnapshotsCount());
- CHECK_NE(NULL, v8::HeapProfiler::TakeSnapshot(v8::String::New("1")));
+ CHECK_NE(NULL, v8::HeapProfiler::TakeSnapshot(v8_str("1")));
CHECK_EQ(1, v8::HeapProfiler::GetSnapshotsCount());
v8::HeapProfiler::DeleteAllSnapshots();
CHECK_EQ(0, v8::HeapProfiler::GetSnapshotsCount());
- CHECK_NE(NULL, v8::HeapProfiler::TakeSnapshot(v8::String::New("1")));
- CHECK_NE(NULL, v8::HeapProfiler::TakeSnapshot(v8::String::New("2")));
+ CHECK_NE(NULL, v8::HeapProfiler::TakeSnapshot(v8_str("1")));
+ CHECK_NE(NULL, v8::HeapProfiler::TakeSnapshot(v8_str("2")));
CHECK_EQ(2, v8::HeapProfiler::GetSnapshotsCount());
v8::HeapProfiler::DeleteAllSnapshots();
CHECK_EQ(0, v8::HeapProfiler::GetSnapshotsCount());
@@ -792,7 +790,7 @@
CHECK_EQ(0, v8::HeapProfiler::GetSnapshotsCount());
const v8::HeapSnapshot* s1 =
- v8::HeapProfiler::TakeSnapshot(v8::String::New("1"));
+ v8::HeapProfiler::TakeSnapshot(v8_str("1"));
CHECK_NE(NULL, s1);
CHECK_EQ(1, v8::HeapProfiler::GetSnapshotsCount());
unsigned uid1 = s1->GetUid();
@@ -802,14 +800,14 @@
CHECK_EQ(NULL, v8::HeapProfiler::FindSnapshot(uid1));
const v8::HeapSnapshot* s2 =
- v8::HeapProfiler::TakeSnapshot(v8::String::New("2"));
+ v8::HeapProfiler::TakeSnapshot(v8_str("2"));
CHECK_NE(NULL, s2);
CHECK_EQ(1, v8::HeapProfiler::GetSnapshotsCount());
unsigned uid2 = s2->GetUid();
CHECK_NE(static_cast<int>(uid1), static_cast<int>(uid2));
CHECK_EQ(s2, v8::HeapProfiler::FindSnapshot(uid2));
const v8::HeapSnapshot* s3 =
- v8::HeapProfiler::TakeSnapshot(v8::String::New("3"));
+ v8::HeapProfiler::TakeSnapshot(v8_str("3"));
CHECK_NE(NULL, s3);
CHECK_EQ(2, v8::HeapProfiler::GetSnapshotsCount());
unsigned uid3 = s3->GetUid();
@@ -832,7 +830,7 @@
CompileRun("document = { URL:\"abcdefgh\" };");
const v8::HeapSnapshot* snapshot =
- v8::HeapProfiler::TakeSnapshot(v8::String::New("document"));
+ v8::HeapProfiler::TakeSnapshot(v8_str("document"));
const v8::HeapGraphNode* global = GetGlobalObject(snapshot);
CHECK_NE(NULL, global);
CHECK_EQ("Object / abcdefgh",
@@ -848,7 +846,7 @@
CompileRun(
"this.__defineGetter__(\"document\", function() { throw new Error(); })");
const v8::HeapSnapshot* snapshot =
- v8::HeapProfiler::TakeSnapshot(v8::String::New("document"));
+ v8::HeapProfiler::TakeSnapshot(v8_str("document"));
const v8::HeapGraphNode* global = GetGlobalObject(snapshot);
CHECK_NE(NULL, global);
CHECK_EQ("Object",
@@ -866,7 +864,7 @@
"URLWithException.prototype = { get URL() { throw new Error(); } };\n"
"document = { URL: new URLWithException() };");
const v8::HeapSnapshot* snapshot =
- v8::HeapProfiler::TakeSnapshot(v8::String::New("document"));
+ v8::HeapProfiler::TakeSnapshot(v8_str("document"));
const v8::HeapGraphNode* global = GetGlobalObject(snapshot);
CHECK_NE(NULL, global);
CHECK_EQ("Object",
@@ -879,7 +877,7 @@
v8::HandleScope scope;
LocalContext env;
const v8::HeapSnapshot* snapshot =
- v8::HeapProfiler::TakeSnapshot(v8::String::New("iteration"));
+ v8::HeapProfiler::TakeSnapshot(v8_str("iteration"));
const v8::HeapGraphNode* global = GetGlobalObject(snapshot);
CHECK_NE(NULL, global);
// Verify that we can find this object by iteration.
@@ -891,3 +889,115 @@
}
CHECK_EQ(1, count);
}
+
+
+TEST(GetHeapValue) {
+ v8::HandleScope scope;
+ LocalContext env;
+
+ CompileRun("a = { s_prop: \'value\', n_prop: 0.1 };");
+ const v8::HeapSnapshot* snapshot =
+ v8::HeapProfiler::TakeSnapshot(v8_str("value"));
+ const v8::HeapGraphNode* global = GetGlobalObject(snapshot);
+ CHECK(global->GetHeapValue()->IsObject());
+ v8::Local<v8::Object> js_global =
+ env->Global()->GetPrototype().As<v8::Object>();
+ CHECK(js_global == global->GetHeapValue());
+ const v8::HeapGraphNode* obj = GetProperty(
+ global, v8::HeapGraphEdge::kShortcut, "a");
+ CHECK(obj->GetHeapValue()->IsObject());
+ v8::Local<v8::Object> js_obj = js_global->Get(v8_str("a")).As<v8::Object>();
+ CHECK(js_obj == obj->GetHeapValue());
+ const v8::HeapGraphNode* s_prop =
+ GetProperty(obj, v8::HeapGraphEdge::kProperty, "s_prop");
+ v8::Local<v8::String> js_s_prop =
+ js_obj->Get(v8_str("s_prop")).As<v8::String>();
+ CHECK(js_s_prop == s_prop->GetHeapValue());
+ const v8::HeapGraphNode* n_prop =
+ GetProperty(obj, v8::HeapGraphEdge::kProperty, "n_prop");
+ v8::Local<v8::Number> js_n_prop =
+ js_obj->Get(v8_str("n_prop")).As<v8::Number>();
+ CHECK(js_n_prop == n_prop->GetHeapValue());
+}
+
+
+TEST(GetHeapValueForDeletedObject) {
+ v8::HandleScope scope;
+ LocalContext env;
+
+ // It is impossible to delete a global property, so we are about to delete a
+ // property of the "a" object. Also, the "p" object can't be an empty one
+ // because the empty object is static and isn't actually deleted.
+ CompileRun("a = { p: { r: {} } };");
+ const v8::HeapSnapshot* snapshot =
+ v8::HeapProfiler::TakeSnapshot(v8_str("snapshot"));
+ const v8::HeapGraphNode* global = GetGlobalObject(snapshot);
+ const v8::HeapGraphNode* obj = GetProperty(
+ global, v8::HeapGraphEdge::kShortcut, "a");
+ const v8::HeapGraphNode* prop = GetProperty(
+ obj, v8::HeapGraphEdge::kProperty, "p");
+ {
+ // Perform the check inside a nested local scope to avoid creating a
+ // reference to the object we are deleting.
+ v8::HandleScope scope;
+ CHECK(prop->GetHeapValue()->IsObject());
+ }
+ CompileRun("delete a.p;");
+ CHECK(prop->GetHeapValue()->IsUndefined());
+}
+
+
+static int StringCmp(const char* ref, i::String* act) {
+ i::SmartPointer<char> s_act = act->ToCString();
+ int result = strcmp(ref, *s_act);
+ if (result != 0)
+ fprintf(stderr, "Expected: \"%s\", Actual: \"%s\"\n", ref, *s_act);
+ return result;
+}
+
+
+TEST(GetConstructorName) {
+ v8::HandleScope scope;
+ LocalContext env;
+
+ CompileRun(
+ "function Constructor1() {};\n"
+ "var obj1 = new Constructor1();\n"
+ "var Constructor2 = function() {};\n"
+ "var obj2 = new Constructor2();\n"
+ "var obj3 = {};\n"
+ "obj3.constructor = function Constructor3() {};\n"
+ "var obj4 = {};\n"
+ "// Slow properties\n"
+ "for (var i=0; i<2000; ++i) obj4[\"p\" + i] = i;\n"
+ "obj4.constructor = function Constructor4() {};\n"
+ "var obj5 = {};\n"
+ "var obj6 = {};\n"
+ "obj6.constructor = 6;");
+ v8::Local<v8::Object> js_global =
+ env->Global()->GetPrototype().As<v8::Object>();
+ v8::Local<v8::Object> obj1 = js_global->Get(v8_str("obj1")).As<v8::Object>();
+ i::Handle<i::JSObject> js_obj1 = v8::Utils::OpenHandle(*obj1);
+ CHECK_EQ(0, StringCmp(
+ "Constructor1", i::V8HeapExplorer::GetConstructorName(*js_obj1)));
+ v8::Local<v8::Object> obj2 = js_global->Get(v8_str("obj2")).As<v8::Object>();
+ i::Handle<i::JSObject> js_obj2 = v8::Utils::OpenHandle(*obj2);
+ CHECK_EQ(0, StringCmp(
+ "Constructor2", i::V8HeapExplorer::GetConstructorName(*js_obj2)));
+ v8::Local<v8::Object> obj3 = js_global->Get(v8_str("obj3")).As<v8::Object>();
+ i::Handle<i::JSObject> js_obj3 = v8::Utils::OpenHandle(*obj3);
+ CHECK_EQ(0, StringCmp(
+ "Constructor3", i::V8HeapExplorer::GetConstructorName(*js_obj3)));
+ v8::Local<v8::Object> obj4 = js_global->Get(v8_str("obj4")).As<v8::Object>();
+ i::Handle<i::JSObject> js_obj4 = v8::Utils::OpenHandle(*obj4);
+ CHECK_EQ(0, StringCmp(
+ "Constructor4", i::V8HeapExplorer::GetConstructorName(*js_obj4)));
+ v8::Local<v8::Object> obj5 = js_global->Get(v8_str("obj5")).As<v8::Object>();
+ i::Handle<i::JSObject> js_obj5 = v8::Utils::OpenHandle(*obj5);
+ CHECK_EQ(0, StringCmp(
+ "Object", i::V8HeapExplorer::GetConstructorName(*js_obj5)));
+ v8::Local<v8::Object> obj6 = js_global->Get(v8_str("obj6")).As<v8::Object>();
+ i::Handle<i::JSObject> js_obj6 = v8::Utils::OpenHandle(*obj6);
+ CHECK_EQ(0, StringCmp(
+ "Object", i::V8HeapExplorer::GetConstructorName(*js_obj6)));
+}
diff --git a/test/cctest/test-heap.cc b/test/cctest/test-heap.cc
index fac9f0a..11b8813 100644
--- a/test/cctest/test-heap.cc
+++ b/test/cctest/test-heap.cc
@@ -291,8 +291,8 @@
TEST(GlobalHandles) {
- GlobalHandles* global_handles = Isolate::Current()->global_handles();
InitializeVM();
+ GlobalHandles* global_handles = Isolate::Current()->global_handles();
Handle<Object> h1;
Handle<Object> h2;
@@ -339,8 +339,8 @@
TEST(WeakGlobalHandlesScavenge) {
- GlobalHandles* global_handles = Isolate::Current()->global_handles();
InitializeVM();
+ GlobalHandles* global_handles = Isolate::Current()->global_handles();
WeakPointerCleared = false;
@@ -377,8 +377,8 @@
TEST(WeakGlobalHandlesMark) {
- GlobalHandles* global_handles = Isolate::Current()->global_handles();
InitializeVM();
+ GlobalHandles* global_handles = Isolate::Current()->global_handles();
WeakPointerCleared = false;
@@ -416,8 +416,8 @@
}
TEST(DeleteWeakGlobalHandle) {
- GlobalHandles* global_handles = Isolate::Current()->global_handles();
InitializeVM();
+ GlobalHandles* global_handles = Isolate::Current()->global_handles();
WeakPointerCleared = false;
diff --git a/test/cctest/test-list.cc b/test/cctest/test-list.cc
index e20ee8a..7520b05 100644
--- a/test/cctest/test-list.cc
+++ b/test/cctest/test-list.cc
@@ -1,4 +1,4 @@
-// Copyright 2009 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -138,3 +138,14 @@
list.Clear();
CHECK_EQ(0, list.length());
}
+
+
+TEST(DeleteEmpty) {
+ {
+ List<int>* list = new List<int>(0);
+ delete list;
+ }
+ {
+ List<int> list(0);
+ }
+}
diff --git a/test/cctest/test-lockers.cc b/test/cctest/test-lockers.cc
index 2b184e9..d61fde2 100644
--- a/test/cctest/test-lockers.cc
+++ b/test/cctest/test-lockers.cc
@@ -54,10 +54,6 @@
using ::v8::Value;
using ::v8::V8;
-namespace i = ::i;
-
-
-
// Migrating an isolate
class KangarooThread : public v8::internal::Thread {
diff --git a/test/cctest/test-log-stack-tracer.cc b/test/cctest/test-log-stack-tracer.cc
index 2bcb3fe..f536e6b 100644
--- a/test/cctest/test-log-stack-tracer.cc
+++ b/test/cctest/test-log-stack-tracer.cc
@@ -54,8 +54,6 @@
using v8::internal::StackTracer;
using v8::internal::TickSample;
-namespace i = v8::internal;
-
static v8::Persistent<v8::Context> env;
diff --git a/test/cctest/test-log.cc b/test/cctest/test-log.cc
index 262e7bb..dfbc733 100644
--- a/test/cctest/test-log.cc
+++ b/test/cctest/test-log.cc
@@ -23,8 +23,6 @@
using v8::internal::Logger;
using v8::internal::StrLength;
-namespace i = v8::internal;
-
namespace {
class ScopedLoggerInitializer {
diff --git a/test/cctest/test-parsing.cc b/test/cctest/test-parsing.cc
index 96a181d..8b6afdc 100755
--- a/test/cctest/test-parsing.cc
+++ b/test/cctest/test-parsing.cc
@@ -40,9 +40,7 @@
#include "preparser.h"
#include "cctest.h"
-namespace i = ::v8::internal;
-
-TEST(KeywordMatcher) {
+TEST(ScanKeywords) {
struct KeywordToken {
const char* keyword;
i::Token::Value token;
@@ -50,90 +48,64 @@
static const KeywordToken keywords[] = {
#define KEYWORD(t, s, d) { s, i::Token::t },
-#define IGNORE(t, s, d) /* */
- TOKEN_LIST(IGNORE, KEYWORD, IGNORE)
+ TOKEN_LIST(IGNORE_TOKEN, KEYWORD)
#undef KEYWORD
{ NULL, i::Token::IDENTIFIER }
};
- static const char* future_keywords[] = {
-#define FUTURE(t, s, d) s,
- TOKEN_LIST(IGNORE, IGNORE, FUTURE)
-#undef FUTURE
-#undef IGNORE
- NULL
- };
-
KeywordToken key_token;
+ i::UnicodeCache unicode_cache;
+ i::byte buffer[32];
for (int i = 0; (key_token = keywords[i]).keyword != NULL; i++) {
- i::KeywordMatcher matcher;
- const char* keyword = key_token.keyword;
- int length = i::StrLength(keyword);
- for (int j = 0; j < length; j++) {
- if (key_token.token == i::Token::INSTANCEOF && j == 2) {
- // "in" is a prefix of "instanceof". It's the only keyword
- // that is a prefix of another.
- CHECK_EQ(i::Token::IN, matcher.token());
- } else {
- CHECK_EQ(i::Token::IDENTIFIER, matcher.token());
- }
- matcher.AddChar(keyword[j]);
+ const i::byte* keyword =
+ reinterpret_cast<const i::byte*>(key_token.keyword);
+ int length = i::StrLength(key_token.keyword);
+ CHECK(static_cast<int>(sizeof(buffer)) >= length);
+ {
+ i::Utf8ToUC16CharacterStream stream(keyword, length);
+ i::JavaScriptScanner scanner(&unicode_cache);
+ // The scanner should parse 'let' as Token::LET for this test.
+ scanner.SetHarmonyBlockScoping(true);
+ scanner.Initialize(&stream);
+ CHECK_EQ(key_token.token, scanner.Next());
+ CHECK_EQ(i::Token::EOS, scanner.Next());
}
- CHECK_EQ(key_token.token, matcher.token());
- // Adding more characters will make keyword matching fail.
- matcher.AddChar('z');
- CHECK_EQ(i::Token::IDENTIFIER, matcher.token());
- // Adding a keyword later will not make it match again.
- matcher.AddChar('i');
- matcher.AddChar('f');
- CHECK_EQ(i::Token::IDENTIFIER, matcher.token());
- }
-
- // Future keywords are not recognized.
- const char* future_keyword;
- for (int i = 0; (future_keyword = future_keywords[i]) != NULL; i++) {
- i::KeywordMatcher matcher;
- int length = i::StrLength(future_keyword);
- for (int j = 0; j < length; j++) {
- matcher.AddChar(future_keyword[j]);
+ // Removing characters will make keyword matching fail.
+ {
+ i::Utf8ToUC16CharacterStream stream(keyword, length - 1);
+ i::JavaScriptScanner scanner(&unicode_cache);
+ scanner.Initialize(&stream);
+ CHECK_EQ(i::Token::IDENTIFIER, scanner.Next());
+ CHECK_EQ(i::Token::EOS, scanner.Next());
}
- CHECK_EQ(i::Token::IDENTIFIER, matcher.token());
+ // Adding characters will make keyword matching fail.
+ static const char chars_to_append[] = { 'z', '0', '_' };
+ for (int j = 0; j < static_cast<int>(ARRAY_SIZE(chars_to_append)); ++j) {
+ memmove(buffer, keyword, length);
+ buffer[length] = chars_to_append[j];
+ i::Utf8ToUC16CharacterStream stream(buffer, length + 1);
+ i::JavaScriptScanner scanner(&unicode_cache);
+ scanner.Initialize(&stream);
+ CHECK_EQ(i::Token::IDENTIFIER, scanner.Next());
+ CHECK_EQ(i::Token::EOS, scanner.Next());
+ }
+ // Replacing characters will make keyword matching fail.
+ {
+ memmove(buffer, keyword, length);
+ buffer[length - 1] = '_';
+ i::Utf8ToUC16CharacterStream stream(buffer, length);
+ i::JavaScriptScanner scanner(&unicode_cache);
+ scanner.Initialize(&stream);
+ CHECK_EQ(i::Token::IDENTIFIER, scanner.Next());
+ CHECK_EQ(i::Token::EOS, scanner.Next());
+ }
}
-
- // Zero isn't ignored at first.
- i::KeywordMatcher bad_start;
- bad_start.AddChar(0);
- CHECK_EQ(i::Token::IDENTIFIER, bad_start.token());
- bad_start.AddChar('i');
- bad_start.AddChar('f');
- CHECK_EQ(i::Token::IDENTIFIER, bad_start.token());
-
- // Zero isn't ignored at end.
- i::KeywordMatcher bad_end;
- bad_end.AddChar('i');
- bad_end.AddChar('f');
- CHECK_EQ(i::Token::IF, bad_end.token());
- bad_end.AddChar(0);
- CHECK_EQ(i::Token::IDENTIFIER, bad_end.token());
-
- // Case isn't ignored.
- i::KeywordMatcher bad_case;
- bad_case.AddChar('i');
- bad_case.AddChar('F');
- CHECK_EQ(i::Token::IDENTIFIER, bad_case.token());
-
- // If we mark it as failure, continuing won't help.
- i::KeywordMatcher full_stop;
- full_stop.AddChar('i');
- CHECK_EQ(i::Token::IDENTIFIER, full_stop.token());
- full_stop.Fail();
- CHECK_EQ(i::Token::IDENTIFIER, full_stop.token());
- full_stop.AddChar('f');
- CHECK_EQ(i::Token::IDENTIFIER, full_stop.token());
}
TEST(ScanHTMLEndComments) {
+ v8::V8::Initialize();
+
// Regression test. See:
// http://code.google.com/p/chromium/issues/detail?id=53548
// Tests that --> is correctly interpreted as comment-to-end-of-line if there
@@ -263,6 +235,8 @@
TEST(StandAlonePreParser) {
+ v8::V8::Initialize();
+
int marker;
i::Isolate::Current()->stack_guard()->SetStackLimit(
reinterpret_cast<uintptr_t>(&marker) - 128 * 1024);
@@ -299,6 +273,8 @@
TEST(RegressChromium62639) {
+ v8::V8::Initialize();
+
int marker;
i::Isolate::Current()->stack_guard()->SetStackLimit(
reinterpret_cast<uintptr_t>(&marker) - 128 * 1024);
@@ -313,13 +289,15 @@
i::Utf8ToUC16CharacterStream stream(reinterpret_cast<const i::byte*>(program),
static_cast<unsigned>(strlen(program)));
i::ScriptDataImpl* data =
- i::ParserApi::PreParse(&stream, NULL);
+ i::ParserApi::PreParse(&stream, NULL, false);
CHECK(data->HasError());
delete data;
}
TEST(Regress928) {
+ v8::V8::Initialize();
+
// Preparsing didn't consider the catch clause of a try statement
// as with-content, which made it assume that a function inside
// the block could be lazily compiled, and an extra, unexpected,
@@ -335,7 +313,7 @@
i::Utf8ToUC16CharacterStream stream(reinterpret_cast<const i::byte*>(program),
static_cast<unsigned>(strlen(program)));
i::ScriptDataImpl* data =
- i::ParserApi::PartialPreParse(&stream, NULL);
+ i::ParserApi::PartialPreParse(&stream, NULL, false);
CHECK(!data->HasError());
data->Initialize();
@@ -360,6 +338,8 @@
TEST(PreParseOverflow) {
+ v8::V8::Initialize();
+
int marker;
i::Isolate::Current()->stack_guard()->SetStackLimit(
reinterpret_cast<uintptr_t>(&marker) - 128 * 1024);
@@ -610,6 +590,8 @@
}
TEST(StreamScanner) {
+ v8::V8::Initialize();
+
const char* str1 = "{ foo get for : */ <- \n\n /*foo*/ bib";
i::Utf8ToUC16CharacterStream stream1(reinterpret_cast<const i::byte*>(str1),
static_cast<unsigned>(strlen(str1)));
@@ -690,6 +672,8 @@
TEST(RegExpScanning) {
+ v8::V8::Initialize();
+
// RegExp token with added garbage at the end. The scanner should only
// scan the RegExp until the terminating slash just before "flipperwald".
TestScanRegExp("/b/flipperwald", "b");
diff --git a/test/cctest/test-profile-generator.cc b/test/cctest/test-profile-generator.cc
index 6d30443..250ebd4 100644
--- a/test/cctest/test-profile-generator.cc
+++ b/test/cctest/test-profile-generator.cc
@@ -7,8 +7,6 @@
#include "cctest.h"
#include "../include/v8-profiler.h"
-namespace i = v8::internal;
-
using i::CodeEntry;
using i::CodeMap;
using i::CpuProfile;
diff --git a/test/cctest/test-serialize.cc b/test/cctest/test-serialize.cc
index 730d72a..8e85444 100644
--- a/test/cctest/test-serialize.cc
+++ b/test/cctest/test-serialize.cc
@@ -99,10 +99,10 @@
TEST(ExternalReferenceEncoder) {
- OS::Setup();
Isolate* isolate = i::Isolate::Current();
isolate->stats_table()->SetCounterFunction(counter_function);
- HEAP->Setup(false);
+ v8::V8::Initialize();
+
ExternalReferenceEncoder encoder;
CHECK_EQ(make_code(BUILTIN, Builtins::kArrayCode),
Encode(encoder, Builtins::kArrayCode));
@@ -139,10 +139,10 @@
TEST(ExternalReferenceDecoder) {
- OS::Setup();
Isolate* isolate = i::Isolate::Current();
isolate->stats_table()->SetCounterFunction(counter_function);
- HEAP->Setup(false);
+ v8::V8::Initialize();
+
ExternalReferenceDecoder decoder;
CHECK_EQ(AddressOf(Builtins::kArrayCode),
decoder.Decode(make_code(BUILTIN, Builtins::kArrayCode)));
@@ -459,7 +459,9 @@
CHECK(root->IsString());
}
v8::HandleScope handle_scope;
- Handle<Object>root_handle(root);
+ Handle<Object> root_handle(root);
+
+ ReserveSpaceForPartialSnapshot(file_name);
Object* root2;
{
@@ -542,7 +544,9 @@
CHECK(root->IsContext());
}
v8::HandleScope handle_scope;
- Handle<Object>root_handle(root);
+ Handle<Object> root_handle(root);
+
+ ReserveSpaceForPartialSnapshot(file_name);
Object* root2;
{
diff --git a/test/cctest/test-spaces.cc b/test/cctest/test-spaces.cc
index de0c41e..0f22ce1 100644
--- a/test/cctest/test-spaces.cc
+++ b/test/cctest/test-spaces.cc
@@ -91,46 +91,74 @@
}
+namespace v8 {
+namespace internal {
+
+// Temporarily sets a given allocator in an isolate.
+class TestMemoryAllocatorScope {
+ public:
+ TestMemoryAllocatorScope(Isolate* isolate, MemoryAllocator* allocator)
+ : isolate_(isolate),
+ old_allocator_(isolate->memory_allocator_) {
+ isolate->memory_allocator_ = allocator;
+ }
+
+ ~TestMemoryAllocatorScope() {
+ isolate_->memory_allocator_ = old_allocator_;
+ }
+
+ private:
+ Isolate* isolate_;
+ MemoryAllocator* old_allocator_;
+
+ DISALLOW_COPY_AND_ASSIGN(TestMemoryAllocatorScope);
+};
+
+} } // namespace v8::internal
+
+
TEST(MemoryAllocator) {
OS::Setup();
Isolate* isolate = Isolate::Current();
- CHECK(HEAP->ConfigureHeapDefault());
- CHECK(isolate->memory_allocator()->Setup(HEAP->MaxReserved(),
- HEAP->MaxExecutableSize()));
+ isolate->InitializeLoggingAndCounters();
+ Heap* heap = isolate->heap();
+ CHECK(heap->ConfigureHeapDefault());
+ MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
+ CHECK(memory_allocator->Setup(heap->MaxReserved(),
+ heap->MaxExecutableSize()));
+ TestMemoryAllocatorScope test_scope(isolate, memory_allocator);
- OldSpace faked_space(HEAP,
- HEAP->MaxReserved(),
+ OldSpace faked_space(heap,
+ heap->MaxReserved(),
OLD_POINTER_SPACE,
NOT_EXECUTABLE);
int total_pages = 0;
int requested = MemoryAllocator::kPagesPerChunk;
int allocated;
// If we request n pages, we should get n or n - 1.
- Page* first_page =
- isolate->memory_allocator()->AllocatePages(
- requested, &allocated, &faked_space);
+ Page* first_page = memory_allocator->AllocatePages(
+ requested, &allocated, &faked_space);
CHECK(first_page->is_valid());
CHECK(allocated == requested || allocated == requested - 1);
total_pages += allocated;
Page* last_page = first_page;
for (Page* p = first_page; p->is_valid(); p = p->next_page()) {
- CHECK(isolate->memory_allocator()->IsPageInSpace(p, &faked_space));
+ CHECK(memory_allocator->IsPageInSpace(p, &faked_space));
last_page = p;
}
// Again, we should get n or n - 1 pages.
- Page* others =
- isolate->memory_allocator()->AllocatePages(
- requested, &allocated, &faked_space);
+ Page* others = memory_allocator->AllocatePages(
+ requested, &allocated, &faked_space);
CHECK(others->is_valid());
CHECK(allocated == requested || allocated == requested - 1);
total_pages += allocated;
- isolate->memory_allocator()->SetNextPage(last_page, others);
+ memory_allocator->SetNextPage(last_page, others);
int page_count = 0;
for (Page* p = first_page; p->is_valid(); p = p->next_page()) {
- CHECK(isolate->memory_allocator()->IsPageInSpace(p, &faked_space));
+ CHECK(memory_allocator->IsPageInSpace(p, &faked_space));
page_count++;
}
CHECK(total_pages == page_count);
@@ -141,34 +169,39 @@
// Freeing pages at the first chunk starting at or after the second page
// should free the entire second chunk. It will return the page it was passed
// (since the second page was in the first chunk).
- Page* free_return = isolate->memory_allocator()->FreePages(second_page);
+ Page* free_return = memory_allocator->FreePages(second_page);
CHECK(free_return == second_page);
- isolate->memory_allocator()->SetNextPage(first_page, free_return);
+ memory_allocator->SetNextPage(first_page, free_return);
// Freeing pages in the first chunk starting at the first page should free
// the first chunk and return an invalid page.
- Page* invalid_page = isolate->memory_allocator()->FreePages(first_page);
+ Page* invalid_page = memory_allocator->FreePages(first_page);
CHECK(!invalid_page->is_valid());
- isolate->memory_allocator()->TearDown();
+ memory_allocator->TearDown();
+ delete memory_allocator;
}
TEST(NewSpace) {
OS::Setup();
- CHECK(HEAP->ConfigureHeapDefault());
- CHECK(Isolate::Current()->memory_allocator()->Setup(
- HEAP->MaxReserved(), HEAP->MaxExecutableSize()));
+ Isolate* isolate = Isolate::Current();
+ isolate->InitializeLoggingAndCounters();
+ Heap* heap = isolate->heap();
+ CHECK(heap->ConfigureHeapDefault());
+ MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
+ CHECK(memory_allocator->Setup(heap->MaxReserved(),
+ heap->MaxExecutableSize()));
+ TestMemoryAllocatorScope test_scope(isolate, memory_allocator);
- NewSpace new_space(HEAP);
+ NewSpace new_space(heap);
void* chunk =
- Isolate::Current()->memory_allocator()->ReserveInitialChunk(
- 4 * HEAP->ReservedSemiSpaceSize());
+ memory_allocator->ReserveInitialChunk(4 * heap->ReservedSemiSpaceSize());
CHECK(chunk != NULL);
Address start = RoundUp(static_cast<Address>(chunk),
- 2 * HEAP->ReservedSemiSpaceSize());
- CHECK(new_space.Setup(start, 2 * HEAP->ReservedSemiSpaceSize()));
+ 2 * heap->ReservedSemiSpaceSize());
+ CHECK(new_space.Setup(start, 2 * heap->ReservedSemiSpaceSize()));
CHECK(new_space.HasBeenSetup());
while (new_space.Available() >= Page::kMaxHeapObjectSize) {
@@ -178,28 +211,33 @@
}
new_space.TearDown();
- Isolate::Current()->memory_allocator()->TearDown();
+ memory_allocator->TearDown();
+ delete memory_allocator;
}
TEST(OldSpace) {
OS::Setup();
- CHECK(HEAP->ConfigureHeapDefault());
- CHECK(Isolate::Current()->memory_allocator()->Setup(
- HEAP->MaxReserved(), HEAP->MaxExecutableSize()));
+ Isolate* isolate = Isolate::Current();
+ isolate->InitializeLoggingAndCounters();
+ Heap* heap = isolate->heap();
+ CHECK(heap->ConfigureHeapDefault());
+ MemoryAllocator* memory_allocator = new MemoryAllocator(isolate);
+ CHECK(memory_allocator->Setup(heap->MaxReserved(),
+ heap->MaxExecutableSize()));
+ TestMemoryAllocatorScope test_scope(isolate, memory_allocator);
- OldSpace* s = new OldSpace(HEAP,
- HEAP->MaxOldGenerationSize(),
+ OldSpace* s = new OldSpace(heap,
+ heap->MaxOldGenerationSize(),
OLD_POINTER_SPACE,
NOT_EXECUTABLE);
CHECK(s != NULL);
- void* chunk =
- Isolate::Current()->memory_allocator()->ReserveInitialChunk(
- 4 * HEAP->ReservedSemiSpaceSize());
+ void* chunk = memory_allocator->ReserveInitialChunk(
+ 4 * heap->ReservedSemiSpaceSize());
CHECK(chunk != NULL);
Address start = static_cast<Address>(chunk);
- size_t size = RoundUp(start, 2 * HEAP->ReservedSemiSpaceSize()) - start;
+ size_t size = RoundUp(start, 2 * heap->ReservedSemiSpaceSize()) - start;
CHECK(s->Setup(start, size));
@@ -209,13 +247,13 @@
s->TearDown();
delete s;
- Isolate::Current()->memory_allocator()->TearDown();
+ memory_allocator->TearDown();
+ delete memory_allocator;
}
TEST(LargeObjectSpace) {
- OS::Setup();
- CHECK(HEAP->Setup(false));
+ v8::V8::Initialize();
LargeObjectSpace* lo = HEAP->lo_space();
CHECK(lo != NULL);
@@ -247,9 +285,4 @@
CHECK(!lo->IsEmpty());
CHECK(lo->AllocateRaw(lo_size)->IsFailure());
-
- lo->TearDown();
- delete lo;
-
- Isolate::Current()->memory_allocator()->TearDown();
}
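
The TestMemoryAllocatorScope helper added above is a plain swap-and-restore RAII guard: it installs a replacement allocator on the isolate for the lifetime of a C++ scope and restores the original in its destructor. A generic sketch of the same idiom (illustrative, not V8 API):

    // Install a replacement value in *slot for the lifetime of the guard and
    // restore the original value on scope exit (the TestMemoryAllocatorScope
    // pattern, generalized).
    #include <cassert>

    template <typename T>
    class ScopedOverride {
     public:
      ScopedOverride(T* slot, T replacement)
          : slot_(slot), old_value_(*slot) {
        *slot_ = replacement;
      }
      ~ScopedOverride() { *slot_ = old_value_; }

     private:
      ScopedOverride(const ScopedOverride&);             // non-copyable,
      ScopedOverride& operator=(const ScopedOverride&);  // as in the test

      T* slot_;
      T old_value_;
    };

    int main() {
      int allocator = 1;
      {
        ScopedOverride<int> scope(&allocator, 2);
        assert(allocator == 2);  // replacement visible inside the scope
      }
      assert(allocator == 1);    // original restored when the scope ends
      return 0;
    }
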
diff --git a/test/cctest/test-strings.cc b/test/cctest/test-strings.cc
index 4d9b264..17020a3 100644
--- a/test/cctest/test-strings.cc
+++ b/test/cctest/test-strings.cc
@@ -430,8 +430,7 @@
" return 0;"
"};"
"test()";
- CHECK_EQ(0,
- v8::Script::Compile(v8::String::New(source))->Run()->Int32Value());
+ CHECK_EQ(0, CompileRun(source)->Int32Value());
}
@@ -481,3 +480,52 @@
}
}
}
+
+
+TEST(SliceFromCons) {
+ FLAG_string_slices = true;
+ InitializeVM();
+ v8::HandleScope scope;
+ Handle<String> string =
+ FACTORY->NewStringFromAscii(CStrVector("parentparentparent"));
+ Handle<String> parent = FACTORY->NewConsString(string, string);
+ CHECK(parent->IsConsString());
+ CHECK(!parent->IsFlat());
+ Handle<String> slice = FACTORY->NewSubString(parent, 1, 25);
+ // After slicing, the original string becomes a flat cons.
+ CHECK(parent->IsFlat());
+ CHECK(slice->IsSlicedString());
+ CHECK_EQ(SlicedString::cast(*slice)->parent(),
+ ConsString::cast(*parent)->first());
+ CHECK(SlicedString::cast(*slice)->parent()->IsSeqString());
+ CHECK(slice->IsFlat());
+}
+
+
+TEST(TrivialSlice) {
+ // This tests whether a slice that contains the entire parent string
+ // actually creates a new string (it should not).
+ FLAG_string_slices = true;
+ InitializeVM();
+ HandleScope scope;
+ v8::Local<v8::Value> result;
+ Handle<String> string;
+ const char* init = "var str = 'abcdefghijklmnopqrstuvwxyz';";
+ const char* check = "str.slice(0,26)";
+ const char* crosscheck = "str.slice(1,25)";
+
+ CompileRun(init);
+
+ result = CompileRun(check);
+ CHECK(result->IsString());
+ string = v8::Utils::OpenHandle(v8::String::Cast(*result));
+ CHECK(!string->IsSlicedString());
+
+ string = FACTORY->NewSubString(string, 0, 26);
+ CHECK(!string->IsSlicedString());
+ result = CompileRun(crosscheck);
+ CHECK(result->IsString());
+ string = v8::Utils::OpenHandle(v8::String::Cast(*result));
+ CHECK(string->IsSlicedString());
+ CHECK_EQ("bcdefghijklmnopqrstuvwxy", *(string->ToCString()));
+}
diff --git a/test/cctest/test-unbound-queue.cc b/test/cctest/test-unbound-queue.cc
index df5509e..3dc87ae 100644
--- a/test/cctest/test-unbound-queue.cc
+++ b/test/cctest/test-unbound-queue.cc
@@ -6,8 +6,6 @@
#include "unbound-queue-inl.h"
#include "cctest.h"
-namespace i = v8::internal;
-
using i::UnboundQueue;
diff --git a/test/cctest/test-weakmaps.cc b/test/cctest/test-weakmaps.cc
new file mode 100644
index 0000000..db4db25
--- /dev/null
+++ b/test/cctest/test-weakmaps.cc
@@ -0,0 +1,149 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#include "v8.h"
+
+#include "global-handles.h"
+#include "snapshot.h"
+#include "cctest.h"
+
+using namespace v8::internal;
+
+
+static Handle<JSWeakMap> AllocateJSWeakMap() {
+ Handle<Map> map = FACTORY->NewMap(JS_WEAK_MAP_TYPE, JSWeakMap::kSize);
+ Handle<JSObject> weakmap_obj = FACTORY->NewJSObjectFromMap(map);
+ Handle<JSWeakMap> weakmap(JSWeakMap::cast(*weakmap_obj));
+ // Do not use handles for the hash table, it would make entries strong.
+ Object* table_obj = ObjectHashTable::Allocate(1)->ToObjectChecked();
+ ObjectHashTable* table = ObjectHashTable::cast(table_obj);
+ weakmap->set_table(table);
+ weakmap->set_next(Smi::FromInt(0));
+ return weakmap;
+}
+
+static void PutIntoWeakMap(Handle<JSWeakMap> weakmap,
+ Handle<JSObject> key,
+ int value) {
+ Handle<ObjectHashTable> table = PutIntoObjectHashTable(
+ Handle<ObjectHashTable>(weakmap->table()),
+ Handle<JSObject>(JSObject::cast(*key)),
+ Handle<Smi>(Smi::FromInt(value)));
+ weakmap->set_table(*table);
+}
+
+static int NumberOfWeakCalls = 0;
+static void WeakPointerCallback(v8::Persistent<v8::Value> handle, void* id) {
+ ASSERT(id == reinterpret_cast<void*>(1234));
+ NumberOfWeakCalls++;
+ handle.Dispose();
+}
+
+
+TEST(Weakness) {
+ LocalContext context;
+ v8::HandleScope scope;
+ Handle<JSWeakMap> weakmap = AllocateJSWeakMap();
+ GlobalHandles* global_handles = Isolate::Current()->global_handles();
+
+ // Keep global reference to the key.
+ Handle<Object> key;
+ {
+ v8::HandleScope scope;
+ Handle<Map> map = FACTORY->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
+ Handle<JSObject> object = FACTORY->NewJSObjectFromMap(map);
+ key = global_handles->Create(*object);
+ }
+ CHECK(!global_handles->IsWeak(key.location()));
+
+ // Put entry into weak map.
+ {
+ v8::HandleScope scope;
+ PutIntoWeakMap(weakmap, Handle<JSObject>(JSObject::cast(*key)), 23);
+ }
+ CHECK_EQ(1, weakmap->table()->NumberOfElements());
+
+ // Force a full GC.
+ HEAP->CollectAllGarbage(false);
+ CHECK_EQ(0, NumberOfWeakCalls);
+ CHECK_EQ(1, weakmap->table()->NumberOfElements());
+ CHECK_EQ(0, weakmap->table()->NumberOfDeletedElements());
+
+ // Make the global reference to the key weak.
+ {
+ v8::HandleScope scope;
+ global_handles->MakeWeak(key.location(),
+ reinterpret_cast<void*>(1234),
+ &WeakPointerCallback);
+ }
+ CHECK(global_handles->IsWeak(key.location()));
+
+ // Force a full GC.
+ // Perform two consecutive GCs because the first one will only clear
+ // weak references whereas the second one will also clear weak maps.
+ HEAP->CollectAllGarbage(false);
+ CHECK_EQ(1, NumberOfWeakCalls);
+ CHECK_EQ(1, weakmap->table()->NumberOfElements());
+ CHECK_EQ(0, weakmap->table()->NumberOfDeletedElements());
+ HEAP->CollectAllGarbage(false);
+ CHECK_EQ(1, NumberOfWeakCalls);
+ CHECK_EQ(0, weakmap->table()->NumberOfElements());
+ CHECK_EQ(1, weakmap->table()->NumberOfDeletedElements());
+}
+
+
+TEST(Shrinking) {
+ LocalContext context;
+ v8::HandleScope scope;
+ Handle<JSWeakMap> weakmap = AllocateJSWeakMap();
+
+ // Check initial capacity.
+ CHECK_EQ(32, weakmap->table()->Capacity());
+
+ // Fill up weak map to trigger capacity change.
+ {
+ v8::HandleScope scope;
+ Handle<Map> map = FACTORY->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
+ for (int i = 0; i < 32; i++) {
+ Handle<JSObject> object = FACTORY->NewJSObjectFromMap(map);
+ PutIntoWeakMap(weakmap, object, i);
+ }
+ }
+
+ // Check increased capacity.
+ CHECK_EQ(128, weakmap->table()->Capacity());
+
+ // Force a full GC.
+ CHECK_EQ(32, weakmap->table()->NumberOfElements());
+ CHECK_EQ(0, weakmap->table()->NumberOfDeletedElements());
+ HEAP->CollectAllGarbage(false);
+ CHECK_EQ(0, weakmap->table()->NumberOfElements());
+ CHECK_EQ(32, weakmap->table()->NumberOfDeletedElements());
+
+ // Check shrunk capacity.
+ CHECK_EQ(32, weakmap->table()->Capacity());
+}
diff --git a/test/cctest/testcfg.py b/test/cctest/testcfg.py
index a137275..b2eabc4 100644
--- a/test/cctest/testcfg.py
+++ b/test/cctest/testcfg.py
@@ -48,7 +48,11 @@
return self.path[-1]
def BuildCommand(self, name):
- serialization_file = join('obj', 'test', self.mode, 'serdes')
+ serialization_file = ''
+ if exists(join(self.context.buildspace, 'obj', 'test', self.mode)):
+ serialization_file = join('obj', 'test', self.mode, 'serdes')
+ else:
+ serialization_file = join('obj', 'serdes')
serialization_file += '_' + self.GetName()
serialization_file = join(self.context.buildspace, serialization_file)
serialization_file += ''.join(self.variant_flags).replace('-', '_')
@@ -78,10 +82,15 @@
return ['cctests']
def ListTests(self, current_path, path, mode, variant_flags):
- executable = join('obj', 'test', mode, 'cctest')
+ executable = 'cctest'
if utils.IsWindows():
executable += '.exe'
executable = join(self.context.buildspace, executable)
+ if not exists(executable):
+ executable = join('obj', 'test', mode, 'cctest')
+ if utils.IsWindows():
+ executable += '.exe'
+ executable = join(self.context.buildspace, executable)
output = test.Execute([executable, '--list'], self.context)
if output.exit_code != 0:
print output.stdout
diff --git a/test/es5conform/testcfg.py b/test/es5conform/testcfg.py
index af74b8c..b6a17d9 100644
--- a/test/es5conform/testcfg.py
+++ b/test/es5conform/testcfg.py
@@ -97,7 +97,7 @@
return tests
def GetBuildRequirements(self):
- return ['sample', 'sample=shell']
+ return ['d8']
def GetTestStatus(self, sections, defs):
status_file = join(self.root, 'es5conform.status')
diff --git a/test/message/testcfg.py b/test/message/testcfg.py
index aabbfef..af467e6 100644
--- a/test/message/testcfg.py
+++ b/test/message/testcfg.py
@@ -125,7 +125,7 @@
return result
def GetBuildRequirements(self):
- return ['sample', 'sample=shell']
+ return ['d8']
def GetTestStatus(self, sections, defs):
status_file = join(self.root, 'message.status')
diff --git a/test/mjsunit/assert-opt-and-deopt.js b/test/mjsunit/assert-opt-and-deopt.js
index f589868..c9adb5b 100644
--- a/test/mjsunit/assert-opt-and-deopt.js
+++ b/test/mjsunit/assert-opt-and-deopt.js
@@ -54,7 +54,7 @@
* that you later want to track de/optimizations for. It is necessary because
* tests are sometimes executed several times in a row, and you want to
* disregard counts from previous runs.
- */
+ */
OptTracker.prototype.CheckpointOptCount = function(func) {
this.opt_counts_[func] = %GetOptimizationCount(func);
};
@@ -148,7 +148,7 @@
tracker.AssertDeoptHappened(f, false);
tracker.AssertDeoptCount(f, 0);
-for (var i = 0; i < 2; i++) f(1);
+f(1);
tracker.AssertOptCount(f, 0);
tracker.AssertIsOptimized(f, false);
diff --git a/test/mjsunit/bugs/harmony/debug-blockscopes.js b/test/mjsunit/bugs/harmony/debug-blockscopes.js
new file mode 100644
index 0000000..a407c53
--- /dev/null
+++ b/test/mjsunit/bugs/harmony/debug-blockscopes.js
@@ -0,0 +1,224 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug --harmony-block-scoping
+// The functions used for testing backtraces. They are at the top to make the
+// testing of source line/column easier.
+
+
+// Get the Debug object exposed from the debug context global object.
+Debug = debug.Debug;
+
+var test_name;
+var listener_delegate;
+var listener_called;
+var exception;
+var begin_test_count = 0;
+var end_test_count = 0;
+var break_count = 0;
+
+
+// Debug event listener which delegates.
+function listener(event, exec_state, event_data, data) {
+ try {
+ if (event == Debug.DebugEvent.Break) {
+ break_count++;
+ listener_called = true;
+ listener_delegate(exec_state);
+ }
+ } catch (e) {
+ exception = e;
+ }
+}
+
+// Add the debug event listener.
+Debug.setListener(listener);
+
+
+// Initialize for a new test.
+function BeginTest(name) {
+ test_name = name;
+ listener_delegate = null;
+ listener_called = false;
+ exception = null;
+ begin_test_count++;
+}
+
+
+// Check result of a test.
+function EndTest() {
+  assertTrue(listener_called, "listener not called for " + test_name);
+ assertNull(exception, test_name);
+ end_test_count++;
+}
+
+
+// Check that the scope chain contains the expected types of scopes.
+function CheckScopeChain(scopes, exec_state) {
+ assertEquals(scopes.length, exec_state.frame().scopeCount());
+ for (var i = 0; i < scopes.length; i++) {
+ var scope = exec_state.frame().scope(i);
+ assertTrue(scope.isScope());
+ assertEquals(scopes[i], scope.scopeType());
+
+ // Check the global object when hitting the global scope.
+ if (scopes[i] == debug.ScopeType.Global) {
+      // Objects don't have the same class (one is "global", the other is
+      // "Object"), so just check the properties directly.
+ assertPropertiesEqual(this, scope.scopeObject().value());
+ }
+ }
+
+ // Get the debug command processor.
+ var dcp = exec_state.debugCommandProcessor("unspecified_running_state");
+
+ // Send a scopes request and check the result.
+ var json;
+ var request_json = '{"seq":0,"type":"request","command":"scopes"}';
+ var response_json = dcp.processDebugJSONRequest(request_json);
+ var response = JSON.parse(response_json);
+ assertEquals(scopes.length, response.body.scopes.length);
+ for (var i = 0; i < scopes.length; i++) {
+ assertEquals(i, response.body.scopes[i].index);
+ assertEquals(scopes[i], response.body.scopes[i].type);
+ if (scopes[i] == debug.ScopeType.Local ||
+ scopes[i] == debug.ScopeType.Closure) {
+ assertTrue(response.body.scopes[i].object.ref < 0);
+ } else {
+ assertTrue(response.body.scopes[i].object.ref >= 0);
+ }
+ var found = false;
+ for (var j = 0; j < response.refs.length && !found; j++) {
+ found = response.refs[j].handle == response.body.scopes[i].object.ref;
+ }
+ assertTrue(found, "Scope object " + response.body.scopes[i].object.ref + " not found");
+ }
+}
+
+// Check that the content of the scope is as expected. For functions just check
+// that there is a function.
+function CheckScopeContent(content, number, exec_state) {
+ var scope = exec_state.frame().scope(number);
+ var count = 0;
+ for (var p in content) {
+ var property_mirror = scope.scopeObject().property(p);
+ if (property_mirror.isUndefined()) {
+ print('property ' + p + ' not found in scope');
+ }
+ assertFalse(property_mirror.isUndefined(), 'property ' + p + ' not found in scope');
+ if (typeof(content[p]) === 'function') {
+ assertTrue(property_mirror.value().isFunction());
+ } else {
+ assertEquals(content[p], property_mirror.value().value(), 'property ' + p + ' has unexpected value');
+ }
+ count++;
+ }
+
+  // 'arguments' might be exposed in the local and closure scope. Just
+  // ignore this.
+ var scope_size = scope.scopeObject().properties().length;
+ if (!scope.scopeObject().property('arguments').isUndefined()) {
+ scope_size--;
+ }
+ // Also ignore synthetic variable from catch block.
+ if (!scope.scopeObject().property('.catch-var').isUndefined()) {
+ scope_size--;
+ }
+ // Skip property with empty name.
+ if (!scope.scopeObject().property('').isUndefined()) {
+ scope_size--;
+ }
+ // Also ignore synthetic variable from block scopes.
+ if (!scope.scopeObject().property('.block').isUndefined()) {
+ scope_size--;
+ }
+
+ if (count != scope_size) {
+ print('Names found in scope:');
+ var names = scope.scopeObject().propertyNames();
+ for (var i = 0; i < names.length; i++) {
+ print(names[i]);
+ }
+ }
+ assertEquals(count, scope_size);
+
+ // Get the debug command processor.
+ var dcp = exec_state.debugCommandProcessor("unspecified_running_state");
+
+ // Send a scope request for information on a single scope and check the
+ // result.
+ var request_json = '{"seq":0,"type":"request","command":"scope","arguments":{"number":';
+ request_json += scope.scopeIndex();
+ request_json += '}}';
+ var response_json = dcp.processDebugJSONRequest(request_json);
+ var response = JSON.parse(response_json);
+ assertEquals(scope.scopeType(), response.body.type);
+ assertEquals(number, response.body.index);
+ if (scope.scopeType() == debug.ScopeType.Local ||
+ scope.scopeType() == debug.ScopeType.Closure) {
+ assertTrue(response.body.object.ref < 0);
+ } else {
+ assertTrue(response.body.object.ref >= 0);
+ }
+ var found = false;
+ for (var i = 0; i < response.refs.length && !found; i++) {
+ found = response.refs[i].handle == response.body.object.ref;
+ }
+ assertTrue(found, "Scope object " + response.body.object.ref + " not found");
+}
+
+
+// Simple closure formed by returning an inner function referring to an outer
+// block local variable and an outer function's parameter. Due to VM
+// optimizations parts of the actual closure are missing from the debugger
+// information.
+BeginTest("Closure 1");
+
+function closure_1(a) {
+ var x = 2;
+ let y = 3;
+ if (true) {
+ let z = 4;
+ function f() {
+ debugger;
+ return a + x + y + z;
+ };
+ return f;
+ }
+}
+
+listener_delegate = function(exec_state) {
+ CheckScopeChain([debug.ScopeType.Local,
+ debug.ScopeType.Block,
+ debug.ScopeType.Closure,
+ debug.ScopeType.Global], exec_state);
+ CheckScopeContent({}, 0, exec_state);
+ CheckScopeContent({z:4}, 1, exec_state);
+ CheckScopeContent({a:1,x:2,y:3}, 2, exec_state);
+};
+closure_1(1)();
+EndTest();
diff --git a/test/mjsunit/d8-os.js b/test/mjsunit/d8-os.js
index 630a39e..fd6fb77 100644
--- a/test/mjsunit/d8-os.js
+++ b/test/mjsunit/d8-os.js
@@ -30,6 +30,9 @@
// implemented on Windows, and even if it were then many of the things
// we are calling would not be available.
+var TEST_DIR = "d8-os-test-directory-" + ((Math.random() * (1<<30)) | 0);
+
+
function arg_error(str) {
try {
eval(str);
@@ -53,96 +56,98 @@
if (this.os && os.system) {
try {
// Delete the dir if it is lying around from last time.
- os.system("ls", ["d8-os-test-directory"]);
- os.system("rm", ["-r", "d8-os-test-directory"]);
+ os.system("ls", [TEST_DIR]);
+ os.system("rm", ["-r", TEST_DIR]);
} catch (e) {
}
- os.mkdirp("d8-os-test-directory");
- os.chdir("d8-os-test-directory");
- // Check the chdir worked.
- os.system('ls', ['../d8-os-test-directory']);
- // Simple create dir.
- os.mkdirp("dir");
- // Create dir in dir.
- os.mkdirp("dir/foo");
- // Check that they are there.
- os.system('ls', ['dir/foo']);
- // Check that we can detect when something is not there.
- assertThrows("os.system('ls', ['dir/bar']);", "dir not there");
- // Check that mkdirp makes intermediate directories.
- os.mkdirp("dir2/foo");
- os.system("ls", ["dir2/foo"]);
- // Check that mkdirp doesn't mind if the dir is already there.
- os.mkdirp("dir2/foo");
- os.mkdirp("dir2/foo/");
- // Check that mkdirp can cope with trailing /
- os.mkdirp("dir3/");
- os.system("ls", ["dir3"]);
- // Check that we get an error if the name is taken by a file.
- os.system("sh", ["-c", "echo foo > file1"]);
- os.system("ls", ["file1"]);
- assertThrows("os.mkdirp('file1');", "mkdir over file1");
- assertThrows("os.mkdirp('file1/foo');", "mkdir over file2");
- assertThrows("os.mkdirp('file1/');", "mkdir over file3");
- assertThrows("os.mkdirp('file1/foo/');", "mkdir over file4");
- // Create a dir we cannot read.
- os.mkdirp("dir4", 0);
- // This test fails if you are root since root can read any dir.
- assertThrows("os.chdir('dir4');", "chdir dir4 I");
- os.rmdir("dir4");
- assertThrows("os.chdir('dir4');", "chdir dir4 II");
- // Set umask.
- var old_umask = os.umask(0777);
- // Create a dir we cannot read.
- os.mkdirp("dir5");
- // This test fails if you are root since root can read any dir.
- assertThrows("os.chdir('dir5');", "cd dir5 I");
- os.rmdir("dir5");
- assertThrows("os.chdir('dir5');", "chdir dir5 II");
- os.umask(old_umask);
-
- os.mkdirp("hest/fisk/../fisk/ged");
- os.system("ls", ["hest/fisk/ged"]);
-
- os.setenv("FOO", "bar");
- var environment = os.system("printenv");
- assertTrue(/FOO=bar/.test(environment));
-
- // Check we time out.
- var have_sleep = true;
- var have_echo = true;
+ os.mkdirp(TEST_DIR);
+ os.chdir(TEST_DIR);
try {
- os.system("ls", ["/bin/sleep"]);
- } catch (e) {
- have_sleep = false;
- }
- try {
- os.system("ls", ["/bin/echo"]);
- } catch (e) {
- have_echo = false;
- }
- if (have_sleep) {
- assertThrows("os.system('sleep', ['2000'], 200);", "sleep 1");
+ // Check the chdir worked.
+ os.system('ls', ['../' + TEST_DIR]);
+ // Simple create dir.
+ os.mkdirp("dir");
+ // Create dir in dir.
+ os.mkdirp("dir/foo");
+ // Check that they are there.
+ os.system('ls', ['dir/foo']);
+ // Check that we can detect when something is not there.
+ assertThrows("os.system('ls', ['dir/bar']);", "dir not there");
+ // Check that mkdirp makes intermediate directories.
+ os.mkdirp("dir2/foo");
+ os.system("ls", ["dir2/foo"]);
+ // Check that mkdirp doesn't mind if the dir is already there.
+ os.mkdirp("dir2/foo");
+ os.mkdirp("dir2/foo/");
+ // Check that mkdirp can cope with trailing /
+ os.mkdirp("dir3/");
+ os.system("ls", ["dir3"]);
+ // Check that we get an error if the name is taken by a file.
+ os.system("sh", ["-c", "echo foo > file1"]);
+ os.system("ls", ["file1"]);
+ assertThrows("os.mkdirp('file1');", "mkdir over file1");
+ assertThrows("os.mkdirp('file1/foo');", "mkdir over file2");
+ assertThrows("os.mkdirp('file1/');", "mkdir over file3");
+ assertThrows("os.mkdirp('file1/foo/');", "mkdir over file4");
+ // Create a dir we cannot read.
+ os.mkdirp("dir4", 0);
+ // This test fails if you are root since root can read any dir.
+ assertThrows("os.chdir('dir4');", "chdir dir4 I");
+ os.rmdir("dir4");
+ assertThrows("os.chdir('dir4');", "chdir dir4 II");
+ // Set umask.
+ var old_umask = os.umask(0777);
+ // Create a dir we cannot read.
+ os.mkdirp("dir5");
+ // This test fails if you are root since root can read any dir.
+ assertThrows("os.chdir('dir5');", "cd dir5 I");
+ os.rmdir("dir5");
+ assertThrows("os.chdir('dir5');", "chdir dir5 II");
+ os.umask(old_umask);
- // Check we time out with total time.
- assertThrows("os.system('sleep', ['2000'], -1, 200);", "sleep 2");
+ os.mkdirp("hest/fisk/../fisk/ged");
+ os.system("ls", ["hest/fisk/ged"]);
- // Check that -1 means no timeout.
- os.system('sleep', ['1'], -1, -1);
+ os.setenv("FOO", "bar");
+ var environment = os.system("printenv");
+ assertTrue(/FOO=bar/.test(environment));
- }
+ // Check we time out.
+ var have_sleep = true;
+ var have_echo = true;
+ try {
+ os.system("ls", ["/bin/sleep"]);
+ } catch (e) {
+ have_sleep = false;
+ }
+ try {
+ os.system("ls", ["/bin/echo"]);
+ } catch (e) {
+ have_echo = false;
+ }
+ if (have_sleep) {
+ assertThrows("os.system('sleep', ['2000'], 200);", "sleep 1");
- // Check that we don't fill up the process table with zombies.
- // Disabled because it's too slow.
- if (have_echo) {
- //for (var i = 0; i < 65536; i++) {
+ // Check we time out with total time.
+ assertThrows("os.system('sleep', ['2000'], -1, 200);", "sleep 2");
+
+ // Check that -1 means no timeout.
+ os.system('sleep', ['1'], -1, -1);
+
+ }
+
+ // Check that we don't fill up the process table with zombies.
+ // Disabled because it's too slow.
+ if (have_echo) {
+ //for (var i = 0; i < 65536; i++) {
assertEquals("baz\n", os.system("echo", ["baz"]));
- //}
+ //}
+ }
+ } finally {
+ os.chdir("..");
+ os.system("rm", ["-r", TEST_DIR]);
}
- os.chdir("..");
- os.system("rm", ["-r", "d8-os-test-directory"]);
-
// Too few args.
arg_error("os.umask();");
arg_error("os.system();");
diff --git a/test/mjsunit/debug-script.js b/test/mjsunit/debug-script.js
index 643dd8c..9767888 100644
--- a/test/mjsunit/debug-script.js
+++ b/test/mjsunit/debug-script.js
@@ -34,13 +34,19 @@
// Count script types.
var named_native_count = 0;
+var named_native_names = {};
var extension_count = 0;
var normal_count = 0;
var scripts = Debug.scripts();
for (i = 0; i < scripts.length; i++) {
if (scripts[i].type == Debug.ScriptType.Native) {
if (scripts[i].name) {
- named_native_count++;
+ // TODO(1641): Remove check for equally named native scripts once the
+ // underlying issue is fixed.
+ if (!named_native_names[scripts[i].name]) {
+ named_native_names[scripts[i].name] = true;
+ named_native_count++;
+ }
}
} else if (scripts[i].type == Debug.ScriptType.Extension) {
extension_count++;
diff --git a/test/mjsunit/external-array.js b/test/mjsunit/external-array.js
index 94105ec..d7e9de0 100644
--- a/test/mjsunit/external-array.js
+++ b/test/mjsunit/external-array.js
@@ -190,9 +190,19 @@
gc(); // Makes V8 forget about type information for test_func.
}
+function run_bounds_test(test_func, array, expected_result) {
+ assertEquals(undefined, a[kElementCount]);
+ a[kElementCount] = 456;
+ assertEquals(undefined, a[kElementCount]);
+ assertEquals(undefined, a[kElementCount+1]);
+ a[kElementCount+1] = 456;
+ assertEquals(undefined, a[kElementCount+1]);
+}
+
for (var t = 0; t < types.length; t++) {
var type = types[t];
var a = new type(kElementCount);
+
for (var i = 0; i < kElementCount; i++) {
a[i] = i;
}
@@ -220,6 +230,16 @@
assertTrue(delete a.length);
a.length = 2;
assertEquals(2, a.length);
+
+ // Make sure bounds checks are handled correctly for external arrays.
+ run_bounds_test(a);
+ run_bounds_test(a);
+ run_bounds_test(a);
+ %OptimizeFunctionOnNextCall(run_bounds_test);
+ run_bounds_test(a);
+ %DeoptimizeFunction(run_bounds_test);
+  gc(); // Makes V8 forget about type information for run_bounds_test.
+
}
function array_load_set_smi_check(a) {
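The bounds-check additions above use the usual mjsunit optimize-and-recheck pattern: run the function a few times unoptimized, force optimization with %OptimizeFunctionOnNextCall, and assert the behavior is unchanged. A hypothetical standalone sketch of that pattern (readOutOfBounds is illustrative only; it assumes --allow-natives-syntax and the mjsunit assert helpers):

// Out-of-bounds reads on a typed/external array yield undefined, in both
// unoptimized and optimized code.
function readOutOfBounds(arr) {
  return arr[arr.length];
}
var ta = new Int32Array(10);
assertEquals(undefined, readOutOfBounds(ta));   // warm up unoptimized
assertEquals(undefined, readOutOfBounds(ta));
%OptimizeFunctionOnNextCall(readOutOfBounds);
assertEquals(undefined, readOutOfBounds(ta));   // optimized code must agree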
diff --git a/test/mjsunit/fuzz-natives.js b/test/mjsunit/fuzz-natives.js
index ffa9268..f8f0a28 100644
--- a/test/mjsunit/fuzz-natives.js
+++ b/test/mjsunit/fuzz-natives.js
@@ -146,6 +146,7 @@
"NewStrictArgumentsFast": true,
"PushWithContext": true,
"PushCatchContext": true,
+ "PushBlockContext": true,
"LazyCompile": true,
"LazyRecompile": true,
"NotifyDeoptimized": true,
diff --git a/src/shell.h b/test/mjsunit/harmony/block-lazy-compile.js
similarity index 66%
copy from src/shell.h
copy to test/mjsunit/harmony/block-lazy-compile.js
index ca51040..a6efcbf 100644
--- a/src/shell.h
+++ b/test/mjsunit/harmony/block-lazy-compile.js
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -24,32 +24,27 @@
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-//
-// A simple interactive shell. Enable with --shell.
-#ifndef V8_SHELL_H_
-#define V8_SHELL_H_
+// Flags: --allow-natives-syntax
+// Test deserialization of block contexts during lazy compilation
+// of closures.
-#include "../public/debug.h"
+function f() {
+ var g;
+ {
+ // TODO(keuchel): introduce let
+ var x = 0;
+ g = function () {
+ x = x + 1;
+ return x;
+ }
+ }
+ return g;
+}
-namespace v8 {
-namespace internal {
-
-// Debug event handler for interactive debugging.
-void handle_debug_event(v8::DebugEvent event,
- v8::Handle<v8::Object> exec_state,
- v8::Handle<v8::Object> event_data,
- v8::Handle<Value> data);
-
-
-class Shell {
- public:
- static void PrintObject(v8::Handle<v8::Value> obj);
- // Run the read-eval loop, executing code in the specified
- // environment.
- static void Run(v8::Handle<v8::Context> context);
-};
-
-} } // namespace v8::internal
-
-#endif // V8_SHELL_H_
+var o = f();
+assertEquals(1, o());
+assertEquals(2, o());
+assertEquals(3, o());
+%OptimizeFunctionOnNextCall(o);
+assertEquals(4, o());
diff --git a/src/shell.h b/test/mjsunit/harmony/block-let-crankshaft.js
similarity index 66%
copy from src/shell.h
copy to test/mjsunit/harmony/block-let-crankshaft.js
index ca51040..c2fb96b 100644
--- a/src/shell.h
+++ b/test/mjsunit/harmony/block-let-crankshaft.js
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -24,32 +24,40 @@
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-//
-// A simple interactive shell. Enable with --shell.
-#ifndef V8_SHELL_H_
-#define V8_SHELL_H_
+// Flags: --harmony-block-scoping --allow-natives-syntax
-#include "../public/debug.h"
+// Test that temporal dead zone semantics for function and block scoped
+// let bindings are handled by the optimizing compiler.
-namespace v8 {
-namespace internal {
+function f(x, b) {
+ let y = (b ? y : x) + 42;
+ return y;
+}
-// Debug event handler for interactive debugging.
-void handle_debug_event(v8::DebugEvent event,
- v8::Handle<v8::Object> exec_state,
- v8::Handle<v8::Object> event_data,
- v8::Handle<Value> data);
+function g(x, b) {
+ {
+ let y = (b ? y : x) + 42;
+ return y;
+ }
+}
+for (var i=0; i<10; i++) {
+ f(i, false);
+ g(i, false);
+}
-class Shell {
- public:
- static void PrintObject(v8::Handle<v8::Value> obj);
- // Run the read-eval loop, executing code in the specified
- // environment.
- static void Run(v8::Handle<v8::Context> context);
-};
+%OptimizeFunctionOnNextCall(f);
+%OptimizeFunctionOnNextCall(g);
-} } // namespace v8::internal
+try {
+ f(42, true);
+} catch (e) {
+ assertInstanceof(e, ReferenceError);
+}
-#endif // V8_SHELL_H_
+try {
+ g(42, true);
+} catch (e) {
+ assertInstanceof(e, ReferenceError);
+}
diff --git a/src/shell.h b/test/mjsunit/harmony/block-let-declaration.js
similarity index 61%
copy from src/shell.h
copy to test/mjsunit/harmony/block-let-declaration.js
index ca51040..19c943f 100644
--- a/src/shell.h
+++ b/test/mjsunit/harmony/block-let-declaration.js
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -24,32 +24,44 @@
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-//
-// A simple interactive shell. Enable with --shell.
-#ifndef V8_SHELL_H_
-#define V8_SHELL_H_
+// Flags: --harmony-block-scoping
-#include "../public/debug.h"
+// Test let declarations in various settings.
-namespace v8 {
-namespace internal {
+// Global
+let x;
+let y = 2;
-// Debug event handler for interactive debugging.
-void handle_debug_event(v8::DebugEvent event,
- v8::Handle<v8::Object> exec_state,
- v8::Handle<v8::Object> event_data,
- v8::Handle<Value> data);
+// Block local
+{
+ let y;
+ let x = 3;
+}
+assertEquals(undefined, x);
+assertEquals(2,y);
-class Shell {
- public:
- static void PrintObject(v8::Handle<v8::Value> obj);
- // Run the read-eval loop, executing code in the specified
- // environment.
- static void Run(v8::Handle<v8::Context> context);
-};
+if (true) {
+ let y;
+ assertEquals(undefined, y);
+}
-} } // namespace v8::internal
+function TestLocalThrows(str, expect) {
+ assertThrows("(function(){" + str + "})()", expect);
+}
-#endif // V8_SHELL_H_
+function TestLocalDoesNotThrow(str) {
+ assertDoesNotThrow("(function(){" + str + "})()");
+}
+
+// Unprotected statement
+TestLocalThrows("if (true) let x;", SyntaxError);
+TestLocalThrows("with ({}) let x;", SyntaxError);
+TestLocalThrows("do let x; while (false)", SyntaxError);
+TestLocalThrows("while (false) let x;", SyntaxError);
+
+TestLocalDoesNotThrow("if (true) var x;");
+TestLocalDoesNotThrow("with ({}) var x;");
+TestLocalDoesNotThrow("do var x; while (false)");
+TestLocalDoesNotThrow("while (false) var x;");
diff --git a/test/mjsunit/harmony/block-let-semantics.js b/test/mjsunit/harmony/block-let-semantics.js
new file mode 100644
index 0000000..198c3b4
--- /dev/null
+++ b/test/mjsunit/harmony/block-let-semantics.js
@@ -0,0 +1,138 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --harmony-block-scoping
+
+// Test temporal dead zone semantics of let bound variables in
+// function and block scopes.
+
+function TestFunctionLocal(s) {
+ try {
+ eval("(function(){" + s + "; })")();
+ } catch (e) {
+ assertInstanceof(e, ReferenceError);
+ return;
+ }
+ assertUnreachable();
+}
+
+function TestBlockLocal(s,e) {
+ try {
+ eval("(function(){ {" + s + ";} })")();
+ } catch (e) {
+ assertInstanceof(e, ReferenceError);
+ return;
+ }
+ assertUnreachable();
+}
+
+
+function TestAll(s) {
+ TestBlockLocal(s);
+ TestFunctionLocal(s);
+}
+
+// Use before initialization in declaration statement.
+TestAll('let x = x + 1');
+TestAll('let x = x += 1');
+TestAll('let x = x++');
+TestAll('let x = ++x');
+
+// Use before initialization in prior statement.
+TestAll('x + 1; let x;');
+TestAll('x = 1; let x;');
+TestAll('x += 1; let x;');
+TestAll('++x; let x;');
+TestAll('x++; let x;');
+
+TestAll('f(); let x; function f() { return x + 1; }');
+TestAll('f(); let x; function f() { x = 1; }');
+TestAll('f(); let x; function f() { x += 1; }');
+TestAll('f(); let x; function f() { ++x; }');
+TestAll('f(); let x; function f() { x++; }');
+
+TestAll('f()(); let x; function f() { return function() { return x + 1; } }');
+TestAll('f()(); let x; function f() { return function() { x = 1; } }');
+TestAll('f()(); let x; function f() { return function() { x += 1; } }');
+TestAll('f()(); let x; function f() { return function() { ++x; } }');
+TestAll('f()(); let x; function f() { return function() { x++; } }');
+
+// Use before initialization with a dynamic lookup.
+TestAll('eval("x + 1;"); let x;');
+TestAll('eval("x = 1;"); let x;');
+TestAll('eval("x += 1;"); let x;');
+TestAll('eval("++x;"); let x;');
+TestAll('eval("x++;"); let x;');
+
+// Test that variables introduced by function declarations are created and
+// initialized upon entering a function / block scope.
+function f() {
+ {
+ assertEquals(2, g1());
+ assertEquals(2, eval("g1()"));
+
+ // block scoped function declaration
+ function g1() {
+ return 2;
+ }
+ }
+
+ assertEquals(3, g2());
+ assertEquals(3, eval("g2()"));
+ // function scoped function declaration
+ function g2() {
+ return 3;
+ }
+}
+f();
+
+// Test that a function declaration introduces a block scoped variable.
+TestAll('{ function k() { return 0; } }; k(); ');
+
+// Test that a function declaration sees the scope it resides in.
+function f2() {
+ let m, n;
+ {
+ m = g;
+ function g() {
+ return a;
+ }
+ let a = 1;
+ }
+ assertEquals(1, m());
+
+ try {
+ throw 2;
+ } catch(b) {
+ n = h;
+ function h() {
+ return b + c;
+ }
+ let b = 3;
+ }
+ assertEquals(5, n());
+}
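For context, the temporal dead zone these tests exercise means a let binding exists from the top of its scope, but any read or write before its declaration executes throws a ReferenceError. A hypothetical sketch under the same --harmony-block-scoping semantics (tdzExample is illustrative only; assertUnreachable, assertInstanceof and assertEquals are the mjsunit helpers already used in this file):

function tdzExample() {
  try {
    x + 1;                       // read before the declaration executes
    assertUnreachable();
  } catch (e) {
    assertInstanceof(e, ReferenceError);
  }
  let x = 1;                     // the binding becomes initialized here
  return x;
}
assertEquals(1, tdzExample());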
diff --git a/test/mjsunit/harmony/block-scoping.js b/test/mjsunit/harmony/block-scoping.js
new file mode 100644
index 0000000..266e380
--- /dev/null
+++ b/test/mjsunit/harmony/block-scoping.js
@@ -0,0 +1,216 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --allow-natives-syntax --harmony-block-scoping
+// Test functionality of block scopes.
+
+// Hoisting of var declarations.
+function f1() {
+ {
+ var x = 1;
+ var y;
+ }
+ assertEquals(1, x)
+ assertEquals(undefined, y)
+}
+f1();
+
+
+// Dynamic lookup in and through block contexts.
+function f2(one) {
+ var x = one + 1;
+ let y = one + 2;
+ {
+ let z = one + 3;
+ assertEquals(1, eval('one'));
+ assertEquals(2, eval('x'));
+ assertEquals(3, eval('y'));
+ assertEquals(4, eval('z'));
+ }
+}
+f2(1);
+
+
+// Lookup in and through block contexts.
+function f3(one) {
+ var x = one + 1;
+ let y = one + 2;
+ {
+ let z = one + 3;
+ assertEquals(1, one);
+ assertEquals(2, x);
+ assertEquals(3, y);
+ assertEquals(4, z);
+ }
+}
+f3(1);
+
+
+// Dynamic lookup from closure.
+function f4(one) {
+ var x = one + 1;
+ let y = one + 2;
+ {
+ let z = one + 3;
+ function f() {
+ assertEquals(1, eval('one'));
+ assertEquals(2, eval('x'));
+ assertEquals(3, eval('y'));
+ assertEquals(4, eval('z'));
+ };
+ }
+}
+f4(1);
+
+
+// Lookup from closure.
+function f5(one) {
+ var x = one + 1;
+ let y = one + 2;
+ {
+ let z = one + 3;
+ function f() {
+ assertEquals(1, one);
+ assertEquals(2, x);
+ assertEquals(3, y);
+ assertEquals(4, z);
+ };
+ }
+}
+f5(1);
+
+
+// Return from block.
+function f6() {
+ let x = 1;
+ {
+ let y = 2;
+ return x + y;
+ }
+}
+assertEquals(3, f6(6));
+
+
+// Variable shadowing and lookup.
+function f7(a) {
+ let b = 1;
+ var c = 1;
+ var d = 1;
+ { // let variables shadowing argument, let and var variables
+ let a = 2;
+ let b = 2;
+ let c = 2;
+ assertEquals(2,a);
+ assertEquals(2,b);
+ assertEquals(2,c);
+ }
+ try {
+ throw 'stuff1';
+ } catch (a) {
+ assertEquals('stuff1',a);
+ // catch variable shadowing argument
+ a = 2;
+ assertEquals(2,a);
+ {
+ // let variable shadowing catch variable
+ let a = 3;
+ assertEquals(3,a);
+ try {
+ throw 'stuff2';
+ } catch (a) {
+ assertEquals('stuff2',a);
+ // catch variable shadowing let variable
+ a = 4;
+ assertEquals(4,a);
+ }
+ assertEquals(3,a);
+ }
+ assertEquals(2,a);
+ }
+ try {
+ throw 'stuff3';
+ } catch (c) {
+ // catch variable shadowing var variable
+ assertEquals('stuff3',c);
+ try {
+ throw 'stuff4';
+ } catch(c) {
+ assertEquals('stuff4',c);
+ // catch variable shadowing catch variable
+ c = 3;
+ assertEquals(3,c);
+ }
+ (function(c) {
+ // argument shadowing catch variable
+ c = 3;
+ assertEquals(3,c);
+ })();
+ assertEquals('stuff3', c);
+ (function() {
+ // var variable shadowing catch variable
+ var c = 3;
+ })();
+ assertEquals('stuff3', c);
+ c = 2;
+ }
+ assertEquals(1,c);
+ (function(a,b,c) {
+ // arguments shadowing argument, let and var variable
+ a = 2;
+ b = 2;
+ c = 2;
+ assertEquals(2,a);
+ assertEquals(2,b);
+ assertEquals(2,c);
+ // var variable shadowing var variable
+ var d = 2;
+ })(1,1);
+ assertEquals(1,a);
+ assertEquals(1,b);
+ assertEquals(1,c);
+ assertEquals(1,d);
+}
+f7(1);
+
+
+// Ensure let variables are block local and var variables function local.
+function f8() {
+ var let_accessors = [];
+ var var_accessors = [];
+ for (var i = 0; i < 10; i++) {
+ let x = i;
+ var y = i;
+ let_accessors[i] = function() { return x; }
+ var_accessors[i] = function() { return y; }
+ }
+ for (var j = 0; j < 10; j++) {
+ y = j + 10;
+ assertEquals(j, let_accessors[j]());
+ assertEquals(y, var_accessors[j]());
+ }
+}
+f8();
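f8 above hinges on the capture difference between block-scoped and function-scoped bindings: each loop iteration's block gets a fresh let binding, while a var is a single function-level slot. A condensed, hypothetical version of the same check (captureExample is illustrative only):

function captureExample() {
  var letFns = [], varFns = [];
  for (var i = 0; i < 3; i++) {
    let a = i;                              // fresh binding per iteration
    var b = i;                              // one binding for the whole function
    letFns.push(function() { return a; });
    varFns.push(function() { return b; });
  }
  assertEquals(0, letFns[0]());             // sees the value captured at i == 0
  assertEquals(2, varFns[0]());             // sees the last assignment to b
}
captureExample();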
diff --git a/test/mjsunit/harmony/debug-blockscopes.js b/test/mjsunit/harmony/debug-blockscopes.js
new file mode 100644
index 0000000..e0df71b
--- /dev/null
+++ b/test/mjsunit/harmony/debug-blockscopes.js
@@ -0,0 +1,389 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug --harmony-block-scoping
+// The functions used for testing backtraces. They are at the top to make the
+// testing of source line/column easier.
+
+
+// Get the Debug object exposed from the debug context global object.
+Debug = debug.Debug;
+
+var test_name;
+var listener_delegate;
+var listener_called;
+var exception;
+var begin_test_count = 0;
+var end_test_count = 0;
+var break_count = 0;
+
+
+// Debug event listener which delegates.
+function listener(event, exec_state, event_data, data) {
+ try {
+ if (event == Debug.DebugEvent.Break) {
+ break_count++;
+ listener_called = true;
+ listener_delegate(exec_state);
+ }
+ } catch (e) {
+ exception = e;
+ }
+}
+
+// Add the debug event listener.
+Debug.setListener(listener);
+
+
+// Initialize for a new test.
+function BeginTest(name) {
+ test_name = name;
+ listener_delegate = null;
+ listener_called = false;
+ exception = null;
+ begin_test_count++;
+}
+
+
+// Check result of a test.
+function EndTest() {
+  assertTrue(listener_called, "listener not called for " + test_name);
+ assertNull(exception, test_name);
+ end_test_count++;
+}
+
+
+// Check that the scope chain contains the expected types of scopes.
+function CheckScopeChain(scopes, exec_state) {
+ assertEquals(scopes.length, exec_state.frame().scopeCount());
+ for (var i = 0; i < scopes.length; i++) {
+ var scope = exec_state.frame().scope(i);
+ assertTrue(scope.isScope());
+ assertEquals(scopes[i], scope.scopeType());
+
+ // Check the global object when hitting the global scope.
+ if (scopes[i] == debug.ScopeType.Global) {
+      // Objects don't have the same class (one is "global", the other is
+      // "Object"), so just check the properties directly.
+ assertPropertiesEqual(this, scope.scopeObject().value());
+ }
+ }
+
+ // Get the debug command processor.
+ var dcp = exec_state.debugCommandProcessor("unspecified_running_state");
+
+ // Send a scopes request and check the result.
+ var json;
+ var request_json = '{"seq":0,"type":"request","command":"scopes"}';
+ var response_json = dcp.processDebugJSONRequest(request_json);
+ var response = JSON.parse(response_json);
+ assertEquals(scopes.length, response.body.scopes.length);
+ for (var i = 0; i < scopes.length; i++) {
+ assertEquals(i, response.body.scopes[i].index);
+ assertEquals(scopes[i], response.body.scopes[i].type);
+ if (scopes[i] == debug.ScopeType.Local ||
+ scopes[i] == debug.ScopeType.Closure) {
+ assertTrue(response.body.scopes[i].object.ref < 0);
+ } else {
+ assertTrue(response.body.scopes[i].object.ref >= 0);
+ }
+ var found = false;
+ for (var j = 0; j < response.refs.length && !found; j++) {
+ found = response.refs[j].handle == response.body.scopes[i].object.ref;
+ }
+ assertTrue(found, "Scope object " + response.body.scopes[i].object.ref + " not found");
+ }
+}
+
+// Check that the content of the scope is as expected. For functions just check
+// that there is a function.
+function CheckScopeContent(content, number, exec_state) {
+ var scope = exec_state.frame().scope(number);
+ var count = 0;
+ for (var p in content) {
+ var property_mirror = scope.scopeObject().property(p);
+ if (property_mirror.isUndefined()) {
+ print('property ' + p + ' not found in scope');
+ }
+ assertFalse(property_mirror.isUndefined(), 'property ' + p + ' not found in scope');
+ if (typeof(content[p]) === 'function') {
+ assertTrue(property_mirror.value().isFunction());
+ } else {
+ assertEquals(content[p], property_mirror.value().value(), 'property ' + p + ' has unexpected value');
+ }
+ count++;
+ }
+
+  // 'arguments' might be exposed in the local and closure scope. Just
+ // ignore this.
+ var scope_size = scope.scopeObject().properties().length;
+ if (!scope.scopeObject().property('arguments').isUndefined()) {
+ scope_size--;
+ }
+ // Also ignore synthetic variable from catch block.
+ if (!scope.scopeObject().property('.catch-var').isUndefined()) {
+ scope_size--;
+ }
+ // Skip property with empty name.
+ if (!scope.scopeObject().property('').isUndefined()) {
+ scope_size--;
+ }
+ // Also ignore synthetic variable from block scopes.
+ if (!scope.scopeObject().property('.block').isUndefined()) {
+ scope_size--;
+ }
+
+ if (count != scope_size) {
+ print('Names found in scope:');
+ var names = scope.scopeObject().propertyNames();
+ for (var i = 0; i < names.length; i++) {
+ print(names[i]);
+ }
+ }
+ assertEquals(count, scope_size);
+
+ // Get the debug command processor.
+ var dcp = exec_state.debugCommandProcessor("unspecified_running_state");
+
+ // Send a scope request for information on a single scope and check the
+ // result.
+ var request_json = '{"seq":0,"type":"request","command":"scope","arguments":{"number":';
+ request_json += scope.scopeIndex();
+ request_json += '}}';
+ var response_json = dcp.processDebugJSONRequest(request_json);
+ var response = JSON.parse(response_json);
+ assertEquals(scope.scopeType(), response.body.type);
+ assertEquals(number, response.body.index);
+ if (scope.scopeType() == debug.ScopeType.Local ||
+ scope.scopeType() == debug.ScopeType.Closure) {
+ assertTrue(response.body.object.ref < 0);
+ } else {
+ assertTrue(response.body.object.ref >= 0);
+ }
+ var found = false;
+ for (var i = 0; i < response.refs.length && !found; i++) {
+ found = response.refs[i].handle == response.body.object.ref;
+ }
+ assertTrue(found, "Scope object " + response.body.object.ref + " not found");
+}
+
+
+// Simple empty block scope in local scope.
+BeginTest("Local block 1");
+
+function local_block_1() {
+ {
+ debugger;
+ }
+}
+
+listener_delegate = function(exec_state) {
+ CheckScopeChain([debug.ScopeType.Block,
+ debug.ScopeType.Local,
+ debug.ScopeType.Global], exec_state);
+ CheckScopeContent({}, 0, exec_state);
+ CheckScopeContent({}, 1, exec_state);
+};
+local_block_1();
+EndTest();
+
+
+// Local scope with a parameter.
+BeginTest("Local 2");
+
+function local_2(a) {
+ {
+ debugger;
+ }
+}
+
+listener_delegate = function(exec_state) {
+ CheckScopeChain([debug.ScopeType.Block,
+ debug.ScopeType.Local,
+ debug.ScopeType.Global], exec_state);
+ CheckScopeContent({a:1}, 1, exec_state);
+};
+local_2(1);
+EndTest();
+
+
+// Local scope with a parameter and a local variable.
+BeginTest("Local 3");
+
+function local_3(a) {
+ let x = 3;
+ debugger;
+}
+
+listener_delegate = function(exec_state) {
+ CheckScopeChain([debug.ScopeType.Local,
+ debug.ScopeType.Global], exec_state);
+ CheckScopeContent({a:1,x:3}, 0, exec_state);
+};
+local_3(1);
+EndTest();
+
+
+// Local scope with parameters and local variables.
+BeginTest("Local 4");
+
+function local_4(a, b) {
+ let x = 3;
+ let y = 4;
+ debugger;
+}
+
+listener_delegate = function(exec_state) {
+ CheckScopeChain([debug.ScopeType.Local,
+ debug.ScopeType.Global], exec_state);
+ CheckScopeContent({a:1,b:2,x:3,y:4}, 0, exec_state);
+};
+local_4(1, 2);
+EndTest();
+
+
+// Single empty with block.
+BeginTest("With block 1");
+
+function with_block_1() {
+ with({}) {
+ debugger;
+ }
+}
+
+listener_delegate = function(exec_state) {
+ CheckScopeChain([debug.ScopeType.Block,
+ debug.ScopeType.With,
+ debug.ScopeType.Local,
+ debug.ScopeType.Global], exec_state);
+ CheckScopeContent({}, 0, exec_state);
+ CheckScopeContent({}, 1, exec_state);
+};
+with_block_1();
+EndTest();
+
+
+// Nested empty with blocks.
+BeginTest("With block 2");
+
+function with_block_2() {
+ with({}) {
+ with({}) {
+ debugger;
+ }
+ }
+}
+
+listener_delegate = function(exec_state) {
+ CheckScopeChain([debug.ScopeType.Block,
+ debug.ScopeType.With,
+ debug.ScopeType.Block,
+ debug.ScopeType.With,
+ debug.ScopeType.Local,
+ debug.ScopeType.Global], exec_state);
+ CheckScopeContent({}, 0, exec_state);
+ CheckScopeContent({}, 1, exec_state);
+ CheckScopeContent({}, 2, exec_state);
+ CheckScopeContent({}, 3, exec_state);
+};
+with_block_2();
+EndTest();
+
+
+// With block using an in-place object literal.
+BeginTest("With block 3");
+
+function with_block_3() {
+ with({a:1,b:2}) {
+ debugger;
+ }
+}
+
+listener_delegate = function(exec_state) {
+ CheckScopeChain([debug.ScopeType.Block,
+ debug.ScopeType.With,
+ debug.ScopeType.Local,
+ debug.ScopeType.Global], exec_state);
+ CheckScopeContent({}, 0, exec_state);
+ CheckScopeContent({a:1,b:2}, 1, exec_state);
+};
+with_block_3();
+EndTest();
+
+
+// Nested with blocks using in-place object literals.
+BeginTest("With block 4");
+
+function with_block_4() {
+ with({a:1,b:2}) {
+ with({a:2,b:1}) {
+ debugger;
+ }
+ }
+}
+
+listener_delegate = function(exec_state) {
+ CheckScopeChain([debug.ScopeType.Block,
+ debug.ScopeType.With,
+ debug.ScopeType.Block,
+ debug.ScopeType.With,
+ debug.ScopeType.Local,
+ debug.ScopeType.Global], exec_state);
+ CheckScopeContent({a:2,b:1}, 1, exec_state);
+ CheckScopeContent({a:1,b:2}, 3, exec_state);
+};
+with_block_4();
+EndTest();
+
+
+// Simple closure formed by returning an inner function referring to an outer
+// block local variable and an outer function's parameter.
+BeginTest("Closure 1");
+
+function closure_1(a) {
+ var x = 2;
+ let y = 3;
+ if (true) {
+ let z = 4;
+ function f() {
+ debugger;
+ return a + x + y + z;
+ };
+ return f;
+ }
+}
+
+listener_delegate = function(exec_state) {
+ CheckScopeChain([debug.ScopeType.Local,
+ debug.ScopeType.Block,
+ debug.ScopeType.Closure,
+ debug.ScopeType.Global], exec_state);
+ CheckScopeContent({}, 0, exec_state);
+ CheckScopeContent({a:1,x:2,y:3}, 2, exec_state);
+};
+closure_1(1)();
+EndTest();
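The helpers in this file build debugger protocol requests by string concatenation; the same "scope" request can be expressed with JSON.stringify, which makes the expected shape easier to read. This is a hypothetical rewrite for illustration only; scopeRequest is not part of the patch, and dcp is the processor returned by exec_state.debugCommandProcessor as in CheckScopeContent:

// Hypothetical: the single-scope request that CheckScopeContent sends, built
// from an object literal instead of concatenated strings.
function scopeRequest(dcp, scopeIndex) {
  var request = {
    seq: 0,
    type: "request",
    command: "scope",
    arguments: { number: scopeIndex }
  };
  return JSON.parse(dcp.processDebugJSONRequest(JSON.stringify(request)));
}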
diff --git a/src/shell.h b/test/mjsunit/harmony/debug-evaluate-blockscopes.js
similarity index 60%
copy from src/shell.h
copy to test/mjsunit/harmony/debug-evaluate-blockscopes.js
index ca51040..549960a 100644
--- a/src/shell.h
+++ b/test/mjsunit/harmony/debug-evaluate-blockscopes.js
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -24,32 +24,41 @@
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-//
-// A simple interactive shell. Enable with --shell.
-#ifndef V8_SHELL_H_
-#define V8_SHELL_H_
+// Flags: --expose-debug-as debug --harmony-block-scoping
-#include "../public/debug.h"
+// Test debug evaluation for functions without local context, but with
+// nested catch contexts.
-namespace v8 {
-namespace internal {
-
-// Debug event handler for interactive debugging.
-void handle_debug_event(v8::DebugEvent event,
- v8::Handle<v8::Object> exec_state,
- v8::Handle<v8::Object> event_data,
- v8::Handle<Value> data);
-
-
-class Shell {
- public:
- static void PrintObject(v8::Handle<v8::Value> obj);
- // Run the read-eval loop, executing code in the specified
- // environment.
- static void Run(v8::Handle<v8::Context> context);
+function f() {
+ { // Line 1.
+ let i = 1; // Line 2.
+ try { // Line 3.
+ throw 'stuff'; // Line 4.
+ } catch (e) { // Line 5.
+ x = 2; // Line 6.
+ }
+ }
};
-} } // namespace v8::internal
+// Get the Debug object exposed from the debug context global object.
+Debug = debug.Debug
+// Set breakpoint on line 6.
+var bp = Debug.setBreakPoint(f, 6);
-#endif // V8_SHELL_H_
+function listener(event, exec_state, event_data, data) {
+ if (event == Debug.DebugEvent.Break) {
+ result = exec_state.frame().evaluate("i").value();
+ }
+};
+
+// Add the debug event listener.
+Debug.setListener(listener);
+result = -1;
+f();
+assertEquals(1, result);
+
+// Clear breakpoint.
+Debug.clearBreakPoint(bp);
+// Get rid of the debug event listener.
+Debug.setListener(null);
diff --git a/test/mjsunit/harmony/proxies.js b/test/mjsunit/harmony/proxies.js
index 84641d5..640033d 100644
--- a/test/mjsunit/harmony/proxies.js
+++ b/test/mjsunit/harmony/proxies.js
@@ -42,22 +42,27 @@
TestGet({
get: function(r, k) { return 42 }
})
+
TestGet({
get: function(r, k) { return this.get2(r, k) },
get2: function(r, k) { return 42 }
})
+
TestGet({
getPropertyDescriptor: function(k) { return {value: 42} }
})
+
TestGet({
getPropertyDescriptor: function(k) { return this.getPropertyDescriptor2(k) },
getPropertyDescriptor2: function(k) { return {value: 42} }
})
+
TestGet({
getPropertyDescriptor: function(k) {
return {get value() { return 42 }}
}
})
+
TestGet({
get: undefined,
getPropertyDescriptor: function(k) { return {value: 42} }
@@ -83,32 +88,38 @@
TestGetCall({
get: function(r, k) { return function() { return 55 } }
})
+
TestGetCall({
get: function(r, k) { return this.get2(r, k) },
get2: function(r, k) { return function() { return 55 } }
})
+
TestGetCall({
getPropertyDescriptor: function(k) {
return {value: function() { return 55 }}
}
})
+
TestGetCall({
getPropertyDescriptor: function(k) { return this.getPropertyDescriptor2(k) },
getPropertyDescriptor2: function(k) {
return {value: function() { return 55 }}
}
})
+
TestGetCall({
getPropertyDescriptor: function(k) {
return {get value() { return function() { return 55 } }}
}
})
+
TestGetCall({
get: undefined,
getPropertyDescriptor: function(k) {
return {value: function() { return 55 }}
}
})
+
TestGetCall({
get: function(r, k) {
if (k == "gg") {
@@ -146,14 +157,17 @@
TestSet({
set: function(r, k, v) { key = k; val = v; return true }
})
+
TestSet({
set: function(r, k, v) { return this.set2(r, k, v) },
set2: function(r, k, v) { key = k; val = v; return true }
})
+
TestSet({
getOwnPropertyDescriptor: function(k) { return {writable: true} },
defineProperty: function(k, desc) { key = k; val = desc.value }
})
+
TestSet({
getOwnPropertyDescriptor: function(k) {
return this.getOwnPropertyDescriptor2(k)
@@ -162,22 +176,26 @@
defineProperty: function(k, desc) { this.defineProperty2(k, desc) },
defineProperty2: function(k, desc) { key = k; val = desc.value }
})
+
TestSet({
getOwnPropertyDescriptor: function(k) {
return {get writable() { return true }}
},
defineProperty: function(k, desc) { key = k; val = desc.value }
})
+
TestSet({
getOwnPropertyDescriptor: function(k) {
return {set: function(v) { key = k; val = v }}
}
})
+
TestSet({
getOwnPropertyDescriptor: function(k) { return null },
getPropertyDescriptor: function(k) { return {writable: true} },
defineProperty: function(k, desc) { key = k; val = desc.value }
})
+
TestSet({
getOwnPropertyDescriptor: function(k) { return null },
getPropertyDescriptor: function(k) {
@@ -185,12 +203,14 @@
},
defineProperty: function(k, desc) { key = k; val = desc.value }
})
+
TestSet({
getOwnPropertyDescriptor: function(k) { return null },
getPropertyDescriptor: function(k) {
return {set: function(v) { key = k; val = v }}
}
})
+
TestSet({
getOwnPropertyDescriptor: function(k) { return null },
getPropertyDescriptor: function(k) { return null },
@@ -279,10 +299,12 @@
TestDefine({
defineProperty: function(k, d) { key = k; desc = d; return true }
})
+
TestDefine({
defineProperty: function(k, d) { return this.defineProperty2(k, d) },
defineProperty2: function(k, d) { key = k; desc = d; return true }
})
+
TestDefine(Proxy.create({
get: function(pr, pk) {
return function(k, d) { key = k; desc = d; return true }
@@ -323,10 +345,12 @@
TestDelete({
'delete': function(k) { key = k; return k < "z" }
})
+
TestDelete({
'delete': function(k) { return this.delete2(k) },
delete2: function(k) { key = k; return k < "z" }
})
+
TestDelete(Proxy.create({
get: function(pr, pk) {
return function(k) { key = k; return k < "z" }
@@ -363,6 +387,7 @@
defineProperty: function(k, d) { this["__" + k] = d; return true },
getOwnPropertyDescriptor: function(k) { return this["__" + k] }
})
+
TestDescriptor({
defineProperty: function(k, d) { this["__" + k] = d; return true },
getOwnPropertyDescriptor: function(k) {
@@ -404,7 +429,7 @@
-// Element (in).
+// Membership test (in).
var key
function TestIn(handler) {
@@ -442,26 +467,31 @@
TestIn({
has: function(k) { key = k; return k < "z" }
})
+
TestIn({
has: function(k) { return this.has2(k) },
has2: function(k) { key = k; return k < "z" }
})
+
TestIn({
getPropertyDescriptor: function(k) {
key = k; return k < "z" ? {value: 42} : void 0
}
})
+
TestIn({
getPropertyDescriptor: function(k) { return this.getPropertyDescriptor2(k) },
getPropertyDescriptor2: function(k) {
key = k; return k < "z" ? {value: 42} : void 0
}
})
+
TestIn({
getPropertyDescriptor: function(k) {
key = k; return k < "z" ? {get value() { return 42 }} : void 0
}
})
+
TestIn({
get: undefined,
getPropertyDescriptor: function(k) {
@@ -477,7 +507,65 @@
-// Instanceof (instanceof).
+// Own Properties (Object.prototype.hasOwnProperty).
+
+var key
+function TestHasOwn(handler) {
+ var o = Proxy.create(handler)
+ assertTrue(Object.prototype.hasOwnProperty.call(o, "a"))
+ assertEquals("a", key)
+ assertTrue(Object.prototype.hasOwnProperty.call(o, 99))
+ assertEquals("99", key)
+ assertFalse(Object.prototype.hasOwnProperty.call(o, "z"))
+ assertEquals("z", key)
+}
+
+TestHasOwn({
+ hasOwn: function(k) { key = k; return k < "z" }
+})
+
+TestHasOwn({
+ hasOwn: function(k) { return this.hasOwn2(k) },
+ hasOwn2: function(k) { key = k; return k < "z" }
+})
+
+TestHasOwn({
+ getOwnPropertyDescriptor: function(k) {
+ key = k; return k < "z" ? {value: 42} : void 0
+ }
+})
+
+TestHasOwn({
+ getOwnPropertyDescriptor: function(k) {
+ return this.getOwnPropertyDescriptor2(k)
+ },
+ getOwnPropertyDescriptor2: function(k) {
+ key = k; return k < "z" ? {value: 42} : void 0
+ }
+})
+
+TestHasOwn({
+ getOwnPropertyDescriptor: function(k) {
+ key = k; return k < "z" ? {get value() { return 42 }} : void 0
+ }
+})
+
+TestHasOwn({
+ hasOwn: undefined,
+ getOwnPropertyDescriptor: function(k) {
+ key = k; return k < "z" ? {value: 42} : void 0
+ }
+})
+
+TestHasOwn(Proxy.create({
+ get: function(pr, pk) {
+ return function(k) { key = k; return k < "z" }
+ }
+}))
+
+
+
+// Instanceof (instanceof)
function TestInstanceof() {
var o = {}
@@ -514,7 +602,7 @@
-// Prototype (Object.getPrototypeOf).
+// Prototype (Object.getPrototypeOf, Object.prototype.isPrototypeOf).
function TestPrototype() {
var o = {}
@@ -528,6 +616,32 @@
assertSame(Object.getPrototypeOf(p2), o)
assertSame(Object.getPrototypeOf(p3), p2)
assertSame(Object.getPrototypeOf(p4), null)
+
+ assertTrue(Object.prototype.isPrototypeOf(o))
+ assertFalse(Object.prototype.isPrototypeOf(p1))
+ assertTrue(Object.prototype.isPrototypeOf(p2))
+ assertTrue(Object.prototype.isPrototypeOf(p3))
+ assertFalse(Object.prototype.isPrototypeOf(p4))
+ assertTrue(Object.prototype.isPrototypeOf.call(Object.prototype, o))
+ assertFalse(Object.prototype.isPrototypeOf.call(Object.prototype, p1))
+ assertTrue(Object.prototype.isPrototypeOf.call(Object.prototype, p2))
+ assertTrue(Object.prototype.isPrototypeOf.call(Object.prototype, p3))
+ assertFalse(Object.prototype.isPrototypeOf.call(Object.prototype, p4))
+ assertFalse(Object.prototype.isPrototypeOf.call(o, o))
+ assertFalse(Object.prototype.isPrototypeOf.call(o, p1))
+ assertTrue(Object.prototype.isPrototypeOf.call(o, p2))
+ assertTrue(Object.prototype.isPrototypeOf.call(o, p3))
+ assertFalse(Object.prototype.isPrototypeOf.call(o, p4))
+ assertFalse(Object.prototype.isPrototypeOf.call(p1, p1))
+ assertFalse(Object.prototype.isPrototypeOf.call(p1, o))
+ assertFalse(Object.prototype.isPrototypeOf.call(p1, p2))
+ assertFalse(Object.prototype.isPrototypeOf.call(p1, p3))
+ assertFalse(Object.prototype.isPrototypeOf.call(p1, p4))
+ assertFalse(Object.prototype.isPrototypeOf.call(p2, p1))
+ assertFalse(Object.prototype.isPrototypeOf.call(p2, p2))
+ assertTrue(Object.prototype.isPrototypeOf.call(p2, p3))
+ assertFalse(Object.prototype.isPrototypeOf.call(p2, p4))
+ assertFalse(Object.prototype.isPrototypeOf.call(p3, p2))
}
TestPrototype()
@@ -544,13 +658,16 @@
TestPropertyNames([], {
getOwnPropertyNames: function() { return [] }
})
+
TestPropertyNames(["a", "zz", " ", "0"], {
getOwnPropertyNames: function() { return ["a", "zz", " ", 0] }
})
+
TestPropertyNames(["throw", "function "], {
getOwnPropertyNames: function() { return this.getOwnPropertyNames2() },
getOwnPropertyNames2: function() { return ["throw", "function "] }
})
+
TestPropertyNames(["[object Object]"], {
get getOwnPropertyNames() {
return function() { return [{}] }
@@ -566,22 +683,27 @@
TestKeys([], {
keys: function() { return [] }
})
+
TestKeys(["a", "zz", " ", "0"], {
keys: function() { return ["a", "zz", " ", 0] }
})
+
TestKeys(["throw", "function "], {
keys: function() { return this.keys2() },
keys2: function() { return ["throw", "function "] }
})
+
TestKeys(["[object Object]"], {
get keys() {
return function() { return [{}] }
}
})
+
TestKeys(["a", "0"], {
getOwnPropertyNames: function() { return ["a", 23, "zz", "", 0] },
getOwnPropertyDescriptor: function(k) { return {enumerable: k.length == 1} }
})
+
TestKeys(["23", "zz", ""], {
getOwnPropertyNames: function() { return this.getOwnPropertyNames2() },
getOwnPropertyNames2: function() { return ["a", 23, "zz", "", 0] },
@@ -590,6 +712,7 @@
},
getOwnPropertyDescriptor2: function(k) { return {enumerable: k.length != 1} }
})
+
TestKeys(["a", "b", "c", "5"], {
get getOwnPropertyNames() {
return function() { return ["0", 4, "a", "b", "c", 5] }
@@ -598,6 +721,7 @@
return function(k) { return {enumerable: k >= "44"} }
}
})
+
TestKeys([], {
get getOwnPropertyNames() {
return function() { return ["a", "b", "c"] }
@@ -661,6 +785,7 @@
TestFix([], {
fix: function() { return {} }
})
+
TestFix(["a", "b", "c", "d", "zz"], {
fix: function() {
return {
@@ -672,12 +797,14 @@
}
}
})
+
TestFix(["a"], {
fix: function() { return this.fix2() },
fix2: function() {
return {a: {value: 4, writable: true, configurable: true, enumerable: true}}
}
})
+
TestFix(["b"], {
get fix() {
return function() {
@@ -685,3 +812,87 @@
}
}
})
+
+
+
+// String conversion (Object.prototype.toString, Object.prototype.toLocaleString)
+
+var key
+function TestToString(handler) {
+ var o = Proxy.create(handler)
+ key = ""
+ assertEquals("[object Object]", Object.prototype.toString.call(o))
+ assertEquals("", key)
+ assertEquals("my_proxy", Object.prototype.toLocaleString.call(o))
+ assertEquals("toString", key)
+}
+
+TestToString({
+ get: function(r, k) { key = k; return function() { return "my_proxy" } }
+})
+
+TestToString({
+ get: function(r, k) { return this.get2(r, k) },
+ get2: function(r, k) { key = k; return function() { return "my_proxy" } }
+})
+
+TestToString(Proxy.create({
+ get: function(pr, pk) {
+ return function(r, k) { key = k; return function() { return "my_proxy" } }
+ }
+}))
+
+
+
+// Value conversion (Object.prototype.valueOf)
+
+function TestValueOf(handler) {
+ var o = Proxy.create(handler)
+ assertSame(o, Object.prototype.valueOf.call(o))
+}
+
+TestValueOf({})
+
+
+
+// Enumerability (Object.prototype.propertyIsEnumerable)
+
+var key
+function TestIsEnumerable(handler) {
+ var o = Proxy.create(handler)
+ assertTrue(Object.prototype.propertyIsEnumerable.call(o, "a"))
+ assertEquals("a", key)
+ assertTrue(Object.prototype.propertyIsEnumerable.call(o, 2))
+ assertEquals("2", key)
+ assertFalse(Object.prototype.propertyIsEnumerable.call(o, "z"))
+ assertEquals("z", key)
+}
+
+TestIsEnumerable({
+ getOwnPropertyDescriptor: function(k) {
+ key = k; return {enumerable: k < "z", configurable: true}
+ },
+})
+
+TestIsEnumerable({
+ getOwnPropertyDescriptor: function(k) {
+ return this.getOwnPropertyDescriptor2(k)
+ },
+ getOwnPropertyDescriptor2: function(k) {
+ key = k; return {enumerable: k < "z", configurable: true}
+ },
+})
+
+TestIsEnumerable({
+ getOwnPropertyDescriptor: function(k) {
+ key = k; return {get enumerable() { return k < "z" }, configurable: true}
+ },
+})
+
+TestIsEnumerable(Proxy.create({
+ get: function(pr, pk) {
+ return function(k) {
+ key = k; return {enumerable: k < "z", configurable: true}
+ }
+ }
+}))
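The TestHasOwn cases added above depend on derived traps in the old Proxy.create API (the pre-ES6 harmony proxies this file targets): when the hasOwn trap is absent, Object.prototype.hasOwnProperty falls back to getOwnPropertyDescriptor. A hypothetical minimal handler showing that fallback, mirroring the expectations in TestHasOwn:

// No hasOwn trap: hasOwnProperty is derived from getOwnPropertyDescriptor.
var handler = {
  getOwnPropertyDescriptor: function(name) {
    return name === "answer"
        ? {value: 42, writable: true, enumerable: true, configurable: true}
        : void 0;
  }
};
var p = Proxy.create(handler);
assertTrue(Object.prototype.hasOwnProperty.call(p, "answer"));
assertFalse(Object.prototype.hasOwnProperty.call(p, "missing"));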
diff --git a/src/shell.h b/test/mjsunit/harmony/typeof.js
similarity index 66%
copy from src/shell.h
copy to test/mjsunit/harmony/typeof.js
index ca51040..acde977 100644
--- a/src/shell.h
+++ b/test/mjsunit/harmony/typeof.js
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -24,32 +24,12 @@
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-//
-// A simple interactive shell. Enable with --shell.
-#ifndef V8_SHELL_H_
-#define V8_SHELL_H_
+// Flags: --harmony-typeof
-#include "../public/debug.h"
-
-namespace v8 {
-namespace internal {
-
-// Debug event handler for interactive debugging.
-void handle_debug_event(v8::DebugEvent event,
- v8::Handle<v8::Object> exec_state,
- v8::Handle<v8::Object> event_data,
- v8::Handle<Value> data);
-
-
-class Shell {
- public:
- static void PrintObject(v8::Handle<v8::Value> obj);
- // Run the read-eval loop, executing code in the specified
- // environment.
- static void Run(v8::Handle<v8::Context> context);
-};
-
-} } // namespace v8::internal
-
-#endif // V8_SHELL_H_
+assertFalse(typeof null == 'object')
+assertFalse(typeof null === 'object')
+assertTrue(typeof null == 'null')
+assertTrue(typeof null === 'null')
+assertEquals("null", typeof null)
+assertSame("null", typeof null)
diff --git a/test/mjsunit/harmony/weakmaps.js b/test/mjsunit/harmony/weakmaps.js
new file mode 100644
index 0000000..e43f916
--- /dev/null
+++ b/test/mjsunit/harmony/weakmaps.js
@@ -0,0 +1,163 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --harmony-weakmaps --expose-gc
+
+
+// Test valid getter and setter calls
+var m = new WeakMap;
+assertDoesNotThrow(function () { m.get(new Object) });
+assertDoesNotThrow(function () { m.set(new Object) });
+assertDoesNotThrow(function () { m.has(new Object) });
+assertDoesNotThrow(function () { m.delete(new Object) });
+
+
+// Test invalid getter and setter calls
+var m = new WeakMap;
+assertThrows(function () { m.get(undefined) }, TypeError);
+assertThrows(function () { m.set(undefined, 0) }, TypeError);
+assertThrows(function () { m.get(0) }, TypeError);
+assertThrows(function () { m.set(0, 0) }, TypeError);
+assertThrows(function () { m.get('a-key') }, TypeError);
+assertThrows(function () { m.set('a-key', 0) }, TypeError);
+
+
+// Test expected mapping behavior
+var m = new WeakMap;
+function TestMapping(map, key, value) {
+ map.set(key, value);
+ assertSame(value, map.get(key));
+}
+TestMapping(m, new Object, 23);
+TestMapping(m, new Object, 'the-value');
+TestMapping(m, new Object, new Object);
+
+
+// Test expected querying behavior
+var m = new WeakMap;
+var key = new Object;
+TestMapping(m, key, 'to-be-present');
+assertTrue(m.has(key));
+assertFalse(m.has(new Object));
+TestMapping(m, key, undefined);
+assertFalse(m.has(key));
+assertFalse(m.has(new Object));
+
+
+// Test expected deletion behavior
+var m = new WeakMap;
+var key = new Object;
+TestMapping(m, key, 'to-be-deleted');
+assertTrue(m.delete(key));
+assertFalse(m.delete(key));
+assertFalse(m.delete(new Object));
+assertSame(m.get(key), undefined);
+
+
+// Test GC of map with entry
+var m = new WeakMap;
+var key = new Object;
+m.set(key, 'not-collected');
+gc();
+assertSame('not-collected', m.get(key));
+
+
+// Test GC of map with chained entries
+var m = new WeakMap;
+var head = new Object;
+for (key = head, i = 0; i < 10; i++, key = m.get(key)) {
+ m.set(key, new Object);
+}
+gc();
+var count = 0;
+for (key = head; key != undefined; key = m.get(key)) {
+ count++;
+}
+assertEquals(11, count);
+
+
+// Test property attribute [[Enumerable]]
+var m = new WeakMap;
+function props(x) {
+ var array = [];
+ for (var p in x) array.push(p);
+ return array.sort();
+}
+assertArrayEquals([], props(WeakMap));
+assertArrayEquals([], props(WeakMap.prototype));
+assertArrayEquals([], props(m));
+
+
+// Test arbitrary properties on weak maps
+var m = new WeakMap;
+function TestProperty(map, property, value) {
+ map[property] = value;
+ assertEquals(value, map[property]);
+}
+for (i = 0; i < 20; i++) {
+ TestProperty(m, i, 'val' + i);
+ TestProperty(m, 'foo' + i, 'bar' + i);
+}
+TestMapping(m, new Object, 'foobar');
+
+
+// Test direct constructor call
+var m = WeakMap();
+assertTrue(m instanceof WeakMap);
+
+
+// Test some common JavaScript idioms
+var m = new WeakMap;
+assertTrue(m instanceof WeakMap);
+assertTrue(WeakMap.prototype.set instanceof Function)
+assertTrue(WeakMap.prototype.get instanceof Function)
+assertTrue(WeakMap.prototype.has instanceof Function)
+assertTrue(WeakMap.prototype.delete instanceof Function)
+assertTrue(WeakMap.prototype.constructor === WeakMap)
+
+
+// Regression test for issue 1617: The prototype of the WeakMap constructor
+// needs to be unique (i.e. different from that of the Object constructor).
+assertFalse(WeakMap.prototype === Object.prototype);
+var o = Object.create({});
+assertFalse("get" in o);
+assertFalse("set" in o);
+assertEquals(undefined, o.get);
+assertEquals(undefined, o.set);
+var o = Object.create({}, { myValue: {
+ value: 10,
+ enumerable: false,
+ configurable: true,
+ writable: true
+}});
+assertEquals(10, o.myValue);
+
+
+// Stress Test
+// There is a proposed stress test available on the es-discuss mailing list
+// which cannot be reasonably automated. Check it out by hand if you like:
+// https://mail.mozilla.org/pipermail/es-discuss/2011-May/014096.html
diff --git a/test/mjsunit/math-floor.js b/test/mjsunit/math-floor.js
index 11f4cd7..f211ce2 100644
--- a/test/mjsunit/math-floor.js
+++ b/test/mjsunit/math-floor.js
@@ -27,10 +27,11 @@
// Flags: --max-new-space-size=256 --allow-natives-syntax
+var test_id = 0;
+
function testFloor(expect, input) {
- function test(n) {
- return Math.floor(n);
- }
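+  // Make source code different on each invocation to make sure
+  // it gets compiled and optimized each time.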
+ var test = new Function('n',
+ '"' + (test_id++) + '";return Math.floor(n)');
assertEquals(expect, test(input));
assertEquals(expect, test(input));
assertEquals(expect, test(input));
@@ -51,6 +52,17 @@
testFloor(-Infinity, -Infinity);
testFloor(NaN, NaN);
+ // Ensure that a negative zero coming from Math.floor is properly handled
+ // by other operations.
+ function ifloor(x) {
+ return 1 / Math.floor(x);
+ }
+ assertEquals(-Infinity, ifloor(-0));
+ assertEquals(-Infinity, ifloor(-0));
+ assertEquals(-Infinity, ifloor(-0));
+ %OptimizeFunctionOnNextCall(ifloor);
+ assertEquals(-Infinity, ifloor(-0));
+
testFloor(0, 0.1);
testFloor(0, 0.49999999999999994);
testFloor(0, 0.5);
@@ -129,3 +141,19 @@
for (var i = 0; i < 500; i++) {
test();
}
+
+
+// Regression test for a bug where a negative zero coming from Math.floor
+// was not properly handled by other operations.
+function floorsum(i, n) {
+ var ret = Math.floor(n);
+ while (--i > 0) {
+ ret += Math.floor(n);
+ }
+ return ret;
+}
+assertEquals(-0, floorsum(1, -0));
+%OptimizeFunctionOnNextCall(floorsum);
+// The optimized function will deopt. Run it with enough iterations to try
+// to optimize via OSR (triggering the bug).
+assertEquals(-0, floorsum(100000, -0));
diff --git a/test/mjsunit/math-round.js b/test/mjsunit/math-round.js
index 1366557..102c970 100644
--- a/test/mjsunit/math-round.js
+++ b/test/mjsunit/math-round.js
@@ -1,4 +1,4 @@
-// Copyright 2010 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -27,10 +27,12 @@
// Flags: --allow-natives-syntax
+var test_id = 0;
function testRound(expect, input) {
- function doRound(input) {
- return Math.round(input);
- }
+ // Make source code different on each invocation to make
+ // sure it gets optimized each time.
+ var doRound = new Function('input',
+ '"' + (test_id++) + '";return Math.round(input)');
assertEquals(expect, doRound(input));
assertEquals(expect, doRound(input));
assertEquals(expect, doRound(input));
@@ -44,6 +46,21 @@
testRound(-Infinity, -Infinity);
testRound(NaN, NaN);
+// Regression test for a bug where a negative zero coming from Math.round
+// was not properly handled by other operations.
+function roundsum(i, n) {
+ var ret = Math.round(n);
+ while (--i > 0) {
+ ret += Math.round(n);
+ }
+ return ret;
+}
+assertEquals(-0, roundsum(1, -0));
+%OptimizeFunctionOnNextCall(roundsum);
+// The optimized function will deopt. Run it with enough iterations to try
+// to optimize via OSR (triggering the bug).
+assertEquals(-0, roundsum(100000, -0));
+
testRound(1, 0.5);
testRound(1, 0.7);
testRound(1, 1);
diff --git a/test/mjsunit/parse-int-float.js b/test/mjsunit/parse-int-float.js
index a4f09df..2e4f648 100644
--- a/test/mjsunit/parse-int-float.js
+++ b/test/mjsunit/parse-int-float.js
@@ -100,4 +100,17 @@
assertEquals(Infinity, parseFloat(1/0), "parseFloat Infinity");
assertEquals(-Infinity, parseFloat(-1/0), "parseFloat -Infinity");
+var state;
+var throwingRadix = { valueOf: function() { state = "throwingRadix"; throw null; } };
+var throwingString = { toString: function() { state = "throwingString"; throw null; } };
+state = null;
+try { parseInt('123', throwingRadix); } catch (e) {}
+assertEquals(state, "throwingRadix");
+state = null;
+try { parseInt(throwingString, 10); } catch (e) {}
+assertEquals(state, "throwingString");
+
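+// With both arguments needing conversion, the string is converted first,
+// so its toString throws before the radix's valueOf is ever called.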
+state = null;
+try { parseInt(throwingString, throwingRadix); } catch (e) {}
+assertEquals(state, "throwingString");
diff --git a/src/shell.h b/test/mjsunit/regress/regress-103259.js
similarity index 66%
copy from src/shell.h
copy to test/mjsunit/regress/regress-103259.js
index ca51040..447073c 100644
--- a/src/shell.h
+++ b/test/mjsunit/regress/regress-103259.js
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -24,32 +24,13 @@
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-//
-// A simple interactive shell. Enable with --shell.
-#ifndef V8_SHELL_H_
-#define V8_SHELL_H_
+// Flags: --allow-natives-syntax
-#include "../public/debug.h"
-
-namespace v8 {
-namespace internal {
-
-// Debug event handler for interactive debugging.
-void handle_debug_event(v8::DebugEvent event,
- v8::Handle<v8::Object> exec_state,
- v8::Handle<v8::Object> event_data,
- v8::Handle<Value> data);
-
-
-class Shell {
- public:
- static void PrintObject(v8::Handle<v8::Value> obj);
- // Run the read-eval loop, executing code in the specified
- // environment.
- static void Run(v8::Handle<v8::Context> context);
-};
-
-} } // namespace v8::internal
-
-#endif // V8_SHELL_H_
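+// Force the array into dictionary-elements mode, then join it with a two-byte
+// separator and compare against a string built by repeated concatenation.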
+var a = [];
+a[8192] = '';
+assertTrue(%HasDictionaryElements(a));
+var uc16 = '\u0094';
+var test = uc16;
+for (var i = 0; i < 13; i++) test += test;
+assertEquals(test, a.join(uc16));
diff --git a/src/shell.h b/test/mjsunit/regress/regress-1419.js
similarity index 66%
copy from src/shell.h
copy to test/mjsunit/regress/regress-1419.js
index ca51040..98a8b76 100644
--- a/src/shell.h
+++ b/test/mjsunit/regress/regress-1419.js
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -24,32 +24,24 @@
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-//
-// A simple interactive shell. Enable with --shell.
-#ifndef V8_SHELL_H_
-#define V8_SHELL_H_
+// Test that using bind several times does not change the length of existing
+// bound functions.
-#include "../public/debug.h"
+function foo() {
+}
-namespace v8 {
-namespace internal {
+var f1 = function (x) {}.bind(foo);
+var f2 = function () {};
-// Debug event handler for interactive debugging.
-void handle_debug_event(v8::DebugEvent event,
- v8::Handle<v8::Object> exec_state,
- v8::Handle<v8::Object> event_data,
- v8::Handle<Value> data);
+assertEquals(1, f1.length);
+// The object we bind to can be any object.
+f2.bind(foo);
-class Shell {
- public:
- static void PrintObject(v8::Handle<v8::Value> obj);
- // Run the read-eval loop, executing code in the specified
- // environment.
- static void Run(v8::Handle<v8::Context> context);
-};
+assertEquals(1, f1.length);
-} } // namespace v8::internal
-
-#endif // V8_SHELL_H_
+var desc = Object.getOwnPropertyDescriptor(f1, 'length');
+assertEquals(false, desc.writable);
+assertEquals(false, desc.enumerable);
+assertEquals(false, desc.configurable);
diff --git a/src/shell.h b/test/mjsunit/regress/regress-1546.js
similarity index 66%
copy from src/shell.h
copy to test/mjsunit/regress/regress-1546.js
index ca51040..7f1fa58 100644
--- a/src/shell.h
+++ b/test/mjsunit/regress/regress-1546.js
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -24,32 +24,9 @@
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-//
-// A simple interactive shell. Enable with --shell.
-#ifndef V8_SHELL_H_
-#define V8_SHELL_H_
+// See: http://code.google.com/p/v8/issues/detail?id=1546
-#include "../public/debug.h"
-
-namespace v8 {
-namespace internal {
-
-// Debug event handler for interactive debugging.
-void handle_debug_event(v8::DebugEvent event,
- v8::Handle<v8::Object> exec_state,
- v8::Handle<v8::Object> event_data,
- v8::Handle<Value> data);
-
-
-class Shell {
- public:
- static void PrintObject(v8::Handle<v8::Value> obj);
- // Run the read-eval loop, executing code in the specified
- // environment.
- static void Run(v8::Handle<v8::Context> context);
-};
-
-} } // namespace v8::internal
-
-#endif // V8_SHELL_H_
+// Shouldn't throw. The scanner incorrectly truncated the character to a
+// one-byte char before comparing it with "*", so it ended the comment early.
+eval("/*\u822a/ */");
\ No newline at end of file
diff --git a/src/shell.h b/test/mjsunit/regress/regress-1563.js
similarity index 66%
copy from src/shell.h
copy to test/mjsunit/regress/regress-1563.js
index ca51040..c25b6c7 100644
--- a/src/shell.h
+++ b/test/mjsunit/regress/regress-1563.js
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2008 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -24,32 +24,21 @@
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-//
-// A simple interactive shell. Enable with --shell.
-#ifndef V8_SHELL_H_
-#define V8_SHELL_H_
+// Flags: --allow-natives-syntax
-#include "../public/debug.h"
+obj = new PixelArray(10);
-namespace v8 {
-namespace internal {
+// Test that undefined gets properly clamped in Crankshafted pixel array
+// assignments.
+function set_pixel(obj, arg) {
+ obj[0] = arg;
+}
-// Debug event handler for interactive debugging.
-void handle_debug_event(v8::DebugEvent event,
- v8::Handle<v8::Object> exec_state,
- v8::Handle<v8::Object> event_data,
- v8::Handle<Value> data);
+set_pixel(obj, 1.5);
+set_pixel(obj, NaN);
+%OptimizeFunctionOnNextCall(set_pixel);
+set_pixel(obj, undefined);
+set_pixel(obj, undefined);
-
-class Shell {
- public:
- static void PrintObject(v8::Handle<v8::Value> obj);
- // Run the read-eval loop, executing code in the specified
- // environment.
- static void Run(v8::Handle<v8::Context> context);
-};
-
-} } // namespace v8::internal
-
-#endif // V8_SHELL_H_
+assertEquals(0, obj[0]);
diff --git a/src/shell.h b/test/mjsunit/regress/regress-1586.js
similarity index 61%
copy from src/shell.h
copy to test/mjsunit/regress/regress-1586.js
index ca51040..b15e2f2 100644
--- a/src/shell.h
+++ b/test/mjsunit/regress/regress-1586.js
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -24,32 +24,41 @@
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-//
-// A simple interactive shell. Enable with --shell.
-#ifndef V8_SHELL_H_
-#define V8_SHELL_H_
+// Flags: --expose-debug-as debug
-#include "../public/debug.h"
+// Test debug evaluation for functions without local context, but with
+// nested catch contexts.
-namespace v8 {
-namespace internal {
-
-// Debug event handler for interactive debugging.
-void handle_debug_event(v8::DebugEvent event,
- v8::Handle<v8::Object> exec_state,
- v8::Handle<v8::Object> event_data,
- v8::Handle<Value> data);
-
-
-class Shell {
- public:
- static void PrintObject(v8::Handle<v8::Value> obj);
- // Run the read-eval loop, executing code in the specified
- // environment.
- static void Run(v8::Handle<v8::Context> context);
+function f() {
+ var i = 1; // Line 1.
+ { // Line 2.
+ try { // Line 3.
+ throw 'stuff'; // Line 4.
+ } catch (e) { // Line 5.
+ x = 2; // Line 6.
+ }
+ }
};
-} } // namespace v8::internal
+// Get the Debug object exposed from the debug context global object.
+Debug = debug.Debug
+// Set breakpoint on line 6.
+var bp = Debug.setBreakPoint(f, 6);
-#endif // V8_SHELL_H_
+function listener(event, exec_state, event_data, data) {
+ if (event == Debug.DebugEvent.Break) {
+ result = exec_state.frame().evaluate("i").value();
+ }
+};
+
+// Add the debug event listener.
+Debug.setListener(listener);
+result = -1;
+f();
+assertEquals(1, result);
+
+// Clear breakpoint.
+Debug.clearBreakPoint(bp);
+// Get rid of the debug event listener.
+Debug.setListener(null);
diff --git a/test/mjsunit/regress/regress-1620.js b/test/mjsunit/regress/regress-1620.js
new file mode 100644
index 0000000..6d72974
--- /dev/null
+++ b/test/mjsunit/regress/regress-1620.js
@@ -0,0 +1,54 @@
+// Copyright 2011 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Don't allow malformed unicode escape sequences in identifiers.
+// In strings and regexps we currently allow malformed unicode escape
+// sequences without throwing a SyntaxError. Instead "\u22gk" would
+// treat the "\u" as an identity escape, and evaluate to "u22gk".
+// Due to code sharing, we did the same in identifiers. This should
+// no longer be the case.
+// See: http://code.google.com/p/v8/issues/detail?id=1620
+
+assertThrows("var \\u\\u\\u = 42;");
+assertThrows("var \\u41 = 42;");
+assertThrows("var \\u123 = 42;");
+eval("var \\u1234 = 42;");
+assertEquals(42, eval("\u1234"));
+assertThrows("var uuu = 42; var x = \\u\\u\\u");
+
+// Regressions introduced and fixed again while fixing the above.
+
+// Handle 0xFFFD correctly (it's a valid value, and shouldn't be used
+// to mark an error).
+assertEquals(0xFFFD, "\uFFFD".charCodeAt(0));
+
+// Handle unicode escapes in regexp flags correctly.
+assertThrows("/x/g\\uim", SyntaxError);
+assertThrows("/x/g\\u2im", SyntaxError);
+assertThrows("/x/g\\u22im", SyntaxError);
+assertThrows("/x/g\\u222im", SyntaxError);
+assertThrows("/x/g\\\\u2222im", SyntaxError);
diff --git a/src/shell.h b/test/mjsunit/regress/regress-1625.js
similarity index 66%
copy from src/shell.h
copy to test/mjsunit/regress/regress-1625.js
index ca51040..a2ef8df 100644
--- a/src/shell.h
+++ b/test/mjsunit/regress/regress-1625.js
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -24,32 +24,13 @@
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-//
-// A simple interactive shell. Enable with --shell.
-#ifndef V8_SHELL_H_
-#define V8_SHELL_H_
+// Test that overwriting Array.prototype.push does not make
+// Object.defineProperties misbehave.
-#include "../public/debug.h"
-
-namespace v8 {
-namespace internal {
-
-// Debug event handler for interactive debugging.
-void handle_debug_event(v8::DebugEvent event,
- v8::Handle<v8::Object> exec_state,
- v8::Handle<v8::Object> event_data,
- v8::Handle<Value> data);
-
-
-class Shell {
- public:
- static void PrintObject(v8::Handle<v8::Value> obj);
- // Run the read-eval loop, executing code in the specified
- // environment.
- static void Run(v8::Handle<v8::Context> context);
-};
-
-} } // namespace v8::internal
-
-#endif // V8_SHELL_H_
+Array.prototype.push = 1;
+var desc = {foo: {value: 10}, bar: {get: function() {return 42; }}};
+var obj = {};
+var x = Object.defineProperties(obj, desc);
+assertEquals(x.foo, 10);
+assertEquals(x.bar, 42);
diff --git a/test/mjsunit/regress/regress-219.js b/test/mjsunit/regress/regress-219.js
index 4bfabdc..b751f0f 100644
--- a/test/mjsunit/regress/regress-219.js
+++ b/test/mjsunit/regress/regress-219.js
@@ -30,6 +30,10 @@
// We should now allow duplicates of flags.
// (See http://code.google.com/p/v8/issues/detail?id=219)
+// This has been reversed by issue 1628, since other browsers have also
+// tightened their syntax.
+// (See http://code.google.com/p/v8/issues/detail?id=1628)
+
// Base tests: we recognize the basic flags
function assertFlags(re, global, multiline, ignoreCase) {
@@ -53,124 +57,92 @@
// Double i's
-re = /a/ii;
-assertFlags(re, false, false, true)
+assertThrows("/a/ii");
-re = /a/gii;
-assertFlags(re, true, false, true)
+assertThrows("/a/gii");
-re = /a/igi;
-assertFlags(re, true, false, true)
+assertThrows("/a/igi");
-re = /a/iig;
-assertFlags(re, true, false, true)
+assertThrows("/a/iig");
-re = /a/gimi;
-assertFlags(re, true, true, true)
+assertThrows("/a/gimi");
-re = /a/giim;
-assertFlags(re, true, true, true)
+assertThrows("/a/giim");
-re = /a/igim;
-assertFlags(re, true, true, true)
+assertThrows("/a/igim");
+assertThrows(function(){ return RegExp("a", "ii"); })
-re = RegExp("a", "ii");
-assertFlags(re, false, false, true)
+assertThrows(function(){ return RegExp("a", "gii"); })
-re = RegExp("a", "gii");
-assertFlags(re, true, false, true)
+assertThrows(function(){ return RegExp("a", "igi"); })
-re = RegExp("a", "igi");
-assertFlags(re, true, false, true)
+assertThrows(function(){ return RegExp("a", "iig"); })
-re = RegExp("a", "iig");
-assertFlags(re, true, false, true)
+assertThrows(function(){ return RegExp("a", "gimi"); })
-re = RegExp("a", "gimi");
-assertFlags(re, true, true, true)
+assertThrows(function(){ return RegExp("a", "giim"); })
-re = RegExp("a", "giim");
-assertFlags(re, true, true, true)
-
-re = RegExp("a", "igim");
-assertFlags(re, true, true, true)
+assertThrows(function(){ return RegExp("a", "igim"); })
// Triple i's
-re = /a/iii;
-assertFlags(re, false, false, true)
+assertThrows("/a/iii");
-re = /a/giii;
-assertFlags(re, true, false, true)
+assertThrows("/a/giii");
-re = /a/igii;
-assertFlags(re, true, false, true)
+assertThrows("/a/igii");
-re = /a/iigi;
-assertFlags(re, true, false, true)
+assertThrows("/a/iigi");
-re = /a/iiig;
-assertFlags(re, true, false, true)
+assertThrows("/a/iiig");
-re = /a/miiig;
-assertFlags(re, true, true, true)
+assertThrows("/a/miiig");
+assertThrows(function(){ return RegExp("a", "iii"); })
-re = RegExp("a", "iii");
-assertFlags(re, false, false, true)
+assertThrows(function(){ return RegExp("a", "giii"); })
-re = RegExp("a", "giii");
-assertFlags(re, true, false, true)
+assertThrows(function(){ return RegExp("a", "igii"); })
-re = RegExp("a", "igii");
-assertFlags(re, true, false, true)
+assertThrows(function(){ return RegExp("a", "iigi"); })
-re = RegExp("a", "iigi");
-assertFlags(re, true, false, true)
+assertThrows(function(){ return RegExp("a", "iiig"); })
-re = RegExp("a", "iiig");
-assertFlags(re, true, false, true)
+assertThrows(function(){ return RegExp("a", "miiig"); })
-re = RegExp("a", "miiig");
-assertFlags(re, true, true, true)
+// Illegal flags - valid flags late in string.
-// Illegal flags - flags late in string.
+assertThrows("/a/arglebargleglopglyf");
-re = /a/arglebargleglopglyf;
-assertFlags(re, true, false, false)
+assertThrows("/a/arglebargleglopglif");
-re = /a/arglebargleglopglif;
-assertFlags(re, true, false, true)
+assertThrows("/a/arglebargleglopglym");
-re = /a/arglebargleglopglym;
-assertFlags(re, true, true, false)
-
-re = /a/arglebargleglopglim;
-assertFlags(re, true, true, true)
+assertThrows("/a/arglebargleglopglim");
// Case of flags still matters.
-re = /a/gmi;
+var re = /a/gmi;
assertFlags(re, true, true, true)
-re = /a/Gmi;
-assertFlags(re, false, true, true)
+assertThrows("/a/Gmi");
-re = /a/gMi;
-assertFlags(re, true, false, true)
+assertThrows("/a/gMi");
-re = /a/gmI;
-assertFlags(re, true, true, false)
+assertThrows("/a/gmI");
-re = /a/GMi;
-assertFlags(re, false, false, true)
+assertThrows("/a/GMi");
-re = /a/GmI;
-assertFlags(re, false, true, false)
+assertThrows("/a/GmI");
-re = /a/gMI;
-assertFlags(re, true, false, false)
+assertThrows("/a/gMI");
-re = /a/GMI;
-assertFlags(re, false, false, false)
+assertThrows("/a/GMI");
+
+// Unicode escape sequences are not interpreted.
+
+assertThrows("/a/\\u0067");
+assertThrows("/a/\\u0069");
+assertThrows("/a/\\u006d");
+assertThrows("/a/\\u006D");
diff --git a/test/mjsunit/regress/regress-87.js b/test/mjsunit/regress/regress-87.js
index 131cb58..10446fd 100644
--- a/test/mjsunit/regress/regress-87.js
+++ b/test/mjsunit/regress/regress-87.js
@@ -25,34 +25,29 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-function testFlags(flagstring, global, ignoreCase, multiline) {
- var text = "/x/"+flagstring;
- var re = eval(text);
- assertEquals(global, re.global, text + ".global");
- assertEquals(ignoreCase, re.ignoreCase, text + ".ignoreCase");
- assertEquals(multiline, re.multiline, text + ".multiline");
-}
+// In Issue 87, we allowed unicode escape sequences in RegExp flags.
+// However, according to ES5, they should not be interpreted, but passed
+// verbatim to the RegExp constructor.
+// (On top of that, the original test was bugged and never tested anything.)
+// The behavior was changed in r8969 to not interpret escapes, but this
+// test didn't test that, and only failed when making invalid flag characters
+// an error too.
-testFlags("", false, false, false);
+assertThrows("/x/\\u0067");
+assertThrows("/x/\\u0069");
+assertThrows("/x/\\u006d");
-testFlags("\u0067", true, false, false);
+assertThrows("/x/\\u0067i");
+assertThrows("/x/\\u0069m");
+assertThrows("/x/\\u006dg");
-testFlags("\u0069", false, true, false)
+assertThrows("/x/m\\u0067");
+assertThrows("/x/g\\u0069");
+assertThrows("/x/i\\u006d");
-testFlags("\u006d", false, false, true);
+assertThrows("/x/m\\u0067i");
+assertThrows("/x/g\\u0069m");
+assertThrows("/x/i\\u006dg");
-testFlags("\u0068", false, false, false);
-
-testFlags("\u0020", false, false, false);
-
-
-testFlags("\u0067g", true, false, false);
-
-testFlags("g\u0067", true, false, false);
-
-testFlags("abc\u0067efg", true, false, false);
-
-testFlags("i\u0067", true, true, false);
-
-testFlags("\u0067i", true, true, false);
-
+assertThrows("/x/\\u0068");
+assertThrows("/x/\\u0020");
diff --git a/src/shell.h b/test/mjsunit/regress/regress-91008.js
similarity index 66%
copy from src/shell.h
copy to test/mjsunit/regress/regress-91008.js
index ca51040..d7ea2df 100644
--- a/src/shell.h
+++ b/test/mjsunit/regress/regress-91008.js
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -24,32 +24,20 @@
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-//
-// A simple interactive shell. Enable with --shell.
-#ifndef V8_SHELL_H_
-#define V8_SHELL_H_
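+// Sorting a large array with holes left by deleting elements at the start,
+// middle and end should complete without errors.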
+function testsort(n) {
+ var numbers=new Array(n);
+ for (var i=0;i<n;i++) numbers[i]=i;
+ delete numbers[50];
+ delete numbers[150];
+ delete numbers[25000];
+ delete numbers[n-1];
+ delete numbers[n-2];
+ delete numbers[30];
+ delete numbers[2];
+ delete numbers[1];
+ delete numbers[0];
+ numbers.sort();
+}
-#include "../public/debug.h"
-
-namespace v8 {
-namespace internal {
-
-// Debug event handler for interactive debugging.
-void handle_debug_event(v8::DebugEvent event,
- v8::Handle<v8::Object> exec_state,
- v8::Handle<v8::Object> event_data,
- v8::Handle<Value> data);
-
-
-class Shell {
- public:
- static void PrintObject(v8::Handle<v8::Value> obj);
- // Run the read-eval loop, executing code in the specified
- // environment.
- static void Run(v8::Handle<v8::Context> context);
-};
-
-} } // namespace v8::internal
-
-#endif // V8_SHELL_H_
+testsort(100000)
diff --git a/src/shell.h b/test/mjsunit/regress/regress-91010.js
similarity index 66%
copy from src/shell.h
copy to test/mjsunit/regress/regress-91010.js
index ca51040..a077999 100644
--- a/src/shell.h
+++ b/test/mjsunit/regress/regress-91010.js
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -24,32 +24,13 @@
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-//
-// A simple interactive shell. Enable with --shell.
-#ifndef V8_SHELL_H_
-#define V8_SHELL_H_
-
-#include "../public/debug.h"
-
-namespace v8 {
-namespace internal {
-
-// Debug event handler for interactive debugging.
-void handle_debug_event(v8::DebugEvent event,
- v8::Handle<v8::Object> exec_state,
- v8::Handle<v8::Object> event_data,
- v8::Handle<Value> data);
-
-
-class Shell {
- public:
- static void PrintObject(v8::Handle<v8::Value> obj);
- // Run the read-eval loop, executing code in the specified
- // environment.
- static void Run(v8::Handle<v8::Context> context);
-};
-
-} } // namespace v8::internal
-
-#endif // V8_SHELL_H_
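+// Build a large array and unshift onto it; any exceptions are swallowed, the
+// operation just has to complete without crashing.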
+try {
+ try {
+ var N = 100*1000;
+ var array = Array(N);
+ for (var i = 0; i != N; ++i)
+ array[i] = i;
+ } catch(ex) {}
+ array.unshift('Kibo');
+} catch(ex) {}
diff --git a/src/shell.h b/test/mjsunit/regress/regress-91013.js
similarity index 66%
rename from src/shell.h
rename to test/mjsunit/regress/regress-91013.js
index ca51040..c61e2b1 100644
--- a/src/shell.h
+++ b/test/mjsunit/regress/regress-91013.js
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -24,32 +24,28 @@
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-//
-// A simple interactive shell. Enable with --shell.
-#ifndef V8_SHELL_H_
-#define V8_SHELL_H_
+// Test that the KeyedStore stub for the unboxed double array backing store
+// correctly returns the stored value as the result.
-#include "../public/debug.h"
+// Flags: --allow-natives-syntax --unbox-double-arrays
-namespace v8 {
-namespace internal {
+// Create array with unboxed double array backing store.
+var i = 100000;
+var a = new Array(i);
+for (var j = 0; j < i; j++) {
+ a[j] = 0.5;
+}
-// Debug event handler for interactive debugging.
-void handle_debug_event(v8::DebugEvent event,
- v8::Handle<v8::Object> exec_state,
- v8::Handle<v8::Object> event_data,
- v8::Handle<Value> data);
+assertTrue(%HasFastDoubleElements(a));
+// Store some smis into it.
+for (var j = 0; j < 10; j++) {
+ assertEquals(j, a[j] = j);
+}
-class Shell {
- public:
- static void PrintObject(v8::Handle<v8::Value> obj);
- // Run the read-eval loop, executing code in the specified
- // environment.
- static void Run(v8::Handle<v8::Context> context);
-};
-
-} } // namespace v8::internal
-
-#endif // V8_SHELL_H_
+// Store some heap numbers into it.
+for (var j = 0; j < 10; j++) {
+ var v = j + 0.5;
+ assertEquals(v, a[j] = v);
+}
diff --git a/src/shell.h b/test/mjsunit/regress/regress-95113.js
similarity index 66%
copy from src/shell.h
copy to test/mjsunit/regress/regress-95113.js
index ca51040..f01b270 100644
--- a/src/shell.h
+++ b/test/mjsunit/regress/regress-95113.js
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -24,32 +24,25 @@
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-//
-// A simple interactive shell. Enable with --shell.
-#ifndef V8_SHELL_H_
-#define V8_SHELL_H_
+// Flags: --allow-natives-syntax
-#include "../public/debug.h"
+function get_double_array() {
+ var a = new Array(100000);
+ var i = 0;
+ while (!%HasFastDoubleElements(a)) {
+ a[i] = i;
+ i++;
+ }
+ assertTrue(%HasFastDoubleElements(a));
+ a.length = 1;
+ a[0] = 1.5;
+ a.length = 2;
+ a[1] = 2.5;
+ assertEquals(a[0], 1.5);
+ assertEquals(a[1], 2.5);
+ assertTrue(%HasFastDoubleElements(a));
+ return a;
+}
-namespace v8 {
-namespace internal {
-
-// Debug event handler for interactive debugging.
-void handle_debug_event(v8::DebugEvent event,
- v8::Handle<v8::Object> exec_state,
- v8::Handle<v8::Object> event_data,
- v8::Handle<Value> data);
-
-
-class Shell {
- public:
- static void PrintObject(v8::Handle<v8::Value> obj);
- // Run the read-eval loop, executing code in the specified
- // environment.
- static void Run(v8::Handle<v8::Context> context);
-};
-
-} } // namespace v8::internal
-
-#endif // V8_SHELL_H_
+var a = get_double_array();
diff --git a/src/shell.h b/test/mjsunit/regress/regress-95485.js
similarity index 66%
copy from src/shell.h
copy to test/mjsunit/regress/regress-95485.js
index ca51040..2510072 100644
--- a/src/shell.h
+++ b/test/mjsunit/regress/regress-95485.js
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -24,32 +24,19 @@
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-//
-// A simple interactive shell. Enable with --shell.
-#ifndef V8_SHELL_H_
-#define V8_SHELL_H_
+function Test() {
+ var left = 'XXX';
+ var right = 'YYY';
+ for (var i = 0; i < 3; i++) {
+ var cons = left + right;
+ var substring = cons.substring(2, 4);
+ try {
+ with ({Test: i})
+ continue;
+ } finally { }
+ }
+ return substring;
+}
-#include "../public/debug.h"
-
-namespace v8 {
-namespace internal {
-
-// Debug event handler for interactive debugging.
-void handle_debug_event(v8::DebugEvent event,
- v8::Handle<v8::Object> exec_state,
- v8::Handle<v8::Object> event_data,
- v8::Handle<Value> data);
-
-
-class Shell {
- public:
- static void PrintObject(v8::Handle<v8::Value> obj);
- // Run the read-eval loop, executing code in the specified
- // environment.
- static void Run(v8::Handle<v8::Context> context);
-};
-
-} } // namespace v8::internal
-
-#endif // V8_SHELL_H_
+assertEquals('XY', Test());
diff --git a/src/shell.h b/test/mjsunit/regress/regress-96523.js
similarity index 66%
copy from src/shell.h
copy to test/mjsunit/regress/regress-96523.js
index ca51040..e611ce3 100644
--- a/src/shell.h
+++ b/test/mjsunit/regress/regress-96523.js
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -24,32 +24,14 @@
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-//
-// A simple interactive shell. Enable with --shell.
-#ifndef V8_SHELL_H_
-#define V8_SHELL_H_
-
-#include "../public/debug.h"
-
-namespace v8 {
-namespace internal {
-
-// Debug event handler for interactive debugging.
-void handle_debug_event(v8::DebugEvent event,
- v8::Handle<v8::Object> exec_state,
- v8::Handle<v8::Object> event_data,
- v8::Handle<Value> data);
-
-
-class Shell {
- public:
- static void PrintObject(v8::Handle<v8::Value> obj);
- // Run the read-eval loop, executing code in the specified
- // environment.
- static void Run(v8::Handle<v8::Context> context);
-};
-
-} } // namespace v8::internal
-
-#endif // V8_SHELL_H_
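+// A catch scope inside a function that is nested in a with scope should still
+// resolve 'x' to the function's own local, not to the with object's property.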
+with ({x:'outer'}) {
+ (function() {
+ var x = 'inner';
+ try {
+ throw 'Exception';
+ } catch (e) {
+ assertEquals('inner', x);
+ }
+ })()
+}
diff --git a/src/shell.h b/test/mjsunit/scope-calls-eval.js
similarity index 66%
copy from src/shell.h
copy to test/mjsunit/scope-calls-eval.js
index ca51040..4a941aa 100644
--- a/src/shell.h
+++ b/test/mjsunit/scope-calls-eval.js
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -24,32 +24,42 @@
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-//
-// A simple interactive shell. Enable with --shell.
-#ifndef V8_SHELL_H_
-#define V8_SHELL_H_
-
-#include "../public/debug.h"
-
-namespace v8 {
-namespace internal {
-
-// Debug event handler for interactive debugging.
-void handle_debug_event(v8::DebugEvent event,
- v8::Handle<v8::Object> exec_state,
- v8::Handle<v8::Object> event_data,
- v8::Handle<Value> data);
+// Tests if the information about eval calls in a function is
+// propagated correctly through catch and with blocks.
-class Shell {
- public:
- static void PrintObject(v8::Handle<v8::Value> obj);
- // Run the read-eval loop, executing code in the specified
- // environment.
- static void Run(v8::Handle<v8::Context> context);
-};
+function f1() {
+ var x = 5;
+ function g() {
+ try {
+ throw '';
+ } catch (e) {
+ eval('var x = 3;');
+ }
+ try {
+ throw '';
+ } catch (e) {
+ return x;
+ }
+ }
+ return g();
+}
-} } // namespace v8::internal
-#endif // V8_SHELL_H_
+function f2() {
+ var x = 5;
+ function g() {
+ with ({e:42}) {
+ eval('var x = 3;');
+ }
+ with ({e:42}) {
+ return x;
+ }
+ }
+ return g();
+}
+
+
+assertEquals(3, f1());
+assertEquals(3, f2());
diff --git a/test/mjsunit/string-slices-regexp.js b/test/mjsunit/string-slices-regexp.js
new file mode 100644
index 0000000..a8cadae
--- /dev/null
+++ b/test/mjsunit/string-slices-regexp.js
@@ -0,0 +1,81 @@
+// Copyright 2009 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Flags: --string-slices
+
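+// Exercise regexp matching, replace, exec and split on sliced strings
+// produced by slice() and substring().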
+//assertEquals('345"12345 6"1234567"123',
+// '12345""12345 6""1234567""1234'.slice(2,-1).replace(/""/g, '"'));
+
+var foo = "lsdfj sldkfj sdklfj læsdfjl sdkfjlsdk fjsdl fjsdljskdj flsj flsdkj flskd regexp: /foobar/\nldkfj sdlkfj sdkl";
+for(var i = 0; i < 1000; i++) {
+ assertTrue(/^([a-z]+): (.*)/.test(foo.substring(foo.indexOf("regexp:"))));
+ assertEquals("regexp", RegExp.$1, "RegExp.$1");
+}
+
+var re = /^(((N({)?)|(R)|(U)|(V)|(B)|(H)|(n((n)|(r)|(v)|(h))?)|(r(r)?)|(v)|(b((n)|(b))?)|(h))|((Y)|(A)|(E)|(o(u)?)|(p(u)?)|(q(u)?)|(s)|(t)|(u)|(w)|(x(u)?)|(y)|(z)|(a((T)|(A)|(L))?)|(c)|(e)|(f(u)?)|(g(u)?)|(i)|(j)|(l)|(m(u)?)))+/;
+var r = new RegExp(re)
+var str = "_Avtnennan gunzvmu pubExnY nEvln vaTxh rmuhguhaTxnY_".slice(1,-1);
+str = str + str;
+assertTrue(r.test(str));
+assertTrue(r.test(str));
+var re = /x/;
+assertEquals("a.yb", "_axyb_".slice(1,-1).replace(re, "."));
+re.compile("y");
+assertEquals("ax.b", "_axyb_".slice(1,-1).replace(re, "."));
+re.compile("(x)");
+assertEquals(["x", "x"], re.exec("_axyb_".slice(1,-1)));
+re.compile("(y)");
+assertEquals(["y", "y"], re.exec("_axyb_".slice(1,-1)));
+
+for(var i = 0; i < 100; i++) {
+ var a = "aaaaaaaaaaaaaaaaaaaaaaaabbaacabbabaaaaabbaaaabbac".slice(24,-1);
+ var b = "bbaacabbabaaaaabbaaaabba" + a;
+ // The first time, the cons string will be flattened and handled by the
+ // runtime system.
+ assertEquals(["bbaa", "a", "", "a"], /((\3|b)\2(a)){2,}/.exec(b));
+ // The second time, the cons string is already flattened and will be
+ // handled by generated code.
+ assertEquals(["bbaa", "a", "", "a"], /((\3|b)\2(a)){2,}/.exec(b));
+ assertEquals(["bbaa", "a", "", "a"], /((\3|b)\2(a)){2,}/.exec(a));
+ assertEquals(["bbaa", "a", "", "a"], /((\3|b)\2(a)){2,}/.exec(a));
+}
+
+var c = "ABCDEFGHIJKLMN".slice(2,-2);
+var d = "ABCDEF\u1234GHIJKLMN".slice(2,-2);
+var e = "ABCDEFGHIJKLMN".slice(0,-2);
+assertTrue(/^C.*L$/.test(c));
+assertTrue(/^C.*L$/.test(c));
+assertTrue(/^C.*L$/.test(d));
+assertTrue(/^C.*L$/.test(d));
+assertTrue(/^A\w{10}L$/.test(e));
+assertTrue(/^A\w{10}L$/.test(e));
+
+var e = "qui-opIasd-fghjklzx-cvbn-mqwer-tyuio-pasdf-ghIjkl-zx".slice(6,-6);
+var e_split = e.split("-");
+assertEquals(e_split[0], "Iasd");
+assertEquals(e_split[1], "fghjklzx");
+assertEquals(e_split[6], "ghI");
diff --git a/test/mjsunit/string-slices.js b/test/mjsunit/string-slices.js
new file mode 100755
index 0000000..f629ca9
--- /dev/null
+++ b/test/mjsunit/string-slices.js
@@ -0,0 +1,199 @@
+// Copyright 2008 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --string-slices --expose-externalize-string
+
+var s = 'abcdefghijklmn';
+assertEquals(s, s.substr());
+assertEquals(s, s.substr(0));
+assertEquals(s, s.substr('0'));
+assertEquals(s, s.substr(void 0));
+assertEquals(s, s.substr(null));
+assertEquals(s, s.substr(false));
+assertEquals(s, s.substr(0.9));
+assertEquals(s, s.substr({ valueOf: function() { return 0; } }));
+assertEquals(s, s.substr({ toString: function() { return '0'; } }));
+
+var s1 = s.substring(1);
+assertEquals(s1, s.substr(1));
+assertEquals(s1, s.substr('1'));
+assertEquals(s1, s.substr(true));
+assertEquals(s1, s.substr(1.1));
+assertEquals(s1, s.substr({ valueOf: function() { return 1; } }));
+assertEquals(s1, s.substr({ toString: function() { return '1'; } }));
+
+
+assertEquals(s.substring(s.length - 1), s.substr(-1));
+assertEquals(s.substring(s.length - 1), s.substr(-1.2));
+assertEquals(s.substring(s.length - 1), s.substr(-1.7));
+assertEquals(s.substring(s.length - 2), s.substr(-2));
+assertEquals(s.substring(s.length - 2), s.substr(-2.3));
+assertEquals(s.substring(s.length - 2, s.length - 1), s.substr(-2, 1));
+assertEquals(s, s.substr(-100));
+assertEquals('abc', s.substr(-100, 3));
+assertEquals(s1, s.substr(-s.length + 1));
+
+// assertEquals('', s.substr(0, void 0)); // smjs and rhino
+assertEquals('abcdefghijklmn', s.substr(0, void 0)); // kjs and v8
+assertEquals('', s.substr(0, null));
+assertEquals(s, s.substr(0, String(s.length)));
+assertEquals('a', s.substr(0, true));
+
+
+// Test substrings of different lengths and alignments.
+// First ASCII.
+var x = "ASCII";
+for (var i = 0; i < 25; i++) {
+ x += (i >> 4).toString(16) + (i & 0x0f).toString(16);
+}
+/x/.exec(x); // Try to force a flatten.
+for (var i = 5; i < 25; i++) {
+ for (var j = 0; j < 25; j++) {
+ var z = x.substring(i, i+j);
+ var w = Math.random() * 42; // Allocate something new in new-space.
+ assertEquals(j, z.length);
+ for (var k = 0; k < j; k++) {
+ assertEquals(x.charAt(i+k), z.charAt(k));
+ }
+ }
+}
+// Then two-byte strings.
+x = "UC16\u2028"; // Non-ascii char forces two-byte string.
+for (var i = 0; i < 25; i++) {
+ x += (i >> 4).toString(16) + (i & 0x0f).toString(16);
+}
+/x/.exec(x); // Try to force a flatten.
+for (var i = 5; i < 25; i++) {
+ for (var j = 0; j < 25; j++) {
+ var z = x.substring(i, i + j);
+ var w = Math.random() * 42; // Allocate something new in new-space.
+ assertEquals(j, z.length);
+ for (var k = 0; k < j; k++) {
+ assertEquals(x.charAt(i+k), z.charAt(k));
+ }
+ }
+}
+
+// Keep creating strings to force allocation failure on substring creation.
+var x = "0123456789ABCDEF";
+x += x; // 2^5
+x += x;
+x += x;
+x += x;
+x += x;
+x += x; // 2^10
+x += x;
+x += x;
+var xl = x.length;
+var cache = [];
+for (var i = 0; i < 1000; i++) {
+ var z = x.substring(i % xl);
+ assertEquals(xl - (i % xl), z.length);
+ cache.push(z);
+}
+
+
+// Same with two-byte strings
+var x = "\u2028123456789ABCDEF";
+x += x; // 2^5
+x += x;
+x += x;
+x += x;
+x += x;
+x += x; // 2^10
+x += x;
+x += x;
+var xl = x.length;
+var cache = [];
+for (var i = 0; i < 1000; i++) {
+ var z = x.substring(i % xl);
+ assertEquals(xl - (i % xl), z.length);
+ cache.push(z);
+}
+
+// Substring of substring.
+var cache = [];
+var last = x;
+var offset = 0;
+for (var i = 0; i < 64; i++) {
+ var z = last.substring(i);
+ last = z;
+ cache.push(z);
+ offset += i;
+}
+for (var i = 63; i >= 0; i--) {
+ var z = cache.pop();
+ assertTrue(/\u2028123456789ABCDEF/.test(z));
+ assertEquals(xl - offset, z.length);
+ assertEquals(x.charAt(i*(i+1)/2), z.charAt(0));
+ offset -= i;
+}
+
+// Test charAt for different strings.
+function f(s1, s2, s3, i) {
+ assertEquals(String.fromCharCode(97+i%11), s1.charAt(i%11));
+ assertEquals(String.fromCharCode(97+i%11), s2.charAt(i%11));
+ assertEquals(String.fromCharCode(98+i%11), s3.charAt(i%11));
+ assertEquals(String.fromCharCode(101), s3.charAt(3));
+}
+
+flat = "abcdefghijkl12345";
+cons = flat + flat.toUpperCase();
+slice = "abcdefghijklmn12345".slice(1, -1);
+for ( var i = 0; i < 1000; i++) {
+ f(flat, cons, slice, i);
+}
+flat = "abcdefghijkl1\u20232345";
+cons = flat + flat.toUpperCase();
+slice = "abcdefghijklmn1\u20232345".slice(1, -1);
+for ( var i = 0; i < 1000; i++) {
+ f(flat, cons, slice, i);
+}
+
+// Concatenate substrings.
+var ascii = 'abcdefghijklmnop';
+var utf = '\u03B1\u03B2\u03B3\u03B4\u03B5\u03B6\u03B7\u03B8\u03B9\u03BA\u03BB';
+assertEquals("klmno", ascii.substring(10,15) + ascii.substring(16));
+assertEquals("\u03B4\u03B7", utf.substring(3,4) + utf.substring(6,7));
+assertEquals("klp", ascii.substring(10,12) + ascii.substring(15,16));
+assertEquals("\u03B1\u03B4\u03B5", utf.substring(0,1) + utf.substring(5,3));
+assertEquals("", ascii.substring(16) + utf.substring(16));
+assertEquals("bcdef\u03B4\u03B5\u03B6\u03B7\u03B8\u03B9",
+ ascii.substring(1,6) + utf.substring(3,9));
+assertEquals("\u03B4\u03B5\u03B6\u03B7\u03B8\u03B9abcdefghijklmnop",
+ utf.substring(3,9) + ascii);
+assertEquals("\u03B2\u03B3\u03B4\u03B5\u03B4\u03B5\u03B6\u03B7",
+ utf.substring(5,1) + utf.substring(3,7));
+
+/*
+// Externalizing strings.
+var a = "123456789qwertyuiopasdfghjklzxcvbnm";
+var b = a.slice(1,-1);
+assertEquals(a.slice(1,-1), b);
+externalizeString(a);
+assertEquals(a.slice(1,-1), b);
+*/
\ No newline at end of file
diff --git a/test/mjsunit/string-split.js b/test/mjsunit/string-split.js
index 6fcf557..bc50945 100644
--- a/test/mjsunit/string-split.js
+++ b/test/mjsunit/string-split.js
@@ -116,3 +116,15 @@
assertEquals(["a", "b", "c"], "abc".split("", numberObj(3)));
assertEquals(["a", "b", "c"], "abc".split("", 4));
assertEquals(["a", "b", "c"], "abc".split("", numberObj(4)));
+
+
+var all_ascii_codes = [];
+for (var i = 0; i < 128; i++) all_ascii_codes[i] = i;
+var all_ascii_string = String.fromCharCode.apply(String, all_ascii_codes);
+
+var split_chars = all_ascii_string.split("");
+assertEquals(128, split_chars.length);
+for (var i = 0; i < 128; i++) {
+ assertEquals(1, split_chars[i].length);
+ assertEquals(i, split_chars[i].charCodeAt(0));
+}
diff --git a/test/mjsunit/substr.js b/test/mjsunit/substr.js
index f69a9c0..cffaf94 100755
--- a/test/mjsunit/substr.js
+++ b/test/mjsunit/substr.js
@@ -135,3 +135,20 @@
assertEquals(xl - (i % xl), z.length);
cache.push(z);
}
+
+// Substring of substring.
+var cache = [];
+var last = x;
+var offset = 0;
+for (var i = 0; i < 64; i++) {
+ var z = last.substring(i);
+ last = z;
+ cache.push(z);
+ offset += i;
+}
+for (var i = 63; i >= 0; i--) {
+ var z = cache.pop();
+ assertTrue(/\u2028123456789ABCDEF/.test(z));
+ assertEquals(xl - offset, z.length);
+ offset -= i;
+}
diff --git a/test/mjsunit/testcfg.py b/test/mjsunit/testcfg.py
index 7c6311b..87ed4fa 100644
--- a/test/mjsunit/testcfg.py
+++ b/test/mjsunit/testcfg.py
@@ -145,7 +145,7 @@
return result
def GetBuildRequirements(self):
- return ['sample', 'sample=shell']
+ return ['d8']
def GetTestStatus(self, sections, defs):
status_file = join(self.root, 'mjsunit.status')
diff --git a/test/mjsunit/unbox-double-arrays.js b/test/mjsunit/unbox-double-arrays.js
index 351765e..feecaec 100644
--- a/test/mjsunit/unbox-double-arrays.js
+++ b/test/mjsunit/unbox-double-arrays.js
@@ -29,12 +29,12 @@
// Flags: --allow-natives-syntax --unbox-double-arrays --expose-gc
var large_array_size = 100000;
-var approx_dict_to_elements_threshold = 75000;
+var approx_dict_to_elements_threshold = 70000;
var name = 0;
function expected_array_value(i) {
- if ((i % 2) == 0) {
+ if ((i % 50) != 0) {
return i;
} else {
return i + 0.5;
@@ -466,3 +466,62 @@
test_for_in();
test_for_in();
test_for_in();
+
+function test_get_property_names() {
+ names = %GetPropertyNames(large_array3);
+ property_name_count = 0;
+ for (x in names) { property_name_count++; };
+ assertEquals(26, property_name_count);
+}
+
+test_get_property_names();
+test_get_property_names();
+test_get_property_names();
+
+// Test element getters.
+assertEquals(expected_array_value(10), large_array3[10]);
+assertEquals(expected_array_value(-NaN), large_array3[2]);
+large_array3.__defineGetter__("2", function(){
+ return expected_array_value(10);
+});
+
+function test_getter() {
+ assertEquals(expected_array_value(10), large_array3[10]);
+ assertEquals(expected_array_value(10), large_array3[2]);
+}
+
+test_getter();
+test_getter();
+test_getter();
+%OptimizeFunctionOnNextCall(test_getter);
+test_getter();
+test_getter();
+test_getter();
+
+// Test element setters.
+large_array4 = new Array(large_array_size);
+force_to_fast_double_array(large_array4);
+
+var setter_called = false;
+
+assertEquals(expected_array_value(10), large_array4[10]);
+assertEquals(expected_array_value(2), large_array4[2]);
+large_array4.__defineSetter__("10", function(value){
+ setter_called = true;
+ });
+
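+// The accessor defined above has no getter, so reads of index 10 now return
+// undefined, while every store still triggers the setter.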
+function test_setter() {
+ setter_called = false;
+ large_array4[10] = 119;
+ assertTrue(setter_called);
+ assertEquals(undefined, large_array4[10]);
+ assertEquals(expected_array_value(2), large_array4[2]);
+}
+
+test_setter();
+test_setter();
+test_setter();
+%OptimizeFunctionOnNextCall(test_setter);
+test_setter();
+test_setter();
+test_setter();
diff --git a/test/mjsunit/with-leave.js b/test/mjsunit/with-leave.js
index ded62ca..7369faa 100644
--- a/test/mjsunit/with-leave.js
+++ b/test/mjsunit/with-leave.js
@@ -1,4 +1,4 @@
-// Copyright 2008 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -59,3 +59,162 @@
}
assertTrue(caught);
+
+// We want to test the context chain shape. In each of the test cases
+// below, the outer with is to force a runtime lookup of the identifier 'x'
+// to actually verify that the inner context has been discarded. A static
+// lookup of 'x' might accidentally succeed.
+with ({x: 'outer'}) {
+ label: {
+ with ({x: 'inner'}) {
+ break label;
+ }
+ }
+ assertEquals('outer', x);
+}
+
+
+with ({x: 'outer'}) {
+ label: {
+ with ({x: 'middle'}) {
+ with ({x: 'inner'}) {
+ break label;
+ }
+ }
+ }
+ assertEquals('outer', x);
+}
+
+
+with ({x: 'outer'}) {
+ for (var i = 0; i < 10; ++i) {
+ with ({x: 'inner' + i}) {
+ continue;
+ }
+ }
+ assertEquals('outer', x);
+}
+
+
+with ({x: 'outer'}) {
+ label: for (var i = 0; i < 10; ++i) {
+ with ({x: 'middle' + i}) {
+ for (var j = 0; j < 10; ++j) {
+ with ({x: 'inner' + j}) {
+ continue label;
+ }
+ }
+ }
+ }
+ assertEquals('outer', x);
+}
+
+
+with ({x: 'outer'}) {
+ try {
+ with ({x: 'inner'}) {
+ throw 0;
+ }
+ } catch (e) {
+ assertEquals('outer', x);
+ }
+}
+
+
+with ({x: 'outer'}) {
+ try {
+ with ({x: 'middle'}) {
+ with ({x: 'inner'}) {
+ throw 0;
+ }
+ }
+ } catch (e) {
+ assertEquals('outer', x);
+ }
+}
+
+
+try {
+ with ({x: 'outer'}) {
+ try {
+ with ({x: 'inner'}) {
+ throw 0;
+ }
+ } finally {
+ assertEquals('outer', x);
+ }
+ }
+} catch (e) {
+ if (e instanceof MjsUnitAssertionError) throw e;
+}
+
+
+try {
+ with ({x: 'outer'}) {
+ try {
+ with ({x: 'middle'}) {
+ with ({x: 'inner'}) {
+ throw 0;
+ }
+ }
+ } finally {
+ assertEquals('outer', x);
+ }
+ }
+} catch (e) {
+ if (e instanceof MjsUnitAssertionError) throw e;
+}
+
+
+// Verify that the context is correctly set in the stack frame after exiting
+// from with.
+function f() {}
+
+with ({x: 'outer'}) {
+ label: {
+ with ({x: 'inner'}) {
+ break label;
+ }
+ }
+ f(); // The context could be restored from the stack after the call.
+ assertEquals('outer', x);
+}
+
+
+with ({x: 'outer'}) {
+ for (var i = 0; i < 10; ++i) {
+ with ({x: 'inner' + i}) {
+ continue;
+ }
+ }
+ f();
+ assertEquals('outer', x);
+}
+
+
+with ({x: 'outer'}) {
+ try {
+ with ({x: 'inner'}) {
+ throw 0;
+ }
+ } catch (e) {
+ f();
+ assertEquals('outer', x);
+ }
+}
+
+
+try {
+ with ({x: 'outer'}) {
+ try {
+ with ({x: 'inner'}) {
+ throw 0;
+ }
+ } finally {
+ f();
+ assertEquals('outer', x);
+ }
+ }
+} catch (e) {
+ if (e instanceof MjsUnitAssertionError) throw e;
+}
diff --git a/test/mozilla/mozilla.status b/test/mozilla/mozilla.status
index c62d770..f6d6925 100644
--- a/test/mozilla/mozilla.status
+++ b/test/mozilla/mozilla.status
@@ -246,9 +246,8 @@
ecma_3/Number/15.7.4.6-1: FAIL_OK
#:=== RegExp:===
-# To be compatible with JSC we silently ignore flags that do not make
-# sense. These tests expects us to throw exceptions.
-ecma_3/RegExp/regress-57631: FAIL_OK
+# We don't match the syntax error message of Mozilla for invalid
+# RegExp flags.
ecma_3/RegExp/15.10.4.1-6: FAIL_OK
# PCRE doesn't allow subpattern nesting deeper than 200, this tests
diff --git a/test/mozilla/testcfg.py b/test/mozilla/testcfg.py
index 3728f79..587781d 100644
--- a/test/mozilla/testcfg.py
+++ b/test/mozilla/testcfg.py
@@ -125,7 +125,7 @@
return tests
def GetBuildRequirements(self):
- return ['sample', 'sample=shell']
+ return ['d8']
def GetTestStatus(self, sections, defs):
status_file = join(self.root, 'mozilla.status')
diff --git a/test/preparser/testcfg.py b/test/preparser/testcfg.py
index 39b62c3..d900e26 100644
--- a/test/preparser/testcfg.py
+++ b/test/preparser/testcfg.py
@@ -27,7 +27,7 @@
import test
import os
-from os.path import join, dirname, exists
+from os.path import join, dirname, exists, isfile
import platform
import utils
import re
@@ -122,10 +122,15 @@
{"Test": Test, "Template": Template}, {})
def ListTests(self, current_path, path, mode, variant_flags):
- executable = join('obj', 'preparser', mode, 'preparser')
+ executable = 'preparser'
if utils.IsWindows():
executable += '.exe'
executable = join(self.context.buildspace, executable)
+ if not isfile(executable):
+ executable = join('obj', 'preparser', mode, 'preparser')
+ if utils.IsWindows():
+ executable += '.exe'
+ executable = join(self.context.buildspace, executable)
expectations = self.GetExpectations()
result = []
# Find all .js files in tests/preparser directory.
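
The hunk above makes the preparser test runner look for the binary at the top of the build directory first (the gyp/make layout, e.g. out/ia32.release/preparser) and only fall back to the old scons obj/ path if that file is missing. A minimal sketch of that lookup order, with the helper name and the exe-suffix handling as assumptions rather than the actual testcfg code:

    from os.path import join, isfile

    def find_preparser(buildspace, mode, exe_suffix=''):
        # Preferred: the gyp/make build drops the binary directly in buildspace.
        gyp_binary = join(buildspace, 'preparser' + exe_suffix)
        if isfile(gyp_binary):
            return gyp_binary
        # Fallback: the legacy scons layout under obj/preparser/<mode>/.
        return join(buildspace, 'obj', 'preparser', mode, 'preparser' + exe_suffix)
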
diff --git a/test/sputnik/sputnik.status b/test/sputnik/sputnik.status
index 82d8a61..868509d 100644
--- a/test/sputnik/sputnik.status
+++ b/test/sputnik/sputnik.status
@@ -56,15 +56,6 @@
# errors, for compatibility.
S15.10.2.11_A1_T2: FAIL
S15.10.2.11_A1_T3: FAIL
-S15.10.4.1_A5_T1: FAIL
-S15.10.4.1_A5_T2: FAIL
-S15.10.4.1_A5_T3: FAIL
-S15.10.4.1_A5_T4: FAIL
-S15.10.4.1_A5_T5: FAIL
-S15.10.4.1_A5_T6: FAIL
-S15.10.4.1_A5_T7: FAIL
-S15.10.4.1_A5_T8: FAIL
-S15.10.4.1_A5_T9: FAIL
# We are more lenient in which string character escapes we allow than
# the spec (7.8.4 p. 19) wants us to be. This is for compatibility.
@@ -99,6 +90,13 @@
S7.8.4_A4.3_T5: FAIL_OK
S7.8.4_A7.2_T5: FAIL_OK
+# Sputnik expects unicode escape sequences in RegExp flags to be interpreted.
+# The specification requires them to be passed uninterpreted to the RegExp
+# constructor. We now implement that.
+S7.8.5_A3.1_T7: FAIL_OK
+S7.8.5_A3.1_T8: FAIL_OK
+S7.8.5_A3.1_T9: FAIL_OK
+
# We allow some keywords to be used as identifiers.
S7.5.3_A1.15: FAIL_OK
S7.5.3_A1.18: FAIL_OK
diff --git a/test/sputnik/testcfg.py b/test/sputnik/testcfg.py
index c9eb4f2..1032c13 100644
--- a/test/sputnik/testcfg.py
+++ b/test/sputnik/testcfg.py
@@ -101,7 +101,7 @@
return result
def GetBuildRequirements(self):
- return ['sample', 'sample=shell']
+ return ['d8']
def GetTestStatus(self, sections, defs):
status_file = join(self.root, 'sputnik.status')
diff --git a/test/test262/testcfg.py b/test/test262/testcfg.py
index aa1212e..9482046 100644
--- a/test/test262/testcfg.py
+++ b/test/test262/testcfg.py
@@ -111,7 +111,7 @@
return tests
def GetBuildRequirements(self):
- return ['sample', 'sample=shell']
+ return ['d8']
def GetTestStatus(self, sections, defs):
status_file = join(self.root, 'test262.status')
diff --git a/tools/grokdump.py b/tools/grokdump.py
index 468e7cc..6bc49c6 100755
--- a/tools/grokdump.py
+++ b/tools/grokdump.py
@@ -345,7 +345,7 @@
for r in self.memory_list64.ranges:
if r.start <= address < r.start + r.size:
return self.memory_list64.base_rva + offset + address - r.start
- offset += r.size
+ offset += r.size
if self.memory_list is not None:
for r in self.memory_list.ranges:
if r.start <= address < r.start + r.memory.data_size:
@@ -379,64 +379,65 @@
# };
# static P p;
INSTANCE_TYPES = {
-64: "SYMBOL_TYPE",
-68: "ASCII_SYMBOL_TYPE",
-65: "CONS_SYMBOL_TYPE",
-69: "CONS_ASCII_SYMBOL_TYPE",
-66: "EXTERNAL_SYMBOL_TYPE",
-74: "EXTERNAL_SYMBOL_WITH_ASCII_DATA_TYPE",
-70: "EXTERNAL_ASCII_SYMBOL_TYPE",
-0: "STRING_TYPE",
-4: "ASCII_STRING_TYPE",
-1: "CONS_STRING_TYPE",
-5: "CONS_ASCII_STRING_TYPE",
-2: "EXTERNAL_STRING_TYPE",
-10: "EXTERNAL_STRING_WITH_ASCII_DATA_TYPE",
-6: "EXTERNAL_ASCII_STRING_TYPE",
-6: "PRIVATE_EXTERNAL_ASCII_STRING_TYPE",
-128: "MAP_TYPE",
-129: "CODE_TYPE",
-130: "ODDBALL_TYPE",
-131: "JS_GLOBAL_PROPERTY_CELL_TYPE",
-132: "HEAP_NUMBER_TYPE",
-133: "FOREIGN_TYPE",
-134: "BYTE_ARRAY_TYPE",
-135: "EXTERNAL_BYTE_ARRAY_TYPE",
-136: "EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE",
-137: "EXTERNAL_SHORT_ARRAY_TYPE",
-138: "EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE",
-139: "EXTERNAL_INT_ARRAY_TYPE",
-140: "EXTERNAL_UNSIGNED_INT_ARRAY_TYPE",
-141: "EXTERNAL_FLOAT_ARRAY_TYPE",
-143: "EXTERNAL_PIXEL_ARRAY_TYPE",
-145: "FILLER_TYPE",
-146: "ACCESSOR_INFO_TYPE",
-147: "ACCESS_CHECK_INFO_TYPE",
-148: "INTERCEPTOR_INFO_TYPE",
-149: "CALL_HANDLER_INFO_TYPE",
-150: "FUNCTION_TEMPLATE_INFO_TYPE",
-151: "OBJECT_TEMPLATE_INFO_TYPE",
-152: "SIGNATURE_INFO_TYPE",
-153: "TYPE_SWITCH_INFO_TYPE",
-154: "SCRIPT_TYPE",
-155: "CODE_CACHE_TYPE",
-156: "POLYMORPHIC_CODE_CACHE_TYPE",
-159: "FIXED_ARRAY_TYPE",
-160: "SHARED_FUNCTION_INFO_TYPE",
-161: "JS_MESSAGE_OBJECT_TYPE",
-162: "JS_VALUE_TYPE",
-163: "JS_OBJECT_TYPE",
-164: "JS_CONTEXT_EXTENSION_OBJECT_TYPE",
-165: "JS_GLOBAL_OBJECT_TYPE",
-166: "JS_BUILTINS_OBJECT_TYPE",
-167: "JS_GLOBAL_PROXY_TYPE",
-168: "JS_ARRAY_TYPE",
-169: "JS_PROXY_TYPE",
-170: "JS_REGEXP_TYPE",
-171: "JS_FUNCTION_TYPE",
-172: "JS_FUNCTION_PROXY_TYPE",
-157: "DEBUG_INFO_TYPE",
-158: "BREAK_POINT_INFO_TYPE",
+ 64: "SYMBOL_TYPE",
+ 68: "ASCII_SYMBOL_TYPE",
+ 65: "CONS_SYMBOL_TYPE",
+ 69: "CONS_ASCII_SYMBOL_TYPE",
+ 66: "EXTERNAL_SYMBOL_TYPE",
+ 74: "EXTERNAL_SYMBOL_WITH_ASCII_DATA_TYPE",
+ 70: "EXTERNAL_ASCII_SYMBOL_TYPE",
+ 0: "STRING_TYPE",
+ 4: "ASCII_STRING_TYPE",
+ 1: "CONS_STRING_TYPE",
+ 5: "CONS_ASCII_STRING_TYPE",
+ 2: "EXTERNAL_STRING_TYPE",
+ 10: "EXTERNAL_STRING_WITH_ASCII_DATA_TYPE",
+ 6: "EXTERNAL_ASCII_STRING_TYPE",
+ 6: "PRIVATE_EXTERNAL_ASCII_STRING_TYPE",
+ 128: "MAP_TYPE",
+ 129: "CODE_TYPE",
+ 130: "ODDBALL_TYPE",
+ 131: "JS_GLOBAL_PROPERTY_CELL_TYPE",
+ 132: "HEAP_NUMBER_TYPE",
+ 133: "FOREIGN_TYPE",
+ 134: "BYTE_ARRAY_TYPE",
+ 135: "EXTERNAL_BYTE_ARRAY_TYPE",
+ 136: "EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE",
+ 137: "EXTERNAL_SHORT_ARRAY_TYPE",
+ 138: "EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE",
+ 139: "EXTERNAL_INT_ARRAY_TYPE",
+ 140: "EXTERNAL_UNSIGNED_INT_ARRAY_TYPE",
+ 141: "EXTERNAL_FLOAT_ARRAY_TYPE",
+ 143: "EXTERNAL_PIXEL_ARRAY_TYPE",
+ 145: "FILLER_TYPE",
+ 146: "ACCESSOR_INFO_TYPE",
+ 147: "ACCESS_CHECK_INFO_TYPE",
+ 148: "INTERCEPTOR_INFO_TYPE",
+ 149: "CALL_HANDLER_INFO_TYPE",
+ 150: "FUNCTION_TEMPLATE_INFO_TYPE",
+ 151: "OBJECT_TEMPLATE_INFO_TYPE",
+ 152: "SIGNATURE_INFO_TYPE",
+ 153: "TYPE_SWITCH_INFO_TYPE",
+ 154: "SCRIPT_TYPE",
+ 155: "CODE_CACHE_TYPE",
+ 156: "POLYMORPHIC_CODE_CACHE_TYPE",
+ 159: "FIXED_ARRAY_TYPE",
+ 160: "SHARED_FUNCTION_INFO_TYPE",
+ 161: "JS_MESSAGE_OBJECT_TYPE",
+ 162: "JS_VALUE_TYPE",
+ 163: "JS_OBJECT_TYPE",
+ 164: "JS_CONTEXT_EXTENSION_OBJECT_TYPE",
+ 165: "JS_GLOBAL_OBJECT_TYPE",
+ 166: "JS_BUILTINS_OBJECT_TYPE",
+ 167: "JS_GLOBAL_PROXY_TYPE",
+ 168: "JS_ARRAY_TYPE",
+ 169: "JS_PROXY_TYPE",
+ 170: "JS_WEAK_MAP_TYPE",
+ 171: "JS_REGEXP_TYPE",
+ 172: "JS_FUNCTION_TYPE",
+ 173: "JS_FUNCTION_PROXY_TYPE",
+ 157: "DEBUG_INFO_TYPE",
+ 158: "BREAK_POINT_INFO_TYPE",
}
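
Besides the re-indentation, the table above adds JS_WEAK_MAP_TYPE as 170 and shifts JS_REGEXP_TYPE, JS_FUNCTION_TYPE and JS_FUNCTION_PROXY_TYPE up by one, so the same numeric instance type decodes to a different name across V8 versions. A small illustration using values copied from the old and new tables (plain dict lookups, not grokdump's actual decoding path):

    # Why the table must match the V8 version that produced the dump
    # (see also the note added to tools/oom_dump/README below).
    OLD_INSTANCE_TYPES = {170: "JS_REGEXP_TYPE", 171: "JS_FUNCTION_TYPE"}
    NEW_INSTANCE_TYPES = {170: "JS_WEAK_MAP_TYPE", 171: "JS_REGEXP_TYPE"}

    instance_type = 170  # value read out of a heap object header in a minidump
    print OLD_INSTANCE_TYPES.get(instance_type)  # JS_REGEXP_TYPE
    print NEW_INSTANCE_TYPES.get(instance_type)  # JS_WEAK_MAP_TYPE
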
diff --git a/tools/gyp/v8.gyp b/tools/gyp/v8.gyp
index 2650483..2da8213 100644
--- a/tools/gyp/v8.gyp
+++ b/tools/gyp/v8.gyp
@@ -26,211 +26,18 @@
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
{
- 'variables': {
- 'use_system_v8%': 0,
- 'msvs_use_common_release': 0,
- 'gcc_version%': 'unknown',
- 'v8_compress_startup_data%': 'off',
- 'v8_target_arch%': '<(target_arch)',
-
- # Setting 'v8_can_use_unaligned_accesses' to 'true' will allow the code
- # generated by V8 to do unaligned memory access, and setting it to 'false'
- # will ensure that the generated code will always do aligned memory
- # accesses. The default value of 'default' will try to determine the correct
- # setting. Note that for Intel architectures (ia32 and x64) unaligned memory
- # access is allowed for all CPUs.
- 'v8_can_use_unaligned_accesses%': 'default',
-
- # Setting 'v8_can_use_vfp_instructions' to 'true' will enable use of ARM VFP
- # instructions in the V8 generated code. VFP instructions will be enabled
- # both for the snapshot and for the ARM target. Leaving the default value
- # of 'false' will avoid VFP instructions in the snapshot and use CPU feature
- # probing when running on the target.
- 'v8_can_use_vfp_instructions%': 'false',
-
- # Setting v8_use_arm_eabi_hardfloat to true will turn on V8 support for ARM
- # EABI calling convention where double arguments are passed in VFP
- # registers. Note that the GCC flag '-mfloat-abi=hard' should be used as
- # well when compiling for the ARM target.
- 'v8_use_arm_eabi_hardfloat%': 'false',
-
- 'v8_use_snapshot%': 'true',
- 'host_os%': '<(OS)',
- 'v8_use_liveobjectlist%': 'false',
- },
+ 'includes': ['../../build/common.gypi'],
'conditions': [
['use_system_v8==0', {
- 'target_defaults': {
- 'defines': [
- 'ENABLE_DEBUGGER_SUPPORT',
- ],
- 'conditions': [
- ['OS!="mac"', {
- # TODO(mark): The OS!="mac" conditional is temporary. It can be
- # removed once the Mac Chromium build stops setting target_arch to
- # ia32 and instead sets it to mac. Other checks in this file for
- # OS=="mac" can be removed at that time as well. This can be cleaned
- # up once http://crbug.com/44205 is fixed.
- 'conditions': [
- ['v8_target_arch=="arm"', {
- 'defines': [
- 'V8_TARGET_ARCH_ARM',
- ],
- 'conditions': [
- [ 'v8_can_use_unaligned_accesses=="true"', {
- 'defines': [
- 'CAN_USE_UNALIGNED_ACCESSES=1',
- ],
- }],
- [ 'v8_can_use_unaligned_accesses=="false"', {
- 'defines': [
- 'CAN_USE_UNALIGNED_ACCESSES=0',
- ],
- }],
- [ 'v8_can_use_vfp_instructions=="true"', {
- 'defines': [
- 'CAN_USE_VFP_INSTRUCTIONS',
- ],
- }],
- [ 'v8_use_arm_eabi_hardfloat=="true"', {
- 'defines': [
- 'USE_EABI_HARDFLOAT=1',
- 'CAN_USE_VFP_INSTRUCTIONS',
- ],
- }],
- ],
- }],
- ['v8_target_arch=="ia32"', {
- 'defines': [
- 'V8_TARGET_ARCH_IA32',
- ],
- }],
- ['v8_target_arch=="x64"', {
- 'defines': [
- 'V8_TARGET_ARCH_X64',
- ],
- }],
- ],
- }],
- ['v8_use_liveobjectlist=="true"', {
- 'defines': [
- 'ENABLE_DEBUGGER_SUPPORT',
- 'INSPECTOR',
- 'OBJECT_PRINT',
- 'LIVEOBJECTLIST',
- ],
- }],
- ['v8_compress_startup_data=="bz2"', {
- 'defines': [
- 'COMPRESS_STARTUP_DATA_BZ2',
- ],
- }],
- ],
- 'configurations': {
- 'Debug': {
- 'defines': [
- 'DEBUG',
- '_DEBUG',
- 'ENABLE_DISASSEMBLER',
- 'V8_ENABLE_CHECKS',
- 'OBJECT_PRINT',
- ],
- 'msvs_settings': {
- 'VCCLCompilerTool': {
- 'Optimization': '0',
-
- 'conditions': [
- ['OS=="win" and component=="shared_library"', {
- 'RuntimeLibrary': '3', # /MDd
- }, {
- 'RuntimeLibrary': '1', # /MTd
- }],
- ],
- },
- 'VCLinkerTool': {
- 'LinkIncremental': '2',
- },
- },
- 'conditions': [
- ['OS=="freebsd" or OS=="openbsd"', {
- 'cflags': [ '-I/usr/local/include' ],
- }],
- ],
- },
- 'Release': {
- 'conditions': [
- ['OS=="linux" or OS=="freebsd" or OS=="openbsd"', {
- 'cflags!': [
- '-O2',
- '-Os',
- ],
- 'cflags': [
- '-fomit-frame-pointer',
- '-O3',
- ],
- 'conditions': [
- [ 'gcc_version==44', {
- 'cflags': [
- # Avoid crashes with gcc 4.4 in the v8 test suite.
- '-fno-tree-vrp',
- ],
- }],
- ],
- }],
- ['OS=="freebsd" or OS=="openbsd"', {
- 'cflags': [ '-I/usr/local/include' ],
- }],
- ['OS=="mac"', {
- 'xcode_settings': {
- 'GCC_OPTIMIZATION_LEVEL': '3', # -O3
-
- # -fstrict-aliasing. Mainline gcc
- # enables this at -O2 and above,
- # but Apple gcc does not unless it
- # is specified explicitly.
- 'GCC_STRICT_ALIASING': 'YES',
- },
- }],
- ['OS=="win"', {
- 'msvs_configuration_attributes': {
- 'OutputDirectory': '$(SolutionDir)$(ConfigurationName)',
- 'IntermediateDirectory': '$(OutDir)\\obj\\$(ProjectName)',
- 'CharacterSet': '1',
- },
- 'msvs_settings': {
- 'VCCLCompilerTool': {
- 'Optimization': '2',
- 'InlineFunctionExpansion': '2',
- 'EnableIntrinsicFunctions': 'true',
- 'FavorSizeOrSpeed': '0',
- 'OmitFramePointers': 'true',
- 'StringPooling': 'true',
-
- 'conditions': [
- ['OS=="win" and component=="shared_library"', {
- 'RuntimeLibrary': '2', #/MD
- }, {
- 'RuntimeLibrary': '0', #/MT
- }],
- ],
- },
- 'VCLinkerTool': {
- 'LinkIncremental': '1',
- 'OptimizeReferences': '2',
- 'OptimizeForWindows98': '1',
- 'EnableCOMDATFolding': '2',
- },
- },
- }],
- ],
- },
- },
- },
'targets': [
{
'target_name': 'v8',
- 'toolsets': ['host', 'target'],
'conditions': [
+ ['want_separate_host_toolset==1', {
+ 'toolsets': ['host', 'target'],
+ }, {
+ 'toolsets': ['target'],
+ }],
['v8_use_snapshot=="true"', {
'dependencies': ['v8_snapshot'],
},
@@ -272,23 +79,23 @@
],
'direct_dependent_settings': {
'include_dirs': [
- '../../include',
+ '../../include',
],
},
},
{
'target_name': 'v8_snapshot',
'type': '<(library)',
- 'toolsets': ['host', 'target'],
'conditions': [
+ ['want_separate_host_toolset==1', {
+ 'toolsets': ['host', 'target'],
+ 'dependencies': ['mksnapshot#host', 'js2c#host'],
+ }, {
+ 'toolsets': ['target'],
+ 'dependencies': ['mksnapshot', 'js2c'],
+ }],
['component=="shared_library"', {
'conditions': [
- # The ARM assembler assumes the host is 32 bits, so force building
- # 32-bit host tools.
- ['v8_target_arch=="arm" and host_arch=="x64" and _toolset=="host"', {
- 'cflags': ['-m32'],
- 'ldflags': ['-m32'],
- }],
['OS=="win"', {
'defines': [
'BUILDING_V8_SHARED',
@@ -312,8 +119,6 @@
}],
],
'dependencies': [
- 'mksnapshot#host',
- 'js2c#host',
'v8_base',
],
'include_dirs+': [
@@ -334,7 +139,10 @@
'<(INTERMEDIATE_DIR)/snapshot.cc',
],
'variables': {
- 'mksnapshot_flags': [],
+ 'mksnapshot_flags': [
+ '--log-snapshot-positions',
+ '--logfile', '<(INTERMEDIATE_DIR)/snapshot.log',
+ ],
},
'conditions': [
['v8_target_arch=="arm"', {
@@ -380,9 +188,7 @@
{
'target_name': 'v8_nosnapshot',
'type': '<(library)',
- 'toolsets': ['host', 'target'],
'dependencies': [
- 'js2c#host',
'v8_base',
],
'include_dirs+': [
@@ -394,11 +200,12 @@
'../../src/snapshot-empty.cc',
],
'conditions': [
- # The ARM assembler assumes the host is 32 bits, so force building
- # 32-bit host tools.
- ['v8_target_arch=="arm" and host_arch=="x64" and _toolset=="host"', {
- 'cflags': ['-m32'],
- 'ldflags': ['-m32'],
+ ['want_separate_host_toolset==1', {
+ 'toolsets': ['host', 'target'],
+ 'dependencies': ['js2c#host'],
+ }, {
+ 'toolsets': ['target'],
+ 'dependencies': ['js2c'],
}],
['component=="shared_library"', {
'defines': [
@@ -411,7 +218,6 @@
{
'target_name': 'v8_base',
'type': '<(library)',
- 'toolsets': ['host', 'target'],
'include_dirs+': [
'../../src',
],
@@ -451,7 +257,6 @@
'../../src/code-stubs.cc',
'../../src/code-stubs.h',
'../../src/code.h',
- '../../src/codegen-inl.h',
'../../src/codegen.cc',
'../../src/codegen.h',
'../../src/compilation-cache.cc',
@@ -488,6 +293,8 @@
'../../src/diy-fp.cc',
'../../src/diy-fp.h',
'../../src/double.h',
+ '../../src/elements.cc',
+ '../../src/elements.h',
'../../src/execution.cc',
'../../src/execution.h',
'../../src/factory.cc',
@@ -610,7 +417,6 @@
'../../src/scopes.h',
'../../src/serialize.cc',
'../../src/serialize.h',
- '../../src/shell.h',
'../../src/small-pointer-list.h',
'../../src/smart-pointer.h',
'../../src/snapshot-common.cc',
@@ -666,10 +472,12 @@
'../../src/extensions/gc-extension.h',
],
'conditions': [
+ ['want_separate_host_toolset==1', {
+ 'toolsets': ['host', 'target'],
+ }, {
+ 'toolsets': ['target'],
+ }],
['v8_target_arch=="arm"', {
- 'include_dirs+': [
- '../../src/arm',
- ],
'sources': [
'../../src/arm/assembler-arm-inl.h',
'../../src/arm/assembler-arm.cc',
@@ -702,19 +510,8 @@
'../../src/arm/simulator-arm.cc',
'../../src/arm/stub-cache-arm.cc',
],
- 'conditions': [
- # The ARM assembler assumes the host is 32 bits,
- # so force building 32-bit host tools.
- ['host_arch=="x64" and _toolset=="host"', {
- 'cflags': ['-m32'],
- 'ldflags': ['-m32'],
- }]
- ]
}],
['v8_target_arch=="ia32" or v8_target_arch=="mac" or OS=="mac"', {
- 'include_dirs+': [
- '../../src/ia32',
- ],
'sources': [
'../../src/ia32/assembler-ia32-inl.h',
'../../src/ia32/assembler-ia32.cc',
@@ -746,9 +543,6 @@
],
}],
['v8_target_arch=="x64" or v8_target_arch=="mac" or OS=="mac"', {
- 'include_dirs+': [
- '../../src/x64',
- ],
'sources': [
'../../src/x64/assembler-x64-inl.h',
'../../src/x64/assembler-x64.cc',
@@ -781,10 +575,6 @@
}],
['OS=="linux"', {
'link_settings': {
- 'libraries': [
- # Needed for clock_gettime() used by src/platform-linux.cc.
- '-lrt',
- ],
'conditions': [
['v8_compress_startup_data=="bz2"', {
'libraries': [
@@ -856,7 +646,7 @@
],
'msvs_disabled_warnings': [4351, 4355, 4800],
'link_settings': {
- 'libraries': [ '-lwinmm.lib' ],
+ 'libraries': [ '-lwinmm.lib', '-lws2_32.lib' ],
},
}],
['component=="shared_library"', {
@@ -870,7 +660,13 @@
{
'target_name': 'js2c',
'type': 'none',
- 'toolsets': ['host'],
+ 'conditions': [
+ ['want_separate_host_toolset==1', {
+ 'toolsets': ['host'],
+ }, {
+ 'toolsets': ['target'],
+ }],
+ ],
'variables': {
'library_files': [
'../../src/runtime.js',
@@ -890,8 +686,9 @@
'../../src/macros.py',
],
'experimental_library_files': [
- '../../src/proxy.js',
'../../src/macros.py',
+ '../../src/proxy.js',
+ '../../src/weakmap.js',
],
},
'actions': [
@@ -936,7 +733,6 @@
{
'target_name': 'mksnapshot',
'type': 'executable',
- 'toolsets': ['host'],
'dependencies': [
'v8_nosnapshot',
],
@@ -947,22 +743,21 @@
'../../src/mksnapshot.cc',
],
'conditions': [
- # The ARM assembler assumes the host is 32 bits, so force building
- # 32-bit host tools.
- ['v8_target_arch=="arm" and host_arch=="x64" and _toolset=="host"', {
- 'cflags': ['-m32'],
- 'ldflags': ['-m32'],
+ ['want_separate_host_toolset==1', {
+ 'toolsets': ['host'],
+ }, {
+ 'toolsets': ['target'],
}],
['v8_compress_startup_data=="bz2"', {
'libraries': [
'-lbz2',
- ]}],
- ]
+ ]}
+ ],
+ ],
},
{
'target_name': 'v8_shell',
'type': 'executable',
- 'toolsets': ['host'],
'dependencies': [
'v8'
],
@@ -970,29 +765,56 @@
'../../samples/shell.cc',
],
'conditions': [
+ ['want_separate_host_toolset==1', {
+ 'toolsets': ['host'],
+ }, {
+ 'toolsets': ['target'],
+ }],
['OS=="win"', {
# This could be gotten by not setting chromium_code, if that's OK.
'defines': ['_CRT_SECURE_NO_WARNINGS'],
}],
- # The ARM assembler assumes the host is 32 bits, so force building
- # 32-bit host tools.
- ['v8_target_arch=="arm" and host_arch=="x64" and _toolset=="host"', {
- 'cflags': ['-m32'],
- 'ldflags': ['-m32'],
- }],
['v8_compress_startup_data=="bz2"', {
'libraries': [
'-lbz2',
]}],
],
},
+ {
+ 'target_name': 'preparser_lib',
+ 'type': '<(library)',
+ 'include_dirs+': [
+ '../../src',
+ ],
+ 'sources': [
+ '../../src/allocation.cc',
+ '../../src/bignum.cc',
+ '../../src/cached-powers.cc',
+ '../../src/conversions.cc',
+ '../../src/hashmap.cc',
+ '../../src/preparse-data.cc',
+ '../../src/preparser.cc',
+ '../../src/preparser-api.cc',
+ '../../src/scanner-base.cc',
+ '../../src/strtod.cc',
+ '../../src/token.cc',
+ '../../src/unicode.cc',
+ '../../src/utils.cc',
+ ],
+ },
],
}, { # use_system_v8 != 0
'targets': [
{
'target_name': 'v8',
'type': 'settings',
- 'toolsets': ['host', 'target'],
+ 'conditions': [
+ ['want_separate_host_toolset==1', {
+ 'toolsets': ['host', 'target'],
+ }, {
+ 'toolsets': ['target'],
+ }],
+ ],
'link_settings': {
'libraries': [
'-lv8',
@@ -1002,7 +824,13 @@
{
'target_name': 'v8_shell',
'type': 'none',
- 'toolsets': ['host'],
+ 'conditions': [
+ ['want_separate_host_toolset==1', {
+ 'toolsets': ['host'],
+ }, {
+ 'toolsets': ['target'],
+ }],
+ ],
'dependencies': [
'v8'
],
diff --git a/tools/oom_dump/README b/tools/oom_dump/README
index 0be7511..1d840b9 100644
--- a/tools/oom_dump/README
+++ b/tools/oom_dump/README
@@ -16,7 +16,9 @@
Next step is to build v8. Note: you should build x64 version of v8,
if you're on 64-bit platform, otherwise you would get a link error when
-building oom_dump.
+building oom_dump. Also, if you are testing against an older version of Chrome,
+you should build the corresponding version of V8 to make sure that the type-id
+enum has the correct values.
The last step is to build oom_dump itself. The following command should work:
diff --git a/tools/presubmit.py b/tools/presubmit.py
index 1d80f92..c191fc7 100755
--- a/tools/presubmit.py
+++ b/tools/presubmit.py
@@ -248,12 +248,19 @@
def IgnoreDir(self, name):
return (super(SourceProcessor, self).IgnoreDir(name)
or (name == 'third_party')
+ or (name == 'gyp')
+ or (name == 'out')
or (name == 'obj'))
- IGNORE_COPYRIGHTS = ['earley-boyer.js', 'raytrace.js', 'crypto.js',
- 'libraries.cc', 'libraries-empty.cc', 'jsmin.py', 'regexp-pcre.js']
- IGNORE_TABS = IGNORE_COPYRIGHTS + ['unicode-test.js',
- 'html-comments.js']
+ IGNORE_COPYRIGHTS = ['cpplint.py',
+ 'earley-boyer.js',
+ 'raytrace.js',
+ 'crypto.js',
+ 'libraries.cc',
+ 'libraries-empty.cc',
+ 'jsmin.py',
+ 'regexp-pcre.js']
+ IGNORE_TABS = IGNORE_COPYRIGHTS + ['unicode-test.js', 'html-comments.js']
def ProcessContents(self, name, contents):
result = True
diff --git a/tools/test-wrapper-gypbuild.py b/tools/test-wrapper-gypbuild.py
new file mode 100755
index 0000000..9bc6bf6
--- /dev/null
+++ b/tools/test-wrapper-gypbuild.py
@@ -0,0 +1,227 @@
+#!/usr/bin/env python
+#
+# Copyright 2011 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived
+# from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+# This is a convenience script to run the existing tools/test.py script
+# when using the gyp/make based build.
+# It is intended as a stop-gap rather than a long-term solution.
+
+
+import optparse
+import os
+from os.path import join, dirname, abspath
+import subprocess
+import sys
+
+
+PROGRESS_INDICATORS = ['verbose', 'dots', 'color', 'mono']
+
+
+def BuildOptions():
+ result = optparse.OptionParser()
+
+ # Flags specific to this wrapper script:
+ result.add_option("--arch-and-mode",
+ help='Architecture and mode in the format "arch.mode"',
+ default=None)
+ result.add_option("--outdir",
+ help='Base output directory',
+ default='out')
+
+ # Flags this wrapper script handles itself:
+ result.add_option("-m", "--mode",
+ help="The test modes in which to run (comma-separated)",
+ default='release,debug')
+ result.add_option("--arch",
+ help='The architectures to run tests for (comma-separated)',
+ default='ia32,x64,arm')
+
+ # Flags that are passed on to the wrapped test.py script:
+ result.add_option("-v", "--verbose", help="Verbose output",
+ default=False, action="store_true")
+ result.add_option("-p", "--progress",
+ help="The style of progress indicator (verbose, dots, color, mono)",
+ choices=PROGRESS_INDICATORS, default="mono")
+ result.add_option("--report", help="Print a summary of the tests to be run",
+ default=False, action="store_true")
+ result.add_option("-s", "--suite", help="A test suite",
+ default=[], action="append")
+ result.add_option("-t", "--timeout", help="Timeout in seconds",
+ default=60, type="int")
+ result.add_option("--snapshot", help="Run the tests with snapshot turned on",
+ default=False, action="store_true")
+ result.add_option("--special-command", default=None)
+ result.add_option("--valgrind", help="Run tests through valgrind",
+ default=False, action="store_true")
+ result.add_option("--cat", help="Print the source of the tests",
+ default=False, action="store_true")
+ result.add_option("--warn-unused", help="Report unused rules",
+ default=False, action="store_true")
+ result.add_option("-j", help="The number of parallel tasks to run",
+ default=1, type="int")
+ result.add_option("--time", help="Print timing information after running",
+ default=False, action="store_true")
+ result.add_option("--suppress-dialogs", help="Suppress Windows dialogs for crashing tests",
+ dest="suppress_dialogs", default=True, action="store_true")
+ result.add_option("--no-suppress-dialogs", help="Display Windows dialogs for crashing tests",
+ dest="suppress_dialogs", action="store_false")
+ result.add_option("--isolates", help="Whether to test isolates", default=False, action="store_true")
+ result.add_option("--store-unexpected-output",
+ help="Store the temporary JS files from tests that fails",
+ dest="store_unexpected_output", default=True, action="store_true")
+ result.add_option("--no-store-unexpected-output",
+ help="Deletes the temporary JS files from tests that fails",
+ dest="store_unexpected_output", action="store_false")
+ result.add_option("--stress-only",
+ help="Only run tests with --always-opt --stress-opt",
+ default=False, action="store_true")
+ result.add_option("--nostress",
+ help="Don't run crankshaft --always-opt --stress-op test",
+ default=False, action="store_true")
+ result.add_option("--crankshaft",
+ help="Run with the --crankshaft flag",
+ default=False, action="store_true")
+ result.add_option("--shard-count",
+ help="Split testsuites into this number of shards",
+ default=1, type="int")
+ result.add_option("--shard-run",
+ help="Run this shard from the split up tests.",
+ default=1, type="int")
+ result.add_option("--noprof", help="Disable profiling support",
+ default=False, action="store_true")
+
+ # Flags present in the original test.py that are unsupported in this wrapper:
+ # -S [-> scons_flags] (we build with gyp/make, not scons)
+ # --no-build (always true)
+ # --build-only (always false)
+ # --build-system (always 'gyp')
+ # --simulator (always true if arch==arm, always false otherwise)
+ # --shell (automatically chosen depending on arch and mode)
+
+ return result
+
+
+def ProcessOptions(options):
+ if options.arch_and_mode != None and options.arch_and_mode != "":
+ tokens = options.arch_and_mode.split(".")
+ options.arch = tokens[0]
+ options.mode = tokens[1]
+ options.mode = options.mode.split(',')
+ for mode in options.mode:
+ if not mode in ['debug', 'release']:
+ print "Unknown mode %s" % mode
+ return False
+ options.arch = options.arch.split(',')
+ for arch in options.arch:
+ if not arch in ['ia32', 'x64', 'arm']:
+ print "Unknown architecture %s" % arch
+ return False
+
+ return True
+
+
+def PassOnOptions(options):
+ result = []
+ if options.verbose:
+ result += ['--verbose']
+ if options.progress != 'mono':
+ result += ['--progress=' + options.progress]
+ if options.report:
+ result += ['--report']
+ if options.suite != []:
+ for suite in options.suite:
+ result += ['--suite=../../test/' + suite]
+ if options.timeout != 60:
+ result += ['--timeout=%s' % options.timeout]
+ if options.snapshot:
+ result += ['--snapshot']
+ if options.special_command:
+ result += ['--special-command=' + options.special_command]
+ if options.valgrind:
+ result += ['--valgrind']
+ if options.cat:
+ result += ['--cat']
+ if options.warn_unused:
+ result += ['--warn-unused']
+ if options.j != 1:
+ result += ['-j%s' % options.j]
+ if options.time:
+ result += ['--time']
+ if not options.suppress_dialogs:
+ result += ['--no-suppress-dialogs']
+ if options.isolates:
+ result += ['--isolates']
+ if not options.store_unexpected_output:
+ result += ['--no-store-unexpected-output']
+ if options.stress_only:
+ result += ['--stress-only']
+ if options.nostress:
+ result += ['--nostress']
+ if options.crankshaft:
+ result += ['--crankshaft']
+ if options.shard_count != 1:
+ result += ['--shard_count=%s' % options.shard_count]
+ if options.shard_run != 1:
+ result += ['--shard_run=%s' % options.shard_run]
+ if options.noprof:
+ result += ['--noprof']
+ return result
+
+
+def Main():
+ parser = BuildOptions()
+ (options, args) = parser.parse_args()
+ if not ProcessOptions(options):
+ parser.print_help()
+ return 1
+
+ workspace = abspath(join(dirname(sys.argv[0]), '..'))
+ args_for_children = [workspace + '/tools/test.py'] + PassOnOptions(options)
+ args_for_children += ['--no-build', '--build-system=gyp']
+ for arg in args:
+ args_for_children += [arg]
+ returncodes = 0
+
+ for mode in options.mode:
+ for arch in options.arch:
+ print ">>> running tests for %s.%s" % (arch, mode)
+ shell = workspace + '/' + options.outdir + '/' + arch + '.' + mode + "/d8"
+ child = subprocess.Popen(' '.join(args_for_children +
+ ['--arch=' + arch] +
+ ['--mode=' + mode] +
+ ['--shell=' + shell]),
+ shell=True,
+ cwd=workspace)
+ returncodes += child.wait()
+
+ return returncodes
+
+
+if __name__ == '__main__':
+ sys.exit(Main())
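
As its header comment says, the wrapper only drives the existing tools/test.py. For orientation, a sketch of the command Main() assembles for a single arch/mode pair with the default --outdir (the workspace path is illustrative, and pass-through options from PassOnOptions are omitted):

    # Roughly what the wrapper runs for --arch=ia32 --mode=release --outdir=out.
    workspace = '/path/to/v8'   # abspath(join(dirname(sys.argv[0]), '..'))
    arch, mode = 'ia32', 'release'
    shell = '%s/out/%s.%s/d8' % (workspace, arch, mode)
    cmd = ' '.join([workspace + '/tools/test.py',
                    '--no-build', '--build-system=gyp',
                    '--arch=' + arch, '--mode=' + mode,
                    '--shell=' + shell])
    print cmd  # executed via subprocess.Popen(..., shell=True, cwd=workspace)
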
diff --git a/tools/test.py b/tools/test.py
index ec21ffe..d40159c 100755
--- a/tools/test.py
+++ b/tools/test.py
@@ -1181,6 +1181,8 @@
default=False, action="store_true")
result.add_option("--build-only", help="Only build requirements, don't run the tests",
default=False, action="store_true")
+ result.add_option("--build-system", help="Build system in use (scons or gyp)",
+ default='scons')
result.add_option("--report", help="Print a summary of the tests to be run",
default=False, action="store_true")
result.add_option("-s", "--suite", help="A test suite",
@@ -1208,7 +1210,7 @@
dest="suppress_dialogs", default=True, action="store_true")
result.add_option("--no-suppress-dialogs", help="Display Windows dialogs for crashing tests",
dest="suppress_dialogs", action="store_false")
- result.add_option("--shell", help="Path to V8 shell", default="shell")
+ result.add_option("--shell", help="Path to V8 shell", default="d8")
result.add_option("--isolates", help="Whether to test isolates", default=False, action="store_true")
result.add_option("--store-unexpected-output",
help="Store the temporary JS files from tests that fails",
@@ -1271,21 +1273,30 @@
if options.special_command:
options.special_command += " --crankshaft"
else:
- options.special_command = "@--crankshaft"
- if options.shell == "d8":
+ options.special_command = "@ --crankshaft"
+ if options.shell.endswith("d8"):
if options.special_command:
options.special_command += " --test"
else:
- options.special_command = "@--test"
+ options.special_command = "@ --test"
if options.noprof:
options.scons_flags.append("prof=off")
options.scons_flags.append("profilingsupport=off")
+ if options.build_system == 'gyp':
+ if options.build_only:
+ print "--build-only not supported for gyp, please build manually."
+ options.build_only = False
return True
+def DoSkip(case):
+ return (SKIP in case.outcomes) or (SLOW in case.outcomes)
+
+
REPORT_TEMPLATE = """\
Total: %(total)i tests
* %(skipped)4d tests will be skipped
+ * %(timeout)4d tests are expected to time out sometimes
* %(nocrash)4d tests are expected to be flaky but not crash
* %(pass)4d tests are expected to pass
* %(fail_ok)4d tests are expected to fail that we won't fix
@@ -1297,10 +1308,11 @@
return (PASS in o) and (FAIL in o) and (not CRASH in o) and (not OKAY in o)
def IsFailOk(o):
return (len(o) == 2) and (FAIL in o) and (OKAY in o)
- unskipped = [c for c in cases if not SKIP in c.outcomes]
+ unskipped = [c for c in cases if not DoSkip(c)]
print REPORT_TEMPLATE % {
'total': len(cases),
'skipped': len(cases) - len(unskipped),
+ 'timeout': len([t for t in unskipped if TIMEOUT in t.outcomes]),
'nocrash': len([t for t in unskipped if IsFlaky(t.outcomes)]),
'pass': len([t for t in unskipped if list(t.outcomes) == [PASS]]),
'fail_ok': len([t for t in unskipped if IsFailOk(t.outcomes)]),
@@ -1399,6 +1411,9 @@
run_valgrind = join(workspace, "tools", "run-valgrind.py")
options.special_command = "python -u " + run_valgrind + " @"
+ if options.build_system == 'gyp':
+ SUFFIX['debug'] = ''
+
shell = abspath(options.shell)
buildspace = dirname(shell)
@@ -1472,15 +1487,14 @@
for rule in globally_unused_rules:
print "Rule for '%s' was not used." % '/'.join([str(s) for s in rule.path])
+ if not options.isolates:
+ all_cases = [c for c in all_cases if not c.TestsIsolates()]
+
if options.report:
PrintReport(all_cases)
result = None
- def DoSkip(case):
- return SKIP in case.outcomes or SLOW in case.outcomes
cases_to_run = [ c for c in all_cases if not DoSkip(c) ]
- if not options.isolates:
- cases_to_run = [c for c in cases_to_run if not c.TestsIsolates()]
if len(cases_to_run) == 0:
print "No tests to run."
return 0