deps: update V8 to 10.7.193.13

PR-URL: https://github.com/nodejs/node/pull/44741
Fixes: https://github.com/nodejs/node/issues/44650
Fixes: https://github.com/nodejs/node/issues/37472
Reviewed-By: Ben Noordhuis <info@bnoordhuis.nl>
Reviewed-By: Jiawen Geng <technicalcute@gmail.com>
Reviewed-By: James M Snell <jasnell@gmail.com>
Author: Michaël Zasso
Date:   2022-09-21 13:28:42 +02:00
Parent: 624dadb007
Commit: 6bd756d7c6
3737 changed files with 277746 additions and 186344 deletions

deps/v8/.gitignore (vendored): 1 line changed

@@ -30,6 +30,7 @@
 .cproject
 .gclient_entries
 .gdb_history
+.idea
 .jslint-cache
 .landmines
 .project

deps/v8/.gn (vendored): 9 lines changed

@@ -19,10 +19,17 @@ no_check_targets = [
 "//:cppgc_base",
 "//:v8_internal_headers",
 "//src/inspector:inspector",
-"//test/cctest:cctest_sources",
+"//test/cctest:cctest_sources",  # 15 errors
+"//test/unittests:inspector_unittests_sources",  # 2 errors
 "//third_party/icu:*",
 ]
+
+default_args = {
+# Overwrite default args declared in the Fuchsia sdk
+# Please maintain this in sync with Chromium version in src/.gn
+fuchsia_target_api_level = 9
+}
 # These are the list of GN files that run exec_script. This whitelist exists
 # to force additional review for new uses of exec_script, which is strongly
 # discouraged except for gypi_to_gn calls.

deps/v8/.mailmap (vendored): 2 lines changed

@@ -11,4 +11,6 @@
 # Please keep the list sorted.
 Clemens Backes <clemensb@chromium.org> Clemens Hammacher <clemensh@chromium.org>
+Jakob Linke <jgruber@chromium.org>
+Jakob Linke <jgruber@chromium.org> <jgruber@google.com>
 Timothy Gu <timothygu@chromium.org> <timothygu99@gmail.com>

deps/v8/.vpython3 (vendored): 5 lines changed

@@ -69,3 +69,8 @@ wheel: <
 name: "infra/python/wheels/numpy/${vpython_platform}"
 version: "version:1.2x.supported.1"
 >
+wheel: <
+name: "infra/python/wheels/protobuf-py3"
+version: "version:3.19.3"
+>

deps/v8/AUTHORS (vendored): 7 lines changed

@@ -42,6 +42,7 @@ Cloudflare, Inc. <*@cloudflare.com>
 Julia Computing, Inc. <*@juliacomputing.com>
 CodeWeavers, Inc. <*@codeweavers.com>
 Alibaba, Inc. <*@alibaba-inc.com>
+SiFive, Inc. <*@sifive.com>

 Aaron Bieber <deftly@gmail.com>
 Aaron O'Mullan <aaron.omullan@gmail.com>
@@ -99,21 +100,25 @@ David Sanders <dsanders11@ucsbalum.com>
 Deepak Mohan <hop2deep@gmail.com>
 Deon Dior <diaoyuanjie@gmail.com>
 Derek Tu <derek.t@rioslab.org>
+Divy Srivastava <dj.srivastava23@gmail.com>
 Dominic Chen <d.c.ddcc@gmail.com>
 Dominic Farolini <domfarolino@gmail.com>
 Douglas Crosher <dtc-v8@scieneer.com>
 Dusan Milosavljevic <dusan.m.milosavljevic@gmail.com>
+Eden Wang <nedenwang@tencent.com>
 Eric Rannaud <eric.rannaud@gmail.com>
 Erich Ocean <erich.ocean@me.com>
 Evan Lucas <evan.lucas@help.com>
 Fedor Indutny <fedor@indutny.com>
 Felix Geisendörfer <haimuiba@gmail.com>
+Feng Yu <f3n67u@gmail.com>
 Filipe David Manana <fdmanana@gmail.com>
 Franziska Hinkelmann <franziska.hinkelmann@gmail.com>
 Gao Sheng <gaosheng08@meituan.com>
 Geoffrey Garside <ggarside@gmail.com>
 Gergely Nagy <ngg@ngg.hu>
 Gilang Mentari Hamidy <gilang@hamidy.net>
+Giovanny Gutierrez <giovannygutierrez@gmail.com>
 Gus Caplan <me@gus.host>
 Gwang Yoon Hwang <ryumiel@company100.net>
 Haichuan Wang <hc.opensource@gmail.com>
@@ -122,6 +127,7 @@ Harshal Nandigramwar <pro.bbcom18@gmail.com>
 Harshil Jain <twitharshil@gmail.com>
 Henrique Ferreiro <henrique.ferreiro@gmail.com>
 Hirofumi Mako <mkhrfm@gmail.com>
+Hisham Muhammad <hisham@gobolinux.org>
 Honggyu Kim <honggyu.kp@gmail.com>
 Huáng Jùnliàng <jlhwung@gmail.com>
 HyeockJin Kim <kherootz@gmail.com>
@@ -194,6 +200,7 @@ Paolo Giarrusso <p.giarrusso@gmail.com>
 Patrick Gansterer <paroga@paroga.com>
 Paul Lind <plind44@gmail.com>
 Pavel Medvedev <pmedvedev@gmail.com>
+Pedro Falcato <pedro.falcato@gmail.com>
 Peng Fei <pfgenyun@gmail.com>
 Peng Wu <peng.w@rioslab.org>
 Peng-Yu Chen <pengyu@libstarrify.so>

deps/v8/BUILD.bazel (vendored): 186 lines changed

@ -68,6 +68,7 @@ load(":bazel/v8-non-pointer-compression.bzl", "v8_binary_non_pointer_compression
# cppgc_enable_caged_heap # cppgc_enable_caged_heap
# cppgc_enable_check_assignments_in_prefinalizers # cppgc_enable_check_assignments_in_prefinalizers
# cppgc_enable_object_names # cppgc_enable_object_names
# cppgc_enable_pointer_compression
# cppgc_enable_verify_heap # cppgc_enable_verify_heap
# cppgc_enable_young_generation # cppgc_enable_young_generation
# v8_enable_zone_compression # v8_enable_zone_compression
@ -323,7 +324,6 @@ v8_config(
}, },
defines = [ defines = [
"GOOGLE3", "GOOGLE3",
"CHROMIUM_ZLIB_NO_CHROMECONF",
"ENABLE_DEBUGGER_SUPPORT", "ENABLE_DEBUGGER_SUPPORT",
"V8_ADVANCED_BIGINT_ALGORITHMS", "V8_ADVANCED_BIGINT_ALGORITHMS",
"V8_CONCURRENT_MARKING", "V8_CONCURRENT_MARKING",
@ -457,15 +457,19 @@ filegroup(
"include/cppgc/garbage-collected.h", "include/cppgc/garbage-collected.h",
"include/cppgc/heap.h", "include/cppgc/heap.h",
"include/cppgc/heap-consistency.h", "include/cppgc/heap-consistency.h",
"include/cppgc/heap-handle.h",
"include/cppgc/heap-state.h", "include/cppgc/heap-state.h",
"include/cppgc/heap-statistics.h", "include/cppgc/heap-statistics.h",
"include/cppgc/internal/api-constants.h", "include/cppgc/internal/api-constants.h",
"include/cppgc/internal/atomic-entry-flag.h", "include/cppgc/internal/atomic-entry-flag.h",
"include/cppgc/internal/base-page-handle.h",
"include/cppgc/internal/caged-heap-local-data.h", "include/cppgc/internal/caged-heap-local-data.h",
"include/cppgc/internal/caged-heap.h",
"include/cppgc/internal/compiler-specific.h", "include/cppgc/internal/compiler-specific.h",
"include/cppgc/internal/finalizer-trait.h", "include/cppgc/internal/finalizer-trait.h",
"include/cppgc/internal/gc-info.h", "include/cppgc/internal/gc-info.h",
"include/cppgc/internal/logging.h", "include/cppgc/internal/logging.h",
"include/cppgc/internal/member-storage.h",
"include/cppgc/internal/name-trait.h", "include/cppgc/internal/name-trait.h",
"include/cppgc/internal/persistent-node.h", "include/cppgc/internal/persistent-node.h",
"include/cppgc/internal/pointer-policies.h", "include/cppgc/internal/pointer-policies.h",
@ -579,7 +583,6 @@ filegroup(
"src/base/address-region.h", "src/base/address-region.h",
"src/base/atomic-utils.h", "src/base/atomic-utils.h",
"src/base/atomicops.h", "src/base/atomicops.h",
"src/base/atomicops_internals_atomicword_compat.h",
"src/base/base-export.h", "src/base/base-export.h",
"src/base/bit-field.h", "src/base/bit-field.h",
"src/base/bits-iterator.h", "src/base/bits-iterator.h",
@ -590,6 +593,7 @@ filegroup(
"src/base/bounds.h", "src/base/bounds.h",
"src/base/build_config.h", "src/base/build_config.h",
"src/base/compiler-specific.h", "src/base/compiler-specific.h",
"src/base/container-utils.h",
"src/base/cpu.cc", "src/base/cpu.cc",
"src/base/cpu.h", "src/base/cpu.h",
"src/base/debug/stack_trace.cc", "src/base/debug/stack_trace.cc",
@ -604,7 +608,6 @@ filegroup(
"src/base/file-utils.h", "src/base/file-utils.h",
"src/base/flags.h", "src/base/flags.h",
"src/base/free_deleter.h", "src/base/free_deleter.h",
"src/base/functional.cc",
"src/base/functional.h", "src/base/functional.h",
"src/base/hashmap-entry.h", "src/base/hashmap-entry.h",
"src/base/hashmap.h", "src/base/hashmap.h",
@ -643,6 +646,9 @@ filegroup(
"src/base/platform/condition-variable.cc", "src/base/platform/condition-variable.cc",
"src/base/platform/condition-variable.h", "src/base/platform/condition-variable.h",
"src/base/platform/elapsed-timer.h", "src/base/platform/elapsed-timer.h",
"src/base/platform/memory.h",
"src/base/platform/memory-protection-key.cc",
"src/base/platform/memory-protection-key.h",
"src/base/platform/mutex.cc", "src/base/platform/mutex.cc",
"src/base/platform/mutex.h", "src/base/platform/mutex.h",
"src/base/platform/platform.h", "src/base/platform/platform.h",
@ -659,6 +665,7 @@ filegroup(
"src/base/safe_conversions_arm_impl.h", "src/base/safe_conversions_arm_impl.h",
"src/base/safe_conversions_impl.h", "src/base/safe_conversions_impl.h",
"src/base/small-vector.h", "src/base/small-vector.h",
"src/base/string-format.h",
"src/base/strings.cc", "src/base/strings.cc",
"src/base/strings.h", "src/base/strings.h",
"src/base/sys-info.cc", "src/base/sys-info.cc",
@ -689,6 +696,7 @@ filegroup(
"@v8//bazel/config:is_linux": [ "@v8//bazel/config:is_linux": [
"src/base/debug/stack_trace_posix.cc", "src/base/debug/stack_trace_posix.cc",
"src/base/platform/platform-linux.cc", "src/base/platform/platform-linux.cc",
"src/base/platform/platform-linux.h",
], ],
"@v8//bazel/config:is_android": [ "@v8//bazel/config:is_android": [
"src/base/debug/stack_trace_android.cc", "src/base/debug/stack_trace_android.cc",
@ -779,7 +787,11 @@ filegroup(
"src/builtins/array-slice.tq", "src/builtins/array-slice.tq",
"src/builtins/array-some.tq", "src/builtins/array-some.tq",
"src/builtins/array-splice.tq", "src/builtins/array-splice.tq",
"src/builtins/array-to-reversed.tq",
"src/builtins/array-to-sorted.tq",
"src/builtins/array-to-spliced.tq",
"src/builtins/array-unshift.tq", "src/builtins/array-unshift.tq",
"src/builtins/array-with.tq",
"src/builtins/array.tq", "src/builtins/array.tq",
"src/builtins/arraybuffer.tq", "src/builtins/arraybuffer.tq",
"src/builtins/base.tq", "src/builtins/base.tq",
@ -878,7 +890,9 @@ filegroup(
"src/builtins/typed-array-some.tq", "src/builtins/typed-array-some.tq",
"src/builtins/typed-array-sort.tq", "src/builtins/typed-array-sort.tq",
"src/builtins/typed-array-subarray.tq", "src/builtins/typed-array-subarray.tq",
"src/builtins/typed-array-to-reversed.tq",
"src/builtins/typed-array-values.tq", "src/builtins/typed-array-values.tq",
"src/builtins/typed-array-with.tq",
"src/builtins/typed-array.tq", "src/builtins/typed-array.tq",
"src/builtins/weak-ref.tq", "src/builtins/weak-ref.tq",
"src/ic/handler-configuration.tq", "src/ic/handler-configuration.tq",
@ -903,6 +917,7 @@ filegroup(
"src/objects/heap-object.tq", "src/objects/heap-object.tq",
"src/objects/js-array-buffer.tq", "src/objects/js-array-buffer.tq",
"src/objects/js-array.tq", "src/objects/js-array.tq",
"src/objects/js-atomics-synchronization.tq",
"src/objects/js-collection-iterator.tq", "src/objects/js-collection-iterator.tq",
"src/objects/js-collection.tq", "src/objects/js-collection.tq",
"src/objects/js-function.tq", "src/objects/js-function.tq",
@ -912,7 +927,8 @@ filegroup(
"src/objects/js-proxy.tq", "src/objects/js-proxy.tq",
"src/objects/js-regexp-string-iterator.tq", "src/objects/js-regexp-string-iterator.tq",
"src/objects/js-regexp.tq", "src/objects/js-regexp.tq",
"src/objects/js-shadow-realms.tq", "src/objects/js-shadow-realm.tq",
"src/objects/js-shared-array.tq",
"src/objects/js-struct.tq", "src/objects/js-struct.tq",
"src/objects/js-temporal-objects.tq", "src/objects/js-temporal-objects.tq",
"src/objects/js-weak-refs.tq", "src/objects/js-weak-refs.tq",
@ -1034,6 +1050,15 @@ filegroup(
], ],
) )
# Default setting for v8_enable_pointer_compression when target is x64.
selects.config_setting_group(
name = "is_v8_enable_webassembly_on_non_android_posix_x64",
match_all = [
":is_v8_enable_webassembly",
"@v8//bazel/config:is_non_android_posix_x64",
],
)
filegroup( filegroup(
name = "v8_base_without_compiler_files", name = "v8_base_without_compiler_files",
srcs = [ srcs = [
@ -1091,6 +1116,7 @@ filegroup(
"src/builtins/builtins-array.cc", "src/builtins/builtins-array.cc",
"src/builtins/builtins-arraybuffer.cc", "src/builtins/builtins-arraybuffer.cc",
"src/builtins/builtins-async-module.cc", "src/builtins/builtins-async-module.cc",
"src/builtins/builtins-atomics-synchronization.cc",
"src/builtins/builtins-bigint.cc", "src/builtins/builtins-bigint.cc",
"src/builtins/builtins-callsite.cc", "src/builtins/builtins-callsite.cc",
"src/builtins/builtins-collections.cc", "src/builtins/builtins-collections.cc",
@ -1110,8 +1136,9 @@ filegroup(
"src/builtins/builtins-promise.h", "src/builtins/builtins-promise.h",
"src/builtins/builtins-reflect.cc", "src/builtins/builtins-reflect.cc",
"src/builtins/builtins-regexp.cc", "src/builtins/builtins-regexp.cc",
"src/builtins/builtins-shadow-realms.cc", "src/builtins/builtins-shadow-realm.cc",
"src/builtins/builtins-sharedarraybuffer.cc", "src/builtins/builtins-sharedarraybuffer.cc",
"src/builtins/builtins-shared-array.cc",
"src/builtins/builtins-string.cc", "src/builtins/builtins-string.cc",
"src/builtins/builtins-struct.cc", "src/builtins/builtins-struct.cc",
"src/builtins/builtins-symbol.cc", "src/builtins/builtins-symbol.cc",
@ -1121,6 +1148,7 @@ filegroup(
"src/builtins/builtins-utils-inl.h", "src/builtins/builtins-utils-inl.h",
"src/builtins/builtins-utils.h", "src/builtins/builtins-utils.h",
"src/builtins/builtins-weak-refs.cc", "src/builtins/builtins-weak-refs.cc",
"src/builtins/builtins-web-snapshots.cc",
"src/builtins/builtins.cc", "src/builtins/builtins.cc",
"src/builtins/builtins.h", "src/builtins/builtins.h",
"src/builtins/constants-table-builder.cc", "src/builtins/constants-table-builder.cc",
@ -1133,6 +1161,7 @@ filegroup(
"src/codegen/assembler.cc", "src/codegen/assembler.cc",
"src/codegen/assembler.h", "src/codegen/assembler.h",
"src/codegen/atomic-memory-order.h", "src/codegen/atomic-memory-order.h",
"src/codegen/background-merge-task.h",
"src/codegen/bailout-reason.cc", "src/codegen/bailout-reason.cc",
"src/codegen/bailout-reason.h", "src/codegen/bailout-reason.h",
"src/codegen/callable.h", "src/codegen/callable.h",
@ -1170,6 +1199,8 @@ filegroup(
"src/codegen/machine-type.h", "src/codegen/machine-type.h",
"src/codegen/macro-assembler-inl.h", "src/codegen/macro-assembler-inl.h",
"src/codegen/macro-assembler.h", "src/codegen/macro-assembler.h",
"src/codegen/maglev-safepoint-table.cc",
"src/codegen/maglev-safepoint-table.h",
"src/codegen/optimized-compilation-info.cc", "src/codegen/optimized-compilation-info.cc",
"src/codegen/optimized-compilation-info.h", "src/codegen/optimized-compilation-info.h",
"src/codegen/pending-optimization-table.cc", "src/codegen/pending-optimization-table.cc",
@ -1183,6 +1214,7 @@ filegroup(
"src/codegen/reglist.h", "src/codegen/reglist.h",
"src/codegen/reloc-info.cc", "src/codegen/reloc-info.cc",
"src/codegen/reloc-info.h", "src/codegen/reloc-info.h",
"src/codegen/safepoint-table-base.h",
"src/codegen/safepoint-table.cc", "src/codegen/safepoint-table.cc",
"src/codegen/safepoint-table.h", "src/codegen/safepoint-table.h",
"src/codegen/script-details.h", "src/codegen/script-details.h",
@ -1191,8 +1223,6 @@ filegroup(
"src/codegen/source-position-table.h", "src/codegen/source-position-table.h",
"src/codegen/source-position.cc", "src/codegen/source-position.cc",
"src/codegen/source-position.h", "src/codegen/source-position.h",
"src/codegen/string-constants.cc",
"src/codegen/string-constants.h",
"src/codegen/tick-counter.cc", "src/codegen/tick-counter.cc",
"src/codegen/tick-counter.h", "src/codegen/tick-counter.h",
"src/codegen/tnode.cc", "src/codegen/tnode.cc",
@ -1203,8 +1233,10 @@ filegroup(
"src/codegen/unoptimized-compilation-info.h", "src/codegen/unoptimized-compilation-info.h",
"src/common/assert-scope.cc", "src/common/assert-scope.cc",
"src/common/assert-scope.h", "src/common/assert-scope.h",
"src/common/allow-deprecated.h",
"src/common/checks.h", "src/common/checks.h",
"src/common/code-memory-access-inl.h",
"src/common/code-memory-access.cc",
"src/common/code-memory-access.h",
"src/common/high-allocation-throughput-scope.h", "src/common/high-allocation-throughput-scope.h",
"src/common/message-template.h", "src/common/message-template.h",
"src/common/operation.h", "src/common/operation.h",
@ -1240,6 +1272,8 @@ filegroup(
"src/debug/debug.cc", "src/debug/debug.cc",
"src/debug/debug.h", "src/debug/debug.h",
"src/debug/interface-types.h", "src/debug/interface-types.h",
"src/debug/liveedit-diff.cc",
"src/debug/liveedit-diff.h",
"src/debug/liveedit.cc", "src/debug/liveedit.cc",
"src/debug/liveedit.h", "src/debug/liveedit.h",
"src/deoptimizer/deoptimize-reason.cc", "src/deoptimizer/deoptimize-reason.cc",
@ -1356,6 +1390,8 @@ filegroup(
"src/handles/maybe-handles.h", "src/handles/maybe-handles.h",
"src/handles/persistent-handles.cc", "src/handles/persistent-handles.cc",
"src/handles/persistent-handles.h", "src/handles/persistent-handles.h",
"src/handles/shared-object-conveyor-handles.cc",
"src/handles/shared-object-conveyor-handles.h",
"src/heap/base/active-system-pages.cc", "src/heap/base/active-system-pages.cc",
"src/heap/base/active-system-pages.h", "src/heap/base/active-system-pages.h",
"src/heap/allocation-observer.cc", "src/heap/allocation-observer.cc",
@ -1410,11 +1446,14 @@ filegroup(
"src/heap/free-list-inl.h", "src/heap/free-list-inl.h",
"src/heap/free-list.cc", "src/heap/free-list.cc",
"src/heap/free-list.h", "src/heap/free-list.h",
"src/heap/gc-callbacks.h",
"src/heap/gc-idle-time-handler.cc", "src/heap/gc-idle-time-handler.cc",
"src/heap/gc-idle-time-handler.h", "src/heap/gc-idle-time-handler.h",
"src/heap/gc-tracer.cc", "src/heap/gc-tracer.cc",
"src/heap/gc-tracer-inl.h", "src/heap/gc-tracer-inl.h",
"src/heap/gc-tracer.h", "src/heap/gc-tracer.h",
"src/heap/global-handle-marking-visitor.cc",
"src/heap/global-handle-marking-visitor.h",
"src/heap/heap-allocator-inl.h", "src/heap/heap-allocator-inl.h",
"src/heap/heap-allocator.cc", "src/heap/heap-allocator.cc",
"src/heap/heap-allocator.h", "src/heap/heap-allocator.h",
@ -1423,6 +1462,8 @@ filegroup(
"src/heap/heap-inl.h", "src/heap/heap-inl.h",
"src/heap/heap-layout-tracer.cc", "src/heap/heap-layout-tracer.cc",
"src/heap/heap-layout-tracer.h", "src/heap/heap-layout-tracer.h",
"src/heap/heap-verifier.cc",
"src/heap/heap-verifier.h",
"src/heap/heap-write-barrier-inl.h", "src/heap/heap-write-barrier-inl.h",
"src/heap/heap-write-barrier.cc", "src/heap/heap-write-barrier.cc",
"src/heap/heap-write-barrier.h", "src/heap/heap-write-barrier.h",
@ -1595,8 +1636,8 @@ filegroup(
"src/logging/local-logger.cc", "src/logging/local-logger.cc",
"src/logging/local-logger.h", "src/logging/local-logger.h",
"src/logging/log-inl.h", "src/logging/log-inl.h",
"src/logging/log-utils.cc", "src/logging/log-file.cc",
"src/logging/log-utils.h", "src/logging/log-file.h",
"src/logging/log.cc", "src/logging/log.cc",
"src/logging/log.h", "src/logging/log.h",
"src/logging/metrics.cc", "src/logging/metrics.cc",
@ -1693,6 +1734,9 @@ filegroup(
"src/objects/js-array-buffer.h", "src/objects/js-array-buffer.h",
"src/objects/js-array-inl.h", "src/objects/js-array-inl.h",
"src/objects/js-array.h", "src/objects/js-array.h",
"src/objects/js-atomics-synchronization-inl.h",
"src/objects/js-atomics-synchronization.h",
"src/objects/js-atomics-synchronization.cc",
"src/objects/js-collection-inl.h", "src/objects/js-collection-inl.h",
"src/objects/js-collection-iterator.h", "src/objects/js-collection-iterator.h",
"src/objects/js-collection-iterator-inl.h", "src/objects/js-collection-iterator-inl.h",
@ -1714,8 +1758,10 @@ filegroup(
"src/objects/js-regexp-string-iterator.h", "src/objects/js-regexp-string-iterator.h",
"src/objects/js-regexp.cc", "src/objects/js-regexp.cc",
"src/objects/js-regexp.h", "src/objects/js-regexp.h",
"src/objects/js-shadow-realms.h", "src/objects/js-shadow-realm.h",
"src/objects/js-shadow-realms-inl.h", "src/objects/js-shadow-realm-inl.h",
"src/objects/js-shared-array.h",
"src/objects/js-shared-array-inl.h",
"src/objects/js-struct.h", "src/objects/js-struct.h",
"src/objects/js-struct-inl.h", "src/objects/js-struct-inl.h",
"src/objects/js-temporal-objects.h", "src/objects/js-temporal-objects.h",
@ -1771,9 +1817,6 @@ filegroup(
"src/objects/ordered-hash-table-inl.h", "src/objects/ordered-hash-table-inl.h",
"src/objects/ordered-hash-table.cc", "src/objects/ordered-hash-table.cc",
"src/objects/ordered-hash-table.h", "src/objects/ordered-hash-table.h",
"src/objects/osr-optimized-code-cache-inl.h",
"src/objects/osr-optimized-code-cache.cc",
"src/objects/osr-optimized-code-cache.h",
"src/objects/primitive-heap-object-inl.h", "src/objects/primitive-heap-object-inl.h",
"src/objects/primitive-heap-object.h", "src/objects/primitive-heap-object.h",
"src/objects/promise-inl.h", "src/objects/promise-inl.h",
@ -1803,6 +1846,8 @@ filegroup(
"src/objects/shared-function-info-inl.h", "src/objects/shared-function-info-inl.h",
"src/objects/shared-function-info.cc", "src/objects/shared-function-info.cc",
"src/objects/shared-function-info.h", "src/objects/shared-function-info.h",
"src/objects/simd.cc",
"src/objects/simd.h",
"src/objects/slots-atomic-inl.h", "src/objects/slots-atomic-inl.h",
"src/objects/slots-inl.h", "src/objects/slots-inl.h",
"src/objects/slots.h", "src/objects/slots.h",
@ -1816,6 +1861,9 @@ filegroup(
"src/objects/string-inl.h", "src/objects/string-inl.h",
"src/objects/string-set-inl.h", "src/objects/string-set-inl.h",
"src/objects/string-set.h", "src/objects/string-set.h",
"src/objects/string-forwarding-table-inl.h",
"src/objects/string-forwarding-table.cc",
"src/objects/string-forwarding-table.h",
"src/objects/string-table-inl.h", "src/objects/string-table-inl.h",
"src/objects/string-table.cc", "src/objects/string-table.cc",
"src/objects/symbol-table.cc", "src/objects/symbol-table.cc",
@ -1994,6 +2042,7 @@ filegroup(
"src/runtime/runtime-shadow-realm.cc", "src/runtime/runtime-shadow-realm.cc",
"src/runtime/runtime-strings.cc", "src/runtime/runtime-strings.cc",
"src/runtime/runtime-symbol.cc", "src/runtime/runtime-symbol.cc",
"src/runtime/runtime-temporal.cc",
"src/runtime/runtime-test.cc", "src/runtime/runtime-test.cc",
"src/runtime/runtime-trace.cc", "src/runtime/runtime-trace.cc",
"src/runtime/runtime-typedarray.cc", "src/runtime/runtime-typedarray.cc",
@ -2006,6 +2055,8 @@ filegroup(
"src/sandbox/external-pointer-table.cc", "src/sandbox/external-pointer-table.cc",
"src/sandbox/external-pointer-table-inl.h", "src/sandbox/external-pointer-table-inl.h",
"src/sandbox/external-pointer-table.h", "src/sandbox/external-pointer-table.h",
"src/sandbox/testing.cc",
"src/sandbox/testing.h",
"src/sandbox/sandbox.cc", "src/sandbox/sandbox.cc",
"src/sandbox/sandbox.h", "src/sandbox/sandbox.h",
"src/sandbox/sandboxed-pointer-inl.h", "src/sandbox/sandboxed-pointer-inl.h",
@ -2047,8 +2098,6 @@ filegroup(
"src/snapshot/shared-heap-deserializer.cc", "src/snapshot/shared-heap-deserializer.cc",
"src/snapshot/shared-heap-serializer.h", "src/snapshot/shared-heap-serializer.h",
"src/snapshot/shared-heap-serializer.cc", "src/snapshot/shared-heap-serializer.cc",
"src/snapshot/snapshot-compression.cc",
"src/snapshot/snapshot-compression.h",
"src/snapshot/snapshot-data.cc", "src/snapshot/snapshot-data.cc",
"src/snapshot/snapshot-data.h", "src/snapshot/snapshot-data.h",
"src/snapshot/snapshot-source-sink.cc", "src/snapshot/snapshot-source-sink.cc",
@ -2097,12 +2146,6 @@ filegroup(
"src/tracing/traced-value.h", "src/tracing/traced-value.h",
"src/tracing/tracing-category-observer.cc", "src/tracing/tracing-category-observer.cc",
"src/tracing/tracing-category-observer.h", "src/tracing/tracing-category-observer.h",
"src/trap-handler/handler-inside-posix.h",
"src/trap-handler/handler-inside.cc",
"src/trap-handler/handler-outside.cc",
"src/trap-handler/handler-shared.cc",
"src/trap-handler/trap-handler-internal.h",
"src/trap-handler/trap-handler.h",
"src/utils/address-map.cc", "src/utils/address-map.cc",
"src/utils/address-map.h", "src/utils/address-map.h",
"src/utils/allocation.cc", "src/utils/allocation.cc",
@ -2112,6 +2155,8 @@ filegroup(
"src/utils/boxed-float.h", "src/utils/boxed-float.h",
"src/utils/detachable-vector.cc", "src/utils/detachable-vector.cc",
"src/utils/detachable-vector.h", "src/utils/detachable-vector.h",
"src/utils/hex-format.cc",
"src/utils/hex-format.h",
"src/utils/identity-map.cc", "src/utils/identity-map.cc",
"src/utils/identity-map.h", "src/utils/identity-map.h",
"src/utils/locked-queue-inl.h", "src/utils/locked-queue-inl.h",
@ -2121,6 +2166,9 @@ filegroup(
"src/utils/ostreams.cc", "src/utils/ostreams.cc",
"src/utils/ostreams.h", "src/utils/ostreams.h",
"src/utils/scoped-list.h", "src/utils/scoped-list.h",
"src/utils/sha-256.cc",
"src/utils/sha-256.h",
"src/utils/sparse-bit-vector.h",
"src/utils/utils-inl.h", "src/utils/utils-inl.h",
"src/utils/utils.cc", "src/utils/utils.cc",
"src/utils/utils.h", "src/utils/utils.h",
@ -2386,7 +2434,11 @@ filegroup(
], ],
}) + select({ }) + select({
# Only for x64 builds and for arm64 with x64 host simulator. # Only for x64 builds and for arm64 with x64 host simulator.
"@v8//bazel/config:is_non_android_posix_x64": [ ":is_v8_enable_webassembly_on_non_android_posix_x64": [
"src/trap-handler/handler-inside-posix.cc",
"src/trap-handler/handler-outside-posix.cc",
],
"@v8//bazel/config:is_macos_arm64": [
"src/trap-handler/handler-inside-posix.cc", "src/trap-handler/handler-inside-posix.cc",
"src/trap-handler/handler-outside-posix.cc", "src/trap-handler/handler-outside-posix.cc",
], ],
@ -2420,16 +2472,20 @@ filegroup(
"src/asmjs/asm-scanner.h", "src/asmjs/asm-scanner.h",
"src/asmjs/asm-types.cc", "src/asmjs/asm-types.cc",
"src/asmjs/asm-types.h", "src/asmjs/asm-types.h",
"src/compiler/int64-lowering.h",
"src/compiler/wasm-compiler.h",
"src/compiler/wasm-escape-analysis.h",
"src/compiler/wasm-inlining.h",
"src/compiler/wasm-loop-peeling.h",
"src/debug/debug-wasm-objects.cc", "src/debug/debug-wasm-objects.cc",
"src/debug/debug-wasm-objects.h", "src/debug/debug-wasm-objects.h",
"src/debug/debug-wasm-objects-inl.h", "src/debug/debug-wasm-objects-inl.h",
"src/runtime/runtime-test-wasm.cc", "src/runtime/runtime-test-wasm.cc",
"src/runtime/runtime-wasm.cc", "src/runtime/runtime-wasm.cc",
"src/third_party/utf8-decoder/generalized-utf8-decoder.h",
"src/trap-handler/handler-inside-posix.h",
"src/trap-handler/handler-inside.cc",
"src/trap-handler/handler-outside.cc",
"src/trap-handler/handler-shared.cc",
"src/trap-handler/trap-handler-internal.h",
"src/trap-handler/trap-handler.h",
"src/wasm/assembler-buffer-cache.cc",
"src/wasm/assembler-buffer-cache.h",
"src/wasm/baseline/liftoff-assembler.cc", "src/wasm/baseline/liftoff-assembler.cc",
"src/wasm/baseline/liftoff-assembler-defs.h", "src/wasm/baseline/liftoff-assembler-defs.h",
"src/wasm/baseline/liftoff-assembler.h", "src/wasm/baseline/liftoff-assembler.h",
@ -2442,6 +2498,10 @@ filegroup(
"src/wasm/code-space-access.cc", "src/wasm/code-space-access.cc",
"src/wasm/code-space-access.h", "src/wasm/code-space-access.h",
"src/wasm/compilation-environment.h", "src/wasm/compilation-environment.h",
"src/wasm/constant-expression.cc",
"src/wasm/constant-expression.h",
"src/wasm/constant-expression-interface.cc",
"src/wasm/constant-expression-interface.h",
"src/wasm/decoder.h", "src/wasm/decoder.h",
"src/wasm/function-body-decoder.cc", "src/wasm/function-body-decoder.cc",
"src/wasm/function-body-decoder.h", "src/wasm/function-body-decoder.h",
@ -2450,31 +2510,33 @@ filegroup(
"src/wasm/function-compiler.h", "src/wasm/function-compiler.h",
"src/wasm/graph-builder-interface.cc", "src/wasm/graph-builder-interface.cc",
"src/wasm/graph-builder-interface.h", "src/wasm/graph-builder-interface.h",
"src/wasm/init-expr-interface.cc",
"src/wasm/init-expr-interface.h",
"src/wasm/jump-table-assembler.cc", "src/wasm/jump-table-assembler.cc",
"src/wasm/jump-table-assembler.h", "src/wasm/jump-table-assembler.h",
"src/wasm/leb-helper.h", "src/wasm/leb-helper.h",
"src/wasm/local-decl-encoder.cc", "src/wasm/local-decl-encoder.cc",
"src/wasm/local-decl-encoder.h", "src/wasm/local-decl-encoder.h",
"src/wasm/memory-protection-key.cc",
"src/wasm/memory-protection-key.h",
"src/wasm/memory-tracing.cc", "src/wasm/memory-tracing.cc",
"src/wasm/memory-tracing.h", "src/wasm/memory-tracing.h",
"src/wasm/module-compiler.cc", "src/wasm/module-compiler.cc",
"src/wasm/module-compiler.h", "src/wasm/module-compiler.h",
"src/wasm/module-decoder.cc", "src/wasm/module-decoder.cc",
"src/wasm/module-decoder.h", "src/wasm/module-decoder.h",
"src/wasm/module-decoder-impl.h",
"src/wasm/module-instantiate.cc", "src/wasm/module-instantiate.cc",
"src/wasm/module-instantiate.h", "src/wasm/module-instantiate.h",
"src/wasm/names-provider.cc",
"src/wasm/names-provider.h",
"src/wasm/object-access.h", "src/wasm/object-access.h",
"src/wasm/signature-map.cc", "src/wasm/signature-map.cc",
"src/wasm/signature-map.h", "src/wasm/signature-map.h",
"src/wasm/simd-shuffle.cc", "src/wasm/simd-shuffle.cc",
"src/wasm/simd-shuffle.h", "src/wasm/simd-shuffle.h",
"src/wasm/stacks.cc",
"src/wasm/stacks.h", "src/wasm/stacks.h",
"src/wasm/streaming-decoder.cc", "src/wasm/streaming-decoder.cc",
"src/wasm/streaming-decoder.h", "src/wasm/streaming-decoder.h",
"src/wasm/string-builder.h",
"src/wasm/string-builder-multiline.h",
"src/wasm/struct-types.h", "src/wasm/struct-types.h",
"src/wasm/sync-streaming-decoder.cc", "src/wasm/sync-streaming-decoder.cc",
"src/wasm/value-type.cc", "src/wasm/value-type.cc",
@ -2484,6 +2546,9 @@ filegroup(
"src/wasm/wasm-code-manager.h", "src/wasm/wasm-code-manager.h",
"src/wasm/wasm-debug.cc", "src/wasm/wasm-debug.cc",
"src/wasm/wasm-debug.h", "src/wasm/wasm-debug.h",
"src/wasm/wasm-disassembler.cc",
"src/wasm/wasm-disassembler.h",
"src/wasm/wasm-disassembler-impl.h",
"src/wasm/wasm-engine.cc", "src/wasm/wasm-engine.cc",
"src/wasm/wasm-engine.h", "src/wasm/wasm-engine.h",
"src/wasm/wasm-external-refs.cc", "src/wasm/wasm-external-refs.cc",
@ -2646,6 +2711,7 @@ filegroup(
"src/compiler/control-equivalence.h", "src/compiler/control-equivalence.h",
"src/compiler/control-flow-optimizer.cc", "src/compiler/control-flow-optimizer.cc",
"src/compiler/control-flow-optimizer.h", "src/compiler/control-flow-optimizer.h",
"src/compiler/control-path-state.h",
"src/compiler/csa-load-elimination.cc", "src/compiler/csa-load-elimination.cc",
"src/compiler/csa-load-elimination.h", "src/compiler/csa-load-elimination.h",
"src/compiler/dead-code-elimination.cc", "src/compiler/dead-code-elimination.cc",
@ -2708,6 +2774,8 @@ filegroup(
"src/compiler/js-type-hint-lowering.h", "src/compiler/js-type-hint-lowering.h",
"src/compiler/js-typed-lowering.cc", "src/compiler/js-typed-lowering.cc",
"src/compiler/js-typed-lowering.h", "src/compiler/js-typed-lowering.h",
"src/compiler/late-escape-analysis.cc",
"src/compiler/late-escape-analysis.h",
"src/compiler/linkage.cc", "src/compiler/linkage.cc",
"src/compiler/linkage.h", "src/compiler/linkage.h",
"src/compiler/load-elimination.cc", "src/compiler/load-elimination.cc",
@ -2793,6 +2861,26 @@ filegroup(
"src/compiler/state-values-utils.h", "src/compiler/state-values-utils.h",
"src/compiler/store-store-elimination.cc", "src/compiler/store-store-elimination.cc",
"src/compiler/store-store-elimination.h", "src/compiler/store-store-elimination.h",
"src/compiler/turboshaft/assembler.h",
"src/compiler/turboshaft/decompression-optimization.cc",
"src/compiler/turboshaft/decompression-optimization.h",
"src/compiler/turboshaft/deopt-data.h",
"src/compiler/turboshaft/fast-hash.h",
"src/compiler/turboshaft/graph-builder.cc",
"src/compiler/turboshaft/graph-builder.h",
"src/compiler/turboshaft/graph.cc",
"src/compiler/turboshaft/graph.h",
"src/compiler/turboshaft/graph-visualizer.cc",
"src/compiler/turboshaft/graph-visualizer.h",
"src/compiler/turboshaft/operations.cc",
"src/compiler/turboshaft/operations.h",
"src/compiler/turboshaft/optimization-phase.cc",
"src/compiler/turboshaft/optimization-phase.h",
"src/compiler/turboshaft/recreate-schedule.cc",
"src/compiler/turboshaft/recreate-schedule.h",
"src/compiler/turboshaft/sidetable.h",
"src/compiler/turboshaft/utils.h",
"src/compiler/turboshaft/value-numbering-assembler.h",
"src/compiler/type-cache.cc", "src/compiler/type-cache.cc",
"src/compiler/type-cache.h", "src/compiler/type-cache.h",
"src/compiler/type-narrowing-reducer.cc", "src/compiler/type-narrowing-reducer.cc",
@ -2813,10 +2901,24 @@ filegroup(
] + select({ ] + select({
":is_v8_enable_webassembly": [ ":is_v8_enable_webassembly": [
"src/compiler/int64-lowering.cc", "src/compiler/int64-lowering.cc",
"src/compiler/int64-lowering.h",
"src/compiler/wasm-compiler-definitions.h",
"src/compiler/wasm-compiler.cc", "src/compiler/wasm-compiler.cc",
"src/compiler/wasm-loop-peeling.cc", "src/compiler/wasm-compiler.h",
"src/compiler/wasm-escape-analysis.cc", "src/compiler/wasm-escape-analysis.cc",
"src/compiler/wasm-escape-analysis.h",
"src/compiler/wasm-loop-peeling.cc",
"src/compiler/wasm-loop-peeling.h",
"src/compiler/wasm-gc-lowering.cc",
"src/compiler/wasm-gc-lowering.h",
"src/compiler/wasm-gc-operator-reducer.cc",
"src/compiler/wasm-gc-operator-reducer.h",
"src/compiler/wasm-graph-assembler.cc",
"src/compiler/wasm-graph-assembler.h",
"src/compiler/wasm-inlining.cc", "src/compiler/wasm-inlining.cc",
"src/compiler/wasm-inlining.h",
"src/compiler/wasm-typer.cc",
"src/compiler/wasm-typer.h",
], ],
"//conditions:default": [], "//conditions:default": [],
}), }),
@ -2863,7 +2965,7 @@ filegroup(
"src/builtins/builtins-proxy-gen.h", "src/builtins/builtins-proxy-gen.h",
"src/builtins/builtins-regexp-gen.cc", "src/builtins/builtins-regexp-gen.cc",
"src/builtins/builtins-regexp-gen.h", "src/builtins/builtins-regexp-gen.h",
"src/builtins/builtins-shadowrealm-gen.cc", "src/builtins/builtins-shadow-realm-gen.cc",
"src/builtins/builtins-sharedarraybuffer-gen.cc", "src/builtins/builtins-sharedarraybuffer-gen.cc",
"src/builtins/builtins-string-gen.cc", "src/builtins/builtins-string-gen.cc",
"src/builtins/builtins-string-gen.h", "src/builtins/builtins-string-gen.h",
@ -2966,6 +3068,8 @@ filegroup(
"src/heap/cppgc/marking-visitor.h", "src/heap/cppgc/marking-visitor.h",
"src/heap/cppgc/marking-worklists.cc", "src/heap/cppgc/marking-worklists.cc",
"src/heap/cppgc/marking-worklists.h", "src/heap/cppgc/marking-worklists.h",
"src/heap/cppgc/member-storage.cc",
"src/heap/cppgc/member-storage.h",
"src/heap/cppgc/memory.cc", "src/heap/cppgc/memory.cc",
"src/heap/cppgc/memory.h", "src/heap/cppgc/memory.h",
"src/heap/cppgc/metric-recorder.h", "src/heap/cppgc/metric-recorder.h",
@ -3030,8 +3134,8 @@ filegroup(
"@v8//bazel/config:is_inline_asm_s390x": ["src/heap/base/asm/s390/push_registers_asm.cc"], "@v8//bazel/config:is_inline_asm_s390x": ["src/heap/base/asm/s390/push_registers_asm.cc"],
"@v8//bazel/config:is_inline_asm_riscv64": ["src/heap/base/asm/riscv64/push_registers_asm.cc"], "@v8//bazel/config:is_inline_asm_riscv64": ["src/heap/base/asm/riscv64/push_registers_asm.cc"],
"@v8//bazel/config:is_inline_asm_ppc64le": ["src/heap/base/asm/ppc/push_registers_asm.cc"], "@v8//bazel/config:is_inline_asm_ppc64le": ["src/heap/base/asm/ppc/push_registers_asm.cc"],
"@v8//bazel/config:is_msvc_asm_ia32": ["src/heap/base/asm/ia32/push_registers_masm.S"], "@v8//bazel/config:is_msvc_asm_ia32": ["src/heap/base/asm/ia32/push_registers_masm.asm"],
"@v8//bazel/config:is_msvc_asm_x64": ["src/heap/base/asm/x64/push_registers_masm.S"], "@v8//bazel/config:is_msvc_asm_x64": ["src/heap/base/asm/x64/push_registers_masm.asm"],
"@v8//bazel/config:is_msvc_asm_arm64": ["src/heap/base/asm/arm64/push_registers_masm.S"], "@v8//bazel/config:is_msvc_asm_arm64": ["src/heap/base/asm/arm64/push_registers_masm.S"],
}), }),
) )
@ -3136,6 +3240,8 @@ filegroup(
"src/inspector/v8-string-conversions.h", "src/inspector/v8-string-conversions.h",
"src/inspector/v8-value-utils.cc", "src/inspector/v8-value-utils.cc",
"src/inspector/v8-value-utils.h", "src/inspector/v8-value-utils.h",
"src/inspector/v8-webdriver-serializer.cc",
"src/inspector/v8-webdriver-serializer.h",
"src/inspector/value-mirror.cc", "src/inspector/value-mirror.cc",
"src/inspector/value-mirror.h", "src/inspector/value-mirror.h",
":crdtp_platform_files", ":crdtp_platform_files",
@ -3285,8 +3391,6 @@ py_binary(
"third_party/inspector_protocol/lib/ValueConversions_h.template", "third_party/inspector_protocol/lib/ValueConversions_h.template",
"third_party/inspector_protocol/lib/Values_cpp.template", "third_party/inspector_protocol/lib/Values_cpp.template",
"third_party/inspector_protocol/lib/Values_h.template", "third_party/inspector_protocol/lib/Values_h.template",
"third_party/inspector_protocol/lib/base_string_adapter_cc.template",
"third_party/inspector_protocol/lib/base_string_adapter_h.template",
"third_party/inspector_protocol/templates/Exported_h.template", "third_party/inspector_protocol/templates/Exported_h.template",
"third_party/inspector_protocol/templates/Imported_h.template", "third_party/inspector_protocol/templates/Imported_h.template",
"third_party/inspector_protocol/templates/TypeBuilder_cpp.template", "third_party/inspector_protocol/templates/TypeBuilder_cpp.template",
@ -3479,8 +3583,6 @@ v8_library(
deps = [ deps = [
":v8_libbase", ":v8_libbase",
"//external:base_trace_event_common", "//external:base_trace_event_common",
"//external:zlib",
"//external:zlib_compression_utils",
], ],
) )
@ -3596,7 +3698,7 @@ v8_binary_non_pointer_compression(
alias( alias(
name = "v8ci", name = "v8ci",
actual = "icu/v8", actual = "noicu/v8",
) )
# ================================================= # =================================================

deps/v8/BUILD.gn (vendored): 753 lines changed (diff suppressed because it is too large)

deps/v8/DEPS (vendored): 166 lines changed

@@ -12,17 +12,20 @@ vars = {
-# Fetches only the SDK boot images which match at least one of the whitelist
 # entries in a comma-separated list.
 #
-# Only the X64 and ARM64 QEMU images are downloaded by default. Developers
-# that need to boot on other target architectures or devices can opt to
-# download more boot images. Example of images include:
+# Available images:
+# Emulation:
+# - qemu.x64 (pulls terminal.qemu-x64-release)
+# - qemu.arm64 (pulls terminal.qemu-arm64-release)
+# - workstation.qemu-x64-release
+# Hardware:
+# - generic.x64 (pulls terminal.x64-debug)
+# - generic.arm64 (pulls terminal.arm64-debug)
+# - chromebook.x64 (pulls terminal.chromebook-x64-debug)
 #
-# Emulation:
-# qemu.x64, qemu.arm64
-# Hardware:
-# generic.x64, generic.arm64
-#
-# Wildcards are supported (e.g. "qemu.*").
-'checkout_fuchsia_boot_images': "qemu.x64,qemu.arm64",
+# Since the images are hundreds of MB, default to only downloading the image
+# most commonly useful for developers. Bots and developers that need to use
+# other images (e.g., qemu.arm64) can override this with additional images.
+'checkout_fuchsia_boot_images': "qemu.x64",

 'checkout_instrumented_libraries': False,
 'checkout_ittapi': False,
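
Note on the variable changed above: in a gclient-managed checkout, DEPS vars like checkout_fuchsia_boot_images are normally overridden from the developer's .gclient file rather than by editing DEPS. A minimal sketch, assuming a standard gclient solution layout (the solution name and URL below are illustrative, not taken from this commit):

    # .gclient (illustrative sketch)
    solutions = [
      {
        "name": "v8",
        "url": "https://chromium.googlesource.com/v8/v8.git",
        "custom_vars": {
          # Also fetch the arm64 emulator image in addition to the x64 default.
          "checkout_fuchsia_boot_images": "qemu.x64,qemu.arm64",
        },
      },
    ]

Running gclient sync afterwards would pull the extra image, provided the Fuchsia checkout is enabled for that solution.
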
@@ -36,27 +39,35 @@ vars = {
 'check_v8_header_includes': False,
 'checkout_reclient': False,
+# By default, download the fuchsia sdk from the public sdk directory.
+'fuchsia_sdk_cipd_prefix': 'fuchsia/sdk/gn/',
 # reclient CIPD package version
-'reclient_version': 're_client_version:0.40.0.40ff5a5',
+'reclient_version': 're_client_version:0.69.0.458df98-gomaip',
 # GN CIPD package version.
-'gn_version': 'git_revision:ae110f8b525009255ba1f9ae96982176d3bfad3d',
+'gn_version': 'git_revision:b4851eb2062f76a880c07f7fa0d12913beb6d79e',
 # luci-go CIPD package version.
-'luci_go': 'git_revision:6da0608e4fa8a3c6d1fa4f855485c0038b05bf72',
+'luci_go': 'git_revision:c93fd3c5ebdc3999eea86a7623dbd1ed4b40bc78',
+# Three lines of non-changing comments so that
+# the commit queue can handle CLs rolling Fuchsia sdk
+# and whatever else without interference from each other.
+'fuchsia_version': 'version:9.20220913.3.1',
 # Three lines of non-changing comments so that
 # the commit queue can handle CLs rolling android_sdk_build-tools_version
 # and whatever else without interference from each other.
-'android_sdk_build-tools_version': 'tRoD45SCi7UleQqSV7MrMQO1_e5P8ysphkCcj6z_cCQC',
+'android_sdk_build-tools_version': '-VRKr36Uw8L_iFqqo9nevIBgNMggND5iWxjidyjnCgsC',
 # Three lines of non-changing comments so that
 # the commit queue can handle CLs rolling android_sdk_emulator_version
 # and whatever else without interference from each other.
-'android_sdk_emulator_version': 'gMHhUuoQRKfxr-MBn3fNNXZtkAVXtOwMwT7kfx8jkIgC',
+'android_sdk_emulator_version': '9lGp8nTUCRRWGMnI_96HcKfzjnxEJKUcfvfwmA3wXNkC',
 # Three lines of non-changing comments so that
 # the commit queue can handle CLs rolling android_sdk_extras_version
 # and whatever else without interference from each other.
-'android_sdk_extras_version': 'ppQ4TnqDvBHQ3lXx5KPq97egzF5X2FFyOrVHkGmiTMQC',
+'android_sdk_extras_version': 'bY55nDqO6FAm6FkGIj09sh2KW9oqAkCGKjYok5nUvBMC',
 # Three lines of non-changing comments so that
 # the commit queue can handle CLs rolling android_sdk_patcher_version
 # and whatever else without interference from each other.
@@ -64,39 +75,39 @@ vars = {
 # Three lines of non-changing comments so that
 # the commit queue can handle CLs rolling android_sdk_platform-tools_version
 # and whatever else without interference from each other.
-'android_sdk_platform-tools_version': 'g7n_-r6yJd_SGRklujGB1wEt8iyr77FZTUJVS9w6O34C',
+'android_sdk_platform-tools_version': 'RSI3iwryh7URLGRgJHsCvUxj092woTPnKt4pwFcJ6L8C',
 # Three lines of non-changing comments so that
 # the commit queue can handle CLs rolling android_sdk_platforms_version
 # and whatever else without interference from each other.
-'android_sdk_platforms_version': 'lL3IGexKjYlwjO_1Ga-xwxgwbE_w-lmi2Zi1uOlWUIAC',
+'android_sdk_platforms_version': 'eo5KvW6UVor92LwZai8Zulc624BQZoCu-yn7wa1z_YcC',
 # Three lines of non-changing comments so that
 # the commit queue can handle CLs rolling android_sdk_sources_version
 # and whatever else without interference from each other.
-'android_sdk_sources_version': '7EcXjyZWkTu3sCA8d8eRXg_aCBCYt8ihXgxp29VXLs8C',
+'android_sdk_sources_version': 'qfTSF99e29-w3eIVPpfcif0Em5etyvxuicTDTntWHQMC',
 # Three lines of non-changing comments so that
 # the commit queue can handle CLs rolling android_sdk_tools-lint_version
 # and whatever else without interference from each other.
-'android_sdk_cmdline-tools_version': 'PGPmqJtSIQ84If155ba7iTU846h5WJ-bL5d_OoUWEWYC',
+'android_sdk_cmdline-tools_version': 'IPzAG-uU5zVMxohpg9-7-N0tQC1TCSW1VbrBFw7Ld04C',
 }

 deps = {
 'base/trace_event/common':
-Var('chromium_url') + '/chromium/src/base/trace_event/common.git' + '@' + 'd115b033c4e53666b535cbd1985ffe60badad082',
+Var('chromium_url') + '/chromium/src/base/trace_event/common.git' + '@' + '521ac34ebd795939c7e16b37d9d3ddb40e8ed556',
 'build':
-Var('chromium_url') + '/chromium/src/build.git' + '@' + 'b37c340767cf9e7777d4ca5a588c34c5744df9b2',
+Var('chromium_url') + '/chromium/src/build.git' + '@' + '4157fb6cb44135013300168c9f4c5b95d04acf70',
 'buildtools':
-Var('chromium_url') + '/chromium/src/buildtools.git' + '@' + 'c2e4795660817c2776dbabd778b92ed58c074032',
+Var('chromium_url') + '/chromium/src/buildtools.git' + '@' + 'e713c13e2fa3b7aa9131276f27990011e1aa6a73',
 'buildtools/clang_format/script':
-Var('chromium_url') + '/external/github.com/llvm/llvm-project/clang/tools/clang-format.git' + '@' + 'e435ad79c17b1888b34df88d6a30a094936e3836',
+Var('chromium_url') + '/external/github.com/llvm/llvm-project/clang/tools/clang-format.git' + '@' + '8b525d2747f2584fc35d8c7e612e66f377858df7',
 'buildtools/linux64': {
 'packages': [
 {
-'package': 'gn/gn/linux-amd64',
+'package': 'gn/gn/linux-${{arch}}',
 'version': Var('gn_version'),
 }
 ],
 'dep_type': 'cipd',
-'condition': 'host_os == "linux"',
+'condition': 'host_os == "linux" and host_cpu != "s390" and host_cpu != "ppc"',
 },
 'buildtools/mac': {
 'packages': [
@@ -109,11 +120,11 @@ deps = {
 'condition': 'host_os == "mac"',
 },
 'buildtools/third_party/libc++/trunk':
-Var('chromium_url') + '/external/github.com/llvm/llvm-project/libcxx.git' + '@' + '79a2e924d96e2fc1e4b937c42efd08898fa472d7',
+Var('chromium_url') + '/external/github.com/llvm/llvm-project/libcxx.git' + '@' + 'c1e647c7c30238f7c512457eec55798e3458fd8a',
 'buildtools/third_party/libc++abi/trunk':
-Var('chromium_url') + '/external/github.com/llvm/llvm-project/libcxxabi.git' + '@' + 'e025ba5dc85202540099d7cd8e72eae2d4ee9e33',
+Var('chromium_url') + '/external/github.com/llvm/llvm-project/libcxxabi.git' + '@' + '5c3e02e92ae8bbc1bf1001bd9ef0d76e044ddb86',
 'buildtools/third_party/libunwind/trunk':
-Var('chromium_url') + '/external/github.com/llvm/llvm-project/libunwind.git' + '@' + 'c39fea88739be63a2d5590a938ce19d762b915fc',
+Var('chromium_url') + '/external/github.com/llvm/llvm-project/libunwind.git' + '@' + '60a480ee1819266cf8054548454f99838583cd76',
 'buildtools/win': {
 'packages': [
 {
@@ -132,46 +143,26 @@ deps = {
 }
 ],
 'dep_type': 'cipd',
-'condition': '(host_os == "linux" or host_os == "win") and checkout_reclient',
+'condition': '(host_os == "linux" or host_os == "mac" or host_os == "win") and checkout_reclient',
 },
 'test/benchmarks/data':
 Var('chromium_url') + '/v8/deps/third_party/benchmarks.git' + '@' + '05d7188267b4560491ff9155c5ee13e207ecd65f',
 'test/mozilla/data':
 Var('chromium_url') + '/v8/deps/third_party/mozilla-tests.git' + '@' + 'f6c578a10ea707b1a8ab0b88943fe5115ce2b9be',
 'test/test262/data':
-Var('chromium_url') + '/external/github.com/tc39/test262.git' + '@' + 'd7c0a2076c2b0c1531aef7069d4abe70eec44ee3',
+Var('chromium_url') + '/external/github.com/tc39/test262.git' + '@' + '746197355c1705b7d4463fc75c29433c0ce2fd0d',
-'third_party/aemu-linux-x64': {
-'packages': [
-{
-'package': 'fuchsia/third_party/aemu/linux-amd64',
-'version': 'vRCm89BzABss-_H8vC-tLjcSf6uusZA9IBSSYtdw4_kC'
-},
-],
-'condition': 'host_os == "linux" and checkout_fuchsia',
-'dep_type': 'cipd',
-},
-'third_party/aemu-mac-x64': {
-'packages': [
-{
-'package': 'fuchsia/third_party/aemu/mac-amd64',
-'version': 'T9bWxf8aUC5TwCFgPxpuW29Mfy-7Z9xCfXB9QO8MfU0C'
-},
-],
-'condition': 'host_os == "mac" and checkout_fuchsia',
-'dep_type': 'cipd',
-},
 'third_party/android_ndk': {
 'url': Var('chromium_url') + '/android_ndk.git' + '@' + '8388a2be5421311dc75c5f937aae13d821a27f3d',
 'condition': 'checkout_android',
 },
 'third_party/android_platform': {
-'url': Var('chromium_url') + '/chromium/src/third_party/android_platform.git' + '@' + '2760db43ffc8b074cb7960c90b5254f74a5c299a',
+'url': Var('chromium_url') + '/chromium/src/third_party/android_platform.git' + '@' + '04b33506bfd9d0e866bd8bd62f4cbf323d84dc79',
 'condition': 'checkout_android',
 },
 'third_party/android_sdk/public': {
 'packages': [
 {
-'package': 'chromium/third_party/android_sdk/public/build-tools/31.0.0',
+'package': 'chromium/third_party/android_sdk/public/build-tools/33.0.0',
 'version': Var('android_sdk_build-tools_version'),
 },
 {
@@ -191,7 +182,7 @@ deps = {
 'version': Var('android_sdk_platform-tools_version'),
 },
 {
-'package': 'chromium/third_party/android_sdk/public/platforms/android-31',
+'package': 'chromium/third_party/android_sdk/public/platforms/android-33',
 'version': Var('android_sdk_platforms_version'),
 },
 {
@@ -207,7 +198,7 @@ deps = {
 'dep_type': 'cipd',
 },
 'third_party/catapult': {
-'url': Var('chromium_url') + '/catapult.git' + '@' + '3a1ae18f882d024686144edbec3050aae055f146',
+'url': Var('chromium_url') + '/catapult.git' + '@' + '37391a1619e953e23d3441dbc61e658e881fede4',
 'condition': 'checkout_android',
 },
 'third_party/colorama/src': {
@@ -215,18 +206,24 @@ deps = {
 'condition': 'checkout_android',
 },
 'third_party/depot_tools':
-Var('chromium_url') + '/chromium/tools/depot_tools.git' + '@' + '3b97fa826eee4bd1978c4c049038b1e4f201e8f2',
+Var('chromium_url') + '/chromium/tools/depot_tools.git' + '@' + '9ebcfa6be17c2d1e7bd72135ceab5e767ed89b7d',
-'third_party/fuchsia-sdk': {
-'url': Var('chromium_url') + '/chromium/src/third_party/fuchsia-sdk.git' + '@' + '7c9c220d13ab367d49420144a257886ebfbce278',
+'third_party/fuchsia-sdk/sdk': {
+'packages': [
+{
+'package': Var('fuchsia_sdk_cipd_prefix') + '${{platform}}',
+'version': Var('fuchsia_version'),
+},
+],
 'condition': 'checkout_fuchsia',
+'dep_type': 'cipd',
 },
 'third_party/google_benchmark/src': {
-'url': Var('chromium_url') + '/external/github.com/google/benchmark.git' + '@' + 'dc901ff9090e2b931433790cc44afc3af3b09ab2',
+'url': Var('chromium_url') + '/external/github.com/google/benchmark.git' + '@' + 'e8baf2622591569a27615b31372d1e9cc046af10',
 },
 'third_party/googletest/src':
 Var('chromium_url') + '/external/github.com/google/googletest.git' + '@' + 'af29db7ec28d6df1c7f0f745186884091e602e07',
 'third_party/icu':
-Var('chromium_url') + '/chromium/deps/icu.git' + '@' + '1fd0dbea04448c3f73fe5cb7599f9472f0f107f1',
+Var('chromium_url') + '/chromium/deps/icu.git' + '@' + '20f8ac695af59b6c830def7d4e95bfeb13dd7be5',
 'third_party/instrumented_libraries':
 Var('chromium_url') + '/chromium/src/third_party/instrumented_libraries.git' + '@' + 'e09c4b66b6e87116eb190651421f1a6e2f3b9c52',
 'third_party/ittapi': {
@@ -238,43 +235,23 @@ deps = {
 'third_party/jinja2':
 Var('chromium_url') + '/chromium/src/third_party/jinja2.git' + '@' + 'ee69aa00ee8536f61db6a451f3858745cf587de6',
 'third_party/jsoncpp/source':
-Var('chromium_url') + '/external/github.com/open-source-parsers/jsoncpp.git'+ '@' + '9059f5cad030ba11d37818847443a53918c327b1',
+Var('chromium_url') + '/external/github.com/open-source-parsers/jsoncpp.git'+ '@' + '42e892d96e47b1f6e29844cc705e148ec4856448',
 'third_party/logdog/logdog':
 Var('chromium_url') + '/infra/luci/luci-py/client/libs/logdog' + '@' + '0b2078a90f7a638d576b3a7c407d136f2fb62399',
 'third_party/markupsafe':
 Var('chromium_url') + '/chromium/src/third_party/markupsafe.git' + '@' + '1b882ef6372b58bfd55a3285f37ed801be9137cd',
 'third_party/perfetto':
-Var('android_url') + '/platform/external/perfetto.git' + '@' + 'aa4385bc5997ecad4c633885e1b331b1115012fb',
+Var('android_url') + '/platform/external/perfetto.git' + '@' + '0eba417b2c72264fa825dc21067b9adc9b8adf70',
 'third_party/protobuf':
 Var('chromium_url') + '/external/github.com/google/protobuf'+ '@' + '6a59a2ad1f61d9696092f79b6d74368b4d7970a3',
-'third_party/qemu-linux-x64': {
-'packages': [
-{
-'package': 'fuchsia/qemu/linux-amd64',
-'version': '9cc486c5b18a0be515c39a280ca9a309c54cf994'
-},
-],
-'condition': 'host_os == "linux" and checkout_fuchsia',
-'dep_type': 'cipd',
-},
-'third_party/qemu-mac-x64': {
-'packages': [
-{
-'package': 'fuchsia/qemu/mac-amd64',
-'version': '2d3358ae9a569b2d4a474f498b32b202a152134f'
-},
-],
-'condition': 'host_os == "mac" and checkout_fuchsia',
-'dep_type': 'cipd',
-},
 'third_party/requests': {
-'url': Var('chromium_url') + '/external/github.com/kennethreitz/requests.git' + '@' + '2c2138e811487b13020eb331482fb991fd399d4e',
+'url': Var('chromium_url') + '/external/github.com/kennethreitz/requests.git' + '@' + 'refs/tags/v2.23.0',
 'condition': 'checkout_android',
 },
 'third_party/zlib':
-Var('chromium_url') + '/chromium/src/third_party/zlib.git'+ '@' + 'a6d209ab932df0f1c9d5b7dc67cfa74e8a3272c0',
+Var('chromium_url') + '/chromium/src/third_party/zlib.git'+ '@' + 'f48cb14d487038d20c85680e29351e095a0fea8b',
 'tools/clang':
-Var('chromium_url') + '/chromium/src/tools/clang.git' + '@' + 'b5e2f7c16bbf3aefc9354e8fbad3de0a543f2193',
+Var('chromium_url') + '/chromium/src/tools/clang.git' + '@' + '02a202a7b1fa863352c0c9fb088fd3c0cf48c978',
 'tools/luci-go': {
 'packages': [
 {
@@ -580,15 +557,6 @@ hooks = [
 'action': ['python3', 'build/util/lastchange.py',
 '-o', 'build/util/LASTCHANGE'],
 },
-{
-'name': 'Download Fuchsia SDK',
-'pattern': '.',
-'condition': 'checkout_fuchsia',
-'action': [
-'python3',
-'build/fuchsia/update_sdk.py',
-],
-},
 {
 'name': 'Download Fuchsia system images',
 'pattern': '.',
@ -634,4 +602,14 @@ hooks = [
'tools/generate-header-include-checks.py', 'tools/generate-header-include-checks.py',
], ],
}, },
{
# Clean up build dirs for crbug.com/1337238.
# After a libc++ roll and revert, .ninja_deps would get into a state
# that breaks Ninja on Windows.
# TODO(crbug.com/1337238): Remove in a month or so.
'name': 'del_ninja_deps_cache',
'pattern': '.',
'condition': 'host_os == "win"',
'action': ['python3', 'build/del_ninja_deps_cache.py'],
},
] ]

View file

@ -6,3 +6,4 @@ adamk@chromium.org
danno@chromium.org danno@chromium.org
hpayer@chromium.org hpayer@chromium.org
verwaest@chromium.org verwaest@chromium.org
vahl@chromium.org

View file

@ -1,4 +1,5 @@
machenbach@chromium.org alexschulze@chromium.org
tmrts@chromium.org
almuthanna@chromium.org almuthanna@chromium.org
liviurau@chromium.org liviurau@chromium.org
machenbach@chromium.org
tmrts@chromium.org

3
deps/v8/LICENSE vendored
View file

@ -15,8 +15,7 @@ are:
- Strongtalk assembler, the basis of the files assembler-arm-inl.h, - Strongtalk assembler, the basis of the files assembler-arm-inl.h,
assembler-arm.cc, assembler-arm.h, assembler-ia32-inl.h, assembler-arm.cc, assembler-arm.h, assembler-ia32-inl.h,
assembler-ia32.cc, assembler-ia32.h, assembler-x64-inl.h, assembler-ia32.cc, assembler-ia32.h, assembler-x64-inl.h,
assembler-x64.cc, assembler-x64.h, assembler-mips-inl.h, assembler-x64.cc, assembler-x64.h, assembler.cc and assembler.h.
assembler-mips.cc, assembler-mips.h, assembler.cc and assembler.h.
This code is copyrighted by Sun Microsystems Inc. and released This code is copyrighted by Sun Microsystems Inc. and released
under a 3-clause BSD license. under a 3-clause BSD license.

17
deps/v8/OWNERS vendored
View file

@ -3,20 +3,21 @@
file:ENG_REVIEW_OWNERS file:ENG_REVIEW_OWNERS
per-file .*=file:INFRA_OWNERS per-file .*=file:INFRA_OWNERS
per-file .mailmap=file:COMMON_OWNERS
per-file .bazelrc=file:COMMON_OWNERS per-file .bazelrc=file:COMMON_OWNERS
per-file .mailmap=file:COMMON_OWNERS
per-file codereview.settings=file:INFRA_OWNERS
per-file AUTHORS=file:COMMON_OWNERS
per-file BUILD.bazel=file:COMMON_OWNERS per-file BUILD.bazel=file:COMMON_OWNERS
per-file BUILD.gn=file:COMMON_OWNERS per-file BUILD.gn=file:COMMON_OWNERS
per-file WORKSPACE=file:COMMON_OWNERS
per-file DEPS=file:INFRA_OWNERS per-file DEPS=file:INFRA_OWNERS
per-file INFRA_OWNERS=file:INFRA_OWNERS
per-file PRESUBMIT.py=file:INFRA_OWNERS
per-file WATCHLISTS=file:COMMON_OWNERS
per-file WORKSPACE=file:COMMON_OWNERS
# For Test262 rolls. # For Test262 rolls.
per-file DEPS=mathias@chromium.org per-file DEPS=mathias@chromium.org
per-file DEPS=syg@chromium.org per-file DEPS=syg@chromium.org
per-file PRESUBMIT.py=file:INFRA_OWNERS
per-file codereview.settings=file:INFRA_OWNERS
per-file AUTHORS=file:COMMON_OWNERS
per-file WATCHLISTS=file:COMMON_OWNERS
# Needed by the auto_tag builder # Needed by the auto_tag builder
per-file WATCHLISTS=v8-ci-autoroll-builder@chops-service-accounts.iam.gserviceaccount.com per-file WATCHLISTS=v8-ci-autoroll-builder@chops-service-accounts.iam.gserviceaccount.com
@ -26,5 +27,5 @@ per-file ...-loong64*=file:LOONG_OWNERS
per-file ...-mips*=file:MIPS_OWNERS per-file ...-mips*=file:MIPS_OWNERS
per-file ...-mips64*=file:MIPS_OWNERS per-file ...-mips64*=file:MIPS_OWNERS
per-file ...-ppc*=file:PPC_OWNERS per-file ...-ppc*=file:PPC_OWNERS
per-file ...-riscv64*=file:RISCV_OWNERS per-file ...-riscv*=file:RISCV_OWNERS
per-file ...-s390*=file:S390_OWNERS per-file ...-s390*=file:S390_OWNERS

14
deps/v8/PRESUBMIT.py vendored
View file

@ -486,14 +486,18 @@ def _CheckNoexceptAnnotations(input_api, output_api):
""" """
def FilterFile(affected_file): def FilterFile(affected_file):
return input_api.FilterSourceFile( files_to_skip = _EXCLUDED_PATHS + (
affected_file,
files_to_check=(r'src[\\\/].*', r'test[\\\/].*'),
# Skip api.cc since we cannot easily add the 'noexcept' annotation to # Skip api.cc since we cannot easily add the 'noexcept' annotation to
# public methods. # public methods.
r'src[\\\/]api[\\\/]api\.cc',
# Skip src/bigint/ because it's meant to be V8-independent. # Skip src/bigint/ because it's meant to be V8-independent.
files_to_skip=(r'src[\\\/]api[\\\/]api\.cc', r'src[\\\/]bigint[\\\/].*',
r'src[\\\/]bigint[\\\/].*')) )
return input_api.FilterSourceFile(
affected_file,
files_to_check=(r'src[\\\/].*\.cc', r'src[\\\/].*\.h',
r'test[\\\/].*\.cc', r'test[\\\/].*\.h'),
files_to_skip=files_to_skip)
# matches any class name. # matches any class name.
class_name = r'\b([A-Z][A-Za-z0-9_:]*)(?:::\1)?' class_name = r'\b([A-Z][A-Za-z0-9_:]*)(?:::\1)?'

2
deps/v8/WATCHLISTS vendored
View file

@ -108,6 +108,7 @@
'jgruber+watch@chromium.org', 'jgruber+watch@chromium.org',
'leszeks+watch@chromium.org', 'leszeks+watch@chromium.org',
'verwaest+watch@chromium.org', 'verwaest+watch@chromium.org',
'victorgomes+watch@chromium.org',
], ],
'snapshot': [ 'snapshot': [
'jgruber+watch@chromium.org', 'jgruber+watch@chromium.org',
@ -129,6 +130,7 @@
'arm': [ 'arm': [
'v8-mips-ports@googlegroups.com', 'v8-mips-ports@googlegroups.com',
'v8-ppc-ports@googlegroups.com', 'v8-ppc-ports@googlegroups.com',
'v8-risc-v-ports@chromium.org',
], ],
'merges': [ 'merges': [
# Only enabled on branches created with tools/release/create_release.py # Only enabled on branches created with tools/release/create_release.py

16
deps/v8/WORKSPACE vendored
View file

@ -34,22 +34,6 @@ pip_install(
requirements = "//:bazel/requirements.txt", requirements = "//:bazel/requirements.txt",
) )
new_local_repository(
name = "com_googlesource_chromium_zlib",
build_file = "bazel/BUILD.zlib",
path = "third_party/zlib",
)
bind(
name = "zlib",
actual = "@com_googlesource_chromium_zlib//:zlib",
)
bind(
name = "zlib_compression_utils",
actual = "@com_googlesource_chromium_zlib//:zlib_compression_utils",
)
new_local_repository( new_local_repository(
name = "com_googlesource_chromium_icu", name = "com_googlesource_chromium_icu",
build_file = "bazel/BUILD.icu", build_file = "bazel/BUILD.icu",

View file

@ -1,4 +1,4 @@
// Copyright 2015 The Chromium Authors. All rights reserved. // Copyright 2015 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be // Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file. // found in the LICENSE file.
@ -404,6 +404,10 @@ struct BASE_EXPORT TraceTimestampTraits<::base::TimeTicks> {
// - |timestamp| must be non-null or it crashes. Use DCHECK(timestamp) before // - |timestamp| must be non-null or it crashes. Use DCHECK(timestamp) before
// calling this to detect an invalid timestamp even when tracing is not // calling this to detect an invalid timestamp even when tracing is not
// enabled, as the commit queue doesn't run all tests with tracing enabled. // enabled, as the commit queue doesn't run all tests with tracing enabled.
// Note: This legacy macro is deprecated. It should not be used in new code.
// If thread_id is different from the current thread id, it will result in a
// DCHECK failure. This note also applies to the `_COPY` and `_END`
// variants of this macro.
#define TRACE_EVENT_BEGIN_WITH_ID_TID_AND_TIMESTAMP0(category_group, name, id, \ #define TRACE_EVENT_BEGIN_WITH_ID_TID_AND_TIMESTAMP0(category_group, name, id, \
thread_id, timestamp) \ thread_id, timestamp) \
INTERNAL_TRACE_EVENT_ADD_WITH_ID_TID_AND_TIMESTAMP( \ INTERNAL_TRACE_EVENT_ADD_WITH_ID_TID_AND_TIMESTAMP( \
@ -1024,18 +1028,6 @@ struct BASE_EXPORT TraceTimestampTraits<::base::TimeTicks> {
TRACE_EVENT_PHASE_DELETE_OBJECT, category_group, name, id, \ TRACE_EVENT_PHASE_DELETE_OBJECT, category_group, name, id, \
TRACE_EVENT_FLAG_NONE) TRACE_EVENT_FLAG_NONE)
// Records entering and leaving trace event contexts. |category_group| and
// |name| specify the context category and type. |context| is a
// snapshotted context object id.
#define TRACE_EVENT_ENTER_CONTEXT(category_group, name, context) \
INTERNAL_TRACE_EVENT_ADD_WITH_ID( \
TRACE_EVENT_PHASE_ENTER_CONTEXT, category_group, name, context, \
TRACE_EVENT_FLAG_NONE)
#define TRACE_EVENT_LEAVE_CONTEXT(category_group, name, context) \
INTERNAL_TRACE_EVENT_ADD_WITH_ID( \
TRACE_EVENT_PHASE_LEAVE_CONTEXT, category_group, name, context, \
TRACE_EVENT_FLAG_NONE)
// Macro to efficiently determine if a given category group is enabled. // Macro to efficiently determine if a given category group is enabled.
#define TRACE_EVENT_CATEGORY_GROUP_ENABLED(category_group, ret) \ #define TRACE_EVENT_CATEGORY_GROUP_ENABLED(category_group, ret) \
do { \ do { \
@ -1099,12 +1091,15 @@ struct BASE_EXPORT TraceTimestampTraits<::base::TimeTicks> {
#define TRACE_EVENT_PHASE_MEMORY_DUMP ('v') #define TRACE_EVENT_PHASE_MEMORY_DUMP ('v')
#define TRACE_EVENT_PHASE_MARK ('R') #define TRACE_EVENT_PHASE_MARK ('R')
#define TRACE_EVENT_PHASE_CLOCK_SYNC ('c') #define TRACE_EVENT_PHASE_CLOCK_SYNC ('c')
#define TRACE_EVENT_PHASE_ENTER_CONTEXT ('(')
#define TRACE_EVENT_PHASE_LEAVE_CONTEXT (')')
// Flags for changing the behavior of TRACE_EVENT_API_ADD_TRACE_EVENT. // Flags for changing the behavior of TRACE_EVENT_API_ADD_TRACE_EVENT.
#define TRACE_EVENT_FLAG_NONE (static_cast<unsigned int>(0)) #define TRACE_EVENT_FLAG_NONE (static_cast<unsigned int>(0))
// Should not be used outside this file except in
// `trace_event_impl.cc` (implementation details).
// If used, it will result in a CHECK failure in the SDK build.
#define TRACE_EVENT_FLAG_COPY (static_cast<unsigned int>(1 << 0)) #define TRACE_EVENT_FLAG_COPY (static_cast<unsigned int>(1 << 0))
#define TRACE_EVENT_FLAG_HAS_ID (static_cast<unsigned int>(1 << 1)) #define TRACE_EVENT_FLAG_HAS_ID (static_cast<unsigned int>(1 << 1))
#define TRACE_EVENT_FLAG_SCOPE_OFFSET (static_cast<unsigned int>(1 << 2)) #define TRACE_EVENT_FLAG_SCOPE_OFFSET (static_cast<unsigned int>(1 << 2))
#define TRACE_EVENT_FLAG_SCOPE_EXTRA (static_cast<unsigned int>(1 << 3)) #define TRACE_EVENT_FLAG_SCOPE_EXTRA (static_cast<unsigned int>(1 << 3))

View file

@ -1,69 +0,0 @@
# Copyright 2021 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
cc_library(
name = "zlib",
srcs = [
"adler32.c",
"chromeconf.h",
"compress.c",
"contrib/optimizations/insert_string.h",
"contrib/optimizations/slide_hash_neon.h",
"cpu_features.c",
"cpu_features.h",
"crc32.c",
"crc32.h",
"deflate.c",
"deflate.h",
"gzclose.c",
"gzguts.h",
"gzlib.c",
"gzread.c",
"gzwrite.c",
"infback.c",
"inffast.c",
"inffast.h",
"inffixed.h",
"inflate.c",
"inflate.h",
"inftrees.c",
"inftrees.h",
"trees.c",
"trees.h",
"uncompr.c",
"zconf.h",
"zlib.h",
"zutil.c",
"zutil.h",
],
hdrs = [
"zlib.h",
],
defines = [
"CHROMIUM_ZLIB_NO_CHROMECONF",
"CPU_NO_SIMD",
] + select({
"@platforms//os:windows": [],
"//conditions:default": [
"HAVE_HIDDEN",
],
}),
include_prefix = "third_party/zlib",
visibility = ["//visibility:public"],
)
cc_library(
name = "zlib_compression_utils",
srcs = [
"google/compression_utils_portable.cc",
],
hdrs = [
"google/compression_utils_portable.h",
],
include_prefix = "third_party/zlib",
visibility = ["//visibility:public"],
deps = [
"//external:zlib",
],
)

View file

@ -290,6 +290,14 @@ selects.config_setting_group(
], ],
) )
selects.config_setting_group(
name = "is_macos_arm64",
match_all = [
":is_macos",
":is_arm64",
],
)
config_setting( config_setting(
name = "is_compiler_default", name = "is_compiler_default",
flag_values = { flag_values = {

View file

@ -493,7 +493,6 @@ def build_config_content(cpu, icu):
("target_cpu", cpu), ("target_cpu", cpu),
("v8_current_cpu", cpu), ("v8_current_cpu", cpu),
("v8_dict_property_const_tracking", "false"), ("v8_dict_property_const_tracking", "false"),
("v8_enable_atomic_marking_state", "false"),
("v8_enable_atomic_object_field_writes", "false"), ("v8_enable_atomic_object_field_writes", "false"),
("v8_enable_concurrent_marking", "false"), ("v8_enable_concurrent_marking", "false"),
("v8_enable_i18n_support", icu), ("v8_enable_i18n_support", icu),

2
deps/v8/gni/OWNERS vendored
View file

@ -1,5 +1,5 @@
file:../INFRA_OWNERS file:../INFRA_OWNERS
per-file v8.cmx=victorgomes@chromium.org per-file v8.cml=victorgomes@chromium.org
per-file release_branch_toggle.gni=v8-ci-autoroll-builder@chops-service-accounts.iam.gserviceaccount.com per-file release_branch_toggle.gni=v8-ci-autoroll-builder@chops-service-accounts.iam.gserviceaccount.com
per-file release_branch_toggle.gni=vahl@chromium.org per-file release_branch_toggle.gni=vahl@chromium.org

View file

@ -76,6 +76,17 @@ template("proto_library") {
out_dir = "$root_gen_dir/" + proto_out_dir out_dir = "$root_gen_dir/" + proto_out_dir
rel_out_dir = rebase_path(out_dir, root_build_dir) rel_out_dir = rebase_path(out_dir, root_build_dir)
# exclude_imports is only used for generating the descriptor. Therefore, the
# check needs to be here to avoid complaints from GN about the unused
# variable.
if (generate_descriptor != "") {
if (defined(invoker.exclude_imports)) {
exclude_imports = invoker.exclude_imports
} else {
exclude_imports = false
}
}
# Prevent unused errors when generating descriptor only. # Prevent unused errors when generating descriptor only.
if (generate_descriptor != "") { if (generate_descriptor != "") {
not_needed([ "rel_out_dir" ]) not_needed([ "rel_out_dir" ])
@ -163,8 +174,10 @@ template("proto_library") {
} }
if (generate_descriptor != "") { if (generate_descriptor != "") {
depfile = "$out_dir/$generate_descriptor.d" depfile = "$out_dir/$generate_descriptor.d"
if (!exclude_imports) {
args += [ "--include_imports" ]
}
args += [ args += [
"--include_imports",
"--descriptor_set_out", "--descriptor_set_out",
rebase_path("$out_dir/$generate_descriptor", root_build_dir), rebase_path("$out_dir/$generate_descriptor", root_build_dir),
"--dependency_out", "--dependency_out",

View file

@ -1,4 +1,4 @@
#!/usr/bin/env python #!/usr/bin/env python3
# Copyright 2021 the V8 project authors. All rights reserved. # Copyright 2021 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be # Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file. # found in the LICENSE file.

View file

@ -64,8 +64,7 @@ if (v8_snapshot_toolchain == "") {
current_cpu == "arm") { current_cpu == "arm") {
# Trying to compile 32-bit arm on arm64. Good luck! # Trying to compile 32-bit arm on arm64. Good luck!
v8_snapshot_toolchain = current_toolchain v8_snapshot_toolchain = current_toolchain
} else if (host_cpu == "x64" && } else if (host_cpu == "x64" && v8_current_cpu == "mips64") {
(v8_current_cpu == "mips" || v8_current_cpu == "mips64")) {
# We don't support snapshot generation for big-endian targets, # We don't support snapshot generation for big-endian targets,
# therefore snapshots will need to be built using native mksnapshot # therefore snapshots will need to be built using native mksnapshot
# in combination with qemu # in combination with qemu
@ -96,7 +95,7 @@ if (v8_snapshot_toolchain == "") {
} else { } else {
_cpus = "x64_v8_${v8_current_cpu}" _cpus = "x64_v8_${v8_current_cpu}"
} }
} else if (v8_current_cpu == "arm" || v8_current_cpu == "mipsel") { } else if (v8_current_cpu == "arm" || v8_current_cpu == "riscv32") {
_cpus = "x86_v8_${v8_current_cpu}" _cpus = "x86_v8_${v8_current_cpu}"
} else { } else {
# This branch should not be reached; leave _cpus blank so the assert # This branch should not be reached; leave _cpus blank so the assert
@ -121,7 +120,6 @@ assert(v8_snapshot_toolchain != "",
# avoid building v8_libbase on the host more than once. On mips with big endian, # avoid building v8_libbase on the host more than once. On mips with big endian,
# the snapshot toolchain is the target toolchain and, hence, can't be used. # the snapshot toolchain is the target toolchain and, hence, can't be used.
v8_generator_toolchain = v8_snapshot_toolchain v8_generator_toolchain = v8_snapshot_toolchain
if (host_cpu == "x64" && if (host_cpu == "x64" && v8_current_cpu == "mips64") {
(v8_current_cpu == "mips" || v8_current_cpu == "mips64")) {
v8_generator_toolchain = "//build/toolchain/linux:clang_x64" v8_generator_toolchain = "//build/toolchain/linux:clang_x64"
} }

21
deps/v8/gni/v8.cml vendored Normal file
View file

@ -0,0 +1,21 @@
// Copyright 2022 The V8 project authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
{
include: [ "syslog/client.shard.cml" ],
program: {
runner: "elf",
binary: "d8",
},
use: [
{
protocol: [
"fuchsia.kernel.VmexResource",
],
},
{
storage: "tmp",
path: "/tmp",
},
],
}

52
deps/v8/gni/v8.cmx vendored
View file

@ -1,52 +0,0 @@
{
"facets": {
"fuchsia.test": {
"system-services": [
"fuchsia.kernel.VmexResource"
]
}
},
"sandbox": {
"dev": [
"null",
"zero"
],
"features": [
"deprecated-ambient-replace-as-executable",
"isolated-cache-storage",
"isolated-persistent-storage",
"isolated-temp",
"root-ssl-certificates",
"vulkan"
],
"services": [
"fuchsia.accessibility.semantics.SemanticsManager",
"fuchsia.camera3.DeviceWatcher",
"fuchsia.device.NameProvider",
"fuchsia.fonts.Provider",
"fuchsia.intl.PropertyProvider",
"fuchsia.kernel.VmexResource",
"fuchsia.logger.Log",
"fuchsia.logger.LogSink",
"fuchsia.media.Audio",
"fuchsia.media.SessionAudioConsumerFactory",
"fuchsia.media.drm.Widevine",
"fuchsia.mediacodec.CodecFactory",
"fuchsia.memorypressure.Provider",
"fuchsia.net.NameLookup",
"fuchsia.net.interfaces.State",
"fuchsia.posix.socket.Provider",
"fuchsia.process.Launcher",
"fuchsia.sys.Environment",
"fuchsia.sys.Launcher",
"fuchsia.sys.Loader",
"fuchsia.sysmem.Allocator",
"fuchsia.ui.input.ImeService",
"fuchsia.ui.input.ImeVisibilityService",
"fuchsia.ui.scenic.Scenic",
"fuchsia.ui.policy.Presenter",
"fuchsia.vulkan.loader.Loader",
"fuchsia.web.ContextProvider"
]
}
}

21
deps/v8/gni/v8.gni vendored
View file

@ -69,6 +69,9 @@ declare_args() {
# executed as standard JavaScript instead. # executed as standard JavaScript instead.
v8_enable_webassembly = "" v8_enable_webassembly = ""
# Enable 256-bit long vector re-vectorization pass in WASM compilation pipeline.
v8_enable_wasm_simd256_revec = false
# Enable runtime call stats. # Enable runtime call stats.
v8_enable_runtime_call_stats = !is_on_release_branch v8_enable_runtime_call_stats = !is_on_release_branch
@ -78,6 +81,12 @@ declare_args() {
# Scan the call stack conservatively during garbage collection. # Scan the call stack conservatively during garbage collection.
v8_enable_conservative_stack_scanning = false v8_enable_conservative_stack_scanning = false
# Use the object start bitmap for inner pointer resolution.
v8_enable_inner_pointer_resolution_osb = false
# Use the marking bitmap for inner pointer resolution.
v8_enable_inner_pointer_resolution_mb = false
v8_enable_google_benchmark = false v8_enable_google_benchmark = false
cppgc_is_standalone = false cppgc_is_standalone = false
@ -88,6 +97,13 @@ declare_args() {
# Enable young generation in cppgc. # Enable young generation in cppgc.
cppgc_enable_young_generation = false cppgc_enable_young_generation = false
# Enable pointer compression in cppgc.
cppgc_enable_pointer_compression = false
# Enable 2gb cage for fast compression/decompression. Currently disabled
# due to an increased number of OOMs.
cppgc_enable_2gb_cage = false
# Enable advanced BigInt algorithms, costing about 10-30 KB binary size # Enable advanced BigInt algorithms, costing about 10-30 KB binary size
# depending on platform. Disabled on Android to save binary size. # depending on platform. Disabled on Android to save binary size.
v8_advanced_bigint_algorithms = !is_android v8_advanced_bigint_algorithms = !is_android
@ -142,7 +158,7 @@ if (is_debug && !v8_optimized_debug) {
# TODO(crbug.com/621335) Rework this so that we don't have the confusion # TODO(crbug.com/621335) Rework this so that we don't have the confusion
# between "optimize_speed" and "optimize_max". # between "optimize_speed" and "optimize_max".
if (((is_posix && !is_android) || is_fuchsia) && !using_sanitizer) { if (is_posix && !is_android && !using_sanitizer) {
v8_add_configs += [ "//build/config/compiler:optimize_speed" ] v8_add_configs += [ "//build/config/compiler:optimize_speed" ]
} else { } else {
v8_add_configs += [ "//build/config/compiler:optimize_max" ] v8_add_configs += [ "//build/config/compiler:optimize_max" ]
@ -183,8 +199,7 @@ if ((is_posix || is_fuchsia) &&
} }
# On MIPS gcc_target_rpath and ldso_path might be needed for all builds. # On MIPS gcc_target_rpath and ldso_path might be needed for all builds.
if (target_cpu == "mipsel" || target_cpu == "mips64el" || if (target_cpu == "mips64el" || target_cpu == "mips64") {
target_cpu == "mips" || target_cpu == "mips64") {
v8_add_configs += [ "//build/config/gcc:rpath_for_built_shared_libraries" ] v8_add_configs += [ "//build/config/gcc:rpath_for_built_shared_libraries" ]
} }

View file

@ -5,7 +5,6 @@
#ifndef INCLUDE_CPPGC_COMMON_H_ #ifndef INCLUDE_CPPGC_COMMON_H_
#define INCLUDE_CPPGC_COMMON_H_ #define INCLUDE_CPPGC_COMMON_H_
// TODO(chromium:1056170): Remove dependency on v8.
#include "v8config.h" // NOLINT(build/include_directory) #include "v8config.h" // NOLINT(build/include_directory)
namespace cppgc { namespace cppgc {

View file

@ -120,7 +120,7 @@ class BasicCrossThreadPersistent final : public CrossThreadPersistentBase,
if (!IsValid(raw)) return; if (!IsValid(raw)) return;
PersistentRegionLock guard; PersistentRegionLock guard;
CrossThreadPersistentRegion& region = this->GetPersistentRegion(raw); CrossThreadPersistentRegion& region = this->GetPersistentRegion(raw);
SetNode(region.AllocateNode(this, &Trace)); SetNode(region.AllocateNode(this, &TraceAsRoot));
this->CheckPointer(raw); this->CheckPointer(raw);
} }
@ -138,7 +138,7 @@ class BasicCrossThreadPersistent final : public CrossThreadPersistentBase,
: CrossThreadPersistentBase(raw), LocationPolicy(loc) { : CrossThreadPersistentBase(raw), LocationPolicy(loc) {
if (!IsValid(raw)) return; if (!IsValid(raw)) return;
CrossThreadPersistentRegion& region = this->GetPersistentRegion(raw); CrossThreadPersistentRegion& region = this->GetPersistentRegion(raw);
SetNode(region.AllocateNode(this, &Trace)); SetNode(region.AllocateNode(this, &TraceAsRoot));
this->CheckPointer(raw); this->CheckPointer(raw);
} }
@ -349,9 +349,8 @@ class BasicCrossThreadPersistent final : public CrossThreadPersistentBase,
return ptr && ptr != kSentinelPointer; return ptr && ptr != kSentinelPointer;
} }
static void Trace(Visitor* v, const void* ptr) { static void TraceAsRoot(RootVisitor& root_visitor, const void* ptr) {
const auto* handle = static_cast<const BasicCrossThreadPersistent*>(ptr); root_visitor.Trace(*static_cast<const BasicCrossThreadPersistent*>(ptr));
v->TraceRoot(*handle, handle->Location());
} }
void AssignUnsafe(T* ptr) { void AssignUnsafe(T* ptr) {
@ -378,7 +377,7 @@ class BasicCrossThreadPersistent final : public CrossThreadPersistentBase,
SetValue(ptr); SetValue(ptr);
if (!IsValid(ptr)) return; if (!IsValid(ptr)) return;
PersistentRegionLock guard; PersistentRegionLock guard;
SetNode(this->GetPersistentRegion(ptr).AllocateNode(this, &Trace)); SetNode(this->GetPersistentRegion(ptr).AllocateNode(this, &TraceAsRoot));
this->CheckPointer(ptr); this->CheckPointer(ptr);
} }
@ -398,7 +397,7 @@ class BasicCrossThreadPersistent final : public CrossThreadPersistentBase,
} }
SetValue(ptr); SetValue(ptr);
if (!IsValid(ptr)) return; if (!IsValid(ptr)) return;
SetNode(this->GetPersistentRegion(ptr).AllocateNode(this, &Trace)); SetNode(this->GetPersistentRegion(ptr).AllocateNode(this, &TraceAsRoot));
this->CheckPointer(ptr); this->CheckPointer(ptr);
} }
@ -416,7 +415,7 @@ class BasicCrossThreadPersistent final : public CrossThreadPersistentBase,
return static_cast<T*>(const_cast<void*>(GetValueFromGC())); return static_cast<T*>(const_cast<void*>(GetValueFromGC()));
} }
friend class cppgc::Visitor; friend class internal::RootVisitor;
}; };
template <typename T, typename LocationPolicy, typename CheckingPolicy> template <typename T, typename LocationPolicy, typename CheckingPolicy>

View file

@ -9,6 +9,7 @@
#include "cppgc/internal/write-barrier.h" #include "cppgc/internal/write-barrier.h"
#include "cppgc/macros.h" #include "cppgc/macros.h"
#include "cppgc/member.h"
#include "cppgc/trace-trait.h" #include "cppgc/trace-trait.h"
#include "v8config.h" // NOLINT(build/include_directory) #include "v8config.h" // NOLINT(build/include_directory)
@ -47,6 +48,29 @@ class HeapConsistency final {
return internal::WriteBarrier::GetWriteBarrierType(slot, value, params); return internal::WriteBarrier::GetWriteBarrierType(slot, value, params);
} }
/**
* Gets the required write barrier type for a specific write. This override is
* only used for all the BasicMember types.
*
* \param slot Slot containing the pointer to the object. The slot itself
* must reside in an object that has been allocated using
* `MakeGarbageCollected()`.
* \param value The pointer to the object held via `BasicMember`.
* \param params Parameters that may be used for actual write barrier calls.
* Only filled if return value indicates that a write barrier is needed. The
* contents of the `params` are an implementation detail.
* \returns whether a write barrier is needed and which barrier to invoke.
*/
template <typename T, typename WeaknessTag, typename WriteBarrierPolicy,
typename CheckingPolicy>
static V8_INLINE WriteBarrierType GetWriteBarrierType(
const internal::BasicMember<T, WeaknessTag, WriteBarrierPolicy,
CheckingPolicy>& value,
WriteBarrierParams& params) {
return internal::WriteBarrier::GetWriteBarrierType(
value.GetRawSlot(), value.GetRawStorage(), params);
}
/** /**
* Gets the required write barrier type for a specific write. * Gets the required write barrier type for a specific write.
* *
@ -146,7 +170,25 @@ class HeapConsistency final {
*/ */
static V8_INLINE void GenerationalBarrier(const WriteBarrierParams& params, static V8_INLINE void GenerationalBarrier(const WriteBarrierParams& params,
const void* slot) { const void* slot) {
internal::WriteBarrier::GenerationalBarrier(params, slot); internal::WriteBarrier::GenerationalBarrier<
internal::WriteBarrier::GenerationalBarrierType::kPreciseSlot>(params,
slot);
}
/**
* Generational barrier for maintaining consistency when running with multiple
 * generations. This version is used when the slot contains an uncompressed pointer.
*
* \param params The parameters retrieved from `GetWriteBarrierType()`.
* \param slot Uncompressed slot containing the direct pointer to the object.
* The slot itself must reside in an object that has been allocated using
* `MakeGarbageCollected()`.
*/
static V8_INLINE void GenerationalBarrierForUncompressedSlot(
const WriteBarrierParams& params, const void* uncompressed_slot) {
internal::WriteBarrier::GenerationalBarrier<
internal::WriteBarrier::GenerationalBarrierType::
kPreciseUncompressedSlot>(params, uncompressed_slot);
} }
/** /**
@ -158,8 +200,9 @@ class HeapConsistency final {
*/ */
static V8_INLINE void GenerationalBarrierForSourceObject( static V8_INLINE void GenerationalBarrierForSourceObject(
const WriteBarrierParams& params, const void* inner_pointer) { const WriteBarrierParams& params, const void* inner_pointer) {
internal::WriteBarrier::GenerationalBarrierForSourceObject(params, internal::WriteBarrier::GenerationalBarrier<
inner_pointer); internal::WriteBarrier::GenerationalBarrierType::kImpreciseSlot>(
params, inner_pointer);
} }
private: private:
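
The new GetWriteBarrierType() overloads above feed the Member<> write-barrier policies later in this change. As a rough illustration of how the public API is meant to be driven, here is a minimal sketch of a manual slot assignment; it assumes the cppgc::subtle::HeapConsistency wrapper from this header and its DijkstraWriteBarrier() helper for the marking case (not visible in this hunk), and the function itself is hypothetical.

#include "cppgc/heap-consistency.h"

namespace {

// Illustrative only: a raw slot update guarded by explicit write barriers,
// roughly what the Member<> write-barrier policies in this change do.
void AssignWithBarrier(void** slot, void* value) {
  using HC = cppgc::subtle::HeapConsistency;
  HC::WriteBarrierParams params;
  const HC::WriteBarrierType type =
      HC::GetWriteBarrierType(slot, value, params);
  *slot = value;  // Publish first; the Dijkstra barrier then marks the value.
  switch (type) {
    case HC::WriteBarrierType::kMarking:
      HC::DijkstraWriteBarrier(params, value);  // Assumed marking-path helper.
      break;
    case HC::WriteBarrierType::kGenerational:
      HC::GenerationalBarrier(params, slot);  // Precise-slot variant above.
      break;
    case HC::WriteBarrierType::kNone:
      break;
  }
}

}  // namespace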

41
deps/v8/include/cppgc/heap-handle.h vendored Normal file
View file

@ -0,0 +1,41 @@
// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef INCLUDE_CPPGC_HEAP_HANDLE_H_
#define INCLUDE_CPPGC_HEAP_HANDLE_H_
#include "v8config.h" // NOLINT(build/include_directory)
namespace cppgc {
namespace internal {
class HeapBase;
class WriteBarrierTypeForCagedHeapPolicy;
} // namespace internal
/**
* Opaque handle used for additional heap APIs.
*/
class HeapHandle {
private:
HeapHandle() = default;
V8_INLINE bool is_incremental_marking_in_progress() const {
return is_incremental_marking_in_progress_;
}
V8_INLINE bool is_young_generation_enabled() const {
return is_young_generation_enabled_;
}
bool is_incremental_marking_in_progress_ = false;
bool is_young_generation_enabled_ = false;
friend class internal::HeapBase;
friend class internal::WriteBarrierTypeForCagedHeapPolicy;
};
} // namespace cppgc
#endif // INCLUDE_CPPGC_HEAP_HANDLE_H_

View file

@ -56,7 +56,7 @@ struct HeapStatistics final {
/** Amount of memory actually used on the page. */ /** Amount of memory actually used on the page. */
size_t used_size_bytes = 0; size_t used_size_bytes = 0;
/** Statistics for object allocated on the page. Filled only when /** Statistics for object allocated on the page. Filled only when
* NameProvider::HideInternalNames() is false. */ * NameProvider::SupportsCppClassNamesAsObjectNames() is true. */
std::vector<ObjectStatsEntry> object_statistics; std::vector<ObjectStatsEntry> object_statistics;
}; };
@ -98,7 +98,7 @@ struct HeapStatistics final {
/** Overall committed amount of memory for the heap. */ /** Overall committed amount of memory for the heap. */
size_t committed_size_bytes = 0; size_t committed_size_bytes = 0;
/** Resident amount of memory help by the heap. */ /** Resident amount of memory held by the heap. */
size_t resident_size_bytes = 0; size_t resident_size_bytes = 0;
/** Amount of memory actually used on the heap. */ /** Amount of memory actually used on the heap. */
size_t used_size_bytes = 0; size_t used_size_bytes = 0;

View file

@ -21,6 +21,7 @@
namespace cppgc { namespace cppgc {
class AllocationHandle; class AllocationHandle;
class HeapHandle;
/** /**
* Implementation details of cppgc. Those details are considered internal and * Implementation details of cppgc. Those details are considered internal and
@ -31,11 +32,6 @@ namespace internal {
class Heap; class Heap;
} // namespace internal } // namespace internal
/**
* Used for additional heap APIs.
*/
class HeapHandle;
class V8_EXPORT Heap { class V8_EXPORT Heap {
public: public:
/** /**
@ -59,7 +55,7 @@ class V8_EXPORT Heap {
}; };
/** /**
* Specifies supported marking types * Specifies supported marking types.
*/ */
enum class MarkingType : uint8_t { enum class MarkingType : uint8_t {
/** /**
@ -79,7 +75,7 @@ class V8_EXPORT Heap {
}; };
/** /**
* Specifies supported sweeping types * Specifies supported sweeping types.
*/ */
enum class SweepingType : uint8_t { enum class SweepingType : uint8_t {
/** /**

View file

@ -32,12 +32,22 @@ static constexpr uint16_t kFullyConstructedBitMask = uint16_t{1};
static constexpr size_t kPageSize = size_t{1} << 17; static constexpr size_t kPageSize = size_t{1} << 17;
#if defined(V8_TARGET_ARCH_ARM64) && defined(V8_OS_MACOS)
constexpr size_t kGuardPageSize = 0;
#else
constexpr size_t kGuardPageSize = 4096;
#endif
static constexpr size_t kLargeObjectSizeThreshold = kPageSize / 2; static constexpr size_t kLargeObjectSizeThreshold = kPageSize / 2;
#if defined(CPPGC_CAGED_HEAP) #if defined(CPPGC_CAGED_HEAP)
#if defined(CPPGC_2GB_CAGE)
constexpr size_t kCagedHeapReservationSize = static_cast<size_t>(2) * kGB;
#else // !defined(CPPGC_2GB_CAGE)
constexpr size_t kCagedHeapReservationSize = static_cast<size_t>(4) * kGB; constexpr size_t kCagedHeapReservationSize = static_cast<size_t>(4) * kGB;
#endif // !defined(CPPGC_2GB_CAGE)
constexpr size_t kCagedHeapReservationAlignment = kCagedHeapReservationSize; constexpr size_t kCagedHeapReservationAlignment = kCagedHeapReservationSize;
#endif #endif // defined(CPPGC_CAGED_HEAP)
static constexpr size_t kDefaultAlignment = sizeof(void*); static constexpr size_t kDefaultAlignment = sizeof(void*);

View file

@ -0,0 +1,45 @@
// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef INCLUDE_CPPGC_INTERNAL_BASE_PAGE_HANDLE_H_
#define INCLUDE_CPPGC_INTERNAL_BASE_PAGE_HANDLE_H_
#include "cppgc/heap-handle.h"
#include "cppgc/internal/api-constants.h"
#include "cppgc/internal/logging.h"
#include "v8config.h" // NOLINT(build/include_directory)
namespace cppgc {
namespace internal {
// The class is needed in the header to allow for fast access to HeapHandle in
// the write barrier.
class BasePageHandle {
public:
static V8_INLINE BasePageHandle* FromPayload(void* payload) {
return reinterpret_cast<BasePageHandle*>(
(reinterpret_cast<uintptr_t>(payload) &
~(api_constants::kPageSize - 1)) +
api_constants::kGuardPageSize);
}
static V8_INLINE const BasePageHandle* FromPayload(const void* payload) {
return FromPayload(const_cast<void*>(payload));
}
HeapHandle& heap_handle() { return heap_handle_; }
const HeapHandle& heap_handle() const { return heap_handle_; }
protected:
explicit BasePageHandle(HeapHandle& heap_handle) : heap_handle_(heap_handle) {
CPPGC_DCHECK(reinterpret_cast<uintptr_t>(this) % api_constants::kPageSize ==
api_constants::kGuardPageSize);
}
HeapHandle& heap_handle_;
};
} // namespace internal
} // namespace cppgc
#endif // INCLUDE_CPPGC_INTERNAL_BASE_PAGE_HANDLE_H_
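
FromPayload() above is plain pointer arithmetic: clear the offset bits given by the 128 KiB page size, then step over the guard page to where the page header (and its HeapHandle reference) lives. A standalone sketch of that computation, assuming the non-macOS/arm64 constants from the api-constants.h hunk above (kPageSize = 1 << 17, kGuardPageSize = 4096) and an invented page address:

#include <cstdint>

// Mirrors BasePageHandle::FromPayload() with the constants spelled out.
constexpr uintptr_t kPageSize = uintptr_t{1} << 17;  // 128 KiB normal pages.
constexpr uintptr_t kGuardPageSize = 4096;           // One guard page up front.

constexpr uintptr_t PageHandleFromPayload(uintptr_t payload) {
  return (payload & ~(kPageSize - 1)) + kGuardPageSize;
}

// Hypothetical payload address: a page starting at 0x2a0000 plus some offset.
static_assert(PageHandleFromPayload(0x2a0000 + 0x8abc) == 0x2a0000 + 0x1000,
              "the handle lives one guard page past the page start");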

View file

@ -10,46 +10,76 @@
#include <cstdint> #include <cstdint>
#include "cppgc/internal/api-constants.h" #include "cppgc/internal/api-constants.h"
#include "cppgc/internal/caged-heap.h"
#include "cppgc/internal/logging.h" #include "cppgc/internal/logging.h"
#include "cppgc/platform.h" #include "cppgc/platform.h"
#include "v8config.h" // NOLINT(build/include_directory) #include "v8config.h" // NOLINT(build/include_directory)
#if __cpp_lib_bitopts
#include <bit>
#endif // __cpp_lib_bitopts
#if defined(CPPGC_CAGED_HEAP)
namespace cppgc { namespace cppgc {
namespace internal { namespace internal {
class HeapBase; class HeapBase;
class HeapBaseHandle;
#if defined(CPPGC_YOUNG_GENERATION) #if defined(CPPGC_YOUNG_GENERATION)
// AgeTable is the bytemap needed for the fast generation check in the write // AgeTable is the bytemap needed for the fast generation check in the write
// barrier. AgeTable contains entries that correspond to 512 bytes memory // barrier. AgeTable contains entries that correspond to 4096 bytes memory
// regions (cards). Each entry in the table represents generation of the objects // regions (cards). Each entry in the table represents generation of the objects
// that reside on the corresponding card (young, old or mixed). // that reside on the corresponding card (young, old or mixed).
class AgeTable final { class V8_EXPORT AgeTable final {
static constexpr size_t kRequiredSize = 1 * api_constants::kMB; static constexpr size_t kRequiredSize = 1 * api_constants::kMB;
static constexpr size_t kAllocationGranularity = static constexpr size_t kAllocationGranularity =
api_constants::kAllocationGranularity; api_constants::kAllocationGranularity;
public: public:
// Represents age of the objects living on a single card.
enum class Age : uint8_t { kOld, kYoung, kMixed }; enum class Age : uint8_t { kOld, kYoung, kMixed };
// When setting age for a range, consider or ignore ages of the adjacent
// cards.
enum class AdjacentCardsPolicy : uint8_t { kConsider, kIgnore };
static constexpr size_t kCardSizeInBytes = static constexpr size_t kCardSizeInBytes =
(api_constants::kCagedHeapReservationSize / kAllocationGranularity) / api_constants::kCagedHeapReservationSize / kRequiredSize;
kRequiredSize;
void SetAge(uintptr_t cage_offset, Age age) { void SetAge(uintptr_t cage_offset, Age age) {
table_[card(cage_offset)] = age; table_[card(cage_offset)] = age;
} }
V8_INLINE Age GetAge(uintptr_t cage_offset) const { V8_INLINE Age GetAge(uintptr_t cage_offset) const {
return table_[card(cage_offset)]; return table_[card(cage_offset)];
} }
void Reset(PageAllocator* allocator); void SetAgeForRange(uintptr_t cage_offset_begin, uintptr_t cage_offset_end,
Age age, AdjacentCardsPolicy adjacent_cards_policy);
Age GetAgeForRange(uintptr_t cage_offset_begin,
uintptr_t cage_offset_end) const;
void ResetForTesting();
private: private:
V8_INLINE size_t card(uintptr_t offset) const { V8_INLINE size_t card(uintptr_t offset) const {
constexpr size_t kGranularityBits = constexpr size_t kGranularityBits =
#if __cpp_lib_bitopts
std::countr_zero(static_cast<uint32_t>(kCardSizeInBytes));
#elif V8_HAS_BUILTIN_CTZ
__builtin_ctz(static_cast<uint32_t>(kCardSizeInBytes)); __builtin_ctz(static_cast<uint32_t>(kCardSizeInBytes));
#else //! V8_HAS_BUILTIN_CTZ
// Hardcode and check with assert.
#if defined(CPPGC_2GB_CAGE)
11;
#else // !defined(CPPGC_2GB_CAGE)
12;
#endif // !defined(CPPGC_2GB_CAGE)
#endif // !V8_HAS_BUILTIN_CTZ
static_assert((1 << kGranularityBits) == kCardSizeInBytes);
const size_t entry = offset >> kGranularityBits; const size_t entry = offset >> kGranularityBits;
CPPGC_DCHECK(table_.size() > entry); CPPGC_DCHECK(table_.size() > entry);
return entry; return entry;
@ -64,10 +94,10 @@ static_assert(sizeof(AgeTable) == 1 * api_constants::kMB,
#endif // CPPGC_YOUNG_GENERATION #endif // CPPGC_YOUNG_GENERATION
struct CagedHeapLocalData final { struct CagedHeapLocalData final {
CagedHeapLocalData(HeapBase&, PageAllocator&); V8_INLINE static CagedHeapLocalData& Get() {
return *reinterpret_cast<CagedHeapLocalData*>(CagedHeapBase::GetBase());
}
bool is_incremental_marking_in_progress = false;
HeapBase& heap_base;
#if defined(CPPGC_YOUNG_GENERATION) #if defined(CPPGC_YOUNG_GENERATION)
AgeTable age_table; AgeTable age_table;
#endif #endif
@ -76,4 +106,6 @@ struct CagedHeapLocalData final {
} // namespace internal } // namespace internal
} // namespace cppgc } // namespace cppgc
#endif // defined(CPPGC_CAGED_HEAP)
#endif // INCLUDE_CPPGC_INTERNAL_CAGED_HEAP_LOCAL_DATA_H_ #endif // INCLUDE_CPPGC_INTERNAL_CAGED_HEAP_LOCAL_DATA_H_
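
The card size used by the age table follows from the two constants in this hunk: a 1 MiB table spanning the whole cage gives 4 KiB cards for the default 4 GiB reservation (2 KiB under CPPGC_2GB_CAGE), and card() then reduces to a shift. A standalone re-derivation, assuming the 64-bit, non-2GB configuration:

#include <cstdint>

constexpr uint64_t kRequiredSize = uint64_t{1} << 20;              // 1 MiB age table.
constexpr uint64_t kCagedHeapReservationSize = uint64_t{4} << 30;  // Default 4 GiB cage.

constexpr uint64_t kCardSizeInBytes = kCagedHeapReservationSize / kRequiredSize;
static_assert(kCardSizeInBytes == 4096, "one byte-sized entry per 4 KiB card");

// card() from the hunk above: the cage offset divided by the card size.
constexpr uint64_t CardIndex(uint64_t cage_offset) { return cage_offset >> 12; }
static_assert(CardIndex(4095) == 0 && CardIndex(4096) == 1, "4 KiB granularity");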

View file

@ -0,0 +1,61 @@
// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef INCLUDE_CPPGC_INTERNAL_CAGED_HEAP_H_
#define INCLUDE_CPPGC_INTERNAL_CAGED_HEAP_H_
#include <climits>
#include <cstddef>
#include "cppgc/internal/api-constants.h"
#include "cppgc/internal/base-page-handle.h"
#include "v8config.h" // NOLINT(build/include_directory)
#if defined(CPPGC_CAGED_HEAP)
namespace cppgc {
namespace internal {
class V8_EXPORT CagedHeapBase {
public:
V8_INLINE static uintptr_t OffsetFromAddress(const void* address) {
return reinterpret_cast<uintptr_t>(address) &
(api_constants::kCagedHeapReservationAlignment - 1);
}
V8_INLINE static bool IsWithinCage(const void* address) {
CPPGC_DCHECK(g_heap_base_);
return (reinterpret_cast<uintptr_t>(address) &
~(api_constants::kCagedHeapReservationAlignment - 1)) ==
g_heap_base_;
}
V8_INLINE static bool AreWithinCage(const void* addr1, const void* addr2) {
#if defined(CPPGC_2GB_CAGE)
static constexpr size_t kHalfWordShift = sizeof(uint32_t) * CHAR_BIT - 1;
#else //! defined(CPPGC_2GB_CAGE)
static constexpr size_t kHalfWordShift = sizeof(uint32_t) * CHAR_BIT;
#endif //! defined(CPPGC_2GB_CAGE)
static_assert((static_cast<size_t>(1) << kHalfWordShift) ==
api_constants::kCagedHeapReservationSize);
CPPGC_DCHECK(g_heap_base_);
return !(((reinterpret_cast<uintptr_t>(addr1) ^ g_heap_base_) |
(reinterpret_cast<uintptr_t>(addr2) ^ g_heap_base_)) >>
kHalfWordShift);
}
V8_INLINE static uintptr_t GetBase() { return g_heap_base_; }
private:
friend class CagedHeap;
static uintptr_t g_heap_base_;
};
} // namespace internal
} // namespace cppgc
#endif // defined(CPPGC_CAGED_HEAP)
#endif // INCLUDE_CPPGC_INTERNAL_CAGED_HEAP_H_
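
AreWithinCage() exploits the fact that the cage is a naturally aligned 4 GiB (or 2 GiB) region: XOR-ing an address with the cage base clears all bits above the half-word boundary exactly when the address is in-cage, so OR-ing both XOR results and shifting by 32 is zero only if both pointers are inside. A small standalone sketch of the same predicate with an invented cage base:

#include <cstdint>

constexpr unsigned kHalfWordShift = 32;  // 4 GiB cage (31 for the 2 GiB cage).

// Both addresses are inside the cage iff neither XOR leaves any high bits set.
constexpr bool AreWithinCage(uint64_t base, uint64_t a, uint64_t b) {
  return !(((a ^ base) | (b ^ base)) >> kHalfWordShift);
}

// Hypothetical 4 GiB-aligned cage base and two offsets within it.
constexpr uint64_t kBase = uint64_t{0x45} << 32;
static_assert(AreWithinCage(kBase, kBase + 0x1000, kBase + 0xdead0),
              "both offsets fall in the same 4 GiB cage");
static_assert(!AreWithinCage(kBase, kBase + 0x1000, kBase + (uint64_t{5} << 32)),
              "an address past the cage end is rejected");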

View file

@ -48,7 +48,6 @@ struct V8_EXPORT EnsureGCInfoIndexTrait final {
static GCInfoIndex EnsureGCInfoIndexNonPolymorphic(std::atomic<GCInfoIndex>&, static GCInfoIndex EnsureGCInfoIndexNonPolymorphic(std::atomic<GCInfoIndex>&,
TraceCallback, TraceCallback,
FinalizationCallback, FinalizationCallback,
NameCallback); NameCallback);
static GCInfoIndex EnsureGCInfoIndexNonPolymorphic(std::atomic<GCInfoIndex>&, static GCInfoIndex EnsureGCInfoIndexNonPolymorphic(std::atomic<GCInfoIndex>&,
TraceCallback, TraceCallback,

View file

@ -0,0 +1,236 @@
// Copyright 2022 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef INCLUDE_CPPGC_INTERNAL_MEMBER_STORAGE_H_
#define INCLUDE_CPPGC_INTERNAL_MEMBER_STORAGE_H_
#include <atomic>
#include <cstddef>
#include <type_traits>
#include "cppgc/internal/api-constants.h"
#include "cppgc/internal/logging.h"
#include "cppgc/sentinel-pointer.h"
#include "v8config.h" // NOLINT(build/include_directory)
namespace cppgc {
namespace internal {
#if defined(CPPGC_POINTER_COMPRESSION)
#if defined(__clang__)
// Attribute const allows the compiler to assume that CageBaseGlobal::g_base_
// doesn't change (e.g. across calls) and thereby avoid redundant loads.
#define CPPGC_CONST __attribute__((const))
#define CPPGC_REQUIRE_CONSTANT_INIT \
__attribute__((require_constant_initialization))
#else // defined(__clang__)
#define CPPGC_CONST
#define CPPGC_REQUIRE_CONSTANT_INIT
#endif // defined(__clang__)
class CageBaseGlobal final {
public:
V8_INLINE CPPGC_CONST static uintptr_t Get() {
CPPGC_DCHECK(IsBaseConsistent());
return g_base_;
}
V8_INLINE CPPGC_CONST static bool IsSet() {
CPPGC_DCHECK(IsBaseConsistent());
return (g_base_ & ~kLowerHalfWordMask) != 0;
}
private:
// We keep the lower halfword as ones to speed up decompression.
static constexpr uintptr_t kLowerHalfWordMask =
(api_constants::kCagedHeapReservationAlignment - 1);
static V8_EXPORT uintptr_t g_base_ CPPGC_REQUIRE_CONSTANT_INIT;
CageBaseGlobal() = delete;
V8_INLINE static bool IsBaseConsistent() {
return kLowerHalfWordMask == (g_base_ & kLowerHalfWordMask);
}
friend class CageBaseGlobalUpdater;
};
#undef CPPGC_REQUIRE_CONSTANT_INIT
#undef CPPGC_CONST
class CompressedPointer final {
public:
using IntegralType = uint32_t;
V8_INLINE CompressedPointer() : value_(0u) {}
V8_INLINE explicit CompressedPointer(const void* ptr)
: value_(Compress(ptr)) {}
V8_INLINE explicit CompressedPointer(std::nullptr_t) : value_(0u) {}
V8_INLINE explicit CompressedPointer(SentinelPointer)
: value_(kCompressedSentinel) {}
V8_INLINE const void* Load() const { return Decompress(value_); }
V8_INLINE const void* LoadAtomic() const {
return Decompress(
reinterpret_cast<const std::atomic<IntegralType>&>(value_).load(
std::memory_order_relaxed));
}
V8_INLINE void Store(const void* ptr) { value_ = Compress(ptr); }
V8_INLINE void StoreAtomic(const void* value) {
reinterpret_cast<std::atomic<IntegralType>&>(value_).store(
Compress(value), std::memory_order_relaxed);
}
V8_INLINE void Clear() { value_ = 0u; }
V8_INLINE bool IsCleared() const { return !value_; }
V8_INLINE bool IsSentinel() const { return value_ == kCompressedSentinel; }
V8_INLINE uint32_t GetAsInteger() const { return value_; }
V8_INLINE friend bool operator==(CompressedPointer a, CompressedPointer b) {
return a.value_ == b.value_;
}
V8_INLINE friend bool operator!=(CompressedPointer a, CompressedPointer b) {
return a.value_ != b.value_;
}
V8_INLINE friend bool operator<(CompressedPointer a, CompressedPointer b) {
return a.value_ < b.value_;
}
V8_INLINE friend bool operator<=(CompressedPointer a, CompressedPointer b) {
return a.value_ <= b.value_;
}
V8_INLINE friend bool operator>(CompressedPointer a, CompressedPointer b) {
return a.value_ > b.value_;
}
V8_INLINE friend bool operator>=(CompressedPointer a, CompressedPointer b) {
return a.value_ >= b.value_;
}
static V8_INLINE IntegralType Compress(const void* ptr) {
static_assert(
SentinelPointer::kSentinelValue == 0b10,
"The compression scheme relies on the sentinel encoded as 0b10");
static constexpr size_t kGigaCageMask =
~(api_constants::kCagedHeapReservationAlignment - 1);
CPPGC_DCHECK(CageBaseGlobal::IsSet());
const uintptr_t base = CageBaseGlobal::Get();
CPPGC_DCHECK(!ptr || ptr == kSentinelPointer ||
(base & kGigaCageMask) ==
(reinterpret_cast<uintptr_t>(ptr) & kGigaCageMask));
#if defined(CPPGC_2GB_CAGE)
// Truncate the pointer.
auto compressed =
static_cast<IntegralType>(reinterpret_cast<uintptr_t>(ptr));
#else // !defined(CPPGC_2GB_CAGE)
const auto uptr = reinterpret_cast<uintptr_t>(ptr);
// Shift the pointer by one and truncate.
auto compressed = static_cast<IntegralType>(uptr >> 1);
#endif // !defined(CPPGC_2GB_CAGE)
// Normal compressed pointers must have the MSB set.
CPPGC_DCHECK((!compressed || compressed == kCompressedSentinel) ||
(compressed & (1 << 31)));
return compressed;
}
static V8_INLINE void* Decompress(IntegralType ptr) {
CPPGC_DCHECK(CageBaseGlobal::IsSet());
const uintptr_t base = CageBaseGlobal::Get();
// Treat compressed pointer as signed and cast it to uint64_t, which will
// sign-extend it.
#if defined(CPPGC_2GB_CAGE)
const uint64_t mask = static_cast<uint64_t>(static_cast<int32_t>(ptr));
#else // !defined(CPPGC_2GB_CAGE)
// Then, shift the result by one. It's important to shift the unsigned
// value, as otherwise it would result in undefined behavior.
const uint64_t mask = static_cast<uint64_t>(static_cast<int32_t>(ptr)) << 1;
#endif // !defined(CPPGC_2GB_CAGE)
return reinterpret_cast<void*>(mask & base);
}
private:
#if defined(CPPGC_2GB_CAGE)
static constexpr IntegralType kCompressedSentinel =
SentinelPointer::kSentinelValue;
#else // !defined(CPPGC_2GB_CAGE)
static constexpr IntegralType kCompressedSentinel =
SentinelPointer::kSentinelValue >> 1;
#endif // !defined(CPPGC_2GB_CAGE)
// All constructors initialize `value_`. Do not add a default value here as it
// results in a non-atomic write on some builds, even when the atomic version
// of the constructor is used.
IntegralType value_;
};
#endif // defined(CPPGC_POINTER_COMPRESSION)
class RawPointer final {
public:
using IntegralType = uintptr_t;
V8_INLINE RawPointer() : ptr_(nullptr) {}
V8_INLINE explicit RawPointer(const void* ptr) : ptr_(ptr) {}
V8_INLINE const void* Load() const { return ptr_; }
V8_INLINE const void* LoadAtomic() const {
return reinterpret_cast<const std::atomic<const void*>&>(ptr_).load(
std::memory_order_relaxed);
}
V8_INLINE void Store(const void* ptr) { ptr_ = ptr; }
V8_INLINE void StoreAtomic(const void* ptr) {
reinterpret_cast<std::atomic<const void*>&>(ptr_).store(
ptr, std::memory_order_relaxed);
}
V8_INLINE void Clear() { ptr_ = nullptr; }
V8_INLINE bool IsCleared() const { return !ptr_; }
V8_INLINE bool IsSentinel() const { return ptr_ == kSentinelPointer; }
V8_INLINE uintptr_t GetAsInteger() const {
return reinterpret_cast<uintptr_t>(ptr_);
}
V8_INLINE friend bool operator==(RawPointer a, RawPointer b) {
return a.ptr_ == b.ptr_;
}
V8_INLINE friend bool operator!=(RawPointer a, RawPointer b) {
return a.ptr_ != b.ptr_;
}
V8_INLINE friend bool operator<(RawPointer a, RawPointer b) {
return a.ptr_ < b.ptr_;
}
V8_INLINE friend bool operator<=(RawPointer a, RawPointer b) {
return a.ptr_ <= b.ptr_;
}
V8_INLINE friend bool operator>(RawPointer a, RawPointer b) {
return a.ptr_ > b.ptr_;
}
V8_INLINE friend bool operator>=(RawPointer a, RawPointer b) {
return a.ptr_ >= b.ptr_;
}
private:
// All constructors initialize `ptr_`. Do not add a default value here as it
// results in a non-atomic write on some builds, even when the atomic version
// of the constructor is used.
const void* ptr_;
};
#if defined(CPPGC_POINTER_COMPRESSION)
using MemberStorage = CompressedPointer;
#else // !defined(CPPGC_POINTER_COMPRESSION)
using MemberStorage = RawPointer;
#endif // !defined(CPPGC_POINTER_COMPRESSION)
} // namespace internal
} // namespace cppgc
#endif // INCLUDE_CPPGC_INTERNAL_MEMBER_STORAGE_H_
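
The compression scheme is easiest to follow with concrete numbers: in-cage pointers are shifted right by one and truncated to 32 bits, and decompression sign-extends, shifts back, and masks with a CageBaseGlobal value whose low half-word is all ones. A standalone round-trip sketch for the default (non-2GB) cage, with an invented cage start chosen so compressed values have their most significant bit set:

#include <cstdint>

// Invented, 4 GiB-aligned cage start whose bit 32 is set, matching the
// "normal compressed pointers must have the MSB set" expectation above.
constexpr uint64_t kCageStart = uint64_t{0x45} << 32;
// CageBaseGlobal keeps the low half-word as ones to speed up decompression.
constexpr uint64_t kCageBase = kCageStart | 0xffffffffu;

constexpr uint32_t Compress(uint64_t ptr) {
  return static_cast<uint32_t>(ptr >> 1);  // Shift by one, then truncate.
}

constexpr uint64_t Decompress(uint32_t compressed) {
  // Sign-extend to 64 bits, shift back by one, then mask with the base.
  const uint64_t mask =
      static_cast<uint64_t>(static_cast<int32_t>(compressed)) << 1;
  return mask & kCageBase;
}

// Round trip for an object at some even offset inside the cage.
constexpr uint64_t kPtr = kCageStart + 0x12345678;
static_assert((Compress(kPtr) & 0x80000000u) != 0, "MSB set for normal pointers");
static_assert(Decompress(Compress(kPtr)) == kPtr, "compression is lossless in-cage");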

View file

@ -6,6 +6,7 @@
#define INCLUDE_CPPGC_INTERNAL_NAME_TRAIT_H_ #define INCLUDE_CPPGC_INTERNAL_NAME_TRAIT_H_
#include <cstddef> #include <cstddef>
#include <cstdint>
#include <type_traits> #include <type_traits>
#include "cppgc/name-provider.h" #include "cppgc/name-provider.h"
@ -58,6 +59,11 @@ struct HeapObjectName {
bool name_was_hidden; bool name_was_hidden;
}; };
enum class HeapObjectNameForUnnamedObject : uint8_t {
kUseClassNameIfSupported,
kUseHiddenName,
};
class V8_EXPORT NameTraitBase { class V8_EXPORT NameTraitBase {
protected: protected:
static HeapObjectName GetNameFromTypeSignature(const char*); static HeapObjectName GetNameFromTypeSignature(const char*);
@ -78,16 +84,24 @@ class NameTrait final : public NameTraitBase {
#endif // !CPPGC_SUPPORTS_OBJECT_NAMES #endif // !CPPGC_SUPPORTS_OBJECT_NAMES
} }
static HeapObjectName GetName(const void* obj) { static HeapObjectName GetName(
return GetNameFor(static_cast<const T*>(obj)); const void* obj, HeapObjectNameForUnnamedObject name_retrieval_mode) {
return GetNameFor(static_cast<const T*>(obj), name_retrieval_mode);
} }
private: private:
static HeapObjectName GetNameFor(const NameProvider* name_provider) { static HeapObjectName GetNameFor(const NameProvider* name_provider,
HeapObjectNameForUnnamedObject) {
// Objects inheriting from `NameProvider` are not considered unnamed as
// users already provided a name for them.
return {name_provider->GetHumanReadableName(), false}; return {name_provider->GetHumanReadableName(), false};
} }
static HeapObjectName GetNameFor(...) { static HeapObjectName GetNameFor(
const void*, HeapObjectNameForUnnamedObject name_retrieval_mode) {
if (name_retrieval_mode == HeapObjectNameForUnnamedObject::kUseHiddenName)
return {NameProvider::kHiddenName, true};
#if CPPGC_SUPPORTS_COMPILE_TIME_TYPENAME #if CPPGC_SUPPORTS_COMPILE_TIME_TYPENAME
return {GetTypename<T>(), false}; return {GetTypename<T>(), false};
#elif CPPGC_SUPPORTS_OBJECT_NAMES #elif CPPGC_SUPPORTS_OBJECT_NAMES
@ -102,7 +116,7 @@ class NameTrait final : public NameTraitBase {
static const HeapObjectName leaky_name = static const HeapObjectName leaky_name =
GetNameFromTypeSignature(PRETTY_FUNCTION_VALUE); GetNameFromTypeSignature(PRETTY_FUNCTION_VALUE);
return {leaky_name, false}; return leaky_name;
#undef PRETTY_FUNCTION_VALUE #undef PRETTY_FUNCTION_VALUE
@ -112,7 +126,8 @@ class NameTrait final : public NameTraitBase {
} }
}; };
using NameCallback = HeapObjectName (*)(const void*); using NameCallback = HeapObjectName (*)(const void*,
HeapObjectNameForUnnamedObject);
} // namespace internal } // namespace internal
} // namespace cppgc } // namespace cppgc
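
The two GetNameFor() overloads above split objects into those that provide their own name and those that fall back to a hidden name or a compile-time typename. A minimal sketch of the opt-in side, assuming the usual cppgc::NameProvider inheritance pattern (the class itself is hypothetical):

#include "cppgc/garbage-collected.h"
#include "cppgc/name-provider.h"
#include "cppgc/visitor.h"

// Hypothetical managed type that opts into explicit heap-snapshot naming.
// Because it derives from NameProvider, NameTrait uses GetHumanReadableName()
// and never takes the hidden-name / typename fallback paths shown above.
class NamedNode final : public cppgc::GarbageCollected<NamedNode>,
                        public cppgc::NameProvider {
 public:
  const char* GetHumanReadableName() const final { return "NamedNode"; }
  void Trace(cppgc::Visitor*) const {}
};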

View file

@ -14,13 +14,11 @@
#include "v8config.h" // NOLINT(build/include_directory) #include "v8config.h" // NOLINT(build/include_directory)
namespace cppgc { namespace cppgc {
class Visitor;
namespace internal { namespace internal {
class CrossThreadPersistentRegion; class CrossThreadPersistentRegion;
class FatalOutOfMemoryHandler; class FatalOutOfMemoryHandler;
class RootVisitor;
// PersistentNode represents a variant of two states: // PersistentNode represents a variant of two states:
// 1) traceable node with a back pointer to the Persistent object; // 1) traceable node with a back pointer to the Persistent object;
@ -32,7 +30,7 @@ class PersistentNode final {
PersistentNode(const PersistentNode&) = delete; PersistentNode(const PersistentNode&) = delete;
PersistentNode& operator=(const PersistentNode&) = delete; PersistentNode& operator=(const PersistentNode&) = delete;
void InitializeAsUsedNode(void* owner, TraceCallback trace) { void InitializeAsUsedNode(void* owner, TraceRootCallback trace) {
CPPGC_DCHECK(trace); CPPGC_DCHECK(trace);
owner_ = owner; owner_ = owner;
trace_ = trace; trace_ = trace;
@ -53,9 +51,9 @@ class PersistentNode final {
return next_; return next_;
} }
void Trace(Visitor* visitor) const { void Trace(RootVisitor& root_visitor) const {
CPPGC_DCHECK(IsUsed()); CPPGC_DCHECK(IsUsed());
trace_(visitor, owner_); trace_(root_visitor, owner_);
} }
bool IsUsed() const { return trace_; } bool IsUsed() const { return trace_; }
@ -73,7 +71,7 @@ class PersistentNode final {
void* owner_ = nullptr; void* owner_ = nullptr;
PersistentNode* next_; PersistentNode* next_;
}; };
TraceCallback trace_ = nullptr; TraceRootCallback trace_ = nullptr;
}; };
class V8_EXPORT PersistentRegionBase { class V8_EXPORT PersistentRegionBase {
@ -86,7 +84,7 @@ class V8_EXPORT PersistentRegionBase {
PersistentRegionBase(const PersistentRegionBase&) = delete; PersistentRegionBase(const PersistentRegionBase&) = delete;
PersistentRegionBase& operator=(const PersistentRegionBase&) = delete; PersistentRegionBase& operator=(const PersistentRegionBase&) = delete;
void Trace(Visitor*); void Iterate(RootVisitor&);
size_t NodesInUse() const; size_t NodesInUse() const;
@ -96,7 +94,7 @@ class V8_EXPORT PersistentRegionBase {
explicit PersistentRegionBase(const FatalOutOfMemoryHandler& oom_handler); explicit PersistentRegionBase(const FatalOutOfMemoryHandler& oom_handler);
PersistentNode* TryAllocateNodeFromFreeList(void* owner, PersistentNode* TryAllocateNodeFromFreeList(void* owner,
TraceCallback trace) { TraceRootCallback trace) {
PersistentNode* node = nullptr; PersistentNode* node = nullptr;
if (V8_LIKELY(free_list_head_)) { if (V8_LIKELY(free_list_head_)) {
node = free_list_head_; node = free_list_head_;
@ -118,7 +116,7 @@ class V8_EXPORT PersistentRegionBase {
} }
PersistentNode* RefillFreeListAndAllocateNode(void* owner, PersistentNode* RefillFreeListAndAllocateNode(void* owner,
TraceCallback trace); TraceRootCallback trace);
private: private:
template <typename PersistentBaseClass> template <typename PersistentBaseClass>
@ -145,7 +143,7 @@ class V8_EXPORT PersistentRegion final : public PersistentRegionBase {
PersistentRegion(const PersistentRegion&) = delete; PersistentRegion(const PersistentRegion&) = delete;
PersistentRegion& operator=(const PersistentRegion&) = delete; PersistentRegion& operator=(const PersistentRegion&) = delete;
V8_INLINE PersistentNode* AllocateNode(void* owner, TraceCallback trace) { V8_INLINE PersistentNode* AllocateNode(void* owner, TraceRootCallback trace) {
CPPGC_DCHECK(IsCreationThread()); CPPGC_DCHECK(IsCreationThread());
auto* node = TryAllocateNodeFromFreeList(owner, trace); auto* node = TryAllocateNodeFromFreeList(owner, trace);
if (V8_LIKELY(node)) return node; if (V8_LIKELY(node)) return node;
@ -189,7 +187,7 @@ class V8_EXPORT CrossThreadPersistentRegion final
CrossThreadPersistentRegion& operator=(const CrossThreadPersistentRegion&) = CrossThreadPersistentRegion& operator=(const CrossThreadPersistentRegion&) =
delete; delete;
V8_INLINE PersistentNode* AllocateNode(void* owner, TraceCallback trace) { V8_INLINE PersistentNode* AllocateNode(void* owner, TraceRootCallback trace) {
PersistentRegionLock::AssertLocked(); PersistentRegionLock::AssertLocked();
auto* node = TryAllocateNodeFromFreeList(owner, trace); auto* node = TryAllocateNodeFromFreeList(owner, trace);
if (V8_LIKELY(node)) return node; if (V8_LIKELY(node)) return node;
@ -202,7 +200,7 @@ class V8_EXPORT CrossThreadPersistentRegion final
PersistentRegionBase::FreeNode(node); PersistentRegionBase::FreeNode(node);
} }
void Trace(Visitor*); void Iterate(RootVisitor&);
size_t NodesInUse() const; size_t NodesInUse() const;

View file

@ -8,6 +8,7 @@
#include <cstdint> #include <cstdint>
#include <type_traits> #include <type_traits>
#include "cppgc/internal/member-storage.h"
#include "cppgc/internal/write-barrier.h" #include "cppgc/internal/write-barrier.h"
#include "cppgc/sentinel-pointer.h" #include "cppgc/sentinel-pointer.h"
#include "cppgc/source-location.h" #include "cppgc/source-location.h"
@ -27,15 +28,34 @@ class WeakMemberTag;
class UntracedMemberTag; class UntracedMemberTag;
struct DijkstraWriteBarrierPolicy { struct DijkstraWriteBarrierPolicy {
static void InitializingBarrier(const void*, const void*) { V8_INLINE static void InitializingBarrier(const void*, const void*) {
// Since in initializing writes the source object is always white, having no // Since in initializing writes the source object is always white, having no
// barrier doesn't break the tri-color invariant. // barrier doesn't break the tri-color invariant.
} }
static void AssigningBarrier(const void* slot, const void* value) {
V8_INLINE static void AssigningBarrier(const void* slot, const void* value) {
WriteBarrier::Params params; WriteBarrier::Params params;
switch (WriteBarrier::GetWriteBarrierType(slot, value, params)) { const WriteBarrier::Type type =
WriteBarrier::GetWriteBarrierType(slot, value, params);
WriteBarrier(type, params, slot, value);
}
V8_INLINE static void AssigningBarrier(const void* slot,
MemberStorage storage) {
WriteBarrier::Params params;
const WriteBarrier::Type type =
WriteBarrier::GetWriteBarrierType(slot, storage, params);
WriteBarrier(type, params, slot, storage.Load());
}
private:
V8_INLINE static void WriteBarrier(WriteBarrier::Type type,
const WriteBarrier::Params& params,
const void* slot, const void* value) {
switch (type) {
case WriteBarrier::Type::kGenerational: case WriteBarrier::Type::kGenerational:
WriteBarrier::GenerationalBarrier(params, slot); WriteBarrier::GenerationalBarrier<
WriteBarrier::GenerationalBarrierType::kPreciseSlot>(params, slot);
break; break;
case WriteBarrier::Type::kMarking: case WriteBarrier::Type::kMarking:
WriteBarrier::DijkstraMarkingBarrier(params, value); WriteBarrier::DijkstraMarkingBarrier(params, value);
@ -47,8 +67,9 @@ struct DijkstraWriteBarrierPolicy {
}; };
struct NoWriteBarrierPolicy { struct NoWriteBarrierPolicy {
static void InitializingBarrier(const void*, const void*) {} V8_INLINE static void InitializingBarrier(const void*, const void*) {}
static void AssigningBarrier(const void*, const void*) {} V8_INLINE static void AssigningBarrier(const void*, const void*) {}
V8_INLINE static void AssigningBarrier(const void*, MemberStorage) {}
}; };
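These policies are what Member<T> plugs in for its barriers, so the practical rule for embedders is unchanged: constructing a Member runs the (no-op) initializing barrier, and every later assignment runs the assigning barrier, which now resolves the WriteBarrier::Type once and dispatches in the private helper above. A short sketch of where each barrier fires in user code (public cppgc API; the classes are illustrative):

#include "cppgc/allocation.h"
#include "cppgc/garbage-collected.h"
#include "cppgc/member.h"
#include "cppgc/visitor.h"

class Child final : public cppgc::GarbageCollected<Child> {
 public:
  void Trace(cppgc::Visitor*) const {}
};

class Parent final : public cppgc::GarbageCollected<Parent> {
 public:
  explicit Parent(Child* child) : child_(child) {}  // InitializingBarrier (no-op)
  void SetChild(Child* child) { child_ = child; }    // AssigningBarrier
  void Trace(cppgc::Visitor* visitor) const { visitor->Trace(child_); }

 private:
  cppgc::Member<Child> child_;
};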
class V8_EXPORT SameThreadEnabledCheckingPolicyBase { class V8_EXPORT SameThreadEnabledCheckingPolicyBase {
@ -89,7 +110,7 @@ class V8_EXPORT SameThreadEnabledCheckingPolicy
class DisabledCheckingPolicy { class DisabledCheckingPolicy {
protected: protected:
void CheckPointer(const void*) {} V8_INLINE void CheckPointer(const void*) {}
}; };
#ifdef DEBUG #ifdef DEBUG

View file

@ -8,9 +8,11 @@
#include <cstddef> #include <cstddef>
#include <cstdint> #include <cstdint>
#include "cppgc/heap-handle.h"
#include "cppgc/heap-state.h" #include "cppgc/heap-state.h"
#include "cppgc/internal/api-constants.h" #include "cppgc/internal/api-constants.h"
#include "cppgc/internal/atomic-entry-flag.h" #include "cppgc/internal/atomic-entry-flag.h"
#include "cppgc/internal/member-storage.h"
#include "cppgc/platform.h" #include "cppgc/platform.h"
#include "cppgc/sentinel-pointer.h" #include "cppgc/sentinel-pointer.h"
#include "cppgc/trace-trait.h" #include "cppgc/trace-trait.h"
@ -18,6 +20,7 @@
#if defined(CPPGC_CAGED_HEAP) #if defined(CPPGC_CAGED_HEAP)
#include "cppgc/internal/caged-heap-local-data.h" #include "cppgc/internal/caged-heap-local-data.h"
#include "cppgc/internal/caged-heap.h"
#endif #endif
namespace cppgc { namespace cppgc {
@ -40,16 +43,18 @@ class V8_EXPORT WriteBarrier final {
kGenerational, kGenerational,
}; };
enum class GenerationalBarrierType : uint8_t {
kPreciseSlot,
kPreciseUncompressedSlot,
kImpreciseSlot,
};
struct Params { struct Params {
HeapHandle* heap = nullptr; HeapHandle* heap = nullptr;
#if V8_ENABLE_CHECKS #if V8_ENABLE_CHECKS
Type type = Type::kNone; Type type = Type::kNone;
#endif // !V8_ENABLE_CHECKS #endif // !V8_ENABLE_CHECKS
#if defined(CPPGC_CAGED_HEAP) #if defined(CPPGC_CAGED_HEAP)
uintptr_t start = 0;
CagedHeapLocalData& caged_heap() const {
return *reinterpret_cast<CagedHeapLocalData*>(start);
}
uintptr_t slot_offset = 0; uintptr_t slot_offset = 0;
uintptr_t value_offset = 0; uintptr_t value_offset = 0;
#endif // CPPGC_CAGED_HEAP #endif // CPPGC_CAGED_HEAP
@ -63,6 +68,9 @@ class V8_EXPORT WriteBarrier final {
// Returns the required write barrier for a given `slot` and `value`. // Returns the required write barrier for a given `slot` and `value`.
static V8_INLINE Type GetWriteBarrierType(const void* slot, const void* value, static V8_INLINE Type GetWriteBarrierType(const void* slot, const void* value,
Params& params); Params& params);
// Returns the required write barrier for a given `slot` and `value`.
static V8_INLINE Type GetWriteBarrierType(const void* slot, MemberStorage,
Params& params);
// Returns the required write barrier for a given `slot`. // Returns the required write barrier for a given `slot`.
template <typename HeapHandleCallback> template <typename HeapHandleCallback>
static V8_INLINE Type GetWriteBarrierType(const void* slot, Params& params, static V8_INLINE Type GetWriteBarrierType(const void* slot, Params& params,
@ -78,15 +86,13 @@ class V8_EXPORT WriteBarrier final {
static V8_INLINE void SteeleMarkingBarrier(const Params& params, static V8_INLINE void SteeleMarkingBarrier(const Params& params,
const void* object); const void* object);
#if defined(CPPGC_YOUNG_GENERATION) #if defined(CPPGC_YOUNG_GENERATION)
template <GenerationalBarrierType>
static V8_INLINE void GenerationalBarrier(const Params& params, static V8_INLINE void GenerationalBarrier(const Params& params,
const void* slot); const void* slot);
static V8_INLINE void GenerationalBarrierForSourceObject(
const Params& params, const void* inner_pointer);
#else // !CPPGC_YOUNG_GENERATION #else // !CPPGC_YOUNG_GENERATION
template <GenerationalBarrierType>
static V8_INLINE void GenerationalBarrier(const Params& params, static V8_INLINE void GenerationalBarrier(const Params& params,
const void* slot) {} const void* slot) {}
static V8_INLINE void GenerationalBarrierForSourceObject(
const Params& params, const void* inner_pointer) {}
#endif // CPPGC_YOUNG_GENERATION #endif // CPPGC_YOUNG_GENERATION
#if V8_ENABLE_CHECKS #if V8_ENABLE_CHECKS
@ -95,12 +101,10 @@ class V8_EXPORT WriteBarrier final {
static void CheckParams(Type expected_type, const Params& params) {} static void CheckParams(Type expected_type, const Params& params) {}
#endif // !V8_ENABLE_CHECKS #endif // !V8_ENABLE_CHECKS
// The IncrementalOrConcurrentUpdater class allows cppgc internal to update // The FlagUpdater class allows cppgc internal to update
// |incremental_or_concurrent_marking_flag_|. // |write_barrier_enabled_|.
class IncrementalOrConcurrentMarkingFlagUpdater; class FlagUpdater;
static bool IsAnyIncrementalOrConcurrentMarking() { static bool IsEnabled() { return write_barrier_enabled_.MightBeEntered(); }
return incremental_or_concurrent_marking_flag_.MightBeEntered();
}
private: private:
WriteBarrier() = delete; WriteBarrier() = delete;
@ -125,17 +129,23 @@ class V8_EXPORT WriteBarrier final {
static CagedHeapLocalData& GetLocalData(HeapHandle&); static CagedHeapLocalData& GetLocalData(HeapHandle&);
static void GenerationalBarrierSlow(const CagedHeapLocalData& local_data, static void GenerationalBarrierSlow(const CagedHeapLocalData& local_data,
const AgeTable& age_table, const AgeTable& age_table,
const void* slot, uintptr_t value_offset); const void* slot, uintptr_t value_offset,
HeapHandle* heap_handle);
static void GenerationalBarrierForUncompressedSlotSlow(
const CagedHeapLocalData& local_data, const AgeTable& age_table,
const void* slot, uintptr_t value_offset, HeapHandle* heap_handle);
static void GenerationalBarrierForSourceObjectSlow( static void GenerationalBarrierForSourceObjectSlow(
const CagedHeapLocalData& local_data, const void* object); const CagedHeapLocalData& local_data, const void* object,
HeapHandle* heap_handle);
#endif // CPPGC_YOUNG_GENERATION #endif // CPPGC_YOUNG_GENERATION
static AtomicEntryFlag incremental_or_concurrent_marking_flag_; static AtomicEntryFlag write_barrier_enabled_;
}; };
template <WriteBarrier::Type type> template <WriteBarrier::Type type>
V8_INLINE WriteBarrier::Type SetAndReturnType(WriteBarrier::Params& params) { V8_INLINE WriteBarrier::Type SetAndReturnType(WriteBarrier::Params& params) {
if (type == WriteBarrier::Type::kNone) return WriteBarrier::Type::kNone; if constexpr (type == WriteBarrier::Type::kNone)
return WriteBarrier::Type::kNone;
#if V8_ENABLE_CHECKS #if V8_ENABLE_CHECKS
params.type = type; params.type = type;
#endif // !V8_ENABLE_CHECKS #endif // !V8_ENABLE_CHECKS
@ -152,6 +162,13 @@ class V8_EXPORT WriteBarrierTypeForCagedHeapPolicy final {
return ValueModeDispatch<value_mode>::Get(slot, value, params, callback); return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
} }
template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
static V8_INLINE WriteBarrier::Type Get(const void* slot, MemberStorage value,
WriteBarrier::Params& params,
HeapHandleCallback callback) {
return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
}
template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback> template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
static V8_INLINE WriteBarrier::Type Get(const void* value, static V8_INLINE WriteBarrier::Type Get(const void* value,
WriteBarrier::Params& params, WriteBarrier::Params& params,
@ -166,69 +183,77 @@ class V8_EXPORT WriteBarrierTypeForCagedHeapPolicy final {
static V8_INLINE WriteBarrier::Type GetNoSlot(const void* value, static V8_INLINE WriteBarrier::Type GetNoSlot(const void* value,
WriteBarrier::Params& params, WriteBarrier::Params& params,
HeapHandleCallback) { HeapHandleCallback) {
if (!TryGetCagedHeap(value, value, params)) { const bool within_cage = CagedHeapBase::IsWithinCage(value);
return WriteBarrier::Type::kNone; if (!within_cage) return WriteBarrier::Type::kNone;
}
if (V8_UNLIKELY(params.caged_heap().is_incremental_marking_in_progress)) { // We know that |value| points either within the normal page or to the
// beginning of large-page, so extract the page header by bitmasking.
BasePageHandle* page =
BasePageHandle::FromPayload(const_cast<void*>(value));
HeapHandle& heap_handle = page->heap_handle();
if (V8_UNLIKELY(heap_handle.is_incremental_marking_in_progress())) {
return SetAndReturnType<WriteBarrier::Type::kMarking>(params); return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
} }
return SetAndReturnType<WriteBarrier::Type::kNone>(params); return SetAndReturnType<WriteBarrier::Type::kNone>(params);
} }
template <WriteBarrier::ValueMode value_mode> template <WriteBarrier::ValueMode value_mode>
struct ValueModeDispatch; struct ValueModeDispatch;
static V8_INLINE bool TryGetCagedHeap(const void* slot, const void* value,
WriteBarrier::Params& params) {
// TODO(chromium:1056170): Check if the null check can be folded in with
// the rest of the write barrier.
if (!value) return false;
params.start = reinterpret_cast<uintptr_t>(value) &
~(api_constants::kCagedHeapReservationAlignment - 1);
const uintptr_t slot_offset =
reinterpret_cast<uintptr_t>(slot) - params.start;
if (slot_offset > api_constants::kCagedHeapReservationSize) {
// Check if slot is on stack or value is sentinel or nullptr. This relies
// on the fact that kSentinelPointer is encoded as 0x1.
return false;
}
return true;
}
// Returns whether marking is in progress. If marking is not in progress
// sets the start of the cage accordingly.
//
// TODO(chromium:1056170): Create fast path on API.
static bool IsMarking(const HeapHandle&, WriteBarrier::Params&);
}; };
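The fast path above no longer derives a cage base from the value and reads CagedHeapLocalData through it; it asks CagedHeapBase whether the value lies inside the cage and then reads the marking flag from the page's HeapHandle. A condensed restatement of that check, using only the calls visible in this hunk (cppgc-internal API; the header paths are an assumption):

#include "cppgc/internal/base-page-handle.h"  // assumed header for BasePageHandle
#include "cppgc/internal/caged-heap.h"        // assumed header for CagedHeapBase

bool NeedsMarkingBarrier(const void* value) {
  if (!cppgc::internal::CagedHeapBase::IsWithinCage(value)) return false;
  // |value| points into a normal page or to the start of a large page, so the
  // page header (and with it the HeapHandle) is recoverable by masking.
  auto* page = cppgc::internal::BasePageHandle::FromPayload(
      const_cast<void*>(value));
  return page->heap_handle().is_incremental_marking_in_progress();
}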
template <> template <>
struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch< struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
WriteBarrier::ValueMode::kValuePresent> { WriteBarrier::ValueMode::kValuePresent> {
template <typename HeapHandleCallback>
static V8_INLINE WriteBarrier::Type Get(const void* slot,
MemberStorage storage,
WriteBarrier::Params& params,
HeapHandleCallback) {
if (V8_LIKELY(!WriteBarrier::IsEnabled()))
return SetAndReturnType<WriteBarrier::Type::kNone>(params);
return BarrierEnabledGet(slot, storage.Load(), params);
}
template <typename HeapHandleCallback> template <typename HeapHandleCallback>
static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value, static V8_INLINE WriteBarrier::Type Get(const void* slot, const void* value,
WriteBarrier::Params& params, WriteBarrier::Params& params,
HeapHandleCallback) { HeapHandleCallback) {
#if !defined(CPPGC_YOUNG_GENERATION) if (V8_LIKELY(!WriteBarrier::IsEnabled()))
if (V8_LIKELY(!WriteBarrier::IsAnyIncrementalOrConcurrentMarking())) {
return SetAndReturnType<WriteBarrier::Type::kNone>(params); return SetAndReturnType<WriteBarrier::Type::kNone>(params);
}
#endif // !CPPGC_YOUNG_GENERATION return BarrierEnabledGet(slot, value, params);
bool within_cage = TryGetCagedHeap(slot, value, params); }
if (!within_cage) {
return WriteBarrier::Type::kNone; private:
} static V8_INLINE WriteBarrier::Type BarrierEnabledGet(
if (V8_LIKELY(!params.caged_heap().is_incremental_marking_in_progress)) { const void* slot, const void* value, WriteBarrier::Params& params) {
const bool within_cage = CagedHeapBase::AreWithinCage(slot, value);
if (!within_cage) return WriteBarrier::Type::kNone;
// We know that |value| points either within the normal page or to the
// beginning of large-page, so extract the page header by bitmasking.
BasePageHandle* page =
BasePageHandle::FromPayload(const_cast<void*>(value));
HeapHandle& heap_handle = page->heap_handle();
if (V8_LIKELY(!heap_handle.is_incremental_marking_in_progress())) {
#if defined(CPPGC_YOUNG_GENERATION) #if defined(CPPGC_YOUNG_GENERATION)
params.heap = reinterpret_cast<HeapHandle*>(params.start); if (!heap_handle.is_young_generation_enabled())
params.slot_offset = reinterpret_cast<uintptr_t>(slot) - params.start; return WriteBarrier::Type::kNone;
params.value_offset = reinterpret_cast<uintptr_t>(value) - params.start; params.heap = &heap_handle;
params.slot_offset = CagedHeapBase::OffsetFromAddress(slot);
params.value_offset = CagedHeapBase::OffsetFromAddress(value);
return SetAndReturnType<WriteBarrier::Type::kGenerational>(params); return SetAndReturnType<WriteBarrier::Type::kGenerational>(params);
#else // !CPPGC_YOUNG_GENERATION #else // !CPPGC_YOUNG_GENERATION
return SetAndReturnType<WriteBarrier::Type::kNone>(params); return SetAndReturnType<WriteBarrier::Type::kNone>(params);
#endif // !CPPGC_YOUNG_GENERATION #endif // !CPPGC_YOUNG_GENERATION
} }
params.heap = reinterpret_cast<HeapHandle*>(params.start);
// Use marking barrier.
params.heap = &heap_handle;
return SetAndReturnType<WriteBarrier::Type::kMarking>(params); return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
} }
}; };
@ -240,28 +265,28 @@ struct WriteBarrierTypeForCagedHeapPolicy::ValueModeDispatch<
static V8_INLINE WriteBarrier::Type Get(const void* slot, const void*, static V8_INLINE WriteBarrier::Type Get(const void* slot, const void*,
WriteBarrier::Params& params, WriteBarrier::Params& params,
HeapHandleCallback callback) { HeapHandleCallback callback) {
#if defined(CPPGC_YOUNG_GENERATION) if (V8_LIKELY(!WriteBarrier::IsEnabled()))
return SetAndReturnType<WriteBarrier::Type::kNone>(params);
HeapHandle& handle = callback(); HeapHandle& handle = callback();
if (V8_LIKELY(!IsMarking(handle, params))) { #if defined(CPPGC_YOUNG_GENERATION)
// params.start is populated by IsMarking(). if (V8_LIKELY(!handle.is_incremental_marking_in_progress())) {
if (!handle.is_young_generation_enabled()) {
return WriteBarrier::Type::kNone;
}
params.heap = &handle; params.heap = &handle;
params.slot_offset = reinterpret_cast<uintptr_t>(slot) - params.start; // Check if slot is on stack.
// params.value_offset stays 0. if (V8_UNLIKELY(!CagedHeapBase::IsWithinCage(slot))) {
if (params.slot_offset > api_constants::kCagedHeapReservationSize) {
// Check if slot is on stack.
return SetAndReturnType<WriteBarrier::Type::kNone>(params); return SetAndReturnType<WriteBarrier::Type::kNone>(params);
} }
params.slot_offset = CagedHeapBase::OffsetFromAddress(slot);
return SetAndReturnType<WriteBarrier::Type::kGenerational>(params); return SetAndReturnType<WriteBarrier::Type::kGenerational>(params);
} }
#else // !CPPGC_YOUNG_GENERATION #else // !defined(CPPGC_YOUNG_GENERATION)
if (V8_LIKELY(!WriteBarrier::IsAnyIncrementalOrConcurrentMarking())) {
return SetAndReturnType<WriteBarrier::Type::kNone>(params);
}
HeapHandle& handle = callback();
if (V8_UNLIKELY(!subtle::HeapState::IsMarking(handle))) { if (V8_UNLIKELY(!subtle::HeapState::IsMarking(handle))) {
return SetAndReturnType<WriteBarrier::Type::kNone>(params); return SetAndReturnType<WriteBarrier::Type::kNone>(params);
} }
#endif // !CPPGC_YOUNG_GENERATION #endif // !defined(CPPGC_YOUNG_GENERATION)
params.heap = &handle; params.heap = &handle;
return SetAndReturnType<WriteBarrier::Type::kMarking>(params); return SetAndReturnType<WriteBarrier::Type::kMarking>(params);
} }
@ -278,6 +303,16 @@ class V8_EXPORT WriteBarrierTypeForNonCagedHeapPolicy final {
return ValueModeDispatch<value_mode>::Get(slot, value, params, callback); return ValueModeDispatch<value_mode>::Get(slot, value, params, callback);
} }
template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
static V8_INLINE WriteBarrier::Type Get(const void* slot, MemberStorage value,
WriteBarrier::Params& params,
HeapHandleCallback callback) {
// `MemberStorage` will always be `RawPointer` for non-caged heap builds.
// Just convert to `void*` in this case.
return ValueModeDispatch<value_mode>::Get(slot, value.Load(), params,
callback);
}
template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback> template <WriteBarrier::ValueMode value_mode, typename HeapHandleCallback>
static V8_INLINE WriteBarrier::Type Get(const void* value, static V8_INLINE WriteBarrier::Type Get(const void* value,
WriteBarrier::Params& params, WriteBarrier::Params& params,
@ -310,7 +345,7 @@ struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch<
if (object <= static_cast<void*>(kSentinelPointer)) { if (object <= static_cast<void*>(kSentinelPointer)) {
return SetAndReturnType<WriteBarrier::Type::kNone>(params); return SetAndReturnType<WriteBarrier::Type::kNone>(params);
} }
if (V8_LIKELY(!WriteBarrier::IsAnyIncrementalOrConcurrentMarking())) { if (V8_LIKELY(!WriteBarrier::IsEnabled())) {
return SetAndReturnType<WriteBarrier::Type::kNone>(params); return SetAndReturnType<WriteBarrier::Type::kNone>(params);
} }
if (IsMarking(object, &params.heap)) { if (IsMarking(object, &params.heap)) {
@ -327,7 +362,7 @@ struct WriteBarrierTypeForNonCagedHeapPolicy::ValueModeDispatch<
static V8_INLINE WriteBarrier::Type Get(const void*, const void*, static V8_INLINE WriteBarrier::Type Get(const void*, const void*,
WriteBarrier::Params& params, WriteBarrier::Params& params,
HeapHandleCallback callback) { HeapHandleCallback callback) {
if (V8_UNLIKELY(WriteBarrier::IsAnyIncrementalOrConcurrentMarking())) { if (V8_UNLIKELY(WriteBarrier::IsEnabled())) {
HeapHandle& handle = callback(); HeapHandle& handle = callback();
if (IsMarking(handle)) { if (IsMarking(handle)) {
params.heap = &handle; params.heap = &handle;
@ -345,6 +380,13 @@ WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
params, []() {}); params, []() {});
} }
// static
WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
const void* slot, MemberStorage value, WriteBarrier::Params& params) {
return WriteBarrierTypePolicy::Get<ValueMode::kValuePresent>(slot, value,
params, []() {});
}
// static // static
template <typename HeapHandleCallback> template <typename HeapHandleCallback>
WriteBarrier::Type WriteBarrier::GetWriteBarrierType( WriteBarrier::Type WriteBarrier::GetWriteBarrierType(
@ -397,34 +439,32 @@ void WriteBarrier::SteeleMarkingBarrier(const Params& params,
} }
#if defined(CPPGC_YOUNG_GENERATION) #if defined(CPPGC_YOUNG_GENERATION)
// static // static
template <WriteBarrier::GenerationalBarrierType type>
void WriteBarrier::GenerationalBarrier(const Params& params, const void* slot) { void WriteBarrier::GenerationalBarrier(const Params& params, const void* slot) {
CheckParams(Type::kGenerational, params); CheckParams(Type::kGenerational, params);
const CagedHeapLocalData& local_data = params.caged_heap(); const CagedHeapLocalData& local_data = CagedHeapLocalData::Get();
const AgeTable& age_table = local_data.age_table; const AgeTable& age_table = local_data.age_table;
// Bail out if the slot is in young generation. // Bail out if the slot (precise or imprecise) is in young generation.
if (V8_LIKELY(age_table.GetAge(params.slot_offset) == AgeTable::Age::kYoung)) if (V8_LIKELY(age_table.GetAge(params.slot_offset) == AgeTable::Age::kYoung))
return; return;
GenerationalBarrierSlow(local_data, age_table, slot, params.value_offset); // Dispatch between different types of barriers.
} // TODO(chromium:1029379): Consider reload local_data in the slow path to
// reduce register pressure.
// static if constexpr (type == GenerationalBarrierType::kPreciseSlot) {
void WriteBarrier::GenerationalBarrierForSourceObject( GenerationalBarrierSlow(local_data, age_table, slot, params.value_offset,
const Params& params, const void* inner_pointer) { params.heap);
CheckParams(Type::kGenerational, params); } else if constexpr (type ==
GenerationalBarrierType::kPreciseUncompressedSlot) {
const CagedHeapLocalData& local_data = params.caged_heap(); GenerationalBarrierForUncompressedSlotSlow(
const AgeTable& age_table = local_data.age_table; local_data, age_table, slot, params.value_offset, params.heap);
} else {
// Assume that if the first element is in young generation, the whole range is GenerationalBarrierForSourceObjectSlow(local_data, slot, params.heap);
// in young generation. }
if (V8_LIKELY(age_table.GetAge(params.slot_offset) == AgeTable::Age::kYoung))
return;
GenerationalBarrierForSourceObjectSlow(local_data, inner_pointer);
} }
#endif // !CPPGC_YOUNG_GENERATION #endif // !CPPGC_YOUNG_GENERATION
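With GenerationalBarrier now templated over GenerationalBarrierType, callers choose the precise-slot, precise-uncompressed-slot, or imprecise-slot flavor at compile time rather than calling separate named entry points. A consolidated sketch of the caller-side dispatch that the earlier pointer-policy hunk relies on (cppgc-internal API exactly as shown in this diff; nothing beyond it is assumed):

#include "cppgc/internal/write-barrier.h"

void WriteSlot(const void* slot, const void* value) {
  using WB = cppgc::internal::WriteBarrier;
  WB::Params params;
  switch (WB::GetWriteBarrierType(slot, value, params)) {
    case WB::Type::kGenerational:
      // In-object (possibly compressed) slots use the precise-slot flavor;
      // uncompressed or imprecise slots would pick one of the other two.
      WB::GenerationalBarrier<WB::GenerationalBarrierType::kPreciseSlot>(params,
                                                                         slot);
      break;
    case WB::Type::kMarking:
      WB::DijkstraMarkingBarrier(params, value);
      break;
    case WB::Type::kNone:
      break;
  }
}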

View file

@ -7,6 +7,7 @@
#include "cppgc/heap.h" #include "cppgc/heap.h"
#include "cppgc/member.h" #include "cppgc/member.h"
#include "cppgc/sentinel-pointer.h"
#include "cppgc/trace-trait.h" #include "cppgc/trace-trait.h"
#include "v8config.h" // NOLINT(build/include_directory) #include "v8config.h" // NOLINT(build/include_directory)
@ -44,24 +45,24 @@ class V8_EXPORT LivenessBroker final {
public: public:
template <typename T> template <typename T>
bool IsHeapObjectAlive(const T* object) const { bool IsHeapObjectAlive(const T* object) const {
// nullptr objects are considered alive to allow weakness to be used from // - nullptr objects are considered alive to allow weakness to be used from
// stack while running into a conservative GC. Treating nullptr as dead // stack while running into a conservative GC. Treating nullptr as dead
// would mean that e.g. custom collectins could not be strongified on stack. // would mean that e.g. custom collections could not be strongified on
return !object || // stack.
// - Sentinel pointers are also preserved in weakness and not cleared.
return !object || object == kSentinelPointer ||
IsHeapObjectAliveImpl( IsHeapObjectAliveImpl(
TraceTrait<T>::GetTraceDescriptor(object).base_object_payload); TraceTrait<T>::GetTraceDescriptor(object).base_object_payload);
} }
template <typename T> template <typename T>
bool IsHeapObjectAlive(const WeakMember<T>& weak_member) const { bool IsHeapObjectAlive(const WeakMember<T>& weak_member) const {
return (weak_member != kSentinelPointer) && return IsHeapObjectAlive<T>(weak_member.Get());
IsHeapObjectAlive<T>(weak_member.Get());
} }
template <typename T> template <typename T>
bool IsHeapObjectAlive(const UntracedMember<T>& untraced_member) const { bool IsHeapObjectAlive(const UntracedMember<T>& untraced_member) const {
return (untraced_member != kSentinelPointer) && return IsHeapObjectAlive<T>(untraced_member.Get());
IsHeapObjectAlive<T>(untraced_member.Get());
} }
private: private:
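Because IsHeapObjectAlive now reports nullptr and kSentinelPointer as alive, custom weak callbacks no longer have to filter those values before querying the broker. A small sketch of such a callback (RegisterWeakCallback and UntracedMember are existing public cppgc API; the cache class is illustrative):

#include "cppgc/garbage-collected.h"
#include "cppgc/liveness-broker.h"
#include "cppgc/member.h"
#include "cppgc/visitor.h"

class Child final : public cppgc::GarbageCollected<Child> {
 public:
  void Trace(cppgc::Visitor*) const {}
};

class Cache final : public cppgc::GarbageCollected<Cache> {
 public:
  void Trace(cppgc::Visitor* visitor) const {
    visitor->RegisterWeakCallback(&ClearDeadEntry, this);
  }

 private:
  static void ClearDeadEntry(const cppgc::LivenessBroker& broker,
                             const void* self) {
    auto& cache = *const_cast<Cache*>(static_cast<const Cache*>(self));
    // nullptr and sentinel entries count as alive; only entries whose target
    // object is actually dead are dropped here.
    if (!broker.IsHeapObjectAlive(cache.entry_)) cache.entry_.Clear();
  }

  cppgc::UntracedMember<Child> entry_;
};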

View file

@ -9,6 +9,8 @@
#include <cstddef> #include <cstddef>
#include <type_traits> #include <type_traits>
#include "cppgc/internal/api-constants.h"
#include "cppgc/internal/member-storage.h"
#include "cppgc/internal/pointer-policies.h" #include "cppgc/internal/pointer-policies.h"
#include "cppgc/sentinel-pointer.h" #include "cppgc/sentinel-pointer.h"
#include "cppgc/type-traits.h" #include "cppgc/type-traits.h"
@ -16,6 +18,10 @@
namespace cppgc { namespace cppgc {
namespace subtle {
class HeapConsistency;
} // namespace subtle
class Visitor; class Visitor;
namespace internal { namespace internal {
@ -23,33 +29,46 @@ namespace internal {
// MemberBase always refers to the object as const object and defers to // MemberBase always refers to the object as const object and defers to
// BasicMember on casting to the right type as needed. // BasicMember on casting to the right type as needed.
class MemberBase { class MemberBase {
public:
#if defined(CPPGC_POINTER_COMPRESSION)
using RawStorage = CompressedPointer;
#else // !defined(CPPGC_POINTER_COMPRESSION)
using RawStorage = RawPointer;
#endif // !defined(CPPGC_POINTER_COMPRESSION)
protected: protected:
struct AtomicInitializerTag {}; struct AtomicInitializerTag {};
MemberBase() : raw_(nullptr) {} V8_INLINE MemberBase() = default;
explicit MemberBase(const void* value) : raw_(value) {} V8_INLINE explicit MemberBase(const void* value) : raw_(value) {}
MemberBase(const void* value, AtomicInitializerTag) { SetRawAtomic(value); } V8_INLINE MemberBase(const void* value, AtomicInitializerTag) {
SetRawAtomic(value);
const void** GetRawSlot() const { return &raw_; }
const void* GetRaw() const { return raw_; }
void SetRaw(void* value) { raw_ = value; }
const void* GetRawAtomic() const {
return reinterpret_cast<const std::atomic<const void*>*>(&raw_)->load(
std::memory_order_relaxed);
}
void SetRawAtomic(const void* value) {
reinterpret_cast<std::atomic<const void*>*>(&raw_)->store(
value, std::memory_order_relaxed);
} }
void ClearFromGC() const { raw_ = nullptr; } V8_INLINE explicit MemberBase(RawStorage raw) : raw_(raw) {}
V8_INLINE explicit MemberBase(std::nullptr_t) : raw_(nullptr) {}
V8_INLINE explicit MemberBase(SentinelPointer s) : raw_(s) {}
V8_INLINE const void** GetRawSlot() const {
return reinterpret_cast<const void**>(const_cast<MemberBase*>(this));
}
V8_INLINE const void* GetRaw() const { return raw_.Load(); }
V8_INLINE void SetRaw(void* value) { raw_.Store(value); }
V8_INLINE const void* GetRawAtomic() const { return raw_.LoadAtomic(); }
V8_INLINE void SetRawAtomic(const void* value) { raw_.StoreAtomic(value); }
V8_INLINE RawStorage GetRawStorage() const { return raw_; }
V8_INLINE void SetRawStorageAtomic(RawStorage other) {
reinterpret_cast<std::atomic<RawStorage>&>(raw_).store(
other, std::memory_order_relaxed);
}
V8_INLINE bool IsCleared() const { return raw_.IsCleared(); }
V8_INLINE void ClearFromGC() const { raw_.Clear(); }
private: private:
// All constructors initialize `raw_`. Do not add a default value here as it mutable RawStorage raw_;
// results in a non-atomic write on some builds, even when the atomic version
// of the constructor is used.
mutable const void* raw_;
}; };
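MemberBase now wraps a RawStorage word (CompressedPointer when CPPGC_POINTER_COMPRESSION is enabled, plain RawPointer otherwise), and GetRawSlot() can reinterpret the object itself as the slot because that word is the only data member. The real encoding lives in the new member-storage.h, which this excerpt does not show; the toy below only illustrates the idea of storing a cage-relative 32-bit offset instead of a full pointer and is an assumption, not the actual scheme:

#include <cstdint>

// Toy illustration only -- not cppgc::internal::CompressedPointer.
class ToyCompressedPointer {
 public:
  static inline uintptr_t cage_base = 0;  // set once when the cage is reserved

  void Store(const void* ptr) {
    value_ = ptr ? static_cast<uint32_t>(reinterpret_cast<uintptr_t>(ptr) -
                                         cage_base)
                 : 0u;
  }
  const void* Load() const {
    return value_ ? reinterpret_cast<const void*>(cage_base + value_) : nullptr;
  }

 private:
  uint32_t value_ = 0;  // half the footprint of a raw 64-bit pointer
};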
// The basic class from which all Member classes are 'generated'. // The basic class from which all Member classes are 'generated'.
@ -59,134 +78,184 @@ class BasicMember final : private MemberBase, private CheckingPolicy {
public: public:
using PointeeType = T; using PointeeType = T;
constexpr BasicMember() = default; V8_INLINE constexpr BasicMember() = default;
constexpr BasicMember(std::nullptr_t) {} // NOLINT V8_INLINE constexpr BasicMember(std::nullptr_t) {} // NOLINT
BasicMember(SentinelPointer s) : MemberBase(s) {} // NOLINT V8_INLINE BasicMember(SentinelPointer s) : MemberBase(s) {} // NOLINT
BasicMember(T* raw) : MemberBase(raw) { // NOLINT V8_INLINE BasicMember(T* raw) : MemberBase(raw) { // NOLINT
InitializingWriteBarrier(); InitializingWriteBarrier(raw);
this->CheckPointer(Get()); this->CheckPointer(Get());
} }
BasicMember(T& raw) : BasicMember(&raw) {} // NOLINT V8_INLINE BasicMember(T& raw) // NOLINT
: BasicMember(&raw) {}
// Atomic ctor. Using the AtomicInitializerTag forces BasicMember to // Atomic ctor. Using the AtomicInitializerTag forces BasicMember to
// initialize using atomic assignments. This is required for preventing // initialize using atomic assignments. This is required for preventing
// data races with concurrent marking. // data races with concurrent marking.
using AtomicInitializerTag = MemberBase::AtomicInitializerTag; using AtomicInitializerTag = MemberBase::AtomicInitializerTag;
BasicMember(std::nullptr_t, AtomicInitializerTag atomic) V8_INLINE BasicMember(std::nullptr_t, AtomicInitializerTag atomic)
: MemberBase(nullptr, atomic) {} : MemberBase(nullptr, atomic) {}
BasicMember(SentinelPointer s, AtomicInitializerTag atomic) V8_INLINE BasicMember(SentinelPointer s, AtomicInitializerTag atomic)
: MemberBase(s, atomic) {} : MemberBase(s, atomic) {}
BasicMember(T* raw, AtomicInitializerTag atomic) : MemberBase(raw, atomic) { V8_INLINE BasicMember(T* raw, AtomicInitializerTag atomic)
InitializingWriteBarrier(); : MemberBase(raw, atomic) {
InitializingWriteBarrier(raw);
this->CheckPointer(Get()); this->CheckPointer(Get());
} }
BasicMember(T& raw, AtomicInitializerTag atomic) V8_INLINE BasicMember(T& raw, AtomicInitializerTag atomic)
: BasicMember(&raw, atomic) {} : BasicMember(&raw, atomic) {}
// Copy ctor. // Copy ctor.
BasicMember(const BasicMember& other) : BasicMember(other.Get()) {} V8_INLINE BasicMember(const BasicMember& other)
// Allow heterogeneous construction. : BasicMember(other.GetRawStorage()) {}
// Heterogeneous copy constructors. When the source pointer has a different
// type, perform a compress-decompress round, because the source pointer may
// need to be adjusted.
template <typename U, typename OtherBarrierPolicy, typename OtherWeaknessTag, template <typename U, typename OtherBarrierPolicy, typename OtherWeaknessTag,
typename OtherCheckingPolicy, typename OtherCheckingPolicy,
typename = std::enable_if_t<std::is_base_of<T, U>::value>> std::enable_if_t<internal::IsDecayedSameV<T, U>>* = nullptr>
BasicMember( // NOLINT V8_INLINE BasicMember( // NOLINT
const BasicMember<U, OtherWeaknessTag, OtherBarrierPolicy,
OtherCheckingPolicy>& other)
: BasicMember(other.GetRawStorage()) {}
template <typename U, typename OtherBarrierPolicy, typename OtherWeaknessTag,
typename OtherCheckingPolicy,
std::enable_if_t<internal::IsStrictlyBaseOfV<T, U>>* = nullptr>
V8_INLINE BasicMember( // NOLINT
const BasicMember<U, OtherWeaknessTag, OtherBarrierPolicy, const BasicMember<U, OtherWeaknessTag, OtherBarrierPolicy,
OtherCheckingPolicy>& other) OtherCheckingPolicy>& other)
: BasicMember(other.Get()) {} : BasicMember(other.Get()) {}
// Move ctor. // Move ctor.
BasicMember(BasicMember&& other) noexcept : BasicMember(other.Get()) { V8_INLINE BasicMember(BasicMember&& other) noexcept
: BasicMember(other.GetRawStorage()) {
other.Clear(); other.Clear();
} }
// Allow heterogeneous move construction.
// Heterogeneous move constructors. When the source pointer has a different
// type, perform a compress-decompress round, because the source pointer may
// need to be adjusted.
template <typename U, typename OtherBarrierPolicy, typename OtherWeaknessTag, template <typename U, typename OtherBarrierPolicy, typename OtherWeaknessTag,
typename OtherCheckingPolicy, typename OtherCheckingPolicy,
typename = std::enable_if_t<std::is_base_of<T, U>::value>> std::enable_if_t<internal::IsDecayedSameV<T, U>>* = nullptr>
BasicMember(BasicMember<U, OtherWeaknessTag, OtherBarrierPolicy, V8_INLINE BasicMember(BasicMember<U, OtherWeaknessTag, OtherBarrierPolicy,
OtherCheckingPolicy>&& other) noexcept OtherCheckingPolicy>&& other) noexcept
: BasicMember(other.GetRawStorage()) {
other.Clear();
}
template <typename U, typename OtherBarrierPolicy, typename OtherWeaknessTag,
typename OtherCheckingPolicy,
std::enable_if_t<internal::IsStrictlyBaseOfV<T, U>>* = nullptr>
V8_INLINE BasicMember(BasicMember<U, OtherWeaknessTag, OtherBarrierPolicy,
OtherCheckingPolicy>&& other) noexcept
: BasicMember(other.Get()) { : BasicMember(other.Get()) {
other.Clear(); other.Clear();
} }
// Construction from Persistent. // Construction from Persistent.
template <typename U, typename PersistentWeaknessPolicy, template <typename U, typename PersistentWeaknessPolicy,
typename PersistentLocationPolicy, typename PersistentLocationPolicy,
typename PersistentCheckingPolicy, typename PersistentCheckingPolicy,
typename = std::enable_if_t<std::is_base_of<T, U>::value>> typename = std::enable_if_t<std::is_base_of<T, U>::value>>
BasicMember(const BasicPersistent<U, PersistentWeaknessPolicy, V8_INLINE BasicMember(const BasicPersistent<U, PersistentWeaknessPolicy,
PersistentLocationPolicy, PersistentLocationPolicy,
PersistentCheckingPolicy>& p) PersistentCheckingPolicy>& p)
: BasicMember(p.Get()) {} : BasicMember(p.Get()) {}
// Copy assignment. // Copy assignment.
BasicMember& operator=(const BasicMember& other) { V8_INLINE BasicMember& operator=(const BasicMember& other) {
return operator=(other.Get()); return operator=(other.GetRawStorage());
} }
// Allow heterogeneous copy assignment.
// Heterogeneous copy assignment. When the source pointer has a different
// type, perform a compress-decompress round, because the source pointer may
// need to be adjusted.
template <typename U, typename OtherWeaknessTag, typename OtherBarrierPolicy, template <typename U, typename OtherWeaknessTag, typename OtherBarrierPolicy,
typename OtherCheckingPolicy, typename OtherCheckingPolicy>
typename = std::enable_if_t<std::is_base_of<T, U>::value>> V8_INLINE BasicMember& operator=(
BasicMember& operator=(
const BasicMember<U, OtherWeaknessTag, OtherBarrierPolicy, const BasicMember<U, OtherWeaknessTag, OtherBarrierPolicy,
OtherCheckingPolicy>& other) { OtherCheckingPolicy>& other) {
return operator=(other.Get()); if constexpr (internal::IsDecayedSameV<T, U>) {
return operator=(other.GetRawStorage());
} else {
static_assert(internal::IsStrictlyBaseOfV<T, U>);
return operator=(other.Get());
}
} }
// Move assignment. // Move assignment.
BasicMember& operator=(BasicMember&& other) noexcept { V8_INLINE BasicMember& operator=(BasicMember&& other) noexcept {
operator=(other.Get()); operator=(other.GetRawStorage());
other.Clear(); other.Clear();
return *this; return *this;
} }
// Heterogeneous move assignment.
// Heterogeneous move assignment. When the source pointer has a different
// type, perform a compress-decompress round, because the source pointer may
// need to be adjusted.
template <typename U, typename OtherWeaknessTag, typename OtherBarrierPolicy, template <typename U, typename OtherWeaknessTag, typename OtherBarrierPolicy,
typename OtherCheckingPolicy, typename OtherCheckingPolicy>
typename = std::enable_if_t<std::is_base_of<T, U>::value>> V8_INLINE BasicMember& operator=(
BasicMember& operator=(BasicMember<U, OtherWeaknessTag, OtherBarrierPolicy, BasicMember<U, OtherWeaknessTag, OtherBarrierPolicy,
OtherCheckingPolicy>&& other) noexcept { OtherCheckingPolicy>&& other) noexcept {
operator=(other.Get()); if constexpr (internal::IsDecayedSameV<T, U>) {
operator=(other.GetRawStorage());
} else {
static_assert(internal::IsStrictlyBaseOfV<T, U>);
operator=(other.Get());
}
other.Clear(); other.Clear();
return *this; return *this;
} }
// Assignment from Persistent. // Assignment from Persistent.
template <typename U, typename PersistentWeaknessPolicy, template <typename U, typename PersistentWeaknessPolicy,
typename PersistentLocationPolicy, typename PersistentLocationPolicy,
typename PersistentCheckingPolicy, typename PersistentCheckingPolicy,
typename = std::enable_if_t<std::is_base_of<T, U>::value>> typename = std::enable_if_t<std::is_base_of<T, U>::value>>
BasicMember& operator=( V8_INLINE BasicMember& operator=(
const BasicPersistent<U, PersistentWeaknessPolicy, const BasicPersistent<U, PersistentWeaknessPolicy,
PersistentLocationPolicy, PersistentCheckingPolicy>& PersistentLocationPolicy, PersistentCheckingPolicy>&
other) { other) {
return operator=(other.Get()); return operator=(other.Get());
} }
BasicMember& operator=(T* other) {
V8_INLINE BasicMember& operator=(T* other) {
SetRawAtomic(other); SetRawAtomic(other);
AssigningWriteBarrier(); AssigningWriteBarrier(other);
this->CheckPointer(Get()); this->CheckPointer(Get());
return *this; return *this;
} }
BasicMember& operator=(std::nullptr_t) {
V8_INLINE BasicMember& operator=(std::nullptr_t) {
Clear(); Clear();
return *this; return *this;
} }
BasicMember& operator=(SentinelPointer s) { V8_INLINE BasicMember& operator=(SentinelPointer s) {
SetRawAtomic(s); SetRawAtomic(s);
return *this; return *this;
} }
template <typename OtherWeaknessTag, typename OtherBarrierPolicy, template <typename OtherWeaknessTag, typename OtherBarrierPolicy,
typename OtherCheckingPolicy> typename OtherCheckingPolicy>
void Swap(BasicMember<T, OtherWeaknessTag, OtherBarrierPolicy, V8_INLINE void Swap(BasicMember<T, OtherWeaknessTag, OtherBarrierPolicy,
OtherCheckingPolicy>& other) { OtherCheckingPolicy>& other) {
T* tmp = Get(); auto tmp = GetRawStorage();
*this = other; *this = other;
other = tmp; other = tmp;
} }
explicit operator bool() const { return Get(); } V8_INLINE explicit operator bool() const { return !IsCleared(); }
operator T*() const { return Get(); } V8_INLINE operator T*() const { return Get(); }
T* operator->() const { return Get(); } V8_INLINE T* operator->() const { return Get(); }
T& operator*() const { return *Get(); } V8_INLINE T& operator*() const { return *Get(); }
// CFI cast exemption to allow passing SentinelPointer through T* and support // CFI cast exemption to allow passing SentinelPointer through T* and support
// heterogeneous assignments between different Member and Persistent handles // heterogeneous assignments between different Member and Persistent handles
// based on their actual types. // based on their actual types.
V8_CLANG_NO_SANITIZE("cfi-unrelated-cast") T* Get() const { V8_INLINE V8_CLANG_NO_SANITIZE("cfi-unrelated-cast") T* Get() const {
// Executed by the mutator, hence non atomic load. // Executed by the mutator, hence non atomic load.
// //
// The const_cast below removes the constness from MemberBase storage. The // The const_cast below removes the constness from MemberBase storage. The
@ -195,59 +264,262 @@ class BasicMember final : private MemberBase, private CheckingPolicy {
return static_cast<T*>(const_cast<void*>(MemberBase::GetRaw())); return static_cast<T*>(const_cast<void*>(MemberBase::GetRaw()));
} }
void Clear() { SetRawAtomic(nullptr); } V8_INLINE void Clear() { SetRawStorageAtomic(RawStorage{}); }
T* Release() { V8_INLINE T* Release() {
T* result = Get(); T* result = Get();
Clear(); Clear();
return result; return result;
} }
const T** GetSlotForTesting() const { V8_INLINE const T** GetSlotForTesting() const {
return reinterpret_cast<const T**>(GetRawSlot()); return reinterpret_cast<const T**>(GetRawSlot());
} }
V8_INLINE RawStorage GetRawStorage() const {
return MemberBase::GetRawStorage();
}
private: private:
const T* GetRawAtomic() const { V8_INLINE explicit BasicMember(RawStorage raw) : MemberBase(raw) {
InitializingWriteBarrier(Get());
this->CheckPointer(Get());
}
V8_INLINE BasicMember& operator=(RawStorage other) {
SetRawStorageAtomic(other);
AssigningWriteBarrier();
this->CheckPointer(Get());
return *this;
}
V8_INLINE const T* GetRawAtomic() const {
return static_cast<const T*>(MemberBase::GetRawAtomic()); return static_cast<const T*>(MemberBase::GetRawAtomic());
} }
void InitializingWriteBarrier() const { V8_INLINE void InitializingWriteBarrier(T* value) const {
WriteBarrierPolicy::InitializingBarrier(GetRawSlot(), GetRaw()); WriteBarrierPolicy::InitializingBarrier(GetRawSlot(), value);
} }
void AssigningWriteBarrier() const { V8_INLINE void AssigningWriteBarrier(T* value) const {
WriteBarrierPolicy::AssigningBarrier(GetRawSlot(), GetRaw()); WriteBarrierPolicy::AssigningBarrier(GetRawSlot(), value);
}
V8_INLINE void AssigningWriteBarrier() const {
WriteBarrierPolicy::AssigningBarrier(GetRawSlot(), GetRawStorage());
} }
void ClearFromGC() const { MemberBase::ClearFromGC(); } V8_INLINE void ClearFromGC() const { MemberBase::ClearFromGC(); }
T* GetFromGC() const { return Get(); } V8_INLINE T* GetFromGC() const { return Get(); }
friend class cppgc::subtle::HeapConsistency;
friend class cppgc::Visitor; friend class cppgc::Visitor;
template <typename U> template <typename U>
friend struct cppgc::TraceTrait; friend struct cppgc::TraceTrait;
template <typename T1, typename WeaknessTag1, typename WriteBarrierPolicy1,
typename CheckingPolicy1>
friend class BasicMember;
}; };
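The split into same-decayed-type and strictly-base-of overloads lets same-type copies move the compressed word verbatim, while derived-to-base conversions go through Get() because the cast may adjust the pointer value (for example under multiple inheritance). From the embedder side both spellings keep working; a short usage sketch (illustrative hierarchy; Members normally live inside garbage-collected objects, the stack use here just keeps the sketch small):

#include "cppgc/allocation.h"
#include "cppgc/garbage-collected.h"
#include "cppgc/member.h"
#include "cppgc/visitor.h"

class Base : public cppgc::GarbageCollected<Base> {
 public:
  virtual void Trace(cppgc::Visitor*) const {}
};

class Derived final : public Base {
 public:
  void Trace(cppgc::Visitor* visitor) const override { Base::Trace(visitor); }
};

void Convert(cppgc::AllocationHandle& handle) {
  cppgc::Member<Derived> derived = cppgc::MakeGarbageCollected<Derived>(handle);
  cppgc::Member<Derived> same = derived;  // same decayed type: copies storage
  cppgc::Member<Base> base = derived;     // strict base: decompress + cast
  (void)same;
  (void)base;
}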
// Member equality operators.
template <typename T1, typename WeaknessTag1, typename WriteBarrierPolicy1, template <typename T1, typename WeaknessTag1, typename WriteBarrierPolicy1,
typename CheckingPolicy1, typename T2, typename WeaknessTag2, typename CheckingPolicy1, typename T2, typename WeaknessTag2,
typename WriteBarrierPolicy2, typename CheckingPolicy2> typename WriteBarrierPolicy2, typename CheckingPolicy2>
bool operator==(const BasicMember<T1, WeaknessTag1, WriteBarrierPolicy1, V8_INLINE bool operator==(
CheckingPolicy1>& member1, const BasicMember<T1, WeaknessTag1, WriteBarrierPolicy1, CheckingPolicy1>&
const BasicMember<T2, WeaknessTag2, WriteBarrierPolicy2, member1,
CheckingPolicy2>& member2) { const BasicMember<T2, WeaknessTag2, WriteBarrierPolicy2, CheckingPolicy2>&
return member1.Get() == member2.Get(); member2) {
if constexpr (internal::IsDecayedSameV<T1, T2>) {
// Check compressed pointers if types are the same.
return member1.GetRawStorage() == member2.GetRawStorage();
} else {
static_assert(internal::IsStrictlyBaseOfV<T1, T2> ||
internal::IsStrictlyBaseOfV<T2, T1>);
// Otherwise, check decompressed pointers.
return member1.Get() == member2.Get();
}
} }
template <typename T1, typename WeaknessTag1, typename WriteBarrierPolicy1, template <typename T1, typename WeaknessTag1, typename WriteBarrierPolicy1,
typename CheckingPolicy1, typename T2, typename WeaknessTag2, typename CheckingPolicy1, typename T2, typename WeaknessTag2,
typename WriteBarrierPolicy2, typename CheckingPolicy2> typename WriteBarrierPolicy2, typename CheckingPolicy2>
bool operator!=(const BasicMember<T1, WeaknessTag1, WriteBarrierPolicy1, V8_INLINE bool operator!=(
CheckingPolicy1>& member1, const BasicMember<T1, WeaknessTag1, WriteBarrierPolicy1, CheckingPolicy1>&
const BasicMember<T2, WeaknessTag2, WriteBarrierPolicy2, member1,
CheckingPolicy2>& member2) { const BasicMember<T2, WeaknessTag2, WriteBarrierPolicy2, CheckingPolicy2>&
member2) {
return !(member1 == member2); return !(member1 == member2);
} }
// Equality with raw pointers.
template <typename T, typename WeaknessTag, typename WriteBarrierPolicy,
typename CheckingPolicy, typename U>
V8_INLINE bool operator==(const BasicMember<T, WeaknessTag, WriteBarrierPolicy,
CheckingPolicy>& member,
U* raw) {
// Never allow comparison with erased pointers.
static_assert(!internal::IsDecayedSameV<void, U>);
if constexpr (internal::IsDecayedSameV<T, U>) {
// Check compressed pointers if types are the same.
return member.GetRawStorage() == MemberBase::RawStorage(raw);
} else if constexpr (internal::IsStrictlyBaseOfV<T, U>) {
// Cast the raw pointer to T, which may adjust the pointer.
return member.GetRawStorage() ==
MemberBase::RawStorage(static_cast<T*>(raw));
} else {
// Otherwise, decompress the member.
return member.Get() == raw;
}
}
template <typename T, typename WeaknessTag, typename WriteBarrierPolicy,
typename CheckingPolicy, typename U>
V8_INLINE bool operator!=(const BasicMember<T, WeaknessTag, WriteBarrierPolicy,
CheckingPolicy>& member,
U* raw) {
return !(member == raw);
}
template <typename T, typename U, typename WeaknessTag,
typename WriteBarrierPolicy, typename CheckingPolicy>
V8_INLINE bool operator==(T* raw,
const BasicMember<U, WeaknessTag, WriteBarrierPolicy,
CheckingPolicy>& member) {
return member == raw;
}
template <typename T, typename U, typename WeaknessTag,
typename WriteBarrierPolicy, typename CheckingPolicy>
V8_INLINE bool operator!=(T* raw,
const BasicMember<U, WeaknessTag, WriteBarrierPolicy,
CheckingPolicy>& member) {
return !(raw == member);
}
// Equality with sentinel.
template <typename T, typename WeaknessTag, typename WriteBarrierPolicy,
typename CheckingPolicy>
V8_INLINE bool operator==(const BasicMember<T, WeaknessTag, WriteBarrierPolicy,
CheckingPolicy>& member,
SentinelPointer) {
return member.GetRawStorage().IsSentinel();
}
template <typename T, typename WeaknessTag, typename WriteBarrierPolicy,
typename CheckingPolicy>
V8_INLINE bool operator!=(const BasicMember<T, WeaknessTag, WriteBarrierPolicy,
CheckingPolicy>& member,
SentinelPointer s) {
return !(member == s);
}
template <typename T, typename WeaknessTag, typename WriteBarrierPolicy,
typename CheckingPolicy>
V8_INLINE bool operator==(SentinelPointer s,
const BasicMember<T, WeaknessTag, WriteBarrierPolicy,
CheckingPolicy>& member) {
return member == s;
}
template <typename T, typename WeaknessTag, typename WriteBarrierPolicy,
typename CheckingPolicy>
V8_INLINE bool operator!=(SentinelPointer s,
const BasicMember<T, WeaknessTag, WriteBarrierPolicy,
CheckingPolicy>& member) {
return !(s == member);
}
// Equality with nullptr.
template <typename T, typename WeaknessTag, typename WriteBarrierPolicy,
typename CheckingPolicy>
V8_INLINE bool operator==(const BasicMember<T, WeaknessTag, WriteBarrierPolicy,
CheckingPolicy>& member,
std::nullptr_t) {
return !static_cast<bool>(member);
}
template <typename T, typename WeaknessTag, typename WriteBarrierPolicy,
typename CheckingPolicy>
V8_INLINE bool operator!=(const BasicMember<T, WeaknessTag, WriteBarrierPolicy,
CheckingPolicy>& member,
std::nullptr_t n) {
return !(member == n);
}
template <typename T, typename WeaknessTag, typename WriteBarrierPolicy,
typename CheckingPolicy>
V8_INLINE bool operator==(std::nullptr_t n,
const BasicMember<T, WeaknessTag, WriteBarrierPolicy,
CheckingPolicy>& member) {
return member == n;
}
template <typename T, typename WeaknessTag, typename WriteBarrierPolicy,
typename CheckingPolicy>
V8_INLINE bool operator!=(std::nullptr_t n,
const BasicMember<T, WeaknessTag, WriteBarrierPolicy,
CheckingPolicy>& member) {
return !(n == member);
}
// Relational operators.
template <typename T1, typename WeaknessTag1, typename WriteBarrierPolicy1,
typename CheckingPolicy1, typename T2, typename WeaknessTag2,
typename WriteBarrierPolicy2, typename CheckingPolicy2>
V8_INLINE bool operator<(
const BasicMember<T1, WeaknessTag1, WriteBarrierPolicy1, CheckingPolicy1>&
member1,
const BasicMember<T2, WeaknessTag2, WriteBarrierPolicy2, CheckingPolicy2>&
member2) {
static_assert(
internal::IsDecayedSameV<T1, T2>,
"Comparison works only for same pointer type modulo cv-qualifiers");
return member1.GetRawStorage() < member2.GetRawStorage();
}
template <typename T1, typename WeaknessTag1, typename WriteBarrierPolicy1,
typename CheckingPolicy1, typename T2, typename WeaknessTag2,
typename WriteBarrierPolicy2, typename CheckingPolicy2>
V8_INLINE bool operator<=(
const BasicMember<T1, WeaknessTag1, WriteBarrierPolicy1, CheckingPolicy1>&
member1,
const BasicMember<T2, WeaknessTag2, WriteBarrierPolicy2, CheckingPolicy2>&
member2) {
static_assert(
internal::IsDecayedSameV<T1, T2>,
"Comparison works only for same pointer type modulo cv-qualifiers");
return member1.GetRawStorage() <= member2.GetRawStorage();
}
template <typename T1, typename WeaknessTag1, typename WriteBarrierPolicy1,
typename CheckingPolicy1, typename T2, typename WeaknessTag2,
typename WriteBarrierPolicy2, typename CheckingPolicy2>
V8_INLINE bool operator>(
const BasicMember<T1, WeaknessTag1, WriteBarrierPolicy1, CheckingPolicy1>&
member1,
const BasicMember<T2, WeaknessTag2, WriteBarrierPolicy2, CheckingPolicy2>&
member2) {
static_assert(
internal::IsDecayedSameV<T1, T2>,
"Comparison works only for same pointer type modulo cv-qualifiers");
return member1.GetRawStorage() > member2.GetRawStorage();
}
template <typename T1, typename WeaknessTag1, typename WriteBarrierPolicy1,
typename CheckingPolicy1, typename T2, typename WeaknessTag2,
typename WriteBarrierPolicy2, typename CheckingPolicy2>
V8_INLINE bool operator>=(
const BasicMember<T1, WeaknessTag1, WriteBarrierPolicy1, CheckingPolicy1>&
member1,
const BasicMember<T2, WeaknessTag2, WriteBarrierPolicy2, CheckingPolicy2>&
member2) {
static_assert(
internal::IsDecayedSameV<T1, T2>,
"Comparison works only for same pointer type modulo cv-qualifiers");
return member1.GetRawStorage() >= member2.GetRawStorage();
}
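Comparison semantics after this change, in short: same-decayed-pointee comparisons compare the compressed storage, mixed base/derived comparisons decompress, raw-pointer and sentinel/nullptr comparisons have dedicated overloads, and the ordering operators require the same pointee type. A compact sketch (assumes the Base/Derived setup from the previous sketch):

void Compare(cppgc::Member<Derived>& a, cppgc::Member<Derived>& b,
             cppgc::Member<Base>& c, Derived* raw) {
  bool same = (a == b);           // same decayed type: compressed-word compare
  bool mixed = (c == a);          // base vs. derived: compares decompressed pointers
  bool with_raw = (a == raw);     // raw pointer: wrapped into RawStorage first
  bool cleared = (a == nullptr);  // dedicated nullptr overload, no decompression
  bool ordered = (a < b);         // ordering requires the same pointee type
  // (c < a) would fail the static_assert in the relational operators above.
  (void)same; (void)mixed; (void)with_raw; (void)cleared; (void)ordered;
}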
template <typename T, typename WriteBarrierPolicy, typename CheckingPolicy> template <typename T, typename WriteBarrierPolicy, typename CheckingPolicy>
struct IsWeak< struct IsWeak<
internal::BasicMember<T, WeakMemberTag, WriteBarrierPolicy, CheckingPolicy>> internal::BasicMember<T, WeakMemberTag, WriteBarrierPolicy, CheckingPolicy>>

View file

@ -37,15 +37,15 @@ class V8_EXPORT NameProvider {
static constexpr const char kNoNameDeducible[] = "<No name>"; static constexpr const char kNoNameDeducible[] = "<No name>";
/** /**
* Indicating whether internal names are hidden or not. * Indicating whether the build supports extracting C++ names as object names.
* *
* @returns true if C++ names should be hidden and represented by kHiddenName. * @returns true if C++ names should be hidden and represented by kHiddenName.
*/ */
static constexpr bool HideInternalNames() { static constexpr bool SupportsCppClassNamesAsObjectNames() {
#if CPPGC_SUPPORTS_OBJECT_NAMES #if CPPGC_SUPPORTS_OBJECT_NAMES
return false;
#else // !CPPGC_SUPPORTS_OBJECT_NAMES
return true; return true;
#else // !CPPGC_SUPPORTS_OBJECT_NAMES
return false;
#endif // !CPPGC_SUPPORTS_OBJECT_NAMES #endif // !CPPGC_SUPPORTS_OBJECT_NAMES
} }
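The rename flips the predicate's polarity to the positive question (can this build use C++ class names as heap object names), which official builds typically answer with false. Objects that should always be named can keep implementing NameProvider regardless; a small sketch (GetHumanReadableName() is the existing cppgc hook, the class is illustrative):

#include "cppgc/garbage-collected.h"
#include "cppgc/name-provider.h"

class NamedObject final : public cppgc::GarbageCollected<NamedObject>,
                          public cppgc::NameProvider {
 public:
  void Trace(cppgc::Visitor*) const {}

  // Reported in heap snapshots even when
  // NameProvider::SupportsCppClassNamesAsObjectNames() is false.
  const char* GetHumanReadableName() const final { return "NamedObject"; }
};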

View file

@ -16,9 +16,6 @@
#include "v8config.h" // NOLINT(build/include_directory) #include "v8config.h" // NOLINT(build/include_directory)
namespace cppgc { namespace cppgc {
class Visitor;
namespace internal { namespace internal {
// PersistentBase always refers to the object as const object and defers to // PersistentBase always refers to the object as const object and defers to
@ -78,7 +75,7 @@ class BasicPersistent final : public PersistentBase,
: PersistentBase(raw), LocationPolicy(loc) { : PersistentBase(raw), LocationPolicy(loc) {
if (!IsValid()) return; if (!IsValid()) return;
SetNode(WeaknessPolicy::GetPersistentRegion(GetValue()) SetNode(WeaknessPolicy::GetPersistentRegion(GetValue())
.AllocateNode(this, &BasicPersistent::Trace)); .AllocateNode(this, &TraceAsRoot));
this->CheckPointer(Get()); this->CheckPointer(Get());
} }
@ -221,9 +218,8 @@ class BasicPersistent final : public PersistentBase,
} }
private: private:
static void Trace(Visitor* v, const void* ptr) { static void TraceAsRoot(RootVisitor& root_visitor, const void* ptr) {
const auto* persistent = static_cast<const BasicPersistent*>(ptr); root_visitor.Trace(*static_cast<const BasicPersistent*>(ptr));
v->TraceRoot(*persistent, persistent->Location());
} }
bool IsValid() const { bool IsValid() const {
@ -247,7 +243,7 @@ class BasicPersistent final : public PersistentBase,
SetValue(ptr); SetValue(ptr);
if (!IsValid()) return; if (!IsValid()) return;
SetNode(WeaknessPolicy::GetPersistentRegion(GetValue()) SetNode(WeaknessPolicy::GetPersistentRegion(GetValue())
.AllocateNode(this, &BasicPersistent::Trace)); .AllocateNode(this, &TraceAsRoot));
this->CheckPointer(Get()); this->CheckPointer(Get());
} }
@ -264,7 +260,7 @@ class BasicPersistent final : public PersistentBase,
return static_cast<T*>(const_cast<void*>(GetValue())); return static_cast<T*>(const_cast<void*>(GetValue()));
} }
friend class cppgc::Visitor; friend class internal::RootVisitor;
}; };
template <typename T1, typename WeaknessPolicy1, typename LocationPolicy1, template <typename T1, typename WeaknessPolicy1, typename LocationPolicy1,

View file

@ -132,8 +132,8 @@ class V8_EXPORT Platform {
* *
* Can be called multiple times when paired with `ShutdownProcess()`. * Can be called multiple times when paired with `ShutdownProcess()`.
* *
* \param page_allocator The allocator used for maintaining meta data. Must not * \param page_allocator The allocator used for maintaining meta data. Must stay
* change between multiple calls to InitializeProcess. * always alive and not change between multiple calls to InitializeProcess.
*/ */
V8_EXPORT void InitializeProcess(PageAllocator* page_allocator); V8_EXPORT void InitializeProcess(PageAllocator* page_allocator);
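The clarified contract is that the PageAllocator handed to InitializeProcess must outlive all cppgc usage in the process, not just the call itself, and repeated InitializeProcess/ShutdownProcess cycles must see the same allocator. One simple way to satisfy that (DefaultPlatform is the stock cppgc platform; the function-local static is just one possible lifetime strategy):

#include <memory>

#include "cppgc/default-platform.h"
#include "cppgc/heap.h"
#include "cppgc/platform.h"

std::unique_ptr<cppgc::Heap> StartHeap() {
  // The platform -- and therefore the PageAllocator it hands out -- stays
  // alive for the rest of the process.
  static auto platform = std::make_shared<cppgc::DefaultPlatform>();
  cppgc::InitializeProcess(platform->GetPageAllocator());
  return cppgc::Heap::Create(platform);
}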

View file

@ -13,9 +13,9 @@ namespace internal {
// Special tag type used to denote some sentinel member. The semantics of the // Special tag type used to denote some sentinel member. The semantics of the
// sentinel is defined by the embedder. // sentinel is defined by the embedder.
struct SentinelPointer { struct SentinelPointer {
static constexpr intptr_t kSentinelValue = 0b10;
template <typename T> template <typename T>
operator T*() const { operator T*() const {
static constexpr intptr_t kSentinelValue = 1;
return reinterpret_cast<T*>(kSentinelValue); return reinterpret_cast<T*>(kSentinelValue);
} }
// Hidden friends. // Hidden friends.
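Moving kSentinelValue out of the conversion operator and bumping it from 1 to 0b10 keeps the sentinel distinguishable from nullptr and compatible with the compressed-pointer encoding. Observable behaviour stays the same: kSentinelPointer converts to any T* and is preserved by weak processing rather than being cleared. A tiny sketch (Child as in the earlier sketches):

#include "cppgc/member.h"
#include "cppgc/sentinel-pointer.h"

void MarkDeleted(cppgc::WeakMember<Child>& slot) {
  slot = cppgc::kSentinelPointer;  // "deleted" marker, survives GCs
  // slot == cppgc::kSentinelPointer -> true; slot == nullptr -> false.
}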

View file

@ -16,6 +16,10 @@ class Visitor;
namespace internal { namespace internal {
class RootVisitor;
using TraceRootCallback = void (*)(RootVisitor&, const void* object);
// Implementation of the default TraceTrait handling GarbageCollected and // Implementation of the default TraceTrait handling GarbageCollected and
// GarbageCollectedMixin. // GarbageCollectedMixin.
template <typename T, template <typename T,

View file

@ -170,6 +170,15 @@ struct IsComplete {
decltype(IsSizeOfKnown(std::declval<T*>()))::value; decltype(IsSizeOfKnown(std::declval<T*>()))::value;
}; };
template <typename T, typename U>
constexpr bool IsDecayedSameV =
std::is_same_v<std::decay_t<T>, std::decay_t<U>>;
template <typename B, typename D>
constexpr bool IsStrictlyBaseOfV =
std::is_base_of_v<std::decay_t<B>, std::decay_t<D>> &&
!IsDecayedSameV<B, D>;
} // namespace internal } // namespace internal
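IsDecayedSameV and IsStrictlyBaseOfV are the traits the Member overloads above dispatch on: the first ignores cv-qualifiers, the second additionally excludes the identical-type case. A few illustrative checks (Base/Derived as in the earlier sketches):

static_assert(cppgc::internal::IsDecayedSameV<const Derived, Derived>);
static_assert(!cppgc::internal::IsDecayedSameV<Base, Derived>);
static_assert(cppgc::internal::IsStrictlyBaseOfV<Base, Derived>);
static_assert(!cppgc::internal::IsStrictlyBaseOfV<Derived, Derived>);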
/** /**

View file

@ -62,22 +62,6 @@ class V8_EXPORT Visitor {
virtual ~Visitor() = default; virtual ~Visitor() = default;
/**
* Trace method for raw pointers. Prefer the versions for managed pointers.
*
* \param member Reference retaining an object.
*/
template <typename T>
void Trace(const T* t) {
static_assert(sizeof(T), "Pointee type must be fully defined.");
static_assert(internal::IsGarbageCollectedOrMixinType<T>::value,
"T must be GarbageCollected or GarbageCollectedMixin type");
if (!t) {
return;
}
Visit(t, TraceTrait<T>::GetTraceDescriptor(t));
}
/** /**
* Trace method for Member. * Trace method for Member.
* *
@ -87,7 +71,7 @@ class V8_EXPORT Visitor {
void Trace(const Member<T>& member) { void Trace(const Member<T>& member) {
const T* value = member.GetRawAtomic(); const T* value = member.GetRawAtomic();
CPPGC_DCHECK(value != kSentinelPointer); CPPGC_DCHECK(value != kSentinelPointer);
Trace(value); TraceImpl(value);
} }
/** /**
@ -231,23 +215,33 @@ class V8_EXPORT Visitor {
void TraceStrongly(const WeakMember<T>& weak_member) { void TraceStrongly(const WeakMember<T>& weak_member) {
const T* value = weak_member.GetRawAtomic(); const T* value = weak_member.GetRawAtomic();
CPPGC_DCHECK(value != kSentinelPointer); CPPGC_DCHECK(value != kSentinelPointer);
Trace(value); TraceImpl(value);
} }
/** /**
* Trace method for weak containers. * Trace method for retaining containers strongly.
* *
* \param object reference of the weak container. * \param object reference to the container.
*/
template <typename T>
void TraceStrongContainer(const T* object) {
TraceImpl(object);
}
/**
* Trace method for retaining containers weakly.
*
* \param object reference to the container.
* \param callback to be invoked. * \param callback to be invoked.
* \param data custom data that is passed to the callback. * \param callback_data custom data that is passed to the callback.
*/ */
template <typename T> template <typename T>
void TraceWeakContainer(const T* object, WeakCallback callback, void TraceWeakContainer(const T* object, WeakCallback callback,
const void* data) { const void* callback_data) {
if (!object) return; if (!object) return;
VisitWeakContainer(object, TraceTrait<T>::GetTraceDescriptor(object), VisitWeakContainer(object, TraceTrait<T>::GetTraceDescriptor(object),
TraceTrait<T>::GetWeakTraceDescriptor(object), callback, TraceTrait<T>::GetWeakTraceDescriptor(object), callback,
data); callback_data);
} }
/** /**
@ -255,6 +249,7 @@ class V8_EXPORT Visitor {
* compactable space. Such references may be arbitrarily moved by the GC. * compactable space. Such references may be arbitrarily moved by the GC.
* *
* \param slot location of reference to object that might be moved by the GC. * \param slot location of reference to object that might be moved by the GC.
* The slot must contain an uncompressed pointer.
*/ */
template <typename T> template <typename T>
void RegisterMovableReference(const T** slot) { void RegisterMovableReference(const T** slot) {
@ -297,9 +292,6 @@ class V8_EXPORT Visitor {
virtual void Visit(const void* self, TraceDescriptor) {} virtual void Visit(const void* self, TraceDescriptor) {}
virtual void VisitWeak(const void* self, TraceDescriptor, WeakCallback, virtual void VisitWeak(const void* self, TraceDescriptor, WeakCallback,
const void* weak_member) {} const void* weak_member) {}
virtual void VisitRoot(const void*, TraceDescriptor, const SourceLocation&) {}
virtual void VisitWeakRoot(const void* self, TraceDescriptor, WeakCallback,
const void* weak_root, const SourceLocation&) {}
virtual void VisitEphemeron(const void* key, const void* value, virtual void VisitEphemeron(const void* key, const void* value,
TraceDescriptor value_desc) {} TraceDescriptor value_desc) {}
virtual void VisitWeakContainer(const void* self, TraceDescriptor strong_desc, virtual void VisitWeakContainer(const void* self, TraceDescriptor strong_desc,
@ -320,44 +312,20 @@ class V8_EXPORT Visitor {
static void HandleWeak(const LivenessBroker& info, const void* object) { static void HandleWeak(const LivenessBroker& info, const void* object) {
const PointerType* weak = static_cast<const PointerType*>(object); const PointerType* weak = static_cast<const PointerType*>(object);
auto* raw_ptr = weak->GetFromGC(); auto* raw_ptr = weak->GetFromGC();
// Sentinel values are preserved for weak pointers.
if (raw_ptr == kSentinelPointer) return;
if (!info.IsHeapObjectAlive(raw_ptr)) { if (!info.IsHeapObjectAlive(raw_ptr)) {
weak->ClearFromGC(); weak->ClearFromGC();
} }
} }
template <typename Persistent, template <typename T>
std::enable_if_t<Persistent::IsStrongPersistent::value>* = nullptr> void TraceImpl(const T* t) {
void TraceRoot(const Persistent& p, const SourceLocation& loc) { static_assert(sizeof(T), "Pointee type must be fully defined.");
using PointeeType = typename Persistent::PointeeType; static_assert(internal::IsGarbageCollectedOrMixinType<T>::value,
static_assert(sizeof(PointeeType), "T must be GarbageCollected or GarbageCollectedMixin type");
"Persistent's pointee type must be fully defined"); if (!t) {
static_assert(internal::IsGarbageCollectedOrMixinType<PointeeType>::value,
"Persistent's pointee type must be GarbageCollected or "
"GarbageCollectedMixin");
auto* ptr = p.GetFromGC();
if (!ptr) {
return; return;
} }
VisitRoot(ptr, TraceTrait<PointeeType>::GetTraceDescriptor(ptr), loc); Visit(t, TraceTrait<T>::GetTraceDescriptor(t));
}
template <
typename WeakPersistent,
std::enable_if_t<!WeakPersistent::IsStrongPersistent::value>* = nullptr>
void TraceRoot(const WeakPersistent& p, const SourceLocation& loc) {
using PointeeType = typename WeakPersistent::PointeeType;
static_assert(sizeof(PointeeType),
"Persistent's pointee type must be fully defined");
static_assert(internal::IsGarbageCollectedOrMixinType<PointeeType>::value,
"Persistent's pointee type must be GarbageCollected or "
"GarbageCollectedMixin");
static_assert(!internal::IsAllocatedOnCompactableSpace<PointeeType>::value,
"Weak references to compactable objects are not allowed");
auto* ptr = p.GetFromGC();
VisitWeakRoot(ptr, TraceTrait<PointeeType>::GetTraceDescriptor(ptr),
&HandleWeak<WeakPersistent>, &p, loc);
} }
#if V8_ENABLE_CHECKS #if V8_ENABLE_CHECKS
@ -374,6 +342,70 @@ class V8_EXPORT Visitor {
friend class internal::VisitorBase; friend class internal::VisitorBase;
}; };
namespace internal {
class V8_EXPORT RootVisitor {
public:
explicit RootVisitor(Visitor::Key) {}
virtual ~RootVisitor() = default;
template <typename AnyStrongPersistentType,
std::enable_if_t<
AnyStrongPersistentType::IsStrongPersistent::value>* = nullptr>
void Trace(const AnyStrongPersistentType& p) {
using PointeeType = typename AnyStrongPersistentType::PointeeType;
const void* object = Extract(p);
if (!object) {
return;
}
VisitRoot(object, TraceTrait<PointeeType>::GetTraceDescriptor(object),
p.Location());
}
template <typename AnyWeakPersistentType,
std::enable_if_t<
!AnyWeakPersistentType::IsStrongPersistent::value>* = nullptr>
void Trace(const AnyWeakPersistentType& p) {
using PointeeType = typename AnyWeakPersistentType::PointeeType;
static_assert(!internal::IsAllocatedOnCompactableSpace<PointeeType>::value,
"Weak references to compactable objects are not allowed");
const void* object = Extract(p);
if (!object) {
return;
}
VisitWeakRoot(object, TraceTrait<PointeeType>::GetTraceDescriptor(object),
&HandleWeak<AnyWeakPersistentType>, &p, p.Location());
}
protected:
virtual void VisitRoot(const void*, TraceDescriptor, const SourceLocation&) {}
virtual void VisitWeakRoot(const void* self, TraceDescriptor, WeakCallback,
const void* weak_root, const SourceLocation&) {}
private:
template <typename AnyPersistentType>
static const void* Extract(AnyPersistentType& p) {
using PointeeType = typename AnyPersistentType::PointeeType;
static_assert(sizeof(PointeeType),
"Persistent's pointee type must be fully defined");
static_assert(internal::IsGarbageCollectedOrMixinType<PointeeType>::value,
"Persistent's pointee type must be GarbageCollected or "
"GarbageCollectedMixin");
return p.GetFromGC();
}
template <typename PointerType>
static void HandleWeak(const LivenessBroker& info, const void* object) {
const PointerType* weak = static_cast<const PointerType*>(object);
auto* raw_ptr = weak->GetFromGC();
if (!info.IsHeapObjectAlive(raw_ptr)) {
weak->ClearFromGC();
}
}
};
} // namespace internal
} // namespace cppgc } // namespace cppgc
#endif // INCLUDE_CPPGC_VISITOR_H_ #endif // INCLUDE_CPPGC_VISITOR_H_
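The hunks above reshape how cppgc::Visitor dispatches traces (the new private TraceImpl helper) and move Persistent tracing into the new internal::RootVisitor. As a minimal sketch of the embedder-facing side, assuming hypothetical Child/Parent classes, a Trace method still simply forwards its Member and WeakMember fields to the visitor:

#include "cppgc/garbage-collected.h"
#include "cppgc/member.h"
#include "cppgc/visitor.h"

class Child final : public cppgc::GarbageCollected<Child> {
 public:
  void Trace(cppgc::Visitor*) const {}
};

class Parent final : public cppgc::GarbageCollected<Parent> {
 public:
  void Trace(cppgc::Visitor* visitor) const {
    visitor->Trace(strong_child_);  // Member<T>: kept alive, routed via TraceImpl().
    visitor->Trace(weak_child_);    // WeakMember<T>: cleared if Child is dead.
  }

 private:
  cppgc::Member<Child> strong_child_;
  cppgc::WeakMember<Child> weak_child_;
};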
View file
@ -113,6 +113,11 @@ domain Debugger
Runtime.RemoteObject this Runtime.RemoteObject this
# The value being returned, if the function is at return point. # The value being returned, if the function is at return point.
optional Runtime.RemoteObject returnValue optional Runtime.RemoteObject returnValue
# Valid only while the VM is paused and indicates whether this frame
# can be restarted or not. Note that a `true` value here does not
# guarantee that Debugger#restartFrame with this CallFrameId will be
# successful, but it is very likely.
experimental optional boolean canBeRestarted
# Scope description. # Scope description.
type Scope extends object type Scope extends object
@ -239,6 +244,40 @@ domain Debugger
# Wasm bytecode. # Wasm bytecode.
optional binary bytecode optional binary bytecode
experimental type WasmDisassemblyChunk extends object
properties
# The next chunk of disassembled lines.
array of string lines
# The bytecode offsets describing the start of each line.
array of integer bytecodeOffsets
experimental command disassembleWasmModule
parameters
# Id of the script to disassemble
Runtime.ScriptId scriptId
returns
# For large modules, return a stream from which additional chunks of
# disassembly can be read successively.
optional string streamId
# The total number of lines in the disassembly text.
integer totalNumberOfLines
# The offsets of all function bodies, in the format [start1, end1,
# start2, end2, ...] where all ends are exclusive.
array of integer functionBodyOffsets
# The first chunk of disassembly.
WasmDisassemblyChunk chunk
# Disassemble the next chunk of lines for the module corresponding to the
# stream. If disassembly is complete, this API will invalidate the streamId
# and return an empty chunk. Any subsequent calls for the now invalid stream
# will return errors.
experimental command nextWasmDisassemblyChunk
parameters
string streamId
returns
# The next chunk of disassembly.
WasmDisassemblyChunk chunk
# This command is deprecated. Use getScriptSource instead. # This command is deprecated. Use getScriptSource instead.
deprecated command getWasmBytecode deprecated command getWasmBytecode
parameters parameters
@ -268,18 +307,35 @@ domain Debugger
parameters parameters
BreakpointId breakpointId BreakpointId breakpointId
# Restarts particular call frame from the beginning. # Restarts particular call frame from the beginning. The old, deprecated
deprecated command restartFrame # behavior of `restartFrame` is to stay paused and allow further CDP commands
# after a restart was scheduled. This can cause problems with restarting, so
# we now continue execution immediately after it has been scheduled until we
# reach the beginning of the restarted frame.
#
# To stay backwards compatible, `restartFrame` now expects a `mode`
# parameter to be present. If the `mode` parameter is missing, `restartFrame`
# errors out.
#
# The various return values are deprecated and `callFrames` is always empty.
# Use the call frames from the `Debugger#paused` event instead, which fires
# once V8 pauses at the beginning of the restarted function.
command restartFrame
parameters parameters
# Call frame identifier to evaluate on. # Call frame identifier to evaluate on.
CallFrameId callFrameId CallFrameId callFrameId
# The `mode` parameter must be present and set to 'StepInto', otherwise
# `restartFrame` will error out.
experimental optional enum mode
# Pause at the beginning of the restarted function
StepInto
returns returns
# New stack trace. # New stack trace.
array of CallFrame callFrames deprecated array of CallFrame callFrames
# Async stack trace, if any. # Async stack trace, if any.
optional Runtime.StackTrace asyncStackTrace deprecated optional Runtime.StackTrace asyncStackTrace
# Async stack trace, if any. # Async stack trace, if any.
experimental optional Runtime.StackTraceId asyncStackTraceId deprecated optional Runtime.StackTraceId asyncStackTraceId
# Resumes JavaScript execution. # Resumes JavaScript execution.
command resume command resume
@ -419,6 +475,12 @@ domain Debugger
Runtime.CallArgument newValue Runtime.CallArgument newValue
# Edits JavaScript source live. # Edits JavaScript source live.
#
# In general, functions that are currently on the stack cannot be edited, with
# a single exception: If the edited function is the top-most stack frame and
# that is the only activation of that function on the stack. In this case
# the live edit will be successful and a `Debugger.restartFrame` for the
# top-most function is automatically triggered.
command setScriptSource command setScriptSource
parameters parameters
# Id of the script to edit. # Id of the script to edit.
@ -428,16 +490,27 @@ domain Debugger
# If true the change will not actually be applied. Dry run may be used to get result # If true the change will not actually be applied. Dry run may be used to get result
# description without actually modifying the code. # description without actually modifying the code.
optional boolean dryRun optional boolean dryRun
# If true, then `scriptSource` is allowed to change the function on top of the stack
# as long as the top-most stack frame is the only activation of that function.
experimental optional boolean allowTopFrameEditing
returns returns
# New stack trace in case editing has happened while VM was stopped. # New stack trace in case editing has happened while VM was stopped.
optional array of CallFrame callFrames deprecated optional array of CallFrame callFrames
# Whether current call stack was modified after applying the changes. # Whether current call stack was modified after applying the changes.
optional boolean stackChanged deprecated optional boolean stackChanged
# Async stack trace, if any. # Async stack trace, if any.
optional Runtime.StackTrace asyncStackTrace deprecated optional Runtime.StackTrace asyncStackTrace
# Async stack trace, if any. # Async stack trace, if any.
experimental optional Runtime.StackTraceId asyncStackTraceId deprecated optional Runtime.StackTraceId asyncStackTraceId
# Exception details if any. # Whether the operation was successful or not. Only `Ok` denotes a
# successful live edit while the other enum variants denote why
# the live edit failed.
experimental enum status
Ok
CompileError
BlockedByActiveGenerator
BlockedByActiveFunction
# Exception details if any. Only present when `status` is `CompileError`.
optional Runtime.ExceptionDetails exceptionDetails optional Runtime.ExceptionDetails exceptionDetails
# Makes page not interrupt on any pauses (breakpoint, exception, dom exception etc). # Makes page not interrupt on any pauses (breakpoint, exception, dom exception etc).
@ -554,7 +627,7 @@ domain Debugger
integer endColumn integer endColumn
# Specifies script creation context. # Specifies script creation context.
Runtime.ExecutionContextId executionContextId Runtime.ExecutionContextId executionContextId
# Content hash of the script. # Content hash of the script, SHA-256.
string hash string hash
# Embedder-specific auxiliary data. # Embedder-specific auxiliary data.
optional object executionContextAuxData optional object executionContextAuxData
@ -593,7 +666,7 @@ domain Debugger
integer endColumn integer endColumn
# Specifies script creation context. # Specifies script creation context.
Runtime.ExecutionContextId executionContextId Runtime.ExecutionContextId executionContextId
# Content hash of the script. # Content hash of the script, SHA-256.
string hash string hash
# Embedder-specific auxiliary data. # Embedder-specific auxiliary data.
optional object executionContextAuxData optional object executionContextAuxData
@ -708,18 +781,24 @@ experimental domain HeapProfiler
# If true 'reportHeapSnapshotProgress' events will be generated while snapshot is being taken # If true 'reportHeapSnapshotProgress' events will be generated while snapshot is being taken
# when the tracking is stopped. # when the tracking is stopped.
optional boolean reportProgress optional boolean reportProgress
optional boolean treatGlobalObjectsAsRoots # Deprecated in favor of `exposeInternals`.
deprecated optional boolean treatGlobalObjectsAsRoots
# If true, numerical values are included in the snapshot # If true, numerical values are included in the snapshot
optional boolean captureNumericValue optional boolean captureNumericValue
# If true, exposes internals of the snapshot.
experimental optional boolean exposeInternals
command takeHeapSnapshot command takeHeapSnapshot
parameters parameters
# If true 'reportHeapSnapshotProgress' events will be generated while snapshot is being taken. # If true 'reportHeapSnapshotProgress' events will be generated while snapshot is being taken.
optional boolean reportProgress optional boolean reportProgress
# If true, a raw snapshot without artificial roots will be generated # If true, a raw snapshot without artificial roots will be generated.
optional boolean treatGlobalObjectsAsRoots # Deprecated in favor of `exposeInternals`.
deprecated optional boolean treatGlobalObjectsAsRoots
# If true, numerical values are included in the snapshot # If true, numerical values are included in the snapshot
optional boolean captureNumericValue optional boolean captureNumericValue
# If true, exposes internals of the snapshot.
experimental optional boolean exposeInternals
event addHeapSnapshotChunk event addHeapSnapshotChunk
parameters parameters
@ -1342,7 +1421,9 @@ domain Runtime
optional string objectGroup optional string objectGroup
# Whether to throw an exception if side effect cannot be ruled out during evaluation. # Whether to throw an exception if side effect cannot be ruled out during evaluation.
experimental optional boolean throwOnSideEffect experimental optional boolean throwOnSideEffect
# Whether the result should be serialized according to https://w3c.github.io/webdriver-bidi. # Whether the result should contain `webDriverValue`, serialized according to
# https://w3c.github.io/webdriver-bidi. This is mutually exclusive with `returnByValue`, but
# resulting `objectId` is still provided.
experimental optional boolean generateWebDriverValue experimental optional boolean generateWebDriverValue
returns returns
# Call result. # Call result.
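The protocol changes above (restartFrame's mandatory `mode`, the Wasm disassembly commands, the setScriptSource `status` enum, and the new HeapProfiler/Runtime flags) are consumed over the wire. A hedged sketch of issuing the new restartFrame contract from an embedder, assuming an existing `session` (a v8_inspector::V8InspectorSession) and a `call_frame_id` string:

// The command now requires mode: "StepInto"; the returned callFrames are
// deprecated, so the embedder instead waits for the next Debugger.paused event.
std::string msg =
    R"({"id":1,"method":"Debugger.restartFrame","params":{"callFrameId":")" +
    call_frame_id + R"(","mode":"StepInto"}})";
session->dispatchProtocolMessage(v8_inspector::StringView(
    reinterpret_cast<const uint8_t*>(msg.data()), msg.size()));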
View file
@ -12,6 +12,7 @@
#include "cppgc/common.h" #include "cppgc/common.h"
#include "v8-data.h" // NOLINT(build/include_directory) #include "v8-data.h" // NOLINT(build/include_directory)
#include "v8-local-handle.h" // NOLINT(build/include_directory) #include "v8-local-handle.h" // NOLINT(build/include_directory)
#include "v8-promise.h" // NOLINT(build/include_directory)
#include "v8config.h" // NOLINT(build/include_directory) #include "v8config.h" // NOLINT(build/include_directory)
#if defined(V8_OS_WIN) #if defined(V8_OS_WIN)
@ -105,7 +106,7 @@ struct JitCodeEvent {
size_t line_number_table_size; size_t line_number_table_size;
}; };
wasm_source_info_t* wasm_source_info; wasm_source_info_t* wasm_source_info = nullptr;
union { union {
// Only valid for CODE_ADDED. // Only valid for CODE_ADDED.
@ -216,7 +217,13 @@ using AddHistogramSampleCallback = void (*)(void* histogram, int sample);
using FatalErrorCallback = void (*)(const char* location, const char* message); using FatalErrorCallback = void (*)(const char* location, const char* message);
using OOMErrorCallback = void (*)(const char* location, bool is_heap_oom); struct OOMDetails {
bool is_heap_oom = false;
const char* detail = nullptr;
};
using OOMErrorCallback = void (*)(const char* location,
const OOMDetails& details);
using MessageCallback = void (*)(Local<Message> message, Local<Value> data); using MessageCallback = void (*)(Local<Message> message, Local<Value> data);
@ -231,8 +238,11 @@ enum class CrashKeyId {
kIsolateAddress, kIsolateAddress,
kReadonlySpaceFirstPageAddress, kReadonlySpaceFirstPageAddress,
kMapSpaceFirstPageAddress, kMapSpaceFirstPageAddress,
kCodeRangeBaseAddress,
kCodeSpaceFirstPageAddress, kCodeSpaceFirstPageAddress,
kDumpType, kDumpType,
kSnapshotChecksumCalculated,
kSnapshotChecksumExpected,
}; };
using AddCrashKeyCallback = void (*)(CrashKeyId id, const std::string& value); using AddCrashKeyCallback = void (*)(CrashKeyId id, const std::string& value);
@ -300,6 +310,13 @@ using ApiImplementationCallback = void (*)(const FunctionCallbackInfo<Value>&);
// --- Callback for WebAssembly.compileStreaming --- // --- Callback for WebAssembly.compileStreaming ---
using WasmStreamingCallback = void (*)(const FunctionCallbackInfo<Value>&); using WasmStreamingCallback = void (*)(const FunctionCallbackInfo<Value>&);
enum class WasmAsyncSuccess { kSuccess, kFail };
// --- Callback called when async WebAssembly operations finish ---
using WasmAsyncResolvePromiseCallback = void (*)(
Isolate* isolate, Local<Context> context, Local<Promise::Resolver> resolver,
Local<Value> result, WasmAsyncSuccess success);
// --- Callback for loading source map file for Wasm profiling support // --- Callback for loading source map file for Wasm profiling support
using WasmLoadSourceMapCallback = Local<String> (*)(Isolate* isolate, using WasmLoadSourceMapCallback = Local<String> (*)(Isolate* isolate,
const char* name); const char* name);
@ -310,9 +327,6 @@ using WasmSimdEnabledCallback = bool (*)(Local<Context> context);
// --- Callback for checking if WebAssembly exceptions are enabled --- // --- Callback for checking if WebAssembly exceptions are enabled ---
using WasmExceptionsEnabledCallback = bool (*)(Local<Context> context); using WasmExceptionsEnabledCallback = bool (*)(Local<Context> context);
// --- Callback for checking if WebAssembly dynamic tiering is enabled ---
using WasmDynamicTieringEnabledCallback = bool (*)(Local<Context> context);
// --- Callback for checking if the SharedArrayBuffer constructor is enabled --- // --- Callback for checking if the SharedArrayBuffer constructor is enabled ---
using SharedArrayBufferConstructorEnabledCallback = using SharedArrayBufferConstructorEnabledCallback =
bool (*)(Local<Context> context); bool (*)(Local<Context> context);
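A sketch of an embedder out-of-memory handler matching the new OOMDetails-based signature introduced above; the logging is illustrative and the handler is assumed to be installed through the existing V8/Isolate OOM-handler setters:

#include <cstdio>
#include <cstdlib>

void OnV8OOM(const char* location, const v8::OOMDetails& details) {
  std::fprintf(stderr, "V8 OOM at %s (heap OOM: %d): %s\n", location,
               details.is_heap_oom, details.detail ? details.detail : "n/a");
  std::abort();  // An OOM callback is not expected to return.
}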
View file
@ -244,6 +244,12 @@ class V8_EXPORT Context : public Data {
*/ */
void SetErrorMessageForCodeGenerationFromStrings(Local<String> message); void SetErrorMessageForCodeGenerationFromStrings(Local<String> message);
/**
* Sets the error description for the exception that is thrown when
* wasm code generation is not allowed.
*/
void SetErrorMessageForWasmCodeGeneration(Local<String> message);
/** /**
* Return data that was previously attached to the context snapshot via * Return data that was previously attached to the context snapshot via
* SnapshotCreator, and removes the reference to it. * SnapshotCreator, and removes the reference to it.
@ -374,15 +380,13 @@ void* Context::GetAlignedPointerFromEmbedderData(int index) {
A ctx = *reinterpret_cast<const A*>(this); A ctx = *reinterpret_cast<const A*>(this);
A embedder_data = A embedder_data =
I::ReadTaggedPointerField(ctx, I::kNativeContextEmbedderDataOffset); I::ReadTaggedPointerField(ctx, I::kNativeContextEmbedderDataOffset);
int value_offset = int value_offset = I::kEmbedderDataArrayHeaderSize +
I::kEmbedderDataArrayHeaderSize + (I::kEmbedderDataSlotSize * index); (I::kEmbedderDataSlotSize * index) +
#ifdef V8_SANDBOXED_EXTERNAL_POINTERS I::kEmbedderDataSlotExternalPointerOffset;
value_offset += I::kEmbedderDataSlotRawPayloadOffset; Isolate* isolate = I::GetIsolateForSandbox(ctx);
#endif
internal::Isolate* isolate = I::GetIsolateForSandbox(ctx);
return reinterpret_cast<void*>( return reinterpret_cast<void*>(
I::ReadExternalPointerField(isolate, embedder_data, value_offset, I::ReadExternalPointerField<internal::kEmbedderDataSlotPayloadTag>(
internal::kEmbedderDataSlotPayloadTag)); isolate, embedder_data, value_offset));
#else #else
return SlowGetAlignedPointerFromEmbedderData(index); return SlowGetAlignedPointerFromEmbedderData(index);
#endif #endif
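A short sketch of the embedder-facing calls this section touches; `isolate`, `context`, and `my_state` are assumed to already exist:

// New in this version: a dedicated message for disallowed Wasm code generation.
context->SetErrorMessageForWasmCodeGeneration(
    v8::String::NewFromUtf8Literal(isolate, "Wasm codegen is not allowed here"));

// The rewritten inline fast path above serves this aligned-pointer read.
context->SetAlignedPointerInEmbedderData(1, my_state);
void* state = context->GetAlignedPointerFromEmbedderData(1);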
View file
@ -77,11 +77,20 @@ struct WrapperDescriptor final {
}; };
struct V8_EXPORT CppHeapCreateParams { struct V8_EXPORT CppHeapCreateParams {
CppHeapCreateParams(const CppHeapCreateParams&) = delete;
CppHeapCreateParams& operator=(const CppHeapCreateParams&) = delete;
std::vector<std::unique_ptr<cppgc::CustomSpaceBase>> custom_spaces; std::vector<std::unique_ptr<cppgc::CustomSpaceBase>> custom_spaces;
WrapperDescriptor wrapper_descriptor; WrapperDescriptor wrapper_descriptor;
/**
* Specifies which kind of marking is supported by the heap. The type may be
* further reduced via runtime flags when attaching the heap to an Isolate.
*/
cppgc::Heap::MarkingType marking_support =
cppgc::Heap::MarkingType::kIncrementalAndConcurrent;
/**
* Specifies which kind of sweeping is supported by the heap. The type may be
* further reduced via runtime flags when attaching the heap to an Isolate.
*/
cppgc::Heap::SweepingType sweeping_support =
cppgc::Heap::SweepingType::kIncrementalAndConcurrent;
}; };
/** /**
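A sketch, assuming C++17 aggregate initialization and hypothetical wrapper-descriptor indices, of filling in the new marking/sweeping fields before creating a CppHeap (`platform` is assumed to be the embedder's v8::Platform*):

v8::CppHeapCreateParams params{
    /*custom_spaces=*/{},
    v8::WrapperDescriptor(/*wrappable_type_index=*/0,
                          /*wrappable_instance_index=*/1,
                          /*embedder_id_for_garbage_collected=*/1)};
// Restrict this heap to stop-the-world marking and sweeping.
params.marking_support = cppgc::Heap::MarkingType::kAtomic;
params.sweeping_support = cppgc::Heap::SweepingType::kAtomic;
std::unique_ptr<v8::CppHeap> cpp_heap = v8::CppHeap::Create(platform, params);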
View file
@ -27,6 +27,11 @@ class V8_EXPORT Date : public Object {
*/ */
double ValueOf() const; double ValueOf() const;
/**
* Generates ISO string representation.
*/
v8::Local<v8::String> ToISOString() const;
V8_INLINE static Date* Cast(Value* value) { V8_INLINE static Date* Cast(Value* value) {
#ifdef V8_ENABLE_CHECKS #ifdef V8_ENABLE_CHECKS
CheckCast(value); CheckCast(value);
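A sketch of the new accessor; `context` is assumed and error handling is minimal:

v8::Local<v8::Value> value;
if (v8::Date::New(context, /*time=*/0).ToLocal(&value)) {
  v8::Local<v8::String> iso = value.As<v8::Date>()->ToISOString();
  // iso now holds "1970-01-01T00:00:00.000Z".
}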
View file
@ -69,7 +69,12 @@ class V8_EXPORT EmbedderRootsHandler {
* trace through its heap and use reporter to report each JavaScript object * trace through its heap and use reporter to report each JavaScript object
* reachable from any of the given wrappers. * reachable from any of the given wrappers.
*/ */
class V8_EXPORT EmbedderHeapTracer { class V8_EXPORT
// GCC doesn't like combining __attribute__(()) with [[deprecated]].
#ifdef __clang__
V8_DEPRECATE_SOON("Use CppHeap when working with v8::TracedReference.")
#endif // __clang__
EmbedderHeapTracer {
public: public:
using EmbedderStackState = cppgc::EmbedderStackState; using EmbedderStackState = cppgc::EmbedderStackState;
@ -205,10 +210,10 @@ class V8_EXPORT EmbedderHeapTracer {
* Returns the v8::Isolate this tracer is attached to and |nullptr| if it * Returns the v8::Isolate this tracer is attached to and |nullptr| if it
* is not attached to any v8::Isolate. * is not attached to any v8::Isolate.
*/ */
v8::Isolate* isolate() const { return isolate_; } v8::Isolate* isolate() const { return v8_isolate_; }
protected: protected:
v8::Isolate* isolate_ = nullptr; v8::Isolate* v8_isolate_ = nullptr;
friend class internal::LocalEmbedderHeapTracer; friend class internal::LocalEmbedderHeapTracer;
}; };
View file
@ -197,7 +197,7 @@ class V8_EXPORT TryCatch {
void ResetInternal(); void ResetInternal();
internal::Isolate* isolate_; internal::Isolate* i_isolate_;
TryCatch* next_; TryCatch* next_;
void* exception_; void* exception_;
void* message_obj_; void* message_obj_;
View file
@ -240,6 +240,7 @@ class CTypeInfo {
enum class Type : uint8_t { enum class Type : uint8_t {
kVoid, kVoid,
kBool, kBool,
kUint8,
kInt32, kInt32,
kUint32, kUint32,
kInt64, kInt64,
@ -302,8 +303,9 @@ class CTypeInfo {
constexpr Flags GetFlags() const { return flags_; } constexpr Flags GetFlags() const { return flags_; }
static constexpr bool IsIntegralType(Type type) { static constexpr bool IsIntegralType(Type type) {
return type == Type::kInt32 || type == Type::kUint32 || return type == Type::kUint8 || type == Type::kInt32 ||
type == Type::kInt64 || type == Type::kUint64; type == Type::kUint32 || type == Type::kInt64 ||
type == Type::kUint64;
} }
static constexpr bool IsFloatingPointType(Type type) { static constexpr bool IsFloatingPointType(Type type) {
@ -429,6 +431,7 @@ struct AnyCType {
double double_value; double double_value;
Local<Object> object_value; Local<Object> object_value;
Local<Array> sequence_value; Local<Array> sequence_value;
const FastApiTypedArray<uint8_t>* uint8_ta_value;
const FastApiTypedArray<int32_t>* int32_ta_value; const FastApiTypedArray<int32_t>* int32_ta_value;
const FastApiTypedArray<uint32_t>* uint32_ta_value; const FastApiTypedArray<uint32_t>* uint32_ta_value;
const FastApiTypedArray<int64_t>* int64_ta_value; const FastApiTypedArray<int64_t>* int64_ta_value;
@ -544,7 +547,7 @@ struct FastApiCallbackOptions {
* returned instance may be filled with mock data. * returned instance may be filled with mock data.
*/ */
static FastApiCallbackOptions CreateForTesting(Isolate* isolate) { static FastApiCallbackOptions CreateForTesting(Isolate* isolate) {
return {false, {0}}; return {false, {0}, nullptr};
} }
/** /**
@ -566,8 +569,13 @@ struct FastApiCallbackOptions {
*/ */
union { union {
uintptr_t data_ptr; uintptr_t data_ptr;
v8::Value data; v8::Local<v8::Value> data;
}; };
/**
* When called from WebAssembly, a view of the calling module's memory.
*/
FastApiTypedArray<uint8_t>* const wasm_memory;
}; };
namespace internal { namespace internal {
@ -648,7 +656,8 @@ struct CTypeInfoTraits {};
V(int64_t, kInt64) \ V(int64_t, kInt64) \
V(uint64_t, kUint64) \ V(uint64_t, kUint64) \
V(float, kFloat32) \ V(float, kFloat32) \
V(double, kFloat64) V(double, kFloat64) \
V(uint8_t, kUint8)
// Same as above, but includes deprecated types for compatibility. // Same as above, but includes deprecated types for compatibility.
#define ALL_C_TYPES(V) \ #define ALL_C_TYPES(V) \
@ -687,7 +696,8 @@ PRIMITIVE_C_TYPES(DEFINE_TYPE_INFO_TRAITS)
V(int64_t, kInt64) \ V(int64_t, kInt64) \
V(uint64_t, kUint64) \ V(uint64_t, kUint64) \
V(float, kFloat32) \ V(float, kFloat32) \
V(double, kFloat64) V(double, kFloat64) \
V(uint8_t, kUint8)
TYPED_ARRAY_C_TYPES(SPECIALIZE_GET_TYPE_INFO_HELPER_FOR_TA) TYPED_ARRAY_C_TYPES(SPECIALIZE_GET_TYPE_INFO_HELPER_FOR_TA)
@ -802,6 +812,16 @@ class CFunctionBuilderWithFunction {
std::make_index_sequence<sizeof...(ArgBuilders)>()); std::make_index_sequence<sizeof...(ArgBuilders)>());
} }
// Provided for testing purposes.
template <typename Ret, typename... Args>
auto Patch(Ret (*patching_func)(Args...)) {
static_assert(
sizeof...(Args) == sizeof...(ArgBuilders),
"The patching function must have the same number of arguments.");
fn_ = reinterpret_cast<void*>(patching_func);
return *this;
}
auto Build() { auto Build() {
static CFunctionInfoImpl<RetBuilder, ArgBuilders...> instance; static CFunctionInfoImpl<RetBuilder, ArgBuilders...> instance;
return CFunction(fn_, &instance); return CFunction(fn_, &instance);
@ -881,31 +901,6 @@ static constexpr CTypeInfo kTypeInfoFloat64 =
* to the requested destination type, is considered unsupported. The operation * to the requested destination type, is considered unsupported. The operation
* returns true on success. `type_info` will be used for conversions. * returns true on success. `type_info` will be used for conversions.
*/ */
template <const CTypeInfo* type_info, typename T>
V8_DEPRECATED(
"Use TryToCopyAndConvertArrayToCppBuffer<CTypeInfo::Identifier, T>()")
bool V8_EXPORT V8_WARN_UNUSED_RESULT
TryCopyAndConvertArrayToCppBuffer(Local<Array> src, T* dst,
uint32_t max_length);
template <>
V8_DEPRECATED(
"Use TryToCopyAndConvertArrayToCppBuffer<CTypeInfo::Identifier, T>()")
inline bool V8_WARN_UNUSED_RESULT
TryCopyAndConvertArrayToCppBuffer<&kTypeInfoInt32, int32_t>(
Local<Array> src, int32_t* dst, uint32_t max_length) {
return false;
}
template <>
V8_DEPRECATED(
"Use TryToCopyAndConvertArrayToCppBuffer<CTypeInfo::Identifier, T>()")
inline bool V8_WARN_UNUSED_RESULT
TryCopyAndConvertArrayToCppBuffer<&kTypeInfoFloat64, double>(
Local<Array> src, double* dst, uint32_t max_length) {
return false;
}
template <CTypeInfo::Identifier type_info_id, typename T> template <CTypeInfo::Identifier type_info_id, typename T>
bool V8_EXPORT V8_WARN_UNUSED_RESULT TryToCopyAndConvertArrayToCppBuffer( bool V8_EXPORT V8_WARN_UNUSED_RESULT TryToCopyAndConvertArrayToCppBuffer(
Local<Array> src, T* dst, uint32_t max_length); Local<Array> src, T* dst, uint32_t max_length);
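A hedged sketch of a fast API callback that uses the new wasm_memory view added to FastApiCallbackOptions above; the function and variable names are hypothetical, and the slow path is requested whenever the view is absent or the offset is out of range:

static int32_t FastReadByte(v8::Local<v8::Object> receiver, uint32_t offset,
                            v8::FastApiCallbackOptions& options) {
  if (options.wasm_memory == nullptr ||
      offset >= options.wasm_memory->length()) {
    options.fallback = true;  // Ask V8 to re-run the regular (slow) callback.
    return 0;
  }
  return options.wasm_memory->get(offset);  // Read from the caller's memory.
}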
View file
@ -106,6 +106,14 @@ class V8_EXPORT Function : public Object {
V8_WARN_UNUSED_RESULT MaybeLocal<String> FunctionProtoToString( V8_WARN_UNUSED_RESULT MaybeLocal<String> FunctionProtoToString(
Local<Context> context); Local<Context> context);
/**
* Returns true if the function does nothing.
* The function returns false on error.
* Note that this function is experimental. Embedders should not rely on
* this existing. We may remove this function in the future.
*/
V8_WARN_UNUSED_RESULT bool Experimental_IsNopFunction() const;
ScriptOrigin GetScriptOrigin() const; ScriptOrigin GetScriptOrigin() const;
V8_INLINE static Function* Cast(Value* value) { V8_INLINE static Function* Cast(Value* value) {
#ifdef V8_ENABLE_CHECKS #ifdef V8_ENABLE_CHECKS
View file
@ -100,9 +100,6 @@ class V8_EXPORT V8 {
const int kBuildConfiguration = const int kBuildConfiguration =
(internal::PointerCompressionIsEnabled() ? kPointerCompression : 0) | (internal::PointerCompressionIsEnabled() ? kPointerCompression : 0) |
(internal::SmiValuesAre31Bits() ? k31BitSmis : 0) | (internal::SmiValuesAre31Bits() ? k31BitSmis : 0) |
(internal::SandboxedExternalPointersAreEnabled()
? kSandboxedExternalPointers
: 0) |
(internal::SandboxIsEnabled() ? kSandbox : 0); (internal::SandboxIsEnabled() ? kSandbox : 0);
return Initialize(kBuildConfiguration); return Initialize(kBuildConfiguration);
} }
@ -184,30 +181,19 @@ class V8_EXPORT V8 {
* V8 was disposed. * V8 was disposed.
*/ */
static void DisposePlatform(); static void DisposePlatform();
V8_DEPRECATED("Use DisposePlatform()")
static void ShutdownPlatform() { DisposePlatform(); }
#ifdef V8_SANDBOX
//
// Sandbox related API.
//
// This API is not yet stable and subject to changes in the future.
//
#if defined(V8_ENABLE_SANDBOX)
/** /**
* Initializes the V8 sandbox. * Returns true if the sandbox is configured securely.
* *
* This must be invoked after the platform was initialized but before V8 is * If V8 cannot create a regular sandbox during initialization, for example
* initialized. The sandbox is torn down during platform shutdown. * because not enough virtual address space can be reserved, it will instead
* Returns true on success, false otherwise. * create a fallback sandbox that still allows it to function normally but
* * does not have the same security properties as a regular sandbox. This API
* TODO(saelo) Once it is no longer optional to initialize the sandbox when * can be used to determine if such a fallback sandbox is being used, in
* compiling with V8_SANDBOX, the sandbox initialization will likely happen * which case it will return false.
* as part of V8::Initialize, at which point this function should be removed.
*/ */
static bool InitializeSandbox(); static bool IsSandboxConfiguredSecurely();
V8_DEPRECATE_SOON("Use InitializeSandbox()")
static bool InitializeVirtualMemoryCage() { return InitializeSandbox(); }
/** /**
* Provides access to the virtual address subspace backing the sandbox. * Provides access to the virtual address subspace backing the sandbox.
@ -220,39 +206,29 @@ class V8_EXPORT V8 {
* and so in particular the contents of pages allocated in this virtual * address space, arbitrarily and concurrently. Due to this, it is
* address space, arbitrarily and concurrently. Due to this, it is * address space, arbitrarily and concurrently. Due to this, it is
* recommended to to only place pure data buffers in them. * recommended to to only place pure data buffers in them.
*
* This function must only be called after initializing the sandbox.
*/ */
static VirtualAddressSpace* GetSandboxAddressSpace(); static VirtualAddressSpace* GetSandboxAddressSpace();
V8_DEPRECATE_SOON("Use GetSandboxAddressSpace()")
static PageAllocator* GetVirtualMemoryCagePageAllocator();
/** /**
* Returns the size of the sandbox in bytes. * Returns the size of the sandbox in bytes.
* *
* If the sandbox has not been initialized, or if the initialization failed, * This represents the size of the address space that V8 can directly address
* this returns zero. * and in which it allocates its objects.
*/ */
static size_t GetSandboxSizeInBytes(); static size_t GetSandboxSizeInBytes();
V8_DEPRECATE_SOON("Use GetSandboxSizeInBytes()")
static size_t GetVirtualMemoryCageSizeInBytes() {
return GetSandboxSizeInBytes();
}
/** /**
* Returns whether the sandbox is configured securely. * Returns the size of the address space reservation backing the sandbox.
* *
* If V8 cannot create a proper sandbox, it will fall back to creating a * This may be larger than the sandbox (i.e. |GetSandboxSizeInBytes()|) due
* sandbox that doesn't have the desired security properties but at least * to surrounding guard regions, or may be smaller than the sandbox in case a
* still allows V8 to function. This API can be used to determine if such an * fallback sandbox is being used, which will use a smaller virtual address
* insecure sandbox is being used, in which case it will return false. * space reservation. In the latter case this will also be different from
* |GetSandboxAddressSpace()->size()| as that will cover a larger part of the
* address space than what has actually been reserved.
*/ */
static bool IsSandboxConfiguredSecurely(); static size_t GetSandboxReservationSizeInBytes();
V8_DEPRECATE_SOON("Use IsSandboxConfiguredSecurely()") #endif // V8_ENABLE_SANDBOX
static bool IsUsingSecureVirtualMemoryCage() {
return IsSandboxConfiguredSecurely();
}
#endif
/** /**
* Activate trap-based bounds checking for WebAssembly. * Activate trap-based bounds checking for WebAssembly.
@ -273,7 +249,7 @@ class V8_EXPORT V8 {
* exceptions in V8-generated code. * exceptions in V8-generated code.
*/ */
static void SetUnhandledExceptionCallback( static void SetUnhandledExceptionCallback(
UnhandledExceptionCallback unhandled_exception_callback); UnhandledExceptionCallback callback);
#endif #endif
/** /**
@ -281,8 +257,7 @@ class V8_EXPORT V8 {
* v8 has encountered a fatal failure to allocate memory and is about to * v8 has encountered a fatal failure to allocate memory and is about to
* terminate. * terminate.
*/ */
static void SetFatalMemoryErrorCallback(OOMErrorCallback callback);
static void SetFatalMemoryErrorCallback(OOMErrorCallback oom_error_callback);
/** /**
* Get statistics about the shared memory usage. * Get statistics about the shared memory usage.
@ -295,8 +270,7 @@ class V8_EXPORT V8 {
enum BuildConfigurationFeatures { enum BuildConfigurationFeatures {
kPointerCompression = 1 << 0, kPointerCompression = 1 << 0,
k31BitSmis = 1 << 1, k31BitSmis = 1 << 1,
kSandboxedExternalPointers = 1 << 2, kSandbox = 1 << 2,
kSandbox = 1 << 3,
}; };
/** /**
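A sketch of querying the reshaped sandbox API after initialization; `platform` is assumed and the queries only exist in V8_ENABLE_SANDBOX builds:

v8::V8::InitializePlatform(platform.get());
v8::V8::Initialize();
#if defined(V8_ENABLE_SANDBOX)
if (!v8::V8::IsSandboxConfiguredSecurely()) {
  // A fallback sandbox is in use; its reservation is smaller than the sandbox.
  size_t sandbox_size = v8::V8::GetSandboxSizeInBytes();
  size_t reservation = v8::V8::GetSandboxReservationSizeInBytes();
  // ... log, tighten policy, or refuse to run untrusted code ...
}
#endif  // V8_ENABLE_SANDBOX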
View file
@ -207,10 +207,10 @@ class V8_EXPORT V8InspectorSession {
class V8_EXPORT WebDriverValue { class V8_EXPORT WebDriverValue {
public: public:
explicit WebDriverValue(StringView type, v8::MaybeLocal<v8::Value> value = {}) explicit WebDriverValue(std::unique_ptr<StringBuffer> type,
: type(type), value(value) {} v8::MaybeLocal<v8::Value> value = {})
: type(std::move(type)), value(value) {}
StringView type; std::unique_ptr<StringBuffer> type;
v8::MaybeLocal<v8::Value> value; v8::MaybeLocal<v8::Value> value;
}; };
@ -219,6 +219,9 @@ class V8_EXPORT V8InspectorClient {
virtual ~V8InspectorClient() = default; virtual ~V8InspectorClient() = default;
virtual void runMessageLoopOnPause(int contextGroupId) {} virtual void runMessageLoopOnPause(int contextGroupId) {}
virtual void runMessageLoopOnInstrumentationPause(int contextGroupId) {
runMessageLoopOnPause(contextGroupId);
}
virtual void quitMessageLoopOnPause() {} virtual void quitMessageLoopOnPause() {}
virtual void runIfWaitingForDebugger(int contextGroupId) {} virtual void runIfWaitingForDebugger(int contextGroupId) {}
@ -361,9 +364,12 @@ class V8_EXPORT V8Inspector {
virtual void sendNotification(std::unique_ptr<StringBuffer> message) = 0; virtual void sendNotification(std::unique_ptr<StringBuffer> message) = 0;
virtual void flushProtocolNotifications() = 0; virtual void flushProtocolNotifications() = 0;
}; };
virtual std::unique_ptr<V8InspectorSession> connect(int contextGroupId, enum ClientTrustLevel { kUntrusted, kFullyTrusted };
Channel*, virtual std::unique_ptr<V8InspectorSession> connect(
StringView state) = 0; int contextGroupId, Channel*, StringView state,
ClientTrustLevel client_trust_level) {
return nullptr;
}
// API methods. // API methods.
virtual std::unique_ptr<V8StackTrace> createStackTrace( virtual std::unique_ptr<V8StackTrace> createStackTrace(
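A sketch of the updated connect() call with the new trust-level argument; `inspector`, `channel`, `saved_state`, and `context_group_id` are assumed to exist in the embedder:

std::unique_ptr<v8_inspector::V8InspectorSession> session =
    inspector->connect(context_group_id, &channel, saved_state,
                       v8_inspector::V8Inspector::kFullyTrusted);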
View file
@ -8,6 +8,8 @@
#include <stddef.h> #include <stddef.h>
#include <stdint.h> #include <stdint.h>
#include <string.h> #include <string.h>
#include <atomic>
#include <type_traits> #include <type_traits>
#include "v8-version.h" // NOLINT(build/include_directory) #include "v8-version.h" // NOLINT(build/include_directory)
@ -50,6 +52,7 @@ const int kHeapObjectTag = 1;
const int kWeakHeapObjectTag = 3; const int kWeakHeapObjectTag = 3;
const int kHeapObjectTagSize = 2; const int kHeapObjectTagSize = 2;
const intptr_t kHeapObjectTagMask = (1 << kHeapObjectTagSize) - 1; const intptr_t kHeapObjectTagMask = (1 << kHeapObjectTagSize) - 1;
const intptr_t kHeapObjectReferenceTagMask = 1 << (kHeapObjectTagSize - 1);
// Tag information for fowarding pointers stored in object headers. // Tag information for fowarding pointers stored in object headers.
// 0b00 at the lowest 2 bits in the header indicates that the map word is a // 0b00 at the lowest 2 bits in the header indicates that the map word is a
@ -157,15 +160,7 @@ V8_INLINE static constexpr internal::Address IntToSmi(int value) {
* Sandbox related types, constants, and functions. * Sandbox related types, constants, and functions.
*/ */
constexpr bool SandboxIsEnabled() { constexpr bool SandboxIsEnabled() {
#ifdef V8_SANDBOX #ifdef V8_ENABLE_SANDBOX
return true;
#else
return false;
#endif
}
constexpr bool SandboxedExternalPointersAreEnabled() {
#ifdef V8_SANDBOXED_EXTERNAL_POINTERS
return true; return true;
#else #else
return false; return false;
@ -176,19 +171,18 @@ constexpr bool SandboxedExternalPointersAreEnabled() {
// for example by storing them as offset rather than as raw pointers. // for example by storing them as offset rather than as raw pointers.
using SandboxedPointer_t = Address; using SandboxedPointer_t = Address;
// ExternalPointers point to objects located outside the sandbox. When sandboxed #ifdef V8_ENABLE_SANDBOX
// external pointers are enabled, these are stored in an external pointer table
// and referenced from HeapObjects through indices.
#ifdef V8_SANDBOXED_EXTERNAL_POINTERS
using ExternalPointer_t = uint32_t;
#else
using ExternalPointer_t = Address;
#endif
#ifdef V8_SANDBOX_IS_AVAILABLE
// Size of the sandbox, excluding the guard regions surrounding it. // Size of the sandbox, excluding the guard regions surrounding it.
#ifdef V8_TARGET_OS_ANDROID
// On Android, most 64-bit devices seem to be configured with only 39 bits of
// virtual address space for userspace. As such, limit the sandbox to 128GB (a
// quarter of the total available address space).
constexpr size_t kSandboxSizeLog2 = 37; // 128 GB
#else
// Everywhere else use a 1TB sandbox.
constexpr size_t kSandboxSizeLog2 = 40; // 1 TB constexpr size_t kSandboxSizeLog2 = 40; // 1 TB
#endif // V8_OS_ANDROID
constexpr size_t kSandboxSize = 1ULL << kSandboxSizeLog2; constexpr size_t kSandboxSize = 1ULL << kSandboxSizeLog2;
// Required alignment of the sandbox. For simplicity, we require the // Required alignment of the sandbox. For simplicity, we require the
@ -213,20 +207,6 @@ static_assert((kSandboxGuardRegionSize % kSandboxAlignment) == 0,
"The size of the guard regions around the sandbox must be a " "The size of the guard regions around the sandbox must be a "
"multiple of its required alignment."); "multiple of its required alignment.");
// Minimum size of the sandbox, excluding the guard regions surrounding it. If
// the virtual memory reservation for the sandbox fails, its size is currently
// halved until either the reservation succeeds or the minimum size is reached.
// A minimum of 32GB allows the 4GB pointer compression region as well as the
// ArrayBuffer partition and two 10GB Wasm memory cages to fit into the
// sandbox. 32GB should also be the minimum possible size of the userspace
// address space as there are some machine configurations with only 36 virtual
// address bits.
constexpr size_t kSandboxMinimumSize = 32ULL * GB;
static_assert(kSandboxMinimumSize <= kSandboxSize,
"The minimal size of the sandbox must be smaller or equal to the "
"regular size.");
// On OSes where reserving virtual memory is too expensive to reserve the // On OSes where reserving virtual memory is too expensive to reserve the
// entire address space backing the sandbox, notably Windows pre 8.1, we create // entire address space backing the sandbox, notably Windows pre 8.1, we create
// a partially reserved sandbox that doesn't actually reserve most of the // a partially reserved sandbox that doesn't actually reserve most of the
@ -239,82 +219,253 @@ static_assert(kSandboxMinimumSize <= kSandboxSize,
// well as the ArrayBuffer partition. // well as the ArrayBuffer partition.
constexpr size_t kSandboxMinimumReservationSize = 8ULL * GB; constexpr size_t kSandboxMinimumReservationSize = 8ULL * GB;
static_assert(kSandboxMinimumSize > kPtrComprCageReservationSize,
"The sandbox must be larger than the pointer compression cage "
"contained within it.");
static_assert(kSandboxMinimumReservationSize > kPtrComprCageReservationSize, static_assert(kSandboxMinimumReservationSize > kPtrComprCageReservationSize,
"The minimum reservation size for a sandbox must be larger than " "The minimum reservation size for a sandbox must be larger than "
"the pointer compression cage contained within it."); "the pointer compression cage contained within it.");
// For now, even if the sandbox is enabled, we still allow backing stores to be #endif // V8_ENABLE_SANDBOX
// allocated outside of it as fallback. This will simplify the initial rollout.
// However, if sandboxed pointers are also enabled, we must always place #ifdef V8_COMPRESS_POINTERS
// backing stores inside the sandbox as they will be referenced though them.
#ifdef V8_SANDBOXED_POINTERS
constexpr bool kAllowBackingStoresOutsideSandbox = false;
#else
constexpr bool kAllowBackingStoresOutsideSandbox = true;
#endif // V8_SANDBOXED_POINTERS
// The size of the virtual memory reservation for an external pointer table. // The size of the virtual memory reservation for an external pointer table.
// This determines the maximum number of entries in a table. Using a maximum // This determines the maximum number of entries in a table. Using a maximum
// size allows omitting bounds checks on table accesses if the indices are // size allows omitting bounds checks on table accesses if the indices are
// guaranteed (e.g. through shifting) to be below the maximum index. This // guaranteed (e.g. through shifting) to be below the maximum index. This
// value must be a power of two. // value must be a power of two.
static const size_t kExternalPointerTableReservationSize = 128 * MB; static const size_t kExternalPointerTableReservationSize = 512 * MB;
// The maximum number of entries in an external pointer table. // The maximum number of entries in an external pointer table.
static const size_t kMaxSandboxedExternalPointers = static const size_t kMaxExternalPointers =
kExternalPointerTableReservationSize / kApiSystemPointerSize; kExternalPointerTableReservationSize / kApiSystemPointerSize;
// The external pointer table indices stored in HeapObjects as external // The external pointer table indices stored in HeapObjects as external
// pointers are shifted to the left by this amount to guarantee that they are // pointers are shifted to the left by this amount to guarantee that they are
// smaller than the maximum table size. // smaller than the maximum table size.
static const uint32_t kExternalPointerIndexShift = 8; static const uint32_t kExternalPointerIndexShift = 6;
static_assert((1 << (32 - kExternalPointerIndexShift)) == static_assert((1 << (32 - kExternalPointerIndexShift)) == kMaxExternalPointers,
kMaxSandboxedExternalPointers,
"kExternalPointerTableReservationSize and " "kExternalPointerTableReservationSize and "
"kExternalPointerIndexShift don't match"); "kExternalPointerIndexShift don't match");
#endif // V8_SANDBOX_IS_AVAILABLE #else // !V8_COMPRESS_POINTERS
// If sandboxed external pointers are enabled, these tag values will be ORed // Needed for the V8.SandboxedExternalPointersCount histogram.
// with the external pointers in the external pointer table to prevent use of static const size_t kMaxExternalPointers = 0;
// pointers of the wrong type. When a pointer is loaded, it is ANDed with the
// inverse of the expected type's tag. The tags are constructed in a way that #endif // V8_COMPRESS_POINTERS
// guarantees that a failed type check will result in one or more of the top
// bits of the pointer to be set, rendering the pointer inacessible. Besides // A ExternalPointerHandle represents a (opaque) reference to an external
// the type tag bits (48 through 62), the tags also have the GC mark bit (63) // pointer that can be stored inside the sandbox. A ExternalPointerHandle has
// set, so that the mark bit is automatically set when a pointer is written // meaning only in combination with an (active) Isolate as it references an
// into the external pointer table (in which case it is clearly alive) and is // external pointer stored in the currently active Isolate's
// cleared when the pointer is loaded. The exception to this is the free entry // ExternalPointerTable. Internally, an ExternalPointerHandles is simply an
// tag, which doesn't have the mark bit set, as the entry is not alive. This // index into an ExternalPointerTable that is shifted to the left to guarantee
// that it is smaller than the size of the table.
using ExternalPointerHandle = uint32_t;
// ExternalPointers point to objects located outside the sandbox. When
// sandboxed external pointers are enabled, these are stored on heap as
// ExternalPointerHandles, otherwise they are simply raw pointers.
#ifdef V8_ENABLE_SANDBOX
using ExternalPointer_t = ExternalPointerHandle;
#else
using ExternalPointer_t = Address;
#endif
// When the sandbox is enabled, external pointers are stored in an external
// pointer table and are referenced from HeapObjects through an index (a
// "handle"). When stored in the table, the pointers are tagged with per-type
// tags to prevent type confusion attacks between different external objects.
// Besides type information bits, these tags also contain the GC marking bit
// which indicates whether the pointer table entry is currently alive. When a
// pointer is written into the table, the tag is ORed into the top bits. When
// that pointer is later loaded from the table, it is ANDed with the inverse of
// the expected tag. If the expected and actual type differ, this will leave
// some of the top bits of the pointer set, rendering the pointer inaccessible.
// The AND operation also removes the GC marking bit from the pointer.
//
// The tags are constructed such that UNTAG(TAG(0, T1), T2) != 0 for any two
// (distinct) tags T1 and T2. In practice, this is achieved by generating tags
// that all have the same number of zeroes and ones but different bit patterns.
// With N type tag bits, this allows for (N choose N/2) possible type tags.
// Besides the type tag bits, the tags also have the GC marking bit set so that
// the marking bit is automatically set when a pointer is written into the
// external pointer table (in which case it is clearly alive) and is cleared
// when the pointer is loaded. The exception to this is the free entry tag,
// which doesn't have the mark bit set, as the entry is not alive. This
// construction allows performing the type check and removing GC marking bits // construction allows performing the type check and removing GC marking bits
// (the MSB) from the pointer at the same time. // from the pointer in one efficient operation (bitwise AND). The number of
// Note: this scheme assumes a 48-bit address space and will likely break if // available bits is limited in the following way: on x64, bits [47, 64) are
// more virtual address bits are used. // generally available for tagging (userspace has 47 address bits available).
constexpr uint64_t kExternalPointerTagMask = 0xffff000000000000; // On Arm64, userspace typically has a 40 or 48 bit address space. However, due
// to top-byte ignore (TBI) and memory tagging (MTE), the top byte is unusable
// for type checks as type-check failures would go unnoticed or collide with
// MTE bits. Some bits of the top byte can, however, still be used for the GC
// marking bit. The bits available for the type tags are therefore limited to
// [48, 56), i.e. (8 choose 4) = 70 different types.
// The following options exist to increase the number of possible types:
// - Using multiple ExternalPointerTables since tags can safely be reused
// across different tables
// - Using "extended" type checks, where additional type information is stored
// either in an adjacent pointer table entry or at the pointed-to location
// - Using a different tagging scheme, for example based on XOR which would
// allow for 2**8 different tags but require a separate operation to remove
// the marking bit
//
// The external pointer sandboxing mechanism ensures that every access to an
// external pointer field will result in a valid pointer of the expected type
// even in the presence of an attacker able to corrupt memory inside the
// sandbox. However, if any data related to the external object is stored
// inside the sandbox it may still be corrupted and so must be validated before
// use or moved into the external object. Further, an attacker will always be
// able to substitute different external pointers of the same type for each
// other. Therefore, code using external pointers must be written in a
// "substitution-safe" way, i.e. it must always be possible to substitute
// external pointers of the same type without causing memory corruption outside
// of the sandbox. Generally this is achieved by referencing any group of
// related external objects through a single external pointer.
//
// Currently we use bit 62 for the marking bit which should always be unused as
// it's part of the non-canonical address range. When Arm's top-byte ignore
// (TBI) is enabled, this bit will be part of the ignored byte, and we assume
// that the Embedder is not using this byte (really only this one bit) for any
// other purpose. This bit also does not collide with the memory tagging
// extension (MTE) which would use bits [56, 60).
constexpr uint64_t kExternalPointerMarkBit = 1ULL << 62;
constexpr uint64_t kExternalPointerTagMask = 0x40ff000000000000;
constexpr uint64_t kExternalPointerTagShift = 48; constexpr uint64_t kExternalPointerTagShift = 48;
#define MAKE_TAG(v) (static_cast<uint64_t>(v) << kExternalPointerTagShift)
// clang-format off
enum ExternalPointerTag : uint64_t {
kExternalPointerNullTag = MAKE_TAG(0b0000000000000000),
kExternalPointerFreeEntryTag = MAKE_TAG(0b0111111110000000),
kExternalStringResourceTag = MAKE_TAG(0b1000000011111111),
kExternalStringResourceDataTag = MAKE_TAG(0b1000000101111111),
kForeignForeignAddressTag = MAKE_TAG(0b1000000110111111),
kNativeContextMicrotaskQueueTag = MAKE_TAG(0b1000000111011111),
kEmbedderDataSlotPayloadTag = MAKE_TAG(0b1000000111101111),
kCodeEntryPointTag = MAKE_TAG(0b1000000111110111),
kExternalObjectValueTag = MAKE_TAG(0b1000000111111011),
};
// clang-format on
#undef MAKE_TAG
// Converts encoded external pointer to address. // All possible 8-bit type tags.
V8_EXPORT Address DecodeExternalPointerImpl(const Isolate* isolate, // These are sorted so that tags can be grouped together and it can efficiently
ExternalPointer_t pointer, // be checked if a tag belongs to a given group. See for example the
ExternalPointerTag tag); // IsSharedExternalPointerType routine.
constexpr uint64_t kAllExternalPointerTypeTags[] = {
0b00001111, 0b00010111, 0b00011011, 0b00011101, 0b00011110, 0b00100111,
0b00101011, 0b00101101, 0b00101110, 0b00110011, 0b00110101, 0b00110110,
0b00111001, 0b00111010, 0b00111100, 0b01000111, 0b01001011, 0b01001101,
0b01001110, 0b01010011, 0b01010101, 0b01010110, 0b01011001, 0b01011010,
0b01011100, 0b01100011, 0b01100101, 0b01100110, 0b01101001, 0b01101010,
0b01101100, 0b01110001, 0b01110010, 0b01110100, 0b01111000, 0b10000111,
0b10001011, 0b10001101, 0b10001110, 0b10010011, 0b10010101, 0b10010110,
0b10011001, 0b10011010, 0b10011100, 0b10100011, 0b10100101, 0b10100110,
0b10101001, 0b10101010, 0b10101100, 0b10110001, 0b10110010, 0b10110100,
0b10111000, 0b11000011, 0b11000101, 0b11000110, 0b11001001, 0b11001010,
0b11001100, 0b11010001, 0b11010010, 0b11010100, 0b11011000, 0b11100001,
0b11100010, 0b11100100, 0b11101000, 0b11110000};
// clang-format off
// New entries should be added with state "sandboxed".
// When adding new tags, please ensure that the code using these tags is
// "substitution-safe", i.e. still operate safely if external pointers of the
// same type are swapped by an attacker. See comment above for more details.
#define TAG(i) (kAllExternalPointerTypeTags[i])
// Shared external pointers are owned by the shared Isolate and stored in the
// shared external pointer table associated with that Isolate, where they can
// be accessed from multiple threads at the same time. The objects referenced
// in this way must therefore always be thread-safe.
#define SHARED_EXTERNAL_POINTER_TAGS(V) \
V(kFirstSharedTag, sandboxed, TAG(0)) \
V(kWaiterQueueNodeTag, sandboxed, TAG(0)) \
V(kExternalStringResourceTag, sandboxed, TAG(1)) \
V(kExternalStringResourceDataTag, sandboxed, TAG(2)) \
V(kLastSharedTag, sandboxed, TAG(2))
// External pointers using these tags are kept in a per-Isolate external
// pointer table and can only be accessed when this Isolate is active.
#define PER_ISOLATE_EXTERNAL_POINTER_TAGS(V) \
V(kForeignForeignAddressTag, sandboxed, TAG(10)) \
V(kNativeContextMicrotaskQueueTag, sandboxed, TAG(11)) \
V(kEmbedderDataSlotPayloadTag, sandboxed, TAG(12)) \
V(kExternalObjectValueTag, sandboxed, TAG(13)) \
V(kCallHandlerInfoCallbackTag, sandboxed, TAG(14)) \
V(kAccessorInfoGetterTag, sandboxed, TAG(15)) \
V(kAccessorInfoSetterTag, sandboxed, TAG(16)) \
V(kWasmInternalFunctionCallTargetTag, sandboxed, TAG(17)) \
V(kWasmTypeInfoNativeTypeTag, sandboxed, TAG(18)) \
V(kWasmExportedFunctionDataSignatureTag, sandboxed, TAG(19)) \
V(kWasmContinuationJmpbufTag, sandboxed, TAG(20))
// All external pointer tags.
#define ALL_EXTERNAL_POINTER_TAGS(V) \
SHARED_EXTERNAL_POINTER_TAGS(V) \
PER_ISOLATE_EXTERNAL_POINTER_TAGS(V)
// When the sandbox is enabled, external pointers marked as "sandboxed" above
// use the external pointer table (i.e. are sandboxed). This allows a gradual
// rollout of external pointer sandboxing. If the sandbox is off, no external
// pointers are sandboxed.
//
// Sandboxed external pointer tags are available when compressing pointers even
// when the sandbox is off. Some tags (e.g. kWaiterQueueNodeTag) are used
// manually with the external pointer table even when the sandbox is off to ease
// alignment requirements.
#define sandboxed(X) (X << kExternalPointerTagShift) | kExternalPointerMarkBit
#define unsandboxed(X) kUnsandboxedExternalPointerTag
#if defined(V8_COMPRESS_POINTERS)
#define EXTERNAL_POINTER_TAG_ENUM(Name, State, Bits) Name = State(Bits),
#else
#define EXTERNAL_POINTER_TAG_ENUM(Name, State, Bits) Name = unsandboxed(Bits),
#endif
#define MAKE_TAG(HasMarkBit, TypeTag) \
((static_cast<uint64_t>(TypeTag) << kExternalPointerTagShift) | \
(HasMarkBit ? kExternalPointerMarkBit : 0))
enum ExternalPointerTag : uint64_t {
// Empty tag value. Mostly used as placeholder.
kExternalPointerNullTag = MAKE_TAG(0, 0b00000000),
// Tag to use for unsandboxed external pointers, which are still stored as
// raw pointers on the heap.
kUnsandboxedExternalPointerTag = MAKE_TAG(0, 0b00000000),
// External pointer tag that will match any external pointer. Use with care!
kAnyExternalPointerTag = MAKE_TAG(1, 0b11111111),
// The free entry tag has all type bits set so every type check with a
// different type fails. It also doesn't have the mark bit set as free
// entries are (by definition) not alive.
kExternalPointerFreeEntryTag = MAKE_TAG(0, 0b11111111),
// Evacuation entries are used during external pointer table compaction.
kExternalPointerEvacuationEntryTag = MAKE_TAG(1, 0b11100111),
ALL_EXTERNAL_POINTER_TAGS(EXTERNAL_POINTER_TAG_ENUM)
};
#undef MAKE_TAG
#undef unsandboxed
#undef sandboxed
#undef TAG
#undef EXTERNAL_POINTER_TAG_ENUM
// clang-format on
// True if the external pointer is sandboxed and so must be referenced through
// an external pointer table.
V8_INLINE static constexpr bool IsSandboxedExternalPointerType(
ExternalPointerTag tag) {
return tag != kUnsandboxedExternalPointerTag;
}
// True if the external pointer must be accessed from the shared isolate's
// external pointer table.
V8_INLINE static constexpr bool IsSharedExternalPointerType(
ExternalPointerTag tag) {
return tag >= kFirstSharedTag && tag <= kLastSharedTag;
}
// Sanity checks.
#define CHECK_SHARED_EXTERNAL_POINTER_TAGS(Tag, ...) \
static_assert(!IsSandboxedExternalPointerType(Tag) || \
IsSharedExternalPointerType(Tag));
#define CHECK_NON_SHARED_EXTERNAL_POINTER_TAGS(Tag, ...) \
static_assert(!IsSandboxedExternalPointerType(Tag) || \
!IsSharedExternalPointerType(Tag));
SHARED_EXTERNAL_POINTER_TAGS(CHECK_SHARED_EXTERNAL_POINTER_TAGS)
PER_ISOLATE_EXTERNAL_POINTER_TAGS(CHECK_NON_SHARED_EXTERNAL_POINTER_TAGS)
#undef CHECK_NON_SHARED_EXTERNAL_POINTER_TAGS
#undef CHECK_SHARED_EXTERNAL_POINTER_TAGS
#undef SHARED_EXTERNAL_POINTER_TAGS
#undef EXTERNAL_POINTER_TAGS
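The encoding above is easier to follow with concrete numbers. The following standalone sketch is illustration only (the shift, mark-bit, and type values are placeholders, not the real V8 constants); it shows how a MAKE_TAG-style tag is OR'ed into a table entry and how `entry & ~tag` recovers the pointer only when the tag matches.

#include <cassert>
#include <cstdint>

int main() {
  // Placeholder layout: low 48 bits hold the pointer, the top bits the tag.
  constexpr uint64_t kTagShift = 48;                // assumption, not the V8 constant
  constexpr uint64_t kMarkBit = uint64_t{1} << 62;  // assumption as well
  constexpr uint64_t kTypeBits = 0x13;              // made-up type id
  // Equivalent of MAKE_TAG(1, kTypeBits): type bits shifted up, mark bit set.
  constexpr uint64_t kTag = (kTypeBits << kTagShift) | kMarkBit;

  uint64_t pointer = 0x00007f00dead0000;  // external pointer to be stored
  uint64_t entry = pointer | kTag;        // entry as kept in the table

  // Reading with the matching tag strips the tag bits and yields the pointer.
  assert((entry & ~kTag) == pointer);

  // A mismatching tag leaves stray type bits set, so the access fails safely.
  constexpr uint64_t kOtherTag = (uint64_t{0x07} << kTagShift) | kMarkBit;
  assert((entry & ~kOtherTag) != pointer);
  return 0;
}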
// {obj} must be the raw tagged pointer representation of a HeapObject // {obj} must be the raw tagged pointer representation of a HeapObject
// that's guaranteed to never be in ReadOnlySpace. // that's guaranteed to never be in ReadOnlySpace.
@ -324,9 +475,6 @@ V8_EXPORT internal::Isolate* IsolateFromNeverReadOnlySpaceObject(Address obj);
// mode based on the current context and the closure. This returns true if the // mode based on the current context and the closure. This returns true if the
// language mode is strict. // language mode is strict.
V8_EXPORT bool ShouldThrowOnError(v8::internal::Isolate* isolate); V8_EXPORT bool ShouldThrowOnError(v8::internal::Isolate* isolate);
V8_EXPORT bool CanHaveInternalField(int instance_type);
/** /**
* This class exports constants and functionality from within v8 that * This class exports constants and functionality from within v8 that
* is necessary to implement inline functions in the v8 api. Don't * is necessary to implement inline functions in the v8 api. Don't
@ -354,8 +502,10 @@ class Internals {
static const int kFixedArrayHeaderSize = 2 * kApiTaggedSize; static const int kFixedArrayHeaderSize = 2 * kApiTaggedSize;
static const int kEmbedderDataArrayHeaderSize = 2 * kApiTaggedSize; static const int kEmbedderDataArrayHeaderSize = 2 * kApiTaggedSize;
static const int kEmbedderDataSlotSize = kApiSystemPointerSize; static const int kEmbedderDataSlotSize = kApiSystemPointerSize;
#ifdef V8_SANDBOXED_EXTERNAL_POINTERS #ifdef V8_ENABLE_SANDBOX
static const int kEmbedderDataSlotRawPayloadOffset = kApiTaggedSize; static const int kEmbedderDataSlotExternalPointerOffset = kApiTaggedSize;
#else
static const int kEmbedderDataSlotExternalPointerOffset = 0;
#endif #endif
static const int kNativeContextEmbedderDataOffset = 6 * kApiTaggedSize; static const int kNativeContextEmbedderDataOffset = 6 * kApiTaggedSize;
static const int kStringRepresentationAndEncodingMask = 0x0f; static const int kStringRepresentationAndEncodingMask = 0x0f;
@ -365,15 +515,21 @@ class Internals {
static const uint32_t kNumIsolateDataSlots = 4; static const uint32_t kNumIsolateDataSlots = 4;
static const int kStackGuardSize = 7 * kApiSystemPointerSize; static const int kStackGuardSize = 7 * kApiSystemPointerSize;
static const int kBuiltinTier0EntryTableSize = 10 * kApiSystemPointerSize; static const int kBuiltinTier0EntryTableSize = 7 * kApiSystemPointerSize;
static const int kBuiltinTier0TableSize = 10 * kApiSystemPointerSize; static const int kBuiltinTier0TableSize = 7 * kApiSystemPointerSize;
// ExternalPointerTable layout guarantees.
static const int kExternalPointerTableBufferOffset = 0;
static const int kExternalPointerTableSize = 4 * kApiSystemPointerSize;
// IsolateData layout guarantees. // IsolateData layout guarantees.
static const int kIsolateCageBaseOffset = 0; static const int kIsolateCageBaseOffset = 0;
static const int kIsolateStackGuardOffset = static const int kIsolateStackGuardOffset =
kIsolateCageBaseOffset + kApiSystemPointerSize; kIsolateCageBaseOffset + kApiSystemPointerSize;
static const int kBuiltinTier0EntryTableOffset = static const int kVariousBooleanFlagsOffset =
kIsolateStackGuardOffset + kStackGuardSize; kIsolateStackGuardOffset + kStackGuardSize;
static const int kBuiltinTier0EntryTableOffset =
kVariousBooleanFlagsOffset + kApiSystemPointerSize;
static const int kBuiltinTier0TableOffset = static const int kBuiltinTier0TableOffset =
kBuiltinTier0EntryTableOffset + kBuiltinTier0EntryTableSize; kBuiltinTier0EntryTableOffset + kBuiltinTier0EntryTableSize;
static const int kIsolateEmbedderDataOffset = static const int kIsolateEmbedderDataOffset =
@ -386,14 +542,17 @@ class Internals {
kIsolateFastCCallCallerPcOffset + kApiSystemPointerSize; kIsolateFastCCallCallerPcOffset + kApiSystemPointerSize;
static const int kIsolateLongTaskStatsCounterOffset = static const int kIsolateLongTaskStatsCounterOffset =
kIsolateFastApiCallTargetOffset + kApiSystemPointerSize; kIsolateFastApiCallTargetOffset + kApiSystemPointerSize;
#ifdef V8_COMPRESS_POINTERS
static const int kIsolateExternalPointerTableOffset =
kIsolateLongTaskStatsCounterOffset + kApiSizetSize;
static const int kIsolateSharedExternalPointerTableAddressOffset =
kIsolateExternalPointerTableOffset + kExternalPointerTableSize;
static const int kIsolateRootsOffset =
kIsolateSharedExternalPointerTableAddressOffset + kApiSystemPointerSize;
#else
static const int kIsolateRootsOffset = static const int kIsolateRootsOffset =
kIsolateLongTaskStatsCounterOffset + kApiSizetSize; kIsolateLongTaskStatsCounterOffset + kApiSizetSize;
#endif
static const int kExternalPointerTableBufferOffset = 0;
static const int kExternalPointerTableCapacityOffset =
kExternalPointerTableBufferOffset + kApiSystemPointerSize;
static const int kExternalPointerTableFreelistHeadOffset =
kExternalPointerTableCapacityOffset + kApiInt32Size;
static const int kUndefinedValueRootIndex = 4; static const int kUndefinedValueRootIndex = 4;
static const int kTheHoleValueRootIndex = 5; static const int kTheHoleValueRootIndex = 5;
@ -404,9 +563,8 @@ class Internals {
static const int kNodeClassIdOffset = 1 * kApiSystemPointerSize; static const int kNodeClassIdOffset = 1 * kApiSystemPointerSize;
static const int kNodeFlagsOffset = 1 * kApiSystemPointerSize + 3; static const int kNodeFlagsOffset = 1 * kApiSystemPointerSize + 3;
static const int kNodeStateMask = 0x7; static const int kNodeStateMask = 0x3;
static const int kNodeStateIsWeakValue = 2; static const int kNodeStateIsWeakValue = 2;
static const int kNodeStateIsPendingValue = 3;
static const int kFirstNonstringType = 0x80; static const int kFirstNonstringType = 0x80;
static const int kOddballType = 0x83; static const int kOddballType = 0x83;
@ -481,6 +639,18 @@ class Internals {
return representation == kExternalTwoByteRepresentationTag; return representation == kExternalTwoByteRepresentationTag;
} }
V8_INLINE static constexpr bool CanHaveInternalField(int instance_type) {
static_assert(kJSObjectType + 1 == kFirstJSApiObjectType);
static_assert(kJSObjectType < kLastJSApiObjectType);
static_assert(kFirstJSApiObjectType < kLastJSApiObjectType);
// Check for IsJSObject() || IsJSSpecialApiObject() || IsJSApiObject()
return instance_type == kJSSpecialApiObjectType ||
// inlined version of base::IsInRange
(static_cast<unsigned>(static_cast<unsigned>(instance_type) -
static_cast<unsigned>(kJSObjectType)) <=
static_cast<unsigned>(kLastJSApiObjectType - kJSObjectType));
}
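The unsigned-subtraction trick in CanHaveInternalField collapses the two range comparisons into a single unsigned compare. A minimal standalone illustration (the range values are placeholders, not the real instance-type constants):

#include <cassert>

// Same trick as base::IsInRange: one unsigned comparison covers [lo, hi]
// because values below lo wrap around to very large unsigned numbers.
constexpr bool IsInRange(int value, int lo, int hi) {
  return static_cast<unsigned>(value - lo) <= static_cast<unsigned>(hi - lo);
}

int main() {
  constexpr int kLo = 0x420, kHi = 0x423;  // placeholder instance-type range
  assert(IsInRange(0x420, kLo, kHi));
  assert(IsInRange(0x423, kLo, kHi));
  assert(!IsInRange(0x41f, kLo, kHi));  // below the range
  assert(!IsInRange(0x424, kLo, kHi));  // above the range
  return 0;
}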
V8_INLINE static uint8_t GetNodeFlag(internal::Address* obj, int shift) { V8_INLINE static uint8_t GetNodeFlag(internal::Address* obj, int shift) {
uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset; uint8_t* addr = reinterpret_cast<uint8_t*>(obj) + kNodeFlagsOffset;
return *addr & static_cast<uint8_t>(1U << shift); return *addr & static_cast<uint8_t>(1U << shift);
@ -532,6 +702,25 @@ class Internals {
return reinterpret_cast<internal::Address*>(addr); return reinterpret_cast<internal::Address*>(addr);
} }
#ifdef V8_ENABLE_SANDBOX
V8_INLINE static internal::Address* GetExternalPointerTableBase(
v8::Isolate* isolate) {
internal::Address addr = reinterpret_cast<internal::Address>(isolate) +
kIsolateExternalPointerTableOffset +
kExternalPointerTableBufferOffset;
return *reinterpret_cast<internal::Address**>(addr);
}
V8_INLINE static internal::Address* GetSharedExternalPointerTableBase(
v8::Isolate* isolate) {
internal::Address addr = reinterpret_cast<internal::Address>(isolate) +
kIsolateSharedExternalPointerTableAddressOffset;
addr = *reinterpret_cast<internal::Address*>(addr);
addr += kExternalPointerTableBufferOffset;
return *reinterpret_cast<internal::Address**>(addr);
}
#endif
template <typename T> template <typename T>
V8_INLINE static T ReadRawField(internal::Address heap_object_ptr, V8_INLINE static T ReadRawField(internal::Address heap_object_ptr,
int offset) { int offset) {
@ -572,38 +761,38 @@ class Internals {
#endif #endif
} }
V8_INLINE static internal::Isolate* GetIsolateForSandbox( V8_INLINE static v8::Isolate* GetIsolateForSandbox(internal::Address obj) {
internal::Address obj) { #ifdef V8_ENABLE_SANDBOX
#ifdef V8_SANDBOXED_EXTERNAL_POINTERS return reinterpret_cast<v8::Isolate*>(
return internal::IsolateFromNeverReadOnlySpaceObject(obj); internal::IsolateFromNeverReadOnlySpaceObject(obj));
#else #else
// Not used in non-sandbox mode. // Not used in non-sandbox mode.
return nullptr; return nullptr;
#endif #endif
} }
V8_INLINE static Address DecodeExternalPointer( template <ExternalPointerTag tag>
const Isolate* isolate, ExternalPointer_t encoded_pointer,
ExternalPointerTag tag) {
#ifdef V8_SANDBOXED_EXTERNAL_POINTERS
return internal::DecodeExternalPointerImpl(isolate, encoded_pointer, tag);
#else
return encoded_pointer;
#endif
}
V8_INLINE static internal::Address ReadExternalPointerField( V8_INLINE static internal::Address ReadExternalPointerField(
internal::Isolate* isolate, internal::Address heap_object_ptr, int offset, v8::Isolate* isolate, internal::Address heap_object_ptr, int offset) {
ExternalPointerTag tag) { #ifdef V8_ENABLE_SANDBOX
#ifdef V8_SANDBOXED_EXTERNAL_POINTERS if (IsSandboxedExternalPointerType(tag)) {
internal::ExternalPointer_t encoded_value = // See src/sandbox/external-pointer-table-inl.h. Logic duplicated here so
ReadRawField<uint32_t>(heap_object_ptr, offset); // it can be inlined and doesn't require an additional call.
// We currently have to treat zero as nullptr in embedder slots. internal::Address* table =
return encoded_value ? DecodeExternalPointer(isolate, encoded_value, tag) IsSharedExternalPointerType(tag)
: 0; ? GetSharedExternalPointerTableBase(isolate)
#else : GetExternalPointerTableBase(isolate);
return ReadRawField<Address>(heap_object_ptr, offset); internal::ExternalPointerHandle handle =
ReadRawField<ExternalPointerHandle>(heap_object_ptr, offset);
uint32_t index = handle >> kExternalPointerIndexShift;
std::atomic<internal::Address>* ptr =
reinterpret_cast<std::atomic<internal::Address>*>(&table[index]);
internal::Address entry =
std::atomic_load_explicit(ptr, std::memory_order_relaxed);
return entry & ~tag;
}
#endif #endif
return ReadRawField<Address>(heap_object_ptr, offset);
} }
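The inlined fast path above boils down to: read a 32-bit handle from the object, shift it into a table index, load the entry with a relaxed atomic load, and mask off the tag. A self-contained model of that lookup (made-up shift, tag value, and table; not the real kExternalPointerIndexShift or tag constants):

#include <atomic>
#include <cassert>
#include <cstdint>

using Address = uint64_t;
constexpr uint32_t kIndexShift = 6;                                    // placeholder shift
constexpr Address kTag = (Address{0x15} << 48) | (Address{1} << 62);   // placeholder tag

// Model of the sandboxed read: handle -> table index -> tagged entry -> pointer.
Address ReadExternalPointer(const std::atomic<Address>* table, uint32_t handle) {
  uint32_t index = handle >> kIndexShift;
  Address entry = table[index].load(std::memory_order_relaxed);
  return entry & ~kTag;  // only the matching tag recovers the original pointer
}

int main() {
  std::atomic<Address> table[4];
  const Address pointer = 0x00005500c0ffee00;
  table[2].store(pointer | kTag, std::memory_order_relaxed);
  const uint32_t handle = 2u << kIndexShift;  // handle encodes the table index
  assert(ReadExternalPointer(table, handle) == pointer);
  return 0;
}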
#ifdef V8_COMPRESS_POINTERS #ifdef V8_COMPRESS_POINTERS
@ -652,7 +841,7 @@ class BackingStoreBase {};
// The maximum value in enum GarbageCollectionReason, defined in heap.h. // The maximum value in enum GarbageCollectionReason, defined in heap.h.
// This is needed for histograms sampling garbage collection reasons. // This is needed for histograms sampling garbage collection reasons.
constexpr int kGarbageCollectionReasonMaxValue = 25; constexpr int kGarbageCollectionReasonMaxValue = 27;
} // namespace internal } // namespace internal

View file

@ -194,6 +194,11 @@ enum RAILMode : unsigned {
*/ */
enum class MemoryPressureLevel { kNone, kModerate, kCritical }; enum class MemoryPressureLevel { kNone, kModerate, kCritical };
/**
* Indicator for the stack state.
*/
using StackState = cppgc::EmbedderStackState;
/** /**
* Isolate represents an isolated instance of the V8 engine. V8 isolates have * Isolate represents an isolated instance of the V8 engine. V8 isolates have
* completely separate states. Objects from one isolate must not be used in * completely separate states. Objects from one isolate must not be used in
@ -211,6 +216,8 @@ class V8_EXPORT Isolate {
CreateParams(); CreateParams();
~CreateParams(); ~CreateParams();
ALLOW_COPY_AND_MOVE_WITH_DEPRECATED_FIELDS(CreateParams)
/** /**
* Allows the host application to provide the address of a function that is * Allows the host application to provide the address of a function that is
* notified each time code is added, moved or removed. * notified each time code is added, moved or removed.
@ -287,12 +294,6 @@ class V8_EXPORT Isolate {
*/ */
FatalErrorCallback fatal_error_callback = nullptr; FatalErrorCallback fatal_error_callback = nullptr;
OOMErrorCallback oom_error_callback = nullptr; OOMErrorCallback oom_error_callback = nullptr;
/**
* The following parameter is experimental and may change significantly.
* This is currently for internal testing.
*/
Isolate* experimental_attach_to_shared_isolate = nullptr;
}; };
/** /**
@ -301,16 +302,18 @@ class V8_EXPORT Isolate {
*/ */
class V8_EXPORT V8_NODISCARD Scope { class V8_EXPORT V8_NODISCARD Scope {
public: public:
explicit Scope(Isolate* isolate) : isolate_(isolate) { isolate->Enter(); } explicit Scope(Isolate* isolate) : v8_isolate_(isolate) {
v8_isolate_->Enter();
}
~Scope() { isolate_->Exit(); } ~Scope() { v8_isolate_->Exit(); }
// Prevent copying of Scope objects. // Prevent copying of Scope objects.
Scope(const Scope&) = delete; Scope(const Scope&) = delete;
Scope& operator=(const Scope&) = delete; Scope& operator=(const Scope&) = delete;
private: private:
Isolate* const isolate_; Isolate* const v8_isolate_;
}; };
/** /**
@ -331,7 +334,7 @@ class V8_EXPORT Isolate {
private: private:
OnFailure on_failure_; OnFailure on_failure_;
Isolate* isolate_; v8::Isolate* v8_isolate_;
bool was_execution_allowed_assert_; bool was_execution_allowed_assert_;
bool was_execution_allowed_throws_; bool was_execution_allowed_throws_;
@ -353,7 +356,7 @@ class V8_EXPORT Isolate {
const AllowJavascriptExecutionScope&) = delete; const AllowJavascriptExecutionScope&) = delete;
private: private:
Isolate* isolate_; Isolate* v8_isolate_;
bool was_execution_allowed_assert_; bool was_execution_allowed_assert_;
bool was_execution_allowed_throws_; bool was_execution_allowed_throws_;
bool was_execution_allowed_dump_; bool was_execution_allowed_dump_;
@ -376,7 +379,7 @@ class V8_EXPORT Isolate {
const SuppressMicrotaskExecutionScope&) = delete; const SuppressMicrotaskExecutionScope&) = delete;
private: private:
internal::Isolate* const isolate_; internal::Isolate* const i_isolate_;
internal::MicrotaskQueue* const microtask_queue_; internal::MicrotaskQueue* const microtask_queue_;
internal::Address previous_stack_height_; internal::Address previous_stack_height_;
@ -389,7 +392,7 @@ class V8_EXPORT Isolate {
*/ */
class V8_EXPORT V8_NODISCARD SafeForTerminationScope { class V8_EXPORT V8_NODISCARD SafeForTerminationScope {
public: public:
explicit SafeForTerminationScope(v8::Isolate* isolate); explicit SafeForTerminationScope(v8::Isolate* v8_isolate);
~SafeForTerminationScope(); ~SafeForTerminationScope();
// Prevent copying of Scope objects. // Prevent copying of Scope objects.
@ -397,7 +400,7 @@ class V8_EXPORT Isolate {
SafeForTerminationScope& operator=(const SafeForTerminationScope&) = delete; SafeForTerminationScope& operator=(const SafeForTerminationScope&) = delete;
private: private:
internal::Isolate* isolate_; internal::Isolate* i_isolate_;
bool prev_value_; bool prev_value_;
}; };
@ -531,6 +534,8 @@ class V8_EXPORT Isolate {
kInvalidatedMegaDOMProtector = 112, kInvalidatedMegaDOMProtector = 112,
kFunctionPrototypeArguments = 113, kFunctionPrototypeArguments = 113,
kFunctionPrototypeCaller = 114, kFunctionPrototypeCaller = 114,
kTurboFanOsrCompileStarted = 115,
kAsyncStackTaggingCreateTaskCall = 116,
// If you add new values here, you'll also need to update Chromium's: // If you add new values here, you'll also need to update Chromium's:
// web_feature.mojom, use_counter_callback.cc, and enums.xml. V8 changes to // web_feature.mojom, use_counter_callback.cc, and enums.xml. V8 changes to
@ -636,9 +641,6 @@ class V8_EXPORT Isolate {
* This specifies the callback called by the upcoming dynamic * This specifies the callback called by the upcoming dynamic
* import() language feature to load modules. * import() language feature to load modules.
*/ */
V8_DEPRECATED("Use HostImportModuleDynamicallyCallback")
void SetHostImportModuleDynamicallyCallback(
HostImportModuleDynamicallyWithImportAssertionsCallback callback);
void SetHostImportModuleDynamicallyCallback( void SetHostImportModuleDynamicallyCallback(
HostImportModuleDynamicallyCallback callback); HostImportModuleDynamicallyCallback callback);
@ -839,12 +841,6 @@ class V8_EXPORT Isolate {
*/ */
int64_t AdjustAmountOfExternalAllocatedMemory(int64_t change_in_bytes); int64_t AdjustAmountOfExternalAllocatedMemory(int64_t change_in_bytes);
/**
* Returns the number of phantom handles without callbacks that were reset
* by the garbage collector since the last call to this function.
*/
size_t NumberOfPhantomHandleResetsSinceLastCall();
/** /**
* Returns heap profiler for this isolate. Will return NULL until the isolate * Returns heap profiler for this isolate. Will return NULL until the isolate
* is initialized. * is initialized.
@ -927,6 +923,7 @@ class V8_EXPORT Isolate {
void RemoveGCPrologueCallback(GCCallbackWithData, void* data = nullptr); void RemoveGCPrologueCallback(GCCallbackWithData, void* data = nullptr);
void RemoveGCPrologueCallback(GCCallback callback); void RemoveGCPrologueCallback(GCCallback callback);
START_ALLOW_USE_DEPRECATED()
/** /**
* Sets the embedder heap tracer for the isolate. * Sets the embedder heap tracer for the isolate.
* SetEmbedderHeapTracer cannot be used simultaneously with AttachCppHeap. * SetEmbedderHeapTracer cannot be used simultaneously with AttachCppHeap.
@ -938,6 +935,7 @@ class V8_EXPORT Isolate {
* SetEmbedderHeapTracer. * SetEmbedderHeapTracer.
*/ */
EmbedderHeapTracer* GetEmbedderHeapTracer(); EmbedderHeapTracer* GetEmbedderHeapTracer();
END_ALLOW_USE_DEPRECATED()
/** /**
* Sets an embedder roots handle that V8 should consider when performing * Sets an embedder roots handle that V8 should consider when performing
@ -1163,9 +1161,8 @@ class V8_EXPORT Isolate {
* LowMemoryNotification() instead to influence the garbage collection * LowMemoryNotification() instead to influence the garbage collection
* schedule. * schedule.
*/ */
void RequestGarbageCollectionForTesting( void RequestGarbageCollectionForTesting(GarbageCollectionType type,
GarbageCollectionType type, StackState stack_state);
EmbedderHeapTracer::EmbedderStackState stack_state);
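A minimal sketch of the updated call, assuming an initialized isolate and that testing GCs are permitted (V8 normally guards this behind --expose-gc); it only demonstrates the new two-argument signature with the StackState alias:

#include <v8.h>

// Sketch only: assumes an initialized isolate and that testing GCs are
// allowed (V8 normally requires the --expose-gc flag for this call).
void ForceFullGCForTesting(v8::Isolate* isolate) {
  isolate->RequestGarbageCollectionForTesting(
      v8::Isolate::kFullGarbageCollection, v8::StackState::kNoHeapPointers);
}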
/** /**
* Set the callback to invoke for logging event. * Set the callback to invoke for logging event.
@ -1523,15 +1520,15 @@ class V8_EXPORT Isolate {
void SetWasmStreamingCallback(WasmStreamingCallback callback); void SetWasmStreamingCallback(WasmStreamingCallback callback);
void SetWasmAsyncResolvePromiseCallback(
WasmAsyncResolvePromiseCallback callback);
void SetWasmLoadSourceMapCallback(WasmLoadSourceMapCallback callback); void SetWasmLoadSourceMapCallback(WasmLoadSourceMapCallback callback);
void SetWasmSimdEnabledCallback(WasmSimdEnabledCallback callback); void SetWasmSimdEnabledCallback(WasmSimdEnabledCallback callback);
void SetWasmExceptionsEnabledCallback(WasmExceptionsEnabledCallback callback); void SetWasmExceptionsEnabledCallback(WasmExceptionsEnabledCallback callback);
void SetWasmDynamicTieringEnabledCallback(
WasmDynamicTieringEnabledCallback callback);
void SetSharedArrayBufferConstructorEnabledCallback( void SetSharedArrayBufferConstructorEnabledCallback(
SharedArrayBufferConstructorEnabledCallback callback); SharedArrayBufferConstructorEnabledCallback callback);
@ -1598,19 +1595,6 @@ class V8_EXPORT Isolate {
*/ */
void VisitExternalResources(ExternalResourceVisitor* visitor); void VisitExternalResources(ExternalResourceVisitor* visitor);
/**
* Iterates through all the persistent handles in the current isolate's heap
* that have class_ids.
*/
void VisitHandlesWithClassIds(PersistentHandleVisitor* visitor);
/**
* Iterates through all the persistent handles in the current isolate's heap
* that have class_ids and are weak to be marked as inactive if there is no
* pending activity for the handle.
*/
void VisitWeakHandles(PersistentHandleVisitor* visitor);
/** /**
* Check if this isolate is in use. * Check if this isolate is in use.
* True if at least one thread Enter'ed this isolate. * True if at least one thread Enter'ed this isolate.

View file

@ -86,7 +86,7 @@ class V8_EXPORT V8_NODISCARD HandleScope {
static int NumberOfHandles(Isolate* isolate); static int NumberOfHandles(Isolate* isolate);
V8_INLINE Isolate* GetIsolate() const { V8_INLINE Isolate* GetIsolate() const {
return reinterpret_cast<Isolate*>(isolate_); return reinterpret_cast<Isolate*>(i_isolate_);
} }
HandleScope(const HandleScope&) = delete; HandleScope(const HandleScope&) = delete;
@ -97,7 +97,7 @@ class V8_EXPORT V8_NODISCARD HandleScope {
void Initialize(Isolate* isolate); void Initialize(Isolate* isolate);
static internal::Address* CreateHandle(internal::Isolate* isolate, static internal::Address* CreateHandle(internal::Isolate* i_isolate,
internal::Address value); internal::Address value);
private: private:
@ -108,7 +108,7 @@ class V8_EXPORT V8_NODISCARD HandleScope {
void operator delete(void*, size_t); void operator delete(void*, size_t);
void operator delete[](void*, size_t); void operator delete[](void*, size_t);
internal::Isolate* isolate_; internal::Isolate* i_isolate_;
internal::Address* prev_next_; internal::Address* prev_next_;
internal::Address* prev_limit_; internal::Address* prev_limit_;
@ -354,7 +354,7 @@ class MaybeLocal {
/** /**
* Converts this MaybeLocal<> to a Local<>. If this MaybeLocal<> is empty, * Converts this MaybeLocal<> to a Local<>. If this MaybeLocal<> is empty,
* |false| is returned and |out| is left untouched. * |false| is returned and |out| is assigned with nullptr.
*/ */
template <class S> template <class S>
V8_WARN_UNUSED_RESULT V8_INLINE bool ToLocal(Local<S>* out) const { V8_WARN_UNUSED_RESULT V8_INLINE bool ToLocal(Local<S>* out) const {
@ -445,7 +445,7 @@ class V8_EXPORT V8_NODISCARD SealHandleScope {
void operator delete(void*, size_t); void operator delete(void*, size_t);
void operator delete[](void*, size_t); void operator delete[](void*, size_t);
internal::Isolate* const isolate_; internal::Isolate* const i_isolate_;
internal::Address* prev_limit_; internal::Address* prev_limit_;
int prev_sealed_level_; int prev_sealed_level_;
}; };

View file

@ -121,17 +121,6 @@ class V8_EXPORT Locker {
*/ */
static bool IsLocked(Isolate* isolate); static bool IsLocked(Isolate* isolate);
/**
* Returns whether any v8::Locker has ever been used in this process.
* TODO(cbruni, chromium:1240851): Fix locking checks on a per-thread basis.
* The current implementation is quite confusing and leads to unexpected
* results if anybody uses v8::Locker in the current process.
*/
V8_DEPRECATE_SOON("This method will be removed.")
static bool WasEverUsed();
V8_DEPRECATED("Use WasEverUsed instead")
static bool IsActive();
// Disallow copying and assigning. // Disallow copying and assigning.
Locker(const Locker&) = delete; Locker(const Locker&) = delete;
void operator=(const Locker&) = delete; void operator=(const Locker&) = delete;

View file

@ -5,6 +5,9 @@
#ifndef INCLUDE_V8_MAYBE_H_ #ifndef INCLUDE_V8_MAYBE_H_
#define INCLUDE_V8_MAYBE_H_ #define INCLUDE_V8_MAYBE_H_
#include <type_traits>
#include <utility>
#include "v8-internal.h" // NOLINT(build/include_directory) #include "v8-internal.h" // NOLINT(build/include_directory)
#include "v8config.h" // NOLINT(build/include_directory) #include "v8config.h" // NOLINT(build/include_directory)
@ -57,11 +60,20 @@ class Maybe {
* Converts this Maybe<> to a value of type T. If this Maybe<> is * Converts this Maybe<> to a value of type T. If this Maybe<> is
* nothing (empty), V8 will crash the process. * nothing (empty), V8 will crash the process.
*/ */
V8_INLINE T FromJust() const { V8_INLINE T FromJust() const& {
if (V8_UNLIKELY(!IsJust())) api_internal::FromJustIsNothing(); if (V8_UNLIKELY(!IsJust())) api_internal::FromJustIsNothing();
return value_; return value_;
} }
/**
* Converts this Maybe<> to a value of type T. If this Maybe<> is
* nothing (empty), V8 will crash the process.
*/
V8_INLINE T FromJust() && {
if (V8_UNLIKELY(!IsJust())) api_internal::FromJustIsNothing();
return std::move(value_);
}
/** /**
* Converts this Maybe<> to a value of type T, using a default value if this * Converts this Maybe<> to a value of type T, using a default value if this
* Maybe<> is nothing (empty). * Maybe<> is nothing (empty).
@ -82,6 +94,7 @@ class Maybe {
private: private:
Maybe() : has_value_(false) {} Maybe() : has_value_(false) {}
explicit Maybe(const T& t) : has_value_(true), value_(t) {} explicit Maybe(const T& t) : has_value_(true), value_(t) {}
explicit Maybe(T&& t) : has_value_(true), value_(std::move(t)) {}
bool has_value_; bool has_value_;
T value_; T value_;
@ -90,6 +103,8 @@ class Maybe {
friend Maybe<U> Nothing(); friend Maybe<U> Nothing();
template <class U> template <class U>
friend Maybe<U> Just(const U& u); friend Maybe<U> Just(const U& u);
template <class U, std::enable_if_t<!std::is_lvalue_reference_v<U>>*>
friend Maybe<U> Just(U&& u);
}; };
template <class T> template <class T>
@ -102,6 +117,14 @@ inline Maybe<T> Just(const T& t) {
return Maybe<T>(t); return Maybe<T>(t);
} }
// Don't use forwarding references here but instead use two overloads.
// Forwarding references only work when type deduction takes place, which is not
// the case for callsites such as Just<Type>(t).
template <class T, std::enable_if_t<!std::is_lvalue_reference_v<T>>* = nullptr>
inline Maybe<T> Just(T&& t) {
return Maybe<T>(std::move(t));
}
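A short sketch of what the new overloads enable, assuming a move-only payload (std::unique_ptr is just an example type): Just(T&&) avoids a copy on construction, and the rvalue-qualified FromJust() moves the value back out.

#include <memory>
#include <utility>
#include <v8.h>

// Sketch: the new rvalue overloads let Maybe<T> carry move-only payloads.
v8::Maybe<std::unique_ptr<int>> MakeBoxed(int value) {
  // Just<Type>(t) performs no deduction, hence two overloads instead of a
  // forwarding reference.
  return v8::Just(std::make_unique<int>(value));
}

void Use() {
  v8::Maybe<std::unique_ptr<int>> maybe = MakeBoxed(42);
  // FromJust() && moves the payload out instead of copying it.
  std::unique_ptr<int> boxed = std::move(maybe).FromJust();
  (void)boxed;
}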
// A template specialization of Maybe<T> for the case of T = void. // A template specialization of Maybe<T> for the case of T = void.
template <> template <>
class Maybe<void> { class Maybe<void> {

View file

@ -70,7 +70,7 @@ class V8_EXPORT ScriptOrigin {
bool resource_is_opaque = false, bool is_wasm = false, bool resource_is_opaque = false, bool is_wasm = false,
bool is_module = false, bool is_module = false,
Local<Data> host_defined_options = Local<Data>()) Local<Data> host_defined_options = Local<Data>())
: isolate_(isolate), : v8_isolate_(isolate),
resource_name_(resource_name), resource_name_(resource_name),
resource_line_offset_(resource_line_offset), resource_line_offset_(resource_line_offset),
resource_column_offset_(resource_column_offset), resource_column_offset_(resource_column_offset),
@ -87,14 +87,12 @@ class V8_EXPORT ScriptOrigin {
V8_INLINE int ColumnOffset() const; V8_INLINE int ColumnOffset() const;
V8_INLINE int ScriptId() const; V8_INLINE int ScriptId() const;
V8_INLINE Local<Value> SourceMapUrl() const; V8_INLINE Local<Value> SourceMapUrl() const;
V8_DEPRECATE_SOON("Use GetHostDefinedOptions")
Local<PrimitiveArray> HostDefinedOptions() const;
V8_INLINE Local<Data> GetHostDefinedOptions() const; V8_INLINE Local<Data> GetHostDefinedOptions() const;
V8_INLINE ScriptOriginOptions Options() const { return options_; } V8_INLINE ScriptOriginOptions Options() const { return options_; }
private: private:
void VerifyHostDefinedOptions() const; void VerifyHostDefinedOptions() const;
Isolate* isolate_; Isolate* v8_isolate_;
Local<Value> resource_name_; Local<Value> resource_name_;
int resource_line_offset_; int resource_line_offset_;
int resource_column_offset_; int resource_column_offset_;

View file

@ -125,31 +125,10 @@ struct WasmModuleInstantiated {
int64_t wall_clock_duration_in_us = -1; int64_t wall_clock_duration_in_us = -1;
}; };
struct WasmModuleTieredUp {
bool lazy = false;
size_t code_size_in_bytes = 0;
int64_t wall_clock_duration_in_us = -1;
int64_t cpu_duration_in_us = -1;
};
struct WasmModulesPerIsolate { struct WasmModulesPerIsolate {
size_t count = 0; size_t count = 0;
}; };
#define V8_MAIN_THREAD_METRICS_EVENTS(V) \
V(GarbageCollectionFullCycle) \
V(GarbageCollectionFullMainThreadIncrementalMark) \
V(GarbageCollectionFullMainThreadBatchedIncrementalMark) \
V(GarbageCollectionFullMainThreadIncrementalSweep) \
V(GarbageCollectionFullMainThreadBatchedIncrementalSweep) \
V(GarbageCollectionYoungCycle) \
V(WasmModuleDecoded) \
V(WasmModuleCompiled) \
V(WasmModuleInstantiated) \
V(WasmModuleTieredUp)
#define V8_THREAD_SAFE_METRICS_EVENTS(V) V(WasmModulesPerIsolate)
/** /**
* This class serves as a base class for recording event-based metrics in V8. * This class serves as a base class for recording event-based metrics in V8.
 * There are two kinds of metrics, those which are expected to be thread-safe and  * There are two kinds of metrics, those which are expected to be thread-safe and
@ -159,19 +138,6 @@ struct WasmModulesPerIsolate {
* background thread, it will be delayed and executed by the foreground task * background thread, it will be delayed and executed by the foreground task
* runner. * runner.
* *
* The thread-safe events are listed in the V8_THREAD_SAFE_METRICS_EVENTS
* macro above while the main thread event are listed in
* V8_MAIN_THREAD_METRICS_EVENTS above. For the former, a virtual method
* AddMainThreadEvent(const E& event, v8::Context::Token token) will be
* generated and for the latter AddThreadSafeEvent(const E& event).
*
* Thread-safe events are not allowed to access the context and therefore do
* not carry a context ID with them. These IDs can be generated using
* Recorder::GetContextId() and the ID will be valid throughout the lifetime
* of the isolate. It is not guaranteed that the ID will still resolve to
* a valid context using Recorder::GetContext() at the time the metric is
* recorded. In this case, an empty handle will be returned.
*
* The embedder is expected to call v8::Isolate::SetMetricsRecorder() * The embedder is expected to call v8::Isolate::SetMetricsRecorder()
* providing its implementation and have the virtual methods overwritten * providing its implementation and have the virtual methods overwritten
* for the events it cares about. * for the events it cares about.
@ -202,14 +168,30 @@ class V8_EXPORT Recorder {
virtual ~Recorder() = default; virtual ~Recorder() = default;
// Main thread events. Those are only triggered on the main thread, and hence
// can access the context.
#define ADD_MAIN_THREAD_EVENT(E) \ #define ADD_MAIN_THREAD_EVENT(E) \
virtual void AddMainThreadEvent(const E& event, ContextId context_id) {} virtual void AddMainThreadEvent(const E&, ContextId) {}
V8_MAIN_THREAD_METRICS_EVENTS(ADD_MAIN_THREAD_EVENT) ADD_MAIN_THREAD_EVENT(GarbageCollectionFullCycle)
ADD_MAIN_THREAD_EVENT(GarbageCollectionFullMainThreadIncrementalMark)
ADD_MAIN_THREAD_EVENT(GarbageCollectionFullMainThreadBatchedIncrementalMark)
ADD_MAIN_THREAD_EVENT(GarbageCollectionFullMainThreadIncrementalSweep)
ADD_MAIN_THREAD_EVENT(GarbageCollectionFullMainThreadBatchedIncrementalSweep)
ADD_MAIN_THREAD_EVENT(GarbageCollectionYoungCycle)
ADD_MAIN_THREAD_EVENT(WasmModuleDecoded)
ADD_MAIN_THREAD_EVENT(WasmModuleCompiled)
ADD_MAIN_THREAD_EVENT(WasmModuleInstantiated)
#undef ADD_MAIN_THREAD_EVENT #undef ADD_MAIN_THREAD_EVENT
// Thread-safe events are not allowed to access the context and therefore do
// not carry a context ID with them. These IDs can be generated using
// Recorder::GetContextId() and the ID will be valid throughout the lifetime
// of the isolate. It is not guaranteed that the ID will still resolve to
// a valid context using Recorder::GetContext() at the time the metric is
// recorded. In this case, an empty handle will be returned.
#define ADD_THREAD_SAFE_EVENT(E) \ #define ADD_THREAD_SAFE_EVENT(E) \
virtual void AddThreadSafeEvent(const E& event) {} virtual void AddThreadSafeEvent(const E&) {}
V8_THREAD_SAFE_METRICS_EVENTS(ADD_THREAD_SAFE_EVENT) ADD_THREAD_SAFE_EVENT(WasmModulesPerIsolate)
#undef ADD_THREAD_SAFE_EVENT #undef ADD_THREAD_SAFE_EVENT
virtual void NotifyIsolateDisposal() {} virtual void NotifyIsolateDisposal() {}
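A hedged sketch of an embedder-side recorder built against the generated virtuals above; it overrides one main-thread event and one thread-safe event and leaves the rest at their empty defaults. The field names come from the structs earlier in this header, the printf reporting is only an example, and the recorder would be registered through v8::Isolate::SetMetricsRecorder() as described above.

#include <cstdio>
#include <v8-metrics.h>

// Only the events this embedder cares about are overridden; everything else
// keeps the empty default implementation from the base class.
class MyRecorder : public v8::metrics::Recorder {
 public:
  void AddMainThreadEvent(const v8::metrics::WasmModuleInstantiated& event,
                          ContextId context_id) override {
    std::printf("wasm instantiate took %lld us\n",
                static_cast<long long>(event.wall_clock_duration_in_us));
  }
  void AddThreadSafeEvent(
      const v8::metrics::WasmModulesPerIsolate& event) override {
    std::printf("%zu wasm modules in this isolate\n", event.count);
  }
};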

View file

@ -142,7 +142,7 @@ class V8_EXPORT V8_NODISCARD MicrotasksScope {
MicrotasksScope& operator=(const MicrotasksScope&) = delete; MicrotasksScope& operator=(const MicrotasksScope&) = delete;
private: private:
internal::Isolate* const isolate_; internal::Isolate* const i_isolate_;
internal::MicrotaskQueue* const microtask_queue_; internal::MicrotaskQueue* const microtask_queue_;
bool run_; bool run_;
}; };

View file

@ -594,8 +594,6 @@ class V8_EXPORT Object : public Value {
/** /**
* Returns the context in which the object was created. * Returns the context in which the object was created.
*/ */
V8_DEPRECATED("Use MaybeLocal<Context> GetCreationContext()")
Local<Context> CreationContext();
MaybeLocal<Context> GetCreationContext(); MaybeLocal<Context> GetCreationContext();
/** /**
@ -604,10 +602,6 @@ class V8_EXPORT Object : public Value {
Local<Context> GetCreationContextChecked(); Local<Context> GetCreationContextChecked();
/** Same as above, but works for Persistents */ /** Same as above, but works for Persistents */
V8_DEPRECATED(
"Use MaybeLocal<Context> GetCreationContext(const "
"PersistentBase<Object>& object)")
static Local<Context> CreationContext(const PersistentBase<Object>& object);
V8_INLINE static MaybeLocal<Context> GetCreationContext( V8_INLINE static MaybeLocal<Context> GetCreationContext(
const PersistentBase<Object>& object) { const PersistentBase<Object>& object) {
return object.val_->GetCreationContext(); return object.val_->GetCreationContext();
@ -717,7 +711,7 @@ Local<Value> Object::GetInternalField(int index) {
// Fast path: If the object is a plain JSObject, which is the common case, we // Fast path: If the object is a plain JSObject, which is the common case, we
// know where to find the internal fields and can return the value directly. // know where to find the internal fields and can return the value directly.
int instance_type = I::GetInstanceType(obj); int instance_type = I::GetInstanceType(obj);
if (v8::internal::CanHaveInternalField(instance_type)) { if (I::CanHaveInternalField(instance_type)) {
int offset = I::kJSObjectHeaderSize + (I::kEmbedderDataSlotSize * index); int offset = I::kJSObjectHeaderSize + (I::kEmbedderDataSlotSize * index);
A value = I::ReadRawField<A>(obj, offset); A value = I::ReadRawField<A>(obj, offset);
#ifdef V8_COMPRESS_POINTERS #ifdef V8_COMPRESS_POINTERS
@ -742,14 +736,13 @@ void* Object::GetAlignedPointerFromInternalField(int index) {
// Fast path: If the object is a plain JSObject, which is the common case, we // Fast path: If the object is a plain JSObject, which is the common case, we
// know where to find the internal fields and can return the value directly. // know where to find the internal fields and can return the value directly.
auto instance_type = I::GetInstanceType(obj); auto instance_type = I::GetInstanceType(obj);
if (v8::internal::CanHaveInternalField(instance_type)) { if (I::CanHaveInternalField(instance_type)) {
int offset = I::kJSObjectHeaderSize + (I::kEmbedderDataSlotSize * index); int offset = I::kJSObjectHeaderSize + (I::kEmbedderDataSlotSize * index) +
#ifdef V8_SANDBOXED_EXTERNAL_POINTERS I::kEmbedderDataSlotExternalPointerOffset;
offset += I::kEmbedderDataSlotRawPayloadOffset; Isolate* isolate = I::GetIsolateForSandbox(obj);
#endif A value =
internal::Isolate* isolate = I::GetIsolateForSandbox(obj); I::ReadExternalPointerField<internal::kEmbedderDataSlotPayloadTag>(
A value = I::ReadExternalPointerField( isolate, obj, offset);
isolate, obj, offset, internal::kEmbedderDataSlotPayloadTag);
return reinterpret_cast<void*>(value); return reinterpret_cast<void*>(value);
} }
#endif #endif

View file

@ -169,8 +169,6 @@ class PersistentBase {
* Turns this handle into a weak phantom handle without finalization callback. * Turns this handle into a weak phantom handle without finalization callback.
* The handle will be reset automatically when the garbage collector detects * The handle will be reset automatically when the garbage collector detects
* that the object is no longer reachable. * that the object is no longer reachable.
* A related function Isolate::NumberOfPhantomHandleResetsSinceLastCall
* returns how many phantom handles were reset by the garbage collector.
*/ */
V8_INLINE void SetWeak(); V8_INLINE void SetWeak();
@ -254,7 +252,7 @@ class NonCopyablePersistentTraits {
* This will clone the contents of storage cell, but not any of the flags, etc. * This will clone the contents of storage cell, but not any of the flags, etc.
*/ */
template <class T> template <class T>
struct CopyablePersistentTraits { struct V8_DEPRECATED("Use v8::Global instead") CopyablePersistentTraits {
using CopyablePersistent = Persistent<T, CopyablePersistentTraits<T>>; using CopyablePersistent = Persistent<T, CopyablePersistentTraits<T>>;
static const bool kResetInDestructor = true; static const bool kResetInDestructor = true;
template <class S, class M> template <class S, class M>

View file

@ -158,9 +158,10 @@ class TaskRunner {
class JobDelegate { class JobDelegate {
public: public:
/** /**
* Returns true if this thread should return from the worker task on the * Returns true if this thread *must* return from the worker task on the
* current thread ASAP. Workers should periodically invoke ShouldYield (or * current thread ASAP. Workers should periodically invoke ShouldYield (or
* YieldIfNeeded()) as often as is reasonable. * YieldIfNeeded()) as often as is reasonable.
 * Once this method has returned true, ShouldYield must not be called again.
*/ */
virtual bool ShouldYield() = 0; virtual bool ShouldYield() = 0;
@ -429,6 +430,17 @@ class PageAllocator {
virtual bool SetPermissions(void* address, size_t length, virtual bool SetPermissions(void* address, size_t length,
Permission permissions) = 0; Permission permissions) = 0;
/**
* Recommits discarded pages in the given range with given permissions.
* Discarded pages must be recommitted with their original permissions
* before they are used again.
*/
virtual bool RecommitPages(void* address, size_t length,
Permission permissions) {
// TODO(v8:12797): make it pure once it's implemented on Chromium side.
return false;
}
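A sketch of the intended lifecycle for the new hook, assuming the allocator comes from Platform::GetPageAllocator() and that DiscardSystemPages (part of the surrounding PageAllocator interface, not shown in this hunk) was used to drop the physical pages, with the range originally mapped read-write:

#include <cstddef>
#include <v8-platform.h>

// Pages that were discarded must be recommitted with their original
// permissions before they are touched again.
bool ReusePages(v8::PageAllocator* allocator, void* address, size_t length) {
  // Give the physical pages back to the OS while keeping the reservation.
  if (!allocator->DiscardSystemPages(address, length)) return false;
  // ... later, before reusing the memory:
  return allocator->RecommitPages(address, length,
                                  v8::PageAllocator::kReadWrite);
}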
/** /**
* Frees memory in the given [address, address + size) range. address and size * Frees memory in the given [address, address + size) range. address and size
* should be operating system page-aligned. The next write to this * should be operating system page-aligned. The next write to this
@ -698,6 +710,10 @@ class VirtualAddressSpace {
/** /**
* Sets permissions of all allocated pages in the given range. * Sets permissions of all allocated pages in the given range.
* *
* This operation can fail due to OOM, in which case false is returned. If
* the operation fails for a reason other than OOM, this function will
* terminate the process as this implies a bug in the client.
*
* \param address The start address of the range. Must be aligned to * \param address The start address of the range. Must be aligned to
* page_size(). * page_size().
* *
@ -706,7 +722,7 @@ class VirtualAddressSpace {
* *
* \param permissions The new permissions for the range. * \param permissions The new permissions for the range.
* *
* \returns true on success, false otherwise. * \returns true on success, false on OOM.
*/ */
virtual V8_WARN_UNUSED_RESULT bool SetPagePermissions( virtual V8_WARN_UNUSED_RESULT bool SetPagePermissions(
Address address, size_t size, PagePermissions permissions) = 0; Address address, size_t size, PagePermissions permissions) = 0;
@ -820,6 +836,24 @@ class VirtualAddressSpace {
// takes a command enum as parameter. // takes a command enum as parameter.
// //
/**
* Recommits discarded pages in the given range with given permissions.
* Discarded pages must be recommitted with their original permissions
* before they are used again.
*
* \param address The start address of the range. Must be aligned to
* page_size().
*
* \param size The size in bytes of the range. Must be a multiple
* of page_size().
*
* \param permissions The permissions for the range that the pages must have.
*
* \returns true on success, false otherwise.
*/
virtual V8_WARN_UNUSED_RESULT bool RecommitPages(
Address address, size_t size, PagePermissions permissions) = 0;
/** /**
* Frees memory in the given [address, address + size) range. address and * Frees memory in the given [address, address + size) range. address and
* size should be aligned to the page_size(). The next write to this memory * size should be aligned to the page_size(). The next write to this memory
@ -890,10 +924,7 @@ class Platform {
/** /**
* Allows the embedder to manage memory page allocations. * Allows the embedder to manage memory page allocations.
*/ */
virtual PageAllocator* GetPageAllocator() { virtual PageAllocator* GetPageAllocator() = 0;
// TODO(bbudge) Make this abstract after all embedders implement this.
return nullptr;
}
/** /**
* Allows the embedder to specify a custom allocator used for zones. * Allows the embedder to specify a custom allocator used for zones.
@ -910,10 +941,7 @@ class Platform {
* error. * error.
* Embedder overrides of this function must NOT call back into V8. * Embedder overrides of this function must NOT call back into V8.
*/ */
virtual void OnCriticalMemoryPressure() { virtual void OnCriticalMemoryPressure() {}
// TODO(bbudge) Remove this when embedders override the following method.
// See crbug.com/634547.
}
/** /**
* Enables the embedder to respond in cases where V8 can't allocate large * Enables the embedder to respond in cases where V8 can't allocate large
@ -924,6 +952,7 @@ class Platform {
* *
* Embedder overrides of this function must NOT call back into V8. * Embedder overrides of this function must NOT call back into V8.
*/ */
V8_DEPRECATED("Use the method without informative parameter")
virtual bool OnCriticalMemoryPressure(size_t length) { return false; } virtual bool OnCriticalMemoryPressure(size_t length) { return false; }
/** /**
@ -1022,16 +1051,28 @@ class Platform {
* thread (A=>B/B=>A deadlock) and [2] JobTask::Run or * thread (A=>B/B=>A deadlock) and [2] JobTask::Run or
* JobTask::GetMaxConcurrency may be invoked synchronously from JobHandle * JobTask::GetMaxConcurrency may be invoked synchronously from JobHandle
* (B=>JobHandle::foo=>B deadlock). * (B=>JobHandle::foo=>B deadlock).
*/
virtual std::unique_ptr<JobHandle> PostJob(
TaskPriority priority, std::unique_ptr<JobTask> job_task) {
auto handle = CreateJob(priority, std::move(job_task));
handle->NotifyConcurrencyIncrease();
return handle;
}
/**
* Creates and returns a JobHandle associated with a Job. Unlike PostJob(),
* this doesn't immediately schedules |worker_task| to run; the Job is then
* scheduled by calling either NotifyConcurrencyIncrease() or Join().
* *
* A sufficient PostJob() implementation that uses the default Job provided in * A sufficient CreateJob() implementation that uses the default Job provided
* libplatform looks like: * in libplatform looks like:
* std::unique_ptr<JobHandle> PostJob( * std::unique_ptr<JobHandle> CreateJob(
* TaskPriority priority, std::unique_ptr<JobTask> job_task) override { * TaskPriority priority, std::unique_ptr<JobTask> job_task) override {
* return v8::platform::NewDefaultJobHandle( * return v8::platform::NewDefaultJobHandle(
* this, priority, std::move(job_task), NumberOfWorkerThreads()); * this, priority, std::move(job_task), NumberOfWorkerThreads());
* } * }
*/ */
virtual std::unique_ptr<JobHandle> PostJob( virtual std::unique_ptr<JobHandle> CreateJob(
TaskPriority priority, std::unique_ptr<JobTask> job_task) = 0; TaskPriority priority, std::unique_ptr<JobTask> job_task) = 0;
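A sketch of how the split between PostJob() and CreateJob() is meant to be used, assuming the standard JobTask interface (Run(JobDelegate*) plus GetMaxConcurrency(size_t)); the atomic work counter stands in for a real work queue.

#include <atomic>
#include <memory>
#include <v8-platform.h>

// Cooperative job: the work items are modeled by a simple counter.
class MyJobTask : public v8::JobTask {
 public:
  explicit MyJobTask(std::atomic<size_t>* remaining) : remaining_(remaining) {}

  void Run(v8::JobDelegate* delegate) override {
    // Check ShouldYield() periodically; once it returns true, return promptly
    // and do not call it again.
    while (!delegate->ShouldYield()) {
      size_t left = remaining_->load(std::memory_order_relaxed);
      if (left == 0) return;
      remaining_->compare_exchange_weak(left, left - 1,
                                        std::memory_order_relaxed);
    }
  }

  size_t GetMaxConcurrency(size_t /*worker_count*/) const override {
    return remaining_->load(std::memory_order_relaxed);
  }

 private:
  std::atomic<size_t>* remaining_;
};

// PostJob() both creates and schedules; CreateJob() only creates, and the job
// starts once NotifyConcurrencyIncrease() or Join() is called on the handle.
void RunWork(v8::Platform* platform, std::atomic<size_t>* remaining) {
  std::unique_ptr<v8::JobHandle> handle = platform->PostJob(
      v8::TaskPriority::kUserVisible, std::make_unique<MyJobTask>(remaining));
  handle->Join();  // wait until all items have been processed
}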
/** /**

View file

@ -20,6 +20,7 @@ class String;
namespace internal { namespace internal {
class ExternalString; class ExternalString;
class ScopedExternalStringLock; class ScopedExternalStringLock;
class StringForwardingTable;
} // namespace internal } // namespace internal
/** /**
@ -269,6 +270,7 @@ class V8_EXPORT String : public Name {
private: private:
friend class internal::ExternalString; friend class internal::ExternalString;
friend class v8::String; friend class v8::String;
friend class internal::StringForwardingTable;
friend class internal::ScopedExternalStringLock; friend class internal::ScopedExternalStringLock;
}; };
@ -785,10 +787,9 @@ String::ExternalStringResource* String::GetExternalStringResource() const {
ExternalStringResource* result; ExternalStringResource* result;
if (I::IsExternalTwoByteString(I::GetInstanceType(obj))) { if (I::IsExternalTwoByteString(I::GetInstanceType(obj))) {
internal::Isolate* isolate = I::GetIsolateForSandbox(obj); Isolate* isolate = I::GetIsolateForSandbox(obj);
A value = A value = I::ReadExternalPointerField<internal::kExternalStringResourceTag>(
I::ReadExternalPointerField(isolate, obj, I::kStringResourceOffset, isolate, obj, I::kStringResourceOffset);
internal::kExternalStringResourceTag);
result = reinterpret_cast<String::ExternalStringResource*>(value); result = reinterpret_cast<String::ExternalStringResource*>(value);
} else { } else {
result = GetExternalStringResourceSlow(); result = GetExternalStringResourceSlow();
@ -809,10 +810,9 @@ String::ExternalStringResourceBase* String::GetExternalStringResourceBase(
ExternalStringResourceBase* resource; ExternalStringResourceBase* resource;
if (type == I::kExternalOneByteRepresentationTag || if (type == I::kExternalOneByteRepresentationTag ||
type == I::kExternalTwoByteRepresentationTag) { type == I::kExternalTwoByteRepresentationTag) {
internal::Isolate* isolate = I::GetIsolateForSandbox(obj); Isolate* isolate = I::GetIsolateForSandbox(obj);
A value = A value = I::ReadExternalPointerField<internal::kExternalStringResourceTag>(
I::ReadExternalPointerField(isolate, obj, I::kStringResourceOffset, isolate, obj, I::kStringResourceOffset);
internal::kExternalStringResourceTag);
resource = reinterpret_cast<ExternalStringResourceBase*>(value); resource = reinterpret_cast<ExternalStringResourceBase*>(value);
} else { } else {
resource = GetExternalStringResourceBaseSlow(encoding_out); resource = GetExternalStringResourceBaseSlow(encoding_out);

View file

@ -331,6 +331,9 @@ class V8_EXPORT CpuProfilingOptions {
unsigned max_samples = kNoSampleLimit, int sampling_interval_us = 0, unsigned max_samples = kNoSampleLimit, int sampling_interval_us = 0,
MaybeLocal<Context> filter_context = MaybeLocal<Context>()); MaybeLocal<Context> filter_context = MaybeLocal<Context>());
CpuProfilingOptions(CpuProfilingOptions&&) = default;
CpuProfilingOptions& operator=(CpuProfilingOptions&&) = default;
CpuProfilingMode mode() const { return mode_; } CpuProfilingMode mode() const { return mode_; }
unsigned max_samples() const { return max_samples_; } unsigned max_samples() const { return max_samples_; }
int sampling_interval_us() const { return sampling_interval_us_; } int sampling_interval_us() const { return sampling_interval_us_; }
@ -344,7 +347,7 @@ class V8_EXPORT CpuProfilingOptions {
CpuProfilingMode mode_; CpuProfilingMode mode_;
unsigned max_samples_; unsigned max_samples_;
int sampling_interval_us_; int sampling_interval_us_;
CopyablePersistentTraits<Context>::CopyablePersistent filter_context_; Global<Context> filter_context_;
}; };
/** /**
@ -542,7 +545,9 @@ class V8_EXPORT HeapGraphNode {
kConsString = 10, // Concatenated string. A pair of pointers to strings. kConsString = 10, // Concatenated string. A pair of pointers to strings.
kSlicedString = 11, // Sliced string. A fragment of another string. kSlicedString = 11, // Sliced string. A fragment of another string.
kSymbol = 12, // A Symbol (ES6). kSymbol = 12, // A Symbol (ES6).
kBigInt = 13 // BigInt. kBigInt = 13, // BigInt.
kObjectShape = 14, // Internal data used for tracking the shapes (or
// "hidden classes") of JS objects.
}; };
/** Returns node type (see HeapGraphNode::Type). */ /** Returns node type (see HeapGraphNode::Type). */
@ -975,14 +980,71 @@ class V8_EXPORT HeapProfiler {
virtual ~ObjectNameResolver() = default; virtual ~ObjectNameResolver() = default;
}; };
enum class HeapSnapshotMode {
/**
* Heap snapshot for regular developers.
*/
kRegular,
/**
* Heap snapshot is exposing internals that may be useful for experts.
*/
kExposeInternals,
};
enum class NumericsMode {
/**
* Numeric values are hidden as they are values of the corresponding
* objects.
*/
kHideNumericValues,
/**
* Numeric values are exposed in artificial fields.
*/
kExposeNumericValues
};
struct HeapSnapshotOptions final {
// Manually define default constructor here to be able to use it in
// `TakeSnapshot()` below.
// NOLINTNEXTLINE
HeapSnapshotOptions() {}
/**
* The control used to report intermediate progress to.
*/
ActivityControl* control = nullptr;
/**
* The resolver used by the snapshot generator to get names for V8 objects.
*/
ObjectNameResolver* global_object_name_resolver = nullptr;
/**
* Mode for taking the snapshot, see `HeapSnapshotMode`.
*/
HeapSnapshotMode snapshot_mode = HeapSnapshotMode::kRegular;
/**
* Mode for dealing with numeric values, see `NumericsMode`.
*/
NumericsMode numerics_mode = NumericsMode::kHideNumericValues;
};
/** /**
* Takes a heap snapshot and returns it. * Takes a heap snapshot.
*
* \returns the snapshot.
*/ */
const HeapSnapshot* TakeHeapSnapshot( const HeapSnapshot* TakeHeapSnapshot(
ActivityControl* control = nullptr, const HeapSnapshotOptions& options = HeapSnapshotOptions());
/**
* Takes a heap snapshot. See `HeapSnapshotOptions` for details on the
* parameters.
*
* \returns the snapshot.
*/
const HeapSnapshot* TakeHeapSnapshot(
ActivityControl* control,
ObjectNameResolver* global_object_name_resolver = nullptr, ObjectNameResolver* global_object_name_resolver = nullptr,
bool treat_global_objects_as_roots = true, bool hide_internals = true, bool capture_numeric_value = false);
bool capture_numeric_value = false);
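A minimal sketch of the options-based overload, assuming a live isolate whose profiler is obtained via Isolate::GetHeapProfiler():

#include <v8-profiler.h>
#include <v8.h>

// Take an expert-level snapshot with numeric values exposed, using the new
// options struct instead of the positional bool parameters.
const v8::HeapSnapshot* TakeDetailedSnapshot(v8::Isolate* isolate) {
  v8::HeapProfiler* profiler = isolate->GetHeapProfiler();
  v8::HeapProfiler::HeapSnapshotOptions options;
  options.snapshot_mode = v8::HeapProfiler::HeapSnapshotMode::kExposeInternals;
  options.numerics_mode = v8::HeapProfiler::NumericsMode::kExposeNumericValues;
  return profiler->TakeHeapSnapshot(options);
}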
/** /**
* Starts tracking of heap objects population statistics. After calling * Starts tracking of heap objects population statistics. After calling
@ -1101,18 +1163,18 @@ struct HeapStatsUpdate {
uint32_t size; // New value of size field for the interval with this index. uint32_t size; // New value of size field for the interval with this index.
}; };
#define CODE_EVENTS_LIST(V) \ #define CODE_EVENTS_LIST(V) \
V(Builtin) \ V(Builtin) \
V(Callback) \ V(Callback) \
V(Eval) \ V(Eval) \
V(Function) \ V(Function) \
V(InterpretedFunction) \ V(InterpretedFunction) \
V(Handler) \ V(Handler) \
V(BytecodeHandler) \ V(BytecodeHandler) \
V(LazyCompile) \ V(LazyCompile) /* Unused, use kFunction instead */ \
V(RegExp) \ V(RegExp) \
V(Script) \ V(Script) \
V(Stub) \ V(Stub) \
V(Relocation) V(Relocation)
/** /**

View file

@ -37,9 +37,10 @@ class V8_EXPORT RegExp : public Object {
kDotAll = 1 << 5, kDotAll = 1 << 5,
kLinear = 1 << 6, kLinear = 1 << 6,
kHasIndices = 1 << 7, kHasIndices = 1 << 7,
kUnicodeSets = 1 << 8,
}; };
static constexpr int kFlagCount = 8; static constexpr int kFlagCount = 9;
/** /**
* Creates a regular expression from the given pattern string and * Creates a regular expression from the given pattern string and

View file

@ -20,6 +20,7 @@
namespace v8 { namespace v8 {
class Function; class Function;
class Message;
class Object; class Object;
class PrimitiveArray; class PrimitiveArray;
class Script; class Script;
@ -47,8 +48,6 @@ class V8_EXPORT ScriptOrModule {
* The options that were passed by the embedder as HostDefinedOptions to * The options that were passed by the embedder as HostDefinedOptions to
* the ScriptOrigin. * the ScriptOrigin.
*/ */
V8_DEPRECATED("Use HostDefinedOptions")
Local<PrimitiveArray> GetHostDefinedOptions();
Local<Data> HostDefinedOptions(); Local<Data> HostDefinedOptions();
}; };
@ -78,7 +77,13 @@ class V8_EXPORT UnboundScript {
* Returns zero based line number of the code_pos location in the script. * Returns zero based line number of the code_pos location in the script.
* -1 will be returned if no information available. * -1 will be returned if no information available.
*/ */
int GetLineNumber(int code_pos); int GetLineNumber(int code_pos = 0);
/**
* Returns zero based column number of the code_pos location in the script.
* -1 will be returned if no information available.
*/
int GetColumnNumber(int code_pos = 0);
static const int kNoScriptId = 0; static const int kNoScriptId = 0;
}; };
@ -286,6 +291,16 @@ class V8_EXPORT Module : public Data {
V8_WARN_UNUSED_RESULT Maybe<bool> SetSyntheticModuleExport( V8_WARN_UNUSED_RESULT Maybe<bool> SetSyntheticModuleExport(
Isolate* isolate, Local<String> export_name, Local<Value> export_value); Isolate* isolate, Local<String> export_name, Local<Value> export_value);
/**
* Search the modules requested directly or indirectly by the module for
* any top-level await that has not yet resolved. If there is any, the
* returned vector contains a tuple of the unresolved module and a message
* with the pending top-level await.
* An embedder may call this before exiting to improve error messages.
*/
std::vector<std::tuple<Local<Module>, Local<Message>>>
GetStalledTopLevelAwaitMessage(Isolate* isolate);
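A sketch of how an embedder might surface these messages before shutdown, assuming an already evaluated root module; the use of Message::Get() and String::Utf8Value, and the fprintf destination, are just one way to do the reporting.

#include <cstdio>
#include <tuple>
#include <v8.h>

// Report modules whose top-level await never resolved before tearing the
// isolate down.
void ReportStalledTopLevelAwait(v8::Isolate* isolate,
                                v8::Local<v8::Module> root) {
  v8::HandleScope handle_scope(isolate);
  auto stalled = root->GetStalledTopLevelAwaitMessage(isolate);
  for (const auto& entry : stalled) {
    v8::Local<v8::Message> message = std::get<1>(entry);
    v8::String::Utf8Value text(isolate, message->Get());
    std::fprintf(stderr, "stalled top-level await: %s\n", *text);
  }
}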
V8_INLINE static Module* Cast(Data* data); V8_INLINE static Module* Cast(Data* data);
private: private:
@ -489,7 +504,7 @@ class V8_EXPORT ScriptCompiler {
/** /**
* A task which the embedder must run on a background thread to * A task which the embedder must run on a background thread to
* consume a V8 code cache. Returned by * consume a V8 code cache. Returned by
* ScriptCompiler::StarConsumingCodeCache. * ScriptCompiler::StartConsumingCodeCache.
*/ */
class V8_EXPORT ConsumeCodeCacheTask final { class V8_EXPORT ConsumeCodeCacheTask final {
public: public:
@ -497,6 +512,36 @@ class V8_EXPORT ScriptCompiler {
void Run(); void Run();
/**
* Provides the source text string and origin information to the consumption
* task. May be called before, during, or after Run(). This step checks
* whether the script matches an existing script in the Isolate's
* compilation cache. To check whether such a script was found, call
* ShouldMergeWithExistingScript.
*
* The Isolate provided must be the same one used during
* StartConsumingCodeCache and must be currently entered on the thread that
* calls this function. The source text and origin provided in this step
* must precisely match those used later in the ScriptCompiler::Source that
* will contain this ConsumeCodeCacheTask.
*/
void SourceTextAvailable(Isolate* isolate, Local<String> source_text,
const ScriptOrigin& origin);
/**
* Returns whether the embedder should call MergeWithExistingScript. This
* function may be called from any thread, any number of times, but its
* return value is only meaningful after SourceTextAvailable has completed.
*/
bool ShouldMergeWithExistingScript() const;
/**
* Merges newly deserialized data into an existing script which was found
* during SourceTextAvailable. May be called only after Run() has completed.
* Can execute on any thread, like Run().
*/
void MergeWithExistingScript();
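A sketch of the full consume-and-merge sequence using only the calls declared here; dispatching Run() to a worker thread and wiring the finished task into a ScriptCompiler::Source are left to the embedder.

#include <memory>
#include <v8.h>

// How the task gets onto a background thread is up to the embedder; here it
// is simply called inline for brevity.
void ConsumeCache(v8::Isolate* isolate,
                  std::unique_ptr<v8::ScriptCompiler::CachedData> cache,
                  v8::Local<v8::String> source_text,
                  const v8::ScriptOrigin& origin) {
  std::unique_ptr<v8::ScriptCompiler::ConsumeCodeCacheTask> task(
      v8::ScriptCompiler::StartConsumingCodeCache(isolate, std::move(cache)));

  task->Run();  // normally dispatched to a background thread

  // Main thread, isolate entered: let the task look for an existing script.
  task->SourceTextAvailable(isolate, source_text, origin);
  if (task->ShouldMergeWithExistingScript()) {
    task->MergeWithExistingScript();  // any thread, but only after Run()
  }
  // The task is then handed to ScriptCompiler::Source so the final compile
  // step can pick up the deserialized (and possibly merged) data.
}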
private: private:
friend class ScriptCompiler; friend class ScriptCompiler;
@ -581,7 +626,8 @@ class V8_EXPORT ScriptCompiler {
*/ */
static ScriptStreamingTask* StartStreaming( static ScriptStreamingTask* StartStreaming(
Isolate* isolate, StreamedSource* source, Isolate* isolate, StreamedSource* source,
ScriptType type = ScriptType::kClassic); ScriptType type = ScriptType::kClassic,
CompileOptions options = kNoCompileOptions);
static ConsumeCodeCacheTask* StartConsumingCodeCache( static ConsumeCodeCacheTask* StartConsumingCodeCache(
Isolate* isolate, std::unique_ptr<CachedData> source); Isolate* isolate, std::unique_ptr<CachedData> source);
@ -650,6 +696,7 @@ class V8_EXPORT ScriptCompiler {
* It is possible to specify multiple context extensions (obj in the above * It is possible to specify multiple context extensions (obj in the above
* example). * example).
*/ */
V8_DEPRECATED("Use CompileFunction")
static V8_WARN_UNUSED_RESULT MaybeLocal<Function> CompileFunctionInContext( static V8_WARN_UNUSED_RESULT MaybeLocal<Function> CompileFunctionInContext(
Local<Context> context, Source* source, size_t arguments_count, Local<Context> context, Source* source, size_t arguments_count,
Local<String> arguments[], size_t context_extension_count, Local<String> arguments[], size_t context_extension_count,
@ -657,6 +704,7 @@ class V8_EXPORT ScriptCompiler {
CompileOptions options = kNoCompileOptions, CompileOptions options = kNoCompileOptions,
NoCacheReason no_cache_reason = kNoCacheNoReason, NoCacheReason no_cache_reason = kNoCacheNoReason,
Local<ScriptOrModule>* script_or_module_out = nullptr); Local<ScriptOrModule>* script_or_module_out = nullptr);
static V8_WARN_UNUSED_RESULT MaybeLocal<Function> CompileFunction( static V8_WARN_UNUSED_RESULT MaybeLocal<Function> CompileFunction(
Local<Context> context, Source* source, size_t arguments_count = 0, Local<Context> context, Source* source, size_t arguments_count = 0,
Local<String> arguments[] = nullptr, size_t context_extension_count = 0, Local<String> arguments[] = nullptr, size_t context_extension_count = 0,


@ -14,7 +14,6 @@
namespace v8 { namespace v8 {
class AccessorSignature;
class CFunction; class CFunction;
class FunctionTemplate; class FunctionTemplate;
class ObjectTemplate; class ObjectTemplate;
@ -83,28 +82,7 @@ class V8_EXPORT Template : public Data {
* cross-context access. * cross-context access.
* \param attribute The attributes of the property for which an accessor * \param attribute The attributes of the property for which an accessor
* is added. * is added.
* \param signature The signature describes valid receivers for the accessor
* and is used to perform implicit instance checks against them. If the
* receiver is incompatible (i.e. is not an instance of the constructor as
* defined by FunctionTemplate::HasInstance()), an implicit TypeError is
* thrown and no callback is invoked.
*/ */
V8_DEPRECATED("Do signature check in accessor")
void SetNativeDataProperty(
Local<String> name, AccessorGetterCallback getter,
AccessorSetterCallback setter, Local<Value> data,
PropertyAttribute attribute, Local<AccessorSignature> signature,
AccessControl settings = DEFAULT,
SideEffectType getter_side_effect_type = SideEffectType::kHasSideEffect,
SideEffectType setter_side_effect_type = SideEffectType::kHasSideEffect);
V8_DEPRECATED("Do signature check in accessor")
void SetNativeDataProperty(
Local<Name> name, AccessorNameGetterCallback getter,
AccessorNameSetterCallback setter, Local<Value> data,
PropertyAttribute attribute, Local<AccessorSignature> signature,
AccessControl settings = DEFAULT,
SideEffectType getter_side_effect_type = SideEffectType::kHasSideEffect,
SideEffectType setter_side_effect_type = SideEffectType::kHasSideEffect);
void SetNativeDataProperty( void SetNativeDataProperty(
Local<String> name, AccessorGetterCallback getter, Local<String> name, AccessorGetterCallback getter,
AccessorSetterCallback setter = nullptr, AccessorSetterCallback setter = nullptr,
@ -151,7 +129,8 @@ class V8_EXPORT Template : public Data {
* Interceptor for get requests on an object. * Interceptor for get requests on an object.
* *
* Use `info.GetReturnValue().Set()` to set the return value of the * Use `info.GetReturnValue().Set()` to set the return value of the
* intercepted get request. * intercepted get request. If the property does not exist the callback should
* not set the result and must not produce side effects.
* *
* \param property The name of the property for which the request was * \param property The name of the property for which the request was
* intercepted. * intercepted.
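
A hedged sketch of a getter interceptor that follows the rule above; `LookupEmbedderValue` is a hypothetical embedder-side lookup, not a V8 API:

void NamedGetter(v8::Local<v8::Name> property,
                 const v8::PropertyCallbackInfo<v8::Value>& info) {
  v8::Isolate* isolate = info.GetIsolate();
  v8::String::Utf8Value key(isolate, property);
  v8::Local<v8::Value> value;
  // LookupEmbedderValue is assumed to return v8::MaybeLocal<v8::Value>.
  if (!LookupEmbedderValue(isolate, *key).ToLocal(&value)) {
    return;  // Property does not exist: set no result, cause no side effects.
  }
  info.GetReturnValue().Set(value);
}
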
@ -192,9 +171,9 @@ using GenericNamedPropertyGetterCallback =
* Use `info.GetReturnValue()` to indicate whether the request was intercepted * Use `info.GetReturnValue()` to indicate whether the request was intercepted
* or not. If the setter successfully intercepts the request, i.e., if the * or not. If the setter successfully intercepts the request, i.e., if the
* request should not be further executed, call * request should not be further executed, call
* `info.GetReturnValue().Set(value)`. If the setter * `info.GetReturnValue().Set(value)`. If the setter did not intercept the
* did not intercept the request, i.e., if the request should be handled as * request, i.e., if the request should be handled as if no interceptor is
* if no interceptor is present, do not call `Set()`. * present, do not call `Set()` and do not produce side effects.
* *
* \param property The name of the property for which the request was * \param property The name of the property for which the request was
* intercepted. * intercepted.
@ -217,7 +196,9 @@ using GenericNamedPropertySetterCallback =
* defineProperty(). * defineProperty().
* *
* Use `info.GetReturnValue().Set(value)` to set the property attributes. The * Use `info.GetReturnValue().Set(value)` to set the property attributes. The
* value is an integer encoding a `v8::PropertyAttribute`. * value is an integer encoding a `v8::PropertyAttribute`. If the property does
* not exist the callback should not set the result and must not produce side
* effects.
* *
* \param property The name of the property for which the request was * \param property The name of the property for which the request was
* intercepted. * intercepted.
@ -242,7 +223,8 @@ using GenericNamedPropertyQueryCallback =
* or not. If the deleter successfully intercepts the request, i.e., if the * or not. If the deleter successfully intercepts the request, i.e., if the
* request should not be further executed, call * request should not be further executed, call
* `info.GetReturnValue().Set(value)` with a boolean `value`. The `value` is * `info.GetReturnValue().Set(value)` with a boolean `value`. The `value` is
* used as the return value of `delete`. * used as the return value of `delete`. If the deleter does not intercept the
* request then it should not set the result and must not produce side effects.
* *
* \param property The name of the property for which the request was * \param property The name of the property for which the request was
* intercepted. * intercepted.
@ -274,9 +256,9 @@ using GenericNamedPropertyEnumeratorCallback =
* Use `info.GetReturnValue()` to indicate whether the request was intercepted * Use `info.GetReturnValue()` to indicate whether the request was intercepted
* or not. If the definer successfully intercepts the request, i.e., if the * or not. If the definer successfully intercepts the request, i.e., if the
* request should not be further executed, call * request should not be further executed, call
* `info.GetReturnValue().Set(value)`. If the definer * `info.GetReturnValue().Set(value)`. If the definer did not intercept the
* did not intercept the request, i.e., if the request should be handled as * request, i.e., if the request should be handled as if no interceptor is
* if no interceptor is present, do not call `Set()`. * present, do not call `Set()` and do not produce side effects.
* *
* \param property The name of the property for which the request was * \param property The name of the property for which the request was
* intercepted. * intercepted.
@ -821,27 +803,7 @@ class V8_EXPORT ObjectTemplate : public Template {
* cross-context access. * cross-context access.
* \param attribute The attributes of the property for which an accessor * \param attribute The attributes of the property for which an accessor
* is added. * is added.
* \param signature The signature describes valid receivers for the accessor
* and is used to perform implicit instance checks against them. If the
* receiver is incompatible (i.e. is not an instance of the constructor as
* defined by FunctionTemplate::HasInstance()), an implicit TypeError is
* thrown and no callback is invoked.
*/ */
V8_DEPRECATED("Do signature check in accessor")
void SetAccessor(
Local<String> name, AccessorGetterCallback getter,
AccessorSetterCallback setter, Local<Value> data, AccessControl settings,
PropertyAttribute attribute, Local<AccessorSignature> signature,
SideEffectType getter_side_effect_type = SideEffectType::kHasSideEffect,
SideEffectType setter_side_effect_type = SideEffectType::kHasSideEffect);
V8_DEPRECATED("Do signature check in accessor")
void SetAccessor(
Local<Name> name, AccessorNameGetterCallback getter,
AccessorNameSetterCallback setter, Local<Value> data,
AccessControl settings, PropertyAttribute attribute,
Local<AccessorSignature> signature,
SideEffectType getter_side_effect_type = SideEffectType::kHasSideEffect,
SideEffectType setter_side_effect_type = SideEffectType::kHasSideEffect);
void SetAccessor( void SetAccessor(
Local<String> name, AccessorGetterCallback getter, Local<String> name, AccessorGetterCallback getter,
AccessorSetterCallback setter = nullptr, AccessorSetterCallback setter = nullptr,
@ -1019,24 +981,6 @@ class V8_EXPORT Signature : public Data {
static void CheckCast(Data* that); static void CheckCast(Data* that);
}; };
/**
* An AccessorSignature specifies which receivers are valid parameters
* to an accessor callback.
*/
class V8_EXPORT AccessorSignature : public Data {
public:
static Local<AccessorSignature> New(
Isolate* isolate,
Local<FunctionTemplate> receiver = Local<FunctionTemplate>());
V8_INLINE static AccessorSignature* Cast(Data* data);
private:
AccessorSignature();
static void CheckCast(Data* that);
};
// --- Implementation --- // --- Implementation ---
void Template::Set(Isolate* isolate, const char* name, Local<Data> value, void Template::Set(Isolate* isolate, const char* name, Local<Data> value,
@ -1067,13 +1011,6 @@ Signature* Signature::Cast(Data* data) {
return reinterpret_cast<Signature*>(data); return reinterpret_cast<Signature*>(data);
} }
AccessorSignature* AccessorSignature::Cast(Data* data) {
#ifdef V8_ENABLE_CHECKS
CheckCast(data);
#endif
return reinterpret_cast<AccessorSignature*>(data);
}
} // namespace v8 } // namespace v8
#endif // INCLUDE_V8_TEMPLATE_H_ #endif // INCLUDE_V8_TEMPLATE_H_


@ -17,10 +17,10 @@ struct CalleeSavedRegisters {
void* arm_r9; void* arm_r9;
void* arm_r10; void* arm_r10;
}; };
#elif V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_ARM64 || \ #elif V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_ARM64 || \
V8_TARGET_ARCH_MIPS || V8_TARGET_ARCH_MIPS64 || V8_TARGET_ARCH_PPC || \ V8_TARGET_ARCH_MIPS64 || V8_TARGET_ARCH_PPC || V8_TARGET_ARCH_PPC64 || \
V8_TARGET_ARCH_PPC64 || V8_TARGET_ARCH_RISCV64 || V8_TARGET_ARCH_S390 || \ V8_TARGET_ARCH_RISCV64 || V8_TARGET_ARCH_S390 || V8_TARGET_ARCH_LOONG64 || \
V8_TARGET_ARCH_LOONG64 V8_TARGET_ARCH_RISCV32
struct CalleeSavedRegisters {}; struct CalleeSavedRegisters {};
#else #else
#error Target architecture was not detected as supported by v8 #error Target architecture was not detected as supported by v8


@ -537,7 +537,6 @@ class StdGlobalValueMap : public GlobalValueMap<K, V, Traits> {
: GlobalValueMap<K, V, Traits>(isolate) {} : GlobalValueMap<K, V, Traits>(isolate) {}
}; };
class DefaultPersistentValueVectorTraits { class DefaultPersistentValueVectorTraits {
public: public:
typedef std::vector<PersistentContainerValue> Impl; typedef std::vector<PersistentContainerValue> Impl;
@ -562,7 +561,6 @@ class DefaultPersistentValueVectorTraits {
} }
}; };
/** /**
* A vector wrapper that safely stores Global values. * A vector wrapper that safely stores Global values.
* C++11 embedders don't need this class, as they can use Global * C++11 embedders don't need this class, as they can use Global
@ -573,8 +571,8 @@ class DefaultPersistentValueVectorTraits {
* PersistentContainerValue, with all conversion into and out of V8 * PersistentContainerValue, with all conversion into and out of V8
* handles being transparently handled by this class. * handles being transparently handled by this class.
*/ */
template<typename V, typename Traits = DefaultPersistentValueVectorTraits> template <typename V, typename Traits = DefaultPersistentValueVectorTraits>
class PersistentValueVector { class V8_DEPRECATE_SOON("Use std::vector<Global<V>>.") PersistentValueVector {
public: public:
explicit PersistentValueVector(Isolate* isolate) : isolate_(isolate) { } explicit PersistentValueVector(Isolate* isolate) : isolate_(isolate) { }
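
As the deprecation message suggests, a plain std::vector of Globals covers the same use; a minimal sketch with illustrative names:

// Assumes <vector> is included and a HandleScope is active where locals are used.
std::vector<v8::Global<v8::Value>> retained;
retained.push_back(v8::Global<v8::Value>(isolate, some_local_value));
// ... later, on the same isolate:
v8::Local<v8::Value> restored = retained.front().Get(isolate);
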


@ -8,6 +8,7 @@
#include <stddef.h> #include <stddef.h>
#include <stdint.h> #include <stdint.h>
#include <memory>
#include <utility> #include <utility>
#include "v8-local-handle.h" // NOLINT(build/include_directory) #include "v8-local-handle.h" // NOLINT(build/include_directory)
@ -26,8 +27,37 @@ class Value;
namespace internal { namespace internal {
struct ScriptStreamingData; struct ScriptStreamingData;
class SharedObjectConveyorHandles;
class ValueDeserializer;
class ValueSerializer;
} // namespace internal } // namespace internal
/**
* A move-only class for managing the lifetime of shared value conveyors used
* by V8 to keep JS shared values alive in transit when serialized.
*
* This class is not directly constructible and is always passed to a
* ValueSerializer::Delegate via ValueSerializer::SetSharedValueConveyor.
*
* The embedder must not destruct the SharedValueConveyor until the associated
* serialized data no longer needs to be deserialized.
*/
class V8_EXPORT SharedValueConveyor final {
public:
SharedValueConveyor(SharedValueConveyor&&) noexcept;
~SharedValueConveyor();
SharedValueConveyor& operator=(SharedValueConveyor&&) noexcept;
private:
friend class internal::ValueSerializer;
friend class internal::ValueDeserializer;
explicit SharedValueConveyor(Isolate* isolate);
std::unique_ptr<internal::SharedObjectConveyorHandles> private_;
};
/** /**
* Value serialization compatible with the HTML structured clone algorithm. * Value serialization compatible with the HTML structured clone algorithm.
* The format is backward-compatible (i.e. safe to store to disk). * The format is backward-compatible (i.e. safe to store to disk).
@ -69,20 +99,20 @@ class V8_EXPORT ValueSerializer {
Isolate* isolate, Local<WasmModuleObject> module); Isolate* isolate, Local<WasmModuleObject> module);
/** /**
* Returns whether shared values are supported. GetSharedValueId is only * Called when the first shared value is serialized. All subsequent shared
* called if SupportsSharedValues() returns true. * values will use the same conveyor.
*
* The embedder must ensure the lifetime of the conveyor matches the
* lifetime of the serialized data.
*
* If the embedder supports serializing shared values, this method should
* return true. Otherwise the embedder should throw an exception and return
* false.
*
* This method is called at most once per serializer.
*/ */
virtual bool SupportsSharedValues() const; virtual bool AdoptSharedValueConveyor(Isolate* isolate,
SharedValueConveyor&& conveyor);
/**
* Called when the ValueSerializer serializes a value that is shared across
* Isolates. The embedder must return an ID for the object. This function
* must be idempotent for the same object. When deserializing, the ID will
* be passed to ValueDeserializer::Delegate::GetSharedValueFromId as
* |shared_value_id|.
*/
virtual Maybe<uint32_t> GetSharedValueId(Isolate* isolate,
Local<Value> shared_value);
/** /**
* Allocates memory for the buffer of at least the size provided. The actual * Allocates memory for the buffer of at least the size provided. The actual
@ -196,17 +226,10 @@ class V8_EXPORT ValueDeserializer {
Isolate* isolate, uint32_t clone_id); Isolate* isolate, uint32_t clone_id);
/** /**
* Returns whether shared values are supported. GetSharedValueFromId is only * Get the SharedValueConveyor previously provided by
* called if SupportsSharedValues() returns true. * ValueSerializer::Delegate::AdoptSharedValueConveyor.
*/ */
virtual bool SupportsSharedValues() const; virtual const SharedValueConveyor* GetSharedValueConveyor(Isolate* isolate);
/**
* Get a value shared across Isolates given a shared_value_id provided by
* ValueSerializer::Delegate::GetSharedValueId.
*/
virtual MaybeLocal<Value> GetSharedValueFromId(Isolate* isolate,
uint32_t shared_value_id);
}; };
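
A rough sketch of the two delegate overrides working together; the surrounding classes, the std::optional storage, and handing the conveyor from serializer to deserializer are all embedder-side assumptions, and unrelated pure-virtual Delegate methods are omitted:

#include <optional>
#include <utility>

class SerializerDelegate : public v8::ValueSerializer::Delegate {
 public:
  bool AdoptSharedValueConveyor(v8::Isolate* isolate,
                                v8::SharedValueConveyor&& conveyor) override {
    // Keep the conveyor alive at least as long as the serialized data.
    conveyor_.emplace(std::move(conveyor));
    return true;
  }
  std::optional<v8::SharedValueConveyor> conveyor_;
  // ThrowDataCloneError and the other Delegate methods are omitted here.
};

class DeserializerDelegate : public v8::ValueDeserializer::Delegate {
 public:
  explicit DeserializerDelegate(const v8::SharedValueConveyor* conveyor)
      : conveyor_(conveyor) {}
  const v8::SharedValueConveyor* GetSharedValueConveyor(v8::Isolate*) override {
    return conveyor_;  // The conveyor adopted during serialization.
  }

 private:
  const v8::SharedValueConveyor* conveyor_;
};
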
ValueDeserializer(Isolate* isolate, const uint8_t* data, size_t size); ValueDeserializer(Isolate* isolate, const uint8_t* data, size_t size);


@ -9,9 +9,9 @@
// NOTE these macros are used by some of the tool scripts and the build // NOTE these macros are used by some of the tool scripts and the build
// system so their names cannot be changed without changing the scripts. // system so their names cannot be changed without changing the scripts.
#define V8_MAJOR_VERSION 10 #define V8_MAJOR_VERSION 10
#define V8_MINOR_VERSION 2 #define V8_MINOR_VERSION 7
#define V8_BUILD_NUMBER 154 #define V8_BUILD_NUMBER 193
#define V8_PATCH_LEVEL 15 #define V8_PATCH_LEVEL 13
// Use 1 for candidates and 0 otherwise. // Use 1 for candidates and 0 otherwise.
// (Boolean macro values are not supported by all preprocessors.) // (Boolean macro values are not supported by all preprocessors.)


@ -5,6 +5,7 @@
#ifndef INCLUDE_V8_WASM_H_ #ifndef INCLUDE_V8_WASM_H_
#define INCLUDE_V8_WASM_H_ #define INCLUDE_V8_WASM_H_
#include <functional>
#include <memory> #include <memory>
#include <string> #include <string>
@ -130,19 +131,6 @@ class V8_EXPORT WasmStreaming final {
public: public:
class WasmStreamingImpl; class WasmStreamingImpl;
/**
* Client to receive streaming event notifications.
*/
class Client {
public:
virtual ~Client() = default;
/**
* Passes the fully compiled module to the client. This can be used to
* implement code caching.
*/
virtual void OnModuleCompiled(CompiledWasmModule compiled_module) = 0;
};
explicit WasmStreaming(std::unique_ptr<WasmStreamingImpl> impl); explicit WasmStreaming(std::unique_ptr<WasmStreamingImpl> impl);
~WasmStreaming(); ~WasmStreaming();
@ -183,10 +171,11 @@ class V8_EXPORT WasmStreaming final {
bool SetCompiledModuleBytes(const uint8_t* bytes, size_t size); bool SetCompiledModuleBytes(const uint8_t* bytes, size_t size);
/** /**
* Sets the client object that will receive streaming event notifications. * Sets a callback which is called whenever a significant number of new
* This must be called before {OnBytesReceived}, {Finish}, or {Abort}. * functions are ready for serialization.
*/ */
void SetClient(std::shared_ptr<Client> client); void SetMoreFunctionsCanBeSerializedCallback(
std::function<void(CompiledWasmModule)>);
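
A sketch of using the new callback for code caching, in place of the removed Client interface; `streaming` is an assumed WasmStreaming handle and `PersistToEmbedderCache` is a hypothetical helper:

streaming->SetMoreFunctionsCanBeSerializedCallback(
    [](v8::CompiledWasmModule compiled) {
      v8::OwnedBuffer bytes = compiled.Serialize();
      PersistToEmbedderCache(bytes.buffer.get(), bytes.size);
    });
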
/* /*
* Sets the UTF-8 encoded source URL for the {Script} object. This must be * Sets the UTF-8 encoded source URL for the {Script} object. This must be
@ -206,52 +195,6 @@ class V8_EXPORT WasmStreaming final {
std::unique_ptr<WasmStreamingImpl> impl_; std::unique_ptr<WasmStreamingImpl> impl_;
}; };
// TODO(mtrofin): when streaming compilation is done, we can rename this
// to simply WasmModuleObjectBuilder
class V8_EXPORT WasmModuleObjectBuilderStreaming final {
public:
explicit WasmModuleObjectBuilderStreaming(Isolate* isolate);
/**
* The buffer passed into OnBytesReceived is owned by the caller.
*/
void OnBytesReceived(const uint8_t*, size_t size);
void Finish();
/**
* Abort streaming compilation. If {exception} has a value, then the promise
* associated with streaming compilation is rejected with that value. If
* {exception} does not have value, the promise does not get rejected.
*/
void Abort(MaybeLocal<Value> exception);
Local<Promise> GetPromise();
~WasmModuleObjectBuilderStreaming() = default;
private:
WasmModuleObjectBuilderStreaming(const WasmModuleObjectBuilderStreaming&) =
delete;
WasmModuleObjectBuilderStreaming(WasmModuleObjectBuilderStreaming&&) =
default;
WasmModuleObjectBuilderStreaming& operator=(
const WasmModuleObjectBuilderStreaming&) = delete;
WasmModuleObjectBuilderStreaming& operator=(
WasmModuleObjectBuilderStreaming&&) = default;
Isolate* isolate_ = nullptr;
#if V8_CC_MSVC
/**
* We don't need the static Copy API, so the default
* NonCopyablePersistentTraits would be sufficient, however,
* MSVC eagerly instantiates the Copy.
* We ensure we don't use Copy, however, by compiling with the
* defaults everywhere else.
*/
Persistent<Promise, CopyablePersistentTraits<Promise>> promise_;
#else
Persistent<Promise> promise_;
#endif
std::shared_ptr<internal::wasm::StreamingDecoder> streaming_decoder_;
};
} // namespace v8 } // namespace v8
#endif // INCLUDE_V8_WASM_H_ #endif // INCLUDE_V8_WASM_H_


@ -63,13 +63,6 @@ enum class WeakCallbackType {
* Passes the first two internal fields of the object back to the callback. * Passes the first two internal fields of the object back to the callback.
*/ */
kInternalFields, kInternalFields,
/**
* Passes a user-defined void* parameter back to the callback. Will do so
* before the object is actually reclaimed, allowing it to be resurrected. In
* this case it is not possible to set a second-pass callback.
*/
kFinalizer V8_ENUM_DEPRECATED("Resurrecting finalizers are deprecated "
"and will not be supported going forward.")
}; };
template <class T> template <class T>

View file

@ -308,6 +308,7 @@ path. Add it with -I<path> to the command line
// V8_HAS_BUILTIN_SADD_OVERFLOW - __builtin_sadd_overflow() supported // V8_HAS_BUILTIN_SADD_OVERFLOW - __builtin_sadd_overflow() supported
// V8_HAS_BUILTIN_SSUB_OVERFLOW - __builtin_ssub_overflow() supported // V8_HAS_BUILTIN_SSUB_OVERFLOW - __builtin_ssub_overflow() supported
// V8_HAS_BUILTIN_UADD_OVERFLOW - __builtin_uadd_overflow() supported // V8_HAS_BUILTIN_UADD_OVERFLOW - __builtin_uadd_overflow() supported
// V8_HAS_BUILTIN_SMUL_OVERFLOW - __builtin_smul_overflow() supported
// V8_HAS_COMPUTED_GOTO - computed goto/labels as values // V8_HAS_COMPUTED_GOTO - computed goto/labels as values
// supported // supported
// V8_HAS_DECLSPEC_NOINLINE - __declspec(noinline) supported // V8_HAS_DECLSPEC_NOINLINE - __declspec(noinline) supported
@ -344,6 +345,7 @@ path. Add it with -I<path> to the command line
# define V8_HAS_CPP_ATTRIBUTE_NO_UNIQUE_ADDRESS \ # define V8_HAS_CPP_ATTRIBUTE_NO_UNIQUE_ADDRESS \
(V8_HAS_CPP_ATTRIBUTE(no_unique_address)) (V8_HAS_CPP_ATTRIBUTE(no_unique_address))
# define V8_HAS_BUILTIN_ASSUME (__has_builtin(__builtin_assume))
# define V8_HAS_BUILTIN_ASSUME_ALIGNED (__has_builtin(__builtin_assume_aligned)) # define V8_HAS_BUILTIN_ASSUME_ALIGNED (__has_builtin(__builtin_assume_aligned))
# define V8_HAS_BUILTIN_BSWAP16 (__has_builtin(__builtin_bswap16)) # define V8_HAS_BUILTIN_BSWAP16 (__has_builtin(__builtin_bswap16))
# define V8_HAS_BUILTIN_BSWAP32 (__has_builtin(__builtin_bswap32)) # define V8_HAS_BUILTIN_BSWAP32 (__has_builtin(__builtin_bswap32))
@ -356,6 +358,8 @@ path. Add it with -I<path> to the command line
# define V8_HAS_BUILTIN_SADD_OVERFLOW (__has_builtin(__builtin_sadd_overflow)) # define V8_HAS_BUILTIN_SADD_OVERFLOW (__has_builtin(__builtin_sadd_overflow))
# define V8_HAS_BUILTIN_SSUB_OVERFLOW (__has_builtin(__builtin_ssub_overflow)) # define V8_HAS_BUILTIN_SSUB_OVERFLOW (__has_builtin(__builtin_ssub_overflow))
# define V8_HAS_BUILTIN_UADD_OVERFLOW (__has_builtin(__builtin_uadd_overflow)) # define V8_HAS_BUILTIN_UADD_OVERFLOW (__has_builtin(__builtin_uadd_overflow))
# define V8_HAS_BUILTIN_SMUL_OVERFLOW (__has_builtin(__builtin_smul_overflow))
# define V8_HAS_BUILTIN_UNREACHABLE (__has_builtin(__builtin_unreachable))
// Clang has no __has_feature for computed gotos. // Clang has no __has_feature for computed gotos.
// GCC doc: https://gcc.gnu.org/onlinedocs/gcc/Labels-as-Values.html // GCC doc: https://gcc.gnu.org/onlinedocs/gcc/Labels-as-Values.html
@ -394,6 +398,7 @@ path. Add it with -I<path> to the command line
# define V8_HAS_BUILTIN_EXPECT 1 # define V8_HAS_BUILTIN_EXPECT 1
# define V8_HAS_BUILTIN_FRAME_ADDRESS 1 # define V8_HAS_BUILTIN_FRAME_ADDRESS 1
# define V8_HAS_BUILTIN_POPCOUNT 1 # define V8_HAS_BUILTIN_POPCOUNT 1
# define V8_HAS_BUILTIN_UNREACHABLE 1
// GCC doc: https://gcc.gnu.org/onlinedocs/gcc/Labels-as-Values.html // GCC doc: https://gcc.gnu.org/onlinedocs/gcc/Labels-as-Values.html
#define V8_HAS_COMPUTED_GOTO 1 #define V8_HAS_COMPUTED_GOTO 1
@ -425,6 +430,18 @@ path. Add it with -I<path> to the command line
# define V8_INLINE inline # define V8_INLINE inline
#endif #endif
#ifdef DEBUG
// In debug mode, check assumptions instead of actually adding annotations.
# define V8_ASSUME(condition) DCHECK(condition)
#elif V8_HAS_BUILTIN_ASSUME
# define V8_ASSUME(condition) __builtin_assume(condition)
#elif V8_HAS_BUILTIN_UNREACHABLE
# define V8_ASSUME(condition) \
do { if (!(condition)) __builtin_unreachable(); } while (false)
#else
# define V8_ASSUME(condition)
#endif
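
A small usage sketch: the condition is DCHECKed in debug builds and handed to the optimizer otherwise, so it must genuinely hold (function and parameters are illustrative):

// Assumes <cstddef> is included for size_t.
int LoadDigit(const int* digits, size_t length, size_t index) {
  V8_ASSUME(index < length);  // Checked in DEBUG, assumed in release.
  return digits[index];
}
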
#if V8_HAS_BUILTIN_ASSUME_ALIGNED #if V8_HAS_BUILTIN_ASSUME_ALIGNED
# define V8_ASSUME_ALIGNED(ptr, alignment) \ # define V8_ASSUME_ALIGNED(ptr, alignment) \
__builtin_assume_aligned((ptr), (alignment)) __builtin_assume_aligned((ptr), (alignment))
@ -471,6 +488,34 @@ path. Add it with -I<path> to the command line
#endif #endif
#if defined(V8_IMMINENT_DEPRECATION_WARNINGS) || \
defined(V8_DEPRECATION_WARNINGS)
#if defined(V8_CC_MSVC)
# define START_ALLOW_USE_DEPRECATED() \
__pragma(warning(push)) \
__pragma(warning(disable : 4996))
# define END_ALLOW_USE_DEPRECATED() __pragma(warning(pop))
#else // !defined(V8_CC_MSVC)
# define START_ALLOW_USE_DEPRECATED() \
_Pragma("GCC diagnostic push") \
_Pragma("GCC diagnostic ignored \"-Wdeprecated-declarations\"")
#define END_ALLOW_USE_DEPRECATED() _Pragma("GCC diagnostic pop")
#endif // !defined(V8_CC_MSVC)
#else // !(defined(V8_IMMINENT_DEPRECATION_WARNINGS) ||
// defined(V8_DEPRECATION_WARNINGS))
#define START_ALLOW_USE_DEPRECATED()
#define END_ALLOW_USE_DEPRECATED()
#endif // !(defined(V8_IMMINENT_DEPRECATION_WARNINGS) ||
// defined(V8_DEPRECATION_WARNINGS))
#define ALLOW_COPY_AND_MOVE_WITH_DEPRECATED_FIELDS(ClassName) \
START_ALLOW_USE_DEPRECATED() \
ClassName(const ClassName&) = default; \
ClassName(ClassName&&) = default; \
ClassName& operator=(const ClassName&) = default; \
ClassName& operator=(ClassName&&) = default; \
END_ALLOW_USE_DEPRECATED()
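
A sketch of the intended use, with a hypothetical struct whose deprecated field must stay copyable without triggering deprecation warnings at the defaulted members:

struct EmbedderOptions {
  EmbedderOptions() = default;
  V8_DEPRECATED("Use new_flag instead") bool old_flag = false;
  bool new_flag = false;
  // Implicitly-defaulted copy/move members would otherwise warn about old_flag.
  ALLOW_COPY_AND_MOVE_WITH_DEPRECATED_FIELDS(EmbedderOptions)
};
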
#if defined(__GNUC__) && !defined(__clang__) && (__GNUC__ < 6) #if defined(__GNUC__) && !defined(__clang__) && (__GNUC__ < 6)
# define V8_ENUM_DEPRECATED(message) # define V8_ENUM_DEPRECATED(message)
# define V8_ENUM_DEPRECATE_SOON(message) # define V8_ENUM_DEPRECATE_SOON(message)
@ -580,26 +625,215 @@ V8 shared library set USING_V8_SHARED.
#endif // V8_OS_WIN #endif // V8_OS_WIN
// The sandbox is available (i.e. defined) when pointer compression
// is enabled, but it is only used when V8_SANDBOX is enabled as
// well. This allows better test coverage of the sandbox.
#if defined(V8_COMPRESS_POINTERS)
#define V8_SANDBOX_IS_AVAILABLE
#endif
#if defined(V8_SANDBOX) && !defined(V8_SANDBOX_IS_AVAILABLE)
#error Inconsistent configuration: sandbox is enabled but not available
#endif
// From C++17 onwards, static constexpr member variables are defined to be
// "inline", and adding a separate definition for them can trigger deprecation
// warnings. For C++14 and below, however, these definitions are required.
#if __cplusplus < 201703L && (!defined(_MSVC_LANG) || _MSVC_LANG < 201703L)
#define V8_STATIC_CONSTEXPR_VARIABLES_NEED_DEFINITIONS
#endif
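
A usage sketch for the guard above, with a hypothetical class:

struct Limits {
  static constexpr int kMaxDigits = 8;
};

#ifdef V8_STATIC_CONSTEXPR_VARIABLES_NEED_DEFINITIONS
// The out-of-line definition is required before C++17 and redundant
// (potentially warning as deprecated) from C++17 onwards.
constexpr int Limits::kMaxDigits;
#endif
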
// clang-format on // clang-format on
// Processor architecture detection. For more info on what's defined, see:
// http://msdn.microsoft.com/en-us/library/b0084kay.aspx
// http://www.agner.org/optimize/calling_conventions.pdf
// or with gcc, run: "echo | gcc -E -dM -"
// The V8_HOST_ARCH_* macros correspond to the architecture on which V8, as a
// virtual machine and compiler, runs. Don't confuse this with the architecture
// on which V8 is built.
#if defined(_M_X64) || defined(__x86_64__)
#define V8_HOST_ARCH_X64 1
#if defined(__x86_64__) && __SIZEOF_POINTER__ == 4 // Check for x32.
#define V8_HOST_ARCH_32_BIT 1
#else
#define V8_HOST_ARCH_64_BIT 1
#endif
#elif defined(_M_IX86) || defined(__i386__)
#define V8_HOST_ARCH_IA32 1
#define V8_HOST_ARCH_32_BIT 1
#elif defined(__AARCH64EL__) || defined(_M_ARM64)
#define V8_HOST_ARCH_ARM64 1
#define V8_HOST_ARCH_64_BIT 1
#elif defined(__ARMEL__)
#define V8_HOST_ARCH_ARM 1
#define V8_HOST_ARCH_32_BIT 1
#elif defined(__mips64)
#define V8_HOST_ARCH_MIPS64 1
#define V8_HOST_ARCH_64_BIT 1
#elif defined(__loongarch64)
#define V8_HOST_ARCH_LOONG64 1
#define V8_HOST_ARCH_64_BIT 1
#elif defined(__PPC64__) || defined(_ARCH_PPC64)
#define V8_HOST_ARCH_PPC64 1
#define V8_HOST_ARCH_64_BIT 1
#elif defined(__PPC__) || defined(_ARCH_PPC)
#define V8_HOST_ARCH_PPC 1
#define V8_HOST_ARCH_32_BIT 1
#elif defined(__s390__) || defined(__s390x__)
#define V8_HOST_ARCH_S390 1
#if defined(__s390x__)
#define V8_HOST_ARCH_64_BIT 1
#else
#define V8_HOST_ARCH_32_BIT 1
#endif
#elif defined(__riscv) || defined(__riscv__)
#if __riscv_xlen == 64
#define V8_HOST_ARCH_RISCV64 1
#define V8_HOST_ARCH_64_BIT 1
#elif __riscv_xlen == 32
#define V8_HOST_ARCH_RISCV32 1
#define V8_HOST_ARCH_32_BIT 1
#else
#error "Cannot detect Riscv's bitwidth"
#endif
#else
#error "Host architecture was not detected as supported by v8"
#endif
// Target architecture detection. This corresponds to the architecture for which
// V8's JIT will generate code (the last stage of the canadian cross-compiler).
// The macros may be set externally. If not, detect in the same way as the host
// architecture, that is, target the native environment as presented by the
// compiler.
#if !V8_TARGET_ARCH_X64 && !V8_TARGET_ARCH_IA32 && !V8_TARGET_ARCH_ARM && \
!V8_TARGET_ARCH_ARM64 && !V8_TARGET_ARCH_MIPS64 && !V8_TARGET_ARCH_PPC && \
!V8_TARGET_ARCH_PPC64 && !V8_TARGET_ARCH_S390 && \
!V8_TARGET_ARCH_RISCV64 && !V8_TARGET_ARCH_LOONG64 && \
!V8_TARGET_ARCH_RISCV32
#if defined(_M_X64) || defined(__x86_64__)
#define V8_TARGET_ARCH_X64 1
#elif defined(_M_IX86) || defined(__i386__)
#define V8_TARGET_ARCH_IA32 1
#elif defined(__AARCH64EL__) || defined(_M_ARM64)
#define V8_TARGET_ARCH_ARM64 1
#elif defined(__ARMEL__)
#define V8_TARGET_ARCH_ARM 1
#elif defined(__mips64)
#define V8_TARGET_ARCH_MIPS64 1
#elif defined(_ARCH_PPC64)
#define V8_TARGET_ARCH_PPC64 1
#elif defined(_ARCH_PPC)
#define V8_TARGET_ARCH_PPC 1
#elif defined(__s390__)
#define V8_TARGET_ARCH_S390 1
#if defined(__s390x__)
#define V8_TARGET_ARCH_S390X 1
#endif
#elif defined(__riscv) || defined(__riscv__)
#if __riscv_xlen == 64
#define V8_TARGET_ARCH_RISCV64 1
#elif __riscv_xlen == 32
#define V8_TARGET_ARCH_RISCV32 1
#endif
#else
#error Target architecture was not detected as supported by v8
#endif
#endif
// Determine architecture pointer size.
#if V8_TARGET_ARCH_IA32
#define V8_TARGET_ARCH_32_BIT 1
#elif V8_TARGET_ARCH_X64
#if !V8_TARGET_ARCH_32_BIT && !V8_TARGET_ARCH_64_BIT
#if defined(__x86_64__) && __SIZEOF_POINTER__ == 4 // Check for x32.
#define V8_TARGET_ARCH_32_BIT 1
#else
#define V8_TARGET_ARCH_64_BIT 1
#endif
#endif
#elif V8_TARGET_ARCH_ARM
#define V8_TARGET_ARCH_32_BIT 1
#elif V8_TARGET_ARCH_ARM64
#define V8_TARGET_ARCH_64_BIT 1
#elif V8_TARGET_ARCH_MIPS
#define V8_TARGET_ARCH_32_BIT 1
#elif V8_TARGET_ARCH_MIPS64
#define V8_TARGET_ARCH_64_BIT 1
#elif V8_TARGET_ARCH_LOONG64
#define V8_TARGET_ARCH_64_BIT 1
#elif V8_TARGET_ARCH_PPC
#define V8_TARGET_ARCH_32_BIT 1
#elif V8_TARGET_ARCH_PPC64
#define V8_TARGET_ARCH_64_BIT 1
#elif V8_TARGET_ARCH_S390
#if V8_TARGET_ARCH_S390X
#define V8_TARGET_ARCH_64_BIT 1
#else
#define V8_TARGET_ARCH_32_BIT 1
#endif
#elif V8_TARGET_ARCH_RISCV64
#define V8_TARGET_ARCH_64_BIT 1
#elif V8_TARGET_ARCH_RISCV32
#define V8_TARGET_ARCH_32_BIT 1
#else
#error Unknown target architecture pointer size
#endif
// Check for supported combinations of host and target architectures.
#if V8_TARGET_ARCH_IA32 && !V8_HOST_ARCH_IA32
#error Target architecture ia32 is only supported on ia32 host
#endif
#if (V8_TARGET_ARCH_X64 && V8_TARGET_ARCH_64_BIT && \
!((V8_HOST_ARCH_X64 || V8_HOST_ARCH_ARM64) && V8_HOST_ARCH_64_BIT))
#error Target architecture x64 is only supported on x64 and arm64 host
#endif
#if (V8_TARGET_ARCH_X64 && V8_TARGET_ARCH_32_BIT && \
!(V8_HOST_ARCH_X64 && V8_HOST_ARCH_32_BIT))
#error Target architecture x32 is only supported on x64 host with x32 support
#endif
#if (V8_TARGET_ARCH_ARM && !(V8_HOST_ARCH_IA32 || V8_HOST_ARCH_ARM))
#error Target architecture arm is only supported on arm and ia32 host
#endif
#if (V8_TARGET_ARCH_ARM64 && !(V8_HOST_ARCH_X64 || V8_HOST_ARCH_ARM64))
#error Target architecture arm64 is only supported on arm64 and x64 host
#endif
#if (V8_TARGET_ARCH_MIPS64 && !(V8_HOST_ARCH_X64 || V8_HOST_ARCH_MIPS64))
#error Target architecture mips64 is only supported on mips64 and x64 host
#endif
#if (V8_TARGET_ARCH_RISCV64 && !(V8_HOST_ARCH_X64 || V8_HOST_ARCH_RISCV64))
#error Target architecture riscv64 is only supported on riscv64 and x64 host
#endif
#if (V8_TARGET_ARCH_RISCV32 && !(V8_HOST_ARCH_IA32 || V8_HOST_ARCH_RISCV32))
#error Target architecture riscv32 is only supported on riscv32 and ia32 host
#endif
#if (V8_TARGET_ARCH_LOONG64 && !(V8_HOST_ARCH_X64 || V8_HOST_ARCH_LOONG64))
#error Target architecture loong64 is only supported on loong64 and x64 host
#endif
// Determine architecture endianness.
#if V8_TARGET_ARCH_IA32
#define V8_TARGET_LITTLE_ENDIAN 1
#elif V8_TARGET_ARCH_X64
#define V8_TARGET_LITTLE_ENDIAN 1
#elif V8_TARGET_ARCH_ARM
#define V8_TARGET_LITTLE_ENDIAN 1
#elif V8_TARGET_ARCH_ARM64
#define V8_TARGET_LITTLE_ENDIAN 1
#elif V8_TARGET_ARCH_LOONG64
#define V8_TARGET_LITTLE_ENDIAN 1
#elif V8_TARGET_ARCH_MIPS64
#if defined(__MIPSEB__) || defined(V8_TARGET_ARCH_MIPS64_BE)
#define V8_TARGET_BIG_ENDIAN 1
#else
#define V8_TARGET_LITTLE_ENDIAN 1
#endif
#elif defined(__BIG_ENDIAN__) // FOR PPCGR on AIX
#define V8_TARGET_BIG_ENDIAN 1
#elif V8_TARGET_ARCH_PPC_LE
#define V8_TARGET_LITTLE_ENDIAN 1
#elif V8_TARGET_ARCH_PPC_BE
#define V8_TARGET_BIG_ENDIAN 1
#elif V8_TARGET_ARCH_S390
#if V8_TARGET_ARCH_S390_LE_SIM
#define V8_TARGET_LITTLE_ENDIAN 1
#else
#define V8_TARGET_BIG_ENDIAN 1
#endif
#elif V8_TARGET_ARCH_RISCV32 || V8_TARGET_ARCH_RISCV64
#define V8_TARGET_LITTLE_ENDIAN 1
#elif defined(__BYTE_ORDER__)
#if __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__
#define V8_TARGET_BIG_ENDIAN 1
#else
#define V8_TARGET_LITTLE_ENDIAN 1
#endif
#else
#error Unknown target architecture endianness
#endif
#undef V8_HAS_CPP_ATTRIBUTE #undef V8_HAS_CPP_ATTRIBUTE
#endif // V8CONFIG_H_ #endif // V8CONFIG_H_


@ -20,7 +20,7 @@ def _CommonChecks(input_api, output_api):
'..', 'tools', 'mb', 'mb.py') '..', 'tools', 'mb', 'mb.py')
mb_config_path = input_api.os_path.join(input_api.PresubmitLocalPath(), mb_config_path = input_api.os_path.join(input_api.PresubmitLocalPath(),
'mb_config.pyl') 'mb_config.pyl')
cmd = [input_api.python_executable, mb_script, 'validate', '--config-file', cmd = [input_api.python3_executable, mb_script, 'validate', '--config-file',
mb_config_path] mb_config_path]
kwargs = {'cwd': input_api.PresubmitLocalPath()} kwargs = {'cwd': input_api.PresubmitLocalPath()}
results.extend(input_api.RunTests([ results.extend(input_api.RunTests([


@ -32,7 +32,7 @@
"type": "script", "type": "script",
}, },
"generate-bytecode-expectations": { "generate-bytecode-expectations": {
"label": "//test/cctest:generate-bytecode-expectations", "label": "//test/unittests:generate-bytecode-expectations",
"type": "script", "type": "script",
}, },
"mjsunit": { "mjsunit": {

View file

@ -22,9 +22,6 @@
'ia32.debug': 'default_debug_x86', 'ia32.debug': 'default_debug_x86',
'ia32.optdebug': 'default_optdebug_x86', 'ia32.optdebug': 'default_optdebug_x86',
'ia32.release': 'default_release_x86', 'ia32.release': 'default_release_x86',
'mipsel.debug': 'default_debug_mipsel',
'mipsel.optdebug': 'default_optdebug_mipsel',
'mipsel.release': 'default_release_mipsel',
'mips64el.debug': 'default_debug_mips64el', 'mips64el.debug': 'default_debug_mips64el',
'mips64el.optdebug': 'default_optdebug_mips64el', 'mips64el.optdebug': 'default_optdebug_mips64el',
'mips64el.release': 'default_release_mips64el', 'mips64el.release': 'default_release_mips64el',
@ -67,33 +64,38 @@
'V8 Linux64 - builder (reclient)': 'release_x64_reclient', 'V8 Linux64 - builder (reclient)': 'release_x64_reclient',
'V8 Linux64 - builder (reclient compare)': 'release_x64_reclient', 'V8 Linux64 - builder (reclient compare)': 'release_x64_reclient',
'V8 Linux64 - debug builder': 'debug_x64', 'V8 Linux64 - debug builder': 'debug_x64',
'V8 Linux64 - dict tracking - debug - builder': 'debug_x64_dict_tracking_trybot',
'V8 Linux64 - external code space - debug - builder': 'debug_x64_external_code_space', 'V8 Linux64 - external code space - debug - builder': 'debug_x64_external_code_space',
'V8 Linux64 - custom snapshot - debug builder': 'debug_x64_custom', 'V8 Linux64 - custom snapshot - debug builder': 'debug_x64_custom',
'V8 Linux64 - heap sandbox - debug - builder': 'debug_x64_heap_sandbox', 'V8 Linux64 - heap sandbox - debug - builder': 'debug_x64_heap_sandbox',
'V8 Linux64 - internal snapshot - builder': 'release_x64_internal', 'V8 Linux64 - internal snapshot - builder': 'release_x64_internal',
'V8 Linux64 - debug - header includes - builder': 'debug_x64_header_includes', 'V8 Linux64 - debug - header includes - builder': 'debug_x64_header_includes',
'V8 Linux64 - no sandbox - debug builder': 'debug_x64_no_sandbox',
'V8 Linux64 - no sandbox - builder': 'release_x64_no_sandbox',
'V8 Linux64 - shared - builder': 'release_x64_shared_verify_heap', 'V8 Linux64 - shared - builder': 'release_x64_shared_verify_heap',
'V8 Linux64 - verify csa - builder': 'release_x64_verify_csa', 'V8 Linux64 - verify csa - builder': 'release_x64_verify_csa',
'V8 Linux64 - no wasm - builder': 'release_x64_webassembly_disabled', 'V8 Linux64 - no wasm - builder': 'release_x64_webassembly_disabled',
# Windows. # Windows.
'V8 Win32 - builder': 'release_x86_minimal_symbols', 'V8 Win32 - builder': 'release_x86_minimal_symbols',
'V8 Win32 - builder (goma cache silo)': 'release_x64', 'V8 Win32 - builder (goma cache silo)': 'release_x86',
'V8 Win32 - builder (reclient)': 'release_x86_minimal_symbols_reclient', 'V8 Win32 - builder (reclient)': 'release_x86_minimal_symbols_reclient',
'V8 Win32 - builder (reclient compare)': 'release_x86_minimal_symbols_reclient', 'V8 Win32 - builder (reclient compare)': 'release_x86_minimal_symbols_reclient',
'V8 Win32 - debug builder': 'debug_x86_minimal_symbols', 'V8 Win32 - debug builder': 'debug_x86_minimal_symbols',
# TODO(machenbach): Remove after switching to x64 on infra side. # TODO(machenbach): Remove after switching to x64 on infra side.
'V8 Win64 ASAN - builder': 'release_x64_asan_no_lsan', 'V8 Win64 ASAN - builder': 'release_x64_asan_no_lsan',
'V8 Win64 - builder': 'release_x64_minimal_symbols', 'V8 Win64 - builder': 'release_x64_minimal_symbols',
'V8 Win64 - builder (reclient)': 'release_x64_minimal_symbols_reclient',
'V8 Win64 - builder (reclient compare)': 'release_x64_minimal_symbols_reclient',
'V8 Win64 - dev image': 'release_x64_minimal_symbols', 'V8 Win64 - dev image': 'release_x64_minimal_symbols',
'V8 Win64 - debug builder': 'debug_x64_minimal_symbols', 'V8 Win64 - debug builder': 'debug_x64_minimal_symbols',
'V8 Win64 - msvc - builder': 'release_x64_msvc', 'V8 Win64 - msvc - builder': 'release_x64_msvc',
# Mac. # Mac.
'V8 Mac64 - builder': 'release_x64', 'V8 Mac64 - builder': 'release_x64',
'V8 Mac64 - debug builder': 'debug_x64', 'V8 Mac64 - debug builder': 'debug_x64',
'V8 Mac64 - builder (reclient)': 'release_x64_reclient',
'V8 Official Mac ARM64': 'release_arm64', 'V8 Official Mac ARM64': 'release_arm64',
'V8 Official Mac ARM64 Debug': 'debug_arm64', 'V8 Official Mac ARM64 Debug': 'debug_arm64',
'V8 Mac64 ASAN - builder': 'release_x64_asan_no_lsan', 'V8 Mac64 ASAN - builder': 'release_x64_asan_no_lsan',
'V8 Mac - arm64 - no pointer compression debug builder': 'debug_arm64_no_pointer_compression',
'V8 Mac - arm64 - release builder': 'release_arm64', 'V8 Mac - arm64 - release builder': 'release_arm64',
'V8 Mac - arm64 - debug builder': 'debug_arm64', 'V8 Mac - arm64 - debug builder': 'debug_arm64',
'V8 Mac - arm64 - sim - debug builder': 'debug_simulate_arm64', 'V8 Mac - arm64 - sim - debug builder': 'debug_simulate_arm64',
@ -104,21 +106,19 @@
'V8 Linux64 TSAN - no-concurrent-marking - builder': 'release_x64_tsan_no_cm', 'V8 Linux64 TSAN - no-concurrent-marking - builder': 'release_x64_tsan_no_cm',
'V8 Linux - arm64 - sim - CFI - builder': 'release_simulate_arm64_cfi', 'V8 Linux - arm64 - sim - CFI - builder': 'release_simulate_arm64_cfi',
'V8 Linux - arm64 - sim - MSAN - builder': 'release_simulate_arm64_msan', 'V8 Linux - arm64 - sim - MSAN - builder': 'release_simulate_arm64_msan',
# Misc.
'V8 Linux gcc - builder': 'release_x86_gcc',
# FYI. # FYI.
'V8 iOS - sim - builder': 'release_x64_ios_simulator', 'V8 iOS - sim - builder': 'release_x64_ios_simulator',
'V8 Linux64 - arm64 - sim - heap sandbox - debug - builder': 'debug_x64_heap_sandbox_arm64_sim', 'V8 Linux64 - arm64 - sim - heap sandbox - debug - builder': 'debug_x64_heap_sandbox_arm64_sim',
'V8 Linux64 - arm64 - sim - no pointer compression - builder':
'release_simulate_arm64_no_pointer_compression',
'V8 Linux64 - cppgc-non-default - debug - builder': 'debug_x64_non_default_cppgc', 'V8 Linux64 - cppgc-non-default - debug - builder': 'debug_x64_non_default_cppgc',
'V8 Linux64 - debug - perfetto - builder': 'debug_x64_perfetto', 'V8 Linux64 - debug - perfetto - builder': 'debug_x64_perfetto',
'V8 Linux64 - disable runtime call stats - builder': 'release_x64_disable_runtime_call_stats', 'V8 Linux64 - disable runtime call stats - builder': 'release_x64_disable_runtime_call_stats',
'V8 Linux64 - debug - single generation - builder': 'debug_x64_single_generation', 'V8 Linux64 - debug - single generation - builder': 'debug_x64_single_generation',
'V8 Linux64 - pointer compression - builder': 'release_x64_pointer_compression', 'V8 Linux64 - no pointer compression - builder': 'release_x64_no_pointer_compression',
'V8 Linux64 - pointer compression without dchecks': 'V8 Linux64 gcc - builder': 'release_x64_gcc',
'release_x64_pointer_compression_without_dchecks',
'V8 Linux64 - arm64 - sim - pointer compression - builder':
'release_simulate_arm64_pointer_compression',
'V8 Linux64 gcc - debug builder': 'debug_x64_gcc', 'V8 Linux64 gcc - debug builder': 'debug_x64_gcc',
'V8 Linux64 gcc light - debug builder': 'debug_x64_gcc',
'V8 Fuchsia - builder': 'release_x64_fuchsia', 'V8 Fuchsia - builder': 'release_x64_fuchsia',
'V8 Fuchsia - debug builder': 'debug_x64_fuchsia', 'V8 Fuchsia - debug builder': 'debug_x64_fuchsia',
'V8 Linux64 - cfi - builder': 'release_x64_cfi', 'V8 Linux64 - cfi - builder': 'release_x64_cfi',
@ -126,7 +126,7 @@
'V8 Linux - vtunejit': 'debug_x86_vtunejit', 'V8 Linux - vtunejit': 'debug_x86_vtunejit',
'V8 Linux64 - gcov coverage': 'release_x64_gcc_coverage', 'V8 Linux64 - gcov coverage': 'release_x64_gcc_coverage',
'V8 Linux64 - Fuzzilli - builder': 'release_x64_fuzzilli', 'V8 Linux64 - Fuzzilli - builder': 'release_x64_fuzzilli',
'V8 Linux - predictable - builder': 'release_x86_predictable', 'V8 Linux64 - predictable - builder': 'release_x64_predictable',
'V8 Linux - full debug builder': 'full_debug_x86', 'V8 Linux - full debug builder': 'full_debug_x86',
'V8 Mac64 - full debug builder': 'full_debug_x64', 'V8 Mac64 - full debug builder': 'full_debug_x64',
'V8 Random Deopt Fuzzer - debug': 'debug_x64', 'V8 Random Deopt Fuzzer - debug': 'debug_x64',
@ -166,6 +166,8 @@
'V8 Clusterfuzz Linux64 TSAN - release builder': 'release_x64_tsan', 'V8 Clusterfuzz Linux64 TSAN - release builder': 'release_x64_tsan',
'V8 Clusterfuzz Linux64 UBSan - release builder': 'V8 Clusterfuzz Linux64 UBSan - release builder':
'release_x64_ubsan_recover', 'release_x64_ubsan_recover',
'V8 Clusterfuzz Linux64 ASAN sandbox testing - release builder':
'release_x64_asan_sandbox_testing',
}, },
'client.v8.perf' : { 'client.v8.perf' : {
'V8 Arm - builder - perf': 'official_arm', 'V8 Arm - builder - perf': 'official_arm',
@ -173,6 +175,7 @@
'V8 Android Arm64 - builder - perf': 'official_android_arm64', 'V8 Android Arm64 - builder - perf': 'official_android_arm64',
'V8 Linux - builder - perf': 'official_x86', 'V8 Linux - builder - perf': 'official_x86',
'V8 Linux64 - builder - perf': 'official_x64', 'V8 Linux64 - builder - perf': 'official_x64',
'V8 Mac Arm64 - builder - perf': 'official_mac_arm64',
}, },
'client.v8.ports': { 'client.v8.ports': {
# Arm. # Arm.
@ -191,12 +194,12 @@
'V8 Linux - arm64 - sim - debug builder': 'debug_simulate_arm64', 'V8 Linux - arm64 - sim - debug builder': 'debug_simulate_arm64',
'V8 Linux - arm64 - sim - gc stress - builder': 'debug_simulate_arm64', 'V8 Linux - arm64 - sim - gc stress - builder': 'debug_simulate_arm64',
# Mips. # Mips.
'V8 Linux - mipsel - sim - builder': 'release_simulate_mipsel',
'V8 Linux - mips64el - sim - builder': 'release_simulate_mips64el', 'V8 Linux - mips64el - sim - builder': 'release_simulate_mips64el',
# IBM. # IBM.
'V8 Linux - ppc64 - sim - builder': 'release_simulate_ppc64', 'V8 Linux - ppc64 - sim - builder': 'release_simulate_ppc64',
'V8 Linux - s390x - sim - builder': 'release_simulate_s390x', 'V8 Linux - s390x - sim - builder': 'release_simulate_s390x',
# RISC-V # RISC-V
'V8 Linux - riscv32 - sim - builder': 'release_simulate_riscv32',
'V8 Linux - riscv64 - sim - builder': 'release_simulate_riscv64', 'V8 Linux - riscv64 - sim - builder': 'release_simulate_riscv64',
# Loongson # Loongson
'V8 Linux - loong64 - sim - builder': 'release_simulate_loong64', 'V8 Linux - loong64 - sim - builder': 'release_simulate_loong64',
@ -216,28 +219,34 @@
'v8_linux_noi18n_compile_dbg': 'debug_x86_no_i18n', 'v8_linux_noi18n_compile_dbg': 'debug_x86_no_i18n',
'v8_linux_noi18n_rel_ng': 'release_x86_no_i18n_trybot', 'v8_linux_noi18n_rel_ng': 'release_x86_no_i18n_trybot',
'v8_linux_gc_stress_dbg_ng': 'debug_x86_trybot', 'v8_linux_gc_stress_dbg_ng': 'debug_x86_trybot',
'v8_linux_gcc_compile_rel': 'release_x86_gcc_minimal_symbols',
'v8_linux_gcc_rel_ng': 'release_x86_gcc_minimal_symbols',
'v8_linux_shared_compile_rel': 'release_x86_shared_verify_heap', 'v8_linux_shared_compile_rel': 'release_x86_shared_verify_heap',
'v8_linux_vtunejit': 'debug_x86_vtunejit', 'v8_linux_vtunejit': 'debug_x86_vtunejit',
'v8_linux64_arm64_pointer_compression_rel_ng': 'v8_linux64_arm64_no_pointer_compression_rel_ng':
'release_simulate_arm64_pointer_compression', 'release_simulate_arm64_no_pointer_compression',
'v8_linux64_cppgc_non_default_dbg_ng': 'debug_x64_non_default_cppgc', 'v8_linux64_cppgc_non_default_dbg_ng': 'debug_x64_non_default_cppgc',
'v8_linux64_dbg_ng': 'debug_x64_trybot', 'v8_linux64_dbg_ng': 'debug_x64_trybot',
'v8_linux64_no_sandbox_dbg_ng': 'debug_x64_no_sandbox',
'v8_linux64_dict_tracking_dbg_ng': 'debug_x64_dict_tracking_trybot', 'v8_linux64_dict_tracking_dbg_ng': 'debug_x64_dict_tracking_trybot',
'v8_linux64_disable_runtime_call_stats_rel_ng': 'release_x64_disable_runtime_call_stats', 'v8_linux64_disable_runtime_call_stats_rel_ng': 'release_x64_disable_runtime_call_stats',
'v8_linux64_external_code_space_dbg_ng': 'debug_x64_external_code_space', 'v8_linux64_external_code_space_dbg_ng': 'debug_x64_external_code_space',
'v8_linux64_gc_stress_custom_snapshot_dbg_ng': 'debug_x64_trybot_custom', 'v8_linux64_gc_stress_custom_snapshot_dbg_ng': 'debug_x64_trybot_custom',
'v8_linux64_gc_stress_dbg_ng': 'debug_x64_trybot',
'v8_linux64_gcc_compile_dbg': 'debug_x64_gcc', 'v8_linux64_gcc_compile_dbg': 'debug_x64_gcc',
'v8_linux64_gcc_compile_rel': 'release_x64_gcc',
'v8_linux64_gcc_light_compile_dbg': 'debug_x64_gcc',
'v8_linux64_gcc_rel_ng': 'release_x64_gcc',
'v8_linux64_gcov_coverage': 'release_x64_gcc_coverage', 'v8_linux64_gcov_coverage': 'release_x64_gcc_coverage',
'v8_linux64_header_includes_dbg': 'debug_x64_header_includes', 'v8_linux64_header_includes_dbg': 'debug_x64_header_includes',
'v8_linux64_heap_sandbox_dbg_ng': 'debug_x64_heap_sandbox', 'v8_linux64_heap_sandbox_dbg_ng': 'debug_x64_heap_sandbox',
'v8_linux64_minor_mc_dbg_ng': 'debug_x64_trybot',
'v8_linux_arm64_sim_heap_sandbox_dbg_ng': 'debug_x64_heap_sandbox_arm64_sim', 'v8_linux_arm64_sim_heap_sandbox_dbg_ng': 'debug_x64_heap_sandbox_arm64_sim',
'v8_linux64_fyi_rel_ng': 'release_x64_test_features_trybot', 'v8_linux64_fyi_rel_ng': 'release_x64_test_features_trybot',
'v8_linux64_nodcheck_rel_ng': 'release_x64', 'v8_linux64_nodcheck_rel_ng': 'release_x64',
'v8_linux64_perfetto_dbg_ng': 'debug_x64_perfetto', 'v8_linux64_perfetto_dbg_ng': 'debug_x64_perfetto',
'v8_linux64_pointer_compression_rel_ng': 'release_x64_pointer_compression', 'v8_linux64_no_pointer_compression_rel_ng': 'release_x64_no_pointer_compression',
'v8_linux64_rel_ng': 'release_x64_test_features_trybot', 'v8_linux64_rel_ng': 'release_x64_test_features_trybot',
'v8_linux64_no_sandbox_rel_ng': 'release_x64_no_sandbox',
'v8_linux64_predictable_rel_ng': 'release_x64_predictable',
'v8_linux64_shared_compile_rel': 'release_x64_shared_verify_heap', 'v8_linux64_shared_compile_rel': 'release_x64_shared_verify_heap',
'v8_linux64_single_generation_dbg_ng': 'debug_x64_single_generation', 'v8_linux64_single_generation_dbg_ng': 'debug_x64_single_generation',
'v8_linux64_no_wasm_compile_rel': 'release_x64_webassembly_disabled', 'v8_linux64_no_wasm_compile_rel': 'release_x64_webassembly_disabled',
@ -247,6 +256,7 @@
'v8_linux64_fuzzilli_ng': 'release_x64_fuzzilli', 'v8_linux64_fuzzilli_ng': 'release_x64_fuzzilli',
'v8_linux64_loong64_rel_ng': 'release_simulate_loong64', 'v8_linux64_loong64_rel_ng': 'release_simulate_loong64',
'v8_linux64_msan_rel_ng': 'release_simulate_arm64_msan_minimal_symbols', 'v8_linux64_msan_rel_ng': 'release_simulate_arm64_msan_minimal_symbols',
'v8_linux_riscv32_rel_ng': 'release_simulate_riscv32',
'v8_linux64_riscv64_rel_ng': 'release_simulate_riscv64', 'v8_linux64_riscv64_rel_ng': 'release_simulate_riscv64',
'v8_linux64_tsan_rel_ng': 'release_x64_tsan_minimal_symbols', 'v8_linux64_tsan_rel_ng': 'release_x64_tsan_minimal_symbols',
'v8_linux64_tsan_no_cm_rel_ng': 'release_x64_tsan_no_cm', 'v8_linux64_tsan_no_cm_rel_ng': 'release_x64_tsan_no_cm',
@ -267,6 +277,7 @@
'v8_mac_arm64_rel_ng': 'release_arm64', 'v8_mac_arm64_rel_ng': 'release_arm64',
'v8_mac_arm64_dbg_ng': 'debug_arm64', 'v8_mac_arm64_dbg_ng': 'debug_arm64',
'v8_mac_arm64_full_dbg_ng': 'full_debug_arm64', 'v8_mac_arm64_full_dbg_ng': 'full_debug_arm64',
'v8_mac_arm64_no_pointer_compression_dbg_ng': 'debug_arm64_no_pointer_compression',
'v8_mac_arm64_compile_dbg': 'debug_arm64', 'v8_mac_arm64_compile_dbg': 'debug_arm64',
'v8_mac_arm64_compile_rel': 'release_arm64', 'v8_mac_arm64_compile_rel': 'release_arm64',
'v8_mac_arm64_sim_compile_dbg': 'debug_simulate_arm64', 'v8_mac_arm64_sim_compile_dbg': 'debug_simulate_arm64',
@ -279,7 +290,7 @@
'v8_mac64_dbg': 'debug_x64', 'v8_mac64_dbg': 'debug_x64',
'v8_mac64_dbg_ng': 'debug_x64', 'v8_mac64_dbg_ng': 'debug_x64',
'v8_mac64_compile_full_dbg_ng': 'full_debug_x64', 'v8_mac64_compile_full_dbg_ng': 'full_debug_x64',
'v8_mac64_asan_compile_rel_ng': 'release_x64_asan_no_lsan', 'v8_mac64_asan_compile_rel': 'release_x64_asan_no_lsan',
'v8_mac64_asan_rel_ng': 'release_x64_asan_no_lsan', 'v8_mac64_asan_rel_ng': 'release_x64_asan_no_lsan',
'v8_linux_arm_rel_ng': 'release_simulate_arm_trybot', 'v8_linux_arm_rel_ng': 'release_simulate_arm_trybot',
'v8_linux_arm_lite_compile_dbg': 'debug_simulate_arm_lite', 'v8_linux_arm_lite_compile_dbg': 'debug_simulate_arm_lite',
@ -291,7 +302,6 @@
'v8_linux_arm64_cfi_rel_ng' : 'release_simulate_arm64_cfi', 'v8_linux_arm64_cfi_rel_ng' : 'release_simulate_arm64_cfi',
'v8_linux_arm64_dbg_ng': 'debug_simulate_arm64', 'v8_linux_arm64_dbg_ng': 'debug_simulate_arm64',
'v8_linux_arm64_gc_stress_dbg_ng': 'debug_simulate_arm64', 'v8_linux_arm64_gc_stress_dbg_ng': 'debug_simulate_arm64',
'v8_linux_mipsel_compile_rel': 'release_simulate_mipsel',
'v8_linux_mips64el_compile_rel': 'release_simulate_mips64el', 'v8_linux_mips64el_compile_rel': 'release_simulate_mips64el',
'v8_numfuzz_ng': 'release_x64', 'v8_numfuzz_ng': 'release_x64',
'v8_numfuzz_dbg_ng': 'debug_x64', 'v8_numfuzz_dbg_ng': 'debug_x64',
@ -324,12 +334,6 @@
'release', 'simulate_arm64'], 'release', 'simulate_arm64'],
'release_arm64_sample': [ 'release_arm64_sample': [
'release', 'arm64', 'sample'], 'release', 'arm64', 'sample'],
'default_debug_mipsel': [
'debug', 'simulate_mipsel', 'v8_enable_slow_dchecks', 'v8_full_debug'],
'default_optdebug_mipsel': [
'debug', 'simulate_mipsel', 'v8_enable_slow_dchecks'],
'default_release_mipsel': [
'release', 'simulate_mipsel'],
'default_debug_mips64el': [ 'default_debug_mips64el': [
'debug', 'simulate_mips64el', 'v8_enable_slow_dchecks', 'v8_full_debug'], 'debug', 'simulate_mips64el', 'v8_enable_slow_dchecks', 'v8_full_debug'],
'default_optdebug_mips64el': [ 'default_optdebug_mips64el': [
@ -413,11 +417,8 @@
'release_bot', 'simulate_arm64'], 'release_bot', 'simulate_arm64'],
'release_simulate_arm64_cfi': [ 'release_simulate_arm64_cfi': [
'release_bot', 'simulate_arm64', 'v8_control_flow_integrity'], 'release_bot', 'simulate_arm64', 'v8_control_flow_integrity'],
'release_simulate_arm64_pointer_compression': [ 'release_simulate_arm64_no_pointer_compression': [
# TODO(v8:v7703): Make pointer compression bots testing non pointer 'release_bot', 'simulate_arm64_no_sandbox', 'dcheck_always_on',
# compression mode while pointer compression is temporarily enabled
# on arm64
'release_bot', 'simulate_arm64', 'dcheck_always_on',
'v8_enable_slow_dchecks', 'v8_disable_pointer_compression'], 'v8_enable_slow_dchecks', 'v8_disable_pointer_compression'],
'release_simulate_arm64_msan': [ 'release_simulate_arm64_msan': [
'release_bot', 'simulate_arm64', 'msan'], 'release_bot', 'simulate_arm64', 'msan'],
@ -431,12 +432,12 @@
'release_trybot', 'simulate_arm64'], 'release_trybot', 'simulate_arm64'],
'release_simulate_loong64': [ 'release_simulate_loong64': [
'release_bot', 'simulate_loong64'], 'release_bot', 'simulate_loong64'],
'release_simulate_mipsel': [
'release_bot', 'simulate_mipsel'],
'release_simulate_mips64el': [ 'release_simulate_mips64el': [
'release_bot', 'simulate_mips64el'], 'release_bot', 'simulate_mips64el'],
'release_simulate_ppc64': [ 'release_simulate_ppc64': [
'release_bot', 'simulate_ppc64'], 'release_bot', 'simulate_ppc64'],
'release_simulate_riscv32': [
'release_bot', 'simulate_riscv32'],
'release_simulate_riscv64': [ 'release_simulate_riscv64': [
'release_bot', 'simulate_riscv64'], 'release_bot', 'simulate_riscv64'],
'release_simulate_s390x': [ 'release_simulate_s390x': [
@ -449,6 +450,9 @@
'debug_bot', 'arm', 'hard_float'], 'debug_bot', 'arm', 'hard_float'],
'debug_arm64': [ 'debug_arm64': [
'debug_bot', 'arm64'], 'debug_bot', 'arm64'],
'debug_arm64_no_pointer_compression': [
'debug_bot', 'arm64_no_sandbox', 'dcheck_always_on', 'v8_enable_slow_dchecks', 'v8_enable_javascript_promise_hooks',
'v8_disable_pointer_compression'],
'full_debug_arm64': [ 'full_debug_arm64': [
'debug_bot', 'arm64', 'v8_full_debug'], 'debug_bot', 'arm64', 'v8_full_debug'],
@ -475,6 +479,8 @@
'official_android_arm64': [ 'official_android_arm64': [
'release_bot', 'arm64', 'android', 'minimal_symbols', 'release_bot', 'arm64', 'android', 'minimal_symbols',
'android_strip_outputs', 'official', 'disable_pgo'], 'android_strip_outputs', 'official', 'disable_pgo'],
'official_mac_arm64': [
'release_bot', 'arm64', 'official', 'disable_pgo'],
# Release configs for x64. # Release configs for x64.
'release_x64': [ 'release_x64': [
@ -512,6 +518,8 @@
'release_bot', 'x64', 'fuchsia'], 'release_bot', 'x64', 'fuchsia'],
'release_x64_fuchsia_trybot': [ 'release_x64_fuchsia_trybot': [
'release_trybot', 'x64', 'fuchsia'], 'release_trybot', 'x64', 'fuchsia'],
'release_x64_gcc': [
'release_bot_no_goma', 'x64', 'gcc', 'lld', 'no_custom_libcxx'],
'release_x64_gcc_coverage': [ 'release_x64_gcc_coverage': [
'release_bot_no_goma', 'x64', 'coverage', 'gcc', 'lld', 'release_bot_no_goma', 'x64', 'coverage', 'gcc', 'lld',
'no_custom_libcxx', 'no_sysroot'], 'no_custom_libcxx', 'no_sysroot'],
@ -521,13 +529,15 @@
'release_bot', 'x64', 'v8_snapshot_internal'], 'release_bot', 'x64', 'v8_snapshot_internal'],
'release_x64_minimal_symbols': [ 'release_x64_minimal_symbols': [
'release_bot', 'x64', 'minimal_symbols'], 'release_bot', 'x64', 'minimal_symbols'],
'release_x64_pointer_compression': [ 'release_x64_minimal_symbols_reclient': [
'release_bot', 'x64', 'dcheck_always_on', 'v8_enable_slow_dchecks', 'v8_enable_javascript_promise_hooks', 'release_bot_reclient', 'x64', 'minimal_symbols'],
'release_x64_no_pointer_compression': [
'release_bot', 'x64_no_sandbox', 'dcheck_always_on', 'v8_enable_slow_dchecks', 'v8_enable_javascript_promise_hooks',
'v8_disable_pointer_compression'], 'v8_disable_pointer_compression'],
'release_x64_pointer_compression_without_dchecks': [
'release_bot', 'x64', 'v8_disable_pointer_compression'],
'release_x64_reclient': [ 'release_x64_reclient': [
'release_bot_reclient', 'x64'], 'release_bot_reclient', 'x64'],
'release_x64_no_sandbox': [
'release_bot', 'x64_no_sandbox'],
'release_x64_trybot': [ 'release_x64_trybot': [
'release_trybot', 'x64'], 'release_trybot', 'x64'],
'release_x64_test_features_trybot': [ 'release_x64_test_features_trybot': [
@ -551,6 +561,9 @@
'v8_enable_slow_dchecks', 'v8_verify_csa'], 'v8_enable_slow_dchecks', 'v8_verify_csa'],
'release_x64_webassembly_disabled': [ 'release_x64_webassembly_disabled': [
'release_bot', 'x64', 'webassembly_disabled'], 'release_bot', 'x64', 'webassembly_disabled'],
'release_x64_asan_sandbox_testing': [
'release_bot', 'x64', 'asan', 'symbolized', 'v8_enable_sandbox_future',
'v8_expose_memory_corruption_api'],
# Official configs for x64. # Official configs for x64.
'official_x64': [ 'official_x64': [
@ -571,20 +584,21 @@
'debug_x64_fuchsia': [ 'debug_x64_fuchsia': [
'debug_bot', 'x64', 'fuchsia'], 'debug_bot', 'x64', 'fuchsia'],
'debug_x64_gcc': [ 'debug_x64_gcc': [
'debug_bot_no_goma', 'x64', 'gcc', 'lld', 'no_custom_libcxx', 'debug_bot_no_goma', 'x64', 'gcc', 'lld', 'no_custom_libcxx'],
'v8_check_header_includes'],
'debug_x64_header_includes': [ 'debug_x64_header_includes': [
'debug_bot', 'x64', 'v8_check_header_includes'], 'debug_bot', 'x64', 'v8_check_header_includes'],
'debug_x64_heap_sandbox': [ 'debug_x64_heap_sandbox': [
'debug_bot', 'x64', 'v8_enable_sandbox_future'], 'debug_bot', 'x64', 'v8_enable_sandbox_future', 'v8_expose_memory_corruption_api'],
'debug_x64_heap_sandbox_arm64_sim': [ 'debug_x64_heap_sandbox_arm64_sim': [
'debug_bot', 'simulate_arm64', 'v8_enable_sandbox_future'], 'debug_bot', 'simulate_arm64', 'v8_enable_sandbox_future', 'v8_expose_memory_corruption_api'],
'debug_x64_minimal_symbols': [ 'debug_x64_minimal_symbols': [
'debug_bot', 'x64', 'minimal_symbols'], 'debug_bot', 'x64', 'minimal_symbols'],
'debug_x64_non_default_cppgc': [ 'debug_x64_non_default_cppgc': [
'debug_bot', 'x64', 'non_default_cppgc'], 'debug_bot', 'x64', 'non_default_cppgc'],
'debug_x64_perfetto': [ 'debug_x64_perfetto': [
'debug_bot', 'x64', 'perfetto'], 'debug_bot', 'x64', 'perfetto'],
'debug_x64_no_sandbox': [
'debug_bot', 'x64_no_sandbox'],
'debug_x64_single_generation': [ 'debug_x64_single_generation': [
'debug_bot', 'x64', 'v8_enable_single_generation'], 'debug_bot', 'x64', 'v8_enable_single_generation'],
'debug_x64_trybot': [ 'debug_x64_trybot': [
@ -613,15 +627,11 @@
'debug', 'x86', 'goma', 'v8_enable_slow_dchecks', 'v8_full_debug'], 'debug', 'x86', 'goma', 'v8_enable_slow_dchecks', 'v8_full_debug'],
# Release configs for x86. # Release configs for x86.
'release_x86': [
'release_bot', 'x86'],
'release_x86_asan_symbolized_verify_heap': [ 'release_x86_asan_symbolized_verify_heap': [
'release_bot', 'x86', 'asan', 'lsan', 'symbolized', 'release_bot', 'x86', 'asan', 'lsan', 'symbolized',
'v8_verify_heap'], 'v8_verify_heap'],
'release_x86_gcc': [
'release_bot_no_goma', 'x86', 'gcc', 'lld', 'no_custom_libcxx',
'v8_check_header_includes'],
'release_x86_gcc_minimal_symbols': [
'release_bot_no_goma', 'x86', 'gcc', 'lld', 'no_custom_libcxx',
'minimal_symbols', 'v8_check_header_includes'],
'release_x86_gcmole': [ 'release_x86_gcmole': [
'release_bot', 'x86', 'gcmole'], 'release_bot', 'x86', 'gcmole'],
'release_x86_gcmole_trybot': [ 'release_x86_gcmole_trybot': [
@ -632,8 +642,8 @@
'release_bot_reclient', 'x86', 'minimal_symbols'], 'release_bot_reclient', 'x86', 'minimal_symbols'],
'release_x86_no_i18n_trybot': [ 'release_x86_no_i18n_trybot': [
'release_trybot', 'x86', 'v8_no_i18n'], 'release_trybot', 'x86', 'v8_no_i18n'],
'release_x86_predictable': [ 'release_x64_predictable': [
'release_bot', 'x86', 'v8_enable_verify_predictable'], 'release_bot', 'x64', 'v8_enable_verify_predictable'],
'release_x86_shared_verify_heap': [ 'release_x86_shared_verify_heap': [
'release', 'x86', 'goma', 'shared', 'v8_verify_heap'], 'release', 'x86', 'goma', 'shared', 'v8_verify_heap'],
'release_x86_trybot': [ 'release_x86_trybot': [
@ -665,7 +675,11 @@
}, },
'arm64': { 'arm64': {
'gn_args': 'target_cpu="arm64"', 'gn_args': 'target_cpu="arm64" v8_enable_sandbox=true',
},
'arm64_no_sandbox': {
'gn_args': 'target_cpu="arm64" v8_enable_sandbox=false',
}, },
'asan': { 'asan': {
@ -723,8 +737,7 @@
# atomic marking state enabled because that is needed for the concurrent # atomic marking state enabled because that is needed for the concurrent
# write-barrier used by background compilation. # write-barrier used by background compilation.
'gn_args': 'v8_enable_concurrent_marking=false ' 'gn_args': 'v8_enable_concurrent_marking=false '
'v8_enable_atomic_object_field_writes=false ' 'v8_enable_atomic_object_field_writes=false ',
'v8_enable_atomic_marking_state=true ',
}, },
'disable_pgo': { 'disable_pgo': {
@ -812,7 +825,7 @@
}, },
'reclient': { 'reclient': {
'gn_args': 'use_rbe=true use_remoteexec=true', 'gn_args': 'use_remoteexec=true',
}, },
'release': { 'release': {
@ -848,18 +861,17 @@
}, },
'simulate_arm64': { 'simulate_arm64': {
'gn_args': 'target_cpu="x64" v8_target_cpu="arm64"', 'gn_args': 'target_cpu="x64" v8_target_cpu="arm64" v8_enable_sandbox=true',
},
'simulate_arm64_no_sandbox': {
'gn_args': 'target_cpu="x64" v8_target_cpu="arm64" v8_enable_sandbox=false',
}, },
'simulate_loong64': { 'simulate_loong64': {
'gn_args': 'target_cpu="x64" v8_target_cpu="loong64"', 'gn_args': 'target_cpu="x64" v8_target_cpu="loong64"',
}, },
'simulate_mipsel': {
'gn_args':
'target_cpu="x86" v8_target_cpu="mipsel" mips_arch_variant="r2"',
},
'simulate_mips64el': { 'simulate_mips64el': {
'gn_args': 'target_cpu="x64" v8_target_cpu="mips64el"', 'gn_args': 'target_cpu="x64" v8_target_cpu="mips64el"',
}, },
@ -868,6 +880,10 @@
'gn_args': 'target_cpu="x64" v8_target_cpu="ppc64"', 'gn_args': 'target_cpu="x64" v8_target_cpu="ppc64"',
}, },
'simulate_riscv32': {
'gn_args': 'target_cpu="x86" v8_target_cpu="riscv32"',
},
'simulate_riscv64': { 'simulate_riscv64': {
'gn_args': 'target_cpu="x64" v8_target_cpu="riscv64"', 'gn_args': 'target_cpu="x64" v8_target_cpu="riscv64"',
}, },
@ -925,6 +941,10 @@
'gn_args': 'v8_enable_sandbox_future=true', 'gn_args': 'v8_enable_sandbox_future=true',
}, },
'v8_expose_memory_corruption_api': {
'gn_args': 'v8_expose_memory_corruption_api=true',
},
'v8_enable_lite_mode': { 'v8_enable_lite_mode': {
'gn_args': 'v8_enable_lite_mode=true', 'gn_args': 'v8_enable_lite_mode=true',
}, },
@ -1010,7 +1030,11 @@
}, },
'x64': { 'x64': {
'gn_args': 'target_cpu="x64"', 'gn_args': 'target_cpu="x64" v8_enable_sandbox=true',
},
'x64_no_sandbox': {
'gn_args': 'target_cpu="x64" v8_enable_sandbox=false',
}, },
'x86': { 'x86': {

View file

@ -77,8 +77,8 @@
{'name': 'mozilla', 'variant': 'extra'}, {'name': 'mozilla', 'variant': 'extra'},
{'name': 'test262', 'variant': 'default', 'shards': 2}, {'name': 'test262', 'variant': 'default', 'shards': 2},
{'name': 'test262', 'variant': 'extra', 'shards': 5}, {'name': 'test262', 'variant': 'extra', 'shards': 5},
{'name': 'v8testing', 'shards': 3}, {'name': 'v8testing', 'shards': 4},
{'name': 'v8testing', 'variant': 'extra', 'shards': 2}, {'name': 'v8testing', 'variant': 'extra', 'shards': 4},
# Noavx. # Noavx.
{ {
'name': 'mozilla', 'name': 'mozilla',
@ -95,7 +95,17 @@
'name': 'v8testing', 'name': 'v8testing',
'suffix': 'noavx', 'suffix': 'noavx',
'test_args': ['--extra-flags', '--noenable-avx'], 'test_args': ['--extra-flags', '--noenable-avx'],
'shards': 2 'shards': 4
},
# Nosse3.
{
'name': 'v8testing',
'suffix': 'nosse3',
'test_args': [
'--extra-flags',
'--noenable-sse3 --noenable-ssse3 --noenable-sse4-1 --noenable-avx',
],
'shards': 4,
}, },
], ],
}, },
@ -108,14 +118,6 @@
{'name': 'd8testing', 'test_args': ['--gc-stress'], 'shards': 5}, {'name': 'd8testing', 'test_args': ['--gc-stress'], 'shards': 5},
], ],
}, },
'v8_linux_gcc_rel_ng_triggered': {
'swarming_dimensions' : {
'os': 'Ubuntu-18.04',
},
'tests': [
{'name': 'v8testing'},
],
},
'v8_linux_nodcheck_rel_ng_triggered': { 'v8_linux_nodcheck_rel_ng_triggered': {
'swarming_dimensions' : { 'swarming_dimensions' : {
'cpu': 'x86-64-avx2', 'cpu': 'x86-64-avx2',
@ -130,6 +132,15 @@
{'name': 'test262', 'variant': 'extra', 'shards': 2}, {'name': 'test262', 'variant': 'extra', 'shards': 2},
{'name': 'v8testing', 'shards': 2}, {'name': 'v8testing', 'shards': 2},
{'name': 'v8testing', 'variant': 'extra'}, {'name': 'v8testing', 'variant': 'extra'},
{
'name': 'v8testing',
'suffix': 'nosse3',
'test_args': [
'--extra-flags',
'--noenable-sse3 --noenable-ssse3 --noenable-sse4-1 --noenable-avx',
],
'shards': 2,
},
], ],
}, },
'v8_linux_noi18n_rel_ng_triggered': { 'v8_linux_noi18n_rel_ng_triggered': {
@ -156,10 +167,19 @@
{'name': 'mozilla', 'variant': 'extra'}, {'name': 'mozilla', 'variant': 'extra'},
{'name': 'optimize_for_size'}, {'name': 'optimize_for_size'},
{'name': 'test262', 'shards': 4}, {'name': 'test262', 'shards': 4},
{'name': 'test262', 'variant': 'extra', 'shards': 3}, {'name': 'test262', 'variant': 'extra', 'shards': 6},
{'name': 'v8testing', 'shards': 2}, {'name': 'v8testing', 'shards': 4},
{'name': 'v8testing', 'suffix': 'isolates', 'test_args': ['--isolates'], 'shards': 2}, {'name': 'v8testing', 'suffix': 'isolates', 'test_args': ['--isolates'], 'shards': 4},
{'name': 'v8testing', 'variant': 'extra', 'shards': 2}, {'name': 'v8testing', 'variant': 'extra', 'shards': 4},
{
'name': 'v8testing',
'suffix': 'nosse3',
'test_args': [
'--extra-flags',
'--noenable-sse3 --noenable-ssse3 --noenable-sse4-1 --noenable-avx',
],
'shards': 4,
},
], ],
}, },
'v8_linux_optional_rel_ng_triggered': { 'v8_linux_optional_rel_ng_triggered': {
@ -314,9 +334,9 @@
'os': 'Ubuntu-18.04', 'os': 'Ubuntu-18.04',
}, },
'tests': [ 'tests': [
{'name': 'test262', 'shards': 7}, {'name': 'test262', 'shards': 12},
{'name': 'v8testing', 'shards': 3}, {'name': 'v8testing', 'shards': 5},
{'name': 'v8testing', 'variant': 'extra', 'shards': 3}, {'name': 'v8testing', 'variant': 'extra', 'shards': 5},
{'name': 'v8testing', 'variant': 'slow_path'}, {'name': 'v8testing', 'variant': 'slow_path'},
], ],
}, },
@ -361,6 +381,8 @@
{'name': 'v8testing', 'variant': 'stress_instruction_scheduling'}, {'name': 'v8testing', 'variant': 'stress_instruction_scheduling'},
{'name': 'v8testing', 'variant': 'stress_concurrent_allocation'}, {'name': 'v8testing', 'variant': 'stress_concurrent_allocation'},
{'name': 'v8testing', 'variant': 'stress_concurrent_inlining'}, {'name': 'v8testing', 'variant': 'stress_concurrent_inlining'},
# Maglev -- move to extra once more architectures are supported.
{'name': 'mjsunit', 'variant': 'maglev'},
], ],
}, },
'v8_linux64_dict_tracking_dbg_ng_triggered': { 'v8_linux64_dict_tracking_dbg_ng_triggered': {
@ -408,6 +430,10 @@
{'name': 'webkit', 'variant': 'stress_sampling'}, {'name': 'webkit', 'variant': 'stress_sampling'},
# Stress snapshot. # Stress snapshot.
{'name': 'mjsunit', 'variant': 'stress_snapshot'}, {'name': 'mjsunit', 'variant': 'stress_snapshot'},
# Maglev.
{'name': 'mjsunit', 'variant': 'maglev'},
# Stress maglev.
{'name': 'mjsunit', 'variant': 'stress_maglev'},
# Experimental regexp engine. # Experimental regexp engine.
{'name': 'mjsunit', 'variant': 'experimental_regexp'}, {'name': 'mjsunit', 'variant': 'experimental_regexp'},
# Wasm write protect code space. # Wasm write protect code space.
@ -426,6 +452,35 @@
}, },
], ],
}, },
'v8_linux64_gc_stress_dbg_ng_triggered': {
'swarming_dimensions' : {
'cpu': 'x86-64-avx2',
'os': 'Ubuntu-18.04',
},
'tests': [
{'name': 'd8testing', 'test_args': ['--gc-stress'], 'shards': 5},
{
'name': 'mjsunit',
'variant': 'slow_path',
'test_args': ['--gc-stress'],
'shards': 2
},
{
'name': 'mjsunit',
'variant': 'maglev',
'test_args': ['--gc-stress'],
'shards': 2
},
],
},
'v8_linux64_gcc_rel_ng_triggered': {
'swarming_dimensions' : {
'os': 'Ubuntu-20.04',
},
'tests': [
{'name': 'v8testing'},
],
},
'v8_linux64_gcov_coverage': { 'v8_linux64_gcov_coverage': {
'swarming_dimensions' : { 'swarming_dimensions' : {
'os': 'Ubuntu-18.04', 'os': 'Ubuntu-18.04',
@ -439,7 +494,20 @@
'os': 'Ubuntu-18.04', 'os': 'Ubuntu-18.04',
}, },
'tests': [ 'tests': [
{'name': 'v8testing', 'shards': 2}, {'name': 'v8testing', 'shards': 4},
],
},
'v8_linux64_minor_mc_dbg_ng_triggered': {
'swarming_dimensions' : {
'cpu': 'x86-64-avx2',
'os': 'Ubuntu-18.04',
},
'tests': [
{'name': 'v8testing', 'variant': 'minor_mc'},
{'name': 'benchmarks', 'variant': 'minor_mc'},
{'name': 'mozilla', 'variant': 'minor_mc'},
{'name': 'test262', 'variant': 'minor_mc', 'shards': 2},
{'name': 'mjsunit', 'variant': 'minor_mc'},
], ],
}, },
'v8_linux64_msan_rel_ng_triggered': { 'v8_linux64_msan_rel_ng_triggered': {
@ -473,6 +541,8 @@
{'name': 'v8testing', 'variant': 'extra'}, {'name': 'v8testing', 'variant': 'extra'},
{'name': 'v8testing', 'variant': 'no_lfa'}, {'name': 'v8testing', 'variant': 'no_lfa'},
{'name': 'v8testing', 'variant': 'stress_instruction_scheduling'}, {'name': 'v8testing', 'variant': 'stress_instruction_scheduling'},
# Maglev -- move to extra once more architectures are supported.
{'name': 'mjsunit', 'variant': 'maglev'},
], ],
}, },
'v8_linux64_perfetto_dbg_ng_triggered': { 'v8_linux64_perfetto_dbg_ng_triggered': {
@ -483,7 +553,7 @@
{'name': 'v8testing', 'shards': 3}, {'name': 'v8testing', 'shards': 3},
], ],
}, },
'v8_linux64_pointer_compression_rel_ng_triggered': { 'v8_linux64_no_pointer_compression_rel_ng_triggered': {
'swarming_dimensions' : { 'swarming_dimensions' : {
'os': 'Ubuntu-18.04', 'os': 'Ubuntu-18.04',
}, },
@ -491,6 +561,15 @@
{'name': 'v8testing', 'shards': 3}, {'name': 'v8testing', 'shards': 3},
], ],
}, },
'v8_linux64_no_sandbox_dbg_ng_triggered': {
'swarming_dimensions' : {
'cpu': 'x86-64-avx2',
'os': 'Ubuntu-18.04',
},
'tests': [
{'name': 'v8testing', 'shards': 5},
],
},
'v8_linux64_single_generation_dbg_ng_triggered': { 'v8_linux64_single_generation_dbg_ng_triggered': {
'swarming_dimensions' : { 'swarming_dimensions' : {
'os': 'Ubuntu-18.04', 'os': 'Ubuntu-18.04',
@ -518,6 +597,28 @@
{'name': 'v8testing', 'variant': 'no_lfa'}, {'name': 'v8testing', 'variant': 'no_lfa'},
{'name': 'v8testing', 'variant': 'slow_path'}, {'name': 'v8testing', 'variant': 'slow_path'},
{'name': 'v8testing', 'variant': 'stress_instruction_scheduling'}, {'name': 'v8testing', 'variant': 'stress_instruction_scheduling'},
# Maglev -- move to extra once more architectures are supported.
{'name': 'mjsunit', 'variant': 'maglev'},
],
},
'v8_linux64_predictable_rel_ng_triggered': {
'swarming_dimensions': {
'cpu': 'x86-64-avx2',
'os': 'Ubuntu-18.04',
},
'tests': [
{'name': 'benchmarks'},
{'name': 'd8testing'},
{'name': 'mozilla'},
],
},
'v8_linux64_no_sandbox_rel_ng_triggered': {
'swarming_dimensions' : {
'cpu': 'x86-64-avx2',
'os': 'Ubuntu-18.04',
},
'tests': [
{'name': 'v8testing', 'shards': 2},
], ],
}, },
'v8_linux64_tsan_rel_ng_triggered': { 'v8_linux64_tsan_rel_ng_triggered': {
@ -587,7 +688,7 @@
{'name': 'mozilla', 'shards': 4}, {'name': 'mozilla', 'shards': 4},
{'name': 'test262', 'variant': 'default', 'shards': 4}, {'name': 'test262', 'variant': 'default', 'shards': 4},
{'name': 'v8testing', 'shards': 14}, {'name': 'v8testing', 'shards': 14},
{'name': 'v8testing', 'variant': 'extra', 'shards': 12}, {'name': 'v8testing', 'variant': 'extra', 'shards': 14},
], ],
}, },
'v8_linux_arm64_gc_stress_dbg_ng_triggered': { 'v8_linux_arm64_gc_stress_dbg_ng_triggered': {
@ -615,7 +716,7 @@
{'name': 'mozilla', 'shards': 4}, {'name': 'mozilla', 'shards': 4},
{'name': 'test262', 'variant': 'default', 'shards': 4}, {'name': 'test262', 'variant': 'default', 'shards': 4},
{'name': 'v8testing', 'shards': 14}, {'name': 'v8testing', 'shards': 14},
{'name': 'v8testing', 'variant': 'extra', 'shards': 12}, {'name': 'v8testing', 'variant': 'extra', 'shards': 14},
], ],
}, },
'v8_linux_arm64_cfi_rel_ng_triggered': { 'v8_linux_arm64_cfi_rel_ng_triggered': {
@ -627,7 +728,7 @@
{'name': 'v8testing', 'shards': 4}, {'name': 'v8testing', 'shards': 4},
], ],
}, },
'v8_linux64_arm64_pointer_compression_rel_ng_triggered': { 'v8_linux64_arm64_no_pointer_compression_rel_ng_triggered': {
'swarming_dimensions' : { 'swarming_dimensions' : {
'os': 'Ubuntu-18.04', 'os': 'Ubuntu-18.04',
}, },
@ -646,7 +747,15 @@
], ],
}, },
############################################################################## ##############################################################################
# Linux64 with RISC-V simulators # Linux with RISC-V simulators
'v8_linux_riscv32_rel_ng_triggered': {
'swarming_dimensions': {
'os': 'Ubuntu-18.04',
},
'tests': [
{'name': 'v8testing', 'shards': 3},
],
},
'v8_linux64_riscv64_rel_ng_triggered': { 'v8_linux64_riscv64_rel_ng_triggered': {
'swarming_dimensions': { 'swarming_dimensions': {
'os': 'Ubuntu-18.04', 'os': 'Ubuntu-18.04',
@ -700,7 +809,7 @@
# Win64 # Win64
'v8_win64_asan_rel_ng_triggered': { 'v8_win64_asan_rel_ng_triggered': {
'swarming_dimensions' : { 'swarming_dimensions' : {
'os': 'Windows-10-15063', 'os': 'Windows-10-19042',
}, },
'tests': [ 'tests': [
{'name': 'v8testing', 'shards': 5}, {'name': 'v8testing', 'shards': 5},
@ -709,11 +818,11 @@
'v8_win64_dbg_ng_triggered': { 'v8_win64_dbg_ng_triggered': {
'swarming_dimensions' : { 'swarming_dimensions' : {
'cpu': 'x86-64', 'cpu': 'x86-64',
'os': 'Windows-10-15063', 'os': 'Windows-10-19042',
}, },
'tests': [ 'tests': [
{'name': 'mozilla'}, {'name': 'mozilla'},
{'name': 'test262', 'variant': 'default', 'shards': 2}, {'name': 'test262', 'variant': 'default', 'shards': 4},
{'name': 'v8testing', 'shards': 3}, {'name': 'v8testing', 'shards': 3},
{'name': 'v8testing', 'variant': 'extra', 'shards': 2}, {'name': 'v8testing', 'variant': 'extra', 'shards': 2},
], ],
@ -721,22 +830,22 @@
'v8_win64_msvc_rel_ng_triggered': { 'v8_win64_msvc_rel_ng_triggered': {
'swarming_dimensions' : { 'swarming_dimensions' : {
'cpu': 'x86-64', 'cpu': 'x86-64',
'os': 'Windows-10-15063', 'os': 'Windows-10-19042',
}, },
'tests': [ 'tests': [
{'name': 'mozilla'}, {'name': 'mozilla'},
{'name': 'test262', 'variant': 'default'}, {'name': 'test262', 'variant': 'default', 'shards': 2},
{'name': 'v8testing', 'shards': 2}, {'name': 'v8testing', 'shards': 2},
], ],
}, },
'v8_win64_rel_ng_triggered': { 'v8_win64_rel_ng_triggered': {
'swarming_dimensions' : { 'swarming_dimensions' : {
'cpu': 'x86-64', 'cpu': 'x86-64',
'os': 'Windows-10-15063', 'os': 'Windows-10-19042',
}, },
'tests': [ 'tests': [
{'name': 'mozilla'}, {'name': 'mozilla'},
{'name': 'test262', 'variant': 'default'}, {'name': 'test262', 'variant': 'default', 'shards': 2},
{'name': 'v8testing', 'shards': 2}, {'name': 'v8testing', 'shards': 2},
{'name': 'v8testing', 'variant': 'extra'}, {'name': 'v8testing', 'variant': 'extra'},
], ],
@ -815,6 +924,16 @@
{'name': 'v8testing'}, {'name': 'v8testing'},
], ],
}, },
'v8_mac_arm64_no_pointer_compression_dbg_ng_triggered': {
'swarming_dimensions' : {
'cpu': 'arm64',
'os': 'Mac-11',
'pool': 'chromium.tests',
},
'tests': [
{'name': 'v8testing'},
],
},
'v8_mac_arm64_sim_rel_ng_triggered': { 'v8_mac_arm64_sim_rel_ng_triggered': {
'swarming_dimensions' : { 'swarming_dimensions' : {
'cpu': 'x86-64', 'cpu': 'x86-64',
@ -1088,8 +1207,9 @@
{'name': 'v8testing', 'variant': 'default'}, {'name': 'v8testing', 'variant': 'default'},
], ],
}, },
'V8 Linux - predictable': { 'V8 Linux64 - predictable': {
'swarming_dimensions': { 'swarming_dimensions': {
'cpu': 'x86-64-avx2',
'os': 'Ubuntu-18.04', 'os': 'Ubuntu-18.04',
}, },
'tests': [ 'tests': [
@ -1116,14 +1236,6 @@
{'name': 'v8testing'}, {'name': 'v8testing'},
], ],
}, },
'V8 Linux gcc': {
'swarming_dimensions' : {
'os': 'Ubuntu-18.04',
},
'tests': [
{'name': 'v8testing'},
],
},
'V8 Linux64': { 'V8 Linux64': {
'swarming_dimensions': { 'swarming_dimensions': {
'cpu': 'x86-64-avx2', 'cpu': 'x86-64-avx2',
@ -1150,6 +1262,8 @@
{'name': 'v8testing', 'variant': 'minor_mc'}, {'name': 'v8testing', 'variant': 'minor_mc'},
{'name': 'v8testing', 'variant': 'no_lfa'}, {'name': 'v8testing', 'variant': 'no_lfa'},
{'name': 'v8testing', 'variant': 'stress_instruction_scheduling'}, {'name': 'v8testing', 'variant': 'stress_instruction_scheduling'},
# Maglev -- move to extra once more architectures are supported.
{'name': 'mjsunit', 'variant': 'maglev'},
# Noavx. # Noavx.
{ {
'name': 'mozilla', 'name': 'mozilla',
@ -1211,6 +1325,8 @@
{'name': 'v8testing', 'variant': 'stress_instruction_scheduling'}, {'name': 'v8testing', 'variant': 'stress_instruction_scheduling'},
{'name': 'v8testing', 'variant': 'stress_concurrent_allocation'}, {'name': 'v8testing', 'variant': 'stress_concurrent_allocation'},
{'name': 'v8testing', 'variant': 'stress_concurrent_inlining'}, {'name': 'v8testing', 'variant': 'stress_concurrent_inlining'},
# Maglev -- move to extra once more architectures are supported.
{'name': 'mjsunit', 'variant': 'maglev'},
# Noavx. # Noavx.
{ {
'name': 'mozilla', 'name': 'mozilla',
@ -1231,13 +1347,17 @@
}, },
], ],
}, },
'V8 Linux64 - dict tracking - debug': { 'V8 Linux64 - minor mc - debug': {
'swarming_dimensions': { 'swarming_dimensions': {
'cpu': 'x86-64-avx2', 'cpu': 'x86-64-avx2',
'os': 'Ubuntu-18.04', 'os': 'Ubuntu-18.04',
}, },
'tests': [ 'tests': [
{'name': 'v8testing', 'shards': 3}, {'name': 'v8testing', 'variant': 'minor_mc'},
{'name': 'benchmarks', 'variant': 'minor_mc'},
{'name': 'mozilla', 'variant': 'minor_mc'},
{'name': 'test262', 'variant': 'minor_mc', 'shards': 2},
{'name': 'mjsunit', 'variant': 'minor_mc'},
], ],
}, },
'V8 Linux64 - disable runtime call stats': { 'V8 Linux64 - disable runtime call stats': {
@ -1260,6 +1380,10 @@
{'name': 'webkit', 'variant': 'stress_sampling'}, {'name': 'webkit', 'variant': 'stress_sampling'},
# Stress snapshot. # Stress snapshot.
{'name': 'mjsunit', 'variant': 'stress_snapshot'}, {'name': 'mjsunit', 'variant': 'stress_snapshot'},
# Maglev.
{'name': 'mjsunit', 'variant': 'maglev'},
# Stress maglev.
{'name': 'mjsunit', 'variant': 'stress_maglev'},
# Experimental regexp engine. # Experimental regexp engine.
{'name': 'mjsunit', 'variant': 'experimental_regexp'}, {'name': 'mjsunit', 'variant': 'experimental_regexp'},
# Wasm write protect code space. # Wasm write protect code space.
@ -1322,12 +1446,49 @@
{'name': 'webkit', 'variant': 'stress_sampling'}, {'name': 'webkit', 'variant': 'stress_sampling'},
# Stress snapshot. # Stress snapshot.
{'name': 'mjsunit', 'variant': 'stress_snapshot'}, {'name': 'mjsunit', 'variant': 'stress_snapshot'},
# Maglev.
{'name': 'mjsunit', 'variant': 'maglev'},
# Stress maglev.
{'name': 'mjsunit', 'variant': 'stress_maglev'},
# Experimental regexp engine. # Experimental regexp engine.
{'name': 'mjsunit', 'variant': 'experimental_regexp'}, {'name': 'mjsunit', 'variant': 'experimental_regexp'},
# Wasm write protect code space. # Wasm write protect code space.
{'name': 'mjsunit', 'variant': 'wasm_write_protect_code'}, {'name': 'mjsunit', 'variant': 'wasm_write_protect_code'},
], ],
}, },
'V8 Linux64 gcc': {
'swarming_dimensions' : {
'os': 'Ubuntu-20.04',
},
'tests': [
{'name': 'v8testing'},
],
},
'V8 Linux64 - gc stress': {
'swarming_dimensions': {
'cpu': 'x86-64-avx2',
'os': 'Ubuntu-18.04',
},
'tests': [
{
'name': 'd8testing',
'test_args': ['--gc-stress'],
'shards': 5,
},
{
'name': 'mjsunit',
'variant': 'slow_path',
'test_args': ['--gc-stress'],
'shards': 2,
},
{
'name': 'mjsunit',
'variant': 'maglev',
'test_args': ['--gc-stress'],
'shards': 2
},
],
},
'V8 Linux64 - gcov coverage': { 'V8 Linux64 - gcov coverage': {
'swarming_dimensions' : { 'swarming_dimensions' : {
'os': 'Ubuntu-18.04', 'os': 'Ubuntu-18.04',
@ -1352,7 +1513,7 @@
{'name': 'v8testing'}, {'name': 'v8testing'},
], ],
}, },
'V8 Linux64 - pointer compression': { 'V8 Linux64 - no pointer compression': {
'swarming_dimensions' : { 'swarming_dimensions' : {
'os': 'Ubuntu-18.04', 'os': 'Ubuntu-18.04',
}, },
@ -1360,6 +1521,24 @@
{'name': 'v8testing', 'shards': 2}, {'name': 'v8testing', 'shards': 2},
], ],
}, },
'V8 Linux64 - no sandbox': {
'swarming_dimensions': {
'cpu': 'x86-64-avx2',
'os': 'Ubuntu-18.04',
},
'tests': [
{'name': 'v8testing'},
],
},
'V8 Linux64 - no sandbox - debug': {
'swarming_dimensions': {
'cpu': 'x86-64-avx2',
'os': 'Ubuntu-18.04',
},
'tests': [
{'name': 'v8testing', 'shards': 2},
],
},
'V8 Linux64 - shared': { 'V8 Linux64 - shared': {
'swarming_dimensions' : { 'swarming_dimensions' : {
'os': 'Ubuntu-18.04', 'os': 'Ubuntu-18.04',
@ -1559,6 +1738,16 @@
{'name': 'v8testing', 'variant': 'extra', 'shards': 2}, {'name': 'v8testing', 'variant': 'extra', 'shards': 2},
], ],
}, },
'V8 Mac - arm64 - no pointer compression debug': {
'swarming_dimensions' : {
'cpu': 'arm64',
'os': 'Mac-11',
'pool': 'chromium.tests',
},
'tests': [
{'name': 'v8testing'},
],
},
'V8 Mac - arm64 - sim - debug': { 'V8 Mac - arm64 - sim - debug': {
'swarming_dimensions' : { 'swarming_dimensions' : {
'cpu': 'x86-64', 'cpu': 'x86-64',
@ -1613,7 +1802,7 @@
}, },
'V8 Win64': { 'V8 Win64': {
'swarming_dimensions': { 'swarming_dimensions': {
'os': 'Windows-10-15063', 'os': 'Windows-10-19042',
}, },
'tests': [ 'tests': [
{'name': 'mozilla'}, {'name': 'mozilla'},
@ -1624,18 +1813,18 @@
}, },
'V8 Win64 - debug': { 'V8 Win64 - debug': {
'swarming_dimensions': { 'swarming_dimensions': {
'os': 'Windows-10-15063', 'os': 'Windows-10-19042',
}, },
'tests': [ 'tests': [
{'name': 'mozilla'}, {'name': 'mozilla'},
{'name': 'test262', 'variant': 'default'}, {'name': 'test262', 'variant': 'default', 'shards': 2},
{'name': 'v8testing', 'shards': 4}, {'name': 'v8testing', 'shards': 4},
{'name': 'v8testing', 'variant': 'extra', 'shards': 3}, {'name': 'v8testing', 'variant': 'extra', 'shards': 3},
], ],
}, },
'V8 Win64 - msvc': { 'V8 Win64 - msvc': {
'swarming_dimensions': { 'swarming_dimensions': {
'os': 'Windows-10-15063', 'os': 'Windows-10-19042',
}, },
'tests': [ 'tests': [
{'name': 'mozilla'}, {'name': 'mozilla'},
@ -1645,7 +1834,7 @@
}, },
'V8 Win64 ASAN': { 'V8 Win64 ASAN': {
'swarming_dimensions': { 'swarming_dimensions': {
'os': 'Windows-10-15063', 'os': 'Windows-10-19042',
}, },
'tests': [ 'tests': [
{'name': 'v8testing', 'shards': 5}, {'name': 'v8testing', 'shards': 5},
@ -1818,7 +2007,7 @@
'name': 'v8testing', 'name': 'v8testing',
'suffix': 'armv8-a', 'suffix': 'armv8-a',
'test_args': ['--extra-flags', '--enable-armv8'], 'test_args': ['--extra-flags', '--enable-armv8'],
'shards': 8 'shards': 10
}, },
# Novfp3. # Novfp3.
{ {
@ -1839,7 +2028,7 @@
'suffix': 'novfp3', 'suffix': 'novfp3',
'variant': 'default', 'variant': 'default',
'test_args': ['--novfp3'], 'test_args': ['--novfp3'],
'shards': 8 'shards': 3
}, },
], ],
}, },
@ -1884,7 +2073,7 @@
{'name': 'mozilla', 'shards': 2}, {'name': 'mozilla', 'shards': 2},
{'name': 'test262', 'variant': 'default', 'shards': 2}, {'name': 'test262', 'variant': 'default', 'shards': 2},
{'name': 'v8testing', 'shards': 12}, {'name': 'v8testing', 'shards': 12},
{'name': 'v8testing', 'variant': 'extra', 'shards': 11}, {'name': 'v8testing', 'variant': 'extra', 'shards': 14},
], ],
}, },
'V8 Linux - arm64 - sim - gc stress': { 'V8 Linux - arm64 - sim - gc stress': {
@ -1900,7 +2089,7 @@
{ {
'name': 'd8testing', 'name': 'd8testing',
'test_args': ['--gc-stress', '--extra-flags=--verify-heap-skip-remembered-set'], 'test_args': ['--gc-stress', '--extra-flags=--verify-heap-skip-remembered-set'],
'shards': 5 'shards': 7
}, },
], ],
}, },
@ -1944,7 +2133,7 @@
{'name': 'v8testing', 'shards': 4}, {'name': 'v8testing', 'shards': 4},
], ],
}, },
'V8 Linux - mipsel - sim': { 'V8 Linux - ppc64 - sim': {
'swarming_dimensions': { 'swarming_dimensions': {
'os': 'Ubuntu-18.04', 'os': 'Ubuntu-18.04',
}, },
@ -1954,11 +2143,10 @@
'priority': 35, 'priority': 35,
}, },
'tests': [ 'tests': [
{'name': 'test262', 'variant': 'default'}, {'name': 'v8testing', 'shards': 3},
{'name': 'v8testing', 'shards': 4},
], ],
}, },
'V8 Linux - ppc64 - sim': { 'V8 Linux - riscv32 - sim': {
'swarming_dimensions': { 'swarming_dimensions': {
'os': 'Ubuntu-18.04', 'os': 'Ubuntu-18.04',
}, },
@ -1997,7 +2185,7 @@
{'name': 'v8testing', 'shards': 3}, {'name': 'v8testing', 'shards': 3},
], ],
}, },
'V8 Linux64 - arm64 - sim - pointer compression': { 'V8 Linux64 - arm64 - sim - no pointer compression': {
'swarming_dimensions': { 'swarming_dimensions': {
'os': 'Ubuntu-18.04', 'os': 'Ubuntu-18.04',
}, },

View file

@ -20,12 +20,6 @@ int main(int argc, char* argv[]) {
v8::V8::InitializeExternalStartupData(argv[0]); v8::V8::InitializeExternalStartupData(argv[0]);
std::unique_ptr<v8::Platform> platform = v8::platform::NewDefaultPlatform(); std::unique_ptr<v8::Platform> platform = v8::platform::NewDefaultPlatform();
v8::V8::InitializePlatform(platform.get()); v8::V8::InitializePlatform(platform.get());
#ifdef V8_SANDBOX
if (!v8::V8::InitializeSandbox()) {
fprintf(stderr, "Error initializing the V8 sandbox\n");
return 1;
}
#endif
v8::V8::Initialize(); v8::V8::Initialize();
// Create a new Isolate and make it the current one. // Create a new Isolate and make it the current one.
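
Note: the hello-world sample no longer calls v8::V8::InitializeSandbox(); with this V8 roll the samples go straight from InitializePlatform() to Initialize(). Below is a minimal sketch of the resulting embedder start-up sequence, assuming sandbox setup (when enabled at build time) now happens inside Initialize(); this is an illustration, not part of the patch:

#include <memory>
#include <libplatform/libplatform.h>
#include <v8.h>

int main(int argc, char* argv[]) {
  v8::V8::InitializeICUDefaultLocation(argv[0]);
  v8::V8::InitializeExternalStartupData(argv[0]);
  std::unique_ptr<v8::Platform> platform = v8::platform::NewDefaultPlatform();
  v8::V8::InitializePlatform(platform.get());
  v8::V8::Initialize();  // no separate InitializeSandbox() call anymore
  // ... create an Isolate and a Context, run scripts ...
  v8::V8::Dispose();
  v8::V8::DisposePlatform();
  return 0;
}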

View file

@ -703,12 +703,6 @@ int main(int argc, char* argv[]) {
v8::V8::InitializeExternalStartupData(argv[0]); v8::V8::InitializeExternalStartupData(argv[0]);
std::unique_ptr<v8::Platform> platform = v8::platform::NewDefaultPlatform(); std::unique_ptr<v8::Platform> platform = v8::platform::NewDefaultPlatform();
v8::V8::InitializePlatform(platform.get()); v8::V8::InitializePlatform(platform.get());
#ifdef V8_SANDBOX
if (!v8::V8::InitializeSandbox()) {
fprintf(stderr, "Error initializing the V8 sandbox\n");
return 1;
}
#endif
v8::V8::Initialize(); v8::V8::Initialize();
map<string, string> options; map<string, string> options;
string file; string file;

View file

@ -73,14 +73,8 @@ int main(int argc, char* argv[]) {
v8::V8::InitializeExternalStartupData(argv[0]); v8::V8::InitializeExternalStartupData(argv[0]);
std::unique_ptr<v8::Platform> platform = v8::platform::NewDefaultPlatform(); std::unique_ptr<v8::Platform> platform = v8::platform::NewDefaultPlatform();
v8::V8::InitializePlatform(platform.get()); v8::V8::InitializePlatform(platform.get());
#ifdef V8_SANDBOX
if (!v8::V8::InitializeSandbox()) {
fprintf(stderr, "Error initializing the V8 sandbox\n");
return 1;
}
#endif
v8::V8::Initialize();
v8::V8::SetFlagsFromCommandLine(&argc, argv, true); v8::V8::SetFlagsFromCommandLine(&argc, argv, true);
v8::V8::Initialize();
v8::Isolate::CreateParams create_params; v8::Isolate::CreateParams create_params;
create_params.array_buffer_allocator = create_params.array_buffer_allocator =
v8::ArrayBuffer::Allocator::NewDefaultAllocator(); v8::ArrayBuffer::Allocator::NewDefaultAllocator();
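
The shell sample additionally moves SetFlagsFromCommandLine() ahead of Initialize(). A small sketch of the reordered sequence follows; the rationale (V8 flags should be set before the VM is initialized) is an assumption based on this reordering, not stated in the diff:

v8::V8::InitializePlatform(platform.get());
v8::V8::SetFlagsFromCommandLine(&argc, argv, true);  // parse V8 flags first
v8::V8::Initialize();                                // then initialize the VM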

8
deps/v8/src/DEPS vendored
View file

@ -12,6 +12,7 @@ include_rules = [
"-src/compiler", "-src/compiler",
"+src/compiler/pipeline.h", "+src/compiler/pipeline.h",
"+src/compiler/code-assembler.h", "+src/compiler/code-assembler.h",
"+src/compiler/wasm-compiler-definitions.h",
"+src/compiler/wasm-compiler.h", "+src/compiler/wasm-compiler.h",
"-src/heap", "-src/heap",
"+src/heap/basic-memory-chunk.h", "+src/heap/basic-memory-chunk.h",
@ -23,6 +24,7 @@ include_rules = [
# TODO(v8:10496): Don't expose so much (through transitive includes) outside # TODO(v8:10496): Don't expose so much (through transitive includes) outside
# of heap/. # of heap/.
"+src/heap/heap.h", "+src/heap/heap.h",
"+src/heap/heap-verifier.h",
"+src/heap/heap-inl.h", "+src/heap/heap-inl.h",
"+src/heap/heap-write-barrier-inl.h", "+src/heap/heap-write-barrier-inl.h",
"+src/heap/heap-write-barrier.h", "+src/heap/heap-write-barrier.h",
@ -33,6 +35,7 @@ include_rules = [
# TODO(v8:10496): Don't expose memory chunk outside of heap/. # TODO(v8:10496): Don't expose memory chunk outside of heap/.
"+src/heap/memory-chunk.h", "+src/heap/memory-chunk.h",
"+src/heap/memory-chunk-inl.h", "+src/heap/memory-chunk-inl.h",
"+src/heap/paged-spaces-inl.h",
"+src/heap/parked-scope.h", "+src/heap/parked-scope.h",
"+src/heap/read-only-heap-inl.h", "+src/heap/read-only-heap-inl.h",
"+src/heap/read-only-heap.h", "+src/heap/read-only-heap.h",
@ -88,7 +91,10 @@ specific_include_rules = {
"builtins-trace\.cc": [ "builtins-trace\.cc": [
"+protos/perfetto", "+protos/perfetto",
], ],
"system-jit-win\.cc": [ "etw-jit-win\.cc": [
"+src/libplatform/tracing/recorder.h", "+src/libplatform/tracing/recorder.h",
], ],
"etw-jit-metadata-win\.h": [
"+src/libplatform/etw/etw-provider-win.h",
]
} }

View file

@ -12,7 +12,6 @@
#include "src/logging/runtime-call-stats-scope.h" #include "src/logging/runtime-call-stats-scope.h"
#include "src/objects/api-callbacks.h" #include "src/objects/api-callbacks.h"
#include "src/objects/slots-inl.h" #include "src/objects/slots-inl.h"
#include "src/tracing/trace-event.h"
namespace v8 { namespace v8 {
namespace internal { namespace internal {
@ -134,7 +133,7 @@ Handle<Object> FunctionCallbackArguments::Call(CallHandlerInfo handler) {
Isolate* isolate = this->isolate(); Isolate* isolate = this->isolate();
RCS_SCOPE(isolate, RuntimeCallCounterId::kFunctionCallback); RCS_SCOPE(isolate, RuntimeCallCounterId::kFunctionCallback);
v8::FunctionCallback f = v8::FunctionCallback f =
v8::ToCData<v8::FunctionCallback>(handler.callback()); reinterpret_cast<v8::FunctionCallback>(handler.callback());
Handle<Object> receiver_check_unsupported; Handle<Object> receiver_check_unsupported;
if (isolate->debug_execution_mode() == DebugInfo::kSideEffects && if (isolate->debug_execution_mode() == DebugInfo::kSideEffects &&
!isolate->debug()->PerformSideEffectCheckForCallback( !isolate->debug()->PerformSideEffectCheckForCallback(
@ -148,6 +147,17 @@ Handle<Object> FunctionCallbackArguments::Call(CallHandlerInfo handler) {
return GetReturnValue<Object>(isolate); return GetReturnValue<Object>(isolate);
} }
PropertyCallbackArguments::~PropertyCallbackArguments() {
#ifdef DEBUG
// TODO(chromium:1310062): enable this check.
// if (javascript_execution_counter_) {
// CHECK_WITH_MSG(javascript_execution_counter_ ==
// isolate()->javascript_execution_counter(),
// "Unexpected side effect detected");
// }
#endif // DEBUG
}
Handle<JSObject> PropertyCallbackArguments::CallNamedEnumerator( Handle<JSObject> PropertyCallbackArguments::CallNamedEnumerator(
Handle<InterceptorInfo> interceptor) { Handle<InterceptorInfo> interceptor) {
DCHECK(interceptor->is_named()); DCHECK(interceptor->is_named());
@ -296,8 +306,12 @@ Handle<Object> PropertyCallbackArguments::CallAccessorGetter(
Handle<AccessorInfo> info, Handle<Name> name) { Handle<AccessorInfo> info, Handle<Name> name) {
Isolate* isolate = this->isolate(); Isolate* isolate = this->isolate();
RCS_SCOPE(isolate, RuntimeCallCounterId::kAccessorGetterCallback); RCS_SCOPE(isolate, RuntimeCallCounterId::kAccessorGetterCallback);
// Unlike interceptor callbacks we know that the property exists, so
// the callback is allowed to have side effects.
AcceptSideEffects();
AccessorNameGetterCallback f = AccessorNameGetterCallback f =
ToCData<AccessorNameGetterCallback>(info->getter()); reinterpret_cast<AccessorNameGetterCallback>(info->getter());
return BasicCallNamedGetterCallback(f, name, info, return BasicCallNamedGetterCallback(f, name, info,
handle(receiver(), isolate)); handle(receiver(), isolate));
} }
@ -307,8 +321,12 @@ Handle<Object> PropertyCallbackArguments::CallAccessorSetter(
Handle<Object> value) { Handle<Object> value) {
Isolate* isolate = this->isolate(); Isolate* isolate = this->isolate();
RCS_SCOPE(isolate, RuntimeCallCounterId::kAccessorSetterCallback); RCS_SCOPE(isolate, RuntimeCallCounterId::kAccessorSetterCallback);
// Unlike interceptor callbacks we know that the property exists, so
// the callback is allowed to have side effects.
AcceptSideEffects();
AccessorNameSetterCallback f = AccessorNameSetterCallback f =
ToCData<AccessorNameSetterCallback>(accessor_info->setter()); reinterpret_cast<AccessorNameSetterCallback>(accessor_info->setter());
PREPARE_CALLBACK_INFO(isolate, f, Handle<Object>, void, accessor_info, PREPARE_CALLBACK_INFO(isolate, f, Handle<Object>, void, accessor_info,
handle(receiver(), isolate), Setter); handle(receiver(), isolate), Setter);
f(v8::Utils::ToLocal(name), v8::Utils::ToLocal(value), callback_info); f(v8::Utils::ToLocal(name), v8::Utils::ToLocal(value), callback_info);
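
These callback call sites now cast the stored raw address straight back to the callback type with reinterpret_cast instead of unwrapping it via ToCData. A self-contained sketch of that store-address/cast-back pattern is shown below; the plain uintptr_t stands in for the internal Address type and all names are illustrative:

#include <cstdint>
#include <cstdio>

using Callback = void (*)(int);

void MyCallback(int x) { std::printf("callback(%d)\n", x); }

int main() {
  // Store the callback as a raw address...
  uintptr_t stored = reinterpret_cast<uintptr_t>(&MyCallback);
  // ...and reinterpret_cast it back to the function type at the call site.
  auto f = reinterpret_cast<Callback>(stored);
  f(42);
  return 0;
}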

View file

@ -12,7 +12,12 @@ namespace internal {
PropertyCallbackArguments::PropertyCallbackArguments( PropertyCallbackArguments::PropertyCallbackArguments(
Isolate* isolate, Object data, Object self, JSObject holder, Isolate* isolate, Object data, Object self, JSObject holder,
Maybe<ShouldThrow> should_throw) Maybe<ShouldThrow> should_throw)
: Super(isolate) { : Super(isolate)
#ifdef DEBUG
,
javascript_execution_counter_(isolate->javascript_execution_counter())
#endif // DEBUG
{
slot_at(T::kThisIndex).store(self); slot_at(T::kThisIndex).store(self);
slot_at(T::kHolderIndex).store(holder); slot_at(T::kHolderIndex).store(holder);
slot_at(T::kDataIndex).store(data); slot_at(T::kDataIndex).store(data);
@ -33,8 +38,7 @@ PropertyCallbackArguments::PropertyCallbackArguments(
} }
FunctionCallbackArguments::FunctionCallbackArguments( FunctionCallbackArguments::FunctionCallbackArguments(
internal::Isolate* isolate, internal::Object data, internal::Isolate* isolate, internal::Object data, internal::Object holder,
internal::HeapObject callee, internal::Object holder,
internal::HeapObject new_target, internal::Address* argv, int argc) internal::HeapObject new_target, internal::Address* argv, int argc)
: Super(isolate), argv_(argv), argc_(argc) { : Super(isolate), argv_(argv), argc_(argc) {
slot_at(T::kDataIndex).store(data); slot_at(T::kDataIndex).store(data);

View file

@ -6,8 +6,6 @@
#define V8_API_API_ARGUMENTS_H_ #define V8_API_API_ARGUMENTS_H_
#include "include/v8-template.h" #include "include/v8-template.h"
#include "src/api/api.h"
#include "src/debug/debug.h"
#include "src/execution/isolate.h" #include "src/execution/isolate.h"
#include "src/objects/slots.h" #include "src/objects/slots.h"
#include "src/objects/visitors.h" #include "src/objects/visitors.h"
@ -58,7 +56,15 @@ class CustomArguments : public CustomArgumentsBase {
// Note: Calling args.Call() sets the return value on args. For multiple // Note: Calling args.Call() sets the return value on args. For multiple
// Call()'s, a new args should be used every time. // Call()'s, a new args should be used every time.
class PropertyCallbackArguments // This class also serves as a side effects detection scope (JavaScript code
// execution). It is used for ensuring correctness of the interceptor callback
// implementations. The idea is that the interceptor callback that does not
// intercept an operation must not produce side effects. If the callback
// signals that it has handled the operation (by either returning a respective
// result or by throwing an exception) then the AcceptSideEffects() method
// must be called to "accept" the side effects that have happened during the
// lifetime of the PropertyCallbackArguments object.
class PropertyCallbackArguments final
: public CustomArguments<PropertyCallbackInfo<Value> > { : public CustomArguments<PropertyCallbackInfo<Value> > {
public: public:
using T = PropertyCallbackInfo<Value>; using T = PropertyCallbackInfo<Value>;
@ -74,6 +80,7 @@ class PropertyCallbackArguments
PropertyCallbackArguments(Isolate* isolate, Object data, Object self, PropertyCallbackArguments(Isolate* isolate, Object data, Object self,
JSObject holder, Maybe<ShouldThrow> should_throw); JSObject holder, Maybe<ShouldThrow> should_throw);
inline ~PropertyCallbackArguments();
// Don't copy PropertyCallbackArguments, because they would both have the // Don't copy PropertyCallbackArguments, because they would both have the
// same prev_ pointer. // same prev_ pointer.
@ -128,6 +135,14 @@ class PropertyCallbackArguments
inline Handle<JSObject> CallIndexedEnumerator( inline Handle<JSObject> CallIndexedEnumerator(
Handle<InterceptorInfo> interceptor); Handle<InterceptorInfo> interceptor);
// Accept potential JavaScript side effects that might occur during the
// lifetime of this object.
inline void AcceptSideEffects() {
#ifdef DEBUG
javascript_execution_counter_ = 0;
#endif // DEBUG
}
private: private:
/* /*
* The following Call functions wrap the calling of all callbacks to handle * The following Call functions wrap the calling of all callbacks to handle
@ -148,6 +163,13 @@ class PropertyCallbackArguments
inline JSObject holder(); inline JSObject holder();
inline Object receiver(); inline Object receiver();
#ifdef DEBUG
// This stores the current value of Isolate::javascript_execution_counter().
// It's used for detecting whether JavaScript code was executed between
// PropertyCallbackArguments's constructor and destructor.
uint32_t javascript_execution_counter_;
#endif // DEBUG
}; };
class FunctionCallbackArguments class FunctionCallbackArguments
@ -163,9 +185,8 @@ class FunctionCallbackArguments
static const int kIsolateIndex = T::kIsolateIndex; static const int kIsolateIndex = T::kIsolateIndex;
static const int kNewTargetIndex = T::kNewTargetIndex; static const int kNewTargetIndex = T::kNewTargetIndex;
FunctionCallbackArguments(Isolate* isolate, Object data, HeapObject callee, FunctionCallbackArguments(Isolate* isolate, Object data, Object holder,
Object holder, HeapObject new_target, Address* argv, HeapObject new_target, Address* argv, int argc);
int argc);
/* /*
* The following Call function wraps the calling of all callbacks to handle * The following Call function wraps the calling of all callbacks to handle
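
As a reading aid, here is a self-contained sketch of the side-effect-detection idea added in this file: the scope records the isolate's JavaScript execution counter on construction, AcceptSideEffects() clears it, and the (currently TODO-disabled) destructor check would fire if JavaScript ran without the callback accepting it. All names below are simplified stand-ins, not the real V8 internals:

#include <cassert>
#include <cstdint>

// Stand-in for Isolate::javascript_execution_counter(), which V8 bumps
// whenever JavaScript execution starts.
static uint32_t g_js_execution_counter = 1;

class SideEffectScope {
 public:
  SideEffectScope() : recorded_(g_js_execution_counter) {}
  ~SideEffectScope() {
    // A callback that did not intercept the operation must not have run JS,
    // so the counter should be unchanged; 0 is the "accepted" sentinel.
    if (recorded_ != 0) assert(recorded_ == g_js_execution_counter);
  }
  void AcceptSideEffects() { recorded_ = 0; }

 private:
  uint32_t recorded_;
};

int main() {
  {
    SideEffectScope scope;      // interceptor callback begins
    g_js_execution_counter++;   // the callback executed some JavaScript...
    scope.AcceptSideEffects();  // ...and declared that it handled the request
  }                             // destructor check passes
  return 0;
}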

View file

@ -9,18 +9,16 @@
#include "src/api/api.h" #include "src/api/api.h"
#include "src/execution/interrupts-scope.h" #include "src/execution/interrupts-scope.h"
#include "src/execution/microtask-queue.h" #include "src/execution/microtask-queue.h"
#include "src/execution/protectors.h"
#include "src/handles/handles-inl.h" #include "src/handles/handles-inl.h"
#include "src/heap/heap-inl.h" #include "src/heap/heap-inl.h"
#include "src/objects/foreign-inl.h" #include "src/objects/foreign-inl.h"
#include "src/objects/js-weak-refs.h"
#include "src/objects/objects-inl.h" #include "src/objects/objects-inl.h"
namespace v8 { namespace v8 {
template <typename T> template <typename T>
inline T ToCData(v8::internal::Object obj) { inline T ToCData(v8::internal::Object obj) {
STATIC_ASSERT(sizeof(T) == sizeof(v8::internal::Address)); static_assert(sizeof(T) == sizeof(v8::internal::Address));
if (obj == v8::internal::Smi::zero()) return nullptr; if (obj == v8::internal::Smi::zero()) return nullptr;
return reinterpret_cast<T>( return reinterpret_cast<T>(
v8::internal::Foreign::cast(obj).foreign_address()); v8::internal::Foreign::cast(obj).foreign_address());
@ -35,7 +33,7 @@ inline v8::internal::Address ToCData(v8::internal::Object obj) {
template <typename T> template <typename T>
inline v8::internal::Handle<v8::internal::Object> FromCData( inline v8::internal::Handle<v8::internal::Object> FromCData(
v8::internal::Isolate* isolate, T obj) { v8::internal::Isolate* isolate, T obj) {
STATIC_ASSERT(sizeof(T) == sizeof(v8::internal::Address)); static_assert(sizeof(T) == sizeof(v8::internal::Address));
if (obj == nullptr) return handle(v8::internal::Smi::zero(), isolate); if (obj == nullptr) return handle(v8::internal::Smi::zero(), isolate);
return isolate->factory()->NewForeign( return isolate->factory()->NewForeign(
reinterpret_cast<v8::internal::Address>(obj)); reinterpret_cast<v8::internal::Address>(obj));
@ -96,7 +94,6 @@ TYPED_ARRAYS(MAKE_TO_LOCAL_TYPED_ARRAY)
MAKE_TO_LOCAL(ToLocal, FunctionTemplateInfo, FunctionTemplate) MAKE_TO_LOCAL(ToLocal, FunctionTemplateInfo, FunctionTemplate)
MAKE_TO_LOCAL(ToLocal, ObjectTemplateInfo, ObjectTemplate) MAKE_TO_LOCAL(ToLocal, ObjectTemplateInfo, ObjectTemplate)
MAKE_TO_LOCAL(SignatureToLocal, FunctionTemplateInfo, Signature) MAKE_TO_LOCAL(SignatureToLocal, FunctionTemplateInfo, Signature)
MAKE_TO_LOCAL(AccessorSignatureToLocal, FunctionTemplateInfo, AccessorSignature)
MAKE_TO_LOCAL(MessageToLocal, Object, Message) MAKE_TO_LOCAL(MessageToLocal, Object, Message)
MAKE_TO_LOCAL(PromiseToLocal, JSObject, Promise) MAKE_TO_LOCAL(PromiseToLocal, JSObject, Promise)
MAKE_TO_LOCAL(StackTraceToLocal, FixedArray, StackTrace) MAKE_TO_LOCAL(StackTraceToLocal, FixedArray, StackTrace)
@ -185,8 +182,8 @@ class V8_NODISCARD CallDepthScope {
!microtask_queue->DebugMicrotasksScopeDepthIsZero()); !microtask_queue->DebugMicrotasksScopeDepthIsZero());
} }
} }
#endif
DCHECK(CheckKeptObjectsClearedAfterMicrotaskCheckpoint(microtask_queue)); DCHECK(CheckKeptObjectsClearedAfterMicrotaskCheckpoint(microtask_queue));
#endif
isolate_->set_next_v8_call_is_safe_for_termination(safe_for_termination_); isolate_->set_next_v8_call_is_safe_for_termination(safe_for_termination_);
} }
@ -204,6 +201,7 @@ class V8_NODISCARD CallDepthScope {
} }
private: private:
#ifdef DEBUG
bool CheckKeptObjectsClearedAfterMicrotaskCheckpoint( bool CheckKeptObjectsClearedAfterMicrotaskCheckpoint(
i::MicrotaskQueue* microtask_queue) { i::MicrotaskQueue* microtask_queue) {
bool did_perform_microtask_checkpoint = bool did_perform_microtask_checkpoint =
@ -213,6 +211,7 @@ class V8_NODISCARD CallDepthScope {
return !did_perform_microtask_checkpoint || return !did_perform_microtask_checkpoint ||
isolate_->heap()->weak_refs_keep_during_job().IsUndefined(isolate_); isolate_->heap()->weak_refs_keep_during_job().IsUndefined(isolate_);
} }
#endif
i::Isolate* const isolate_; i::Isolate* const isolate_;
Local<Context> context_; Local<Context> context_;
@ -233,14 +232,6 @@ class V8_NODISCARD InternalEscapableScope : public EscapableHandleScope {
: EscapableHandleScope(reinterpret_cast<v8::Isolate*>(isolate)) {} : EscapableHandleScope(reinterpret_cast<v8::Isolate*>(isolate)) {}
}; };
inline bool IsExecutionTerminatingCheck(i::Isolate* isolate) {
if (isolate->has_scheduled_exception()) {
return isolate->scheduled_exception() ==
i::ReadOnlyRoots(isolate).termination_exception();
}
return false;
}
template <typename T> template <typename T>
void CopySmiElementsToTypedBuffer(T* dst, uint32_t length, void CopySmiElementsToTypedBuffer(T* dst, uint32_t length,
i::FixedArray elements) { i::FixedArray elements) {

View file

@ -5,8 +5,8 @@
// PRESUBMIT_INTENTIONALLY_MISSING_INCLUDE_GUARD // PRESUBMIT_INTENTIONALLY_MISSING_INCLUDE_GUARD
#undef LOG_API #undef LOG_API
#undef ENTER_V8_DO_NOT_USE #undef ENTER_V8_BASIC
#undef ENTER_V8_HELPER_DO_NOT_USE #undef ENTER_V8_HELPER_INTERNAL
#undef PREPARE_FOR_DEBUG_INTERFACE_EXECUTION_WITH_ISOLATE #undef PREPARE_FOR_DEBUG_INTERFACE_EXECUTION_WITH_ISOLATE
#undef PREPARE_FOR_EXECUTION_WITH_CONTEXT #undef PREPARE_FOR_EXECUTION_WITH_CONTEXT
#undef PREPARE_FOR_EXECUTION #undef PREPARE_FOR_EXECUTION

View file

@ -35,98 +35,117 @@
* TODO(verwaest): Remove calls from API methods to DO_NOT_USE macros. */ */
*/ */
#define API_RCS_SCOPE(isolate, class_name, function_name) \ #define API_RCS_SCOPE(i_isolate, class_name, function_name) \
RCS_SCOPE(isolate, \ RCS_SCOPE(i_isolate, \
i::RuntimeCallCounterId::kAPI_##class_name##_##function_name); i::RuntimeCallCounterId::kAPI_##class_name##_##function_name);
#define ENTER_V8_DO_NOT_USE(isolate) i::VMState<v8::OTHER> __state__((isolate)) #define ENTER_V8_BASIC(i_isolate) \
/* Embedders should never enter V8 after terminating it */ \
DCHECK(!i_isolate->is_execution_terminating()); \
i::VMState<v8::OTHER> __state__((i_isolate))
#define ENTER_V8_HELPER_DO_NOT_USE(isolate, context, class_name, \ #define ENTER_V8_HELPER_INTERNAL(i_isolate, context, class_name, \
function_name, bailout_value, \ function_name, bailout_value, \
HandleScopeClass, do_callback) \ HandleScopeClass, do_callback) \
if (IsExecutionTerminatingCheck(isolate)) { \ if (i_isolate->is_execution_terminating()) { \
return bailout_value; \ return bailout_value; \
} \ } \
HandleScopeClass handle_scope(isolate); \ HandleScopeClass handle_scope(i_isolate); \
CallDepthScope<do_callback> call_depth_scope(isolate, context); \ CallDepthScope<do_callback> call_depth_scope(i_isolate, context); \
API_RCS_SCOPE(isolate, class_name, function_name); \ API_RCS_SCOPE(i_isolate, class_name, function_name); \
i::VMState<v8::OTHER> __state__((isolate)); \ i::VMState<v8::OTHER> __state__((i_isolate)); \
bool has_pending_exception = false bool has_pending_exception = false
#define PREPARE_FOR_DEBUG_INTERFACE_EXECUTION_WITH_ISOLATE(isolate, T) \ #define PREPARE_FOR_DEBUG_INTERFACE_EXECUTION_WITH_ISOLATE(i_isolate, T) \
if (IsExecutionTerminatingCheck(isolate)) { \ if (i_isolate->is_execution_terminating()) { \
return MaybeLocal<T>(); \ return MaybeLocal<T>(); \
} \ } \
InternalEscapableScope handle_scope(isolate); \ InternalEscapableScope handle_scope(i_isolate); \
CallDepthScope<false> call_depth_scope(isolate, v8::Local<v8::Context>()); \ CallDepthScope<false> call_depth_scope(i_isolate, v8::Local<v8::Context>()); \
i::VMState<v8::OTHER> __state__((isolate)); \ i::VMState<v8::OTHER> __state__((i_isolate)); \
bool has_pending_exception = false bool has_pending_exception = false
#define PREPARE_FOR_EXECUTION_WITH_CONTEXT(context, class_name, function_name, \ #define PREPARE_FOR_EXECUTION_WITH_CONTEXT(context, class_name, function_name, \
bailout_value, HandleScopeClass, \ bailout_value, HandleScopeClass, \
do_callback) \ do_callback) \
auto isolate = context.IsEmpty() \ auto i_isolate = context.IsEmpty() \
? i::Isolate::Current() \ ? i::Isolate::Current() \
: reinterpret_cast<i::Isolate*>(context->GetIsolate()); \ : reinterpret_cast<i::Isolate*>(context->GetIsolate()); \
ENTER_V8_HELPER_DO_NOT_USE(isolate, context, class_name, function_name, \ ENTER_V8_HELPER_INTERNAL(i_isolate, context, class_name, function_name, \
bailout_value, HandleScopeClass, do_callback); bailout_value, HandleScopeClass, do_callback);
#define PREPARE_FOR_EXECUTION(context, class_name, function_name, T) \ #define PREPARE_FOR_EXECUTION(context, class_name, function_name, T) \
PREPARE_FOR_EXECUTION_WITH_CONTEXT(context, class_name, function_name, \ PREPARE_FOR_EXECUTION_WITH_CONTEXT(context, class_name, function_name, \
MaybeLocal<T>(), InternalEscapableScope, \ MaybeLocal<T>(), InternalEscapableScope, \
false) false)
#define ENTER_V8(isolate, context, class_name, function_name, bailout_value, \ #define ENTER_V8(i_isolate, context, class_name, function_name, bailout_value, \
HandleScopeClass) \ HandleScopeClass) \
ENTER_V8_HELPER_DO_NOT_USE(isolate, context, class_name, function_name, \ ENTER_V8_HELPER_INTERNAL(i_isolate, context, class_name, function_name, \
bailout_value, HandleScopeClass, true) bailout_value, HandleScopeClass, true)
#ifdef DEBUG #ifdef DEBUG
#define ENTER_V8_NO_SCRIPT(isolate, context, class_name, function_name, \ #define ENTER_V8_NO_SCRIPT(i_isolate, context, class_name, function_name, \
bailout_value, HandleScopeClass) \ bailout_value, HandleScopeClass) \
ENTER_V8_HELPER_DO_NOT_USE(isolate, context, class_name, function_name, \ ENTER_V8_HELPER_INTERNAL(i_isolate, context, class_name, function_name, \
bailout_value, HandleScopeClass, false); \ bailout_value, HandleScopeClass, false); \
i::DisallowJavascriptExecutionDebugOnly __no_script__((isolate)) i::DisallowJavascriptExecutionDebugOnly __no_script__((i_isolate))
#define DCHECK_NO_SCRIPT_NO_EXCEPTION_MAYBE_TEARDOWN(i_isolate) \
i::DisallowJavascriptExecutionDebugOnly __no_script__((i_isolate)); \
i::DisallowExceptions __no_exceptions__((i_isolate))
// Lightweight version for APIs that don't require an active context. // Lightweight version for APIs that don't require an active context.
#define ASSERT_NO_SCRIPT_NO_EXCEPTION(isolate) \ #define DCHECK_NO_SCRIPT_NO_EXCEPTION(i_isolate) \
i::DisallowJavascriptExecutionDebugOnly __no_script__((isolate)); \ /* Embedders should never enter V8 after terminating it */ \
i::DisallowExceptions __no_exceptions__((isolate)) DCHECK(!i_isolate->is_execution_terminating()); \
DCHECK_NO_SCRIPT_NO_EXCEPTION_MAYBE_TEARDOWN(i_isolate)
#define ENTER_V8_NO_SCRIPT_NO_EXCEPTION(isolate) \ #define ENTER_V8_NO_SCRIPT_NO_EXCEPTION(i_isolate) \
i::VMState<v8::OTHER> __state__((isolate)); \ i::VMState<v8::OTHER> __state__((i_isolate)); \
ASSERT_NO_SCRIPT_NO_EXCEPTION(isolate) DCHECK_NO_SCRIPT_NO_EXCEPTION(i_isolate)
#define ENTER_V8_FOR_NEW_CONTEXT(isolate) \ // Used instead of ENTER_V8_NO_SCRIPT_NO_EXCEPTION where the V8 Api is entered
i::VMState<v8::OTHER> __state__((isolate)); \ // during termination sequences.
i::DisallowExceptions __no_exceptions__((isolate)) #define ENTER_V8_MAYBE_TEARDOWN(i_isolate) \
#else i::VMState<v8::OTHER> __state__((i_isolate)); \
#define ENTER_V8_NO_SCRIPT(isolate, context, class_name, function_name, \ DCHECK_NO_SCRIPT_NO_EXCEPTION_MAYBE_TEARDOWN(i_isolate)
#define ENTER_V8_FOR_NEW_CONTEXT(i_isolate) \
DCHECK(!(i_isolate)->is_execution_terminating()); \
i::VMState<v8::OTHER> __state__((i_isolate)); \
i::DisallowExceptions __no_exceptions__((i_isolate))
#else // DEBUG
#define ENTER_V8_NO_SCRIPT(i_isolate, context, class_name, function_name, \
bailout_value, HandleScopeClass) \ bailout_value, HandleScopeClass) \
ENTER_V8_HELPER_DO_NOT_USE(isolate, context, class_name, function_name, \ ENTER_V8_HELPER_INTERNAL(i_isolate, context, class_name, function_name, \
bailout_value, HandleScopeClass, false) bailout_value, HandleScopeClass, false)
#define ASSERT_NO_SCRIPT_NO_EXCEPTION(isolate) #define DCHECK_NO_SCRIPT_NO_EXCEPTION(i_isolate)
#define DCHECK_NO_SCRIPT_NO_EXCEPTION_MAYBE_TEARDOWN(i_isolate)
#define ENTER_V8_NO_SCRIPT_NO_EXCEPTION(isolate) \ #define ENTER_V8_NO_SCRIPT_NO_EXCEPTION(i_isolate) \
i::VMState<v8::OTHER> __state__((isolate)); i::VMState<v8::OTHER> __state__((i_isolate));
#define ENTER_V8_FOR_NEW_CONTEXT(isolate) \ #define ENTER_V8_MAYBE_TEARDOWN(i_isolate) \
i::VMState<v8::OTHER> __state__((isolate)); i::VMState<v8::OTHER> __state__((i_isolate));
#define ENTER_V8_FOR_NEW_CONTEXT(i_isolate) \
i::VMState<v8::OTHER> __state__((i_isolate));
#endif // DEBUG #endif // DEBUG
#define EXCEPTION_BAILOUT_CHECK_SCOPED_DO_NOT_USE(isolate, value) \ #define EXCEPTION_BAILOUT_CHECK_SCOPED_DO_NOT_USE(i_isolate, value) \
do { \ do { \
if (has_pending_exception) { \ if (has_pending_exception) { \
call_depth_scope.Escape(); \ call_depth_scope.Escape(); \
return value; \ return value; \
} \ } \
} while (false) } while (false)
#define RETURN_ON_FAILED_EXECUTION(T) \ #define RETURN_ON_FAILED_EXECUTION(T) \
EXCEPTION_BAILOUT_CHECK_SCOPED_DO_NOT_USE(isolate, MaybeLocal<T>()) EXCEPTION_BAILOUT_CHECK_SCOPED_DO_NOT_USE(i_isolate, MaybeLocal<T>())
#define RETURN_ON_FAILED_EXECUTION_PRIMITIVE(T) \ #define RETURN_ON_FAILED_EXECUTION_PRIMITIVE(T) \
EXCEPTION_BAILOUT_CHECK_SCOPED_DO_NOT_USE(isolate, Nothing<T>()) EXCEPTION_BAILOUT_CHECK_SCOPED_DO_NOT_USE(i_isolate, Nothing<T>())
#define RETURN_ESCAPED(value) return handle_scope.Escape(value); #define RETURN_ESCAPED(value) return handle_scope.Escape(value);

View file

@ -4,15 +4,12 @@
#include "src/api/api-natives.h" #include "src/api/api-natives.h"
#include "src/api/api-inl.h"
#include "src/common/message-template.h" #include "src/common/message-template.h"
#include "src/execution/isolate-inl.h" #include "src/execution/isolate-inl.h"
#include "src/heap/heap-inl.h" #include "src/heap/heap-inl.h"
#include "src/logging/runtime-call-stats-scope.h" #include "src/logging/runtime-call-stats-scope.h"
#include "src/objects/api-callbacks.h" #include "src/objects/api-callbacks.h"
#include "src/objects/hash-table-inl.h"
#include "src/objects/lookup.h" #include "src/objects/lookup.h"
#include "src/objects/property-cell.h"
#include "src/objects/templates.h" #include "src/objects/templates.h"
namespace v8 { namespace v8 {

Some files were not shown because too many files have changed in this diff