deps: update V8 to 13.6.233.8

PR-URL: https://github.com/nodejs/node/pull/58070
Reviewed-By: Antoine du Hamel <duhamelantoine1995@gmail.com>
Reviewed-By: Darshan Sen <raisinten@gmail.com>
Reviewed-By: Joyee Cheung <joyeec9h3@gmail.com>
Reviewed-By: Rafael Gonzaga <rafael.nunu@hotmail.com>
Michaël Zasso 2025-04-29 08:03:15 +02:00
parent 4c93107553
commit 918fe04351
GPG key ID: 770F7A9A5AE15600
5274 changed files with 454731 additions and 196837 deletions
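For reference, the V8 version bundled into a Node.js binary built from this commit can be confirmed at runtime; the sketch below is illustrative (the file name is hypothetical, and Node may append its own patch suffix such as "-node.N" to the base V8 version).

// check-v8-version.ts: minimal sketch, assuming a Node.js binary built from this commit.
// process.versions.v8 reports the bundled V8 version string, which for this update
// should begin with "13.6.233.8".
console.log(`V8: ${process.versions.v8}`);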

deps/v8/.bazelrc (vendored, 7 lines changed)

@@ -2,10 +2,14 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Enable Bzlmod for every Bazel command
common --enable_bzlmod
# Pass CC, CXX and PATH from the environment
build --action_env=CC
build --action_env=CXX
build --action_env=PATH
build --copt=-std=c++20
# Use Clang compiler
build:clang --action_env=BAZEL_COMPILER=clang
@@ -17,7 +21,8 @@ build:debug --compilation_mode=dbg
build:debug --config=v8_enable_debugging_features
build:debug --//:v8_enable_fast_mksnapshot
build:debug --//:v8_enable_backtrace
build:debug --//:v8_enable_handle_zapping
build:debug --//:v8_enable_local_handle_zapping
build:debug --//:v8_enable_global_handle_zapping
# v8_enable_debugging_features flags
build:v8_enable_debugging_features --//:v8_enable_verify_heap

deps/v8/.clang-tidy (vendored, 2 lines changed)

@@ -9,7 +9,9 @@
modernize-redundant-void-arg,
modernize-replace-random-shuffle,
modernize-shrink-to-fit,
modernize-type-traits,
# modernize-use-auto,
modernize-use-constraints,
modernize-use-bool-literals,
modernize-use-equals-default,
# modernize-use-equals-delete,

deps/v8/.gitignore (vendored, 13 lines changed)

@@ -82,14 +82,21 @@
/third_party/googletest/src/googletest/include/gtest/*
!/third_party/googletest/src/googletest/include/gtest/gtest_prod.h
!/third_party/highway
/third_party/highway/src
/third_party/highway/src/*
!/third_party/highway/src/hwy
!/third_party/inspector_protocol
!/third_party/jsoncpp
/third_party/jsoncpp/source
!/third_party/llvm-libc
/third_party/llvm-libc/src
!/third_party/rapidhash-v8
!/third_party/re2
/third_party/re2/src
!/third_party/test262-harness
!/third_party/siphash
!/third_party/utf8-decoder
!/third_party/v8
!/third_party/valgrind
!/third_party/vtune
!/third_party/wasm-api
/tools/builtins-pgo/profiles/*
!/tools/builtins-pgo/profiles/.*
@@ -103,6 +110,7 @@
/tools/oom_dump/oom_dump
/tools/oom_dump/oom_dump.o
/tools/protoc_wrapper
/tools/rust
/tools/turbolizer/build
/tools/turbolizer/.rpt2_cache
/tools/turbolizer/deploy
@@ -145,3 +153,4 @@ bazel-v8
!/third_party/fp16
/third_party/fp16/src/*
!/third_party/fp16/src/include
!/third_party/simdutf

deps/v8/.gn (vendored, 3 lines changed)

@@ -27,8 +27,9 @@ no_check_targets = [
default_args = {
# Disable js dependencies like the closure compiler.
enable_js_protobuf = false
# Disable rust dependencies.
enable_rust = false
enable_rust = true
}
# These are the list of GN files that run exec_script. This whitelist exists

deps/v8/AUTHORS (vendored, 11 lines changed)

@@ -57,8 +57,9 @@ Alexander Botero-Lowry <alexbl@FreeBSD.org>
Alexander Karpinsky <homm86@gmail.com>
Alexander Neville <dark@volatile.bz>
Alexandre Vassalotti <avassalotti@gmail.com>
Alexey Pavlyutkin <alexey.pavlyutkin.community@gmail.com>
Alexey Pavlyutkin <alexey.pavlyutkin@syntacore.com>
Alexis Campailla <alexis@janeasystems.com>
Aliya Minimullina <a.minimullina@syntacore.com>
Allan Sandfeld Jensen <allan.jensen@qt.io>
Amos Lim <eui-sang.lim@samsung.com>
Andreas Anyuru <andreas.anyuru@gmail.com>
@@ -120,6 +121,7 @@ Dominic Farolini <domfarolino@gmail.com>
Douglas Crosher <dtc-v8@scieneer.com>
Dusan Milosavljevic <dusan.m.milosavljevic@gmail.com>
Eden Wang <nedenwang@tencent.com>
Edoardo Marangoni <edoardo@wasmer.io>
Elisha Hollander <just4now666666@gmail.com>
Eric Rannaud <eric.rannaud@gmail.com>
Erich Ocean <erich.ocean@me.com>
@@ -210,11 +212,13 @@ Maxim Mazurok <maxim@mazurok.com>
Maxim Mossienko <maxim.mossienko@gmail.com>
Md Hasibul Hasan <hasibulhasan873@gmail.com>
Meir Shpilraien <meir@redis.com>
Meng Tan <tannal2409@gmail.com>
Michael Lutz <michi@icosahedron.de>
Michael Mclaughlin <m8ch88l@gmail.com>
Michael Smith <mike@w3.org>
Michaël Zasso <mic.besace@gmail.com>
Mihir Shah <mihirshah.11204@gmail.com>
Mika Fischer <mika.fischer@zoopnet.de>
Mike Gilbert <floppymaster@gmail.com>
Mike Pennisi <mike@mikepennisi.com>
Mikhail Gusarov <dottedmag@dottedmag.net>
@@ -268,6 +272,7 @@ Sébastien Doeraene <sjrdoeraene@gmail.com>
Seo Sanghyeon <sanxiyn@gmail.com>
Shawn Anastasio <shawnanastasio@gmail.com>
Shawn Presser <shawnpresser@gmail.com>
Sho Miyamoto <me@shqld.dev>
Stefan Penner <stefan.penner@gmail.com>
Stefan Stojanovic <stefko.stojanovic@gmail.com>
Stephan Hartmann <stha09@googlemail.com>
@@ -290,6 +295,7 @@ Varun Varada <varuncvarada@gmail.com>
Victor Costan <costan@gmail.com>
Victor Polevoy <fx@thefx.co>
Vlad Burlik <vladbph@gmail.com>
Vladimir Kempik <vladimir.kempik@syntacore.com>
Vladimir Krivosheev <develar@gmail.com>
Vladimir Shutoff <vovan@shutoff.ru>
Wael Almattar <waelsy123@gmail.com>
@@ -303,6 +309,7 @@ Wiktor Garbacz <wiktor.garbacz@gmail.com>
Wouter Vermeiren <wouter.vermeiren@essensium.com>
Xiaofang Zou <zouxiaofang@iscas.ac.cn>
Xiaoyin Liu <xiaoyin.l@outlook.com>
Yagiz Nizipli <yagiz@nizipli.com>
Yanbo Li <lybvinci@gmail.com>
Yannic Bonenberger <contact@yannic-bonenberger.com>
Yi Wang <wangyi8848@gmail.com>
@@ -317,6 +324,7 @@ Yuxiang Cao <caoyxsh@outlook.com>
Zac Hansen <xaxxon@gmail.com>
Zeynep Cankara <zeynepcankara402@gmail.com>
Zhao Jiazhong <kyslie3100@gmail.com>
Zhao Qin <qzmiss@gmail.com>
Zhaojun Meng <zhaojun.meng@gmail.com>
Zheng Liu <i6122f@gmail.com>
Zhongping Wang <kewpie.w.zp@gmail.com>
@@ -325,3 +333,4 @@ Yang Xiang <xiangyangemail@gmail.com>
Kotaro Ohsugi <dec4m4rk@gmail.com>
Jing Peiyang <jingpeiyang@eswincomputing.com>
magic-akari <akari.ccino@gmail.com>
Ryuhei Shima <shimaryuhei@gmail.com>

deps/v8/BUILD.bazel (vendored, 277 lines changed)

@@ -3,7 +3,7 @@
# found in the LICENSE file.
load("@bazel_skylib//lib:selects.bzl", "selects")
load("@rules_python//python:defs.bzl", "py_binary")
load("@rules_python//python:defs.bzl", "py_binary", "py_test")
load("@v8_python_deps//:requirements.bzl", "requirement")
load(
"@v8//:bazel/defs.bzl",
@@ -68,10 +68,12 @@ load(":bazel/v8-non-pointer-compression.bzl", "v8_binary_non_pointer_compression
# v8_control_flow_integrity
# v8_enable_sandbox
# cppgc_enable_caged_heap
# cppgc_enable_api_checks
# cppgc_enable_check_assignments_in_prefinalizers
# cppgc_enable_slim_write_barrier
# cppgc_enable_object_names
# cppgc_enable_pointer_compression
# cppgc_enable_slow_api_checks
# cppgc_enable_verify_heap
# cppgc_enable_young_generation
# v8_enable_zone_compression
@@ -83,6 +85,7 @@ load(":bazel/v8-non-pointer-compression.bzl", "v8_binary_non_pointer_compression
# v8_enable_javascript_promise_hooks
# v8_enable_allocation_folding
# v8_allocation_site_tracking
# v8_lower_limits_mode
v8_flag(name = "v8_android_log_stdout")
@@ -106,7 +109,9 @@ v8_flag(name = "v8_enable_debug_code")
v8_flag(name = "v8_enable_disassembler")
v8_flag(name = "v8_enable_handle_zapping")
v8_flag(name = "v8_enable_local_handle_zapping")
v8_flag(name = "v8_enable_global_handle_zapping")
v8_flag(name = "v8_enable_runtime_call_stats")
@@ -148,6 +153,8 @@ v8_flag(name = "v8_enable_verify_predictable")
v8_flag(name = "v8_enable_test_features")
v8_flag(name = "v8_wasm_random_fuzzers")
v8_flag(
name = "v8_enable_maglev",
default = True,
@@ -187,6 +194,14 @@ selects.config_setting_group(
],
)
selects.config_setting_group(
name = "enable_maglev_riscv",
match_all = [
":enable_maglev",
"@v8//bazel/config:v8_target_riscv64",
],
)
selects.config_setting_group(
name = "enable_maglev_arm",
match_all = [
@@ -248,6 +263,22 @@ selects.config_setting_group(
],
)
selects.config_setting_group(
name = "webassembly_on_posix",
match_all = [
"@v8//bazel/config:is_posix",
":is_v8_enable_webassembly",
],
)
selects.config_setting_group(
name = "webassembly_on_windows",
match_all = [
"@v8//bazel/config:is_windows",
":is_v8_enable_webassembly",
],
)
v8_flag(
name = "v8_jitless",
default = False,
@@ -326,10 +357,10 @@ selects.config_setting_group(
],
)
# Enable isolated cage if v8_enable_pointer_compression and
# Enable multiple cages if v8_enable_pointer_compression and
# NOT v8_enable_pointer_compression_shared_cage.
selects.config_setting_group(
name = "enable_pointer_compression_isolated_cage",
name = "enable_pointer_compression_multiple_cages",
match_all = [
":is_v8_enable_pointer_compression",
":is_not_v8_enable_pointer_compression_shared_cage",
@@ -435,7 +466,8 @@ v8_config(
"v8_imminent_deprecation_warnings": "V8_IMMINENT_DEPRECATION_WARNINGS",
"v8_enable_debug_code": "V8_ENABLE_DEBUG_CODE",
"v8_enable_disassembler": "ENABLE_DISASSEMBLER",
"v8_enable_handle_zapping": "ENABLE_HANDLE_ZAPPING",
"v8_enable_global_handle_zapping": "ENABLE_GLOBAL_HANDLE_ZAPPING",
"v8_enable_local_handle_zapping": "ENABLE_LOCAL_HANDLE_ZAPPING",
"v8_enable_hugepage": "ENABLE_HUGEPAGE",
"v8_enable_future": "V8_ENABLE_FUTURE",
"v8_enable_lazy_source_positions": "V8_ENABLE_LAZY_SOURCE_POSITIONS",
@@ -460,9 +492,11 @@ v8_config(
"GOOGLE3",
"V8_ADVANCED_BIGINT_ALGORITHMS",
"V8_CONCURRENT_MARKING",
"V8_ENABLE_LEAPTIERING",
"V8_ENABLE_SPARKPLUG",
"V8_ENABLE_EXTENSIBLE_RO_SNAPSHOT",
"V8_ENABLE_CONTINUATION_PRESERVED_EMBEDDER_DATA",
"V8_TLS_USED_IN_LIBRARY",
] + select({
"@v8//bazel/config:is_debug": [
"DEBUG",
@@ -480,8 +514,7 @@ v8_config(
],
"@v8//bazel/config:v8_target_arm64": ["V8_TARGET_ARCH_ARM64"],
"@v8//bazel/config:v8_target_s390x": [
"V8_TARGET_ARCH_S390",
"V8_TARGET_ARCH_S390X",
"V8_TARGET_ARCH_S390X"
],
"@v8//bazel/config:v8_target_riscv64": [
# NOTE: Bazel rules for riscv64 weren't tested on a real system.
@@ -513,7 +546,7 @@ v8_config(
"UNICODE",
"_UNICODE",
"_CRT_RAND_S",
"_WIN32_WINNT=0x0602",
"_WIN32_WINNT=0x0A00",
],
}) + select({
":is_v8_enable_pointer_compression": [
@@ -525,16 +558,8 @@ v8_config(
":enable_pointer_compression_shared_cage": [
"V8_COMPRESS_POINTERS_IN_SHARED_CAGE",
],
":enable_pointer_compression_isolated_cage": [
"V8_COMPRESS_POINTERS_IN_ISOLATE_CAGE",
],
"//conditions:default": [],
}) + select({
# Shared RO heap is unconfigurable in bazel. However, we
# still have to make sure that the flag is disabled when
# v8_enable_pointer_compression_shared_cage is set to false.
":is_v8_enable_pointer_compression_shared_cage": [
"V8_SHARED_RO_HEAP",
":enable_pointer_compression_multiple_cages": [
"V8_COMPRESS_POINTERS_IN_MULTIPLE_CAGES",
],
"//conditions:default": [],
}) + select({
@@ -576,6 +601,7 @@ v8_config(
filegroup(
name = "public_header_files",
srcs = glob(["include/**/*.h"]),
visibility = ["//visibility:public"],
)
filegroup(
@@ -612,7 +638,6 @@ filegroup(
"include/cppgc/cross-thread-persistent.h",
"include/cppgc/custom-space.h",
"include/cppgc/default-platform.h",
"include/cppgc/ephemeron-pair.h",
"include/cppgc/explicit-management.h",
"include/cppgc/garbage-collected.h",
"include/cppgc/heap.h",
@@ -626,6 +651,7 @@ filegroup(
"include/cppgc/internal/caged-heap.h",
"include/cppgc/internal/caged-heap-local-data.h",
"include/cppgc/internal/compiler-specific.h",
"include/cppgc/internal/conditional-stack-allocated.h",
"include/cppgc/internal/finalizer-trait.h",
"include/cppgc/internal/gc-info.h",
"include/cppgc/internal/logging.h",
@@ -667,6 +693,7 @@ filegroup(
"include/v8-embedder-state-scope.h",
"include/v8-exception.h",
"include/v8-extension.h",
"include/v8-external-memory-accounter.h",
"include/v8-external.h",
"include/v8-fast-api-calls.h",
"include/v8-forward.h",
@@ -771,8 +798,10 @@ filegroup(
"src/base/file-utils.cc",
"src/base/file-utils.h",
"src/base/flags.h",
"src/base/fpu.cc",
"src/base/fpu.h",
"src/base/free_deleter.h",
"src/base/functional.h",
"src/base/hashing.h",
"src/base/hashmap.h",
"src/base/hashmap-entry.h",
"src/base/ieee754.cc",
@@ -817,6 +846,7 @@ filegroup(
"src/base/platform/mutex.h",
"src/base/platform/platform.cc",
"src/base/platform/platform.h",
"src/base/platform/platform-posix.h", # Always included by src/execution/isolate.h
"src/base/platform/semaphore.cc",
"src/base/platform/semaphore.h",
"src/base/platform/time.cc",
@@ -835,9 +865,10 @@ filegroup(
"src/base/string-format.h",
"src/base/strings.cc",
"src/base/strings.h",
"src/base/strong-alias.h",
"src/base/sys-info.cc",
"src/base/sys-info.h",
"src/base/template-meta-programming/algorithm.h",
"src/base/template-meta-programming/common.h",
"src/base/template-meta-programming/functional.h",
"src/base/template-meta-programming/list.h",
"src/base/template-meta-programming/string-literal.h",
@@ -860,7 +891,6 @@ filegroup(
] + select({
"@v8//bazel/config:is_posix": [
"src/base/platform/platform-posix.cc",
"src/base/platform/platform-posix.h",
"src/base/platform/platform-posix-time.cc",
"src/base/platform/platform-posix-time.h",
],
@@ -883,6 +913,7 @@ filegroup(
"@v8//bazel/config:is_windows": [
"src/base/debug/stack_trace_win.cc",
"src/base/platform/platform-win32.cc",
"src/base/platform/platform-win32.h",
"src/base/win32-headers.h",
],
}),
@@ -1299,6 +1330,7 @@ filegroup(
"src/base/sanitizer/lsan-virtual-address-space.h",
"src/base/sanitizer/msan.h",
"src/base/sanitizer/tsan.h",
"src/base/sanitizer/ubsan.h",
"src/baseline/baseline.cc",
"src/baseline/baseline.h",
"src/baseline/baseline-assembler.h",
@@ -1449,6 +1481,7 @@ filegroup(
"src/common/ptr-compr.cc",
"src/common/ptr-compr.h",
"src/common/ptr-compr-inl.h",
"src/common/thread-local-storage.h",
"src/common/segmented-table.h",
"src/common/segmented-table-inl.h",
"src/common/simd128.h",
@@ -1554,7 +1587,7 @@ filegroup(
"src/execution/protectors.cc",
"src/execution/protectors.h",
"src/execution/protectors-inl.h",
"src/execution/shared-mutex-guard-if-off-thread.h",
"src/execution/mutex-guard-if-off-thread.h",
"src/execution/simulator.h",
"src/execution/simulator-base.cc",
"src/execution/simulator-base.h",
@@ -1628,6 +1661,8 @@ filegroup(
"src/heap/combined-heap.h",
"src/heap/concurrent-marking.cc",
"src/heap/concurrent-marking.h",
"src/heap/conservative-stack-visitor.h",
"src/heap/conservative-stack-visitor-inl.h",
"src/heap/cppgc-js/cpp-heap.cc",
"src/heap/cppgc-js/cpp-heap.h",
"src/heap/cppgc-js/cpp-marking-state.h",
@@ -1674,8 +1709,13 @@ filegroup(
"src/heap/heap-controller.cc",
"src/heap/heap-controller.h",
"src/heap/heap-inl.h",
"src/heap/heap-layout.cc",
"src/heap/heap-layout.h",
"src/heap/heap-layout-inl.h",
"src/heap/heap-layout-tracer.cc",
"src/heap/heap-layout-tracer.h",
"src/heap/heap-utils.h",
"src/heap/heap-utils-inl.h",
"src/heap/heap-verifier.cc",
"src/heap/heap-verifier.h",
"src/heap/heap-write-barrier.cc",
@@ -1695,6 +1735,8 @@ filegroup(
"src/heap/large-spaces.h",
"src/heap/linear-allocation-area.h",
"src/heap/list.h",
"src/heap/live-object-range.h",
"src/heap/live-object-range-inl.h",
"src/heap/local-factory.cc",
"src/heap/local-factory.h",
"src/heap/local-factory-inl.h",
@@ -1718,6 +1760,7 @@ filegroup(
"src/heap/marking-barrier.cc",
"src/heap/marking-barrier.h",
"src/heap/marking-barrier-inl.h",
"src/heap/marking-progress-tracker.h",
"src/heap/marking-state.h",
"src/heap/marking-state-inl.h",
"src/heap/marking-visitor.h",
@@ -1734,8 +1777,8 @@ filegroup(
"src/heap/mutable-page-metadata-inl.h",
"src/heap/memory-chunk.cc",
"src/heap/memory-chunk.h",
"src/heap/memory-chunk-constants.h",
"src/heap/memory-chunk-inl.h",
"src/heap/memory-chunk-layout.cc",
"src/heap/memory-chunk-layout.h",
"src/heap/memory-measurement.cc",
"src/heap/memory-measurement.h",
@@ -1751,12 +1794,14 @@ filegroup(
"src/heap/object-lock-inl.h",
"src/heap/object-stats.cc",
"src/heap/object-stats.h",
"src/heap/objects-visiting.cc",
"src/heap/objects-visiting.h",
"src/heap/objects-visiting-inl.h",
"src/heap/heap-visitor.cc",
"src/heap/heap-visitor.h",
"src/heap/heap-visitor-inl.h",
"src/heap/page-metadata.cc",
"src/heap/page-metadata.h",
"src/heap/page-metadata-inl.h",
"src/heap/page-pool.cc",
"src/heap/page-pool.h",
"src/heap/paged-spaces.cc",
"src/heap/paged-spaces.h",
"src/heap/paged-spaces-inl.h",
@@ -1766,7 +1811,6 @@ filegroup(
"src/heap/pretenuring-handler.cc",
"src/heap/pretenuring-handler.h",
"src/heap/pretenuring-handler-inl.h",
"src/heap/progress-bar.h",
"src/heap/read-only-heap.cc",
"src/heap/read-only-heap.h",
"src/heap/read-only-heap-inl.h",
@@ -1792,6 +1836,8 @@ filegroup(
"src/heap/sweeper.h",
"src/heap/traced-handles-marking-visitor.cc",
"src/heap/traced-handles-marking-visitor.h",
"src/heap/visit-object.cc",
"src/heap/visit-object.h",
"src/heap/weak-object-worklists.cc",
"src/heap/weak-object-worklists.h",
"src/heap/young-generation-marking-visitor.h",
@@ -1891,6 +1937,8 @@ filegroup(
"src/numbers/conversions.h",
"src/numbers/conversions-inl.h",
"src/numbers/hash-seed-inl.h",
"src/numbers/ieee754.cc",
"src/numbers/ieee754.h",
"src/numbers/integer-literal.h",
"src/numbers/integer-literal-inl.h",
"src/numbers/math-random.cc",
@@ -1911,7 +1959,6 @@ filegroup(
"src/objects/backing-store.h",
"src/objects/bigint.cc",
"src/objects/bigint.h",
"src/objects/bigint-inl.h",
"src/objects/bytecode-array.cc",
"src/objects/bytecode-array.h",
"src/objects/bytecode-array-inl.h",
@@ -1919,6 +1966,7 @@ filegroup(
"src/objects/call-site-info.h",
"src/objects/call-site-info-inl.h",
"src/objects/casting.h",
"src/objects/casting-inl.h",
"src/objects/cell.h",
"src/objects/cell-inl.h",
"src/objects/code.cc",
@@ -2037,6 +2085,7 @@ filegroup(
"src/objects/js-temporal-objects.cc",
"src/objects/js-temporal-objects.h",
"src/objects/js-temporal-objects-inl.h",
"src/objects/js-weak-refs.cc",
"src/objects/js-weak-refs.h",
"src/objects/js-weak-refs-inl.h",
"src/objects/keys.cc",
@@ -2193,8 +2242,8 @@ filegroup(
"src/parsing/expression-scope.h",
"src/parsing/func-name-inferrer.cc",
"src/parsing/func-name-inferrer.h",
"src/parsing/import-assertions.cc",
"src/parsing/import-assertions.h",
"src/parsing/import-attributes.cc",
"src/parsing/import-attributes.h",
"src/parsing/keywords-gen.h",
"src/parsing/literal-buffer.cc",
"src/parsing/literal-buffer.h",
@@ -2291,6 +2340,8 @@ filegroup(
"src/regexp/regexp-nodes.h",
"src/regexp/regexp-parser.cc",
"src/regexp/regexp-parser.h",
"src/regexp/regexp-result-vector.cc",
"src/regexp/regexp-result-vector.h",
"src/regexp/regexp-stack.cc",
"src/regexp/regexp-stack.h",
"src/regexp/regexp-utils.cc",
@@ -2342,6 +2393,7 @@ filegroup(
"src/sandbox/external-pointer-table.cc",
"src/sandbox/external-pointer-table.h",
"src/sandbox/external-pointer-table-inl.h",
"src/sandbox/cppheap-pointer.h",
"src/sandbox/cppheap-pointer-inl.h",
"src/sandbox/cppheap-pointer-table.cc",
"src/sandbox/cppheap-pointer-table.h",
@@ -2352,6 +2404,8 @@ filegroup(
"src/sandbox/js-dispatch-table.cc",
"src/sandbox/js-dispatch-table.h",
"src/sandbox/js-dispatch-table-inl.h",
"src/sandbox/trusted-pointer-scope.cc",
"src/sandbox/trusted-pointer-scope.h",
"src/sandbox/trusted-pointer-table.cc",
"src/sandbox/trusted-pointer-table.h",
"src/sandbox/trusted-pointer-table-inl.h",
@@ -2365,12 +2419,6 @@ filegroup(
"src/sandbox/indirect-pointer-tag.h",
"src/sandbox/indirect-pointer-inl.h",
"src/sandbox/code-entrypoint-tag.h",
"src/sandbox/external-buffer.h",
"src/sandbox/external-buffer-tag.h",
"src/sandbox/external-buffer-inl.h",
"src/sandbox/external-buffer-table.cc",
"src/sandbox/external-buffer-table-inl.h",
"src/sandbox/external-buffer-table.h",
"src/sandbox/external-entity-table.h",
"src/sandbox/external-entity-table-inl.h",
"src/sandbox/hardware-support.cc",
@@ -2436,6 +2484,7 @@ filegroup(
"src/strings/string-builder-inl.h",
"src/strings/string-case.cc",
"src/strings/string-case.h",
"src/strings/string-hasher.cc",
"src/strings/string-hasher.h",
"src/strings/string-hasher-inl.h",
"src/strings/string-search.h",
@@ -2456,13 +2505,11 @@ filegroup(
"src/tasks/task-utils.h",
"src/temporal/temporal-parser.cc",
"src/temporal/temporal-parser.h",
"src/third_party/siphash/halfsiphash.cc",
"src/third_party/siphash/halfsiphash.h",
"src/third_party/utf8-decoder/utf8-decoder.h",
"src/torque/runtime-macro-shims.h",
"src/tracing/trace-event.cc",
"src/tracing/trace-event.h",
"src/tracing/trace-event-no-perfetto.h",
"src/tracing/trace-id.h",
"src/tracing/traced-value.cc",
"src/tracing/traced-value.h",
"src/tracing/tracing-category-observer.cc",
@@ -2514,6 +2561,10 @@ filegroup(
"src/zone/zone-segment.h",
"src/zone/zone-type-traits.h",
"src/zone/zone-utils.h",
"third_party/rapidhash-v8/rapidhash.h",
"third_party/siphash/halfsiphash.cc",
"third_party/siphash/halfsiphash.h",
"third_party/utf8-decoder/utf8-decoder.h",
":cppgc_base_files",
":generated_bytecode_builtins_list",
":v8_bigint",
@@ -2735,10 +2786,16 @@ filegroup(
"src/wasm/baseline/ppc/liftoff-assembler-ppc-inl.h",
],
}) + select({
"@v8//bazel/config:is_posix": [
":webassembly_on_posix": [
"src/trap-handler/handler-inside-posix.cc",
"src/trap-handler/handler-outside-posix.cc",
],
":webassembly_on_windows": [
"src/trap-handler/handler-inside-win.cc",
"src/trap-handler/handler-inside-win.h",
"src/trap-handler/handler-outside-win.cc",
"include/v8-wasm-trap-handler-win.h",
],
"//conditions:default": [],
}) + select({
"@v8//bazel/config:v8_arm64_simulator": [
@@ -2746,13 +2803,6 @@ filegroup(
"src/trap-handler/trap-handler-simulator.h",
],
"//conditions:default": [],
}) + select({
"@v8//bazel/config:is_windows": [
"src/trap-handler/handler-inside-win.cc",
"src/trap-handler/handler-inside-win.h",
"src/trap-handler/handler-outside-win.cc",
],
"//conditions:default": [],
}) + select({
"@v8//bazel/config:is_windows_64bit": [
"src/diagnostics/unwinding-info-win64.cc",
@@ -2765,17 +2815,20 @@ filegroup(
"src/maglev/maglev-assembler.h",
"src/maglev/maglev-basic-block.h",
"src/maglev/maglev-code-gen-state.h",
"src/maglev/maglev-code-gen-state-inl.h",
"src/maglev/maglev-code-generator.h",
"src/maglev/maglev-compilation-info.h",
"src/maglev/maglev-compilation-unit.h",
"src/maglev/maglev-compiler.h",
"src/maglev/maglev-concurrent-dispatcher.h",
"src/maglev/maglev-deopt-frame-visitor.h",
"src/maglev/maglev-graph-builder.h",
"src/maglev/maglev-graph-labeller.h",
"src/maglev/maglev-graph-printer.h",
"src/maglev/maglev-graph-processor.h",
"src/maglev/maglev-graph-verifier.h",
"src/maglev/maglev-graph.h",
"src/maglev/maglev-inlining.h",
"src/maglev/maglev-interpreter-frame-state.h",
"src/maglev/maglev-ir-inl.h",
"src/maglev/maglev-ir.h",
@@ -2817,6 +2870,13 @@ filegroup(
"src/maglev/arm64/maglev-ir-arm64.cc",
],
"//conditions:default": [],
}) + select({
":enable_maglev_riscv": [
"src/maglev/riscv/maglev-assembler-riscv-inl.h",
"src/maglev/riscv/maglev-assembler-riscv.cc",
"src/maglev/riscv/maglev-ir-riscv.cc",
],
"//conditions:default": [],
}) + select({
":enable_maglev_arm": [
"src/maglev/arm/maglev-assembler-arm-inl.h",
@@ -2841,7 +2901,6 @@ filegroup(
"src/debug/debug-wasm-objects-inl.h",
"src/runtime/runtime-test-wasm.cc",
"src/runtime/runtime-wasm.cc",
"src/third_party/utf8-decoder/generalized-utf8-decoder.h",
"src/trap-handler/handler-inside.cc",
"src/trap-handler/handler-inside-posix.h",
"src/trap-handler/handler-outside.cc",
@@ -2877,10 +2936,7 @@ filegroup(
"src/wasm/function-body-decoder-impl.h",
"src/wasm/function-compiler.cc",
"src/wasm/function-compiler.h",
"src/wasm/fuzzing/random-module-generation.cc",
"src/wasm/fuzzing/random-module-generation.h",
"src/wasm/graph-builder-interface.cc",
"src/wasm/graph-builder-interface.h",
"src/wasm/inlining-tree.h",
"src/wasm/jump-table-assembler.cc",
"src/wasm/jump-table-assembler.h",
@ -2900,7 +2956,6 @@ filegroup(
"src/wasm/object-access.h",
"src/wasm/pgo.cc",
"src/wasm/pgo.h",
"src/wasm/serialized-signature-inl.h",
"src/wasm/signature-hashing.h",
"src/wasm/simd-shuffle.cc",
"src/wasm/simd-shuffle.h",
@@ -2966,6 +3021,7 @@ filegroup(
"src/wasm/well-known-imports.cc",
"src/wasm/well-known-imports.h",
"src/wasm/wrappers.cc",
"third_party/utf8-decoder/generalized-utf8-decoder.h",
],
"//conditions:default": [],
}) + select({
@@ -2983,6 +3039,11 @@ filegroup(
"src/wasm/interpreter/wasm-interpreter-simd.cc",
],
"//conditions:default": [],
}) + select({
":is_v8_wasm_random_fuzzers": [
"src/wasm/fuzzing/random-module-generation.cc",
],
"//conditions:default": [],
}),
)
@@ -3082,8 +3143,6 @@ filegroup(
"src/compiler/backend/unwinding-info-writer.h",
"src/compiler/basic-block-instrumentor.cc",
"src/compiler/basic-block-instrumentor.h",
"src/compiler/branch-condition-duplicator.cc",
"src/compiler/branch-condition-duplicator.h",
"src/compiler/branch-elimination.cc",
"src/compiler/branch-elimination.h",
"src/compiler/bytecode-analysis.cc",
@@ -3097,6 +3156,7 @@ filegroup(
"src/compiler/checkpoint-elimination.h",
"src/compiler/code-assembler.cc",
"src/compiler/code-assembler.h",
"src/compiler/code-assembler-compilation-job.h",
"src/compiler/common-node-cache.cc",
"src/compiler/common-node-cache.h",
"src/compiler/common-operator.cc",
@@ -3109,8 +3169,6 @@ filegroup(
"src/compiler/compiler-source-position-table.h",
"src/compiler/constant-folding-reducer.cc",
"src/compiler/constant-folding-reducer.h",
"src/compiler/const-tracking-let-helpers.cc",
"src/compiler/const-tracking-let-helpers.h",
"src/compiler/control-equivalence.cc",
"src/compiler/control-equivalence.h",
"src/compiler/control-path-state.h",
@@ -3118,8 +3176,6 @@ filegroup(
"src/compiler/csa-load-elimination.h",
"src/compiler/dead-code-elimination.cc",
"src/compiler/dead-code-elimination.h",
"src/compiler/decompression-optimizer.cc",
"src/compiler/decompression-optimizer.h",
"src/compiler/diamond.h",
"src/compiler/escape-analysis.cc",
"src/compiler/escape-analysis.h",
@@ -3135,16 +3191,12 @@ filegroup(
"src/compiler/frame-states.h",
"src/compiler/functional-list.h",
"src/compiler/globals.h",
"src/compiler/graph.cc",
"src/compiler/graph.h",
"src/compiler/graph-assembler.cc",
"src/compiler/graph-assembler.h",
"src/compiler/graph-reducer.cc",
"src/compiler/graph-reducer.h",
"src/compiler/graph-trimmer.cc",
"src/compiler/graph-trimmer.h",
"src/compiler/graph-visualizer.cc",
"src/compiler/graph-visualizer.h",
"src/compiler/graph-zone-traits.h",
"src/compiler/heap-refs.cc",
"src/compiler/heap-refs.h",
@@ -3268,6 +3320,14 @@ filegroup(
"src/compiler/string-builder-optimizer.h",
"src/compiler/turbofan.h",
"src/compiler/turbofan-enabled.cc",
"src/compiler/turbofan-graph.cc",
"src/compiler/turbofan-graph.h",
"src/compiler/turbofan-graph-visualizer.cc",
"src/compiler/turbofan-graph-visualizer.h",
"src/compiler/turbofan-typer.cc",
"src/compiler/turbofan-typer.h",
"src/compiler/turbofan-types.cc",
"src/compiler/turbofan-types.h",
"src/compiler/turboshaft/access-builder.h",
"src/compiler/turboshaft/analyzer-iterator.cc",
"src/compiler/turboshaft/analyzer-iterator.h",
@@ -3282,7 +3342,6 @@ filegroup(
"src/compiler/turboshaft/build-graph-phase.cc",
"src/compiler/turboshaft/build-graph-phase.h",
"src/compiler/turboshaft/builtin-call-descriptors.h",
"src/compiler/turboshaft/builtin-compiler.cc",
"src/compiler/turboshaft/builtin-compiler.h",
"src/compiler/turboshaft/csa-optimize-phase.cc",
"src/compiler/turboshaft/csa-optimize-phase.h",
@@ -3336,8 +3395,8 @@ filegroup(
"src/compiler/turboshaft/machine-lowering-phase.h",
"src/compiler/turboshaft/machine-lowering-reducer-inl.h",
"src/compiler/turboshaft/maglev-early-lowering-reducer-inl.h",
"src/compiler/turboshaft/maglev-graph-building-phase.cc",
"src/compiler/turboshaft/maglev-graph-building-phase.h",
"src/compiler/turboshaft/turbolev-graph-builder.cc",
"src/compiler/turboshaft/turbolev-graph-builder.h",
"src/compiler/turboshaft/machine-optimization-reducer.h",
"src/compiler/turboshaft/memory-optimization-reducer.cc",
"src/compiler/turboshaft/memory-optimization-reducer.h",
@@ -3353,10 +3412,6 @@ filegroup(
"src/compiler/turboshaft/pipelines.h",
"src/compiler/turboshaft/pretenuring-propagation-reducer.cc",
"src/compiler/turboshaft/pretenuring-propagation-reducer.h",
"src/compiler/turboshaft/recreate-schedule.cc",
"src/compiler/turboshaft/recreate-schedule.h",
"src/compiler/turboshaft/recreate-schedule-phase.cc",
"src/compiler/turboshaft/recreate-schedule-phase.h",
"src/compiler/turboshaft/reducer-traits.h",
"src/compiler/turboshaft/register-allocation-phase.h",
"src/compiler/turboshaft/representations.cc",
@@ -3366,9 +3421,6 @@ filegroup(
"src/compiler/turboshaft/select-lowering-reducer.h",
"src/compiler/turboshaft/sidetable.cc",
"src/compiler/turboshaft/sidetable.h",
"src/compiler/turboshaft/simplified-lowering-phase.cc",
"src/compiler/turboshaft/simplified-lowering-phase.h",
"src/compiler/turboshaft/simplified-lowering-reducer.h",
"src/compiler/turboshaft/simplify-tf-loops.cc",
"src/compiler/turboshaft/simplify-tf-loops.h",
"src/compiler/turboshaft/snapshot-table.h",
@@ -3377,6 +3429,8 @@ filegroup(
"src/compiler/turboshaft/store-store-elimination-phase.cc",
"src/compiler/turboshaft/store-store-elimination-phase.h",
"src/compiler/turboshaft/store-store-elimination-reducer-inl.h",
"src/compiler/turboshaft/string-escape-analysis-reducer.cc",
"src/compiler/turboshaft/string-escape-analysis-reducer.h",
"src/compiler/turboshaft/string-view.h",
"src/compiler/turboshaft/structural-optimization-reducer.h",
"src/compiler/turboshaft/tracing.h",
@@ -3409,10 +3463,6 @@ filegroup(
"src/compiler/type-narrowing-reducer.h",
"src/compiler/typed-optimization.cc",
"src/compiler/typed-optimization.h",
"src/compiler/typer.cc",
"src/compiler/typer.h",
"src/compiler/types.cc",
"src/compiler/types.h",
"src/compiler/use-info.h",
"src/compiler/value-numbering-reducer.cc",
"src/compiler/value-numbering-reducer.h",
@@ -3478,10 +3528,13 @@ filegroup(
":is_v8_enable_webassembly": [
"src/compiler/int64-lowering.cc",
"src/compiler/int64-lowering.h",
"src/compiler/turboshaft/growable-stacks-reducer.h",
"src/compiler/turboshaft/int64-lowering-phase.cc",
"src/compiler/turboshaft/int64-lowering-phase.h",
"src/compiler/turboshaft/int64-lowering-reducer.h",
"src/compiler/turboshaft/wasm-assembler-helpers.h",
"src/compiler/turboshaft/wasm-debug-memory-lowering-phase.cc",
"src/compiler/turboshaft/wasm-debug-memory-lowering-phase.h",
"src/compiler/turboshaft/wasm-gc-optimize-phase.cc",
"src/compiler/turboshaft/wasm-gc-optimize-phase.h",
"src/compiler/turboshaft/wasm-gc-typed-optimization-reducer.cc",
@@ -3497,6 +3550,10 @@ filegroup(
"src/compiler/turboshaft/wasm-optimize-phase.h",
"src/compiler/turboshaft/wasm-turboshaft-compiler.cc",
"src/compiler/turboshaft/wasm-turboshaft-compiler.h",
"src/compiler/turboshaft/wasm-shuffle-reducer.cc",
"src/compiler/turboshaft/wasm-shuffle-reducer.h",
"src/compiler/turboshaft/wasm-simd-phase.cc",
"src/compiler/turboshaft/wasm-simd-phase.h",
"src/compiler/wasm-address-reassociation.cc",
"src/compiler/wasm-address-reassociation.h",
"src/compiler/wasm-call-descriptors.cc",
@@ -3513,16 +3570,10 @@ filegroup(
"src/compiler/wasm-gc-operator-reducer.h",
"src/compiler/wasm-graph-assembler.cc",
"src/compiler/wasm-graph-assembler.h",
"src/compiler/wasm-inlining.cc",
"src/compiler/wasm-inlining.h",
"src/compiler/wasm-inlining-into-js.cc",
"src/compiler/wasm-inlining-into-js.h",
"src/compiler/wasm-load-elimination.cc",
"src/compiler/wasm-load-elimination.h",
"src/compiler/wasm-loop-peeling.cc",
"src/compiler/wasm-loop-peeling.h",
"src/compiler/wasm-js-lowering.cc",
"src/compiler/wasm-js-lowering.h",
"src/compiler/wasm-typer.cc",
"src/compiler/wasm-typer.h",
],
@@ -3534,6 +3585,8 @@ filegroup(
"//conditions:default": [
"src/maglev/maglev-basic-block.h",
"src/maglev/maglev-code-gen-state.h",
"src/maglev/maglev-code-gen-state-inl.h",
"src/maglev/maglev-deopt-frame-visitor.h",
"src/maglev/maglev-compilation-info.cc",
"src/maglev/maglev-compilation-info.h",
"src/maglev/maglev-compilation-unit.cc",
@@ -3546,6 +3599,7 @@ filegroup(
"src/maglev/maglev-graph-printer.h",
"src/maglev/maglev-graph-processor.h",
"src/maglev/maglev-graph-verifier.h",
"src/maglev/maglev-inlining.h",
"src/maglev/maglev-interpreter-frame-state.cc",
"src/maglev/maglev-interpreter-frame-state.h",
"src/maglev/maglev-ir.cc",
@@ -3626,6 +3680,8 @@ filegroup(
"src/codegen/code-stub-assembler.h",
"src/codegen/define-code-stub-assembler-macros.inc",
"src/codegen/undef-code-stub-assembler-macros.inc",
"src/compiler/turboshaft/builtin-compiler.cc",
"src/compiler/turboshaft/builtin-compiler.h",
"src/heap/setup-heap-internal.cc",
"src/ic/accessor-assembler.cc",
"src/ic/accessor-assembler.h",
@@ -3802,31 +3858,12 @@ filegroup(
"@v8//bazel/config:is_inline_asm_s390x": ["src/heap/base/asm/s390/push_registers_asm.cc"],
"@v8//bazel/config:is_inline_asm_riscv64": ["src/heap/base/asm/riscv64/push_registers_asm.cc"],
"@v8//bazel/config:is_inline_asm_ppc64le": ["src/heap/base/asm/ppc/push_registers_asm.cc"],
"@v8//bazel/config:is_msvc_asm_ia32": ["src/heap/base/asm/ia32/push_registers_masm.asm"],
"@v8//bazel/config:is_msvc_asm_ia32": ["src/heap/base/asm/ia32/push_registers_asm.cc"],
"@v8//bazel/config:is_msvc_asm_x64": ["src/heap/base/asm/x64/push_registers_masm.asm"],
"@v8//bazel/config:is_msvc_asm_arm64": ["src/heap/base/asm/arm64/push_registers_masm.S"],
"@v8//bazel/config:is_msvc_asm_arm64": ["src/heap/base/asm/arm64/push_registers_asm.cc"],
}),
)
v8_library(
name = "lib_fast_float",
srcs = [
"third_party/fast_float/src/include/fast_float/ascii_number.h",
"third_party/fast_float/src/include/fast_float/bigint.h",
"third_party/fast_float/src/include/fast_float/constexpr_feature_detect.h",
"third_party/fast_float/src/include/fast_float/decimal_to_binary.h",
"third_party/fast_float/src/include/fast_float/digit_comparison.h",
"third_party/fast_float/src/include/fast_float/fast_float.h",
"third_party/fast_float/src/include/fast_float/fast_table.h",
"third_party/fast_float/src/include/fast_float/float_common.h",
"third_party/fast_float/src/include/fast_float/parse_number.h",
],
hdrs = [ "third_party/fast_float/src/include/fast_float/fast_float.h" ],
includes = [
"third_party/fast_float/src/include",
],
)
v8_library(
name = "lib_fp16",
srcs = ["third_party/fp16/src/include/fp16.h"],
@@ -3979,10 +4016,8 @@ filegroup(
"third_party/inspector_protocol/crdtp/export.h",
"third_party/inspector_protocol/crdtp/find_by_first.h",
"third_party/inspector_protocol/crdtp/frontend_channel.h",
"third_party/inspector_protocol/crdtp/glue.h",
"third_party/inspector_protocol/crdtp/json.cc",
"third_party/inspector_protocol/crdtp/json.h",
"third_party/inspector_protocol/crdtp/maybe.h",
"third_party/inspector_protocol/crdtp/parser_handler.h",
"third_party/inspector_protocol/crdtp/protocol_core.cc",
"third_party/inspector_protocol/crdtp/protocol_core.h",
@@ -4135,7 +4170,6 @@ py_binary(
"third_party/inspector_protocol/templates/TypeBuilder_h.template",
],
imports = ["third_party/inspector_protocol/"],
python_version = "PY3",
deps = [
requirement("jinja2"),
],
@@ -4272,6 +4306,7 @@ v8_library(
":v8_shared_internal_headers",
],
copts = ["-Wno-implicit-fallthrough"],
deps = ["@abseil-cpp//absl/synchronization", "@abseil-cpp//absl/time"],
)
cc_library(
@@ -4298,10 +4333,15 @@ cc_library(
deps = [
":torque_base_headers",
":v8_libbase",
"//external:absl_optional",
],
)
cc_library(
name = "simdutf",
srcs = ["third_party/simdutf/simdutf.cpp"],
hdrs = ["third_party/simdutf/simdutf.h"],
)
v8_library(
name = "v8_libshared",
srcs = [
@@ -4330,12 +4370,14 @@ v8_library(
":noicu/generated_torque_definitions",
],
deps = [
":lib_fast_float",
"//third_party/fast_float/src:fast_float",
":lib_fp16",
":simdutf",
":v8_libbase",
"//external:absl_btree",
"//external:absl_flat_hash_map",
"//external:absl_flat_hash_set",
"@abseil-cpp//absl/container:btree",
"@abseil-cpp//absl/container:flat_hash_map",
"@abseil-cpp//absl/container:flat_hash_set",
"@highway//:hwy",
],
)
@@ -4388,9 +4430,9 @@ alias(
v8_library(
name = "v8_vtune",
srcs = [
"src/third_party/vtune/v8-vtune.h",
"src/third_party/vtune/vtune-jit.cc",
"src/third_party/vtune/vtune-jit.h",
"third_party/vtune/v8-vtune.h",
"third_party/vtune/vtune-jit.cc",
"third_party/vtune/vtune-jit.h",
],
copts = ["-I"],
deps = [
@@ -4422,7 +4464,6 @@ v8_binary(
srcs = [
"src/regexp/gen-regexp-special-case.cc",
"src/regexp/special-case.h",
":v8_libbase_files",
":v8_shared_internal_headers",
],
copts = ["-Wno-implicit-fallthrough"],
@@ -4434,7 +4475,7 @@ v8_binary(
"UNISTR_FROM_CHAR_EXPLICIT=",
],
deps = [
"//external:absl_optional",
":v8_libbase",
"//external:icu",
],
)
@@ -4556,7 +4597,6 @@ py_test(
"//testing/pybase",
] + glob(["test/**"]) + glob(["tools/**/*.js"]) + glob(["tools/**/*.mjs"]),
main = "tools/run-tests.py",
python_version = "PY3",
tags = [
# Disable sanitizers, as they don't work in general in V8.
"nosan",
@@ -4595,7 +4635,6 @@ py_test(
"//testing/pybase",
] + glob(["test/**"]) + glob(["tools/**/*.js"]) + glob(["tools/**/*.mjs"]),
main = "tools/run-tests.py",
python_version = "PY3",
tags = [
# Disable sanitizers, as they don't work in general in V8.
"nosan",

deps/v8/BUILD.gn (vendored, 1022 lines changed): file diff suppressed because it is too large


@@ -5,7 +5,6 @@ bikineev@chromium.org
bmeurer@chromium.org
cbruni@chromium.org
clemensb@chromium.org
danno@chromium.org
dinfuehr@chromium.org
dlehmann@chromium.org
dmercadier@chromium.org

deps/v8/DEPS (vendored, 339 lines changed)

@@ -27,7 +27,6 @@ vars = {
'checkout_fuchsia_boot_images': "terminal.x64",
'checkout_fuchsia_product_bundles': '"{checkout_fuchsia_boot_images}" != ""',
'checkout_centipede_deps': False,
'checkout_instrumented_libraries': False,
'checkout_ittapi': False,
@@ -46,7 +45,6 @@ vars = {
'checkout_v8_builtins_pgo_profiles': False,
'android_url': 'https://android.googlesource.com',
'boringssl_url': 'https://boringssl.googlesource.com',
'chromium_url': 'https://chromium.googlesource.com',
'download_gcmole': False,
'download_jsfunfuzz': False,
@@ -60,7 +58,7 @@ vars = {
'checkout_fuchsia_no_hooks': False,
# reclient CIPD package version
'reclient_version': 're_client_version:0.163.0.d27158ab-gomaip',
'reclient_version': 're_client_version:0.177.1.e58c0145-gomaip',
# Fetch configuration files required for the 'use_remoteexec' gn arg
'download_remoteexec_cfg': False,
@@ -76,27 +74,29 @@ vars = {
'build_with_chromium': False,
# GN CIPD package version.
'gn_version': 'git_revision:20806f79c6b4ba295274e3a589d85db41a02fdaa',
'gn_version': 'git_revision:6e8e0d6d4a151ab2ed9b4a35366e630c55888444',
# ninja CIPD package version
# https://chrome-infra-packages.appspot.com/p/infra/3pp/tools/ninja
'ninja_version': 'version:2@1.12.1.chromium.4',
'ninja_version': 'version:3@1.12.1.chromium.4',
# siso CIPD package version
'siso_version': 'git_revision:eaee19cf51478b64614e2e8daad77378238a3c6c',
# luci-go CIPD package version.
'luci_go': 'git_revision:1aca70b6bf116c1bd8fbf0526c9a89e9be308718',
'siso_version': 'git_revision:68bdc49e4e23aef066fc652cbdb1b4973aab1a31',
# Three lines of non-changing comments so that
# the commit queue can handle CLs rolling Fuchsia sdk
# and whatever else without interference from each other.
'fuchsia_version': 'version:24.20240913.4.1',
'fuchsia_version': 'version:27.20250326.5.1',
# Three lines of non-changing comments so that
# the commit queue can handle CLs rolling partition_alloc_version
# and whatever else without interference from each other.
'partition_alloc_version': 'ab56923a27b2793f21994589b0c39bc3324ff49f',
# Three lines of non-changing comments so that
# the commit queue can handle CLs rolling android_sdk_build-tools_version
# and whatever else without interference from each other.
'android_sdk_build-tools_version': 'DxwAZ3hD551Neu6ycuW5CPnXFrdleRBd93oX1eB_m9YC',
'android_sdk_build-tools_version': 'y3EsZLg4bxPmpW0oYsAHylywNyMnIwPS3kh1VbQLAFAC',
# Three lines of non-changing comments so that
# the commit queue can handle CLs rolling android_sdk_emulator_version
# and whatever else without interference from each other.
@@ -112,11 +112,11 @@ vars = {
# Three lines of non-changing comments so that
# the commit queue can handle CLs rolling android_sdk_platform-tools_version
# and whatever else without interference from each other.
'android_sdk_platform-tools_version': 'WihaseZR6cojZbkzIqwGhpTp92ztaGfqq8njBU8eTXYC',
'android_sdk_platform-tools_version': 'mjFmRj7k_XR9yj60pYbr9mG38FyEbU5oWdU56bZQ5cwC',
# Three lines of non-changing comments so that
# the commit queue can handle CLs rolling android_sdk_platforms_version
# and whatever else without interference from each other.
'android_sdk_platforms_version': 'kIXA-9XuCfOESodXEdOBkW5f1ytrGWdbp3HFp1I8A_0C',
'android_sdk_platforms_version': '_YHemUrK49JrE7Mctdf5DDNOHu1VKBx_PTcWnZ-cbOAC',
# Three lines of non-changing comments so that
# the commit queue can handle CLs rolling android_sdk_sources_version
# and whatever else without interference from each other.
@@ -124,14 +124,14 @@ vars = {
# Three lines of non-changing comments so that
# the commit queue can handle CLs rolling android_sdk_tools-lint_version
# and whatever else without interference from each other.
'android_sdk_cmdline-tools_version': 'B4p95sDPpm34K8Cf4JcfTM-iYSglWko9qjWgbT9dxWQC',
'android_sdk_cmdline-tools_version': 'gekOVsZjseS1w9BXAT3FsoW__ByGDJYS9DgqesiwKYoC',
}
deps = {
'build':
Var('chromium_url') + '/chromium/src/build.git' + '@' + '4bd877395d215d47c694a8383147eb158fafbbd6',
Var('chromium_url') + '/chromium/src/build.git' + '@' + '451ef881d77fff0b7a8bbfa61934f5e4a35b4c96',
'buildtools':
Var('chromium_url') + '/chromium/src/buildtools.git' + '@' + 'a7a84ac61eae5a8946807265a2fd8bd812daf384',
Var('chromium_url') + '/chromium/src/buildtools.git' + '@' + '6f359296daa889aa726f3d05046b9f37be241169',
'buildtools/linux64': {
'packages': [
{
@@ -177,15 +177,15 @@ deps = {
'test/mozilla/data':
Var('chromium_url') + '/v8/deps/third_party/mozilla-tests.git' + '@' + 'f6c578a10ea707b1a8ab0b88943fe5115ce2b9be',
'test/test262/data':
Var('chromium_url') + '/external/github.com/tc39/test262.git' + '@' + 'd62fa93c8f9ce5e687c0bbaa5d2b59670ab2ff60',
Var('chromium_url') + '/external/github.com/tc39/test262.git' + '@' + 'ce7e72d2107f99d165f4259571f10aa75753d997',
'third_party/android_platform': {
'url': Var('chromium_url') + '/chromium/src/third_party/android_platform.git' + '@' + '6337c445f9963ec3914e7e0c5787941d07b46509',
'url': Var('chromium_url') + '/chromium/src/third_party/android_platform.git' + '@' + '98aee46efb1cc4e09fa0e3ecaa6b19dc258645fa',
'condition': 'checkout_android',
},
'third_party/android_sdk/public': {
'packages': [
{
'package': 'chromium/third_party/android_sdk/public/build-tools/35.0.0',
'package': 'chromium/third_party/android_sdk/public/build-tools/36.0.0',
'version': Var('android_sdk_build-tools_version'),
},
{
@@ -205,7 +205,7 @@ deps = {
'version': Var('android_sdk_platform-tools_version'),
},
{
'package': 'chromium/third_party/android_sdk/public/platforms/android-35',
'package': 'chromium/third_party/android_sdk/public/platforms/android-36',
'version': Var('android_sdk_platforms_version'),
},
{
@@ -230,20 +230,12 @@ deps = {
'condition': 'checkout_android',
'dep_type': 'cipd',
},
'third_party/boringssl': {
'url': Var('chromium_url') + '/chromium/src/third_party/boringssl.git' + '@' + 'c79987a83ceaf2cf911f7d21bec621ddc90c45cc',
'condition': "checkout_centipede_deps",
},
'third_party/boringssl/src': {
'url': Var('boringssl_url') + '/boringssl.git' + '@' + '58f3bc83230d2958bb9710bc910972c4f5d382dc',
'condition': "checkout_centipede_deps",
},
'third_party/catapult': {
'url': Var('chromium_url') + '/catapult.git' + '@' + '296226a4a0067c8cffeb8831fb87526a8035f3cc',
'url': Var('chromium_url') + '/catapult.git' + '@' + '5bda0fdab9d93ec9963e2cd858c7b49ad7fec7d4',
'condition': 'checkout_android',
},
'third_party/clang-format/script':
Var('chromium_url') + '/external/github.com/llvm/llvm-project/clang/tools/clang-format.git' + '@' + '3c0acd2d4e73dd911309d9e970ba09d58bf23a62',
Var('chromium_url') + '/external/github.com/llvm/llvm-project/clang/tools/clang-format.git' + '@' + '37f6e68a107df43b7d7e044fd36a13cbae3413f2',
'third_party/colorama/src': {
'url': Var('chromium_url') + '/external/colorama.git' + '@' + '3de9f013df4b470069d03d250224062e8cf15c49',
'condition': 'checkout_android',
@@ -253,15 +245,17 @@ deps = {
'condition': 'checkout_android',
},
'third_party/depot_tools':
Var('chromium_url') + '/chromium/tools/depot_tools.git' + '@' + '22df6f8e622dc3e8df8dc8b5d3e3503b169af78e',
Var('chromium_url') + '/chromium/tools/depot_tools.git' + '@' + 'f40ddcd8d51626fb7be3ab3c418b3f3be801623f',
'third_party/fp16/src':
Var('chromium_url') + '/external/github.com/Maratyszcza/FP16.git' + '@' + '0a92994d729ff76a58f692d3028ca1b64b145d91',
'third_party/fast_float/src':
Var('chromium_url') + '/external/github.com/fastfloat/fast_float.git' + '@' + '3e57d8dcfb0a04b5a8a26b486b54490a2e9b310f',
Var('chromium_url') + '/external/github.com/fastfloat/fast_float.git' + '@' + 'cb1d42aaa1e14b09e1452cfdef373d051b8c02a4',
'third_party/fuchsia-gn-sdk': {
'url': Var('chromium_url') + '/chromium/src/third_party/fuchsia-gn-sdk.git' + '@' + '5086f6c9e4c6d3295a76fdb5d27209f2d6449c6a',
'url': Var('chromium_url') + '/chromium/src/third_party/fuchsia-gn-sdk.git' + '@' + '3845a68eb4421e64fbdf9f4805b5ac6d73742e08',
'condition': 'checkout_fuchsia',
},
'third_party/simdutf':
Var('chromium_url') + '/chromium/src/third_party/simdutf' + '@' + '40d1fa26cd5ca221605c974e22c001ca2fb12fde',
# Exists for rolling the Fuchsia SDK. Check out of the SDK should always
# rely on the hook running |update_sdk.py| script below.
'third_party/fuchsia-sdk/sdk': {
@@ -275,23 +269,23 @@ deps = {
'dep_type': 'cipd',
},
'third_party/google_benchmark_chrome': {
'url': Var('chromium_url') + '/chromium/src/third_party/google_benchmark.git' + '@' + 'f049b96d7a50ae19f2748aae7fba7bde705bcd8c',
'url': Var('chromium_url') + '/chromium/src/third_party/google_benchmark.git' + '@' + '917e1208b42fdce63511e401067677ffee3a5c7d',
},
'third_party/google_benchmark_chrome/src': {
'url': Var('chromium_url') + '/external/github.com/google/benchmark.git' + '@' + '344117638c8ff7e239044fd0fa7085839fc03021',
'url': Var('chromium_url') + '/external/github.com/google/benchmark.git' + '@' + '761305ec3b33abf30e08d50eb829e19a802581cc',
},
'third_party/fuzztest':
Var('chromium_url') + '/chromium/src/third_party/fuzztest.git' + '@' + '69fe98bf87d80fdc773481ae5180c63e431a13a1',
Var('chromium_url') + '/chromium/src/third_party/fuzztest.git' + '@' + 'df29ed1355d06c486e17fc421767ff01af050ca4',
'third_party/fuzztest/src':
Var('chromium_url') + '/external/github.com/google/fuzztest.git' + '@' + '32eb84a95951fa3a0148fb3e6a1a02f830ded136',
Var('chromium_url') + '/external/github.com/google/fuzztest.git' + '@' + '3c7bc855a4938c5d0d1d07303aa0697c88d33e6c',
'third_party/googletest/src':
Var('chromium_url') + '/external/github.com/google/googletest.git' + '@' + '0953a17a4281fc26831da647ad3fcd5e21e6473b',
Var('chromium_url') + '/external/github.com/google/googletest.git' + '@' + '52204f78f94d7512df1f0f3bea1d47437a2c3a58',
'third_party/highway/src':
Var('chromium_url') + '/external/github.com/google/highway.git' + '@' + '8295336dd70f1201d42c22ab5b0861de38cf8fbf',
Var('chromium_url') + '/external/github.com/google/highway.git' + '@' + '00fe003dac355b979f36157f9407c7c46448958e',
'third_party/icu':
Var('chromium_url') + '/chromium/deps/icu.git' + '@' + '9408c6fd4a39e6fef0e1c4077602e1c83b15f3fb',
Var('chromium_url') + '/chromium/deps/icu.git' + '@' + 'c9fb4b3a6fb54aa8c20a03bbcaa0a4a985ffd34b',
'third_party/instrumented_libs': {
'url': Var('chromium_url') + '/chromium/third_party/instrumented_libraries.git' + '@' + 'bb6dbcf2df7a9beb34c3773ef4df161800e3aed9',
'url': Var('chromium_url') + '/chromium/third_party/instrumented_libraries.git' + '@' + '69015643b3f68dbd438c010439c59adc52cac808',
'condition': 'checkout_instrumented_libraries',
},
'third_party/ittapi': {
@@ -301,157 +295,159 @@ deps = {
'condition': "checkout_ittapi or check_v8_header_includes",
},
'third_party/jinja2':
Var('chromium_url') + '/chromium/src/third_party/jinja2.git' + '@' + '2f6f2ff5e4c1d727377f5e1b9e1903d871f41e74',
Var('chromium_url') + '/chromium/src/third_party/jinja2.git' + '@' + '5e1ee241ab04b38889f8d517f2da8b3df7cfbd9a',
'third_party/jsoncpp/source':
Var('chromium_url') + '/external/github.com/open-source-parsers/jsoncpp.git'+ '@' + '42e892d96e47b1f6e29844cc705e148ec4856448',
'third_party/libc++/src':
Var('chromium_url') + '/external/github.com/llvm/llvm-project/libcxx.git' + '@' + '50ab693ecb611942ce4440d8c9ed707ee65ed5e8',
Var('chromium_url') + '/external/github.com/llvm/llvm-project/libcxx.git' + '@' + '449310fe2e37834a7e62972d2a690cade2ef596b',
'third_party/libc++abi/src':
Var('chromium_url') + '/external/github.com/llvm/llvm-project/libcxxabi.git' + '@' + '29b2e9a0f48688da116692cb04758393053d269c',
Var('chromium_url') + '/external/github.com/llvm/llvm-project/libcxxabi.git' + '@' + '94c5d7a8edc09f0680aee57548c0b5d400c2840d',
'third_party/libunwind/src':
Var('chromium_url') + '/external/github.com/llvm/llvm-project/libunwind.git' + '@' + 'dc70138c3e68e2f946585f134e20815851e26263',
Var('chromium_url') + '/external/github.com/llvm/llvm-project/libunwind.git' + '@' + 'e2e6f2a67e9420e770b014ce9bba476fa2ab9874',
'third_party/llvm-libc/src':
Var('chromium_url') + '/external/github.com/llvm/llvm-project/libc.git' + '@' + '188329a7f2118a957efbb3e6219c255e7dba997c',
'third_party/llvm-build/Release+Asserts': {
'dep_type': 'gcs',
'bucket': 'chromium-browser-clang',
'objects': [
{
'object_name': 'Linux_x64/clang-llvmorg-20-init-3847-g69c43468-28.tar.xz',
'sha256sum': '6a30f7bc7c5f0eac02a40a4ec9a1ab906ddff2adacf2c9ff065916047c79f0fb',
'size_bytes': 52892060,
'generation': 1726118358890940,
'object_name': 'Linux_x64/clang-llvmorg-21-init-5118-g52cd27e6-5.tar.xz',
'sha256sum': '790fcc5b04e96882e8227ba7994161ab945c0e096057fc165a0f71e32a7cb061',
'size_bytes': 54517328,
'generation': 1742541959624765,
'condition': 'host_os == "linux"',
},
{
'object_name': 'Linux_x64/clang-tidy-llvmorg-20-init-3847-g69c43468-28.tar.xz',
'sha256sum': '35e00fc8f58cf7cd30f0ad27c2fdef56b677e287030072c46c0f024d23363ae4',
'size_bytes': 13283180,
'generation': 1726118359291453,
'object_name': 'Linux_x64/clang-tidy-llvmorg-21-init-5118-g52cd27e6-5.tar.xz',
'sha256sum': '6e325d9f62e831bfbae23413a75535a851fd2cdf2f7cf06a5b724e86f72b2df0',
'size_bytes': 13206280,
'generation': 1742541959572183,
'condition': 'host_os == "linux" and checkout_clang_tidy',
},
{
'object_name': 'Linux_x64/clangd-llvmorg-20-init-3847-g69c43468-28.tar.xz',
'sha256sum': '33e2276976dfeaf387f5ea16651ea591eebe3570a12469f3884c74f8079e88bf',
'size_bytes': 26305668,
'generation': 1726118359489734,
'object_name': 'Linux_x64/clangd-llvmorg-21-init-5118-g52cd27e6-5.tar.xz',
'sha256sum': '95d4146cb0b892db68c55bbb523b30301e538d0f4dc71517612fdee62664b81a',
'size_bytes': 13566616,
'generation': 1742541959718102,
'condition': 'host_os == "linux" and checkout_clangd',
},
{
'object_name': 'Linux_x64/llvm-code-coverage-llvmorg-20-init-3847-g69c43468-28.tar.xz',
'sha256sum': '426c6bd378848de0817a7695fee821bece9efb51e3ed1d7b750a75bc17bf00eb',
'size_bytes': 2370472,
'generation': 1726118360237343,
'object_name': 'Linux_x64/llvm-code-coverage-llvmorg-21-init-5118-g52cd27e6-5.tar.xz',
'sha256sum': 'a10c2831ad30275a72f2955e65e62f6af78542f380661443dab4d20c65f203a4',
'size_bytes': 2299292,
'generation': 1742541960157221,
'condition': 'host_os == "linux" and checkout_clang_coverage_tools',
},
{
'object_name': 'Linux_x64/llvmobjdump-llvmorg-20-init-3847-g69c43468-28.tar.xz',
'sha256sum': 'e11c3043e76c7c79fe7905861a11c78433c6d796d049f837eda0a2ce118f0793',
'size_bytes': 5410724,
'generation': 1726118359908897,
'object_name': 'Linux_x64/llvmobjdump-llvmorg-21-init-5118-g52cd27e6-5.tar.xz',
'sha256sum': '56a5bb654a4550d332f86a23e38a0495c6187092868af817ecb999bd9de9c8a0',
'size_bytes': 5429676,
'generation': 1742541959869492,
'condition': '(checkout_linux or checkout_mac or checkout_android and host_os != "mac")',
},
{
'object_name': 'Mac/clang-llvmorg-20-init-3847-g69c43468-28.tar.xz',
'sha256sum': 'cabfc7ca792ef13d3e665c3a7811f9a76cc39094059c11606cea1724f0394bbc',
'size_bytes': 47551968,
'generation': 1726118361528729,
'object_name': 'Mac/clang-llvmorg-21-init-5118-g52cd27e6-5.tar.xz',
'sha256sum': '330f8c4cdde3095ac54aff772dbf9bbd96a753df58525546931cbd9bb615f793',
'size_bytes': 51652432,
'generation': 1742541961981004,
'condition': 'host_os == "mac" and host_cpu == "x64"',
},
{
'object_name': 'Mac/clang-mac-runtime-library-llvmorg-20-init-3847-g69c43468-28.tar.xz',
'sha256sum': '50a618246d7fd23645640fc50ccb0d4684c1895def378b90963a289f920ea88a',
'size_bytes': 879508,
'generation': 1726118377526206,
'object_name': 'Mac/clang-mac-runtime-library-llvmorg-21-init-5118-g52cd27e6-5.tar.xz',
'sha256sum': '013f468c65fae6f736cd380791fef892a0fc9fc107516fcae34d1f998eeb081f',
'size_bytes': 978248,
'generation': 1742541983231339,
'condition': 'checkout_mac and not host_os == "mac"',
},
{
'object_name': 'Mac/clang-tidy-llvmorg-20-init-3847-g69c43468-28.tar.xz',
'sha256sum': '2c325505ea43a8d8a14770890d62aba9af37b397e3063e3fb622cfd51d4706f6',
'size_bytes': 12884412,
'generation': 1726118361811669,
'object_name': 'Mac/clang-tidy-llvmorg-21-init-5118-g52cd27e6-5.tar.xz',
'sha256sum': 'f29391d18e9fa40774e0386224235890933a8b9eddb9b7eb93d2a4a0867241a1',
'size_bytes': 13468608,
'generation': 1742541962672221,
'condition': 'host_os == "mac" and host_cpu == "x64" and checkout_clang_tidy',
},
{
'object_name': 'Mac/clangd-llvmorg-20-init-3847-g69c43468-28.tar.xz',
'sha256sum': '1c1a0965cc95053dec8c649a7b3bb627ad2300ad230eed97b52ee70a8a8edd85',
'size_bytes': 26553148,
'generation': 1726118361978146,
'object_name': 'Mac/clangd-llvmorg-21-init-5118-g52cd27e6-5.tar.xz',
'sha256sum': '06c48661d55a7b465d8fb02be56f8550c34d3962a9d0f8ce19b17bdd37127691',
'size_bytes': 15012228,
'generation': 1742541962463652,
'condition': 'host_os == "mac" and host_cpu == "x64" and checkout_clangd',
},
{
'object_name': 'Mac/llvm-code-coverage-llvmorg-20-init-3847-g69c43468-28.tar.xz',
'sha256sum': '9259bd27c19ca9662c70ffc2b42c10afb584e7c584470d6e656e164643614b50',
'size_bytes': 2247028,
'generation': 1726118362377026,
'object_name': 'Mac/llvm-code-coverage-llvmorg-21-init-5118-g52cd27e6-5.tar.xz',
'sha256sum': '12f3accae43fa9591bbf28a8e0785b99ff75ed2c84f89518bd5ef5119a2525f0',
'size_bytes': 2255296,
'generation': 1742541963013464,
'condition': 'host_os == "mac" and host_cpu == "x64" and checkout_clang_coverage_tools',
},
{
'object_name': 'Mac_arm64/clang-llvmorg-20-init-3847-g69c43468-28.tar.xz',
'sha256sum': 'e87eb4caa95c98ef36c40aec5b8cd07a6c4fb8959d3c7e7d452f6ed860c8c2bf',
'size_bytes': 41352592,
'generation': 1726118378868177,
'object_name': 'Mac_arm64/clang-llvmorg-21-init-5118-g52cd27e6-5.tar.xz',
'sha256sum': '49967dab3fa4c5f1ff1fe235059be71727c190ff4ccc80f08d39e1bba4dfed58',
'size_bytes': 43810156,
'generation': 1742541984650930,
'condition': 'host_os == "mac" and host_cpu == "arm64"',
},
{
'object_name': 'Mac_arm64/clang-tidy-llvmorg-20-init-3847-g69c43468-28.tar.xz',
'sha256sum': 'fcf8f25a8461db90686d0455bd8f195750a2cdc425cb03c48debe4d3e8bb9299',
'size_bytes': 11476316,
'generation': 1726118379144738,
'object_name': 'Mac_arm64/clang-tidy-llvmorg-21-init-5118-g52cd27e6-5.tar.xz',
'sha256sum': '4f5326253ed3736ec262e8e69d93befadf9473419865240673a2ec883c3614b6',
'size_bytes': 11607236,
'generation': 1742541984970894,
'condition': 'host_os == "mac" and host_cpu == "arm64" and checkout_clang_tidy',
},
{
'object_name': 'Mac_arm64/clangd-llvmorg-20-init-3847-g69c43468-28.tar.xz',
'sha256sum': 'cca4049d3362528511ebc603db05189c9bef406a80ae4fead22b1db0a4de83e6',
'size_bytes': 22679568,
'generation': 1726118379283835,
'object_name': 'Mac_arm64/clangd-llvmorg-21-init-5118-g52cd27e6-5.tar.xz',
'sha256sum': 'b3da417d27ba5afe6c9f612d5808c416a17ed1b28f2acd745e0cd2962a5eeac1',
'size_bytes': 12000852,
'generation': 1742541985144552,
'condition': 'host_os == "mac" and host_cpu == "arm64" and checkout_clangd',
},
{
'object_name': 'Mac_arm64/llvm-code-coverage-llvmorg-20-init-3847-g69c43468-28.tar.xz',
'sha256sum': '001e8582de4bc7c434f321b5bacd2b0b45e553f3134cb7d78e1a4f62e2b97ac6',
'size_bytes': 1969844,
'generation': 1726118379757221,
'object_name': 'Mac_arm64/llvm-code-coverage-llvmorg-21-init-5118-g52cd27e6-5.tar.xz',
'sha256sum': '78139b473cdf4d43da880b573661a5d28d94a8bcb4dea41607d324301745f28c',
'size_bytes': 1976480,
'generation': 1742541985608174,
'condition': 'host_os == "mac" and host_cpu == "arm64" and checkout_clang_coverage_tools',
},
{
'object_name': 'Win/clang-llvmorg-20-init-3847-g69c43468-28.tar.xz',
'sha256sum': 'cb416511e6379b7fd3f362f637ebb8a28957d0d2ff2dc6e2d9f4484a381f2885',
'size_bytes': 44655000,
'generation': 1726118399720986,
'object_name': 'Win/clang-llvmorg-21-init-5118-g52cd27e6-5.tar.xz',
'sha256sum': 'b46fb4a5cbf9c52d0b345fc2d77ad4ac15dfbb45aa494fb49261786c679af44a',
'size_bytes': 46813332,
'generation': 1742542010902044,
'condition': 'host_os == "win"',
},
{
'object_name': 'Win/clang-tidy-llvmorg-20-init-3847-g69c43468-28.tar.xz',
'sha256sum': '15af2ae61dabdfe0ddbdd48f467b996855ba51d0ef633c5c0ac3c74cdc0d8f2c',
'size_bytes': 13114928,
'generation': 1726118400057660,
'object_name': 'Win/clang-tidy-llvmorg-21-init-5118-g52cd27e6-5.tar.xz',
'sha256sum': '50d527e811ae8543effa2bb5734a1d424d9c497fbf1d96c76d44b6b5ee7f240b',
'size_bytes': 13233236,
'generation': 1742542011983982,
'condition': 'host_os == "win" and checkout_clang_tidy',
},
{
'object_name': 'Win/clang-win-runtime-library-llvmorg-20-init-3847-g69c43468-28.tar.xz',
'sha256sum': '81d66840357d83ca1a2c85ebca5259a7a86d9e99c77b37727fbaee87ccacf675',
'size_bytes': 2897452,
'generation': 1726118416326356,
'object_name': 'Win/clang-win-runtime-library-llvmorg-21-init-5118-g52cd27e6-5.tar.xz',
'sha256sum': 'bd2ad1930c0ba7d00364dd344886fd57e16aa070ff1b6a1aade72b58d28e8275',
'size_bytes': 2474048,
'generation': 1742542035740788,
'condition': 'checkout_win and not host_os == "win"',
},
{
'object_name': 'Win/clangd-llvmorg-20-init-3847-g69c43468-28.tar.xz',
'sha256sum': '1304718c221543b16465a4b6108572fa1ba9f2b75c4e4398bdb01fb983428c10',
'size_bytes': 25169688,
'generation': 1726118400193324,
'object_name': 'Win/clangd-llvmorg-21-init-5118-g52cd27e6-5.tar.xz',
'sha256sum': '06563cfcb24d7196717551dfcda13ed0b97fb9e795dba06007c55ae563e824b0',
'size_bytes': 13759668,
'generation': 1742542011820938,
'condition': 'host_os == "win" and checkout_clangd',
},
{
'object_name': 'Win/llvm-code-coverage-llvmorg-20-init-3847-g69c43468-28.tar.xz',
'sha256sum': 'e01b8fbca72fc1cca6988e359d9a0eea8fa5ccbaff8d41deffd62970c7f4fed5',
'size_bytes': 2382756,
'generation': 1726118400642803,
'object_name': 'Win/llvm-code-coverage-llvmorg-21-init-5118-g52cd27e6-5.tar.xz',
'sha256sum': '7c2fe8784910dc05445cd7f16742e0b2a09a45fb2ba96ddd5f1d8c895ac65d44',
'size_bytes': 2365956,
'generation': 1742542013491786,
'condition': 'host_os == "win" and checkout_clang_coverage_tools',
},
{
'object_name': 'Win/llvmobjdump-llvmorg-20-init-3847-g69c43468-28.tar.xz',
'sha256sum': '2f837a21d910ad748666282d0c1da15a438d9aae4fc1bc85dab7313da6dfeb7b',
'size_bytes': 5439736,
'generation': 1726118400404099,
'object_name': 'Win/llvmobjdump-llvmorg-21-init-5118-g52cd27e6-5.tar.xz',
'sha256sum': 'a07be25cb4d565422b10001ca3595111d40bd42c47b37b41e2fff5708fe82302',
'size_bytes': 5527784,
'generation': 1742542012678160,
'condition': 'checkout_linux or checkout_mac or checkout_android and host_os == "win"',
},
],
@ -459,7 +455,7 @@ deps = {
'third_party/logdog/logdog':
Var('chromium_url') + '/infra/luci/luci-py/client/libs/logdog' + '@' + '0b2078a90f7a638d576b3a7c407d136f2fb62399',
'third_party/markupsafe':
Var('chromium_url') + '/chromium/src/third_party/markupsafe.git' + '@' + '6638e9b0a79afc2ff7edd9e84b518fe7d5d5fea9',
Var('chromium_url') + '/chromium/src/third_party/markupsafe.git' + '@' + '9f8efc8637f847ab1ba984212598e6fb9cf1b3d4',
'third_party/ninja': {
'packages': [
{
@ -470,16 +466,58 @@ deps = {
'dep_type': 'cipd',
'condition': 'host_cpu != "s390" and host_os != "zos" and host_cpu != "ppc"'
},
'third_party/partition_alloc': {
'url': Var('chromium_url') + '/chromium/src/base/allocator/partition_allocator.git@' + Var('partition_alloc_version'),
'condition': 'not build_with_chromium',
},
'third_party/perfetto':
Var('android_url') + '/platform/external/perfetto.git' + '@' + '6fc824d618d2f06b5d9cd8655ba0419b6b3b366e',
Var('android_url') + '/platform/external/perfetto.git' + '@' + '40b529923598b739b2892a536a7692eedbed5685',
'third_party/protobuf':
Var('chromium_url') + '/chromium/src/third_party/protobuf.git' + '@' + '37bbf271c62d6c01c58c66505b17c7dcf086371a',
Var('chromium_url') + '/chromium/src/third_party/protobuf.git' + '@' + 'b714f7890b8b6ad3ff3471d3148b28c2c7bbff90',
'third_party/re2/src':
Var('chromium_url') + '/external/github.com/google/re2.git' + '@' + '6dcd83d60f7944926bfd308cc13979fc53dd69ca',
Var('chromium_url') + '/external/github.com/google/re2.git' + '@' + 'c84a140c93352cdabbfb547c531be34515b12228',
'third_party/requests': {
'url': Var('chromium_url') + '/external/github.com/kennethreitz/requests.git' + '@' + 'c7e0fc087ceeadb8b4c84a0953a422c474093d6d',
'condition': 'checkout_android',
},
'tools/rust':
Var('chromium_url') + '/chromium/src/tools/rust' + '@' + '7cdd3d9540f3ab428dbcc9ab83c2896c100bcdc5',
'third_party/rust':
Var('chromium_url') + '/chromium/src/third_party/rust' + '@' + 'ed577320339cd175171e9c96d3d73452ddbcbd98',
'third_party/rust-toolchain': {
'dep_type': 'gcs',
'bucket': 'chromium-browser-clang',
'objects': [
{
'object_name': 'Linux_x64/rust-toolchain-f7b43542838f0a4a6cfdb17fbeadf45002042a77-1-llvmorg-21-init-5118-g52cd27e6.tar.xz',
'sha256sum': '213ffcc751ba5f5a4e15fc0dbcbdb94aa7dbc4b6cddd3605121cd26ff8a8b359',
'size_bytes': 118223072,
'generation': 1741985831167267,
'condition': 'host_os == "linux"',
},
{
'object_name': 'Mac/rust-toolchain-f7b43542838f0a4a6cfdb17fbeadf45002042a77-1-llvmorg-21-init-5118-g52cd27e6.tar.xz',
'sha256sum': 'f5ad2fe26336a87713ffcad9e06ae4c1ecb4773ae496a33450a7091c5eec560c',
'size_bytes': 111168208,
'generation': 1741985832885972,
'condition': 'host_os == "mac" and host_cpu == "x64"',
},
{
'object_name': 'Mac_arm64/rust-toolchain-f7b43542838f0a4a6cfdb17fbeadf45002042a77-1-llvmorg-21-init-5118-g52cd27e6.tar.xz',
'sha256sum': 'fac3586c08239bbb8fd192a7ba5deaa9ae62f6fde2c1d665953f87176467a156',
'size_bytes': 100534232,
'generation': 1741985834191792,
'condition': 'host_os == "mac" and host_cpu == "arm64"',
},
{
'object_name': 'Win/rust-toolchain-f7b43542838f0a4a6cfdb17fbeadf45002042a77-1-llvmorg-21-init-5118-g52cd27e6.tar.xz',
'sha256sum': '7b41e74c9b45ca97ca65279c605e6af878de5682fe574d1f1860d9da9b3a5909',
'size_bytes': 180896336,
'generation': 1741985835535129,
'condition': 'host_os == "win"',
},
],
},
'third_party/siso': {
'packages': [
{
@ -491,27 +529,13 @@ deps = {
'condition': 'not build_with_chromium and host_cpu != "s390" and host_os != "zos" and host_cpu != "ppc"',
},
'third_party/zlib':
Var('chromium_url') + '/chromium/src/third_party/zlib.git'+ '@' + 'fa9f14143c7938e6a1d18443900efee7a1e5e669',
Var('chromium_url') + '/chromium/src/third_party/zlib.git'+ '@' + '788cb3c270e8700b425c7bdca1f9ce6b0c1400a9',
'tools/clang':
Var('chromium_url') + '/chromium/src/tools/clang.git' + '@' + 'e47c184ec52d50c7aa2a99cd3bd26ebcafaa94b9',
'tools/luci-go': {
'packages': [
{
'package': 'infra/tools/luci/isolate/${{platform}}',
'version': Var('luci_go'),
},
{
'package': 'infra/tools/luci/swarming/${{platform}}',
'version': Var('luci_go'),
},
],
'condition': 'host_cpu != "s390" and host_os != "zos" and host_os != "aix"',
'dep_type': 'cipd',
},
Var('chromium_url') + '/chromium/src/tools/clang.git' + '@' + '0078c27c43cae91e96bb28d8a4407045966e0542',
'tools/protoc_wrapper':
Var('chromium_url') + '/chromium/src/tools/protoc_wrapper.git' + '@' + 'dbcbea90c20ae1ece442d8ef64e61c7b10e2b013',
'third_party/abseil-cpp': {
'url': Var('chromium_url') + '/chromium/src/third_party/abseil-cpp.git' + '@' + '1f7e21e34c3807a8841c9562cfc8b3213eb50bfc',
'url': Var('chromium_url') + '/chromium/src/third_party/abseil-cpp.git' + '@' + '3fbb10e80d80e3430224b75add53c47c7a711612',
'condition': 'not build_with_chromium',
},
'third_party/zoslib': {
@ -524,12 +548,15 @@ include_rules = [
# Everybody can use some things.
'+include',
'+unicode',
'+third_party/fdlibm',
'+third_party/ittapi/include',
'+third_party/fast_float/src/include',
'+third_party/fdlibm',
'+third_party/fp16/src/include',
'+third_party/v8/codegen',
'+third_party/fuzztest',
'+third_party/ittapi/include',
'+third_party/simdutf',
'+third_party/v8/codegen',
'+third_party/vtune',
'+hwy/highway.h',
# Abseil features are allow-listed. Please use your best judgement when adding
# to this set -- if in doubt, email v8-dev@. For general guidance, refer to
# the Chromium guidelines (though note that some requirements in V8 may be
@ -538,9 +565,9 @@ include_rules = [
'+absl/container/flat_hash_map.h',
'+absl/container/flat_hash_set.h',
'+absl/container/btree_map.h',
'+absl/types/optional.h',
'+absl/types/variant.h',
'+absl/status',
'+absl/synchronization/mutex.h',
'+absl/time/time.h',
# Some abseil features are explicitly banned.
'-absl/types/any.h', # Requires RTTI.
'-absl/types/flags', # Requires RTTI.
@ -755,16 +782,6 @@ hooks = [
'--quiet',
],
},
{
# Clean up build dirs for crbug.com/1337238.
# After a libc++ roll and revert, .ninja_deps would get into a state
# that breaks Ninja on Windows.
# TODO(crbug.com/1337238): Remove in a month or so.
'name': 'del_ninja_deps_cache',
'pattern': '.',
'condition': 'host_os == "win"',
'action': ['python3', 'build/del_ninja_deps_cache.py'],
},
# Configure remote exec cfg files
{
'name': 'download_and_configure_reclient_cfgs',

View file

@ -10,3 +10,6 @@ monorail {
component: "Blink>JavaScript"
}
team_email: "v8-dev@googlegroups.com"
buganizer_public: {
component_id: 1456824
}

View file

@ -3,9 +3,8 @@
# directories.
adamk@chromium.org
danno@chromium.org
gdeepti@chromium.org
hpayer@chromium.org
hpayer@chromium.org #{LAST_RESORT_SUGGESTION}
leszeks@chromium.org
mlippautz@chromium.org
syg@chromium.org

2
deps/v8/LICENSE vendored
View file

@ -19,7 +19,7 @@ are:
This code is copyrighted by Sun Microsystems Inc. and released
under a 3-clause BSD license.
- Valgrind client API header, located at src/third_party/valgrind/valgrind.h
- Valgrind client API header, located at third_party/valgrind/valgrind.h
This is released under the BSD license.
- The Wasm C/C++ API headers, located at third_party/wasm-api/wasm.{h,hh}

19
deps/v8/MODULE.bazel vendored Normal file
View file

@ -0,0 +1,19 @@
module(
name = "v8",
version = "0.0.0",
)
bazel_dep(name = "bazel_skylib", version = "1.7.1")
bazel_dep(name = "rules_python", version = "1.0.0")
bazel_dep(name = "platforms", version = "0.0.11")
bazel_dep(name = "abseil-cpp", version = "20240722.0.bcr.2")
bazel_dep(name = "highway", version = "1.2.0")
pip = use_extension("@rules_python//python/extensions:pip.bzl", "pip")
pip.parse(
hub_name = "v8_python_deps",
python_version = "3.11",
requirements_lock = "//:bazel/requirements.txt",
extra_pip_args = ["--require-hashes"],
)
use_repo(pip, "v8_python_deps")

2
deps/v8/OWNERS vendored
View file

@ -12,9 +12,9 @@ per-file BUILD.bazel=file:COMMON_OWNERS
per-file BUILD.gn=file:COMMON_OWNERS
per-file DEPS=file:COMMON_OWNERS
per-file INFRA_OWNERS=file:INFRA_OWNERS
per-file MODULE.bazel=file:COMMON_OWNERS
per-file PRESUBMIT.py=file:INFRA_OWNERS
per-file WATCHLISTS=file:COMMON_OWNERS
per-file WORKSPACE=file:COMMON_OWNERS
# Needed by the auto_tag builder
per-file WATCHLISTS=v8-ci-autoroll-builder@chops-service-accounts.iam.gserviceaccount.com

46
deps/v8/PRESUBMIT.py vendored
View file

@ -361,6 +361,51 @@ def _CheckNoInlineHeaderIncludesInNormalHeaders(input_api, output_api):
return []
def _CheckInlineHeadersIncludeNonInlineHeadersFirst(input_api, output_api):
"""Checks that the first include in each inline header ("*-inl.h") is the
non-inl counterpart of that header, if that file exists."""
file_inclusion_pattern = r'.+-inl\.h'
include_error = (
'The first include of an -inl.h header should be the non-inl counterpart.'
)
def FilterFile(affected_file):
files_to_skip = _EXCLUDED_PATHS + input_api.DEFAULT_FILES_TO_SKIP + (
# Exclude macro-assembler-<ARCH>-inl.h headers because they have special
# include rules (arch-specific macro assembler headers must be included
# via the general macro-assembler.h).
r'src[\\\/]codegen[\\\/].*[\\\/]macro-assembler-.*-inl\.h',)
return input_api.FilterSourceFile(
affected_file,
files_to_check=(file_inclusion_pattern,),
files_to_skip=files_to_skip)
to_non_inl = lambda filename: filename[:-len("-inl.h")] + ".h"
problems = []
for f in input_api.AffectedSourceFiles(FilterFile):
if not os.path.isfile(to_non_inl(f.AbsoluteLocalPath())):
continue
non_inl_header = to_non_inl(f.LocalPath())
first_include = None
for line in f.NewContents():
if line.startswith('#include '):
first_include = line
break
expected_include = f'#include "{non_inl_header}"'
if first_include is None:
problems.append(f'{f.LocalPath()}: should include {non_inl_header}\n'
' found no includes in the file.')
elif not first_include.startswith(expected_include):
problems.append(
f'{f.LocalPath()}: should include {non_inl_header} first\n'
f' found: {first_include}')
if problems:
return [output_api.PresubmitError(include_error, problems)]
else:
return []
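For reference, a hypothetical -inl.h layout that satisfies the check above (path and names are invented for illustration; only the position of the first #include matters):
// src/heap/foo-inl.h (hypothetical)
#ifndef V8_HEAP_FOO_INL_H_
#define V8_HEAP_FOO_INL_H_
// The first include must be the non-inl counterpart of this header.
#include "src/heap/foo.h"
// Any further includes come after it.
#include "src/heap/bar-inl.h"
#endif  // V8_HEAP_FOO_INL_H_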
def _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api):
"""Attempts to prevent use of functions intended only for testing in
non-testing code. For now this is just a best-effort implementation
@ -436,6 +481,7 @@ def _CommonChecks(input_api, output_api):
_CheckNoProductionCodeUsingTestOnlyFunctions,
_CheckHeadersHaveIncludeGuards,
_CheckNoInlineHeaderIncludesInNormalHeaders,
_CheckInlineHeadersIncludeNonInlineHeadersFirst,
_CheckJSONFiles,
_CheckNoexceptAnnotations,
_RunTestsWithVPythonSpec,

6
deps/v8/WATCHLISTS vendored
View file

@ -51,7 +51,7 @@
'baseline': {
'filepath': 'src/baseline/',
},
'feature_shipping_status': {
'flags': {
'filepath': 'src/flags/flag-definitions.h',
},
'heap_changes': {
@ -128,8 +128,8 @@
'leszeks+watch@chromium.org',
'verwaest+watch@chromium.org',
],
'feature_shipping_status': [
'saelo+watch@chromium.org',
'flags': [
'v8-flag-updates@chromium.org',
],
'heap_changes': [
'hpayer@chromium.org',

85
deps/v8/WORKSPACE vendored
View file

@ -1,85 +0,0 @@
# Copyright 2021 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
workspace(name = "v8")
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
http_archive(
name = "bazel_skylib",
sha256 = "1c531376ac7e5a180e0237938a2536de0c54d93f5c278634818e0efc952dd56c",
urls = [
"https://github.com/bazelbuild/bazel-skylib/releases/download/1.0.3/bazel-skylib-1.0.3.tar.gz",
"https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/releases/download/1.0.3/bazel-skylib-1.0.3.tar.gz",
],
)
load("@bazel_skylib//:workspace.bzl", "bazel_skylib_workspace")
bazel_skylib_workspace()
http_archive(
name = "rules_python",
sha256 = "a30abdfc7126d497a7698c29c46ea9901c6392d6ed315171a6df5ce433aa4502",
strip_prefix = "rules_python-0.6.0",
url = "https://github.com/bazelbuild/rules_python/archive/0.6.0.tar.gz",
)
load("@rules_python//python:pip.bzl", "pip_install")
pip_install(
name = "v8_python_deps",
extra_pip_args = ["--require-hashes"],
requirements = "//:bazel/requirements.txt",
)
local_repository(
name = "com_google_absl",
path = "third_party/abseil-cpp",
)
bind(
name = "absl_optional",
actual = "@com_google_absl//absl/types:optional"
)
bind(
name = "absl_btree",
actual = "@com_google_absl//absl/container:btree"
)
bind(
name = "absl_flat_hash_map",
actual = "@com_google_absl//absl/container:flat_hash_map"
)
bind(
name = "absl_flat_hash_set",
actual = "@com_google_absl//absl/container:flat_hash_set"
)
new_local_repository(
name = "com_googlesource_chromium_icu",
build_file = ":bazel/BUILD.icu",
path = "third_party/icu",
)
bind(
name = "icu",
actual = "@com_googlesource_chromium_icu//:icu",
)
http_archive(
name = "intel_ittapi",
add_prefix = "third_party/ittapi",
build_file = "@//:bazel/BUILD.ittapi",
sha256 = "36c42d3f2446ddfaa2d7dfa02dfaa79615933f1a68a72d7e4f1d70de7b56e2c9",
strip_prefix = "ittapi-3.24.0",
url = "https://github.com/intel/ittapi/archive/refs/tags/v3.24.0.tar.gz",
)
bind(
name = "ittapi",
actual = "@intel_ittapi//:lib_ittapi",
)

View file

@ -98,7 +98,7 @@ def _default_args():
"UNICODE",
"_UNICODE",
"_CRT_RAND_S",
"_WIN32_WINNT=0x0602", # Override bazel default to Windows 8
"_WIN32_WINNT=0x0A00", # Override bazel default to Windows 10
],
"//conditions:default": [],
}),
@ -124,6 +124,7 @@ def _default_args():
"@v8//bazel/config:is_clang": [
"-Wno-invalid-offsetof",
"-Wno-deprecated-this-capture",
"-Wno-deprecated-declarations",
"-std=c++20",
],
"@v8//bazel/config:is_gcc": [
@ -176,7 +177,7 @@ def _default_args():
"Advapi32.lib",
],
"@v8//bazel/config:is_macos": ["-pthread"],
"//conditions:default": ["-Wl,--no-as-needed -ldl -pthread"],
"//conditions:default": ["-Wl,--no-as-needed -ldl -latomic -pthread"],
}) + select({
":should_add_rdynamic": ["-rdynamic"],
"//conditions:default": [],
@ -316,6 +317,10 @@ def v8_library(
# split the set of outputs by using OutputGroupInfo, that way we do not need to
# run the torque generator twice.
def _torque_files_impl(ctx):
# Allow building V8 as a dependency: workspace_root points to external/v8
# when building V8 from a different repository and empty otherwise.
v8root = ctx.label.workspace_root
if v8root == "":
v8root = "."
# Arguments
@ -432,7 +437,7 @@ def _v8_target_cpu_transition_impl(settings,
"armeabi-v7a": "arm32",
"s390x": "s390x",
"riscv64": "riscv64",
"ppc": "ppc64le",
"ppc64": "ppc64le",
}
v8_target_cpu = mapping[settings["//command_line_option:cpu"]]
return {"@v8//bazel/config:v8_target_cpu": v8_target_cpu}
@ -495,6 +500,7 @@ def v8_mksnapshot(name, args, suffix = ""):
suffix = suffix,
target_os = select({
"@v8//bazel/config:is_macos": "mac",
"@v8//bazel/config:is_windows": "win",
"//conditions:default": "",
}),
)
@ -506,6 +512,7 @@ def v8_mksnapshot(name, args, suffix = ""):
suffix = suffix,
target_os = select({
"@v8//bazel/config:is_macos": "mac",
"@v8//bazel/config:is_windows": "win",
"//conditions:default": "",
}),
)
@ -535,6 +542,7 @@ def build_config_content(cpu, icu):
("arch", arch),
("asan", "false"),
("atomic_object_field_writes", "false"),
("cet_shadow_stack", "false"),
("cfi", "false"),
("clang_coverage", "false"),
("clang", "true"),
@ -564,6 +572,7 @@ def build_config_content(cpu, icu):
("leaptiering", "true"),
("lite_mode", "false"),
("local_off_stack_check", "false"),
("lower_limits_mode", "false"),
("memory_corruption_api", "false"),
("mips_arch_variant", '""'),
("mips_use_msa", "false"),
@ -588,6 +597,7 @@ def build_config_content(cpu, icu):
("verify_csa", "false"),
("verify_heap", "false"),
("verify_predictable", "false"),
("wasm_random_fuzzers", "false"),
("write_barriers", "false"),
])

View file

@ -0,0 +1,47 @@
# Copyright 2025 The V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Import into partition_alloc, some required variable definitions.
import("//build/config/compiler/compiler.gni")
import("//build/config/dcheck_always_on.gni")
import("//build/config/sanitizers/sanitizers.gni")
# partition_alloc is performance critical and it should generally be optimized
# for speed, even in debug mode.
partition_alloc_remove_configs =
[ "//build/config/compiler:default_optimization" ]
partition_alloc_add_configs = [ "//build/config/compiler:optimize_speed" ]
partition_alloc_enable_arc_config = "//build/config/compiler:enable_arc"
# MSVC's cl.exe compiler is not supported.
_supported_compiler = is_clang || !is_win
# Windows: debug CRT is not yet compatible with the allocator shim.
# https://learn.microsoft.com/en-us/cpp/c-runtime-library/reference/free-dbg
_supported_build_type = !is_win || (!is_component_build && !is_debug)
_supported_platform =
is_android || is_chromeos || is_fuchsia || is_linux || is_mac || is_win
# Sanitizers are already replacing the allocator with their own. We shouldn't
# enable partition_alloc when this happens
_use_sanitizer = is_asan || is_cfi || is_msan || is_tsan || is_ubsan
_supported = _supported_compiler && _supported_build_type &&
_supported_platform && !_use_sanitizer
use_partition_alloc_as_malloc_default = _supported
use_allocator_shim_default = _supported
enable_backup_ref_ptr_support_default = _supported
put_ref_count_in_previous_slot_default = true
enable_backup_ref_ptr_slow_checks_default = false
enable_dangling_raw_ptr_checks_default = _supported
assert_cpp20_default = true
# V8 doesn't use raw_ptr, so it doesn't really matter. Still, it has to be
# defined, so we take the configuration used by v8.
raw_ptr_zero_on_construct_default = true
raw_ptr_zero_on_move_default = true
raw_ptr_zero_on_destruct_default = false

View file

@ -3,8 +3,5 @@
# found in the LICENSE file.
declare_args() {
# TODO(liviurau): Remove old name after Chromium config update
# https://crbug.com/1476977.
is_on_release_branch = false
v8_is_on_release_branch = true
}

41
deps/v8/gni/v8.gni vendored
View file

@ -13,6 +13,10 @@ import("//build_overrides/build.gni")
import("release_branch_toggle.gni")
import("split_static_library.gni")
if (is_ios) {
import("//build/config/apple/mobile_config.gni") # For `target_platform`.
}
declare_args() {
# Includes files needed for correctness fuzzing.
v8_correctness_fuzzer = false
@ -75,12 +79,19 @@ declare_args() {
# Sets -DV8_LITE_MODE.
v8_enable_lite_mode = false
# iOS executable code pages is in 17.4 SDK. We
# use target_os == "ios" here because it isn't equivalent
# to is_ios (is_ios is based on host_os).
# We use target_os == "ios" here because it isn't equivalent to is_ios
# (is_ios is based on host_os).
if (target_os == "ios") {
if (target_platform == "iphoneos") {
# Support for iOS executable code pages is in the 17.4 SDK.
# TODO(dtapuska): Change this to an assert.
v8_enable_lite_mode = ios_deployment_target != "17.4"
} else if (target_platform == "tvos") {
# tvOS runs in single process mode and is not allowed to use JIT.
# TODO(crbug.com/394710095): Enable the v8 lite mode to run v8 with the
# jitless mode on tvOS.
v8_enable_lite_mode = true
}
}
# Enable the Turbofan compiler.
@ -102,10 +113,7 @@ declare_args() {
v8_enable_wasm_simd256_revec = false
# Enable runtime call stats.
# TODO(liviurau): Remove old name after Chromium config update
# https://crbug.com/1476977.
v8_enable_runtime_call_stats =
!(is_on_release_branch || v8_is_on_release_branch)
v8_enable_runtime_call_stats = !v8_is_on_release_branch
# Add fuzzilli fuzzer support.
v8_fuzzilli = false
@ -128,6 +136,12 @@ declare_args() {
cppgc_is_standalone = false
# Enables certain checks on API level functionality.
cppgc_enable_api_checks = is_debug || dcheck_always_on
# Enable slow checks on API level functionality.
cppgc_enable_slow_api_checks = false
# Enable object names in cppgc for profiling purposes.
cppgc_enable_object_names = is_chrome_for_testing
@ -143,12 +157,9 @@ declare_args() {
# Enable pointer compression in cppgc.
cppgc_enable_pointer_compression = false
# Enable 2gb cage for fast compression/decompression. Currently disabled
# due to an increased number of OOMs.
cppgc_enable_2gb_cage = false
# Enable support for larger cages, up to 16GB.
cppgc_enable_larger_cage = true
# iOS cannot mmap above 8GB, so use the smaller cage.
cppgc_enable_larger_cage = !is_ios
# Enable advanced BigInt algorithms, costing about 10-30 KB binary size
# depending on platform. Disabled on Android to save binary size.
@ -179,6 +190,9 @@ declare_args() {
# (experimental).
# TODO(sroettger): enable by default once we have bot support for testing.
v8_enable_memory_sealing = false
# Sets -DV8_ENABLE_ETW_STACK_WALKING. Enables ETW Stack Walking
v8_enable_etw_stack_walking = is_win
}
if (v8_use_external_startup_data == "") {
@ -218,7 +232,8 @@ assert(!(v8_enable_webassembly && v8_enable_lite_mode),
if (v8_enable_pointer_compression == "") {
v8_enable_pointer_compression =
v8_current_cpu == "arm64" || v8_current_cpu == "x64"
v8_current_cpu == "arm64" || v8_current_cpu == "x64" ||
v8_current_cpu == "loong64"
}
# The Wasm interpreter is currently supported only on arm64 and x64, on

View file

@ -5,6 +5,8 @@ include_rules = [
# Used by v8-cppgc.h to bridge to cppgc.
"+cppgc/custom-space.h",
"+cppgc/heap-statistics.h",
"+cppgc/internal/conditional-stack-allocated.h",
"+cppgc/internal/write-barrier.h",
"+cppgc/visitor.h",
"+perfetto",
]

View file

@ -9,3 +9,6 @@
monorail {
component: "Blink>JavaScript>API"
}
buganizer_public: {
component_id: 1456124
}

View file

@ -14,6 +14,8 @@ per-file js_protocol.pdl=file:../src/inspector/OWNERS
per-file v8-inspector*=file:../src/inspector/OWNERS
per-file v8-inspector*=file:../src/inspector/OWNERS
per-file v8-profiler.h=file:../src/profiler/OWNERS
# Needed by the auto_tag builder
per-file v8-version.h=v8-ci-autoroll-builder@chops-service-accounts.iam.gserviceaccount.com

View file

@ -44,8 +44,7 @@ class AllocationHandle;
namespace internal {
// Similar to C++17 std::align_val_t;
enum class AlignVal : size_t {};
using AlignVal = std::align_val_t;
class MakeGarbageCollectedTraitInternal {
protected:

View file

@ -1,30 +0,0 @@
// Copyright 2020 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef INCLUDE_CPPGC_EPHEMERON_PAIR_H_
#define INCLUDE_CPPGC_EPHEMERON_PAIR_H_
#include "cppgc/liveness-broker.h"
#include "cppgc/member.h"
namespace cppgc {
/**
* An ephemeron pair is used to conditionally retain an object.
* The `value` will be kept alive only if the `key` is alive.
*/
template <typename K, typename V>
struct EphemeronPair {
EphemeronPair(K* k, V* v) : key(k), value(v) {}
WeakMember<K> key;
Member<V> value;
void ClearValueIfKeyIsDead(const LivenessBroker& broker) {
if (!broker.IsHeapObjectAlive(key)) value = nullptr;
}
};
} // namespace cppgc
#endif // INCLUDE_CPPGC_EPHEMERON_PAIR_H_

View file

@ -33,16 +33,6 @@ static constexpr uint16_t kFullyConstructedBitMask = uint16_t{1};
static constexpr size_t kPageSizeBits = 17;
static constexpr size_t kPageSize = size_t{1} << kPageSizeBits;
#if defined(V8_HOST_ARCH_ARM64) && defined(V8_OS_DARWIN)
constexpr size_t kGuardPageSize = 0;
#elif defined(V8_HOST_ARCH_PPC64)
constexpr size_t kGuardPageSize = 0;
#elif defined(V8_HOST_ARCH_LOONG64) || defined(V8_HOST_ARCH_MIPS64)
constexpr size_t kGuardPageSize = 0;
#else
constexpr size_t kGuardPageSize = 4096;
#endif
static constexpr size_t kLargeObjectSizeThreshold = kPageSize / 2;
#if defined(CPPGC_POINTER_COMPRESSION)
@ -54,12 +44,6 @@ constexpr unsigned kPointerCompressionShift = 1;
#endif // !defined(CPPGC_POINTER_COMPRESSION)
#if defined(CPPGC_CAGED_HEAP)
#if defined(CPPGC_2GB_CAGE)
constexpr size_t kCagedHeapDefaultReservationSize =
static_cast<size_t>(2) * kGB;
constexpr size_t kCagedHeapMaxReservationSize =
kCagedHeapDefaultReservationSize;
#else // !defined(CPPGC_2GB_CAGE)
constexpr size_t kCagedHeapDefaultReservationSize =
static_cast<size_t>(4) * kGB;
#if defined(CPPGC_POINTER_COMPRESSION)
@ -69,7 +53,6 @@ constexpr size_t kCagedHeapMaxReservationSize =
constexpr size_t kCagedHeapMaxReservationSize =
kCagedHeapDefaultReservationSize;
#endif // !defined(CPPGC_POINTER_COMPRESSION)
#endif // !defined(CPPGC_2GB_CAGE)
constexpr size_t kCagedHeapReservationAlignment = kCagedHeapMaxReservationSize;
#endif // defined(CPPGC_CAGED_HEAP)

View file

@ -19,9 +19,7 @@ class BasePageHandle {
public:
static V8_INLINE BasePageHandle* FromPayload(void* payload) {
return reinterpret_cast<BasePageHandle*>(
(reinterpret_cast<uintptr_t>(payload) &
~(api_constants::kPageSize - 1)) +
api_constants::kGuardPageSize);
reinterpret_cast<uintptr_t>(payload) & ~(api_constants::kPageSize - 1));
}
static V8_INLINE const BasePageHandle* FromPayload(const void* payload) {
return FromPayload(const_cast<void*>(payload));
@ -33,7 +31,7 @@ class BasePageHandle {
protected:
explicit BasePageHandle(HeapHandle& heap_handle) : heap_handle_(heap_handle) {
CPPGC_DCHECK(reinterpret_cast<uintptr_t>(this) % api_constants::kPageSize ==
api_constants::kGuardPageSize);
0);
}
HeapHandle& heap_handle_;
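A quick numeric sanity check of the mask-only lookup above (a sketch, not part of the header): with kPageSize == 1 << 17 from api-constants.h, masking clears the low 17 bits, so an arbitrary payload address maps to the start of its 128 KiB page with no guard-page offset added back.
static_assert((0x7f12'3456'7890ull & ~((1ull << 17) - 1)) == 0x7f12'3456'0000ull);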

View file

@ -77,11 +77,7 @@ class V8_EXPORT AgeTable final {
__builtin_ctz(static_cast<uint32_t>(kCardSizeInBytes));
#else //! V8_HAS_BUILTIN_CTZ
// Hardcode and check with assert.
#if defined(CPPGC_2GB_CAGE)
11;
#else // !defined(CPPGC_2GB_CAGE)
12;
#endif // !defined(CPPGC_2GB_CAGE)
#endif // !V8_HAS_BUILTIN_CTZ
static_assert((1 << kGranularityBits) == kCardSizeInBytes);
const size_t entry = offset >> kGranularityBits;

View file

@ -32,16 +32,12 @@ class V8_EXPORT CagedHeapBase {
}
V8_INLINE static bool AreWithinCage(const void* addr1, const void* addr2) {
#if defined(CPPGC_2GB_CAGE)
static constexpr size_t kHeapBaseShift = sizeof(uint32_t) * CHAR_BIT - 1;
#else //! defined(CPPGC_2GB_CAGE)
#if defined(CPPGC_POINTER_COMPRESSION)
static constexpr size_t kHeapBaseShift =
31 + api_constants::kPointerCompressionShift;
#else // !defined(CPPGC_POINTER_COMPRESSION)
static constexpr size_t kHeapBaseShift = sizeof(uint32_t) * CHAR_BIT;
#endif // !defined(CPPGC_POINTER_COMPRESSION)
#endif //! defined(CPPGC_2GB_CAGE)
static_assert((static_cast<size_t>(1) << kHeapBaseShift) ==
api_constants::kCagedHeapMaxReservationSize);
CPPGC_DCHECK(g_heap_base_);

View file

@ -0,0 +1,41 @@
// Copyright 2025 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef INCLUDE_CPPGC_INTERNAL_CONDITIONAL_STACK_ALLOCATED_H_
#define INCLUDE_CPPGC_INTERNAL_CONDITIONAL_STACK_ALLOCATED_H_
#include <type_traits>
#include "cppgc/macros.h" // NOLINT(build/include_directory)
#include "cppgc/type-traits.h" // NOLINT(build/include_directory)
namespace cppgc {
namespace internal {
// Base class that is marked as stack allocated if T is either marked as stack
// allocated or a traceable type.
template <typename T>
class ConditionalStackAllocatedBase;
template <typename T>
concept RequiresStackAllocated =
!std::is_void_v<T> &&
(cppgc::IsStackAllocatedType<T> || cppgc::internal::IsTraceableV<T> ||
cppgc::IsGarbageCollectedOrMixinTypeV<T>);
template <typename T>
requires(RequiresStackAllocated<T>)
class ConditionalStackAllocatedBase<T> {
public:
CPPGC_STACK_ALLOCATED();
};
template <typename T>
requires(!RequiresStackAllocated<T>)
class ConditionalStackAllocatedBase<T> {};
} // namespace internal
} // namespace cppgc
#endif // INCLUDE_CPPGC_INTERNAL_CONDITIONAL_STACK_ALLOCATED_H_
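A hedged usage sketch (the wrapper below is hypothetical, not part of cppgc): a class template can inherit from ConditionalStackAllocatedBase<T> so that it picks up the stack-allocated restriction exactly when T is stack allocated or traceable.
#include "cppgc/internal/conditional-stack-allocated.h"
// Hypothetical wrapper: becomes stack-allocated iff T requires it.
template <typename T>
class Wrapper final
    : public cppgc::internal::ConditionalStackAllocatedBase<T> {
 public:
  explicit Wrapper(T* value) : value_(value) {}
  T* get() const { return value_; }
 private:
  T* value_;  // Illustration only; not traced.
};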

View file

@ -20,18 +20,18 @@ FatalImpl(const char*, const SourceLocation& = SourceLocation::Current());
template <typename>
struct EatParams {};
#if defined(DEBUG)
#ifdef CPPGC_ENABLE_API_CHECKS
#define CPPGC_DCHECK_MSG(condition, message) \
do { \
if (V8_UNLIKELY(!(condition))) { \
::cppgc::internal::DCheckImpl(message); \
} \
} while (false)
#else // !defined(DEBUG)
#else // !CPPGC_ENABLE_API_CHECKS
#define CPPGC_DCHECK_MSG(condition, message) \
(static_cast<void>(::cppgc::internal::EatParams<decltype( \
static_cast<void>(condition), message)>{}))
#endif // !defined(DEBUG)
#endif // !CPPGC_ENABLE_API_CHECKS
#define CPPGC_DCHECK(condition) CPPGC_DCHECK_MSG(condition, #condition)

View file

@ -10,6 +10,7 @@
#include <type_traits>
#include "cppgc/internal/api-constants.h"
#include "cppgc/internal/caged-heap.h"
#include "cppgc/internal/logging.h"
#include "cppgc/sentinel-pointer.h"
#include "v8config.h" // NOLINT(build/include_directory)
@ -71,11 +72,17 @@ class V8_EXPORT CageBaseGlobal final {
class V8_TRIVIAL_ABI CompressedPointer final {
public:
struct AtomicInitializerTag {};
using IntegralType = uint32_t;
static constexpr auto kWriteBarrierSlotType =
WriteBarrierSlotType::kCompressed;
V8_INLINE CompressedPointer() : value_(0u) {}
V8_INLINE explicit CompressedPointer(const void* value,
AtomicInitializerTag) {
StoreAtomic(value);
}
V8_INLINE explicit CompressedPointer(const void* ptr)
: value_(Compress(ptr)) {}
V8_INLINE explicit CompressedPointer(std::nullptr_t) : value_(0u) {}
@ -139,17 +146,12 @@ class V8_TRIVIAL_ABI CompressedPointer final {
CPPGC_DCHECK(
(reinterpret_cast<uintptr_t>(ptr) & kPointerCompressionShiftMask) == 0);
#if defined(CPPGC_2GB_CAGE)
// Truncate the pointer.
auto compressed =
static_cast<IntegralType>(reinterpret_cast<uintptr_t>(ptr));
#else // !defined(CPPGC_2GB_CAGE)
const auto uptr = reinterpret_cast<uintptr_t>(ptr);
// Shift the pointer and truncate.
auto compressed = static_cast<IntegralType>(
uptr >> api_constants::kPointerCompressionShift);
#endif // !defined(CPPGC_2GB_CAGE)
// Normal compressed pointers must have the MSB set.
// Normal compressed pointers must have the MSB set. This is guaranteed by
// the cage alignment.
CPPGC_DCHECK((!compressed || compressed == kCompressedSentinel) ||
(compressed & (1 << 31)));
return compressed;
@ -164,43 +166,77 @@ class V8_TRIVIAL_ABI CompressedPointer final {
static V8_INLINE void* Decompress(IntegralType ptr, uintptr_t base) {
CPPGC_DCHECK(CageBaseGlobal::IsSet());
CPPGC_DCHECK(base == CageBaseGlobal::Get());
// Treat compressed pointer as signed and cast it to uint64_t, which will
// sign-extend it.
#if defined(CPPGC_2GB_CAGE)
const uint64_t mask = static_cast<uint64_t>(static_cast<int32_t>(ptr));
#else // !defined(CPPGC_2GB_CAGE)
// Then, shift the result. It's important to shift the unsigned
// value, as otherwise it would result in undefined behavior.
// Sign-extend compressed pointer to full width. This ensures that normal
// pointers have only 1s in the base part of the address. It's also
// important to shift the unsigned value, as otherwise it would result in
// undefined behavior.
const uint64_t mask = static_cast<uint64_t>(static_cast<int32_t>(ptr))
<< api_constants::kPointerCompressionShift;
#endif // !defined(CPPGC_2GB_CAGE)
// Set the base part of the address for normal compressed pointers. Note
// that nullptr and the sentinel value do not have 1s in the base part and
// remain as-is in this operation.
return reinterpret_cast<void*>(mask & base);
}
// For a given memory `address`, this method iterates all possible pointers
// that can be reasonably recovered with the current compression scheme and
// passes them to `callback`.
template <typename Callback>
static V8_INLINE void VisitPossiblePointers(const void* address,
Callback callback);
private:
#if defined(CPPGC_2GB_CAGE)
static constexpr IntegralType kCompressedSentinel =
SentinelPointer::kSentinelValue;
#else // !defined(CPPGC_2GB_CAGE)
static constexpr IntegralType kCompressedSentinel =
SentinelPointer::kSentinelValue >>
api_constants::kPointerCompressionShift;
#endif // !defined(CPPGC_2GB_CAGE)
// All constructors initialize `value_`. Do not add a default value here as it
// results in a non-atomic write on some builds, even when the atomic version
// of the constructor is used.
IntegralType value_;
};
template <typename Callback>
// static
void CompressedPointer::VisitPossiblePointers(const void* address,
Callback callback) {
const uintptr_t base = CageBaseGlobal::Get();
CPPGC_DCHECK(base);
// We may have random compressed pointers on stack (e.g. due to inlined
// collections). These could be present in both halfwords.
const uint32_t compressed_low =
static_cast<uint32_t>(reinterpret_cast<uintptr_t>(address));
callback(CompressedPointer::Decompress(compressed_low, base));
const uint32_t compressed_high = static_cast<uint32_t>(
reinterpret_cast<uintptr_t>(address) >> (sizeof(uint32_t) * CHAR_BIT));
callback(CompressedPointer::Decompress(compressed_high, base));
// Iterate possible intermediate values, see `Decompress()`. The intermediate
// value of decompressing is a 64-bit value where 35 bits are the offset. We
// don't assume the sign extension is stored, so we recover that part here.
//
// Note that this case conveniently also recovers the full pointer.
static constexpr uintptr_t kBitForIntermediateValue =
(sizeof(uint32_t) * CHAR_BIT) + api_constants::kPointerCompressionShift;
static constexpr uintptr_t kSignExtensionMask =
~((uintptr_t{1} << kBitForIntermediateValue) - 1);
const uintptr_t intermediate_sign_extended =
reinterpret_cast<uintptr_t>(address) | kSignExtensionMask;
callback(reinterpret_cast<void*>(intermediate_sign_extended & base));
}
#endif // defined(CPPGC_POINTER_COMPRESSION)
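A standalone round-trip sketch of the scheme above (not the real cppgc API; all constants are illustrative): it assumes a compression shift of 3, matching the 35-bit intermediate offset mentioned in VisitPossiblePointers(), and a base value whose low, in-cage bits are all ones so that the single AND in Decompress() keeps the offset while selecting the cage base.
#include <cstdint>
constexpr unsigned kShift = 3;  // Stand-in for kPointerCompressionShift.
constexpr uint32_t Compress(uint64_t ptr) {
  return static_cast<uint32_t>(ptr >> kShift);  // Shift, then truncate.
}
constexpr uint64_t Decompress(uint32_t value, uint64_t base) {
  // Sign-extend so the upper bits become all ones for a normal pointer, shift
  // back, then AND with the base.
  return (static_cast<uint64_t>(static_cast<int32_t>(value)) << kShift) & base;
}
// Hypothetical cage start with bit 34 set (so the compressed MSB is set) and a
// base value carrying ones in its low 34 bits.
constexpr uint64_t kCageStart = 0x5604'0000'0000ull;
constexpr uint64_t kBase = kCageStart | ((uint64_t{1} << 34) - 1);
constexpr uint64_t kPtr = kCageStart + 0x1234'5678ull;
static_assert(Compress(kPtr) == 0x8246'8ACFu);
static_assert(Decompress(Compress(kPtr), kBase) == kPtr);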
class V8_TRIVIAL_ABI RawPointer final {
public:
struct AtomicInitializerTag {};
using IntegralType = uintptr_t;
static constexpr auto kWriteBarrierSlotType =
WriteBarrierSlotType::kUncompressed;
V8_INLINE RawPointer() : ptr_(nullptr) {}
V8_INLINE explicit RawPointer(const void* ptr, AtomicInitializerTag) {
StoreAtomic(ptr);
}
V8_INLINE explicit RawPointer(const void* ptr) : ptr_(ptr) {}
V8_INLINE const void* Load() const { return ptr_; }
@ -243,6 +279,13 @@ class V8_TRIVIAL_ABI RawPointer final {
return a.ptr_ >= b.ptr_;
}
template <typename Callback>
static V8_INLINE void VisitPossiblePointers(const void* address,
Callback callback) {
// Pass along the full pointer.
return callback(const_cast<void*>(address));
}
private:
// All constructors initialize `ptr_`. Do not add a default value here as it
// results in a non-atomic write on some builds, even when the atomic version

View file

@ -18,6 +18,7 @@ namespace internal {
class CrossThreadPersistentRegion;
class FatalOutOfMemoryHandler;
class HeapBase;
class RootVisitor;
// PersistentNode represents a variant of two states:
@ -133,10 +134,14 @@ class V8_EXPORT PersistentRegionBase {
};
// Variant of PersistentRegionBase that checks whether the allocation and
// freeing happens only on the thread that created the region.
// freeing happens only on the thread that created the heap.
class V8_EXPORT PersistentRegion final : public PersistentRegionBase {
public:
explicit PersistentRegion(const FatalOutOfMemoryHandler&);
V8_INLINE PersistentRegion(const HeapBase& heap,
const FatalOutOfMemoryHandler& oom_handler)
: PersistentRegionBase(oom_handler), heap_(heap) {
CPPGC_DCHECK(IsCreationThread());
}
// Clears Persistent fields to avoid stale pointers after heap teardown.
~PersistentRegion() = default;
@ -161,7 +166,7 @@ class V8_EXPORT PersistentRegion final : public PersistentRegionBase {
private:
bool IsCreationThread();
int creation_thread_id_;
const HeapBase& heap_;
};
// CrossThreadPersistent uses PersistentRegionBase but protects it using this

View file

@ -28,13 +28,19 @@ class WeakMemberTag;
class UntracedMemberTag;
struct DijkstraWriteBarrierPolicy {
V8_INLINE static void InitializingBarrier(const void*, const void*) {
// Since in initializing writes the source object is always white, having no
// barrier doesn't break the tri-color invariant.
V8_INLINE static void InitializingBarrier(const void*, const void*) {}
V8_INLINE static void InitializingBarrier(const void*, RawPointer storage) {
}
#if defined(CPPGC_POINTER_COMPRESSION)
V8_INLINE static void InitializingBarrier(const void*,
CompressedPointer storage) {}
#endif
template <WriteBarrierSlotType SlotType>
V8_INLINE static void AssigningBarrier(const void* slot, const void* value) {
V8_INLINE static void AssigningBarrier(const void* slot,
const void* value) {
#ifdef CPPGC_SLIM_WRITE_BARRIER
if (V8_UNLIKELY(WriteBarrier::IsEnabled()))
WriteBarrier::CombinedWriteBarrierSlow<SlotType>(slot);
@ -101,6 +107,11 @@ struct DijkstraWriteBarrierPolicy {
struct NoWriteBarrierPolicy {
V8_INLINE static void InitializingBarrier(const void*, const void*) {}
V8_INLINE static void InitializingBarrier(const void*, RawPointer storage) {}
#if defined(CPPGC_POINTER_COMPRESSION)
V8_INLINE static void InitializingBarrier(const void*,
CompressedPointer storage) {}
#endif
template <WriteBarrierSlotType>
V8_INLINE static void AssigningBarrier(const void*, const void*) {}
template <WriteBarrierSlotType, typename MemberStorage>
@ -119,10 +130,29 @@ template <bool kCheckOffHeapAssignments>
class V8_EXPORT SameThreadEnabledCheckingPolicy
: private SameThreadEnabledCheckingPolicyBase {
protected:
template <typename T>
V8_INLINE void CheckPointer(RawPointer raw_pointer) {
if (raw_pointer.IsCleared() || raw_pointer.IsSentinel()) {
return;
}
CheckPointersImplTrampoline<T>::Call(
this, static_cast<const T*>(raw_pointer.Load()));
}
#if defined(CPPGC_POINTER_COMPRESSION)
template <typename T>
V8_INLINE void CheckPointer(CompressedPointer compressed_pointer) {
if (compressed_pointer.IsCleared() || compressed_pointer.IsSentinel()) {
return;
}
CheckPointersImplTrampoline<T>::Call(
this, static_cast<const T*>(compressed_pointer.Load()));
}
#endif
template <typename T>
void CheckPointer(const T* ptr) {
if (!ptr || (kSentinelPointer == ptr)) return;
if (!ptr || (kSentinelPointer == ptr)) {
return;
}
CheckPointersImplTrampoline<T>::Call(this, ptr);
}
@ -145,20 +175,27 @@ class V8_EXPORT SameThreadEnabledCheckingPolicy
class DisabledCheckingPolicy {
protected:
V8_INLINE void CheckPointer(const void*) {}
template <typename T>
V8_INLINE void CheckPointer(T*) {}
template <typename T>
V8_INLINE void CheckPointer(RawPointer) {}
#if defined(CPPGC_POINTER_COMPRESSION)
template <typename T>
V8_INLINE void CheckPointer(CompressedPointer) {}
#endif
};
#ifdef DEBUG
#ifdef CPPGC_ENABLE_SLOW_API_CHECKS
// Off-heap members are not connected to the object graph and thus cannot resurrect
// dead objects.
using DefaultMemberCheckingPolicy =
SameThreadEnabledCheckingPolicy<false /* kCheckOffHeapAssignments*/>;
using DefaultPersistentCheckingPolicy =
SameThreadEnabledCheckingPolicy<true /* kCheckOffHeapAssignments*/>;
#else // !DEBUG
#else // !CPPGC_ENABLE_SLOW_API_CHECKS
using DefaultMemberCheckingPolicy = DisabledCheckingPolicy;
using DefaultPersistentCheckingPolicy = DisabledCheckingPolicy;
#endif // !DEBUG
#endif // !CPPGC_ENABLE_SLOW_API_CHECKS
// For CT(W)P neither marking information (for value) nor object start bitmap
// (for slot) are guaranteed to be present because there's no synchronization
// between heaps after marking.

View file

@ -11,10 +11,18 @@
namespace cppgc {
#define CPPGC_DISALLOW_NEW() \
public: \
using IsDisallowNewMarker CPPGC_UNUSED = int; \
void* operator new(size_t, void* location) { return location; } \
void* operator new(size_t) = delete; \
static_assert(true, "Force semicolon.")
// Use CPPGC_STACK_ALLOCATED if the object is only stack allocated.
// Add the CPPGC_STACK_ALLOCATED_IGNORE annotation on a case-by-case basis when
// enforcement of CPPGC_STACK_ALLOCATED should be suppressed.
#if defined(__clang__)
#define CPPGC_STACK_ALLOCATED() \
public: \
using IsStackAllocatedTypeMarker CPPGC_UNUSED = int; \
@ -23,13 +31,26 @@ namespace cppgc {
void* operator new(size_t) = delete; \
void* operator new(size_t, void*) = delete; \
static_assert(true, "Force semicolon.")
#define CPPGC_STACK_ALLOCATED_IGNORE(bug_or_reason) \
__attribute__((annotate("stack_allocated_ignore")))
#define CPPGC_PLUGIN_IGNORE(bug_or_reason) \
__attribute__((annotate("blink_gc_plugin_ignore"), \
annotate("stack_allocated_ignore")))
#else // !defined(__clang__)
#define CPPGC_STACK_ALLOCATED() static_assert(true, "Force semicolon.")
#define CPPGC_STACK_ALLOCATED_IGNORE(bug_or_reason)
#define CPPGC_PLUGIN_IGNORE(bug_or_reason)
#endif // !defined(__clang__)
template <typename T>
concept IsStackAllocatedType =
requires { typename T::IsStackAllocatedTypeMarker; };
} // namespace cppgc
#endif // INCLUDE_CPPGC_MACROS_H_
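A brief sketch of how the macro and the new IsStackAllocatedType concept fit together (the class below is hypothetical; the static_assert is guarded because only Clang builds inject the marker typedef):
#include "cppgc/macros.h"
class StackOnlyCursor {
  CPPGC_STACK_ALLOCATED();  // Deletes operator new and adds the marker typedef.
 public:
  int depth = 0;
};
#if defined(__clang__)
static_assert(cppgc::IsStackAllocatedType<StackOnlyCursor>);
#endif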

View file

@ -38,9 +38,8 @@ class V8_TRIVIAL_ABI MemberBase {
V8_INLINE MemberBase() = default;
V8_INLINE explicit MemberBase(const void* value) : raw_(value) {}
V8_INLINE MemberBase(const void* value, AtomicInitializerTag) {
SetRawAtomic(value);
}
V8_INLINE MemberBase(const void* value, AtomicInitializerTag)
: raw_(value, typename RawStorage::AtomicInitializerTag{}) {}
V8_INLINE explicit MemberBase(RawStorage raw) : raw_(raw) {}
V8_INLINE explicit MemberBase(std::nullptr_t) : raw_(nullptr) {}
@ -87,7 +86,7 @@ class V8_TRIVIAL_ABI BasicMember final : private MemberBase<StorageType>,
V8_INLINE BasicMember(SentinelPointer s) : Base(s) {} // NOLINT
V8_INLINE BasicMember(T* raw) : Base(raw) { // NOLINT
InitializingWriteBarrier(raw);
this->CheckPointer(Get());
CheckPointer(raw);
}
V8_INLINE BasicMember(T& raw) // NOLINT
: BasicMember(&raw) {}
@ -103,7 +102,7 @@ class V8_TRIVIAL_ABI BasicMember final : private MemberBase<StorageType>,
V8_INLINE BasicMember(T* raw, AtomicInitializerTag atomic)
: Base(raw, atomic) {
InitializingWriteBarrier(raw);
this->CheckPointer(Get());
CheckPointer(raw);
}
V8_INLINE BasicMember(T& raw, AtomicInitializerTag atomic)
: BasicMember(&raw, atomic) {}
@ -117,7 +116,7 @@ class V8_TRIVIAL_ABI BasicMember final : private MemberBase<StorageType>,
// need to be adjusted.
template <typename U, typename OtherBarrierPolicy, typename OtherWeaknessTag,
typename OtherCheckingPolicy,
std::enable_if_t<internal::IsDecayedSameV<T, U>>* = nullptr>
std::enable_if_t<IsDecayedSameV<T, U>>* = nullptr>
V8_INLINE BasicMember( // NOLINT
const BasicMember<U, OtherWeaknessTag, OtherBarrierPolicy,
OtherCheckingPolicy, StorageType>& other)
@ -125,7 +124,7 @@ class V8_TRIVIAL_ABI BasicMember final : private MemberBase<StorageType>,
template <typename U, typename OtherBarrierPolicy, typename OtherWeaknessTag,
typename OtherCheckingPolicy,
std::enable_if_t<internal::IsStrictlyBaseOfV<T, U>>* = nullptr>
std::enable_if_t<IsStrictlyBaseOfV<T, U>>* = nullptr>
V8_INLINE BasicMember( // NOLINT
const BasicMember<U, OtherWeaknessTag, OtherBarrierPolicy,
OtherCheckingPolicy, StorageType>& other)
@ -142,7 +141,7 @@ class V8_TRIVIAL_ABI BasicMember final : private MemberBase<StorageType>,
// need to be adjusted.
template <typename U, typename OtherBarrierPolicy, typename OtherWeaknessTag,
typename OtherCheckingPolicy,
std::enable_if_t<internal::IsDecayedSameV<T, U>>* = nullptr>
std::enable_if_t<IsDecayedSameV<T, U>>* = nullptr>
V8_INLINE BasicMember(
BasicMember<U, OtherWeaknessTag, OtherBarrierPolicy, OtherCheckingPolicy,
StorageType>&& other) noexcept
@ -152,7 +151,7 @@ class V8_TRIVIAL_ABI BasicMember final : private MemberBase<StorageType>,
template <typename U, typename OtherBarrierPolicy, typename OtherWeaknessTag,
typename OtherCheckingPolicy,
std::enable_if_t<internal::IsStrictlyBaseOfV<T, U>>* = nullptr>
std::enable_if_t<IsStrictlyBaseOfV<T, U>>* = nullptr>
V8_INLINE BasicMember(
BasicMember<U, OtherWeaknessTag, OtherBarrierPolicy, OtherCheckingPolicy,
StorageType>&& other) noexcept
@ -183,10 +182,10 @@ class V8_TRIVIAL_ABI BasicMember final : private MemberBase<StorageType>,
V8_INLINE BasicMember& operator=(
const BasicMember<U, OtherWeaknessTag, OtherBarrierPolicy,
OtherCheckingPolicy, StorageType>& other) {
if constexpr (internal::IsDecayedSameV<T, U>) {
if constexpr (IsDecayedSameV<T, U>) {
return operator=(other.GetRawStorage());
} else {
static_assert(internal::IsStrictlyBaseOfV<T, U>);
static_assert(IsStrictlyBaseOfV<T, U>);
return operator=(other.Get());
}
}
@ -206,10 +205,10 @@ class V8_TRIVIAL_ABI BasicMember final : private MemberBase<StorageType>,
V8_INLINE BasicMember& operator=(
BasicMember<U, OtherWeaknessTag, OtherBarrierPolicy, OtherCheckingPolicy,
StorageType>&& other) noexcept {
if constexpr (internal::IsDecayedSameV<T, U>) {
if constexpr (IsDecayedSameV<T, U>) {
operator=(other.GetRawStorage());
} else {
static_assert(internal::IsStrictlyBaseOfV<T, U>);
static_assert(IsStrictlyBaseOfV<T, U>);
operator=(other.Get());
}
other.Clear();
@ -231,7 +230,7 @@ class V8_TRIVIAL_ABI BasicMember final : private MemberBase<StorageType>,
V8_INLINE BasicMember& operator=(T* other) {
Base::SetRawAtomic(other);
AssigningWriteBarrier(other);
this->CheckPointer(Get());
CheckPointer(other);
return *this;
}
@ -270,9 +269,7 @@ class V8_TRIVIAL_ABI BasicMember final : private MemberBase<StorageType>,
return static_cast<T*>(const_cast<void*>(Base::GetRaw()));
}
V8_INLINE void Clear() {
Base::SetRawStorageAtomic(RawStorage{});
}
V8_INLINE void Clear() { Base::SetRawStorageAtomic(RawStorage{}); }
V8_INLINE T* Release() {
T* result = Get();
@ -284,20 +281,18 @@ class V8_TRIVIAL_ABI BasicMember final : private MemberBase<StorageType>,
return reinterpret_cast<const T**>(Base::GetRawSlot());
}
V8_INLINE RawStorage GetRawStorage() const {
return Base::GetRawStorage();
}
V8_INLINE RawStorage GetRawStorage() const { return Base::GetRawStorage(); }
private:
V8_INLINE explicit BasicMember(RawStorage raw) : Base(raw) {
InitializingWriteBarrier(Get());
this->CheckPointer(Get());
InitializingWriteBarrier();
CheckPointer();
}
V8_INLINE BasicMember& operator=(RawStorage other) {
Base::SetRawStorageAtomic(other);
AssigningWriteBarrier();
this->CheckPointer(Get());
CheckPointer();
return *this;
}
@ -308,6 +303,10 @@ class V8_TRIVIAL_ABI BasicMember final : private MemberBase<StorageType>,
V8_INLINE void InitializingWriteBarrier(T* value) const {
WriteBarrierPolicy::InitializingBarrier(Base::GetRawSlot(), value);
}
V8_INLINE void InitializingWriteBarrier() const {
WriteBarrierPolicy::InitializingBarrier(Base::GetRawSlot(),
Base::GetRawStorage());
}
V8_INLINE void AssigningWriteBarrier(T* value) const {
WriteBarrierPolicy::template AssigningBarrier<
StorageType::kWriteBarrierSlotType>(Base::GetRawSlot(), value);
@ -317,6 +316,12 @@ class V8_TRIVIAL_ABI BasicMember final : private MemberBase<StorageType>,
StorageType::kWriteBarrierSlotType>(Base::GetRawSlot(),
Base::GetRawStorage());
}
V8_INLINE void CheckPointer(T* value) {
CheckingPolicy::template CheckPointer<T>(value);
}
V8_INLINE void CheckPointer() {
CheckingPolicy::template CheckPointer<T>(Base::GetRawStorage());
}
V8_INLINE void ClearFromGC() const { Base::ClearFromGC(); }
@ -341,12 +346,11 @@ V8_INLINE bool operator==(
StorageType>& member1,
const BasicMember<T2, WeaknessTag2, WriteBarrierPolicy2, CheckingPolicy2,
StorageType>& member2) {
if constexpr (internal::IsDecayedSameV<T1, T2>) {
if constexpr (IsDecayedSameV<T1, T2>) {
// Check compressed pointers if types are the same.
return member1.GetRawStorage() == member2.GetRawStorage();
} else {
static_assert(internal::IsStrictlyBaseOfV<T1, T2> ||
internal::IsStrictlyBaseOfV<T2, T1>);
static_assert(IsStrictlyBaseOfV<T1, T2> || IsStrictlyBaseOfV<T2, T1>);
// Otherwise, check decompressed pointers.
return member1.Get() == member2.Get();
}
@ -372,12 +376,12 @@ V8_INLINE bool operator==(
StorageType>& member,
U* raw) {
// Never allow comparison with erased pointers.
static_assert(!internal::IsDecayedSameV<void, U>);
static_assert(!IsDecayedSameV<void, U>);
if constexpr (internal::IsDecayedSameV<T, U>) {
if constexpr (IsDecayedSameV<T, U>) {
// Check compressed pointers if types are the same.
return member.GetRawStorage() == StorageType(raw);
} else if constexpr (internal::IsStrictlyBaseOfV<T, U>) {
} else if constexpr (IsStrictlyBaseOfV<T, U>) {
// Cast the raw pointer to T, which may adjust the pointer.
return member.GetRawStorage() == StorageType(static_cast<T*>(raw));
} else {
@ -494,7 +498,7 @@ V8_INLINE bool operator<(
const BasicMember<T2, WeaknessTag2, WriteBarrierPolicy2, CheckingPolicy2,
StorageType>& member2) {
static_assert(
internal::IsDecayedSameV<T1, T2>,
IsDecayedSameV<T1, T2>,
"Comparison works only for same pointer type modulo cv-qualifiers");
return member1.GetRawStorage() < member2.GetRawStorage();
}
@ -509,7 +513,7 @@ V8_INLINE bool operator<=(
const BasicMember<T2, WeaknessTag2, WriteBarrierPolicy2, CheckingPolicy2,
StorageType>& member2) {
static_assert(
internal::IsDecayedSameV<T1, T2>,
IsDecayedSameV<T1, T2>,
"Comparison works only for same pointer type modulo cv-qualifiers");
return member1.GetRawStorage() <= member2.GetRawStorage();
}
@ -524,7 +528,7 @@ V8_INLINE bool operator>(
const BasicMember<T2, WeaknessTag2, WriteBarrierPolicy2, CheckingPolicy2,
StorageType>& member2) {
static_assert(
internal::IsDecayedSameV<T1, T2>,
IsDecayedSameV<T1, T2>,
"Comparison works only for same pointer type modulo cv-qualifiers");
return member1.GetRawStorage() > member2.GetRawStorage();
}
@ -539,16 +543,15 @@ V8_INLINE bool operator>=(
const BasicMember<T2, WeaknessTag2, WriteBarrierPolicy2, CheckingPolicy2,
StorageType>& member2) {
static_assert(
internal::IsDecayedSameV<T1, T2>,
IsDecayedSameV<T1, T2>,
"Comparison works only for same pointer type modulo cv-qualifiers");
return member1.GetRawStorage() >= member2.GetRawStorage();
}
template <typename T, typename WriteBarrierPolicy, typename CheckingPolicy,
typename StorageType>
struct IsWeak<internal::BasicMember<T, WeakMemberTag, WriteBarrierPolicy,
CheckingPolicy, StorageType>>
: std::true_type {};
struct IsWeak<BasicMember<T, WeakMemberTag, WriteBarrierPolicy, CheckingPolicy,
StorageType>> : std::true_type {};
} // namespace internal
@ -626,4 +629,35 @@ static constexpr size_t kSizeofCompressedMember =
} // namespace cppgc
// Mark `BasicMember<T>` and `T*` as having a common reference type of `T*` (the
// type to which both can be converted or bound). This makes them satisfy
// `std::equality_comparable`, which allows usage like the following:
// ```
// HeapVector<Member<T>> v;
// T* e;
// auto it = std::ranges::find(v, e);
// ```
// Without this, the `find()` call above would fail to compile with an error
// about being unable to invoke `std::ranges::equal_to()`.
template <typename T, typename WeaknessTag, typename WriteBarrierPolicy,
typename CheckingPolicy, typename StorageType,
template <typename> typename TQ, template <typename> typename UQ>
struct std::basic_common_reference<
cppgc::internal::BasicMember<T, WeaknessTag, WriteBarrierPolicy,
CheckingPolicy, StorageType>,
T*, TQ, UQ> {
using type = T*;
};
template <typename T, typename WeaknessTag, typename WriteBarrierPolicy,
typename CheckingPolicy, typename StorageType,
template <typename> typename TQ, template <typename> typename UQ>
struct std::basic_common_reference<
T*,
cppgc::internal::BasicMember<T, WeaknessTag, WriteBarrierPolicy,
CheckingPolicy, StorageType>,
TQ, UQ> {
using type = T*;
};
#endif // INCLUDE_CPPGC_MEMBER_H_

View file

@ -55,6 +55,9 @@ class V8_EXPORT NameProvider {
* Specifies a name for the garbage-collected object. Such names will never
* be hidden, as they are explicitly specified by the user of this API.
*
* Implementations of this function must not allocate garbage-collected
* objects or otherwise modify the cppgc heap.
*
* V8 may call this function while generating a heap snapshot or at other
* times. If V8 is currently generating a heap snapshot (according to
* HeapProfiler::IsTakingSnapshot), then the returned string must stay alive

View file

@ -136,6 +136,8 @@ class V8_EXPORT Platform {
virtual TracingController* GetTracingController();
};
V8_EXPORT bool IsInitialized();
/**
* Process-global initialization of the garbage collector. Must be called before
* creating a Heap.

View file

@ -9,6 +9,7 @@
// against Oilpan types without including any other parts.
#include <cstddef>
#include <type_traits>
#include <utility>
namespace cppgc {

View file

@ -8,12 +8,12 @@
#include <type_traits>
#include "cppgc/custom-space.h"
#include "cppgc/ephemeron-pair.h"
#include "cppgc/garbage-collected.h"
#include "cppgc/internal/logging.h"
#include "cppgc/internal/member-storage.h"
#include "cppgc/internal/pointer-policies.h"
#include "cppgc/liveness-broker.h"
#include "cppgc/macros.h"
#include "cppgc/member.h"
#include "cppgc/sentinel-pointer.h"
#include "cppgc/source-location.h"
@ -36,6 +36,25 @@ class VisitorFactory;
using WeakCallback = void (*)(const LivenessBroker&, const void*);
/**
* An ephemeron pair is used to conditionally retain an object.
* The `value` will be kept alive only if the `key` is alive.
*/
template <typename K, typename V>
struct EphemeronPair {
CPPGC_DISALLOW_NEW();
EphemeronPair(K* k, V* v) : key(k), value(v) {}
WeakMember<K> key;
Member<V> value;
void ClearValueIfKeyIsDead(const LivenessBroker& broker) {
if (!broker.IsHeapObjectAlive(key)) value = nullptr;
}
void Trace(Visitor* visitor) const;
};
/**
* Visitor passed to trace methods. All managed pointers must have called the
* Visitor's trace method on them.
@ -436,6 +455,11 @@ class V8_EXPORT Visitor {
friend class internal::VisitorBase;
};
template <typename K, typename V>
void EphemeronPair<K, V>::Trace(Visitor* visitor) const {
visitor->TraceEphemeron(key, value);
}
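A hedged usage sketch of the relocated EphemeronPair (types below are hypothetical; it assumes the generic Visitor::Trace() path for inlined members dispatches to the Trace() method defined above):
#include "cppgc/garbage-collected.h"
#include "cppgc/visitor.h"
class Key final : public cppgc::GarbageCollected<Key> {
 public:
  void Trace(cppgc::Visitor*) const {}
};
class Value final : public cppgc::GarbageCollected<Value> {
 public:
  void Trace(cppgc::Visitor*) const {}
};
class Owner final : public cppgc::GarbageCollected<Owner> {
 public:
  Owner(Key* k, Value* v) : pair_(k, v) {}
  void Trace(cppgc::Visitor* visitor) const { visitor->Trace(pair_); }
 private:
  // `value` is retained only while `key` is reachable.
  cppgc::EphemeronPair<Key, Value> pair_;
};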
namespace internal {
class V8_EXPORT RootVisitor {

View file

@ -369,6 +369,14 @@ domain Debugger
# call stacks (default).
integer maxDepth
# Replace previous blackbox execution contexts with passed ones. Forces backend to skip
# stepping/pausing in scripts in these execution contexts. VM will try to leave blackboxed script by
# performing 'step in' several times, finally resorting to 'step out' if unsuccessful.
experimental command setBlackboxExecutionContexts
parameters
# Array of execution context unique ids for the debugger to ignore.
array of string uniqueIds
# Replace previous blackbox patterns with passed ones. Forces backend to skip stepping/pausing in
# scripts with url matching one of the patterns. VM will try to leave blackboxed script by
# performing 'step in' several times, finally resorting to 'step out' if unsuccessful.
@ -376,6 +384,8 @@ domain Debugger
parameters
# Array of regexps that will be used to check script url for blackbox state.
array of string patterns
# If true, also ignore scripts with no source url.
optional boolean skipAnonymous
# Makes backend skip steps in the script in blackboxed ranges. VM will try to leave blacklisted
# scripts by performing 'step in' several times, finally resorting to 'step out' if unsuccessful.
@ -554,7 +564,8 @@ domain Debugger
experimental optional array of LocationRange skipList
# Fired when breakpoint is resolved to an actual script and location.
event breakpointResolved
# Deprecated in favor of `resolvedBreakpoints` in the `scriptParsed` event.
deprecated event breakpointResolved
parameters
# Breakpoint unique identifier.
BreakpointId breakpointId
@ -606,13 +617,19 @@ domain Debugger
properties
# Type of the debug symbols.
enum type
None
SourceMap
EmbeddedDWARF
ExternalDWARF
# URL of the external symbol source.
optional string externalURL
type ResolvedBreakpoint extends object
properties
# Breakpoint unique identifier.
BreakpointId breakpointId
# Actual breakpoint location.
Location location
# Fired when virtual machine fails to parse the script.
event scriptFailedToParse
parameters
@ -632,6 +649,8 @@ domain Debugger
Runtime.ExecutionContextId executionContextId
# Content hash of the script, SHA-256.
string hash
# For Wasm modules, the content of the `build_id` custom section.
string buildId
# Embedder-specific auxiliary data likely matching {isDefault: boolean, type: 'default'|'isolated'|'worker', frameId: string}
optional object executionContextAuxData
# URL of source map associated with script (if any).
@ -671,6 +690,8 @@ domain Debugger
Runtime.ExecutionContextId executionContextId
# Content hash of the script, SHA-256.
string hash
# For Wasm modules, the content of the `build_id` custom section.
string buildId
# Embedder-specific auxiliary data likely matching {isDefault: boolean, type: 'default'|'isolated'|'worker', frameId: string}
optional object executionContextAuxData
# True, if this script is generated as a result of the live edit operation.
@ -689,10 +710,14 @@ domain Debugger
experimental optional integer codeOffset
# The language of the script.
experimental optional Debugger.ScriptLanguage scriptLanguage
# If the scriptLanguage is WebASsembly, the source of debug symbols for the module.
experimental optional Debugger.DebugSymbols debugSymbols
# If the scriptLanguage is WebAssembly, the source of debug symbols for the module.
experimental optional array of Debugger.DebugSymbols debugSymbols
# The name the embedder supplied for this script.
experimental optional string embedderName
# The list of set breakpoints in this script if calls to `setBreakpointByUrl`
# matches this script's URL or hash. Clients that use this list can ignore the
# `breakpointResolved` event. They are equivalent.
experimental optional array of ResolvedBreakpoint resolvedBreakpoints
experimental domain HeapProfiler
depends on Runtime
@ -1547,10 +1572,14 @@ domain Runtime
# It is the total usage of the corresponding isolate not scoped to a particular Runtime.
experimental command getHeapUsage
returns
# Used heap size in bytes.
# Used JavaScript heap size in bytes.
number usedSize
# Allocated heap size in bytes.
# Allocated JavaScript heap size in bytes.
number totalSize
# Used size in bytes in the embedder's garbage-collected heap.
number embedderHeapUsedSize
# Size in bytes of backing storage for array buffers and external strings.
number backingStorageSize
# Returns properties of a given object. Object group of the result is inherited from the target
# object.

View file

@ -18,11 +18,11 @@
#else // defined(_WIN32)
// Setup for Linux shared library export.
#ifdef BUILDING_V8_PLATFORM_SHARED
#if defined(BUILDING_V8_PLATFORM_SHARED) || USING_V8_PLATFORM_SHARED
#define V8_PLATFORM_EXPORT __attribute__((visibility("default")))
#else
#define V8_PLATFORM_EXPORT
#endif
#endif // defined(BUILDING_V8_PLATFORM_SHARED) || ...
#endif // defined(_WIN32)

View file

@ -6,7 +6,6 @@
#define V8_LIBPLATFORM_V8_TRACING_H_
#include <atomic>
#include <fstream>
#include <memory>
#include <unordered_set>
#include <vector>

View file

@ -10,13 +10,20 @@
#include <memory>
#include "v8-local-handle.h" // NOLINT(build/include_directory)
#include "v8-memory-span.h" // NOLINT(build/include_directory)
#include "v8-object.h" // NOLINT(build/include_directory)
#include "v8-platform.h" // NOLINT(build/include_directory)
#include "v8config.h" // NOLINT(build/include_directory)
namespace v8 {
class SharedArrayBuffer;
#if defined(V8_COMPRESS_POINTERS) && \
!defined(V8_COMPRESS_POINTERS_IN_SHARED_CAGE)
class IsolateGroup;
#endif
#ifndef V8_ARRAY_BUFFER_INTERNAL_FIELD_COUNT
// Defined using gn arg `v8_array_buffer_internal_field_count`.
#define V8_ARRAY_BUFFER_INTERNAL_FIELD_COUNT 2
@ -24,6 +31,7 @@ class SharedArrayBuffer;
enum class ArrayBufferCreationMode { kInternalized, kExternalized };
enum class BackingStoreInitializationMode { kZeroInitialized, kUninitialized };
enum class BackingStoreOnFailureMode { kReturnNull, kOutOfMemory };
/**
* A wrapper around the backing store (i.e. the raw memory) of an array buffer.
@ -83,18 +91,6 @@ class V8_EXPORT BackingStore : public v8::internal::BackingStoreBase {
*/
void operator delete(void* ptr) { ::operator delete(ptr); }
/**
* Wrapper around ArrayBuffer::Allocator::Reallocate that preserves IsShared.
* Assumes that the backing_store was allocated by the ArrayBuffer allocator
* of the given isolate.
*/
V8_DEPRECATED(
"Reallocate is unsafe, please do not use. Please allocate a new "
"BackingStore and copy instead.")
static std::unique_ptr<BackingStore> Reallocate(
v8::Isolate* isolate, std::unique_ptr<BackingStore> backing_store,
size_t byte_length);
/**
* This callback is used only if the memory block for a BackingStore cannot be
* allocated with an ArrayBuffer::Allocator. In such cases the destructor of
@ -172,21 +168,13 @@ class V8_EXPORT ArrayBuffer : public Object {
virtual void Free(void* data, size_t length) = 0;
/**
* Reallocate the memory block of size |old_length| to a memory block of
* size |new_length| by expanding, contracting, or copying the existing
* memory block. If |new_length| > |old_length|, then the new part of
* the memory must be initialized to zeros. Return nullptr if reallocation
* is not successful.
*
* The caller guarantees that the memory block was previously allocated
* using Allocate or AllocateUninitialized.
*
* The default implementation allocates a new block and copies data.
* Returns a size_t that determines the largest ArrayBuffer that can be
* allocated. Override if your Allocator is more restrictive than the
* default. Will only be called once, and the value returned will be
* cached.
* Should not return a value that is larger than kMaxByteLength.
*/
V8_DEPRECATED(
"Reallocate is unsafe, please do not use. Please allocate new memory "
"and copy instead.")
virtual void* Reallocate(void* data, size_t old_length, size_t new_length);
virtual size_t MaxAllocationSize() const { return kMaxByteLength; }
/**
* ArrayBuffer allocation mode. kNormal is a malloc/free style allocation,
@ -195,11 +183,37 @@ class V8_EXPORT ArrayBuffer : public Object {
*/
enum class AllocationMode { kNormal, kReservation };
/**
* Returns the page allocator used by this Allocator instance.
*
* When the sandbox is used by the Allocator, this is expected to return the
* sandbox's page allocator.
* Otherwise, it should return the system page allocator.
*/
virtual PageAllocator* GetPageAllocator() { return nullptr; }
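A minimal sketch (editor-added) of a custom ArrayBuffer::Allocator using the hooks above; the 1 GiB cap is an arbitrary illustrative value, and returning nullptr from GetPageAllocator() keeps the default behaviour described in the comment.

#include <cstdlib>
#include "v8-array-buffer.h"

class CappedAllocator : public v8::ArrayBuffer::Allocator {
 public:
  void* Allocate(size_t length) override { return calloc(length, 1); }
  void* AllocateUninitialized(size_t length) override { return malloc(length); }
  void Free(void* data, size_t) override { free(data); }

  // Advertise a tighter limit than the default kMaxByteLength.
  size_t MaxAllocationSize() const override { return size_t{1} << 30; }

  // No sandbox-aware page allocator in this sketch.
  v8::PageAllocator* GetPageAllocator() override { return nullptr; }
};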
#if defined(V8_COMPRESS_POINTERS) && \
!defined(V8_COMPRESS_POINTERS_IN_SHARED_CAGE)
/**
* Convenience allocator.
*
* When the sandbox is enabled, this allocator will allocate its backing
* memory inside the sandbox. Otherwise, it will rely on malloc/free.
* memory inside the sandbox that belongs to the passed isolate group.
* Otherwise, it will rely on malloc/free.
*
* Caller takes ownership, i.e. the returned object needs to be freed using
* |delete allocator| once it is no longer in use.
*/
static Allocator* NewDefaultAllocator(const IsolateGroup& group);
#endif // defined(V8_COMPRESS_POINTERS) &&
// !defined(V8_COMPRESS_POINTERS_IN_SHARED_CAGE)
/**
* Convenience allocator.
*
* When the sandbox is enabled, this allocator will allocate its backing
* memory inside the default global sandbox. Otherwise, it will rely on
* malloc/free.
*
* Caller takes ownership, i.e. the returned object needs to be freed using
* |delete allocator| once it is no longer in use.
@ -217,6 +231,18 @@ class V8_EXPORT ArrayBuffer : public Object {
*/
size_t MaxByteLength() const;
/**
* Attempt to create a new ArrayBuffer. Allocate |byte_length| bytes.
* Allocated memory will be owned by a created ArrayBuffer and
* will be deallocated when it is garbage-collected,
* unless the object is externalized. If allocation fails, the Maybe
* returned will be empty.
*/
static MaybeLocal<ArrayBuffer> MaybeNew(
Isolate* isolate, size_t byte_length,
BackingStoreInitializationMode initialization_mode =
BackingStoreInitializationMode::kZeroInitialized);
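A hedged usage sketch (editor-added): unlike New(), MaybeNew() reports allocation failure through an empty MaybeLocal instead of crashing, so the caller can recover; TryMakeBuffer is a hypothetical helper name.

#include "v8-array-buffer.h"
#include "v8-isolate.h"

// Assumes the caller has an active HandleScope.
v8::MaybeLocal<v8::ArrayBuffer> TryMakeBuffer(v8::Isolate* isolate,
                                              size_t byte_length) {
  v8::Local<v8::ArrayBuffer> buffer;
  if (!v8::ArrayBuffer::MaybeNew(isolate, byte_length).ToLocal(&buffer)) {
    return {};  // Allocation failed; the caller decides how to recover.
  }
  return buffer;
}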
/**
* Create a new ArrayBuffer. Allocate |byte_length| bytes, which are either
* zero-initialized or uninitialized. Allocated memory will be owned by a
@ -246,17 +272,25 @@ class V8_EXPORT ArrayBuffer : public Object {
/**
* Returns a new standalone BackingStore that is allocated using the array
* buffer allocator of the isolate. The allocation can either be zero
* intialized, or uninitialized. The result can be later passed to
* initialized, or uninitialized. The result can be later passed to
* ArrayBuffer::New.
*
* If the allocator returns nullptr, then the function may cause GCs in the
* given isolate and re-try the allocation. If GCs do not help, then the
* given isolate and re-try the allocation.
*
* If GCs do not help and on_failure is kOutOfMemory, then the
* function will crash with an out-of-memory error.
*
* Otherwise if GCs do not help (or the allocation is too large for GCs to
* help) and on_failure is kReturnNull, then a null result is returned.
*/
static std::unique_ptr<BackingStore> NewBackingStore(
Isolate* isolate, size_t byte_length,
BackingStoreInitializationMode initialization_mode =
BackingStoreInitializationMode::kZeroInitialized);
BackingStoreInitializationMode::kZeroInitialized,
BackingStoreOnFailureMode on_failure =
BackingStoreOnFailureMode::kOutOfMemory);
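Sketch of the new failure mode (editor-added): with kReturnNull the call yields nullptr instead of terminating on OOM, which pairs naturally with kUninitialized for large, caller-filled buffers.

#include <memory>
#include "v8-array-buffer.h"

std::unique_ptr<v8::BackingStore> TryMakeStore(v8::Isolate* isolate,
                                               size_t byte_length) {
  // Returns nullptr if the allocation fails even after GCs.
  return v8::ArrayBuffer::NewBackingStore(
      isolate, byte_length,
      v8::BackingStoreInitializationMode::kUninitialized,
      v8::BackingStoreOnFailureMode::kReturnNull);
}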
/**
* Returns a new standalone BackingStore that takes over the ownership of
* the given buffer. The destructor of the BackingStore invokes the given
@ -354,9 +388,21 @@ class V8_EXPORT ArrayBuffer : public Object {
V8_ARRAY_BUFFER_INTERNAL_FIELD_COUNT;
static constexpr int kEmbedderFieldCount = kInternalFieldCount;
#if V8_ENABLE_SANDBOX
static constexpr size_t kMaxByteLength =
internal::kMaxSafeBufferSizeForSandbox;
#elif V8_HOST_ARCH_32_BIT
static constexpr size_t kMaxByteLength = std::numeric_limits<int>::max();
#else
// The maximum safe integer (2^53 - 1).
static constexpr size_t kMaxByteLength =
static_cast<size_t>((uint64_t{1} << 53) - 1);
#endif
private:
ArrayBuffer();
static void CheckCast(Value* obj);
friend class TypedArray;
};
#ifndef V8_ARRAY_BUFFER_VIEW_INTERNAL_FIELD_COUNT
@ -394,6 +440,16 @@ class V8_EXPORT ArrayBufferView : public Object {
*/
size_t CopyContents(void* dest, size_t byte_length);
/**
* Returns the contents of the ArrayBufferView's buffer as a MemorySpan. If
* the contents are on the V8 heap, they get copied into `storage`. Otherwise
* a view into the off-heap backing store is returned. The provided storage
* should be at least as large as the maximum on-heap size of a TypedArray,
* as defined in gn with `typed_array_max_size_in_heap`. The default value is
* 64 bytes.
*/
v8::MemorySpan<uint8_t> GetContents(v8::MemorySpan<uint8_t> storage);
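Editor-added sketch of the new GetContents(): a 64-byte stack buffer covers the on-heap case at the default `typed_array_max_size_in_heap`, while larger views return a zero-copy span into the off-heap backing store.

#include <cstdint>
#include "v8-array-buffer.h"

void ReadViewBytes(v8::Local<v8::ArrayBufferView> view) {
  uint8_t storage[64];  // Must cover the configured on-heap TypedArray size.
  v8::MemorySpan<uint8_t> contents =
      view->GetContents(v8::MemorySpan<uint8_t>(storage, sizeof(storage)));
  // contents.data()/contents.size() are valid in both the on- and off-heap case.
  (void)contents;
}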
/**
* Returns true if ArrayBufferView's backing ArrayBuffer has already been
* allocated.
@ -463,6 +519,18 @@ class V8_EXPORT SharedArrayBuffer : public Object {
BackingStoreInitializationMode initialization_mode =
BackingStoreInitializationMode::kZeroInitialized);
/**
* Create a new SharedArrayBuffer. Allocate |byte_length| bytes, which are
* either zero-initialized or uninitialized. Allocated memory will be owned by
* a created SharedArrayBuffer and will be deallocated when it is
* garbage-collected, unless the object is externalized. If allocation
* fails, the Maybe returned will be empty.
*/
static MaybeLocal<SharedArrayBuffer> MaybeNew(
Isolate* isolate, size_t byte_length,
BackingStoreInitializationMode initialization_mode =
BackingStoreInitializationMode::kZeroInitialized);
/**
* Create a new SharedArrayBuffer with an existing backing store.
* The created array keeps a reference to the backing store until the array
@ -481,17 +549,26 @@ class V8_EXPORT SharedArrayBuffer : public Object {
/**
* Returns a new standalone BackingStore that is allocated using the array
* buffer allocator of the isolate. The allocation can either be zero
* intialized, or uninitialized. The result can be later passed to
* initialized, or uninitialized. The result can be later passed to
* SharedArrayBuffer::New.
*
* If the allocator returns nullptr, then the function may cause GCs in the
* given isolate and re-try the allocation. If GCs do not help, then the
* function will crash with an out-of-memory error.
* given isolate and re-try the allocation.
*
* If on_failure is kOutOfMemory and GCs do not help, then the function will
* crash with an out-of-memory error.
*
* Otherwise, if on_failure is kReturnNull and GCs do not help (or the
* byte_length is so large that the allocation cannot succeed), then a null
* result is returned.
*/
static std::unique_ptr<BackingStore> NewBackingStore(
Isolate* isolate, size_t byte_length,
BackingStoreInitializationMode initialization_mode =
BackingStoreInitializationMode::kZeroInitialized);
BackingStoreInitializationMode::kZeroInitialized,
BackingStoreOnFailureMode on_failure =
BackingStoreOnFailureMode::kOutOfMemory);
/**
* Returns a new standalone BackingStore that takes over the ownership of
* the given buffer. The destructor of the BackingStore invokes the given

View file

@ -129,7 +129,9 @@ struct JitCodeEvent {
enum JitCodeEventOptions {
kJitCodeEventDefault = 0,
// Generate callbacks for already existent code.
kJitCodeEventEnumExisting = 1
kJitCodeEventEnumExisting = 1,
kLastJitCodeEventOption = kJitCodeEventEnumExisting
};
/**
@ -188,6 +190,9 @@ using GCCallback = void (*)(GCType type, GCCallbackFlags flags);
using InterruptCallback = void (*)(Isolate* isolate, void* data);
using PrintCurrentStackTraceFilterCallback =
bool (*)(Isolate* isolate, Local<String> script_name);
/**
* This callback is invoked when the heap size is close to the heap limit and
* V8 is likely to abort with an out-of-memory error.
@ -369,6 +374,47 @@ using HostImportModuleDynamicallyCallback = MaybeLocal<Promise> (*)(
Local<Value> resource_name, Local<String> specifier,
Local<FixedArray> import_attributes);
/**
* HostImportModuleWithPhaseDynamicallyCallback is called when we
* require the embedder to load a module with a specific phase. This is used
* as part of the dynamic import syntax.
*
* The referrer contains metadata about the script/module that calls
* import.
*
* The specifier is the name of the module that should be imported.
*
* The phase is the phase of the import requested.
*
* The import_attributes are import attributes for this request in the form:
* [key1, value1, key2, value2, ...] where the keys and values are of type
* v8::String. Note, unlike the FixedArray passed to ResolveModuleCallback and
* returned from ModuleRequest::GetImportAttributes(), this array does not
* contain the source Locations of the attributes.
*
* The Promise returned from this function is forwarded to userland
* JavaScript. The embedder must resolve this promise according to the phase
* requested:
* - For ModuleImportPhase::kSource, the promise must be resolved with a
* compiled ModuleSource object, or rejected with a SyntaxError if the
* module does not support source representation.
* - For ModuleImportPhase::kEvaluation, the promise must be resolved with a
* ModuleNamespace object of a module that has been compiled, instantiated,
* and evaluated.
*
* In case of an exception, the embedder must reject this promise with the
* exception. If the promise creation itself fails (e.g. due to stack
* overflow), the embedder must propagate that exception by returning an empty
* MaybeLocal.
*
* This callback is still experimental and is only invoked for source phase
* imports.
*/
using HostImportModuleWithPhaseDynamicallyCallback = MaybeLocal<Promise> (*)(
Local<Context> context, Local<Data> host_defined_options,
Local<Value> resource_name, Local<String> specifier,
ModuleImportPhase phase, Local<FixedArray> import_attributes);
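Editor-added, hedged sketch of an embedder callback matching the signature above; the actual module loading is elided and only the promise plumbing and phase dispatch are shown.

#include "v8.h"

v8::MaybeLocal<v8::Promise> ImportModuleWithPhase(
    v8::Local<v8::Context> context, v8::Local<v8::Data> host_defined_options,
    v8::Local<v8::Value> resource_name, v8::Local<v8::String> specifier,
    v8::ModuleImportPhase phase,
    v8::Local<v8::FixedArray> import_attributes) {
  v8::Local<v8::Promise::Resolver> resolver;
  if (!v8::Promise::Resolver::New(context).ToLocal(&resolver)) {
    return {};  // Promise creation failed (e.g. stack overflow): propagate.
  }
  if (phase == v8::ModuleImportPhase::kSource) {
    // Resolve with a compiled ModuleSource, or reject with a SyntaxError if
    // the module has no source representation (embedder-specific work).
  } else {
    // kEvaluation: resolve with the namespace object of a module that has
    // been compiled, instantiated, and evaluated (embedder-specific work).
  }
  return resolver->GetPromise();
}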
/**
* Callback for requesting a compile hint for a function from the embedder. The
* first parameter is the position of the function in source code and the second
@ -404,6 +450,14 @@ using HostInitializeImportMetaObjectCallback = void (*)(Local<Context> context,
using HostCreateShadowRealmContextCallback =
MaybeLocal<Context> (*)(Local<Context> initiator_context);
/**
* IsJSApiWrapperNativeErrorCallback is called on a JSApiWrapper object to
* determine if Error.isError should return true or false. For instance, in an
* HTML embedder, DOMExceptions return true when passed to Error.isError.
*/
using IsJSApiWrapperNativeErrorCallback = bool (*)(Isolate* isolate,
Local<Object> obj);
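A tiny editor-added sketch of such a callback; IsEmbedderDOMException is a hypothetical embedder-side predicate, not part of the V8 API.

#include "v8.h"

// Hypothetical embedder check, e.g. "is this wrapper a DOMException?".
bool IsEmbedderDOMException(v8::Isolate* isolate, v8::Local<v8::Object> obj);

// Matches IsJSApiWrapperNativeErrorCallback: report wrappers the embedder
// treats as native errors so Error.isError returns true for them.
bool IsWrapperNativeError(v8::Isolate* isolate, v8::Local<v8::Object> obj) {
  return IsEmbedderDOMException(isolate, obj);
}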
/**
* PrepareStackTraceCallback is called when the stack property of an error is
* first accessed. The return value will be used as the stack value. If this
@ -444,14 +498,26 @@ using PrepareStackTraceCallback = MaybeLocal<Value> (*)(Local<Context> context,
* with a list of regular expressions that should match the document URL
* in order to enable ETW tracing:
* {
* "version": "1.0",
* "version": "2.0",
* "filtered_urls": [
* "https:\/\/.*\.chromium\.org\/.*", "https://v8.dev/";, "..."
* ]
* ],
* "trace_interpreter_frames": true
* }
*/
using FilterETWSessionByURLCallback =
bool (*)(Local<Context> context, const std::string& etw_filter_payload);
struct FilterETWSessionByURLResult {
// If true, enable ETW tracing for the current isolate.
bool enable_etw_tracing;
// If true, also enables ETW tracing for interpreter stack frames.
bool trace_interpreter_frames;
};
using FilterETWSessionByURL2Callback = FilterETWSessionByURLResult (*)(
Local<Context> context, const std::string& etw_filter_payload);
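Editor-added, Windows-only sketch of the new "2" callback. A real embedder would parse `etw_filter_payload` (the JSON shown above); this sketch just enables tracing unconditionally and leaves interpreter-frame tracing off.

#include <string>
#include "v8.h"

#if defined(V8_OS_WIN)
v8::FilterETWSessionByURLResult FilterEtwSessionByUrl2(
    v8::Local<v8::Context> context, const std::string& etw_filter_payload) {
  return {/*enable_etw_tracing=*/true, /*trace_interpreter_frames=*/false};
}
#endif  // V8_OS_WIN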
#endif // V8_OS_WIN
} // namespace v8

View file

@ -427,7 +427,8 @@ class V8_EXPORT Context : public Data {
static void CheckCast(Data* obj);
internal::Address* GetDataFromSnapshotOnce(size_t index);
internal::ValueHelper::InternalRepresentationType GetDataFromSnapshotOnce(
size_t index);
Local<Value> SlowGetEmbedderData(int index);
void* SlowGetAlignedPointerFromEmbedderData(int index);
};
@ -497,10 +498,10 @@ void* Context::GetAlignedPointerFromEmbedderData(int index) {
template <class T>
MaybeLocal<T> Context::GetDataFromSnapshotOnce(size_t index) {
if (auto slot = GetDataFromSnapshotOnce(index); slot) {
internal::PerformCastCheck(
internal::ValueHelper::SlotAsValue<T, false>(slot));
return Local<T>::FromSlot(slot);
if (auto repr = GetDataFromSnapshotOnce(index);
repr != internal::ValueHelper::kEmpty) {
internal::PerformCastCheck(internal::ValueHelper::ReprAsValue<T>(repr));
return Local<T>::FromRepr(repr);
}
return {};
}

View file

@ -87,6 +87,7 @@ class V8_EXPORT CppHeap {
*
* After this call, object allocation is prohibited.
*/
V8_DEPRECATED("Terminate gets automatically called in the CppHeap destructor")
void Terminate();
/**
@ -101,7 +102,7 @@ class V8_EXPORT CppHeap {
/**
* Collects statistics for the given spaces and reports them to the receiver.
*
* \param custom_spaces a collection of custom space indicies.
* \param custom_spaces a collection of custom space indices.
* \param receiver an object that gets the results.
*/
void CollectCustomSpaceStatisticsAtLastGC(

View file

@ -27,6 +27,11 @@ class V8_EXPORT Data {
*/
bool IsModule() const;
/**
* Returns true if this data is a |v8::ModuleRequest|.
*/
bool IsModuleRequest() const;
/**
* Returns true if this data is a |v8::FixedArray|.
*/

View file

@ -42,6 +42,12 @@ class V8_EXPORT StackFrame {
*/
int GetColumn() const { return GetLocation().GetColumnNumber() + 1; }
/**
* Returns zero based source position (character offset) for the associated
* function.
*/
int GetSourcePosition() const;
/**
* Returns the id of the script for the function for this StackFrame.
* This method will return Message::kNoScriptIdInfo if it is unable to
@ -130,6 +136,11 @@ class V8_EXPORT StackTrace {
kDetailed = kOverview | kIsEval | kIsConstructor | kScriptNameOrSourceURL
};
/**
* Returns the (unique) ID of this stack trace.
*/
int GetID() const;
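Editor-added sketch combining the two new accessors (StackFrame::GetSourcePosition() and StackTrace::GetID()); what is done with the values is left out.

#include "v8-debug.h"
#include "v8-isolate.h"
#include "v8-local-handle.h"

void InspectTopFrame(v8::Isolate* isolate) {
  v8::HandleScope scope(isolate);
  v8::Local<v8::StackTrace> trace =
      v8::StackTrace::CurrentStackTrace(isolate, /*frame_limit=*/1);
  if (trace->GetFrameCount() == 0) return;
  int trace_id = trace->GetID();  // Unique id of this stack trace.
  int position = trace->GetFrame(isolate, 0)->GetSourcePosition();  // 0-based.
  (void)trace_id;
  (void)position;
}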
/**
* Returns a StackFrame at a particular index.
*/

View file

@ -21,37 +21,10 @@ class Value;
*/
class V8_EXPORT EmbedderRootsHandler {
public:
enum class RootHandling {
kQueryEmbedderForNonDroppableReferences,
kDontQueryEmbedderForAnyReference,
};
virtual ~EmbedderRootsHandler() = default;
EmbedderRootsHandler() = default;
V8_DEPRECATED("Use the default constructor instead.")
explicit EmbedderRootsHandler(RootHandling default_traced_reference_handling)
: default_traced_reference_handling_(default_traced_reference_handling) {}
/**
* Returns true if the |TracedReference| handle should be considered as root
* for the currently running non-tracing garbage collection and false
* otherwise. The default implementation will keep all |TracedReference|
* references as roots.
*
* If this returns false, then V8 may decide that the object referred to by
* such a handle is reclaimed. In that case, V8 calls |ResetRoot()| for the
* |TracedReference|.
*
* Note that the `handle` is different from the handle that the embedder holds
* for retaining the object.
*
* The concrete implementations must be thread-safe.
*/
V8_DEPRECATED("Use TracedReferenceHandling::kDroppable instead.")
virtual bool IsRoot(const v8::TracedReference<v8::Value>& handle) = 0;
/**
* Used in combination with |IsRoot|. Called by V8 when an
* object that is backed by a handle is reclaimed by a non-tracing garbage
@ -69,14 +42,10 @@ class V8_EXPORT EmbedderRootsHandler {
* |false| is returned, |ResetRoot()| will be recalled for the same handle.
*/
virtual bool TryResetRoot(const v8::TracedReference<v8::Value>& handle) {
ResetRoot(handle);
return true;
return false;
}
private:
const RootHandling default_traced_reference_handling_ =
RootHandling::kDontQueryEmbedderForAnyReference;
friend class internal::TracedHandles;
};

View file

@ -45,6 +45,8 @@ class V8_EXPORT Exception {
Local<Value> options = {});
static Local<Value> WasmRuntimeError(Local<String> message,
Local<Value> options = {});
static Local<Value> WasmSuspendError(Local<String> message,
Local<Value> options = {});
static Local<Value> Error(Local<String> message, Local<Value> options = {});
/**

View file

@ -0,0 +1,60 @@
// Copyright 2024 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef INCLUDE_EXTERNAL_MEMORY_ACCOUNTER_H_
#define INCLUDE_EXTERNAL_MEMORY_ACCOUNTER_H_
#include <stdint.h>
#include "v8-isolate.h"
namespace v8 {
/**
* This class is used to give V8 an indication of the amount of externally
* allocated memory that is kept alive by JavaScript objects. V8 uses this to
* decide when to perform garbage collections. Registering externally allocated
* memory will trigger garbage collections more often than it would otherwise in
* an attempt to garbage collect the JavaScript objects that keep the externally
* allocated memory alive. Instances of ExternalMemoryAccounter check that the
* reported external memory is back to 0 on destruction.
*/
class V8_EXPORT ExternalMemoryAccounter {
public:
/**
* Returns the amount of external memory registered for `isolate`.
*/
static int64_t GetTotalAmountOfExternalAllocatedMemoryForTesting(
const Isolate* isolate);
ExternalMemoryAccounter() = default;
~ExternalMemoryAccounter();
ExternalMemoryAccounter(ExternalMemoryAccounter&&);
ExternalMemoryAccounter& operator=(ExternalMemoryAccounter&&);
ExternalMemoryAccounter(const ExternalMemoryAccounter&) = delete;
ExternalMemoryAccounter& operator=(const ExternalMemoryAccounter&) = delete;
/**
* Reports an increase of `size` bytes of external memory.
*/
void Increase(Isolate* isolate, size_t size);
/**
* Reports an update of `delta` bytes of external memory.
*/
void Update(Isolate* isolate, int64_t delta);
/**
* Reports a decrease of `size` bytes of external memory.
*/
void Decrease(Isolate* isolate, size_t size);
private:
#ifdef V8_ENABLE_MEMORY_ACCOUNTING_CHECKS
size_t amount_of_external_memory_ = 0;
v8::Isolate* isolate_ = nullptr;
#endif
};
} // namespace v8
#endif // INCLUDE_EXTERNAL_MEMORY_ACCOUNTER_H_
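Editor-added sketch of how an embedder object might report the lifetime of an external allocation through the new accounter; ExternalBlob is a made-up class name.

#include <cstdlib>
#include "v8-external-memory-accounter.h"

class ExternalBlob {
 public:
  ExternalBlob(v8::Isolate* isolate, size_t size)
      : isolate_(isolate), size_(size), data_(malloc(size)) {
    accounter_.Increase(isolate_, size_);
  }
  ~ExternalBlob() {
    accounter_.Decrease(isolate_, size_);  // Must bring the balance back to 0.
    free(data_);
  }

 private:
  v8::Isolate* isolate_;
  size_t size_;
  void* data_;
  v8::ExternalMemoryAccounter accounter_;
};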

View file

@ -2,6 +2,9 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef INCLUDE_V8_FAST_API_CALLS_H_
#define INCLUDE_V8_FAST_API_CALLS_H_
/**
* This file provides additional API on top of the default one for making
* API calls, which come from embedder C++ functions. The functions are being
@ -216,9 +219,6 @@
* associated SlowCallback.
*/
#ifndef INCLUDE_V8_FAST_API_CALLS_H_
#define INCLUDE_V8_FAST_API_CALLS_H_
#include <stddef.h>
#include <stdint.h>
@ -235,6 +235,7 @@ namespace v8 {
class Isolate;
START_ALLOW_USE_DEPRECATED()
class CTypeInfo {
public:
enum class Type : uint8_t {
@ -268,11 +269,11 @@ class CTypeInfo {
// than any valid Type enum.
static constexpr Type kCallbackOptionsType = Type(255);
enum class SequenceType : uint8_t {
enum class V8_DEPRECATE_SOON(
"There is no special support in V8 anymore, there is no need to"
"use a SequenceType") SequenceType : uint8_t {
kScalar,
kIsSequence, // sequence<T>
kIsTypedArray, // TypedArray of T or any ArrayBufferView if T
// is void
kIsArrayBuffer // ArrayBuffer
};
@ -284,8 +285,11 @@ class CTypeInfo {
kIsRestrictedBit = 1 << 3, // T must be float or double
};
explicit constexpr CTypeInfo(
Type type, SequenceType sequence_type = SequenceType::kScalar,
explicit constexpr CTypeInfo(Type type, Flags flags = Flags::kNone)
: type_(type), sequence_type_(SequenceType::kScalar), flags_(flags) {}
V8_DEPRECATE_SOON("Use CTypeInfo(Type, Flags) instead")
constexpr CTypeInfo(Type type, SequenceType sequence_type,
Flags flags = Flags::kNone)
: type_(type), sequence_type_(sequence_type), flags_(flags) {}
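Editor-added sketch of the simplified fast-API surface: with SequenceType on its way out, a fast callback takes plain primitives or v8::Local values and CFunction::Make infers the CTypeInfo from the signature. FastAdd is a made-up callback; in practice it is paired with a slow callback via v8::FunctionTemplate::New.

#include <cstdint>
#include "v8-fast-api-calls.h"

// Fast path: receiver plus two int32 arguments, no SequenceType involved.
int32_t FastAdd(v8::Local<v8::Value> receiver, int32_t a, int32_t b) {
  return a + b;
}

static const v8::CFunction kFastAdd = v8::CFunction::Make(FastAdd);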
@ -301,6 +305,7 @@ class CTypeInfo {
}
constexpr Type GetType() const { return type_; }
V8_DEPRECATE_SOON("Use the constant SequenceType::kScalar instead")
constexpr SequenceType GetSequenceType() const { return sequence_type_; }
constexpr Flags GetFlags() const { return flags_; }
@ -324,69 +329,7 @@ class CTypeInfo {
SequenceType sequence_type_;
Flags flags_;
};
struct FastApiTypedArrayBase {
public:
// Returns the length in number of elements.
size_t V8_EXPORT length() const { return length_; }
// Checks whether the given index is within the bounds of the collection.
void V8_EXPORT ValidateIndex(size_t index) const;
protected:
size_t length_ = 0;
};
template <typename T>
struct V8_DEPRECATE_SOON(
"When an API function expects a TypedArray as a parameter, the type in the "
"signature should be `v8::Local<v8::Value>` instead of "
"FastApiTypedArray<>. The API function then has to type-check the "
"parameter and convert it to a `v8::Local<v8::TypedArray` to access the "
"data. In essence, the parameter should be handled the same as for a "
"regular API call.") FastApiTypedArray : public FastApiTypedArrayBase {
public:
V8_INLINE T get(size_t index) const {
#ifdef DEBUG
ValidateIndex(index);
#endif // DEBUG
T tmp;
memcpy(&tmp, static_cast<void*>(reinterpret_cast<T*>(data_) + index),
sizeof(T));
return tmp;
}
bool getStorageIfAligned(T** elements) const {
if (reinterpret_cast<uintptr_t>(data_) % alignof(T) != 0) {
return false;
}
*elements = reinterpret_cast<T*>(data_);
return true;
}
private:
// This pointer should include the typed array offset applied.
// It's not guaranteed that it's aligned to sizeof(T), it's only
// guaranteed that it's 4-byte aligned, so for 8-byte types we need to
// provide a special implementation for reading from it, which hides
// the possibly unaligned read in the `get` method.
void* data_;
};
// Any TypedArray. It uses kTypedArrayBit with base type void
// Overloaded args of ArrayBufferView and TypedArray are not supported
// (for now) because the generic “any” ArrayBufferView doesn't have its
// own instance type. It could be supported if we specify that
// TypedArray<T> always has precedence over the generic ArrayBufferView,
// but this complicates overload resolution.
struct FastApiArrayBufferView {
void* data;
size_t byte_length;
};
struct FastApiArrayBuffer {
void* data;
size_t byte_length;
};
END_ALLOW_USE_DEPRECATED()
struct FastOneByteString {
const char* data;
@ -493,40 +436,6 @@ class V8_EXPORT CFunction {
enum class OverloadResolution { kImpossible, kAtRuntime, kAtCompileTime };
// Returns whether an overload between this and the given CFunction can
// be resolved at runtime by the RTTI available for the arguments or at
// compile time for functions with different number of arguments.
OverloadResolution GetOverloadResolution(const CFunction* other) {
// Runtime overload resolution can only deal with functions with the
// same number of arguments. Functions with different arity are handled
// by compile time overload resolution though.
if (ArgumentCount() != other->ArgumentCount()) {
return OverloadResolution::kAtCompileTime;
}
// The functions can only differ by a single argument position.
int diff_index = -1;
for (unsigned int i = 0; i < ArgumentCount(); ++i) {
if (ArgumentInfo(i).GetSequenceType() !=
other->ArgumentInfo(i).GetSequenceType()) {
if (diff_index >= 0) {
return OverloadResolution::kImpossible;
}
diff_index = i;
// We only support overload resolution between sequence types.
if (ArgumentInfo(i).GetSequenceType() ==
CTypeInfo::SequenceType::kScalar ||
other->ArgumentInfo(i).GetSequenceType() ==
CTypeInfo::SequenceType::kScalar) {
return OverloadResolution::kImpossible;
}
}
}
return OverloadResolution::kAtRuntime;
}
template <typename F>
static CFunction Make(F* func,
CFunctionInfo::Int64Representation int64_rep =
@ -656,9 +565,6 @@ struct TypeInfoHelper {
} \
\
static constexpr CTypeInfo::Type Type() { return CTypeInfo::Type::Enum; } \
static constexpr CTypeInfo::SequenceType SequenceType() { \
return CTypeInfo::SequenceType::kScalar; \
} \
};
template <CTypeInfo::Type type>
@ -687,6 +593,7 @@ struct CTypeInfoTraits {};
V(void, kVoid) \
V(v8::Local<v8::Value>, kV8Value) \
V(v8::Local<v8::Object>, kV8Value) \
V(v8::Local<v8::Array>, kV8Value) \
V(AnyCType, kAny)
// ApiObject was a temporary solution to wrap the pointer to the v8::Value.
@ -699,52 +606,8 @@ PRIMITIVE_C_TYPES(DEFINE_TYPE_INFO_TRAITS)
#undef PRIMITIVE_C_TYPES
#undef ALL_C_TYPES
#define SPECIALIZE_GET_TYPE_INFO_HELPER_FOR_TA(T, Enum) \
template <> \
struct TypeInfoHelper<const FastApiTypedArray<T>&> { \
static constexpr CTypeInfo::Flags Flags() { \
return CTypeInfo::Flags::kNone; \
} \
\
static constexpr CTypeInfo::Type Type() { return CTypeInfo::Type::Enum; } \
static constexpr CTypeInfo::SequenceType SequenceType() { \
return CTypeInfo::SequenceType::kIsTypedArray; \
} \
};
#define TYPED_ARRAY_C_TYPES(V) \
V(uint8_t, kUint8) \
V(int32_t, kInt32) \
V(uint32_t, kUint32) \
V(int64_t, kInt64) \
V(uint64_t, kUint64) \
V(float, kFloat32) \
V(double, kFloat64)
TYPED_ARRAY_C_TYPES(SPECIALIZE_GET_TYPE_INFO_HELPER_FOR_TA)
#undef TYPED_ARRAY_C_TYPES
template <>
struct TypeInfoHelper<v8::Local<v8::Array>> {
static constexpr CTypeInfo::Flags Flags() { return CTypeInfo::Flags::kNone; }
static constexpr CTypeInfo::Type Type() { return CTypeInfo::Type::kVoid; }
static constexpr CTypeInfo::SequenceType SequenceType() {
return CTypeInfo::SequenceType::kIsSequence;
}
};
template <>
struct TypeInfoHelper<v8::Local<v8::Uint32Array>> {
static constexpr CTypeInfo::Flags Flags() { return CTypeInfo::Flags::kNone; }
static constexpr CTypeInfo::Type Type() { return CTypeInfo::Type::kUint32; }
static constexpr CTypeInfo::SequenceType SequenceType() {
return CTypeInfo::SequenceType::kIsTypedArray;
}
};
template <>
struct TypeInfoHelper<FastApiCallbackOptions&> {
static constexpr CTypeInfo::Flags Flags() { return CTypeInfo::Flags::kNone; }
@ -752,9 +615,6 @@ struct TypeInfoHelper<FastApiCallbackOptions&> {
static constexpr CTypeInfo::Type Type() {
return CTypeInfo::kCallbackOptionsType;
}
static constexpr CTypeInfo::SequenceType SequenceType() {
return CTypeInfo::SequenceType::kScalar;
}
};
template <>
@ -764,9 +624,6 @@ struct TypeInfoHelper<const FastOneByteString&> {
static constexpr CTypeInfo::Type Type() {
return CTypeInfo::Type::kSeqOneByteString;
}
static constexpr CTypeInfo::SequenceType SequenceType() {
return CTypeInfo::SequenceType::kScalar;
}
};
#define STATIC_ASSERT_IMPLIES(COND, ASSERTION, MSG) \
@ -783,14 +640,7 @@ class V8_EXPORT CTypeInfoBuilder {
constexpr CTypeInfo::Flags kFlags =
MergeFlags(internal::TypeInfoHelper<T>::Flags(), Flags...);
constexpr CTypeInfo::Type kType = internal::TypeInfoHelper<T>::Type();
constexpr CTypeInfo::SequenceType kSequenceType =
internal::TypeInfoHelper<T>::SequenceType();
STATIC_ASSERT_IMPLIES(
uint8_t(kFlags) & uint8_t(CTypeInfo::Flags::kAllowSharedBit),
(kSequenceType == CTypeInfo::SequenceType::kIsTypedArray ||
kSequenceType == CTypeInfo::SequenceType::kIsArrayBuffer),
"kAllowSharedBit is only allowed for TypedArrays and ArrayBuffers.");
STATIC_ASSERT_IMPLIES(
uint8_t(kFlags) & uint8_t(CTypeInfo::Flags::kEnforceRangeBit),
CTypeInfo::IsIntegralType(kType),
@ -803,17 +653,9 @@ class V8_EXPORT CTypeInfoBuilder {
uint8_t(kFlags) & uint8_t(CTypeInfo::Flags::kIsRestrictedBit),
CTypeInfo::IsFloatingPointType(kType),
"kIsRestrictedBit is only allowed for floating point types.");
STATIC_ASSERT_IMPLIES(kSequenceType == CTypeInfo::SequenceType::kIsSequence,
kType == CTypeInfo::Type::kVoid,
"Sequences are only supported from void type.");
STATIC_ASSERT_IMPLIES(
kSequenceType == CTypeInfo::SequenceType::kIsTypedArray,
CTypeInfo::IsPrimitive(kType) || kType == CTypeInfo::Type::kVoid,
"TypedArrays are only supported from primitive types or void.");
// Return the same type with the merged flags.
return CTypeInfo(internal::TypeInfoHelper<T>::Type(),
internal::TypeInfoHelper<T>::SequenceType(), kFlags);
return CTypeInfo(internal::TypeInfoHelper<T>::Type(), kFlags);
}
private:

View file

@ -126,23 +126,6 @@ class FunctionCallbackInfo {
V8_INLINE Local<Value> operator[](int i) const;
/** Returns the receiver. This corresponds to the "this" value. */
V8_INLINE Local<Object> This() const;
/**
* If the callback was created without a Signature, this is the same
* value as This(). If there is a signature, and the signature didn't match
* This() but one of its hidden prototypes, this will be the respective
* hidden prototype.
*
* Note that this is not the prototype of This() on which the accessor
* referencing this callback was found (which in V8 internally is often
* referred to as holder [sic]).
*/
V8_DEPRECATED(
"V8 will stop providing access to hidden prototype (i.e. "
"JSGlobalObject). Use This() instead. \n"
"DO NOT try to workaround this by accessing JSGlobalObject via "
"v8::Object::GetPrototype() - it'll be deprecated soon too. \n"
"See http://crbug.com/333672197. ")
V8_INLINE Local<Object> Holder() const;
/** For construct calls, this returns the "new.target" value. */
V8_INLINE Local<Value> NewTarget() const;
/** Indicates whether this is a regular call or a construct call. */
@ -154,18 +137,16 @@ class FunctionCallbackInfo {
/** The ReturnValue for the call. */
V8_INLINE ReturnValue<T> GetReturnValue() const;
// This is a temporary replacement for Holder() added just for the purpose
// of testing the deprecated Holder() machinery until it's removed for real.
// DO NOT use it.
V8_INLINE Local<Object> HolderSoonToBeDeprecated() const;
private:
friend class internal::FunctionCallbackArguments;
friend class internal::CustomArguments<FunctionCallbackInfo>;
friend class debug::ConsoleCallArguments;
friend void internal::PrintFunctionCallbackInfo(void*);
static constexpr int kHolderIndex = 0;
// TODO(ishell, http://crbug.com/326505377): in case of non-constructor
// call, don't pass kNewTarget and kUnused. Add IsConstructCall flag to
// kIsolate field.
static constexpr int kUnusedIndex = 0;
static constexpr int kIsolateIndex = 1;
static constexpr int kContextIndex = 2;
static constexpr int kReturnValueIndex = 3;
@ -649,16 +630,6 @@ Local<Object> FunctionCallbackInfo<T>::This() const {
return Local<Object>::FromSlot(values_ + kThisValuesIndex);
}
template <typename T>
Local<Object> FunctionCallbackInfo<T>::HolderSoonToBeDeprecated() const {
return Local<Object>::FromSlot(&implicit_args_[kHolderIndex]);
}
template <typename T>
Local<Object> FunctionCallbackInfo<T>::Holder() const {
return HolderSoonToBeDeprecated();
}
template <typename T>
Local<Value> FunctionCallbackInfo<T>::NewTarget() const {
return Local<Value>::FromSlot(&implicit_args_[kNewTargetIndex]);

View file

@ -18,6 +18,7 @@
namespace v8 {
class Context;
class Location;
class UnboundScript;
/**
@ -88,6 +89,12 @@ class V8_EXPORT Function : public Object {
*/
int GetScriptColumnNumber() const;
/**
* Returns zero based line and column number of function body, else returns
* {-1, -1}.
*/
Location GetScriptLocation() const;
/**
* Returns zero based start position (character offset) of function body and
* kLineOffsetNotFound if no information available.

View file

@ -43,7 +43,11 @@ class V8_TRIVIAL_ABI StackAllocated<true> : public StackAllocated<false> {
no_checking_tag tag)
: StackAllocated<false>(other, tag) {}
#ifdef ENABLE_SLOW_DCHECKS
V8_EXPORT void VerifyOnStack() const;
#else
V8_INLINE V8_EXPORT void VerifyOnStack() const {}
#endif
};
/**
@ -86,6 +90,16 @@ class IndirectHandleBase {
return internal::ValueHelper::SlotAsValue<T, check_null>(slot());
}
#ifdef V8_ENABLE_DIRECT_HANDLE
V8_INLINE internal::ValueHelper::InternalRepresentationType repr() const {
return location_ ? *location_ : internal::ValueHelper::kEmpty;
}
#else
V8_INLINE internal::ValueHelper::InternalRepresentationType repr() const {
return location_;
}
#endif // V8_ENABLE_DIRECT_HANDLE
private:
internal::Address* location_ = nullptr;
};
@ -126,6 +140,10 @@ class DirectHandleBase {
return reinterpret_cast<T*>(ptr_);
}
V8_INLINE internal::ValueHelper::InternalRepresentationType repr() const {
return ptr_;
}
private:
internal::Address ptr_ = internal::ValueHelper::kEmpty;
};

View file

@ -112,11 +112,18 @@ class V8_EXPORT V8 {
const bool kV8TargetOsIsAndroid = false;
#endif
#ifdef V8_ENABLE_CHECKS
const bool kV8EnableChecks = true;
#else
const bool kV8EnableChecks = false;
#endif
const int kBuildConfiguration =
(internal::PointerCompressionIsEnabled() ? kPointerCompression : 0) |
(internal::SmiValuesAre31Bits() ? k31BitSmis : 0) |
(internal::SandboxIsEnabled() ? kSandbox : 0) |
(kV8TargetOsIsAndroid ? kTargetOsIsAndroid : 0);
(kV8TargetOsIsAndroid ? kTargetOsIsAndroid : 0) |
(kV8EnableChecks ? kEnableChecks : 0);
return Initialize(kBuildConfiguration);
}
@ -288,6 +295,7 @@ class V8_EXPORT V8 {
k31BitSmis = 1 << 1,
kSandbox = 1 << 2,
kTargetOsIsAndroid = 1 << 3,
kEnableChecks = 1 << 4,
};
/**

View file

@ -139,6 +139,7 @@ struct V8_EXPORT V8StackFrame {
StringView functionName;
int lineNumber;
int columnNumber;
int scriptId;
};
class V8_EXPORT V8StackTrace {
@ -297,12 +298,14 @@ class V8_EXPORT V8InspectorClient {
return v8::MaybeLocal<v8::Value>();
}
virtual void consoleTime(v8::Isolate* isolate, v8::Local<v8::String> label);
virtual void consoleTime(v8::Isolate* isolate, v8::Local<v8::String> label) {}
virtual void consoleTimeEnd(v8::Isolate* isolate,
v8::Local<v8::String> label);
v8::Local<v8::String> label) {}
virtual void consoleTimeStamp(v8::Isolate* isolate,
v8::Local<v8::String> label);
v8::Local<v8::String> label) {}
virtual void consoleTimeStampWithArgs(
v8::Isolate* isolate, v8::Local<v8::String> label,
const v8::LocalVector<v8::Value>& args) {}
virtual void consoleClear(int contextGroupId) {}
virtual double currentTimeMS() { return 0; }
typedef void (*TimerCallback)(void*);
@ -361,6 +364,7 @@ class V8_EXPORT V8Inspector {
virtual void resetContextGroup(int contextGroupId) = 0;
virtual v8::MaybeLocal<v8::Context> contextById(int contextId) = 0;
virtual V8DebuggerId uniqueDebuggerId(int contextId) = 0;
virtual uint64_t isolateId() = 0;
// Various instrumentation.
virtual void idleStarted() = 0;

View file

@ -44,7 +44,10 @@ class Isolate;
namespace internal {
class Heap;
class LocalHeap;
class Isolate;
class IsolateGroup;
class LocalIsolate;
typedef uintptr_t Address;
static constexpr Address kNullAddress = 0;
@ -160,15 +163,15 @@ struct SmiTagging<8> {
std::is_signed_v<T>>* = nullptr>
V8_INLINE static constexpr bool IsValidSmi(T value) {
// To be representable as a long smi, the value must be a 32-bit integer.
return (value == static_cast<int32_t>(value));
return std::numeric_limits<int32_t>::min() <= value &&
value <= std::numeric_limits<int32_t>::max();
}
template <class T,
typename std::enable_if_t<std::is_integral_v<T> &&
std::is_unsigned_v<T>>* = nullptr>
V8_INLINE static constexpr bool IsValidSmi(T value) {
return (static_cast<uintptr_t>(value) ==
static_cast<uintptr_t>(static_cast<int32_t>(value)));
return value <= std::numeric_limits<int32_t>::max();
}
};
@ -234,10 +237,6 @@ using SandboxedPointer_t = Address;
// virtual address space for userspace. As such, limit the sandbox to 128GB (a
// quarter of the total available address space).
constexpr size_t kSandboxSizeLog2 = 37; // 128 GB
#elif defined(V8_TARGET_ARCH_LOONG64)
// Some Linux distros on LoongArch64 configured with only 40 bits of virtual
// address space for userspace. Limit the sandbox to 256GB here.
constexpr size_t kSandboxSizeLog2 = 38; // 256 GB
#else
// Everywhere else use a 1TB sandbox.
constexpr size_t kSandboxSizeLog2 = 40; // 1 TB
@ -258,9 +257,12 @@ constexpr size_t kSandboxAlignment = kPtrComprCageBaseAlignment;
constexpr uint64_t kSandboxedPointerShift = 64 - kSandboxSizeLog2;
// Size of the guard regions surrounding the sandbox. This assumes a worst-case
// scenario of a 32-bit unsigned index used to access an array of 64-bit
// values.
constexpr size_t kSandboxGuardRegionSize = 32ULL * GB;
// scenario of a 32-bit unsigned index used to access an array of 64-bit values
// with an additional 4GB (compressed pointer) offset. In particular, accesses
// to TypedArrays are effectively computed as
// `entry_pointer = array->base + array->offset + index * array->element_size`.
// See also https://crbug.com/40070746 for more details.
constexpr size_t kSandboxGuardRegionSize = 32ULL * GB + 4ULL * GB;
static_assert((kSandboxGuardRegionSize % kSandboxAlignment) == 0,
"The size of the guard regions around the sandbox must be a "
@ -335,6 +337,16 @@ constexpr size_t kMaxExternalPointers = 0;
#endif // V8_COMPRESS_POINTERS
constexpr uint64_t kExternalPointerMarkBit = 1ULL << 48;
constexpr uint64_t kExternalPointerTagShift = 49;
constexpr uint64_t kExternalPointerTagMask = 0x00fe000000000000ULL;
constexpr uint64_t kExternalPointerShiftedTagMask =
kExternalPointerTagMask >> kExternalPointerTagShift;
static_assert(kExternalPointerShiftedTagMask << kExternalPointerTagShift ==
kExternalPointerTagMask);
constexpr uint64_t kExternalPointerTagAndMarkbitMask = 0x00ff000000000000ULL;
constexpr uint64_t kExternalPointerPayloadMask = 0xff00ffffffffffffULL;
// A ExternalPointerHandle represents a (opaque) reference to an external
// pointer that can be stored inside the sandbox. A ExternalPointerHandle has
// meaning only in combination with an (active) Isolate as it references an
@ -403,47 +415,89 @@ constexpr size_t kMaxCppHeapPointers = 0;
#endif // V8_COMPRESS_POINTERS
// See `ExternalPointerHandle` for the main documentation. The difference to
// `ExternalPointerHandle` is that the handle always refers to a
// (external pointer, size) tuple. The handles are used in combination with a
// dedicated external buffer table (EBT).
using ExternalBufferHandle = uint32_t;
// Generic tag range struct to represent ranges of type tags.
//
// When referencing external objects via pointer tables, type tags are
// frequently necessary to guarantee type safety for the external objects. When
// support for subtyping is necessary, range-based type checks are used in
// which all subtypes of a given supertype use contiguous tags. This struct can
// then be used to represent such a type range.
//
// As an example, consider the following type hierarchy:
//
//        A     F
//       / \
//      B   E
//     / \
//    C   D
//
// A potential type id assignment for range-based type checks is
// {A: 0, B: 1, C: 2, D: 3, E: 4, F: 5}. With that, the type check for type A
// would check for the range [A, E], while the check for B would check range
// [B, D], and for F it would simply check [F, F].
//
// In addition, there is an option for performance tweaks: if the size of the
// type range corresponding to a supertype is a power of two and starts at a
// power of two (e.g. [0x100, 0x13f]), then the compiler can often optimize
// the type check to use even fewer instructions (essentially replace a AND +
// SUB with a single AND).
//
template <typename Tag>
struct TagRange {
static_assert(std::is_enum_v<Tag> &&
std::is_same_v<std::underlying_type_t<Tag>, uint16_t>,
"Tag parameter must be an enum with base type uint16_t");
// ExternalBuffer point to buffer located outside the sandbox. When the V8
// sandbox is enabled, these are stored on heap as ExternalBufferHandles,
// otherwise they are simply raw pointers.
#ifdef V8_ENABLE_SANDBOX
using ExternalBuffer_t = ExternalBufferHandle;
#else
using ExternalBuffer_t = Address;
#endif
// Construct the inclusive tag range [first, last].
constexpr TagRange(Tag first, Tag last) : first(first), last(last) {}
#ifdef V8_TARGET_OS_ANDROID
// The size of the virtual memory reservation for the external buffer table.
// As with the external pointer table, a maximum table size in combination with
// shifted indices allows omitting bounds checks.
constexpr size_t kExternalBufferTableReservationSize = 64 * MB;
// Construct a tag range consisting of a single tag.
//
// A single tag is always implicitly convertible to a tag range. This greatly
// increases readability as most of the time, the exact tag of a field is
// known and so no tag range needs to explicitly be created for it.
constexpr TagRange(Tag tag) // NOLINT(runtime/explicit)
: first(tag), last(tag) {}
// The external buffer handles are stores shifted to the left by this amount
// to guarantee that they are smaller than the maximum table size.
constexpr uint32_t kExternalBufferHandleShift = 10;
#else
constexpr size_t kExternalBufferTableReservationSize = 128 * MB;
constexpr uint32_t kExternalBufferHandleShift = 9;
#endif // V8_TARGET_OS_ANDROID
// Construct an empty tag range.
constexpr TagRange() : TagRange(static_cast<Tag>(0)) {}
// A null handle always references an entry that contains nullptr.
constexpr ExternalBufferHandle kNullExternalBufferHandle = 0;
// A tag range is considered empty if it only contains the null tag.
constexpr bool IsEmpty() const { return first == 0 && last == 0; }
// The maximum number of entries in an external buffer table.
constexpr int kExternalBufferTableEntrySize = 16;
constexpr int kExternalBufferTableEntrySizeLog2 = 4;
constexpr size_t kMaxExternalBufferPointers =
kExternalBufferTableReservationSize / kExternalBufferTableEntrySize;
static_assert((1 << (32 - kExternalBufferHandleShift)) ==
kMaxExternalBufferPointers,
"kExternalBufferTableReservationSize and "
"kExternalBufferHandleShift don't match");
constexpr size_t Size() const {
if (IsEmpty()) {
return 0;
} else {
return last - first + 1;
}
}
constexpr bool Contains(Tag tag) const {
// Need to perform the math with uint32_t. Otherwise, the uint16_ts would
// be promoted to (signed) int, allowing the compiler to (wrongly) assume
// that an underflow cannot happen as that would be undefined behavior.
return static_cast<uint32_t>(tag) - first <=
static_cast<uint32_t>(last) - first;
}
constexpr bool Contains(TagRange tag_range) const {
return tag_range.first >= first && tag_range.last <= last;
}
constexpr bool operator==(const TagRange other) const {
return first == other.first && last == other.last;
}
constexpr size_t hash_value() const {
static_assert(std::is_same_v<std::underlying_type_t<Tag>, uint16_t>);
return (static_cast<size_t>(first) << 16) | last;
}
// Internally we represent tag ranges as the inclusive range [first, last].
const Tag first;
const Tag last;
};
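Editor-added and illustrative only: a made-up tag enum mirroring the A..F example above, showing how TagRange expresses the range-based checks (the names and values are hypothetical, not real V8 tags).

#include <cstdint>
#include "v8-internal.h"

enum HypotheticalTag : uint16_t { kA = 0, kB = 1, kC = 2, kD = 3, kE = 4, kF = 5 };

// Range for A and all of its (transitive) subtypes, and likewise for B.
constexpr v8::internal::TagRange<HypotheticalTag> kARange(kA, kE);
constexpr v8::internal::TagRange<HypotheticalTag> kBRange(kB, kD);

static_assert(kARange.Contains(kC));       // C is in A's subtree.
static_assert(kARange.Contains(kBRange));  // B's range nests inside A's.
static_assert(!kBRange.Contains(kE));      // E is outside B's subtree.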
//
// External Pointers.
@ -452,41 +506,12 @@ static_assert((1 << (32 - kExternalBufferHandleShift)) ==
// pointer table and are referenced from HeapObjects through an index (a
// "handle"). When stored in the table, the pointers are tagged with per-type
// tags to prevent type confusion attacks between different external objects.
// Besides type information bits, these tags also contain the GC marking bit
// which indicates whether the pointer table entry is currently alive. When a
// pointer is written into the table, the tag is ORed into the top bits. When
// that pointer is later loaded from the table, it is ANDed with the inverse of
// the expected tag. If the expected and actual type differ, this will leave
// some of the top bits of the pointer set, rendering the pointer inaccessible.
// The AND operation also removes the GC marking bit from the pointer.
//
// The tags are constructed such that UNTAG(TAG(0, T1), T2) != 0 for any two
// (distinct) tags T1 and T2. In practice, this is achieved by generating tags
// that all have the same number of zeroes and ones but different bit patterns.
// With N type tag bits, this allows for (N choose N/2) possible type tags.
// Besides the type tag bits, the tags also have the GC marking bit set so that
// the marking bit is automatically set when a pointer is written into the
// external pointer table (in which case it is clearly alive) and is cleared
// when the pointer is loaded. The exception to this is the free entry tag,
// which doesn't have the mark bit set, as the entry is not alive. This
// construction allows performing the type check and removing GC marking bits
// from the pointer in one efficient operation (bitwise AND). The number of
// available bits is limited in the following way: on x64, bits [47, 64) are
// generally available for tagging (userspace has 47 address bits available).
// On Arm64, userspace typically has a 40 or 48 bit address space. However, due
// to top-byte ignore (TBI) and memory tagging (MTE), the top byte is unusable
// for type checks as type-check failures would go unnoticed or collide with
// MTE bits. Some bits of the top byte can, however, still be used for the GC
// marking bit. The bits available for the type tags are therefore limited to
// [48, 56), i.e. (8 choose 4) = 70 different types.
// The following options exist to increase the number of possible types:
// - Using multiple ExternalPointerTables since tags can safely be reused
// across different tables
// - Using "extended" type checks, where additional type information is stored
// either in an adjacent pointer table entry or at the pointed-to location
// - Using a different tagging scheme, for example based on XOR which would
// allow for 2**8 different tags but require a separate operation to remove
// the marking bit
// When loading an external pointer, a range of allowed tags can be specified.
// This way, type hierarchies can be supported. The main requirement for that
// is that all (transitive) child classes of a given parent class have type ids
// in the same range, and that there are no unrelated types in that range. For
// more details about how to assign type tags to types, see the TagRange class.
//
// The external pointer sandboxing mechanism ensures that every access to an
// external pointer field will result in a valid pointer of the expected type
@ -515,167 +540,136 @@ static_assert((1 << (32 - kExternalBufferHandleShift)) ==
// for this purpose, instead of using the ExternalPointer accessors one needs to
// use ExternalPointerHandles directly and use them to access the pointers in an
// ExternalPointerTable.
constexpr uint64_t kExternalPointerMarkBit = 1ULL << 62;
constexpr uint64_t kExternalPointerTagMask = 0x40ff000000000000;
constexpr uint64_t kExternalPointerTagMaskWithoutMarkBit = 0xff000000000000;
constexpr uint64_t kExternalPointerTagShift = 48;
//
// The tag is currently in practice limited to 15 bits since it needs to fit
// together with a marking bit into the unused parts of a pointer.
enum ExternalPointerTag : uint16_t {
kFirstExternalPointerTag = 0,
kExternalPointerNullTag = 0,
// All possible 8-bit type tags.
// These are sorted so that tags can be grouped together and it can efficiently
// be checked if a tag belongs to a given group. See for example the
// IsSharedExternalPointerType routine.
constexpr uint64_t kAllTagsForAndBasedTypeChecking[] = {
0b00001111, 0b00010111, 0b00011011, 0b00011101, 0b00011110, 0b00100111,
0b00101011, 0b00101101, 0b00101110, 0b00110011, 0b00110101, 0b00110110,
0b00111001, 0b00111010, 0b00111100, 0b01000111, 0b01001011, 0b01001101,
0b01001110, 0b01010011, 0b01010101, 0b01010110, 0b01011001, 0b01011010,
0b01011100, 0b01100011, 0b01100101, 0b01100110, 0b01101001, 0b01101010,
0b01101100, 0b01110001, 0b01110010, 0b01110100, 0b01111000, 0b10000111,
0b10001011, 0b10001101, 0b10001110, 0b10010011, 0b10010101, 0b10010110,
0b10011001, 0b10011010, 0b10011100, 0b10100011, 0b10100101, 0b10100110,
0b10101001, 0b10101010, 0b10101100, 0b10110001, 0b10110010, 0b10110100,
0b10111000, 0b11000011, 0b11000101, 0b11000110, 0b11001001, 0b11001010,
0b11001100, 0b11010001, 0b11010010, 0b11010100, 0b11011000, 0b11100001,
0b11100010, 0b11100100, 0b11101000, 0b11110000};
// When adding new tags, please ensure that the code using these tags is
// "substitution-safe", i.e. still operate safely if external pointers of the
// same type are swapped by an attacker. See comment above for more details.
#define TAG(i) \
((kAllTagsForAndBasedTypeChecking[i] << kExternalPointerTagShift) | \
kExternalPointerMarkBit)
// Shared external pointers are owned by the shared Isolate and stored in the
// shared external pointer table associated with that Isolate, where they can
// be accessed from multiple threads at the same time. The objects referenced
// in this way must therefore always be thread-safe.
kFirstSharedExternalPointerTag,
kWaiterQueueNodeTag = kFirstSharedExternalPointerTag,
kExternalStringResourceTag,
kExternalStringResourceDataTag,
kLastSharedExternalPointerTag = kExternalStringResourceDataTag,
// clang-format off
// External pointers using these tags are kept in a per-Isolate external
// pointer table and can only be accessed when this Isolate is active.
kNativeContextMicrotaskQueueTag,
kEmbedderDataSlotPayloadTag,
// This tag essentially stands for a `void*` pointer in the V8 API, and it is
// the Embedder's responsibility to ensure type safety (against substitution)
// and lifetime validity of these objects.
kExternalObjectValueTag,
kFirstMaybeReadOnlyExternalPointerTag,
kFunctionTemplateInfoCallbackTag = kFirstMaybeReadOnlyExternalPointerTag,
kAccessorInfoGetterTag,
kAccessorInfoSetterTag,
kLastMaybeReadOnlyExternalPointerTag = kAccessorInfoSetterTag,
kWasmInternalFunctionCallTargetTag,
kWasmTypeInfoNativeTypeTag,
kWasmExportedFunctionDataSignatureTag,
kWasmStackMemoryTag,
kWasmIndirectFunctionTargetTag,
// When adding new tags, please ensure that the code using these tags is
// "substitution-safe", i.e. still operate safely if external pointers of the
// same type are swapped by an attacker. See comment above for more details.
// Foreigns
kFirstForeignExternalPointerTag,
kGenericForeignTag = kFirstForeignExternalPointerTag,
kApiNamedPropertyQueryCallbackTag,
kApiNamedPropertyGetterCallbackTag,
kApiNamedPropertySetterCallbackTag,
kApiNamedPropertyDescriptorCallbackTag,
kApiNamedPropertyDefinerCallbackTag,
kApiNamedPropertyDeleterCallbackTag,
kApiIndexedPropertyQueryCallbackTag,
kApiIndexedPropertyGetterCallbackTag,
kApiIndexedPropertySetterCallbackTag,
kApiIndexedPropertyDescriptorCallbackTag,
kApiIndexedPropertyDefinerCallbackTag,
kApiIndexedPropertyDeleterCallbackTag,
kApiIndexedPropertyEnumeratorCallbackTag,
kApiAccessCheckCallbackTag,
kApiAbortScriptExecutionCallbackTag,
kSyntheticModuleTag,
kMicrotaskCallbackTag,
kMicrotaskCallbackDataTag,
kCFunctionTag,
kCFunctionInfoTag,
kMessageListenerTag,
kWaiterQueueForeignTag,
// Shared external pointers are owned by the shared Isolate and stored in the
// shared external pointer table associated with that Isolate, where they can
// be accessed from multiple threads at the same time. The objects referenced
// in this way must therefore always be thread-safe.
#define SHARED_EXTERNAL_POINTER_TAGS(V) \
V(kFirstSharedTag, TAG(0)) \
V(kWaiterQueueNodeTag, TAG(0)) \
V(kExternalStringResourceTag, TAG(1)) \
V(kExternalStringResourceDataTag, TAG(2)) \
V(kLastSharedTag, TAG(2))
// Leave some space in the tag range here for future shared tags.
// Managed
kFirstManagedResourceTag,
kFirstManagedExternalPointerTag = kFirstManagedResourceTag,
kGenericManagedTag = kFirstManagedExternalPointerTag,
kWasmWasmStreamingTag,
kWasmFuncDataTag,
kWasmManagedDataTag,
kWasmNativeModuleTag,
kIcuBreakIteratorTag,
kIcuUnicodeStringTag,
kIcuListFormatterTag,
kIcuLocaleTag,
kIcuSimpleDateFormatTag,
kIcuDateIntervalFormatTag,
kIcuRelativeDateTimeFormatterTag,
kIcuLocalizedNumberFormatterTag,
kIcuPluralRulesTag,
kIcuCollatorTag,
kDisplayNamesInternalTag,
kD8WorkerTag,
kD8ModuleEmbedderDataTag,
kLastForeignExternalPointerTag = kD8ModuleEmbedderDataTag,
kLastManagedExternalPointerTag = kLastForeignExternalPointerTag,
// External resources whose lifetime is tied to their entry in the external
// pointer table but which are not referenced via a Managed
kArrayBufferExtensionTag,
kLastManagedResourceTag = kArrayBufferExtensionTag,
// External pointers using these tags are kept in a per-Isolate external
// pointer table and can only be accessed when this Isolate is active.
#define PER_ISOLATE_EXTERNAL_POINTER_TAGS(V) \
V(kNativeContextMicrotaskQueueTag, TAG(5)) \
V(kEmbedderDataSlotPayloadTag, TAG(6)) \
/* This tag essentially stands for a `void*` pointer in the V8 API, and */ \
/* it is the Embedder's responsibility to ensure type safety (against */ \
/* substitution) and lifetime validity of these objects. */ \
V(kExternalObjectValueTag, TAG(7)) \
V(kFunctionTemplateInfoCallbackTag, TAG(8)) \
V(kAccessorInfoGetterTag, TAG(9)) \
V(kAccessorInfoSetterTag, TAG(10)) \
V(kWasmInternalFunctionCallTargetTag, TAG(11)) \
V(kWasmTypeInfoNativeTypeTag, TAG(12)) \
V(kWasmExportedFunctionDataSignatureTag, TAG(13)) \
V(kWasmContinuationJmpbufTag, TAG(14)) \
V(kWasmStackMemoryTag, TAG(15)) \
V(kWasmIndirectFunctionTargetTag, TAG(16)) \
/* Foreigns */ \
V(kGenericForeignTag, TAG(20)) \
V(kApiNamedPropertyQueryCallbackTag, TAG(21)) \
V(kApiNamedPropertyGetterCallbackTag, TAG(22)) \
V(kApiNamedPropertySetterCallbackTag, TAG(23)) \
V(kApiNamedPropertyDescriptorCallbackTag, TAG(24)) \
V(kApiNamedPropertyDefinerCallbackTag, TAG(25)) \
V(kApiNamedPropertyDeleterCallbackTag, TAG(26)) \
V(kApiIndexedPropertyQueryCallbackTag, TAG(27)) \
V(kApiIndexedPropertyGetterCallbackTag, TAG(28)) \
V(kApiIndexedPropertySetterCallbackTag, TAG(29)) \
V(kApiIndexedPropertyDescriptorCallbackTag, TAG(30)) \
V(kApiIndexedPropertyDefinerCallbackTag, TAG(31)) \
V(kApiIndexedPropertyDeleterCallbackTag, TAG(32)) \
V(kApiIndexedPropertyEnumeratorCallbackTag, TAG(33)) \
V(kApiAccessCheckCallbackTag, TAG(34)) \
V(kApiAbortScriptExecutionCallbackTag, TAG(35)) \
V(kSyntheticModuleTag, TAG(36)) \
V(kMicrotaskCallbackTag, TAG(37)) \
V(kMicrotaskCallbackDataTag, TAG(38)) \
V(kCFunctionTag, TAG(39)) \
V(kCFunctionInfoTag, TAG(40)) \
V(kMessageListenerTag, TAG(41)) \
V(kWaiterQueueForeignTag, TAG(42)) \
/* Managed */ \
V(kFirstManagedResourceTag, TAG(50)) \
V(kGenericManagedTag, TAG(50)) \
V(kWasmWasmStreamingTag, TAG(51)) \
V(kWasmFuncDataTag, TAG(52)) \
V(kWasmManagedDataTag, TAG(53)) \
V(kWasmNativeModuleTag, TAG(54)) \
V(kIcuBreakIteratorTag, TAG(55)) \
V(kIcuUnicodeStringTag, TAG(56)) \
V(kIcuListFormatterTag, TAG(57)) \
V(kIcuLocaleTag, TAG(58)) \
V(kIcuSimpleDateFormatTag, TAG(59)) \
V(kIcuDateIntervalFormatTag, TAG(60)) \
V(kIcuRelativeDateTimeFormatterTag, TAG(61)) \
V(kIcuLocalizedNumberFormatterTag, TAG(62)) \
V(kIcuPluralRulesTag, TAG(63)) \
V(kIcuCollatorTag, TAG(64)) \
V(kDisplayNamesInternalTag, TAG(65)) \
/* External resources whose lifetime is tied to */ \
/* their entry in the external pointer table but */ \
/* which are not referenced via a Managed */ \
V(kArrayBufferExtensionTag, TAG(66)) \
V(kLastManagedResourceTag, TAG(66)) \
// All external pointer tags.
#define ALL_EXTERNAL_POINTER_TAGS(V) \
SHARED_EXTERNAL_POINTER_TAGS(V) \
PER_ISOLATE_EXTERNAL_POINTER_TAGS(V)
#define EXTERNAL_POINTER_TAG_ENUM(Name, Tag) Name = Tag,
#define MAKE_TAG(HasMarkBit, TypeTag) \
((static_cast<uint64_t>(TypeTag) << kExternalPointerTagShift) | \
(HasMarkBit ? kExternalPointerMarkBit : 0))
enum ExternalPointerTag : uint64_t {
// Empty tag value. Mostly used as placeholder.
kExternalPointerNullTag = MAKE_TAG(1, 0b00000000),
// External pointer tag that will match any external pointer. Use with care!
kAnyExternalPointerTag = MAKE_TAG(1, 0b11111111),
// External pointer tag that will match any external pointer in a Foreign.
// Use with care! If desired, this could be made more fine-granular.
kAnyForeignTag = kAnyExternalPointerTag,
// The free entry tag has all type bits set so every type check with a
// different type fails. It also doesn't have the mark bit set as free
// entries are (by definition) not alive.
kExternalPointerFreeEntryTag = MAKE_TAG(0, 0b11111111),
// Evacuation entries are used during external pointer table compaction.
kExternalPointerEvacuationEntryTag = MAKE_TAG(1, 0b11111110),
// Tag for zapped/invalidated entries. Those are considered to no longer be
// in use and so have the marking bit cleared.
kExternalPointerZappedEntryTag = MAKE_TAG(0, 0b11111101),
ALL_EXTERNAL_POINTER_TAGS(EXTERNAL_POINTER_TAG_ENUM)
kExternalPointerZappedEntryTag = 0x7d,
kExternalPointerEvacuationEntryTag = 0x7e,
kExternalPointerFreeEntryTag = 0x7f,
// The tags are limited to 7 bits, so the last tag is 0x7f.
kLastExternalPointerTag = 0x7f,
};
#undef MAKE_TAG
#undef TAG
#undef EXTERNAL_POINTER_TAG_ENUM
using ExternalPointerTagRange = TagRange<ExternalPointerTag>;
// clang-format on
constexpr ExternalPointerTagRange kAnyExternalPointerTagRange(
kFirstExternalPointerTag, kLastExternalPointerTag);
constexpr ExternalPointerTagRange kAnySharedExternalPointerTagRange(
kFirstSharedExternalPointerTag, kLastSharedExternalPointerTag);
constexpr ExternalPointerTagRange kAnyForeignExternalPointerTagRange(
kFirstForeignExternalPointerTag, kLastForeignExternalPointerTag);
constexpr ExternalPointerTagRange kAnyManagedExternalPointerTagRange(
kFirstManagedExternalPointerTag, kLastManagedExternalPointerTag);
constexpr ExternalPointerTagRange kAnyMaybeReadOnlyExternalPointerTagRange(
kFirstMaybeReadOnlyExternalPointerTag,
kLastMaybeReadOnlyExternalPointerTag);
constexpr ExternalPointerTagRange kAnyManagedResourceExternalPointerTag(
kFirstManagedResourceTag, kLastManagedResourceTag);
// True if the external pointer must be accessed from the shared isolate's
// external pointer table.
V8_INLINE static constexpr bool IsSharedExternalPointerType(
ExternalPointerTag tag) {
return tag >= kFirstSharedTag && tag <= kLastSharedTag;
ExternalPointerTagRange tag_range) {
return kAnySharedExternalPointerTagRange.Contains(tag_range);
}
// True if the external pointer may live in a read-only object, in which case
// the table entry will be in the shared read-only segment of the external
// pointer table.
V8_INLINE static constexpr bool IsMaybeReadOnlyExternalPointerType(
ExternalPointerTag tag) {
return tag == kAccessorInfoGetterTag || tag == kAccessorInfoSetterTag ||
tag == kFunctionTemplateInfoCallbackTag;
ExternalPointerTagRange tag_range) {
return kAnyMaybeReadOnlyExternalPointerTagRange.Contains(tag_range);
}
// True if the external pointer references an external object whose lifetime is
@ -683,26 +677,23 @@ V8_INLINE static constexpr bool IsMaybeReadOnlyExternalPointerType(
// In this case, the entry in the ExternalPointerTable always points to an
// object derived from ExternalPointerTable::ManagedResource.
V8_INLINE static constexpr bool IsManagedExternalPointerType(
ExternalPointerTag tag) {
return tag >= kFirstManagedResourceTag && tag <= kLastManagedResourceTag;
ExternalPointerTagRange tag_range) {
return kAnyManagedResourceExternalPointerTag.Contains(tag_range);
}
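// A small sketch of how the range-based predicates above compose, assuming
// the tag ordering from the enum earlier in this header (the shared tags run
// from kWaiterQueueNodeTag to kExternalStringResourceDataTag, and the managed
// resource tags end at kArrayBufferExtensionTag):
static_assert(IsSharedExternalPointerType(
    ExternalPointerTagRange(kExternalStringResourceTag,
                            kExternalStringResourceTag)));
static_assert(IsManagedExternalPointerType(
    ExternalPointerTagRange(kArrayBufferExtensionTag,
                            kArrayBufferExtensionTag)));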
// Sanity checks.
#define CHECK_SHARED_EXTERNAL_POINTER_TAGS(Tag, ...) \
static_assert(IsSharedExternalPointerType(Tag));
#define CHECK_NON_SHARED_EXTERNAL_POINTER_TAGS(Tag, ...) \
static_assert(!IsSharedExternalPointerType(Tag));
// When an external pointer field can contain the null external pointer handle,
// the type checking mechanism needs to also check for null.
// TODO(saelo): this is mostly a temporary workaround to introduce range-based
// type checks. In the future, we should either (a) change the type tagging
// scheme so that null always passes or (b) (more likely) introduce dedicated
// null entries for those tags that need them (similar to other well-known
// empty value constants such as the empty fixed array).
V8_INLINE static constexpr bool ExternalPointerCanBeEmpty(
ExternalPointerTagRange tag_range) {
return tag_range.Contains(kArrayBufferExtensionTag) ||
tag_range.Contains(kEmbedderDataSlotPayloadTag);
}
SHARED_EXTERNAL_POINTER_TAGS(CHECK_SHARED_EXTERNAL_POINTER_TAGS)
PER_ISOLATE_EXTERNAL_POINTER_TAGS(CHECK_NON_SHARED_EXTERNAL_POINTER_TAGS)
#undef CHECK_NON_SHARED_EXTERNAL_POINTER_TAGS
#undef CHECK_SHARED_EXTERNAL_POINTER_TAGS
#undef SHARED_EXTERNAL_POINTER_TAGS
#undef EXTERNAL_POINTER_TAGS
//
// Indirect Pointers.
//
// When the sandbox is enabled, indirect pointers are used to reference
@ -746,7 +737,7 @@ using TrustedPointerHandle = IndirectPointerHandle;
// shifted indices allows omitting bounds checks.
constexpr size_t kTrustedPointerTableReservationSize = 64 * MB;
// The trusted pointer handles are stores shifted to the left by this amount
// The trusted pointer handles are stored shifted to the left by this amount
// to guarantee that they are smaller than the maximum table size.
constexpr uint32_t kTrustedPointerHandleShift = 9;
@ -822,29 +813,6 @@ constexpr bool kAllCodeObjectsLiveInTrustedSpace =
kRuntimeGeneratedCodeObjectsLiveInTrustedSpace &&
kBuiltinCodeObjectsLiveInTrustedSpace;
//
// JavaScript Dispatch Table
//
// A JSDispatchHandle represents a 32-bit index into a JSDispatchTable.
using JSDispatchHandle = uint32_t;
constexpr JSDispatchHandle kNullJSDispatchHandle = 0;
// The size of the virtual memory reservation for the JSDispatchTable.
// As with the other tables, a maximum table size in combination with shifted
// indices allows omitting bounds checks.
constexpr size_t kJSDispatchTableReservationSize = 128 * MB;
constexpr uint32_t kJSDispatchHandleShift = 9;
// The maximum number of entries in a JSDispatchTable.
constexpr int kJSDispatchTableEntrySize = 16;
constexpr int kJSDispatchTableEntrySizeLog2 = 4;
constexpr size_t kMaxJSDispatchEntries =
kJSDispatchTableReservationSize / kJSDispatchTableEntrySize;
static_assert((1 << (32 - kJSDispatchHandleShift)) == kMaxJSDispatchEntries,
"kJSDispatchTableReservationSize and kJSDispatchEntryHandleShift "
"don't match");
// {obj} must be the raw tagged pointer representation of a HeapObject
// that's guaranteed to never be in ReadOnlySpace.
V8_EXPORT internal::Isolate* IsolateFromNeverReadOnlySpaceObject(Address obj);
@ -902,6 +870,7 @@ class Internals {
static const int kNumberOfBooleanFlags = 6;
static const int kErrorMessageParamSize = 1;
static const int kTablesAlignmentPaddingSize = 1;
static const int kRegExpStaticResultOffsetsVectorSize = kApiSystemPointerSize;
static const int kBuiltinTier0EntryTableSize = 7 * kApiSystemPointerSize;
static const int kBuiltinTier0TableSize = 7 * kApiSystemPointerSize;
static const int kLinearAllocationAreaSize = 3 * kApiSystemPointerSize;
@ -912,7 +881,6 @@ class Internals {
// ExternalPointerTable and TrustedPointerTable layout guarantees.
static const int kExternalPointerTableBasePointerOffset = 0;
static const int kExternalPointerTableSize = 2 * kApiSystemPointerSize;
static const int kExternalBufferTableSize = 2 * kApiSystemPointerSize;
static const int kTrustedPointerTableSize = 2 * kApiSystemPointerSize;
static const int kTrustedPointerTableBasePointerOffset = 0;
@ -924,9 +892,9 @@ class Internals {
kIsolateStackGuardOffset + kStackGuardSize;
static const int kErrorMessageParamOffset =
kVariousBooleanFlagsOffset + kNumberOfBooleanFlags;
static const int kBuiltinTier0EntryTableOffset = kErrorMessageParamOffset +
kErrorMessageParamSize +
kTablesAlignmentPaddingSize;
static const int kBuiltinTier0EntryTableOffset =
kErrorMessageParamOffset + kErrorMessageParamSize +
kTablesAlignmentPaddingSize + kRegExpStaticResultOffsetsVectorSize;
static const int kBuiltinTier0TableOffset =
kBuiltinTier0EntryTableOffset + kBuiltinTier0EntryTableSize;
static const int kNewAllocationInfoOffset =
@ -935,14 +903,15 @@ class Internals {
kNewAllocationInfoOffset + kLinearAllocationAreaSize;
static const int kFastCCallAlignmentPaddingSize =
kApiSystemPointerSize == 8 ? 0 : kApiSystemPointerSize;
static const int kIsolateFastCCallCallerFpOffset =
kApiSystemPointerSize == 8 ? 5 * kApiSystemPointerSize
: 1 * kApiSystemPointerSize;
static const int kIsolateFastCCallCallerPcOffset =
kOldAllocationInfoOffset + kLinearAllocationAreaSize +
kFastCCallAlignmentPaddingSize;
static const int kIsolateFastCCallCallerPcOffset =
kIsolateFastCCallCallerFpOffset + kApiSystemPointerSize;
static const int kIsolateFastApiCallTargetOffset =
static const int kIsolateFastCCallCallerFpOffset =
kIsolateFastCCallCallerPcOffset + kApiSystemPointerSize;
static const int kIsolateFastApiCallTargetOffset =
kIsolateFastCCallCallerFpOffset + kApiSystemPointerSize;
static const int kIsolateLongTaskStatsCounterOffset =
kIsolateFastApiCallTargetOffset + kApiSystemPointerSize;
static const int kIsolateThreadLocalTopOffset =
@ -963,8 +932,14 @@ class Internals {
kIsolateCppHeapPointerTableOffset + kExternalPointerTableSize;
static const int kIsolateTrustedPointerTableOffset =
kIsolateTrustedCageBaseOffset + kApiSystemPointerSize;
static const int kIsolateApiCallbackThunkArgumentOffset =
static const int kIsolateSharedTrustedPointerTableAddressOffset =
kIsolateTrustedPointerTableOffset + kTrustedPointerTableSize;
static const int kIsolateTrustedPointerPublishingScopeOffset =
kIsolateSharedTrustedPointerTableAddressOffset + kApiSystemPointerSize;
static const int kIsolateCodePointerTableBaseAddressOffset =
kIsolateTrustedPointerPublishingScopeOffset + kApiSystemPointerSize;
static const int kIsolateApiCallbackThunkArgumentOffset =
kIsolateCodePointerTableBaseAddressOffset + kApiSystemPointerSize;
#else
static const int kIsolateApiCallbackThunkArgumentOffset =
kIsolateCppHeapPointerTableOffset + kExternalPointerTableSize;
@ -973,8 +948,10 @@ class Internals {
static const int kIsolateApiCallbackThunkArgumentOffset =
kIsolateEmbedderDataOffset + kNumIsolateDataSlots * kApiSystemPointerSize;
#endif // V8_COMPRESS_POINTERS
static const int kContinuationPreservedEmbedderDataOffset =
static const int kIsolateRegexpExecVectorArgumentOffset =
kIsolateApiCallbackThunkArgumentOffset + kApiSystemPointerSize;
static const int kContinuationPreservedEmbedderDataOffset =
kIsolateRegexpExecVectorArgumentOffset + kApiSystemPointerSize;
static const int kIsolateRootsOffset =
kContinuationPreservedEmbedderDataOffset + kApiSystemPointerSize;
@ -986,12 +963,12 @@ class Internals {
// These constants are copied from static-roots.h and guarded by static asserts.
#define EXPORTED_STATIC_ROOTS_PTR_LIST(V) \
V(UndefinedValue, 0x69) \
V(NullValue, 0x85) \
V(TrueValue, 0xc9) \
V(FalseValue, 0xad) \
V(EmptyString, 0xa1) \
V(TheHoleValue, 0x791)
V(UndefinedValue, 0x11) \
V(NullValue, 0x2d) \
V(TrueValue, 0x71) \
V(FalseValue, 0x55) \
V(EmptyString, 0x49) \
V(TheHoleValue, 0x761)
using Tagged_t = uint32_t;
struct StaticReadOnlyRoot {
@ -1001,7 +978,7 @@ class Internals {
// Use 0 for kStringMapLowerBound since string maps are the first maps.
static constexpr Tagged_t kStringMapLowerBound = 0;
static constexpr Tagged_t kStringMapUpperBound = 0x47d;
static constexpr Tagged_t kStringMapUpperBound = 0x425;
#define PLUSONE(...) +1
static constexpr size_t kNumberOfExportedStaticRoots =
@ -1047,7 +1024,7 @@ class Internals {
// Soft limit for AdjustAmountofExternalAllocatedMemory. Trigger an
// incremental GC once the external memory reaches this limit.
static constexpr int kExternalAllocationSoftLimit = 64 * 1024 * 1024;
static constexpr size_t kExternalAllocationSoftLimit = 64 * 1024 * 1024;
#ifdef V8_MAP_PACKING
static const uintptr_t kMapWordMetadataMask = 0xffffULL << 48;
@ -1289,15 +1266,15 @@ class Internals {
#endif
}
template <ExternalPointerTag tag>
template <ExternalPointerTagRange tag_range>
V8_INLINE static Address ReadExternalPointerField(v8::Isolate* isolate,
Address heap_object_ptr,
int offset) {
#ifdef V8_ENABLE_SANDBOX
static_assert(tag != kExternalPointerNullTag);
// See src/sandbox/external-pointer-table-inl.h. Logic duplicated here so
static_assert(!tag_range.IsEmpty());
// See src/sandbox/external-pointer-table.h. Logic duplicated here so
// it can be inlined and doesn't require an additional call.
Address* table = IsSharedExternalPointerType(tag)
Address* table = IsSharedExternalPointerType(tag_range)
? GetSharedExternalPointerTableBase(isolate)
: GetExternalPointerTableBase(isolate);
internal::ExternalPointerHandle handle =
@ -1306,7 +1283,14 @@ class Internals {
std::atomic<Address>* ptr =
reinterpret_cast<std::atomic<Address>*>(&table[index]);
Address entry = std::atomic_load_explicit(ptr, std::memory_order_relaxed);
return entry & ~tag;
ExternalPointerTag actual_tag = static_cast<ExternalPointerTag>(
(entry & kExternalPointerTagMask) >> kExternalPointerTagShift);
if (V8_LIKELY(tag_range.Contains(actual_tag))) {
return entry & kExternalPointerPayloadMask;
} else {
return 0;
}
return entry;
#else
return ReadRawField<Address>(heap_object_ptr, offset);
#endif // V8_ENABLE_SANDBOX
@ -1360,7 +1344,7 @@ class BackingStoreBase {};
// The maximum value in enum GarbageCollectionReason, defined in heap.h.
// This is needed for histograms sampling garbage collection reasons.
constexpr int kGarbageCollectionReasonMaxValue = 27;
constexpr int kGarbageCollectionReasonMaxValue = 29;
// Base class for the address block allocator compatible with standard
// containers, which registers its allocated range as strong roots.
@ -1377,7 +1361,10 @@ class V8_EXPORT StrongRootAllocatorBase {
protected:
explicit StrongRootAllocatorBase(Heap* heap) : heap_(heap) {}
explicit StrongRootAllocatorBase(LocalHeap* heap);
explicit StrongRootAllocatorBase(Isolate* isolate);
explicit StrongRootAllocatorBase(v8::Isolate* isolate);
explicit StrongRootAllocatorBase(LocalIsolate* isolate);
// Allocate/deallocate a range of n elements of type internal::Address.
Address* allocate_impl(size_t n);
@ -1397,9 +1384,8 @@ class StrongRootAllocator : private std::allocator<T> {
public:
using value_type = T;
explicit StrongRootAllocator(Heap* heap) {}
explicit StrongRootAllocator(Isolate* isolate) {}
explicit StrongRootAllocator(v8::Isolate* isolate) {}
template <typename HeapOrIsolateT>
explicit StrongRootAllocator(HeapOrIsolateT*) {}
template <typename U>
StrongRootAllocator(const StrongRootAllocator<U>& other) noexcept {}
@ -1430,7 +1416,7 @@ struct MaybeDefineIteratorConcept {};
template <typename Iterator>
struct MaybeDefineIteratorConcept<
Iterator, std::enable_if_t<kHaveIteratorConcept<Iterator>>> {
using iterator_concept = Iterator::iterator_concept;
using iterator_concept = typename Iterator::iterator_concept;
};
// Otherwise fall back to `std::iterator_traits<Iterator>` if possible.
template <typename Iterator>
@ -1443,7 +1429,8 @@ struct MaybeDefineIteratorConcept<
// TODO(pkasting): Add this unconditionally after dropping support for old
// libstdc++ versions.
#if __has_include(<ranges>)
using iterator_concept = std::iterator_traits<Iterator>::iterator_concept;
using iterator_concept =
typename std::iterator_traits<Iterator>::iterator_concept;
#endif
};
@ -1492,8 +1479,12 @@ class WrappedIterator : public MaybeDefineIteratorConcept<Iterator> {
[[nodiscard]] constexpr reference operator*() const noexcept { return *it_; }
[[nodiscard]] constexpr pointer operator->() const noexcept {
if constexpr (std::is_pointer_v<Iterator>) {
return it_;
} else {
return it_.operator->();
}
}
template <typename OtherIterator, typename OtherElementType>
[[nodiscard]] constexpr bool operator==(
@ -1623,16 +1614,25 @@ class WrappedIterator : public MaybeDefineIteratorConcept<Iterator> {
// whether direct local support is enabled.
class ValueHelper final {
public:
// ValueHelper::InternalRepresentationType is an abstract type that
// corresponds to the internal representation of v8::Local and essentially
// to what T* really is (these two are always in sync). This type is used in
// methods like GetDataFromSnapshotOnce that need access to a handle's
// internal representation. In particular, if `x` is a `v8::Local<T>`, then
// `v8::Local<T>::FromRepr(x.repr())` gives exactly the same handle as `x`.
#ifdef V8_ENABLE_DIRECT_HANDLE
static constexpr Address kTaggedNullAddress = 1;
static constexpr Address kEmpty = kTaggedNullAddress;
using InternalRepresentationType = internal::Address;
static constexpr InternalRepresentationType kEmpty = kTaggedNullAddress;
#else
static constexpr Address kEmpty = kNullAddress;
using InternalRepresentationType = internal::Address*;
static constexpr InternalRepresentationType kEmpty = nullptr;
#endif // V8_ENABLE_DIRECT_HANDLE
template <typename T>
V8_INLINE static bool IsEmpty(T* value) {
return reinterpret_cast<Address>(value) == kEmpty;
return ValueAsRepr(value) == kEmpty;
}
// Returns a handle's "value" for all kinds of abstract handles. For Local,
@ -1659,6 +1659,16 @@ class ValueHelper final {
return *reinterpret_cast<T**>(slot);
}
template <typename T>
V8_INLINE static InternalRepresentationType ValueAsRepr(const T* value) {
return reinterpret_cast<InternalRepresentationType>(value);
}
template <typename T>
V8_INLINE static T* ReprAsValue(InternalRepresentationType repr) {
return reinterpret_cast<T*>(repr);
}
#else // !V8_ENABLE_DIRECT_HANDLE
template <typename T>
@ -1671,6 +1681,17 @@ class ValueHelper final {
return reinterpret_cast<T*>(slot);
}
template <typename T>
V8_INLINE static InternalRepresentationType ValueAsRepr(const T* value) {
return const_cast<InternalRepresentationType>(
reinterpret_cast<const Address*>(value));
}
template <typename T>
V8_INLINE static T* ReprAsValue(InternalRepresentationType repr) {
return reinterpret_cast<T*>(repr);
}
#endif // V8_ENABLE_DIRECT_HANDLE
};


@ -164,28 +164,6 @@ class V8_EXPORT ResourceConstraints {
uint32_t* stack_limit_ = nullptr;
};
/**
* Option flags passed to the SetRAILMode function.
* See documentation https://developers.google.com/web/tools/chrome-devtools/
* profile/evaluate-performance/rail
*/
enum RAILMode : unsigned {
// Response performance mode: In this mode very low virtual machine latency
// is provided. V8 will try to avoid JavaScript execution interruptions.
// Throughput may be throttled.
PERFORMANCE_RESPONSE,
// Animation performance mode: In this mode low virtual machine latency is
// provided. V8 will try to avoid as many JavaScript execution interruptions
// as possible. Throughput may be throttled. This is the default mode.
PERFORMANCE_ANIMATION,
// Idle performance mode: The embedder is idle. V8 can complete deferred work
// in this mode.
PERFORMANCE_IDLE,
// Load performance mode: In this mode high throughput is provided. V8 may
// turn off latency optimizations.
PERFORMANCE_LOAD
};
/**
* Memory pressure level for the MemoryPressureNotification.
* kNone hints V8 that there is no memory pressure.
@ -196,11 +174,95 @@ enum RAILMode : unsigned {
*/
enum class MemoryPressureLevel { kNone, kModerate, kCritical };
/**
* Signal for dependants of contexts. Useful for
* `ContextDisposedNotification()` to implement different strategies.
*/
enum class ContextDependants {
/** Context has no dependants. These are usually top-level contexts. */
kNoDependants,
/** Context has some dependants, i.e., it may depend on other contexts. This
is usually the case for inner contexts. */
kSomeDependants
};
/**
* Indicator for the stack state.
*/
using StackState = cppgc::EmbedderStackState;
/**
* The set of V8 isolates in a process is partitioned into groups. Each group
* has its own sandbox (if V8 was configured with support for the sandbox) and
* pointer-compression cage (if configured with pointer compression).
*
* By default, all isolates are placed in the same group. This is the most
* efficient configuration in terms of speed and memory use. However, with
* pointer compression enabled, total heap usage of isolates in a group
* cannot exceed 4 GB, not counting array buffers and other off-heap storage.
* Using multiple isolate groups can allow embedders to allocate more than 4GB
* of objects with pointer compression enabled, if the embedder's use case can
* span multiple isolates.
*
* Creating an isolate group reserves a range of virtual memory addresses. A
* group's memory mapping will be released when the last isolate in the group is
* disposed, and there are no more live IsolateGroup objects that refer to it.
*
* Note that Isolate groups are reference counted, and the IsolateGroup type
* is a reference to one.
*
* Note that it will not be possible to pass shared JS objects across an
* IsolateGroup boundary.
*
*/
class V8_EXPORT IsolateGroup {
public:
/**
* Get the default isolate group. If this V8's build configuration only
* supports a single group, this is a reference to that single group.
* Otherwise this is a group like any other, distinguished only
* in that it is the first group.
*/
static IsolateGroup GetDefault();
/**
* Return true if new isolate groups can be created at run-time, or false if
* all isolates must be in the same group.
*/
static bool CanCreateNewGroups();
/**
* Create a new isolate group. If this V8's build configuration only supports
* a single group, abort.
*/
static IsolateGroup Create();
IsolateGroup(IsolateGroup&& other);
IsolateGroup& operator=(IsolateGroup&& other);
IsolateGroup(const IsolateGroup&);
IsolateGroup& operator=(const IsolateGroup&);
~IsolateGroup();
bool operator==(const IsolateGroup& other) const {
return isolate_group_ == other.isolate_group_;
}
bool operator!=(const IsolateGroup& other) const {
return !operator==(other);
}
private:
friend class Isolate;
friend class ArrayBuffer::Allocator;
// The isolate_group pointer should be already acquired.
explicit IsolateGroup(internal::IsolateGroup*&& isolate_group);
internal::IsolateGroup* isolate_group_;
};
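// An embedder-side sketch of the IsolateGroup API declared above; it assumes
// the platform is already initialized and |params| carries a valid
// array_buffer_allocator, and the helper name is purely illustrative.
inline v8::Isolate* NewIsolateInFreshGroup(
    const v8::Isolate::CreateParams& params) {
  // Create() aborts on builds that only support a single group, so fall back
  // to the default group in that case.
  v8::IsolateGroup group = v8::IsolateGroup::CanCreateNewGroups()
                               ? v8::IsolateGroup::Create()
                               : v8::IsolateGroup::GetDefault();
  // Isolates created with this overload live in |group| and share its sandbox
  // and pointer-compression cage (when those features are configured).
  return v8::Isolate::New(group, params);
}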
/**
* Isolate represents an isolated instance of the V8 engine. V8 isolates have
* completely separate states. Objects from one isolate must not be used in
@ -524,7 +586,7 @@ class V8_EXPORT Isolate {
kDurationFormat = 117,
kInvalidatedNumberStringNotRegexpLikeProtector = 118,
kOBSOLETE_RegExpUnicodeSetIncompatibilitiesWithUnicodeMode = 119,
kImportAssertionDeprecatedSyntax = 120,
kOBSOLETE_ImportAssertionDeprecatedSyntax = 120,
kLocaleInfoObsoletedGetters = 121,
kLocaleInfoFunctions = 122,
kCompileHintsMagicAll = 123,
@ -549,6 +611,29 @@ class V8_EXPORT Isolate {
kDocumentAllLegacyConstruct = 142,
kConsoleContext = 143,
kWasmImportedStringsUtf8 = 144,
kResizableArrayBuffer = 145,
kGrowableSharedArrayBuffer = 146,
kArrayByCopy = 147,
kArrayFromAsync = 148,
kIteratorMethods = 149,
kPromiseAny = 150,
kSetMethods = 151,
kArrayFindLast = 152,
kArrayGroup = 153,
kArrayBufferTransfer = 154,
kPromiseWithResolvers = 155,
kAtomicsWaitAsync = 156,
kExtendingNonExtensibleWithPrivate = 157,
kPromiseTry = 158,
kStringReplaceAll = 159,
kStringWellFormed = 160,
kWeakReferences = 161,
kErrorIsError = 162,
kInvalidatedTypedArrayLengthLookupChainProtector = 163,
kRegExpEscape = 164,
kFloat16Array = 165,
kExplicitResourceManagement = 166,
kWasmBranchHinting = 167,
// If you add new values here, you'll also need to update Chromium's:
// web_feature.mojom, use_counter_callback.cc, and enums.xml. V8 changes to
@ -589,9 +674,9 @@ class V8_EXPORT Isolate {
* currently entered isolate.
*
* Only Isolate::GetData() and Isolate::SetData(), which access the
* embedder-controlled parts of the isolate, are allowed to be called on the
* uninitialized isolate. To initialize the isolate, call
* `Isolate::Initialize()` or initialize a `SnapshotCreator`.
* embedder-controlled parts of the isolate, as well as Isolate::GetGroup(),
* are allowed to be called on the uninitialized isolate. To initialize the
* isolate, call `Isolate::Initialize()` or initialize a `SnapshotCreator`.
*
* When an isolate is no longer used its resources should be freed
* by calling Dispose(). Using the delete operator is not allowed.
@ -599,6 +684,12 @@ class V8_EXPORT Isolate {
* V8::Initialize() must have run prior to this.
*/
static Isolate* Allocate();
static Isolate* Allocate(const IsolateGroup& group);
/**
* Return the group for this isolate.
*/
IsolateGroup GetGroup() const;
/**
* Initialize an Isolate previously allocated by Isolate::Allocate().
@ -615,6 +706,7 @@ class V8_EXPORT Isolate {
* V8::Initialize() must have run prior to this.
*/
static Isolate* New(const CreateParams& params);
static Isolate* New(const IsolateGroup& group, const CreateParams& params);
/**
* Returns the entered isolate for the current thread or NULL in
@ -672,6 +764,18 @@ class V8_EXPORT Isolate {
void SetHostImportModuleDynamicallyCallback(
HostImportModuleDynamicallyCallback callback);
/**
* This specifies the callback called by the upcoming dynamic
* import() and import.source() language feature to load modules.
*
* This API is experimental and is expected to be changed or removed in the
* future. The callback is currently only called for source-phase
* imports. Evaluation-phase imports use the existing
* HostImportModuleDynamicallyCallback callback.
*/
void SetHostImportModuleWithPhaseDynamicallyCallback(
HostImportModuleWithPhaseDynamicallyCallback callback);
/**
* This specifies the callback called by the upcoming import.meta
* language feature to retrieve host-defined meta data for a module.
@ -686,6 +790,14 @@ class V8_EXPORT Isolate {
void SetHostCreateShadowRealmContextCallback(
HostCreateShadowRealmContextCallback callback);
/**
* Set the callback that checks whether Error.isError should return true for
* a JSApiWrapper object, i.e. whether it represents a native JS error. For
* example, in an HTML embedder, DOMExceptions are considered native errors.
*/
void SetIsJSApiWrapperNativeErrorCallback(
IsJSApiWrapperNativeErrorCallback callback);
/**
* This specifies the callback called when the stack property of Error
* is accessed.
@ -701,7 +813,10 @@ class V8_EXPORT Isolate {
/**
* This specifies the callback called when an ETW tracing session starts.
*/
V8_DEPRECATE_SOON("Use SetFilterETWSessionByURL2Callback instead")
void SetFilterETWSessionByURLCallback(FilterETWSessionByURLCallback callback);
void SetFilterETWSessionByURL2Callback(
FilterETWSessionByURL2Callback callback);
#endif // V8_OS_WIN
/**
@ -720,6 +835,14 @@ class V8_EXPORT Isolate {
*/
void SetBatterySaverMode(bool battery_saver_mode_enabled);
/**
* Optional request from the embedder to tune V8 towards memory efficiency
* rather than speed if `memory_saver_mode_enabled` is true, because the
* embedder is in memory saver mode. If false, the correct tuning is left
* to V8 to decide.
*/
void SetMemorySaverMode(bool memory_saver_mode_enabled);
/**
* Drop non-essential caches. Should only be called from testing code.
* The method can potentially block for a long time and does not necessarily
@ -887,18 +1010,13 @@ class V8_EXPORT Isolate {
size_t frames_limit, SampleInfo* sample_info);
/**
* Adjusts the amount of registered external memory. Used to give V8 an
* indication of the amount of externally allocated memory that is kept alive
* by JavaScript objects. V8 uses this to decide when to perform global
* garbage collections. Registering externally allocated memory will trigger
* global garbage collections more often than it would otherwise in an attempt
* to garbage collect the JavaScript objects that keep the externally
* allocated memory alive.
* Adjusts the amount of registered external memory.
*
* \param change_in_bytes the change in externally allocated memory that is
* kept alive by JavaScript objects.
* \returns the adjusted value.
*/
V8_DEPRECATE_SOON("Use ExternalMemoryAccounter instead.")
int64_t AdjustAmountOfExternalAllocatedMemory(int64_t change_in_bytes);
/**
@ -963,6 +1081,14 @@ class V8_EXPORT Isolate {
*/
Local<Value> ThrowException(Local<Value> exception);
/**
* Returns true if an exception was thrown but not processed yet by an
* exception handler on JavaScript side or by v8::TryCatch handler.
*
* This is an experimental feature and may still change significantly.
*/
bool HasPendingException();
using GCCallback = void (*)(Isolate* isolate, GCType type,
GCCallbackFlags flags);
using GCCallbackWithData = void (*)(Isolate* isolate, GCType type,
@ -1046,28 +1172,14 @@ class V8_EXPORT Isolate {
*/
void SetEmbedderRootsHandler(EmbedderRootsHandler* handler);
/**
* Attaches a managed C++ heap as an extension to the JavaScript heap. The
* embedder maintains ownership of the CppHeap. At most one C++ heap can be
* attached to V8.
*
* Multi-threaded use requires the use of v8::Locker/v8::Unlocker, see
* CppHeap.
*
* If a CppHeap is set via CreateParams, then this call is a noop.
*/
V8_DEPRECATE_SOON(
"Set the heap on Isolate creation using CreateParams instead.")
void AttachCppHeap(CppHeap*);
using ReleaseCppHeapCallback = void (*)(std::unique_ptr<CppHeap>);
/**
* Detaches a managed C++ heap if one was attached using `AttachCppHeap()`.
*
* If a CppHeap is set via CreateParams, then this call is a noop.
* Sets a callback on the isolate that gets called when the CppHeap gets
* detached. The callback can then either take ownership of the CppHeap, or
* the CppHeap gets deallocated.
*/
V8_DEPRECATE_SOON(
"Set the heap on Isolate creation using CreateParams instead.")
void DetachCppHeap();
void SetReleaseCppHeapCallbackForTesting(ReleaseCppHeapCallback callback);
/**
* \returns the C++ heap managed by V8. Only available if such a heap has been
@ -1075,85 +1187,6 @@ class V8_EXPORT Isolate {
*/
CppHeap* GetCppHeap() const;
/**
* Use for |AtomicsWaitCallback| to indicate the type of event it receives.
*/
enum class AtomicsWaitEvent {
/** Indicates that this call is happening before waiting. */
kStartWait,
/** `Atomics.wait()` finished because of an `Atomics.wake()` call. */
kWokenUp,
/** `Atomics.wait()` finished because it timed out. */
kTimedOut,
/** `Atomics.wait()` was interrupted through |TerminateExecution()|. */
kTerminatedExecution,
/** `Atomics.wait()` was stopped through |AtomicsWaitWakeHandle|. */
kAPIStopped,
/** `Atomics.wait()` did not wait, as the initial condition was not met. */
kNotEqual
};
/**
* Passed to |AtomicsWaitCallback| as a means of stopping an ongoing
* `Atomics.wait` call.
*/
class V8_EXPORT AtomicsWaitWakeHandle {
public:
/**
* Stop this `Atomics.wait()` call and call the |AtomicsWaitCallback|
* with |kAPIStopped|.
*
* This function may be called from another thread. The caller has to ensure
* through proper synchronization that it is not called after
* the finishing |AtomicsWaitCallback|.
*
* Note that the ECMAScript specification does not plan for the possibility
* of wakeups that are neither coming from a timeout or an `Atomics.wake()`
* call, so this may invalidate assumptions made by existing code.
* The embedder may accordingly wish to schedule an exception in the
* finishing |AtomicsWaitCallback|.
*/
void Wake();
};
/**
* Embedder callback for `Atomics.wait()` that can be added through
* |SetAtomicsWaitCallback|.
*
* This will be called just before starting to wait with the |event| value
* |kStartWait| and after finishing waiting with one of the other
* values of |AtomicsWaitEvent| inside of an `Atomics.wait()` call.
*
* |array_buffer| will refer to the underlying SharedArrayBuffer,
* |offset_in_bytes| to the location of the waited-on memory address inside
* the SharedArrayBuffer.
*
* |value| and |timeout_in_ms| will be the values passed to
* the `Atomics.wait()` call. If no timeout was used, |timeout_in_ms|
* will be `INFINITY`.
*
* In the |kStartWait| callback, |stop_handle| will be an object that
* is only valid until the corresponding finishing callback and that
* can be used to stop the wait process while it is happening.
*
* This callback may schedule exceptions, *unless* |event| is equal to
* |kTerminatedExecution|.
*/
using AtomicsWaitCallback = void (*)(AtomicsWaitEvent event,
Local<SharedArrayBuffer> array_buffer,
size_t offset_in_bytes, int64_t value,
double timeout_in_ms,
AtomicsWaitWakeHandle* stop_handle,
void* data);
/**
* Set a new |AtomicsWaitCallback|. This overrides an earlier
* |AtomicsWaitCallback|, if there was any. If |callback| is nullptr,
* this unsets the callback. |data| will be passed to the callback
* as its last parameter.
*/
void SetAtomicsWaitCallback(AtomicsWaitCallback callback, void* data);
using GetExternallyAllocatedMemoryInBytesCallback = size_t (*)();
/**
@ -1398,8 +1431,18 @@ class V8_EXPORT Isolate {
* The optional parameter |dependant_context| specifies whether the disposed
* context was depending on state from other contexts or not.
*/
V8_DEPRECATE_SOON("Use version that passes ContextDependants.")
int ContextDisposedNotification(bool dependant_context = true);
/**
* Optional notification that a context has been disposed. V8 uses these
* notifications to guide heuristics on e.g. GC or compilers.
*
* \param dependants A signal on whether this context possibly had any
* dependants.
*/
void ContextDisposedNotification(ContextDependants dependants);
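/**
 * Usage sketch (embedder side), assuming |isolate| is live and the context
 * being torn down is a nested context, e.g. one created for an iframe:
 *
 *   isolate->ContextDisposedNotification(
 *       v8::ContextDependants::kSomeDependants);
 *
 * A top-level context without dependants would pass
 * v8::ContextDependants::kNoDependants instead.
 */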
/**
* Optional notification that the isolate switched to the foreground.
* V8 uses these notifications to guide heuristics.
@ -1421,18 +1464,26 @@ class V8_EXPORT Isolate {
void SetPriority(Priority priority);
/**
* Optional notification to tell V8 the current performance requirements
* of the embedder based on RAIL.
* Optional notification to tell V8 whether the embedder is currently loading
* resources. If the embedder uses this notification, it should call
* SetIsLoading(true) when loading starts and SetIsLoading(false) when it
* ends.
* It's valid to call SetIsLoading(true) again while loading, which will
* update the timestamp when V8 considers the load started. Calling
* SetIsLoading(false) while not loading does nothing.
* V8 uses these notifications to guide heuristics.
* This is an unfinished experimental feature. Semantics and implementation
* may change frequently.
*/
void SetRAILMode(RAILMode rail_mode);
void SetIsLoading(bool is_loading);
/**
* Update load start time of the RAIL mode
* Optional notification to tell V8 whether the embedder is currently frozen.
* V8 uses these notifications to guide heuristics.
* This is an unfinished experimental feature. Semantics and implementation
* may change frequently.
*/
void UpdateLoadStartTime();
void Freeze(bool is_frozen);
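/**
 * Usage sketch for the two notifications above, assuming a browser-like
 * embedder that tracks main-resource loads and background-tab freezing:
 *
 *   isolate->SetIsLoading(true);   // navigation/load started
 *   // ... fetch resources, run scripts ...
 *   isolate->SetIsLoading(false);  // load finished
 *
 *   isolate->Freeze(true);         // tab frozen in the background
 *   isolate->Freeze(false);        // tab resumed
 */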
/**
* Optional notification to tell V8 the current isolate is used for debugging
@ -1665,14 +1716,6 @@ class V8_EXPORT Isolate {
bool capture, int frame_limit = 10,
StackTrace::StackTraceOptions options = StackTrace::kOverview);
/**
* Iterates through all external resources referenced from current isolate
* heap. GC is not invoked prior to iterating, therefore there is no
* guarantee that visited objects are still alive.
*/
V8_DEPRECATED("Will be removed without replacement. crbug.com/v8/14172")
void VisitExternalResources(ExternalResourceVisitor* visitor);
/**
* Check if this isolate is in use.
* True if at least one thread Enter'ed this isolate.
@ -1731,6 +1774,11 @@ class V8_EXPORT Isolate {
*/
std::string GetDefaultLocale();
/**
* Returns the hash seed for that isolate, for testing purposes.
*/
uint64_t GetHashSeed();
Isolate() = delete;
~Isolate() = delete;
Isolate(const Isolate&) = delete;
@ -1745,8 +1793,11 @@ class V8_EXPORT Isolate {
private:
template <class K, class V, class Traits>
friend class PersistentValueMapBase;
friend class ExternalMemoryAccounter;
internal::Address* GetDataFromSnapshotOnce(size_t index);
internal::ValueHelper::InternalRepresentationType GetDataFromSnapshotOnce(
size_t index);
int64_t AdjustAmountOfExternalAllocatedMemoryImpl(int64_t change_in_bytes);
void HandleExternalMemoryInterrupt();
};
@ -1767,10 +1818,10 @@ uint32_t Isolate::GetNumberOfDataSlots() {
template <class T>
MaybeLocal<T> Isolate::GetDataFromSnapshotOnce(size_t index) {
if (auto slot = GetDataFromSnapshotOnce(index); slot) {
internal::PerformCastCheck(
internal::ValueHelper::SlotAsValue<T, false>(slot));
return Local<T>::FromSlot(slot);
if (auto repr = GetDataFromSnapshotOnce(index);
repr != internal::ValueHelper::kEmpty) {
internal::PerformCastCheck(internal::ValueHelper::ReprAsValue<T>(repr));
return Local<T>::FromRepr(repr);
}
return {};
}
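// An embedder-side sketch of the repr-based accessor above; it assumes a
// v8::String was added at index 0 via SnapshotCreator::AddData() when the
// snapshot blob was produced, and the helper name is illustrative only.
inline v8::MaybeLocal<v8::String> ReadSnapshotString(v8::Isolate* isolate) {
  // As the name suggests, each data slot can be consumed only once; later
  // calls with the same index yield an empty MaybeLocal.
  return isolate->GetDataFromSnapshotOnce<v8::String>(0);
}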


@ -179,8 +179,14 @@ class LocalBase : public api_internal::DirectHandleBase {
}
V8_INLINE static LocalBase<T> FromSlot(internal::Address* slot) {
if (slot == nullptr) return LocalBase<T>();
return LocalBase<T>(*slot);
}
V8_INLINE static LocalBase<T> FromRepr(
internal::ValueHelper::InternalRepresentationType repr) {
return LocalBase<T>(repr);
}
};
#else // !V8_ENABLE_DIRECT_HANDLE
@ -213,6 +219,11 @@ class LocalBase : public api_internal::IndirectHandleBase {
V8_INLINE static LocalBase<T> FromSlot(internal::Address* slot) {
return LocalBase<T>(slot);
}
V8_INLINE static LocalBase<T> FromRepr(
internal::ValueHelper::InternalRepresentationType repr) {
return LocalBase<T>(repr);
}
};
#endif // V8_ENABLE_DIRECT_HANDLE
@ -255,17 +266,19 @@ class V8_TRIVIAL_ABI Local : public LocalBase<T>,
#endif
{
public:
/**
* Default constructor: Returns an empty handle.
*/
V8_INLINE Local() = default;
template <class S>
V8_INLINE Local(Local<S> that) : LocalBase<T>(that) {
/**
* This check fails when trying to convert between incompatible
* handles. For example, converting from a Local<String> to a
* Local<Number>.
* Constructor for handling automatic up casting.
* Ex. Local<Object> can be passed when Local<Value> is expected but not
* the other way round.
*/
static_assert(std::is_base_of<T, S>::value, "type check");
}
template <class S>
requires std::is_base_of_v<T, S>
V8_INLINE Local(Local<S> that) : LocalBase<T>(that) {}
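/**
 * Sketch of the resulting casting rules, assuming `object` is a non-empty
 * Local<Object>:
 *
 *   v8::Local<v8::Value> value = object;                  // implicit up cast
 *   v8::Local<v8::Object> back = value.As<v8::Object>();  // explicit down cast
 *
 * The implicit conversion in the other direction (Value to Object) does not
 * compile, because the requires-clause above only accepts derived-to-base
 * conversions; the down cast is only valid if the handle actually refers to
 * an Object or is empty.
 */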
V8_INLINE T* operator->() const { return this->template value<T>(); }
@ -305,7 +318,7 @@ class V8_TRIVIAL_ABI Local : public LocalBase<T>,
/**
* Cast a handle to a subclass, e.g. Local<Value> to Local<Object>.
* This is only valid if the handle actually refers to a value of the
* target type.
* target type or if the handle is empty.
*/
template <class S>
V8_INLINE static Local<T> Cast(Local<S> that) {
@ -321,7 +334,7 @@ class V8_TRIVIAL_ABI Local : public LocalBase<T>,
/**
* Calling this is equivalent to Local<S>::Cast().
* In particular, this is only valid if the handle actually refers to a value
* of the target type.
* of the target type or if the handle is empty.
*/
template <class S>
V8_INLINE Local<S> As() const {
@ -396,6 +409,11 @@ class V8_TRIVIAL_ABI Local : public LocalBase<T>,
V8_INLINE explicit Local(const LocalBase<T>& other) : LocalBase<T>(other) {}
V8_INLINE static Local<T> FromRepr(
internal::ValueHelper::InternalRepresentationType repr) {
return Local<T>(LocalBase<T>::FromRepr(repr));
}
V8_INLINE static Local<T> FromSlot(internal::Address* slot) {
return Local<T>(LocalBase<T>::FromSlot(slot));
}
@ -434,8 +452,9 @@ class V8_TRIVIAL_ABI LocalUnchecked : public Local<T> {
#if defined(V8_ENABLE_LOCAL_OFF_STACK_CHECK) && V8_HAS_ATTRIBUTE_TRIVIAL_ABI
// In this case, the check is also enforced in the copy constructor and we
// need to suppress it.
LocalUnchecked(const LocalUnchecked& other)
: Local<T>(other, Local<T>::do_not_check) noexcept {}
LocalUnchecked(
const LocalUnchecked& other) noexcept // NOLINT(runtime/explicit)
: Local<T>(other, Local<T>::do_not_check) {}
LocalUnchecked& operator=(const LocalUnchecked&) noexcept = default;
#endif
@ -455,11 +474,9 @@ class StrongRootAllocator<LocalUnchecked<T>> : public StrongRootAllocatorBase {
static_assert(std::is_standard_layout_v<value_type>);
static_assert(sizeof(value_type) == sizeof(Address));
explicit StrongRootAllocator(Heap* heap) : StrongRootAllocatorBase(heap) {}
explicit StrongRootAllocator(Isolate* isolate)
: StrongRootAllocatorBase(isolate) {}
explicit StrongRootAllocator(v8::Isolate* isolate)
: StrongRootAllocatorBase(reinterpret_cast<Isolate*>(isolate)) {}
template <typename HeapOrIsolateT>
explicit StrongRootAllocator(HeapOrIsolateT* heap_or_isolate)
: StrongRootAllocatorBase(heap_or_isolate) {}
template <typename U>
StrongRootAllocator(const StrongRootAllocator<U>& other) noexcept
: StrongRootAllocatorBase(other) {}
@ -565,7 +582,11 @@ class LocalVector {
void push_back(const Local<T>& x) { backing_.push_back(x); }
void pop_back() { backing_.pop_back(); }
void emplace_back(const Local<T>& x) { backing_.emplace_back(x); }
template <typename... Args>
void emplace_back(Args&&... args) {
backing_.push_back(value_type{std::forward<Args>(args)...});
}
void clear() noexcept { backing_.clear(); }
void resize(size_t n) { backing_.resize(n); }
@ -613,9 +634,22 @@ using Handle = Local<T>;
template <class T>
class MaybeLocal {
public:
V8_INLINE MaybeLocal() : local_() {}
/**
* Default constructor: Returns an empty handle.
*/
V8_INLINE MaybeLocal() = default;
/**
* Implicitly construct MaybeLocal from Local.
*/
template <class S>
requires std::is_base_of_v<T, S>
V8_INLINE MaybeLocal(Local<S> that) : local_(that) {}
/**
* Implicitly up-cast MaybeLocal<S> to MaybeLocal<T> if T is a base of S.
*/
template <class S>
requires std::is_base_of_v<T, S>
V8_INLINE MaybeLocal(MaybeLocal<S> that) : local_(that.local_) {}
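/**
 * Sketch of the implicit conversions above, assuming `str` is a non-empty
 * Local<String>:
 *
 *   v8::MaybeLocal<v8::String> maybe_str = str;         // Local -> MaybeLocal
 *   v8::MaybeLocal<v8::Value> maybe_value = maybe_str;  // up cast
 *
 * Both conversions are constrained by the requires-clauses above, so only
 * derived-to-base conversions are accepted.
 */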
V8_INLINE bool IsEmpty() const { return local_.IsEmpty(); }
@ -650,23 +684,17 @@ class MaybeLocal {
/**
* Cast a handle to a subclass, e.g. MaybeLocal<Value> to MaybeLocal<Object>.
* This is only valid if the handle actually refers to a value of the target
* type.
* type or if the handle is empty.
*/
template <class S>
V8_INLINE static MaybeLocal<T> Cast(MaybeLocal<S> that) {
#ifdef V8_ENABLE_CHECKS
// If we're going to perform the type check then we have to check
// that the handle isn't empty before doing the checked cast.
if (that.IsEmpty()) return MaybeLocal<T>();
T::Cast(that.local_.template value<S>());
#endif
return MaybeLocal<T>(that.local_);
return MaybeLocal<T>{Local<T>::Cast(that.local_)};
}
/**
* Calling this is equivalent to MaybeLocal<S>::Cast().
* In particular, this is only valid if the handle actually refers to a value
* of the target type.
* of the target type or if the handle is empty.
*/
template <class S>
V8_INLINE MaybeLocal<S> As() const {


@ -8,6 +8,7 @@
#include <type_traits>
#include <utility>
#include "cppgc/internal/conditional-stack-allocated.h" // NOLINT(build/include_directory)
#include "v8-internal.h" // NOLINT(build/include_directory)
#include "v8config.h" // NOLINT(build/include_directory)
@ -29,7 +30,7 @@ V8_EXPORT void FromJustIsNothing();
* "Nothing" value is returned.
*/
template <class T>
class Maybe {
class Maybe : public cppgc::internal::ConditionalStackAllocatedBase<T> {
public:
V8_INLINE bool IsNothing() const { return !has_value_; }
V8_INLINE bool IsJust() const { return has_value_; }


@ -8,6 +8,7 @@
#include <stddef.h>
#include <array>
#include <cstddef>
#include <iterator>
#include <type_traits>
@ -229,7 +230,7 @@ class V8_EXPORT MemorySpan {
constexpr Iterator& operator+=(difference_type rhs) {
ptr_ += rhs;
return this;
return *this;
}
[[nodiscard]] friend constexpr Iterator operator+(Iterator lhs,
@ -245,7 +246,7 @@ class V8_EXPORT MemorySpan {
constexpr Iterator& operator-=(difference_type rhs) {
ptr_ -= rhs;
return this;
return *this;
}
[[nodiscard]] friend constexpr Iterator operator-(Iterator lhs,


@ -9,6 +9,7 @@
#include <iosfwd>
#include "v8-callbacks.h" // NOLINT(build/include_directory)
#include "v8-local-handle.h" // NOLINT(build/include_directory)
#include "v8-maybe.h" // NOLINT(build/include_directory)
#include "v8-primitive.h" // NOLINT(build/include_directory)
@ -185,7 +186,14 @@ class V8_EXPORT Message {
bool IsSharedCrossOrigin() const;
bool IsOpaque() const;
static void PrintCurrentStackTrace(Isolate* isolate, std::ostream& out);
/**
* If provided, the callback can be used to selectively include or redact
* frames based on their script names; returning true includes the frame.
*/
static void PrintCurrentStackTrace(
Isolate* isolate, std::ostream& out,
PrintCurrentStackTraceFilterCallback should_include_frame_callback =
nullptr);
static const int kNoLineNumberInfo = 0;
static const int kNoColumnInfo = 0;


@ -8,9 +8,11 @@
#include <stddef.h>
#include <stdint.h>
#include <optional>
#include <vector>
#include "v8-internal.h" // NOLINT(build/include_directory)
#include "v8-isolate.h" // NOLINT(build/include_directory)
#include "v8-local-handle.h" // NOLINT(build/include_directory)
#include "v8config.h" // NOLINT(build/include_directory)
@ -37,6 +39,10 @@ struct GarbageCollectionSizes {
struct GarbageCollectionFullCycle {
int reason = -1;
// The priority of the isolate during the GC cycle. A nullopt value denotes a
// mixed priority cycle, meaning the Isolate's priority was changed while the
// cycle was in progress.
std::optional<v8::Isolate::Priority> priority = std::nullopt;
GarbageCollectionPhases total;
GarbageCollectionPhases total_cpp;
GarbageCollectionPhases main_thread;
@ -86,6 +92,10 @@ using GarbageCollectionFullMainThreadBatchedIncrementalSweep =
struct GarbageCollectionYoungCycle {
int reason = -1;
// The priority of the isolate during the GC cycle. A nullopt value denotes a
// mixed priority cycle, meaning the Isolate's priority was changed while the
// cycle was in progress.
std::optional<v8::Isolate::Priority> priority = std::nullopt;
int64_t total_wall_clock_duration_in_us = -1;
int64_t main_thread_wall_clock_duration_in_us = -1;
double collection_rate_in_percent = -1.0;


@ -704,6 +704,7 @@ class V8_EXPORT Object : public Value {
* Prefer using version with Isolate parameter.
*/
MaybeLocal<Context> GetCreationContext(v8::Isolate* isolate);
V8_DEPRECATE_SOON("Use the version with the isolate argument.")
MaybeLocal<Context> GetCreationContext();
/**
@ -712,13 +713,17 @@ class V8_EXPORT Object : public Value {
* Prefer using version with Isolate parameter.
**/
Local<Context> GetCreationContextChecked(v8::Isolate* isolate);
V8_DEPRECATE_SOON("Use the version with the isolate argument.")
Local<Context> GetCreationContextChecked();
/** Same as above, but works for Persistents */
V8_INLINE static MaybeLocal<Context> GetCreationContext(
const PersistentBase<Object>& object) {
return object.template value<Object>()->GetCreationContext();
v8::Isolate* isolate, const PersistentBase<Object>& object) {
return object.template value<Object>()->GetCreationContext(isolate);
}
V8_DEPRECATE_SOON("Use the version with the isolate argument.")
V8_INLINE static MaybeLocal<Context> GetCreationContext(
const PersistentBase<Object>& object);
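/**
 * Usage sketch with the isolate-taking overload, assuming `persistent` is a
 * non-empty v8::Persistent<v8::Object>:
 *
 *   v8::MaybeLocal<v8::Context> creation_context =
 *       v8::Object::GetCreationContext(isolate, persistent);
 */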
/**
* Gets the context in which the object was created (see GetCreationContext())


@ -46,7 +46,11 @@ class Eternal : public api_internal::IndirectHandleBase {
public:
V8_INLINE Eternal() = default;
/**
* Constructor for handling automatic up casting.
*/
template <class S>
requires(std::is_base_of_v<T, S>)
V8_INLINE Eternal(Isolate* isolate, Local<S> handle) {
Set(isolate, handle);
}
@ -59,8 +63,8 @@ class Eternal : public api_internal::IndirectHandleBase {
}
template <class S>
requires(std::is_base_of_v<T, S>)
void Set(Isolate* isolate, Local<S> handle) {
static_assert(std::is_base_of<T, S>::value, "type check");
slot() =
api_internal::Eternalize(isolate, *handle.template UnsafeAs<Value>());
}
@ -251,28 +255,26 @@ class Persistent : public PersistentBase<T> {
V8_INLINE Persistent() = default;
/**
* Construct a Persistent from a Local.
* Construct a Persistent from a Local with automatic up casting.
* When the Local is non-empty, a new storage cell is created
* pointing to the same object, and no flags are set.
*/
template <class S>
requires(std::is_base_of_v<T, S>)
V8_INLINE Persistent(Isolate* isolate, Local<S> that)
: PersistentBase<T>(
PersistentBase<T>::New(isolate, that.template value<S>())) {
static_assert(std::is_base_of<T, S>::value, "type check");
}
PersistentBase<T>::New(isolate, that.template value<S>())) {}
/**
* Construct a Persistent from a Persistent.
* Construct a Persistent from a Persistent with automatic up casting.
* When the Persistent is non-empty, a new storage cell is created
* pointing to the same object, and no flags are set.
*/
template <class S, class M2>
requires(std::is_base_of_v<T, S>)
V8_INLINE Persistent(Isolate* isolate, const Persistent<S, M2>& that)
: PersistentBase<T>(
PersistentBase<T>::New(isolate, that.template value<S>())) {
static_assert(std::is_base_of<T, S>::value, "type check");
}
PersistentBase<T>::New(isolate, that.template value<S>())) {}
/**
* The copy constructors and assignment operator create a Persistent
@ -352,28 +354,26 @@ class Global : public PersistentBase<T> {
V8_INLINE Global() = default;
/**
* Construct a Global from a Local.
* Construct a Global from a Local with automatic up casting.
* When the Local is non-empty, a new storage cell is created
* pointing to the same object, and no flags are set.
*/
template <class S>
requires(std::is_base_of_v<T, S>)
V8_INLINE Global(Isolate* isolate, Local<S> that)
: PersistentBase<T>(
PersistentBase<T>::New(isolate, that.template value<S>())) {
static_assert(std::is_base_of<T, S>::value, "type check");
}
PersistentBase<T>::New(isolate, that.template value<S>())) {}
/**
* Construct a Global from a PersistentBase.
* Construct a Global from a PersistentBase with automatic up casting.
* When the Persistent is non-empty, a new storage cell is created
* pointing to the same object, and no flags are set.
*/
template <class S>
requires(std::is_base_of_v<T, S>)
V8_INLINE Global(Isolate* isolate, const PersistentBase<S>& that)
: PersistentBase<T>(
PersistentBase<T>::New(isolate, that.template value<S>())) {
static_assert(std::is_base_of<T, S>::value, "type check");
}
PersistentBase<T>::New(isolate, that.template value<S>())) {}
/**
* Move constructor.


@ -1063,7 +1063,7 @@ class Platform {
* Allows the embedder to manage memory page allocations.
* Returning nullptr will cause V8 to use the default page allocator.
*/
virtual PageAllocator* GetPageAllocator() = 0;
virtual PageAllocator* GetPageAllocator() { return nullptr; }
/**
* Allows the embedder to provide an allocator that uses per-thread memory
@ -1116,6 +1116,7 @@ class Platform {
* Embedders should override PostTaskOnWorkerThreadImpl() instead of
* CallOnWorkerThread().
*/
V8_DEPRECATE_SOON("Use PostTaskOnWorkerThread instead.")
void CallOnWorkerThread(
std::unique_ptr<Task> task,
const SourceLocation& location = SourceLocation::Current()) {
@ -1129,6 +1130,7 @@ class Platform {
* Embedders should override PostTaskOnWorkerThreadImpl() instead of
* CallBlockingTaskOnWorkerThread().
*/
V8_DEPRECATE_SOON("Use PostTaskOnWorkerThread instead.")
void CallBlockingTaskOnWorkerThread(
std::unique_ptr<Task> task,
const SourceLocation& location = SourceLocation::Current()) {
@ -1143,6 +1145,7 @@ class Platform {
* Embedders should override PostTaskOnWorkerThreadImpl() instead of
* CallLowPriorityTaskOnWorkerThread().
*/
V8_DEPRECATE_SOON("Use PostTaskOnWorkerThread instead.")
void CallLowPriorityTaskOnWorkerThread(
std::unique_ptr<Task> task,
const SourceLocation& location = SourceLocation::Current()) {
@ -1158,6 +1161,7 @@ class Platform {
* Embedders should override PostDelayedTaskOnWorkerThreadImpl() instead of
* CallDelayedOnWorkerThread().
*/
V8_DEPRECATE_SOON("Use PostDelayedTaskOnWorkerThread instead.")
void CallDelayedOnWorkerThread(
std::unique_ptr<Task> task, double delay_in_seconds,
const SourceLocation& location = SourceLocation::Current()) {
@ -1166,6 +1170,31 @@ class Platform {
location);
}
/**
* Schedules a task to be invoked on a worker thread.
* Embedders should override PostTaskOnWorkerThreadImpl() instead of
* PostTaskOnWorkerThread().
*/
void PostTaskOnWorkerThread(
TaskPriority priority, std::unique_ptr<Task> task,
const SourceLocation& location = SourceLocation::Current()) {
PostTaskOnWorkerThreadImpl(priority, std::move(task), location);
}
/**
* Schedules a task to be invoked on a worker thread after |delay_in_seconds|
* expires.
* Embedders should override PostDelayedTaskOnWorkerThreadImpl() instead of
* PostDelayedTaskOnWorkerThread().
*/
void PostDelayedTaskOnWorkerThread(
TaskPriority priority, std::unique_ptr<Task> task,
double delay_in_seconds,
const SourceLocation& location = SourceLocation::Current()) {
PostDelayedTaskOnWorkerThreadImpl(priority, std::move(task),
delay_in_seconds, location);
}
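/**
 * Usage sketch, assuming |task| and |delayed_task| are embedder-provided
 * v8::Task instances and |platform| outlives all posted work:
 *
 *   platform->PostTaskOnWorkerThread(v8::TaskPriority::kUserVisible,
 *                                    std::move(task));
 *   platform->PostDelayedTaskOnWorkerThread(v8::TaskPriority::kBestEffort,
 *                                           std::move(delayed_task), 2.0);
 *
 * The second call runs |delayed_task| on a worker thread roughly two seconds
 * from now.
 */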
/**
* Returns true if idle tasks are enabled for the given |isolate|.
*/


@ -140,8 +140,14 @@ class V8_EXPORT String : public Name {
* Returns the number of bytes in the UTF-8 encoded
* representation of this string.
*/
V8_DEPRECATED("Use Utf8LengthV2 instead.")
int Utf8Length(Isolate* isolate) const;
/**
* Returns the number of bytes needed for the Utf8 encoding of this string.
*/
size_t Utf8LengthV2(Isolate* isolate) const;
/**
* Returns whether this string is known to contain only one byte data,
* i.e. ISO-8859-1 code points.
@ -194,15 +200,72 @@ class V8_EXPORT String : public Name {
};
// 16-bit character codes.
V8_DEPRECATED("Use WriteV2 instead.")
int Write(Isolate* isolate, uint16_t* buffer, int start = 0, int length = -1,
int options = NO_OPTIONS) const;
// One byte characters.
V8_DEPRECATED("Use WriteOneByteV2 instead.")
int WriteOneByte(Isolate* isolate, uint8_t* buffer, int start = 0,
int length = -1, int options = NO_OPTIONS) const;
// UTF-8 encoded characters.
V8_DEPRECATED("Use WriteUtf8V2 instead.")
int WriteUtf8(Isolate* isolate, char* buffer, int length = -1,
int* nchars_ref = nullptr, int options = NO_OPTIONS) const;
struct WriteFlags {
enum {
kNone = 0,
// Indicates that the output string should be null-terminated. In that
// case, the output buffer must include sufficient space for the
// additional null character.
kNullTerminate = 1,
// Used by WriteUtf8 to replace orphan surrogate code units with the
// unicode replacement character. Needs to be set to guarantee valid UTF-8
// output.
kReplaceInvalidUtf8 = 2
};
};
/**
* Write the contents of the string to an external buffer.
*
* Copies length characters into the output buffer starting at offset. The
* output buffer must have sufficient space for all characters and the null
* terminator if null termination is requested through the flags.
*
* \param offset The position within the string at which copying begins.
* \param length The number of characters to copy from the string.
* \param buffer The buffer into which the string will be copied.
* \param flags Various flags that influence the behavior of this operation.
*/
void WriteV2(Isolate* isolate, uint32_t offset, uint32_t length,
uint16_t* buffer, int flags = WriteFlags::kNone) const;
void WriteOneByteV2(Isolate* isolate, uint32_t offset, uint32_t length,
uint8_t* buffer, int flags = WriteFlags::kNone) const;
/**
* Encode the contents of the string as Utf8 into an external buffer.
*
* Encodes the characters of this string as Utf8 and writes them into the
* output buffer until either all characters were encoded or the buffer is
* full. Will not write partial UTF-8 sequences, preferring to stop before
* the end of the buffer. If null termination is requested, the output buffer
* will always be null terminated even if not all characters fit. In that
* case, the capacity must be at least one. The required size of the output
* buffer can be determined using Utf8LengthV2().
*
* \param buffer The buffer into which the string will be written.
* \param capacity The number of bytes available in the output buffer.
* \param flags Various flags that influence the behavior of this operation.
* \param processed_characters_return The number of processed characters from
* the buffer.
* \return The number of bytes copied to the buffer including the null
* terminator (if written).
*/
size_t WriteUtf8V2(Isolate* isolate, char* buffer, size_t capacity,
int flags = WriteFlags::kNone,
size_t* processed_characters_return = nullptr) const;
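
A migration sketch from the deprecated Utf8Length()/WriteUtf8() pair to the V2 API, illustrative only (the helper name and the lack of error handling are assumptions):

#include <string>

#include "v8.h"

// Converts a v8::String to a std::string using the new length/write pair.
std::string ToStdString(v8::Isolate* isolate, v8::Local<v8::String> str) {
  size_t length = str->Utf8LengthV2(isolate);
  std::string result(length, '\0');
  // kReplaceInvalidUtf8 keeps the output well-formed even for lone surrogates;
  // no null terminator is requested because std::string tracks its own length.
  str->WriteUtf8V2(isolate, result.data(), result.size(),
                   v8::String::WriteFlags::kReplaceInvalidUtf8);
  return result;
}
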
/**
* A zero length string.
*/
@ -240,6 +303,44 @@ class V8_EXPORT String : public Name {
*/
virtual bool IsCacheable() const { return true; }
/**
* Internally V8 will call this Unaccount method when the external string
* resource should be unaccounted for. This method can be overridden in
* subclasses to control how allocated external bytes are accounted.
*/
virtual void Unaccount(Isolate* isolate) {}
/**
* Returns an estimate of the memory occupied by this external string, to be
* used by V8 when producing a heap snapshot. If this function returns
* kDefaultMemoryEstimate, then V8 will estimate the external size based on
* the string length. This function should return only memory that is
* uniquely owned by this resource. If the resource has shared ownership of
* a secondary allocation, it can report that memory by implementing
* EstimateSharedMemoryUsage.
*/
virtual size_t EstimateMemoryUsage() const {
return kDefaultMemoryEstimate;
}
static constexpr size_t kDefaultMemoryEstimate = static_cast<size_t>(-1);
class V8_EXPORT SharedMemoryUsageRecorder {
public:
/**
* Record that a shared allocation at the given location has the given
* size.
*/
virtual void RecordSharedMemoryUsage(const void* location,
size_t size) = 0;
};
/**
* Estimates memory that this string resource may share with other string
* resources, to be used by V8 when producing a heap snapshot.
*/
virtual void EstimateSharedMemoryUsage(
SharedMemoryUsageRecorder* recorder) const {}
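
A sketch of how an embedder-defined resource might use the new accounting hooks; the resource class and its shared backing store are hypothetical:

#include <memory>
#include <string>

#include "v8.h"

class SharedBufferResource : public v8::String::ExternalOneByteStringResource {
 public:
  explicit SharedBufferResource(std::shared_ptr<std::string> buffer)
      : buffer_(std::move(buffer)) {}

  const char* data() const override { return buffer_->data(); }
  size_t length() const override { return buffer_->size(); }

  // Memory uniquely owned by this resource object itself.
  size_t EstimateMemoryUsage() const override { return sizeof(*this); }

  // Memory shared with other resources, reported with its address so that a
  // heap snapshot can attribute the allocation consistently.
  void EstimateSharedMemoryUsage(
      SharedMemoryUsageRecorder* recorder) const override {
    recorder->RecordSharedMemoryUsage(buffer_.get(), buffer_->capacity());
  }

 private:
  std::shared_ptr<std::string> buffer_;
};
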
// Disallow copying and assigning.
ExternalStringResourceBase(const ExternalStringResourceBase&) = delete;
void operator=(const ExternalStringResourceBase&) = delete;
@ -474,8 +575,20 @@ class V8_EXPORT String : public Name {
* The string is not modified if the operation fails. See NewExternal for
* information on the lifetime of the resource.
*/
V8_DEPRECATE_SOON("Use the version with the isolate argument instead.")
bool MakeExternal(ExternalStringResource* resource);
/**
* Associate an external string resource with this string by transforming it
* in place so that existing references to this string in the JavaScript heap
* will use the external string resource. The external string resource's
* character contents need to be equivalent to this string.
* Returns true if the string has been changed to be an external string.
* The string is not modified if the operation fails. See NewExternal for
* information on the lifetime of the resource.
*/
bool MakeExternal(Isolate* isolate, ExternalStringResource* resource);
/**
* Creates a new external string using the one-byte data defined in the given
* resource. When the external string is no longer live on V8's heap the
@ -496,8 +609,20 @@ class V8_EXPORT String : public Name {
* The string is not modified if the operation fails. See NewExternal for
* information on the lifetime of the resource.
*/
V8_DEPRECATE_SOON("Use the version with the isolate argument instead.")
bool MakeExternal(ExternalOneByteStringResource* resource);
/**
* Associate an external string resource with this string by transforming it
* in place so that existing references to this string in the JavaScript heap
* will use the external string resource. The external string resource's
* character contents need to be equivalent to this string.
* Returns true if the string has been changed to be an external string.
* The string is not modified if the operation fails. See NewExternal for
* information on the lifetime of the resource.
*/
bool MakeExternal(Isolate* isolate, ExternalOneByteStringResource* resource);
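
Both overloads now take an explicit Isolate*. A minimal migration sketch (illustrative; the helper is an assumption):

#include "v8.h"

bool Externalize(v8::Isolate* isolate, v8::Local<v8::String> str,
                 v8::String::ExternalOneByteStringResource* resource) {
  // Was: str->MakeExternal(resource);  (now deprecated)
  // Returns false and leaves the string unchanged if the operation fails.
  return str->MakeExternal(isolate, resource);
}
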
/**
* Returns true if this string can be made external, given the encoding for
* the external string resource.
@ -527,7 +652,7 @@ class V8_EXPORT String : public Name {
~Utf8Value();
char* operator*() { return str_; }
const char* operator*() const { return str_; }
int length() const { return length_; }
size_t length() const { return length_; }
// Disallow copying and assigning.
Utf8Value(const Utf8Value&) = delete;
@ -535,7 +660,7 @@ class V8_EXPORT String : public Name {
private:
char* str_;
int length_;
size_t length_;
};
/**
@ -557,7 +682,7 @@ class V8_EXPORT String : public Name {
~Value();
uint16_t* operator*() { return str_; }
const uint16_t* operator*() const { return str_; }
int length() const { return length_; }
uint32_t length() const { return length_; }
// Disallow copying and assigning.
Value(const Value&) = delete;
@ -565,7 +690,7 @@ class V8_EXPORT String : public Name {
private:
uint16_t* str_;
int length_;
uint32_t length_;
};
/**
@ -594,7 +719,7 @@ class V8_EXPORT String : public Name {
#endif
return data16_;
}
int length() const { return length_; }
uint32_t length() const { return length_; }
bool is_one_byte() const { return is_one_byte_; }
// Disallow copying and assigning.
@ -609,7 +734,7 @@ class V8_EXPORT String : public Name {
const uint8_t* data8_;
const uint16_t* data16_;
};
int length_;
uint32_t length_;
bool is_one_byte_;
// Avoid exposing the internal DisallowGarbageCollection scope.
alignas(internal::Internals::

View file

@ -418,8 +418,11 @@ class V8_EXPORT CpuProfiler {
* Synchronously collect current stack sample in all profilers attached to
* the |isolate|. The call does not affect number of ticks recorded for
* the current top node.
* |trace_id| is an optional identifier attached to the collected sample.
* This is useful to associate the sample with a trace event.
*/
static void CollectSample(Isolate* isolate);
static void CollectSample(
Isolate* isolate, const std::optional<uint64_t> trace_id = std::nullopt);
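
A sketch of correlating a manually collected sample with a trace event via the new optional |trace_id| (illustrative; how the id is emitted on the trace side is up to the embedder):

#include <cstdint>

#include "v8-profiler.h"

void SampleWithTraceId(v8::Isolate* isolate, uint64_t trace_event_id) {
  // Emitting the same id on a trace event allows the sample and the event to
  // be matched up later in a trace viewer.
  v8::CpuProfiler::CollectSample(isolate, trace_event_id);
}
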
/**
* Disposes the CPU profiler object.
@ -937,6 +940,15 @@ class V8_EXPORT EmbedderGraph {
*/
virtual void AddEdge(Node* from, Node* to, const char* name = nullptr) = 0;
/**
* Adds a count of bytes that are not associated with any particular Node.
* An embedder may use this to represent the size of nodes which were omitted
* from this EmbedderGraph despite being retained by the graph, or other
* overhead costs. This number will contribute to the total size in a heap
* snapshot, without being represented in the object graph.
*/
virtual void AddNativeSize(size_t size) {}
virtual ~EmbedderGraph() = default;
};
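
A sketch of a build-embedder-graph callback that uses the new AddNativeSize() hook; the byte count and registration details are illustrative:

#include "v8-profiler.h"

// Registered via HeapProfiler::AddBuildEmbedderGraphCallback (assumed here).
void BuildEmbedderGraph(v8::Isolate* isolate, v8::EmbedderGraph* graph,
                        void* data) {
  // ... add the interesting nodes and edges as before ...

  // Account for retained native memory that is not modelled as individual
  // nodes; it still contributes to the snapshot's total size.
  size_t omitted_bytes = 4096;  // placeholder value
  graph->AddNativeSize(omitted_bytes);
}
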

View file

@ -1,4 +1,3 @@
// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

View file

@ -1,4 +1,3 @@
// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

View file

@ -23,8 +23,7 @@ namespace v8 {
* type check for a supertype must succeed for any subtype.
*
* The tag is currently in practice limited to 15 bits since it needs to fit
* together with a marking bit into the unused parts of a pointer (the top 16
* bits).
* together with a marking bit into the unused parts of a pointer.
*/
enum class CppHeapPointerTag : uint16_t {
kFirstTag = 0,
@ -67,6 +66,7 @@ enum class CppHeapPointerTag : uint16_t {
// against supertypes, which cover a range of types (their subtypes).
// Both the lower- and the upper bound are inclusive. In other words, this
// struct represents the range [lower_bound, upper_bound].
// TODO(saelo): reuse internal::TagRange here.
struct CppHeapPointerTagRange {
constexpr CppHeapPointerTagRange(CppHeapPointerTag lower,
CppHeapPointerTag upper)

View file

@ -155,7 +155,7 @@ class V8_EXPORT ModuleRequest : public Data {
*/
Local<FixedArray> GetImportAttributes() const;
V8_DEPRECATE_SOON("Use GetImportAttributes instead")
V8_DEPRECATED("Use GetImportAttributes instead")
Local<FixedArray> GetImportAssertions() const {
return GetImportAttributes();
}
@ -273,6 +273,13 @@ class V8_EXPORT Module : public Data {
*/
bool IsGraphAsync() const;
/**
* Returns whether this module is individually asynchronous (for example,
* if it's a Source Text Module Record containing a top-level await).
* See [[HasTLA]] in https://tc39.es/ecma262/#sec-cyclic-module-records
*/
bool HasTopLevelAwait() const;
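
A small sketch of consulting the new predicate (illustrative):

#include "v8.h"

bool ModuleUsesTopLevelAwait(v8::Local<v8::Module> module) {
  // Only source text modules carry the [[HasTLA]] bit; guard accordingly.
  return module->IsSourceTextModule() && module->HasTopLevelAwait();
}
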
/**
* Returns whether the module is a SourceTextModule.
*/
@ -670,6 +677,7 @@ class V8_EXPORT ScriptCompiler {
kProduceCompileHints = 1 << 2,
kConsumeCompileHints = 1 << 3,
kFollowCompileHintsMagicComment = 1 << 4,
kFollowCompileHintsPerFunctionMagicComment = 1 << 5,
};
static inline bool CompileOptionsIsValid(CompileOptions compile_options) {
@ -710,7 +718,8 @@ class V8_EXPORT ScriptCompiler {
kNoCacheBecausePacScript,
kNoCacheBecauseInDocumentWrite,
kNoCacheBecauseResourceWithNoCacheHandler,
kNoCacheBecauseDeferredProduceCodeCache
kNoCacheBecauseDeferredProduceCodeCache,
kNoCacheBecauseStaticCodeCache,
};
/**

View file

@ -76,7 +76,12 @@ class V8_EXPORT SourceLocation final {
*
* \returns a human-readable string representing source location information.
*/
std::string ToString() const;
std::string ToString() const {
if (!file_) {
return {};
}
return std::string(function_) + "@" + file_ + ":" + std::to_string(line_);
}
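
With builtin source-location support the now-inlined ToString() yields "function@file:line"; without it, the string is empty. A tiny usage sketch (illustrative):

#include <iostream>

#include "v8-source-location.h"

void LogHere(const v8::SourceLocation& loc = v8::SourceLocation::Current()) {
  std::cout << loc.ToString() << std::endl;
}
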
private:
constexpr SourceLocation(const char* function, const char* file, size_t line)

View file

@ -701,6 +701,14 @@ class V8_EXPORT FunctionTemplate : public Template {
*/
bool IsLeafTemplateForApiObject(v8::Local<v8::Value> value) const;
/**
* Checks if the object can be promoted to read only space, seals it and
* prepares for promotion.
*
* This is an experimental feature and may still change significantly.
*/
void SealAndPrepareForPromotionToReadOnly();
V8_INLINE static FunctionTemplate* Cast(Data* data);
private:

23
deps/v8/include/v8-trace-categories.h vendored Normal file
View file

@ -0,0 +1,23 @@
// Copyright 2025 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef INCLUDE_V8_TRACE_CATEGORIES_H_
#define INCLUDE_V8_TRACE_CATEGORIES_H_
#if defined(V8_USE_PERFETTO)
#include "perfetto/tracing/track_event.h"
#include "v8config.h"
namespace v8 {
// Returns the perfetto TrackEventCategoryRegistry for v8 tracing categories.
V8_EXPORT const perfetto::internal::TrackEventCategoryRegistry&
GetTrackEventCategoryRegistry();
} // namespace v8
#endif // defined(V8_USE_PERFETTO)
#endif // INCLUDE_V8_TRACE_CATEGORIES_H_

View file

@ -23,15 +23,11 @@ class V8_EXPORT TypedArray : public ArrayBufferView {
* The largest supported typed array byte size. Each subclass defines a
* type-specific kMaxLength for the maximum length that can be passed to New.
*/
#if V8_ENABLE_SANDBOX
static constexpr size_t kMaxByteLength =
internal::kMaxSafeBufferSizeForSandbox;
#elif V8_HOST_ARCH_32_BIT
static constexpr size_t kMaxByteLength = std::numeric_limits<int>::max();
#else
// The maximum safe integer (2^53 - 1).
static constexpr size_t kMaxByteLength =
static_cast<size_t>((uint64_t{1} << 53) - 1);
static constexpr size_t kMaxByteLength = ArrayBuffer::kMaxByteLength;
#ifdef V8_ENABLE_SANDBOX
static_assert(v8::TypedArray::kMaxByteLength <=
v8::internal::kMaxSafeBufferSizeForSandbox);
#endif
/**
@ -253,10 +249,10 @@ class V8_EXPORT Int32Array : public TypedArray {
* An instance of Float16Array constructor.
*/
class V8_EXPORT Float16Array : public TypedArray {
public:
static constexpr size_t kMaxLength =
TypedArray::kMaxByteLength / sizeof(uint16_t);
public:
static Local<Float16Array> New(Local<ArrayBuffer> array_buffer,
size_t byte_offset, size_t length);
static Local<Float16Array> New(Local<SharedArrayBuffer> shared_array_buffer,

View file

@ -19,7 +19,7 @@ struct CalleeSavedRegisters {
};
#elif V8_TARGET_ARCH_X64 || V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_ARM64 || \
V8_TARGET_ARCH_MIPS64 || V8_TARGET_ARCH_PPC64 || V8_TARGET_ARCH_RISCV64 || \
V8_TARGET_ARCH_S390 || V8_TARGET_ARCH_LOONG64 || V8_TARGET_ARCH_RISCV32
V8_TARGET_ARCH_S390X || V8_TARGET_ARCH_LOONG64 || V8_TARGET_ARCH_RISCV32
struct CalleeSavedRegisters {};
#else
#error Target architecture was not detected as supported by v8

View file

@ -2,6 +2,9 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef INCLUDE_V8_VALUE_SERIALIZER_VERSION_H_
#define INCLUDE_V8_VALUE_SERIALIZER_VERSION_H_
/**
* Compile-time constants.
*
@ -10,9 +13,6 @@
* to V8.
*/
#ifndef INCLUDE_V8_VALUE_SERIALIZER_VERSION_H_
#define INCLUDE_V8_VALUE_SERIALIZER_VERSION_H_
#include <stdint.h>
namespace v8 {

View file

@ -346,6 +346,11 @@ class V8_EXPORT Value : public Data {
*/
bool IsWasmMemoryObject() const;
/**
* Returns true if this value is a WasmMemoryMapDescriptor.
*/
bool IsWasmMemoryMapDescriptor() const;
/**
* Returns true if this value is a WasmModuleObject.
*/
@ -361,6 +366,11 @@ class V8_EXPORT Value : public Data {
*/
bool IsModuleNamespaceObject() const;
/**
* Returns true if the value is a primitive.
*/
bool IsPrimitive() const;
/**
* Perform `ToPrimitive(value)` as specified in:
* https://tc39.es/ecma262/#sec-toprimitive.
@ -462,6 +472,14 @@ class V8_EXPORT Value : public Data {
Maybe<bool> InstanceOf(Local<Context> context, Local<Object> object);
/**
* Get the hash of this value. The hash is not guaranteed to be
* unique. For |Object| and |Name| instances the result is equal to
* |GetIdentityHash|. Hashes are not guaranteed to be stable across
* different isolates or processes.
*/
uint32_t GetHash();
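
A sketch of the new accessor (illustrative); since hashes are only stable within a process, this suits in-memory tables, not serialization:

#include <cstdint>

#include "v8.h"

uint32_t HashValue(v8::Local<v8::Value> value) {
  // Equal to GetIdentityHash() for Object and Name instances.
  return value->GetHash();
}
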
private:
V8_INLINE bool QuickIsUndefined() const;
V8_INLINE bool QuickIsNull() const;

View file

@ -9,9 +9,9 @@
// NOTE these macros are used by some of the tool scripts and the build
// system so their names cannot be changed without changing the scripts.
#define V8_MAJOR_VERSION 13
#define V8_MINOR_VERSION 0
#define V8_BUILD_NUMBER 245
#define V8_PATCH_LEVEL 25
#define V8_MINOR_VERSION 6
#define V8_BUILD_NUMBER 233
#define V8_PATCH_LEVEL 8
// Use 1 for candidates and 0 otherwise.
// (Boolean macro values are not supported by all preprocessors.)

View file

@ -199,6 +199,30 @@ class V8_EXPORT WasmStreaming final {
std::unique_ptr<WasmStreamingImpl> impl_;
};
/**
* The V8 interface for a WebAssembly memory map descriptor. This is an
* experimental feature that may change and be removed without further
* communication.
*/
class V8_EXPORT WasmMemoryMapDescriptor : public Object {
public:
WasmMemoryMapDescriptor() = delete;
V8_INLINE static WasmMemoryMapDescriptor* Cast(Value* value) {
#ifdef V8_ENABLE_CHECKS
CheckCast(value);
#endif
return static_cast<WasmMemoryMapDescriptor*>(value);
}
using WasmFileDescriptor = int32_t;
static Local<WasmMemoryMapDescriptor> New(Isolate* isolate,
WasmFileDescriptor fd);
private:
static void CheckCast(Value* object);
};
} // namespace v8
#endif // INCLUDE_V8_WASM_H_

View file

@ -5,6 +5,9 @@
#ifndef INCLUDE_V8_WEAK_CALLBACK_INFO_H_
#define INCLUDE_V8_WEAK_CALLBACK_INFO_H_
#include <cstring>
#include "cppgc/internal/conditional-stack-allocated.h" // NOLINT(build/include_directory)
#include "v8config.h" // NOLINT(build/include_directory)
namespace v8 {
@ -15,11 +18,12 @@ namespace api_internal {
V8_EXPORT void InternalFieldOutOfBounds(int index);
} // namespace api_internal
static const int kInternalFieldsInWeakCallback = 2;
static const int kEmbedderFieldsInWeakCallback = 2;
static constexpr int kInternalFieldsInWeakCallback = 2;
static constexpr int kEmbedderFieldsInWeakCallback = 2;
template <typename T>
class WeakCallbackInfo {
class WeakCallbackInfo
: public cppgc::internal::ConditionalStackAllocatedBase<T> {
public:
using Callback = void (*)(const WeakCallbackInfo<T>& data);
@ -27,21 +31,25 @@ class WeakCallbackInfo {
void* embedder_fields[kEmbedderFieldsInWeakCallback],
Callback* callback)
: isolate_(isolate), parameter_(parameter), callback_(callback) {
for (int i = 0; i < kEmbedderFieldsInWeakCallback; ++i) {
embedder_fields_[i] = embedder_fields[i];
}
memcpy(embedder_fields_, embedder_fields,
sizeof(embedder_fields[0]) * kEmbedderFieldsInWeakCallback);
}
V8_INLINE Isolate* GetIsolate() const { return isolate_; }
V8_INLINE T* GetParameter() const { return parameter_; }
V8_INLINE void* GetInternalField(int index) const;
// When first called, the embedder MUST Reset() the Global which triggered the
// callback. The Global itself is unusable for anything else. No v8 other api
// calls may be called in the first callback. Should additional work be
// required, the embedder must set a second pass callback, which will be
// called after all the initial callbacks are processed.
// Calling SetSecondPassCallback on the second pass will immediately crash.
/**
* When a weak callback is first invoked the embedders _must_ Reset() the
* handle which triggered the callback. The handle itself is unusable for
* anything else. No other V8 API calls may be called in the first callback.
* Additional work requires scheduling a second invocation via
* `SetSecondPassCallback()` which will be called some time after all the
* initial callbacks are processed.
*
* The second pass callback is prohibited from executing JavaScript. Embedders
* should schedule another callback in case this is required.
*/
void SetSecondPassCallback(Callback callback) const { *callback_ = callback; }
private:
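
A sketch of the two-pass protocol documented above; the wrapper type is hypothetical:

#include "v8.h"

struct Wrapper {
  v8::Global<v8::Object> handle;
};

void SecondPass(const v8::WeakCallbackInfo<Wrapper>& info) {
  // Heavier cleanup may happen here, but still no JavaScript execution.
  delete info.GetParameter();
}

void FirstPass(const v8::WeakCallbackInfo<Wrapper>& info) {
  // Mandatory: reset the handle that triggered the callback; no other V8 API
  // calls are allowed in this pass.
  info.GetParameter()->handle.Reset();
  info.SetSecondPassCallback(SecondPass);
}

void MakeWeak(Wrapper* wrapper) {
  wrapper->handle.SetWeak(wrapper, FirstPass, v8::WeakCallbackType::kParameter);
}
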

View file

@ -2,6 +2,9 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef INCLUDE_V8_H_
#define INCLUDE_V8_H_
/** \mainpage V8 API Reference Guide
*
* V8 is Google's open source JavaScript engine.
@ -12,9 +15,6 @@
* For other documentation see https://v8.dev/.
*/
#ifndef INCLUDE_V8_H_
#define INCLUDE_V8_H_
#include <stddef.h>
#include <stdint.h>

View file

@ -371,6 +371,7 @@ path. Add it with -I<path> to the command line
# define V8_HAS_ATTRIBUTE_UNUSED (__has_attribute(unused))
# define V8_HAS_ATTRIBUTE_USED (__has_attribute(used))
# define V8_HAS_ATTRIBUTE_RETAIN (__has_attribute(retain))
# define V8_HAS_ATTRIBUTE_OPTNONE (__has_attribute(optnone))
// Support for the "preserve_most" attribute is limited:
// - 32-bit platforms do not implement it,
// - component builds fail because _dl_runtime_resolve clobbers registers,
@ -499,6 +500,16 @@ path. Add it with -I<path> to the command line
# define V8_INLINE inline
#endif
// A macro to force better inlining of calls in a statement. Don't bother for
// debug builds.
// Use like:
// V8_INLINE_STATEMENT foo = bar(); // Will force inlining the bar() call.
#if !defined(DEBUG) && defined(__clang__) && V8_HAS_ATTRIBUTE_ALWAYS_INLINE
# define V8_INLINE_STATEMENT [[clang::always_inline]]
#else
# define V8_INLINE_STATEMENT
#endif
#if V8_HAS_BUILTIN_ASSUME
#ifdef DEBUG
// In debug mode, check assumptions in addition to adding annotations.
@ -581,15 +592,11 @@ path. Add it with -I<path> to the command line
// functions.
// Use like:
// V8_NOINLINE V8_PRESERVE_MOST void UnlikelyMethod();
#if V8_OS_WIN
# define V8_PRESERVE_MOST
#else
#if V8_HAS_ATTRIBUTE_PRESERVE_MOST
# define V8_PRESERVE_MOST __attribute__((preserve_most))
#else
# define V8_PRESERVE_MOST /* NOT SUPPORTED */
#endif
#endif
// A macro (V8_DEPRECATED) to mark classes or functions as deprecated.
@ -681,7 +688,7 @@ path. Add it with -I<path> to the command line
// V8_NODISCARD Foo() { ... };
// [[nodiscard]] comes in C++17 but supported in clang with -std >= c++11.
#if V8_HAS_CPP_ATTRIBUTE_NODISCARD
#define V8_NODISCARD
#define V8_NODISCARD [[nodiscard]]
#else
#define V8_NODISCARD /* NOT SUPPORTED */
#endif
@ -787,15 +794,11 @@ V8 shared library set USING_V8_SHARED.
#else // V8_OS_WIN
// Setup for Linux shared library export.
#if V8_HAS_ATTRIBUTE_VISIBILITY
# ifdef BUILDING_V8_SHARED
# define V8_EXPORT __attribute__ ((visibility("default")))
# else
# define V8_EXPORT
# endif
#if V8_HAS_ATTRIBUTE_VISIBILITY && (defined(BUILDING_V8_SHARED) || USING_V8_SHARED)
# define V8_EXPORT __attribute__((visibility("default")))
#else
# define V8_EXPORT
#endif
# endif // V8_HAS_ATTRIBUTE_VISIBILITY && ...
#endif // V8_OS_WIN
@ -833,13 +836,9 @@ V8 shared library set USING_V8_SHARED.
#elif defined(__PPC64__) || defined(_ARCH_PPC64)
#define V8_HOST_ARCH_PPC64 1
#define V8_HOST_ARCH_64_BIT 1
#elif defined(__s390__) || defined(__s390x__)
#define V8_HOST_ARCH_S390 1
#if defined(__s390x__)
#elif defined(__s390x__)
#define V8_HOST_ARCH_S390X 1
#define V8_HOST_ARCH_64_BIT 1
#else
#define V8_HOST_ARCH_32_BIT 1
#endif
#elif defined(__riscv) || defined(__riscv__)
#if __riscv_xlen == 64
#define V8_HOST_ARCH_RISCV64 1
@ -861,7 +860,7 @@ V8 shared library set USING_V8_SHARED.
// compiler.
#if !V8_TARGET_ARCH_X64 && !V8_TARGET_ARCH_IA32 && !V8_TARGET_ARCH_ARM && \
!V8_TARGET_ARCH_ARM64 && !V8_TARGET_ARCH_MIPS64 && \
!V8_TARGET_ARCH_PPC64 && !V8_TARGET_ARCH_S390 && \
!V8_TARGET_ARCH_PPC64 && !V8_TARGET_ARCH_S390X && \
!V8_TARGET_ARCH_RISCV64 && !V8_TARGET_ARCH_LOONG64 && \
!V8_TARGET_ARCH_RISCV32
#if defined(_M_X64) || defined(__x86_64__)
@ -878,11 +877,8 @@ V8 shared library set USING_V8_SHARED.
#define V8_TARGET_ARCH_LOONG64 1
#elif defined(_ARCH_PPC64)
#define V8_TARGET_ARCH_PPC64 1
#elif defined(__s390__)
#define V8_TARGET_ARCH_S390 1
#if defined(__s390x__)
#elif defined(__s390x__)
#define V8_TARGET_ARCH_S390X 1
#endif
#elif defined(__riscv) || defined(__riscv__)
#if __riscv_xlen == 64
#define V8_TARGET_ARCH_RISCV64 1
@ -917,12 +913,8 @@ V8 shared library set USING_V8_SHARED.
#define V8_TARGET_ARCH_64_BIT 1
#elif V8_TARGET_ARCH_PPC64
#define V8_TARGET_ARCH_64_BIT 1
#elif V8_TARGET_ARCH_S390
#if V8_TARGET_ARCH_S390X
#elif V8_TARGET_ARCH_S390X
#define V8_TARGET_ARCH_64_BIT 1
#else
#define V8_TARGET_ARCH_32_BIT 1
#endif
#elif V8_TARGET_ARCH_RISCV64
#define V8_TARGET_ARCH_64_BIT 1
#elif V8_TARGET_ARCH_RISCV32
@ -985,8 +977,8 @@ V8 shared library set USING_V8_SHARED.
#else
#define V8_TARGET_LITTLE_ENDIAN 1
#endif
#elif V8_TARGET_ARCH_S390
#if V8_TARGET_ARCH_S390_LE_SIM
#elif V8_TARGET_ARCH_S390X
#if V8_TARGET_ARCH_S390X_LE_SIM
#define V8_TARGET_LITTLE_ENDIAN 1
#else
#define V8_TARGET_BIG_ENDIAN 1

View file

@ -72,7 +72,6 @@
'V8 Linux64 - debug builder': 'debug_x64',
'V8 Linux64 - no leaptiering - debug builder': 'debug_x64_no_leaptiering',
'V8 Linux64 - no shared cage - debug builder': 'debug_x64_no_shared_cage',
'V8 Linux64 - external code space - debug - builder': 'debug_x64_external_code_space',
'V8 Linux64 - custom snapshot - debug builder': 'debug_x64_custom',
'V8 Linux64 - internal snapshot - builder': 'release_x64_internal',
'V8 Linux64 - debug - header includes - builder': 'debug_x64_header_includes',
@ -87,16 +86,15 @@
'V8 Win32 - builder (reclient)': 'release_x86_minimal_symbols_reclient',
'V8 Win32 - builder (reclient compare)': 'release_x86_minimal_symbols_reclient',
'V8 Win32 - debug builder': 'debug_x86_minimal_symbols',
'V8 Win32 - msvc - debug builder': 'debug_x86_msvc',
# TODO(machenbach): Remove after switching to x64 on infra side.
'V8 Win64 ASAN - builder': 'release_x64_asan_no_lsan',
'V8 Win64 - CET shadow stack - builder': 'release_x64_cet_shadow_stack',
'V8 Win64 - builder': 'release_x64_minimal_symbols',
'V8 Win64 - builder (reclient)': 'release_x64_minimal_symbols_reclient',
'V8 Win64 - builder (reclient compare)': 'release_x64_minimal_symbols_reclient',
'V8 Win64 - dev image': 'release_x64_minimal_symbols',
'V8 Win64 - debug builder': 'debug_x64_minimal_symbols',
'V8 Win64 - drumbrake - debug builder': 'debug_x64_drumbrake',
'V8 Win64 - msvc - builder': 'release_x64_msvc',
# Mac.
'V8 Mac64 - builder': 'release_x64',
'V8 Mac64 - debug builder': 'debug_x64',
@ -118,7 +116,6 @@
# FYI.
'V8 iOS - sim - builder': 'release_x64_ios_simulator',
'V8 Linux64 - arm64 - builder': 'release_arm64',
'V8 Linux64 - arm64 - no pointer compression - builder': 'release_arm64_no_pointer_compression',
'V8 Linux64 - arm64 - debug builder': 'debug_arm64',
'V8 Linux64 - arm64 - sim - no pointer compression - builder':
'release_simulate_arm64_no_pointer_compression',
@ -142,6 +139,7 @@
'V8 Linux64 UBSan - builder': 'release_x64_ubsan',
'V8 Linux - vtunejit': 'debug_x86_vtunejit',
'V8 Linux64 - Fuzzilli - builder': 'release_x64_fuzzilli',
'V8 Linux64 - lower limits - builder': 'release_x64_lower_limits',
'V8 Linux64 - predictable - builder': 'release_x64_predictable',
'V8 Linux64 - verify builtins': 'release_x64_verify_builtins',
'V8 Linux64 - verify deterministic': 'release_x64_verify_deterministic',
@ -233,6 +231,7 @@
'V8 Linux - arm64 - sim - builder': 'release_simulate_arm64_gcmole',
'V8 Linux - arm64 - sim - debug builder': 'debug_simulate_arm64',
'V8 Linux - arm64 - sim - gc stress - builder': 'debug_simulate_arm64',
'V8 Linux64 - arm64 - no pointer compression - builder': 'release_arm64_no_pointer_compression',
'V8 Linux64 - arm64 - no wasm - debug builder': 'debug_arm64_webassembly_disabled',
# Mips.
'V8 Linux - mips64el - sim - builder': 'release_simulate_mips64el',
@ -250,8 +249,9 @@
'v8_android_arm_compile_rel': 'release_android_arm',
'v8_android_arm_verify_deterministic_dbg': 'debug_android_arm_verify_deterministic',
'v8_android_arm64_compile_dbg': 'debug_android_arm64',
'v8_android_arm64_n5x_compile_rel': 'release_android_arm64',
'v8_android_arm64_d8_compile_rel': 'release_android_arm64',
'v8_android_arm64_n5x_compile_rel': 'release_android_arm64',
'v8_android_arm64_p7_compile_rel': 'release_android_arm64',
'v8_fuchsia_compile_rel': 'release_x64_fuchsia_trybot',
'v8_ios_simulator': 'release_x64_ios_simulator',
'v8_linux_compile_rel': 'release_x86_gcmole_trybot',
@ -281,7 +281,6 @@
'v8_linux64_no_sandbox_compile_dbg': 'debug_x64_no_sandbox',
'v8_linux64_dict_tracking_compile_dbg': 'debug_x64_dict_tracking_trybot',
'v8_linux64_disable_runtime_call_stats_compile_rel': 'release_x64_disable_runtime_call_stats',
'v8_linux64_external_code_space_compile_dbg': 'debug_x64_external_code_space',
'v8_linux64_css_compile_dbg': 'debug_x64_conservative_stack_scanning',
'v8_linux64_gc_stress_custom_snapshot_compile_dbg': 'debug_x64_trybot_custom',
'v8_linux64_gc_stress_compile_dbg': 'debug_x64_trybot',
@ -300,6 +299,7 @@
'v8_linux64_no_sandbox_compile_rel': 'release_x64_no_sandbox',
'v8_linux64_official_compile_rel': 'official_x64_on_release_branch',
'v8_linux64_predictable_compile_rel': 'release_x64_predictable',
'v8_linux64_pku_compile_dbg': 'debug_x64',
'v8_linux64_pku_compile_rel': 'release_x64',
'v8_linux64_shared_compile_rel': 'release_x64_shared_verify_heap',
'v8_linux64_single_generation_compile_dbg': 'debug_x64_single_generation',
@ -312,6 +312,7 @@
'v8_linux64_cfi_compile_rel': 'release_x64_cfi',
'v8_linux64_fuzzilli_compile_rel': 'release_x64_fuzzilli',
'v8_linux64_loong64_compile_rel': 'release_simulate_loong64',
'v8_linux64_lower_limits_compile_rel': 'release_x64_lower_limits',
'v8_linux64_msan_compile_rel': 'release_simulate_arm64_msan_minimal_symbols',
'v8_linux_riscv32_compile_rel': 'release_simulate_riscv32',
'v8_linux64_riscv64_compile_rel': 'release_simulate_riscv64',
@ -330,13 +331,12 @@
# TODO(machenbach): Remove after switching to x64 on infra side.
'v8_win_compile_dbg': 'debug_x86_trybot',
'v8_win_compile_rel': 'release_x86_trybot',
'v8_win_msvc_light_compile_dbg': 'debug_x86_msvc',
'v8_win64_asan_compile_rel': 'release_x64_asan_no_lsan',
'v8_win64_msvc_light_compile_rel': 'release_x64_msvc',
'v8_win64_cet_shadow_stack_compile_rel': 'release_x64_cet_shadow_stack',
'v8_win64_compile_dbg': 'debug_x64_minimal_symbols',
'v8_win64_drumbrake_compile_dbg': 'debug_x64_drumbrake',
'v8_win64_msvc_compile_rel': 'release_x64_msvc',
'v8_win64_compile_rel': 'release_x64_trybot',
'v8_win64_nodcheck_compile_rel': 'release_x64_minimal_symbols',
'v8_mac_arm64_compile_rel': 'release_arm64',
'v8_mac_arm64_compile_dbg': 'debug_arm64',
'v8_mac_arm64_full_compile_dbg': 'full_debug_arm64',
@ -358,6 +358,7 @@
'v8_linux_arm64_compile_dbg': 'debug_simulate_arm64',
'v8_linux_arm64_gc_stress_compile_dbg': 'debug_simulate_arm64',
'v8_linux_mips64el_compile_rel': 'release_simulate_mips64el',
'v8_numfuzz_asan_compile_rel': 'release_x64_asan_symbolized_verify_heap_turboshaft_csa',
'v8_numfuzz_compile_rel': 'release_x64',
'v8_numfuzz_compile_dbg': 'debug_x64',
'v8_numfuzz_tsan_compile_rel': 'release_x64_tsan',
@ -593,7 +594,7 @@
'release_bot', 'x64', 'asan', 'dcheck_always_on',
'v8_enable_slow_dchecks', 'v8_verify_heap', 'v8_enable_turboshaft_csa'],
'release_x64_sandbox_testing_turboshaft_csa': [
'release_bot', 'x64', 'symbolized', 'v8_enable_memory_corruption_api',
'release_bot', 'x64', 'symbolized', 'backtrace', 'v8_enable_memory_corruption_api',
'v8_enable_turboshaft_csa'],
'release_x64_asan_sandbox_testing_turboshaft_csa': [
'release_bot', 'x64', 'asan', 'symbolized',
@ -601,6 +602,8 @@
'release_x64_asan_symbolized_verify_heap_turboshaft_csa': [
'release_bot', 'x64', 'asan', 'lsan', 'symbolized',
'v8_verify_heap', 'v8_enable_turboshaft_csa'],
'release_x64_cet_shadow_stack': [
'release_bot', 'x64', 'cet_shadow_stack'],
'release_x64_cfi': [
'release_bot', 'x64', 'cfi'],
'release_x64_cfi_clusterfuzz_turboshaft_csa': [
@ -612,8 +615,6 @@
'v8_verify_heap', 'v8_verify_csa', 'fuzzilli'],
'release_x64_gcmole': [
'release_bot', 'x64', 'gcmole'],
'release_x64_msvc': [
'release_bot_no_reclient', 'x64', 'minimal_symbols', 'msvc'],
'release_x64_correctness_fuzzer_turboshaft_csa' : [
'release_bot', 'x64', 'v8_correctness_fuzzer',
'v8_enable_turboshaft_csa'],
@ -629,6 +630,8 @@
'release_bot', 'x64', 'ios_simulator'],
'release_x64_internal': [
'release_bot', 'x64', 'v8_snapshot_internal'],
'release_x64_lower_limits': [
'release_bot', 'x64', 'v8_lower_limits_mode'],
'release_x64_minimal_symbols': [
'release_bot', 'x64', 'minimal_symbols'],
'release_x64_minimal_symbols_reclient': [
@ -700,8 +703,6 @@
'debug_bot', 'x64', 'v8_snapshot_custom'],
'debug_x64_drumbrake': [
'debug_bot', 'x64', 'v8_enable_drumbrake'],
'debug_x64_external_code_space': [
'debug_bot', 'x64', 'external_code_space'],
'debug_x64_fuchsia': [
'debug_bot', 'x64', 'fuchsia'],
'debug_x64_gcc': [
@ -745,8 +746,6 @@
'debug_bot', 'x86', 'asan', 'lsan', 'v8_enable_turboshaft_csa'],
'debug_x86_minimal_symbols': [
'debug_bot', 'x86', 'minimal_symbols'],
'debug_x86_msvc': [
'debug_bot_no_reclient', 'x86', 'minimal_symbols', 'msvc'],
'debug_x86_no_i18n': [
'debug_bot', 'x86', 'v8_no_i18n'],
'debug_x86_trybot': [
@ -829,6 +828,10 @@
'gn_args': 'v8_enable_builtins_optimization=true',
},
'cet_shadow_stack': {
'gn_args': 'v8_enable_cet_shadow_stack=true',
},
'cfi': {
'mixins': ['v8_enable_test_features'],
'gn_args': ('is_cfi=true use_cfi_cast=true use_cfi_icall=true '
@ -896,10 +899,6 @@
'gn_args': 'chrome_pgo_phase=0',
},
'external_code_space': {
'gn_args': 'v8_enable_external_code_space=true',
},
'fuchsia': {
'gn_args': 'target_os="fuchsia"',
},
@ -922,7 +921,8 @@
},
'ios_simulator': {
'gn_args': 'target_cpu="x64" target_os="ios" use_blink=true',
'gn_args': 'target_cpu="x64" target_os="ios" '
'target_environment="simulator" use_blink=true',
},
'lld': {
@ -948,10 +948,6 @@
'gn_args': 'is_msan=true msan_track_origins=0 instrumented_libraries_release="noble"',
},
'msvc': {
'gn_args': 'is_clang=false',
},
'no_custom_libcxx': {
'gn_args': 'use_custom_libcxx=false',
},
@ -980,6 +976,10 @@
'gn_args': 'is_debug=false dcheck_always_on=false',
},
'backtrace': {
'gn_args': 'v8_enable_backtrace=true',
},
'release_bot': {
'mixins': ['release', 'static', 'reclient', 'v8_enable_google_benchmark'],
},
@ -1082,7 +1082,7 @@
},
'v8_disable_leaptiering': {
'gn_args': 'v8_disable_leaptiering=true',
'gn_args': 'v8_enable_leaptiering=false v8_enable_sandbox=false',
},
'v8_disable_runtime_call_stats': {
@ -1136,7 +1136,9 @@
'gn_args': 'v8_enable_pointer_compression=true',
},
'v8_disable_pointer_compression_shared_cage': {
'gn_args': 'v8_enable_pointer_compression_shared_cage=false',
'gn_args': 'v8_enable_pointer_compression_shared_cage=false '
'v8_enable_sandbox = true '
'v8_enable_external_code_space = true',
},
'v8_enable_single_generation': {
'gn_args': 'v8_enable_single_generation=true '
@ -1173,6 +1175,10 @@
'gn_args': 'v8_is_on_release_branch=true',
},
'v8_lower_limits_mode': {
'gn_args': 'v8_lower_limits_mode=true',
},
'v8_optimized_debug': {
# This is the default in gn for debug.
},

View file

@ -44,22 +44,16 @@
{'name': 'v8testing', 'variant': 'default', 'shards': 4},
],
},
##############################################################################
# Fuchsia
'v8_fuchsia_rel': {
'v8_android_arm64_p7_rel': {
'swarming_dimensions' : {
'os': 'Ubuntu-22.04',
'device_os': 'AP2A.240705.004',
'device_type': 'panther',
'os': 'Android',
},
'tests': [
{'name': 'fuchsia-unittests'},
],
},
'V8 Fuchsia': {
'swarming_dimensions' : {
'os': 'Ubuntu-22.04',
},
'tests': [
{'name': 'fuchsia-unittests'},
{'name': 'mozilla', 'variant': 'default'},
{'name': 'test262', 'variant': 'default', 'shards': 12},
{'name': 'v8testing', 'variant': 'default', 'shards': 4},
],
},
##############################################################################
@ -431,20 +425,25 @@
{'name': 'v8testing', 'shards': 5},
{'name': 'v8testing', 'variant': 'extra', 'shards': 5},
{'name': 'v8testing', 'variant': 'minor_ms'},
{'name': 'v8testing', 'variant': 'conservative_pinning'},
{'name': 'v8testing', 'variant': 'precise_pinning'},
{'name': 'v8testing', 'variant': 'no_lfa'},
{'name': 'v8testing', 'variant': 'stress_instruction_scheduling'},
{'name': 'v8testing', 'variant': 'stress_concurrent_allocation'},
{'name': 'v8testing', 'variant': 'stress_concurrent_inlining'},
{'name': 'mjsunit', 'variant': 'stress_maglev_tests_with_turbofan'},
# Maglev -- move to extra once more architectures are supported.
{'name': 'v8testing', 'variant': 'maglev'},
{'name': 'v8testing', 'variant': 'maglev_future'},
{'name': 'v8testing', 'variant': 'stress_maglev'},
{'name': 'v8testing', 'variant': 'stress_maglev_non_eager_inlining'},
{'name': 'v8testing', 'variant': 'stress_maglev_future'},
{'name': 'v8testing', 'variant': 'turbolev'},
# Code serializer.
{'name': 'benchmarks', 'variant': 'code_serializer', 'shards': 1},
{'name': 'd8testing', 'variant': 'code_serializer', 'shards': 1},
{'name': 'mozilla', 'variant': 'code_serializer', 'shards': 1},
{'name': 'test262', 'variant': 'code_serializer', 'shards': 3},
{'name': 'test262', 'variant': 'code_serializer', 'shards': 5},
# GC stress
{
'name': 'd8testing',
@ -452,6 +451,9 @@
'test_args': ['--gc-stress'],
'shards': 7,
},
# Jit fuzzing.
{'name': 'mjsunit', 'variant': 'jit_fuzzing'},
{'name': 'mjsunit', 'variant': 'jit_fuzzing_maglev'},
],
},
'v8_linux64_dict_tracking_dbg': {
@ -471,15 +473,6 @@
{'name': 'v8testing'},
],
},
'v8_linux64_external_code_space_dbg': {
'swarming_dimensions' : {
'cpu': 'x86-64-avx2',
'os': 'Ubuntu-22.04',
},
'tests': [
{'name': 'v8testing', 'shards': 3},
],
},
'v8_linux64_fuzzilli_rel': {
'swarming_dimensions' : {
'os': 'Ubuntu-22.04',
@ -515,7 +508,7 @@
{
'name': 'mjsunit',
'test_args': ['--gc-stress', '--no-harness'],
'shards': 3,
'shards': 5,
},
],
},
@ -544,6 +537,18 @@
'test_args': ['--gc-stress'],
'shards': 2
},
{
'name': 'mjsunit',
'variant': 'conservative_pinning',
'test_args': ['--gc-stress'],
'shards': 2
},
{
'name': 'mjsunit',
'variant': 'precise_pinning',
'test_args': ['--gc-stress'],
'shards': 2
},
],
},
'v8_linux64_gcc_rel': {
@ -562,6 +567,14 @@
{'name': 'v8testing'},
],
},
'v8_linux64_lower_limits_rel': {
'swarming_dimensions' : {
'os': 'Ubuntu-22.04',
},
'tests': [
{'name': 'v8testing'},
],
},
'v8_linux64_minor_mc_dbg': {
'swarming_dimensions' : {
'cpu': 'x86-64-avx2',
@ -605,12 +618,17 @@
{'name': 'v8testing', 'variant': 'extra', 'shards': 2},
{'name': 'v8testing', 'variant': 'no_lfa'},
{'name': 'v8testing', 'variant': 'stress_instruction_scheduling'},
{'name': 'mjsunit', 'variant': 'stress_maglev_tests_with_turbofan'},
# Maglev -- move to extra once more architectures are supported.
{'name': 'v8testing', 'variant': 'maglev'},
{'name': 'v8testing', 'variant': 'maglev_future'},
{'name': 'v8testing', 'variant': 'stress_maglev'},
{'name': 'v8testing', 'variant': 'stress_maglev_non_eager_inlining'},
{'name': 'v8testing', 'variant': 'stress_maglev_future'},
{'name': 'v8testing', 'variant': 'turbolev'},
{'name': 'v8testing', 'variant': 'minor_ms'},
{'name': 'v8testing', 'variant': 'conservative_pinning'},
{'name': 'v8testing', 'variant': 'precise_pinning'},
],
},
'v8_linux64_perfetto_dbg': {
@ -626,7 +644,7 @@
'os': 'Ubuntu-22.04',
},
'tests': [
{'name': 'v8testing', 'shards': 3},
{'name': 'v8testing', 'shards': 5},
],
},
'v8_linux64_no_shared_cage_dbg': {
@ -634,7 +652,7 @@
'os': 'Ubuntu-22.04',
},
'tests': [
{'name': 'v8testing', 'shards': 3},
{'name': 'v8testing', 'shards': 5},
],
},
'v8_linux64_no_pointer_compression_rel': {
@ -663,6 +681,16 @@
{'name': 'v8testing', 'shards': 2},
],
},
'v8_linux64_pku_dbg': {
'swarming_dimensions' : {
'os': 'Ubuntu-22.04',
'cpu': 'x86-64-avx2',
'pool': 'v8.tests',
},
'tests': [
{'name': 'v8testing', 'variant': 'default'},
],
},
'v8_linux64_pku_rel': {
'swarming_dimensions' : {
'os': 'Ubuntu-22.04',
@ -708,20 +736,25 @@
{'name': 'benchmarks', 'shards': 1},
{'name': 'mozilla', 'shards': 1},
{'name': 'optimize_for_size'},
{'name': 'test262', 'shards': 4},
{'name': 'test262', 'variant': 'extra', 'shards': 4},
{'name': 'test262', 'shards': 6},
{'name': 'test262', 'variant': 'extra', 'shards': 6},
{'name': 'v8initializers'},
{'name': 'v8testing', 'shards': 2},
{'name': 'v8testing', 'variant': 'extra', 'shards': 2},
{'name': 'v8testing', 'variant': 'minor_ms'},
{'name': 'v8testing', 'variant': 'conservative_pinning'},
{'name': 'v8testing', 'variant': 'precise_pinning'},
{'name': 'v8testing', 'variant': 'no_lfa'},
{'name': 'v8testing', 'variant': 'slow_path'},
{'name': 'v8testing', 'variant': 'stress_instruction_scheduling'},
{'name': 'mjsunit', 'variant': 'stress_maglev_tests_with_turbofan'},
# Maglev -- move to extra once more architectures are supported.
{'name': 'v8testing', 'variant': 'maglev'},
{'name': 'v8testing', 'variant': 'maglev_future'},
{'name': 'v8testing', 'variant': 'stress_maglev'},
{'name': 'v8testing', 'variant': 'stress_maglev_non_eager_inlining'},
{'name': 'v8testing', 'variant': 'stress_maglev_future'},
{'name': 'v8testing', 'variant': 'turbolev'},
# Code serializer.
{'name': 'benchmarks', 'variant': 'code_serializer', 'shards': 1},
{'name': 'd8testing', 'variant': 'code_serializer', 'shards': 1},
@ -776,6 +809,8 @@
{'name': 'v8testing', 'variant': 'slow_path', 'shards': 2},
{'name': 'v8testing', 'variant': 'stress_concurrent_allocation', 'shards': 2},
{'name': 'v8testing', 'variant': 'minor_ms', 'shards': 2},
{'name': 'v8testing', 'variant': 'conservative_pinning', 'shards': 2},
{'name': 'v8testing', 'variant': 'precise_pinning', 'shards': 2},
],
},
'v8_linux64_tsan_dbg': {
@ -945,6 +980,16 @@
{'name': 'v8testing', 'shards': 5},
],
},
'v8_win64_cet_shadow_stack_rel': {
'swarming_dimensions' : {
'os': 'Windows-11',
'cpu': 'x86-64-Sapphire_Rapids_GCE',
'pool': 'v8.tests',
},
'tests': [
{'name': 'v8testing', 'variant': 'default'},
],
},
'v8_win64_dbg': {
'swarming_dimensions' : {
'cpu': 'x86-64',
@ -956,6 +1001,8 @@
{'name': 'v8testing', 'shards': 4},
{'name': 'v8testing', 'variant': 'extra', 'shards': 3},
{'name': 'v8testing', 'variant': 'minor_ms'},
{'name': 'v8testing', 'variant': 'conservative_pinning'},
{'name': 'v8testing', 'variant': 'precise_pinning'},
],
},
'v8_win64_drumbrake_dbg': {
@ -968,7 +1015,7 @@
{'name': 'v8testing', 'variant': 'extra', 'shards': 3},
],
},
'v8_win64_msvc_rel': {
'v8_win64_nodcheck_rel': {
'swarming_dimensions' : {
'cpu': 'x86-64',
'os': 'Windows-10-19045',
@ -977,6 +1024,10 @@
{'name': 'mozilla'},
{'name': 'test262', 'variant': 'default', 'shards': 2},
{'name': 'v8testing', 'shards': 2},
{'name': 'v8testing', 'variant': 'extra', 'shards': 2},
{'name': 'v8testing', 'variant': 'minor_ms'},
{'name': 'v8testing', 'variant': 'conservative_pinning'},
{'name': 'v8testing', 'variant': 'precise_pinning'},
],
},
'v8_win64_rel': {
@ -990,6 +1041,8 @@
{'name': 'v8testing', 'shards': 2},
{'name': 'v8testing', 'variant': 'extra', 'shards': 2},
{'name': 'v8testing', 'variant': 'minor_ms'},
{'name': 'v8testing', 'variant': 'conservative_pinning'},
{'name': 'v8testing', 'variant': 'precise_pinning'},
],
},
##############################################################################
@ -1014,6 +1067,8 @@
{'name': 'v8testing', 'shards': 6},
{'name': 'v8testing', 'variant': 'extra', 'shards': 6},
{'name': 'v8testing', 'variant': 'minor_ms'},
{'name': 'v8testing', 'variant': 'conservative_pinning'},
{'name': 'v8testing', 'variant': 'precise_pinning'},
],
},
'v8_mac_arm64_gc_stress_dbg': {
@ -1045,6 +1100,8 @@
{'name': 'v8testing', 'shards': 3},
{'name': 'v8testing', 'variant': 'extra', 'shards': 3},
{'name': 'v8testing', 'variant': 'minor_ms'},
{'name': 'v8testing', 'variant': 'conservative_pinning'},
{'name': 'v8testing', 'variant': 'precise_pinning'},
],
},
'v8_mac_arm64_rel': {
@ -1055,12 +1112,17 @@
},
'tests': [
{'name': 'v8testing'},
{'name': 'mjsunit', 'variant': 'stress_maglev_tests_with_turbofan'},
# Maglev -- move to extra once more architectures are supported.
{'name': 'v8testing', 'variant': 'maglev'},
{'name': 'v8testing', 'variant': 'maglev_future'},
{'name': 'v8testing', 'variant': 'stress_maglev'},
{'name': 'v8testing', 'variant': 'stress_maglev_non_eager_inlining'},
{'name': 'v8testing', 'variant': 'stress_maglev_future'},
{'name': 'v8testing', 'variant': 'turbolev'},
{'name': 'v8testing', 'variant': 'minor_ms'},
{'name': 'v8testing', 'variant': 'conservative_pinning'},
{'name': 'v8testing', 'variant': 'precise_pinning'},
],
},
'v8_mac_arm64_dbg': {
@ -1071,12 +1133,17 @@
},
'tests': [
{'name': 'v8testing'},
{'name': 'mjsunit', 'variant': 'stress_maglev_tests_with_turbofan'},
# Maglev -- move to extra once more architectures are supported.
{'name': 'v8testing', 'variant': 'maglev'},
{'name': 'v8testing', 'variant': 'maglev_future'},
{'name': 'v8testing', 'variant': 'stress_maglev'},
{'name': 'v8testing', 'variant': 'stress_maglev_non_eager_inlining'},
{'name': 'v8testing', 'variant': 'stress_maglev_future'},
{'name': 'v8testing', 'variant': 'turbolev'},
{'name': 'v8testing', 'variant': 'minor_ms'},
{'name': 'v8testing', 'variant': 'conservative_pinning'},
{'name': 'v8testing', 'variant': 'precise_pinning'},
],
},
'v8_mac_arm64_full_dbg': {
@ -1087,12 +1154,17 @@
},
'tests': [
{'name': 'v8testing'},
{'name': 'mjsunit', 'variant': 'stress_maglev_tests_with_turbofan'},
# Maglev -- move to extra once more architectures are supported.
{'name': 'v8testing', 'variant': 'maglev'},
{'name': 'v8testing', 'variant': 'maglev_future'},
{'name': 'v8testing', 'variant': 'stress_maglev'},
{'name': 'v8testing', 'variant': 'stress_maglev_non_eager_inlining'},
{'name': 'v8testing', 'variant': 'stress_maglev_future'},
{'name': 'v8testing', 'variant': 'turbolev'},
{'name': 'v8testing', 'variant': 'minor_ms'},
{'name': 'v8testing', 'variant': 'conservative_pinning'},
{'name': 'v8testing', 'variant': 'precise_pinning'},
],
},
'v8_mac_arm64_no_pointer_compression_dbg': {
@ -1294,7 +1366,7 @@
'name': 'v8testing',
'suffix': 'noavx',
'test_args': ['--extra-flags', '--noenable-avx'],
'shards': 2
'shards': 3
},
],
},
@ -1303,7 +1375,7 @@
'os': 'Ubuntu-22.04',
},
'tests': [
{'name': 'v8testing', 'variant': 'default', 'shards': 6},
{'name': 'v8testing', 'variant': 'default', 'shards': 7},
],
},
'V8 Linux - gc stress': {
@ -1360,7 +1432,7 @@
'os': 'Ubuntu-22.04',
},
'tests': [
{'name': 'v8testing'},
{'name': 'v8testing', 'shards': 2},
],
},
'V8 Linux PGO instrumentation - builder' : {
@ -1394,13 +1466,18 @@
{'name': 'v8testing', 'variant': 'assert_types'},
{'name': 'v8testing', 'variant': 'extra'},
{'name': 'v8testing', 'variant': 'minor_ms'},
{'name': 'v8testing', 'variant': 'conservative_pinning'},
{'name': 'v8testing', 'variant': 'precise_pinning'},
{'name': 'v8testing', 'variant': 'no_lfa'},
{'name': 'v8testing', 'variant': 'stress_instruction_scheduling'},
{'name': 'mjsunit', 'variant': 'stress_maglev_tests_with_turbofan'},
# Maglev -- move to extra once more architectures are supported.
{'name': 'v8testing', 'variant': 'maglev'},
{'name': 'v8testing', 'variant': 'maglev_future'},
{'name': 'v8testing', 'variant': 'stress_maglev'},
{'name': 'v8testing', 'variant': 'stress_maglev_non_eager_inlining'},
{'name': 'v8testing', 'variant': 'stress_maglev_future'},
{'name': 'v8testing', 'variant': 'turbolev'},
# Noavx.
{
'name': 'mozilla',
@ -1478,7 +1555,7 @@
'os': 'Ubuntu-22.04',
},
'tests': [
{'name': 'mjsunit', 'test_args': ['--no-harness']},
{'name': 'mjsunit', 'test_args': ['--no-harness'], 'shards': 2},
],
},
'V8 Linux64 - debug': {
@ -1492,21 +1569,26 @@
{'name': 'mozilla'},
{'name': 'mozilla', 'variant': 'extra'},
{'name': 'optimize_for_size'},
{'name': 'test262', 'shards': 7},
{'name': 'test262', 'shards': 9},
{'name': 'test262', 'variant': 'extra', 'shards': 7},
{'name': 'v8testing', 'shards': 2},
{'name': 'v8testing', 'shards': 3},
{'name': 'v8testing', 'variant': 'extra', 'shards': 4},
{'name': 'v8testing', 'variant': 'minor_ms'},
{'name': 'v8testing', 'variant': 'conservative_pinning'},
{'name': 'v8testing', 'variant': 'precise_pinning'},
{'name': 'v8testing', 'variant': 'no_lfa'},
{'name': 'v8testing', 'variant': 'slow_path'},
{'name': 'v8testing', 'variant': 'stress_instruction_scheduling'},
{'name': 'v8testing', 'variant': 'stress_concurrent_allocation'},
{'name': 'v8testing', 'variant': 'stress_concurrent_inlining', 'shards': 2},
{'name': 'mjsunit', 'variant': 'stress_maglev_tests_with_turbofan'},
# Maglev -- move to extra once more architectures are supported.
{'name': 'v8testing', 'variant': 'maglev'},
{'name': 'v8testing', 'variant': 'maglev_future'},
{'name': 'v8testing', 'variant': 'stress_maglev'},
{'name': 'v8testing', 'variant': 'stress_maglev_non_eager_inlining'},
{'name': 'v8testing', 'variant': 'stress_maglev_future'},
{'name': 'v8testing', 'variant': 'turbolev'},
# Noavx.
{
'name': 'mozilla',
@ -1524,13 +1606,16 @@
'name': 'v8testing',
'suffix': 'noavx',
'test_args': ['--extra-flags', '--noenable-avx'],
'shards': 2
'shards': 3
},
# Code serializer.
{'name': 'benchmarks', 'variant': 'code_serializer', 'shards': 1},
{'name': 'd8testing', 'variant': 'code_serializer', 'shards': 1},
{'name': 'mozilla', 'variant': 'code_serializer', 'shards': 1},
{'name': 'test262', 'variant': 'code_serializer', 'shards': 2},
{'name': 'test262', 'variant': 'code_serializer', 'shards': 3},
# Jit fuzzing.
{'name': 'mjsunit', 'variant': 'jit_fuzzing'},
{'name': 'mjsunit', 'variant': 'jit_fuzzing_maglev'},
],
},
'V8 Linux64 - minor mc - debug': {
@ -1608,15 +1693,6 @@
{'name': 'v8testing', 'shards': 3},
],
},
'V8 Linux64 - external code space - debug': {
'swarming_dimensions' : {
'cpu': 'x86-64-avx2',
'os': 'Ubuntu-22.04',
},
'tests': [
{'name': 'v8testing', 'shards': 3},
],
},
'V8 Linux64 - fyi': {
'swarming_dimensions' : {
'os': 'Ubuntu-22.04',
@ -1647,6 +1723,16 @@
{'name': 'v8testing', 'variant': 'default'},
],
},
'V8 Linux64 - PKU - debug': {
'swarming_dimensions' : {
'os': 'Ubuntu-22.04',
'cpu': 'x86-64-avx2',
'pool': 'v8.tests',
},
'tests': [
{'name': 'v8testing', 'variant': 'default'},
],
},
'V8 Linux64 gcc': {
'swarming_dimensions' : {
'os': 'Ubuntu-20.04',
@ -1692,6 +1778,18 @@
'test_args': ['--gc-stress'],
'shards': 2
},
{
'name': 'mjsunit',
'variant': 'conservative_pinning',
'test_args': ['--gc-stress'],
'shards': 2
},
{
'name': 'mjsunit',
'variant': 'precise_pinning',
'test_args': ['--gc-stress'],
'shards': 2
},
],
},
'V8 Linux64 - internal snapshot': {
@ -1702,12 +1800,20 @@
{'name': 'v8testing'},
],
},
'V8 Linux64 - lower limits': {
'swarming_dimensions' : {
'os': 'Ubuntu-22.04',
},
'tests': [
{'name': 'v8testing'},
],
},
'V8 Linux64 - no leaptiering - debug': {
'swarming_dimensions' : {
'os': 'Ubuntu-22.04',
},
'tests': [
{'name': 'v8testing', 'shards': 2},
{'name': 'v8testing', 'shards': 5},
],
},
'V8 Linux64 - no shared cage - debug': {
@ -1746,7 +1852,7 @@
'os': 'Ubuntu-22.04',
},
'tests': [
{'name': 'v8testing', 'shards': 2},
{'name': 'v8testing', 'shards': 3},
],
},
'V8 Linux64 - sandbox testing': {
@ -1815,7 +1921,7 @@
{
'name': 'mjsunit',
'test_args': ['--gc-stress', '--no-harness'],
'shards': 3,
'shards': 5,
},
],
},
@ -1840,6 +1946,8 @@
{'name': 'v8testing', 'variant': 'slow_path', 'shards': 2},
{'name': 'v8testing', 'variant': 'stress_concurrent_allocation', 'shards': 2},
{'name': 'v8testing', 'variant': 'minor_ms', 'shards': 2},
{'name': 'v8testing', 'variant': 'conservative_pinning', 'shards': 2},
{'name': 'v8testing', 'variant': 'precise_pinning', 'shards': 2},
],
},
'V8 Linux64 TSAN - debug': {
@ -1849,7 +1957,7 @@
'tests': [
{'name': 'benchmarks', 'shards': 2},
{'name': 'mozilla', 'shards': 4},
{'name': 'test262', 'variant': 'default', 'shards': 5},
{'name': 'test262', 'variant': 'default', 'shards': 6},
{'name': 'v8testing', 'shards': 12},
{'name': 'v8testing', 'variant': 'extra', 'shards': 12},
],
@ -1926,6 +2034,8 @@
{'name': 'v8testing', 'shards': 3},
{'name': 'v8testing', 'variant': 'extra', 'shards': 3},
{'name': 'v8testing', 'variant': 'minor_ms'},
{'name': 'v8testing', 'variant': 'conservative_pinning'},
{'name': 'v8testing', 'variant': 'precise_pinning'},
],
},
'V8 Mac64 - debug': {
@ -1939,6 +2049,8 @@
{'name': 'v8testing', 'shards': 6},
{'name': 'v8testing', 'variant': 'extra', 'shards': 6},
{'name': 'v8testing', 'variant': 'minor_ms'},
{'name': 'v8testing', 'variant': 'conservative_pinning'},
{'name': 'v8testing', 'variant': 'precise_pinning'},
],
},
'V8 Mac64 ASAN': {
@ -1968,12 +2080,17 @@
'tests': [
{'name': 'v8testing'},
{'name': 'v8testing', 'variant': 'extra'},
{'name': 'mjsunit', 'variant': 'stress_maglev_tests_with_turbofan'},
# Maglev -- move to extra once more architectures are supported.
{'name': 'v8testing', 'variant': 'maglev'},
{'name': 'v8testing', 'variant': 'maglev_future'},
{'name': 'v8testing', 'variant': 'stress_maglev'},
{'name': 'v8testing', 'variant': 'stress_maglev_non_eager_inlining'},
{'name': 'v8testing', 'variant': 'stress_maglev_future'},
{'name': 'v8testing', 'variant': 'turbolev'},
{'name': 'v8testing', 'variant': 'minor_ms'},
{'name': 'v8testing', 'variant': 'conservative_pinning'},
{'name': 'v8testing', 'variant': 'precise_pinning'},
],
},
'V8 Mac - arm64 - release': {
@ -1985,12 +2102,17 @@
'tests': [
{'name': 'v8testing'},
{'name': 'v8testing', 'variant': 'extra'},
{'name': 'mjsunit', 'variant': 'stress_maglev_tests_with_turbofan'},
# Maglev -- move to extra once more architectures are supported.
{'name': 'v8testing', 'variant': 'maglev'},
{'name': 'v8testing', 'variant': 'maglev_future'},
{'name': 'v8testing', 'variant': 'stress_maglev'},
{'name': 'v8testing', 'variant': 'stress_maglev_non_eager_inlining'},
{'name': 'v8testing', 'variant': 'stress_maglev_future'},
{'name': 'v8testing', 'variant': 'turbolev'},
{'name': 'v8testing', 'variant': 'minor_ms'},
{'name': 'v8testing', 'variant': 'conservative_pinning'},
{'name': 'v8testing', 'variant': 'precise_pinning'},
],
},
'V8 Mac - arm64 - debug': {
@ -2002,12 +2124,17 @@
'tests': [
{'name': 'v8testing', 'shards': 2},
{'name': 'v8testing', 'variant': 'extra', 'shards': 2},
{'name': 'mjsunit', 'variant': 'stress_maglev_tests_with_turbofan'},
# Maglev -- move to extra once more architectures are supported.
{'name': 'v8testing', 'variant': 'maglev'},
{'name': 'v8testing', 'variant': 'maglev_future'},
{'name': 'v8testing', 'variant': 'stress_maglev'},
{'name': 'v8testing', 'variant': 'stress_maglev_non_eager_inlining'},
{'name': 'v8testing', 'variant': 'stress_maglev_future'},
{'name': 'v8testing', 'variant': 'turbolev'},
{'name': 'v8testing', 'variant': 'minor_ms'},
{'name': 'v8testing', 'variant': 'conservative_pinning'},
{'name': 'v8testing', 'variant': 'precise_pinning'},
],
},
'V8 Mac - arm64 - no pointer compression debug': {
@ -2061,6 +2188,18 @@
{'name': 'v8testing', 'shards': 2},
{'name': 'v8testing', 'variant': 'extra', 'shards': 2},
{'name': 'v8testing', 'variant': 'minor_ms'},
{'name': 'v8testing', 'variant': 'conservative_pinning'},
{'name': 'v8testing', 'variant': 'precise_pinning'},
],
},
'V8 Win64 - CET shadow stack': {
'swarming_dimensions' : {
'os': 'Windows-11',
'cpu': 'x86-64-Sapphire_Rapids_GCE',
'pool': 'v8.tests',
},
'tests': [
{'name': 'v8testing', 'variant': 'default'},
],
},
'V8 Win64 - debug': {
@ -2073,6 +2212,8 @@
{'name': 'v8testing', 'shards': 4},
{'name': 'v8testing', 'variant': 'extra', 'shards': 3},
{'name': 'v8testing', 'variant': 'minor_ms'},
{'name': 'v8testing', 'variant': 'conservative_pinning'},
{'name': 'v8testing', 'variant': 'precise_pinning'},
],
},
'V8 Win64 - drumbrake - debug': {
@ -2090,16 +2231,6 @@
{'name': 'v8testing', 'variant': 'extra', 'shards': 3},
],
},
'V8 Win64 - msvc': {
'swarming_dimensions': {
'os': 'Windows-10-19045',
},
'tests': [
{'name': 'mozilla'},
{'name': 'test262', 'variant': 'default', 'shards': 2},
{'name': 'v8testing', 'shards': 2},
],
},
'V8 Win64 ASAN': {
'swarming_dimensions': {
'os': 'Windows-10-19045',
@ -2130,6 +2261,18 @@
{'name': 'v8testing', 'variant': 'default', 'shards': 4},
],
},
'V8 Android Arm64 - P7': {
'swarming_dimensions': {
'device_os': 'AP2A.240705.004',
'device_type': 'panther',
'os': 'Android',
},
'tests': [
{'name': 'mozilla', 'variant': 'default'},
{'name': 'test262', 'variant': 'default', 'shards': 12},
{'name': 'v8testing', 'variant': 'default', 'shards': 4},
],
},
'V8 Linux - arm - sim': {
'swarming_dimensions': {
'os': 'Ubuntu-22.04',
@ -2139,6 +2282,7 @@
{'name': 'test262', 'variant': 'default', 'shards': 2},
{'name': 'v8testing', 'shards': 6},
{'name': 'v8testing', 'variant': 'extra', 'shards': 3},
{'name': 'v8testing', 'variant': 'turbolev'},
# Armv8-a.
{
'name': 'mozilla',
@ -2190,6 +2334,7 @@
{'name': 'test262', 'variant': 'default', 'shards': 2},
{'name': 'v8testing', 'shards': 10},
{'name': 'v8testing', 'variant': 'extra', 'shards': 10},
{'name': 'v8testing', 'variant': 'turbolev'},
# Armv8-a.
{
'name': 'mozilla',
@ -2442,6 +2587,66 @@
},
],
},
'V8 NumFuzz - ASAN': {
'swarming_dimensions': {
'os': 'Ubuntu-22.04',
},
'swarming_task_attrs': {
'expiration': 13800,
'hard_timeout': 4200,
'priority': 35,
},
'tests': [
{'name': 'd8testing_random_gc', 'shards': 2},
{
'name': 'numfuzz',
'suffix': 'marking',
'test_args': ['--total-timeout-sec=2100', '--stress-marking=1']
},
{
'name': 'numfuzz',
'suffix': 'delay',
'test_args': ['--total-timeout-sec=2100', '--stress-delay-tasks=1']
},
{
'name': 'numfuzz',
'suffix': 'interrupt',
'test_args': ['--total-timeout-sec=2100', '--stress-interrupt-budget=1']
},
{
'name': 'numfuzz',
'suffix': 'threads',
'test_args': ['--total-timeout-sec=2100', '--stress-thread-pool-size=1']
},
{
'name': 'numfuzz',
'suffix': 'stack',
'test_args': ['--total-timeout-sec=2100', '--stress-stack-size=1']
},
{
'name': 'numfuzz',
'suffix': 'combined',
'test_args': [
'--total-timeout-sec=2100',
'--stress-delay-tasks=4',
'--stress-deopt=2',
'--stress-compaction=2',
'--stress-gc=4',
'--stress-marking=4',
'--stress-scavenge=4',
'--stress-thread-pool-size=2',
'--stress-stack-size=1',
'--stress-interrupt-budget=1',
],
'shards': 4
},
{
'name': 'numfuzz',
'suffix': 'scavenge',
'test_args': ['--total-timeout-sec=2100', '--stress-scavenge=1']
},
],
},
'V8 NumFuzz - TSAN': {
'swarming_dimensions': {
'os': 'Ubuntu-22.04',
@ -2586,6 +2791,59 @@
},
],
},
'v8_numfuzz_asan_rel': {
'swarming_dimensions': {
'os': 'Ubuntu-22.04',
},
'tests': [
{
'name': 'numfuzz',
'suffix': 'marking',
'test_args': ['--total-timeout-sec=900', '--stress-marking=1']
},
{
'name': 'numfuzz',
'suffix': 'delay',
'test_args': ['--total-timeout-sec=900', '--stress-delay-tasks=1']
},
{
'name': 'numfuzz',
'suffix': 'interrupt',
'test_args': ['--total-timeout-sec=900', '--stress-interrupt-budget=1']
},
{
'name': 'numfuzz',
'suffix': 'threads',
'test_args': ['--total-timeout-sec=900', '--stress-thread-pool-size=1']
},
{
'name': 'numfuzz',
'suffix': 'stack',
'test_args': ['--total-timeout-sec=900', '--stress-stack-size=1']
},
{
'name': 'numfuzz',
'suffix': 'combined',
'test_args': [
'--total-timeout-sec=900',
'--stress-delay-tasks=4',
'--stress-deopt=2',
'--stress-compaction=2',
'--stress-gc=4',
'--stress-marking=4',
'--stress-scavenge=4',
'--stress-thread-pool-size=2',
'--stress-stack-size=1',
'--stress-interrupt-budget=1',
],
},
{
'name': 'numfuzz',
'suffix': 'scavenge',
'test_args': ['--total-timeout-sec=900', '--stress-scavenge=1']
},
],
},
'v8_numfuzz_tsan_rel': {
'swarming_dimensions': {
'os': 'Ubuntu-22.04',

4
deps/v8/pyrightconfig.json vendored Normal file
View file

@ -0,0 +1,4 @@
{
"include": ["tools"],
"exclude": ["tools/gcmole/bootstrap"]
}

18
deps/v8/src/DEPS vendored
View file

@ -16,8 +16,10 @@ include_rules = [
"+src/compiler/wasm-compiler.h",
"-src/flags/flags-impl.h",
"-src/heap",
"+src/heap/memory-chunk-constants.h",
"+src/heap/memory-chunk-metadata.h",
"+src/heap/code-range.h",
"+src/heap/read-only-spaces.h",
"+src/heap/trusted-range.h",
"+src/heap/combined-heap.h",
"+src/heap/factory.h",
@ -29,6 +31,8 @@ include_rules = [
"+src/heap/heap.h",
"+src/heap/heap-verifier.h",
"+src/heap/heap-inl.h",
"+src/heap/heap-layout-inl.h",
"+src/heap/heap-layout.h",
"+src/heap/heap-write-barrier-inl.h",
"+src/heap/heap-write-barrier.h",
"+src/heap/local-factory-inl.h",
@ -41,6 +45,7 @@ include_rules = [
"+src/heap/mutable-page-metadata-inl.h",
"+src/heap/memory-chunk.h",
"+src/heap/page-metadata-inl.h",
"+src/heap/page-pool.h",
"+src/heap/paged-spaces-inl.h",
"+src/heap/parked-scope-inl.h",
"+src/heap/parked-scope.h",
@ -50,6 +55,7 @@ include_rules = [
"+src/heap/safepoint.h",
"+src/heap/base/stack.h",
"+src/heap/conservative-stack-visitor.h",
"+src/heap/visit-object.h",
"-src/inspector",
"-src/interpreter",
"+src/interpreter/bytecode-array-iterator.h",
@ -67,6 +73,7 @@ include_rules = [
"-src/regexp",
"+src/regexp/regexp.h",
"+src/regexp/regexp-flags.h",
"+src/regexp/regexp-result-vector.h",
"+src/regexp/regexp-stack.h",
"+src/regexp/regexp-utils.h",
"+src/tracing/trace-event-no-perfetto.h",
@ -81,11 +88,6 @@ include_rules = [
"+builtins-generated",
"+torque-generated",
"+starboard",
# Using cppgc inside v8 is not (yet) allowed.
"-include/cppgc",
"+include/cppgc/common.h",
"+include/cppgc/platform.h",
"+include/cppgc/source-location.h",
]
specific_include_rules = {
@ -121,7 +123,13 @@ specific_include_rules = {
"+src/heap/factory-base.h",
"+src/heap/local-factory.h",
],
"setup-builtins-internal\.cc": [
"+src/compiler/pipeline.h",
],
"snapshot\.cc": [
"+src/heap/read-only-promotion.h",
],
"string-hasher-inl\.h": [
"+third_party/rapidhash-v8/rapidhash.h",
]
}

View file

@ -9,3 +9,6 @@
monorail {
component: "Blink>JavaScript>API"
}
buganizer_public: {
component_id: 1456124
}

View file

@ -6,6 +6,8 @@
#define V8_API_API_ARGUMENTS_INL_H_
#include "src/api/api-arguments.h"
// Include the non-inl header before the rest of the headers.
#include "src/api/api-inl.h"
#include "src/debug/debug.h"
#include "src/execution/vm-state-inl.h"
@ -42,10 +44,6 @@ inline Tagged<Object> PropertyCallbackArguments::receiver() const {
return *slot_at(T::kThisIndex);
}
inline Tagged<JSReceiver> FunctionCallbackArguments::holder() const {
return Cast<JSReceiver>(*slot_at(T::kHolderIndex));
}
#define DCHECK_NAME_COMPATIBLE(interceptor, name) \
DCHECK(interceptor->is_named()); \
DCHECK(!name->IsPrivate()); \
@ -76,7 +74,7 @@ inline Tagged<JSReceiver> FunctionCallbackArguments::holder() const {
ExternalCallbackScope call_scope(ISOLATE, FUNCTION_ADDR(F), \
EXCEPTION_CONTEXT, &callback_info);
Handle<Object> FunctionCallbackArguments::CallOrConstruct(
DirectHandle<Object> FunctionCallbackArguments::CallOrConstruct(
Tagged<FunctionTemplateInfo> function, bool is_construct) {
Isolate* isolate = this->isolate();
RCS_SCOPE(isolate, RuntimeCallCounterId::kFunctionCallback);
@ -149,16 +147,17 @@ Maybe<InterceptorResult> PropertyCallbackArguments::GetBooleanReturnValue(
// -------------------------------------------------------------------------
// Named Interceptor callbacks.
Handle<JSObjectOrUndefined> PropertyCallbackArguments::CallNamedEnumerator(
Handle<InterceptorInfo> interceptor) {
DirectHandle<JSObjectOrUndefined>
PropertyCallbackArguments::CallNamedEnumerator(
DirectHandle<InterceptorInfo> interceptor) {
DCHECK(interceptor->is_named());
RCS_SCOPE(isolate(), RuntimeCallCounterId::kNamedEnumeratorCallback);
return CallPropertyEnumerator(interceptor);
}
// TODO(ishell): return std::optional<PropertyAttributes>.
Handle<Object> PropertyCallbackArguments::CallNamedQuery(
Handle<InterceptorInfo> interceptor, Handle<Name> name) {
DirectHandle<Object> PropertyCallbackArguments::CallNamedQuery(
DirectHandle<InterceptorInfo> interceptor, DirectHandle<Name> name) {
DCHECK_NAME_COMPATIBLE(interceptor, name);
Isolate* isolate = this->isolate();
RCS_SCOPE(isolate, RuntimeCallCounterId::kNamedQueryCallback);
@ -174,8 +173,8 @@ Handle<Object> PropertyCallbackArguments::CallNamedQuery(
return GetReturnValue<Object>(isolate);
}
Handle<JSAny> PropertyCallbackArguments::CallNamedGetter(
Handle<InterceptorInfo> interceptor, Handle<Name> name) {
DirectHandle<JSAny> PropertyCallbackArguments::CallNamedGetter(
DirectHandle<InterceptorInfo> interceptor, DirectHandle<Name> name) {
DCHECK_NAME_COMPATIBLE(interceptor, name);
Isolate* isolate = this->isolate();
RCS_SCOPE(isolate, RuntimeCallCounterId::kNamedGetterCallback);
@ -192,7 +191,7 @@ Handle<JSAny> PropertyCallbackArguments::CallNamedGetter(
}
Handle<JSAny> PropertyCallbackArguments::CallNamedDescriptor(
Handle<InterceptorInfo> interceptor, Handle<Name> name) {
DirectHandle<InterceptorInfo> interceptor, DirectHandle<Name> name) {
DCHECK_NAME_COMPATIBLE(interceptor, name);
Isolate* isolate = this->isolate();
RCS_SCOPE(isolate, RuntimeCallCounterId::kNamedDescriptorCallback);
@ -210,8 +209,8 @@ Handle<JSAny> PropertyCallbackArguments::CallNamedDescriptor(
}
v8::Intercepted PropertyCallbackArguments::CallNamedSetter(
DirectHandle<InterceptorInfo> interceptor, Handle<Name> name,
Handle<Object> value) {
DirectHandle<InterceptorInfo> interceptor, DirectHandle<Name> name,
DirectHandle<Object> value) {
DCHECK_NAME_COMPATIBLE(interceptor, name);
Isolate* isolate = this->isolate();
RCS_SCOPE(isolate, RuntimeCallCounterId::kNamedSetterCallback);
@ -220,7 +219,7 @@ v8::Intercepted PropertyCallbackArguments::CallNamedSetter(
NamedPropertySetterCallback f =
ToCData<NamedPropertySetterCallback, kApiNamedPropertySetterCallbackTag>(
isolate, interceptor->setter());
Handle<InterceptorInfo> has_side_effects;
DirectHandle<InterceptorInfo> has_side_effects;
PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, void, has_side_effects,
ExceptionContext::kNamedSetter);
v8::Intercepted intercepted =
@ -229,7 +228,7 @@ v8::Intercepted PropertyCallbackArguments::CallNamedSetter(
}
v8::Intercepted PropertyCallbackArguments::CallNamedDefiner(
DirectHandle<InterceptorInfo> interceptor, Handle<Name> name,
DirectHandle<InterceptorInfo> interceptor, DirectHandle<Name> name,
const v8::PropertyDescriptor& desc) {
DCHECK_NAME_COMPATIBLE(interceptor, name);
Isolate* isolate = this->isolate();
@ -239,7 +238,7 @@ v8::Intercepted PropertyCallbackArguments::CallNamedDefiner(
NamedPropertyDefinerCallback f = ToCData<NamedPropertyDefinerCallback,
kApiNamedPropertyDefinerCallbackTag>(
isolate, interceptor->definer());
Handle<InterceptorInfo> has_side_effects;
DirectHandle<InterceptorInfo> has_side_effects;
PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, void, has_side_effects,
ExceptionContext::kNamedDefiner);
v8::Intercepted intercepted =
@ -248,7 +247,7 @@ v8::Intercepted PropertyCallbackArguments::CallNamedDefiner(
}
v8::Intercepted PropertyCallbackArguments::CallNamedDeleter(
DirectHandle<InterceptorInfo> interceptor, Handle<Name> name) {
DirectHandle<InterceptorInfo> interceptor, DirectHandle<Name> name) {
DCHECK_NAME_COMPATIBLE(interceptor, name);
Isolate* isolate = this->isolate();
RCS_SCOPE(isolate, RuntimeCallCounterId::kNamedDeleterCallback);
@ -257,7 +256,7 @@ v8::Intercepted PropertyCallbackArguments::CallNamedDeleter(
NamedPropertyDeleterCallback f = ToCData<NamedPropertyDeleterCallback,
kApiNamedPropertyDeleterCallbackTag>(
isolate, interceptor->deleter());
Handle<InterceptorInfo> has_side_effects;
DirectHandle<InterceptorInfo> has_side_effects;
PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, v8::Boolean, has_side_effects,
ExceptionContext::kNamedDeleter);
v8::Intercepted intercepted = f(v8::Utils::ToLocal(name), callback_info);
@ -267,16 +266,17 @@ v8::Intercepted PropertyCallbackArguments::CallNamedDeleter(
// -------------------------------------------------------------------------
// Indexed Interceptor callbacks.
Handle<JSObjectOrUndefined> PropertyCallbackArguments::CallIndexedEnumerator(
Handle<InterceptorInfo> interceptor) {
DirectHandle<JSObjectOrUndefined>
PropertyCallbackArguments::CallIndexedEnumerator(
DirectHandle<InterceptorInfo> interceptor) {
DCHECK(!interceptor->is_named());
RCS_SCOPE(isolate(), RuntimeCallCounterId::kIndexedEnumeratorCallback);
return CallPropertyEnumerator(interceptor);
}
// TODO(ishell): return std::optional<PropertyAttributes>.
Handle<Object> PropertyCallbackArguments::CallIndexedQuery(
Handle<InterceptorInfo> interceptor, uint32_t index) {
DirectHandle<Object> PropertyCallbackArguments::CallIndexedQuery(
DirectHandle<InterceptorInfo> interceptor, uint32_t index) {
DCHECK(!interceptor->is_named());
Isolate* isolate = this->isolate();
RCS_SCOPE(isolate, RuntimeCallCounterId::kIndexedQueryCallback);
@ -294,8 +294,8 @@ Handle<Object> PropertyCallbackArguments::CallIndexedQuery(
return GetReturnValue<Object>(isolate);
}
Handle<JSAny> PropertyCallbackArguments::CallIndexedGetter(
Handle<InterceptorInfo> interceptor, uint32_t index) {
DirectHandle<JSAny> PropertyCallbackArguments::CallIndexedGetter(
DirectHandle<InterceptorInfo> interceptor, uint32_t index) {
DCHECK(!interceptor->is_named());
Isolate* isolate = this->isolate();
RCS_SCOPE(isolate, RuntimeCallCounterId::kNamedGetterCallback);
@ -314,7 +314,7 @@ Handle<JSAny> PropertyCallbackArguments::CallIndexedGetter(
}
Handle<JSAny> PropertyCallbackArguments::CallIndexedDescriptor(
Handle<InterceptorInfo> interceptor, uint32_t index) {
DirectHandle<InterceptorInfo> interceptor, uint32_t index) {
DCHECK(!interceptor->is_named());
Isolate* isolate = this->isolate();
RCS_SCOPE(isolate, RuntimeCallCounterId::kIndexedDescriptorCallback);
@ -334,7 +334,7 @@ Handle<JSAny> PropertyCallbackArguments::CallIndexedDescriptor(
v8::Intercepted PropertyCallbackArguments::CallIndexedSetter(
DirectHandle<InterceptorInfo> interceptor, uint32_t index,
Handle<Object> value) {
DirectHandle<Object> value) {
DCHECK(!interceptor->is_named());
Isolate* isolate = this->isolate();
RCS_SCOPE(isolate, RuntimeCallCounterId::kIndexedSetterCallback);
@ -345,7 +345,7 @@ v8::Intercepted PropertyCallbackArguments::CallIndexedSetter(
ToCData<IndexedPropertySetterCallbackV2,
kApiIndexedPropertySetterCallbackTag>(isolate,
interceptor->setter());
Handle<InterceptorInfo> has_side_effects;
DirectHandle<InterceptorInfo> has_side_effects;
PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, void, has_side_effects,
ExceptionContext::kIndexedSetter);
v8::Intercepted intercepted =
@ -366,7 +366,7 @@ v8::Intercepted PropertyCallbackArguments::CallIndexedDefiner(
ToCData<IndexedPropertyDefinerCallbackV2,
kApiIndexedPropertyDefinerCallbackTag>(isolate,
interceptor->definer());
Handle<InterceptorInfo> has_side_effects;
DirectHandle<InterceptorInfo> has_side_effects;
PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, void, has_side_effects,
ExceptionContext::kIndexedDefiner);
v8::Intercepted intercepted = f(index, desc, callback_info);
@ -374,7 +374,7 @@ v8::Intercepted PropertyCallbackArguments::CallIndexedDefiner(
}
v8::Intercepted PropertyCallbackArguments::CallIndexedDeleter(
Handle<InterceptorInfo> interceptor, uint32_t index) {
DirectHandle<InterceptorInfo> interceptor, uint32_t index) {
DCHECK(!interceptor->is_named());
Isolate* isolate = this->isolate();
RCS_SCOPE(isolate, RuntimeCallCounterId::kIndexedDeleterCallback);
@ -391,8 +391,9 @@ v8::Intercepted PropertyCallbackArguments::CallIndexedDeleter(
return intercepted;
}
Handle<JSObjectOrUndefined> PropertyCallbackArguments::CallPropertyEnumerator(
Handle<InterceptorInfo> interceptor) {
DirectHandle<JSObjectOrUndefined>
PropertyCallbackArguments::CallPropertyEnumerator(
DirectHandle<InterceptorInfo> interceptor) {
// Named and indexed enumerator callbacks have same signatures.
static_assert(std::is_same<NamedPropertyEnumeratorCallback,
IndexedPropertyEnumeratorCallback>::value);
@ -410,7 +411,7 @@ Handle<JSObjectOrUndefined> PropertyCallbackArguments::CallPropertyEnumerator(
PREPARE_CALLBACK_INFO_INTERCEPTOR(isolate, f, v8::Array, interceptor,
ExceptionContext::kNamedEnumerator);
f(callback_info);
Handle<JSAny> result = GetReturnValue<JSAny>(isolate);
DirectHandle<JSAny> result = GetReturnValue<JSAny>(isolate);
DCHECK(IsUndefined(*result) || IsJSObject(*result));
return Cast<JSObjectOrUndefined>(result);
}
@ -418,8 +419,8 @@ Handle<JSObjectOrUndefined> PropertyCallbackArguments::CallPropertyEnumerator(
// -------------------------------------------------------------------------
// Accessors
Handle<JSAny> PropertyCallbackArguments::CallAccessorGetter(
DirectHandle<AccessorInfo> info, Handle<Name> name) {
DirectHandle<JSAny> PropertyCallbackArguments::CallAccessorGetter(
DirectHandle<AccessorInfo> info, DirectHandle<Name> name) {
Isolate* isolate = this->isolate();
RCS_SCOPE(isolate, RuntimeCallCounterId::kAccessorGetterCallback);
// Unlike interceptor callbacks we know that the property exists, so
@ -430,16 +431,16 @@ Handle<JSAny> PropertyCallbackArguments::CallAccessorGetter(
slot_at(kReturnValueIndex).store(ReadOnlyRoots(isolate).undefined_value());
AccessorNameGetterCallback f =
reinterpret_cast<AccessorNameGetterCallback>(info->getter(isolate));
PREPARE_CALLBACK_INFO_ACCESSOR(isolate, f, v8::Value, info,
handle(receiver(), isolate), ACCESSOR_GETTER,
ExceptionContext::kAttributeGet);
PREPARE_CALLBACK_INFO_ACCESSOR(
isolate, f, v8::Value, info, direct_handle(receiver(), isolate),
ACCESSOR_GETTER, ExceptionContext::kAttributeGet);
f(v8::Utils::ToLocal(name), callback_info);
return GetReturnValue<JSAny>(isolate);
}
bool PropertyCallbackArguments::CallAccessorSetter(
DirectHandle<AccessorInfo> accessor_info, Handle<Name> name,
Handle<Object> value) {
DirectHandle<AccessorInfo> accessor_info, DirectHandle<Name> name,
DirectHandle<Object> value) {
Isolate* isolate = this->isolate();
RCS_SCOPE(isolate, RuntimeCallCounterId::kAccessorSetterCallback);
// Unlike interceptor callbacks we know that the property exists, so
@ -450,7 +451,7 @@ bool PropertyCallbackArguments::CallAccessorSetter(
slot_at(kReturnValueIndex).store(ReadOnlyRoots(isolate).true_value());
// The actual type of setter callback is either
// v8::AccessorNameSetterCallback or
// i::Accesors::AccessorNameBooleanSetterCallback, depending on whether the
// i::Accessors::AccessorNameBooleanSetterCallback, depending on whether the
// AccessorInfo was created by the API or internally (see accessors.cc).
// Here we handle both cases using the AccessorNameSetterCallback signature
// and checking whether the returned result is set to default value
@ -459,9 +460,9 @@ bool PropertyCallbackArguments::CallAccessorSetter(
// the result of [[Set]] operation according to JavaScript semantics.
AccessorNameSetterCallback f = reinterpret_cast<AccessorNameSetterCallback>(
accessor_info->setter(isolate));
PREPARE_CALLBACK_INFO_ACCESSOR(isolate, f, void, accessor_info,
handle(receiver(), isolate), ACCESSOR_SETTER,
ExceptionContext::kAttributeSet);
PREPARE_CALLBACK_INFO_ACCESSOR(
isolate, f, void, accessor_info, direct_handle(receiver(), isolate),
ACCESSOR_SETTER, ExceptionContext::kAttributeSet);
f(v8::Utils::ToLocal(name), v8::Utils::ToLocal(value), callback_info);
// Historically, in case of v8::AccessorNameSetterCallback it wasn't allowed
// to set the result and not setting the result was treated as successful
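
The named-interceptor plumbing above ultimately just relays the embedder's v8::Intercepted verdict. As a point of reference only — this sketch is not part of the patch, and the exact NamedPropertyHandlerConfiguration overload set varies between V8 versions — a minimal embedder-side interceptor using the public API looks roughly like this:

#include "v8.h"

// Hypothetical getter: intercepts only the property name "answer".
v8::Intercepted AnswerGetter(v8::Local<v8::Name> property,
                             const v8::PropertyCallbackInfo<v8::Value>& info) {
  v8::Isolate* isolate = info.GetIsolate();
  v8::Local<v8::String> key =
      v8::String::NewFromUtf8Literal(isolate, "answer");
  if (property->StrictEquals(key)) {
    info.GetReturnValue().Set(42);
    return v8::Intercepted::kYes;  // Handled here; V8 stops the lookup.
  }
  return v8::Intercepted::kNo;  // Not intercepted; normal lookup continues.
}

void InstallAnswerInterceptor(v8::Local<v8::ObjectTemplate> templ) {
  templ->SetHandler(v8::NamedPropertyHandlerConfiguration(AnswerGetter));
}
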

View file

@ -42,17 +42,15 @@ PropertyCallbackArguments::PropertyCallbackArguments(
FunctionCallbackArguments::FunctionCallbackArguments(
Isolate* isolate, Tagged<FunctionTemplateInfo> target,
Tagged<Object> holder, Tagged<HeapObject> new_target, Address* argv,
int argc)
Tagged<HeapObject> new_target, Address* argv, int argc)
: Super(isolate), argv_(argv), argc_(argc) {
slot_at(T::kTargetIndex).store(target);
slot_at(T::kHolderIndex).store(holder);
slot_at(T::kUnusedIndex).store(ReadOnlyRoots(isolate).undefined_value());
slot_at(T::kNewTargetIndex).store(new_target);
slot_at(T::kIsolateIndex)
.store(Tagged<Object>(reinterpret_cast<Address>(isolate)));
slot_at(T::kReturnValueIndex).store(ReadOnlyRoots(isolate).undefined_value());
slot_at(T::kContextIndex).store(isolate->context());
DCHECK(IsHeapObject(*slot_at(T::kHolderIndex)));
DCHECK(IsSmi(*slot_at(T::kIsolateIndex)));
}

View file

@ -104,53 +104,54 @@ class PropertyCallbackArguments final
// Returns the result of [[Get]] operation or throws an exception.
// In case of exception empty handle is returned.
// TODO(ishell, 328490288): stop returning empty handles.
inline Handle<JSAny> CallAccessorGetter(DirectHandle<AccessorInfo> info,
Handle<Name> name);
inline DirectHandle<JSAny> CallAccessorGetter(DirectHandle<AccessorInfo> info,
DirectHandle<Name> name);
// Returns the result of [[Set]] operation or throws an exception.
V8_WARN_UNUSED_RESULT
inline bool CallAccessorSetter(DirectHandle<AccessorInfo> info,
Handle<Name> name, Handle<Object> value);
DirectHandle<Name> name,
DirectHandle<Object> value);
// -------------------------------------------------------------------------
// Named Interceptor Callbacks
// Empty handle means that the request was not intercepted.
// Pending exception handling should be done by the caller.
inline Handle<Object> CallNamedQuery(Handle<InterceptorInfo> interceptor,
Handle<Name> name);
inline Handle<JSAny> CallNamedGetter(Handle<InterceptorInfo> interceptor,
Handle<Name> name);
inline DirectHandle<Object> CallNamedQuery(
DirectHandle<InterceptorInfo> interceptor, DirectHandle<Name> name);
inline DirectHandle<JSAny> CallNamedGetter(
DirectHandle<InterceptorInfo> interceptor, DirectHandle<Name> name);
// Calls Setter/Definer/Deleter callback and returns whether the request
// was intercepted.
// Pending exception handling and interpretation of the result should be
// done by the caller using GetBooleanReturnValue(..).
inline v8::Intercepted CallNamedSetter(
DirectHandle<InterceptorInfo> interceptor, Handle<Name> name,
Handle<Object> value);
DirectHandle<InterceptorInfo> interceptor, DirectHandle<Name> name,
DirectHandle<Object> value);
inline v8::Intercepted CallNamedDefiner(
DirectHandle<InterceptorInfo> interceptor, Handle<Name> name,
DirectHandle<InterceptorInfo> interceptor, DirectHandle<Name> name,
const v8::PropertyDescriptor& desc);
inline v8::Intercepted CallNamedDeleter(
DirectHandle<InterceptorInfo> interceptor, Handle<Name> name);
DirectHandle<InterceptorInfo> interceptor, DirectHandle<Name> name);
// Empty handle means that the request was not intercepted.
// Pending exception handling should be done by the caller.
inline Handle<JSAny> CallNamedDescriptor(Handle<InterceptorInfo> interceptor,
Handle<Name> name);
inline Handle<JSAny> CallNamedDescriptor(
DirectHandle<InterceptorInfo> interceptor, DirectHandle<Name> name);
// Returns JSArray-like object with property names or undefined.
inline Handle<JSObjectOrUndefined> CallNamedEnumerator(
Handle<InterceptorInfo> interceptor);
inline DirectHandle<JSObjectOrUndefined> CallNamedEnumerator(
DirectHandle<InterceptorInfo> interceptor);
// -------------------------------------------------------------------------
// Indexed Interceptor Callbacks
// Empty handle means that the request was not intercepted.
// Pending exception handling should be done by the caller.
inline Handle<Object> CallIndexedQuery(Handle<InterceptorInfo> interceptor,
uint32_t index);
inline Handle<JSAny> CallIndexedGetter(Handle<InterceptorInfo> interceptor,
uint32_t index);
inline DirectHandle<Object> CallIndexedQuery(
DirectHandle<InterceptorInfo> interceptor, uint32_t index);
inline DirectHandle<JSAny> CallIndexedGetter(
DirectHandle<InterceptorInfo> interceptor, uint32_t index);
// Calls Setter/Definer/Deleter callback and returns whether the request
// was intercepted.
@ -158,20 +159,20 @@ class PropertyCallbackArguments final
// done by the caller using GetBooleanReturnValue(..).
inline v8::Intercepted CallIndexedSetter(
DirectHandle<InterceptorInfo> interceptor, uint32_t index,
Handle<Object> value);
DirectHandle<Object> value);
inline v8::Intercepted CallIndexedDefiner(
DirectHandle<InterceptorInfo> interceptor, uint32_t index,
const v8::PropertyDescriptor& desc);
inline v8::Intercepted CallIndexedDeleter(Handle<InterceptorInfo> interceptor,
uint32_t index);
inline v8::Intercepted CallIndexedDeleter(
DirectHandle<InterceptorInfo> interceptor, uint32_t index);
// Empty handle means that the request was not intercepted.
// Pending exception handling should be done by the caller.
inline Handle<JSAny> CallIndexedDescriptor(
Handle<InterceptorInfo> interceptor, uint32_t index);
DirectHandle<InterceptorInfo> interceptor, uint32_t index);
// Returns JSArray-like object with property names or undefined.
inline Handle<JSObjectOrUndefined> CallIndexedEnumerator(
Handle<InterceptorInfo> interceptor);
inline DirectHandle<JSObjectOrUndefined> CallIndexedEnumerator(
DirectHandle<InterceptorInfo> interceptor);
// Accept potential JavaScript side effects that might occur during life
// time of this object.
@ -232,8 +233,8 @@ class PropertyCallbackArguments final
private:
// Returns JSArray-like object with property names or undefined.
inline Handle<JSObjectOrUndefined> CallPropertyEnumerator(
Handle<InterceptorInfo> interceptor);
inline DirectHandle<JSObjectOrUndefined> CallPropertyEnumerator(
DirectHandle<InterceptorInfo> interceptor);
inline Tagged<JSObject> holder() const;
inline Tagged<Object> receiver() const;
@ -258,13 +259,13 @@ class FunctionCallbackArguments
static constexpr int kArgsLength = T::kArgsLength;
static constexpr int kArgsLengthWithReceiver = T::kArgsLengthWithReceiver;
static constexpr int kHolderIndex = T::kHolderIndex;
static constexpr int kUnusedIndex = T::kUnusedIndex;
static constexpr int kIsolateIndex = T::kIsolateIndex;
static constexpr int kContextIndex = T::kContextIndex;
static constexpr int kTargetIndex = T::kTargetIndex;
static constexpr int kNewTargetIndex = T::kNewTargetIndex;
static_assert(T::kThisValuesIndex == BuiltinArguments::kReceiverArgsOffset);
static_assert(T::kThisValuesIndex == BuiltinArguments::kReceiverArgsIndex);
static constexpr int kSize = T::kSize;
static constexpr int kImplicitArgsOffset = T::kImplicitArgsOffset;
@ -279,7 +280,6 @@ class FunctionCallbackArguments
FunctionCallbackArguments(Isolate* isolate,
Tagged<FunctionTemplateInfo> target,
Tagged<Object> holder,
Tagged<HeapObject> new_target, Address* argv,
int argc);
@ -291,8 +291,8 @@ class FunctionCallbackArguments
* and used if it's been set to anything inside the callback.
* New style callbacks always use the return value.
*/
inline Handle<Object> CallOrConstruct(Tagged<FunctionTemplateInfo> function,
bool is_construct);
inline DirectHandle<Object> CallOrConstruct(
Tagged<FunctionTemplateInfo> function, bool is_construct);
// Unofficial way of getting target FunctionTemplateInfo from
// v8::FunctionCallbackInfo<T>.
@ -302,14 +302,12 @@ class FunctionCallbackArguments
}
private:
inline Tagged<JSReceiver> holder() const;
Address* argv_;
int const argc_;
};
static_assert(BuiltinArguments::kNumExtraArgs ==
BuiltinExitFrameConstants::kNumExtraArgsWithoutReceiver);
BuiltinExitFrameConstants::kNumExtraArgs);
static_assert(BuiltinArguments::kNumExtraArgsWithReceiver ==
BuiltinExitFrameConstants::kNumExtraArgsWithReceiver);
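
Most of the churn above swaps Handle<T> parameters and results for DirectHandle<T>: an indirect handle points at a slot owned by a handle scope, while a direct handle carries the object pointer itself and relies on conservative stack scanning to keep it alive. A simplified stand-alone mock of the distinction — illustrative only, not V8's actual handle implementation:

#include <cstdint>

using Address = uintptr_t;

// Indirect handle: holds the location of a handle-scope slot, so every
// access goes through one extra load.
template <typename T>
class IndirectHandleMock {
 public:
  explicit IndirectHandleMock(Address* slot) : slot_(slot) {}
  Address address() const { return *slot_; }
 private:
  Address* slot_;
};

// Direct handle: holds the object address itself; no slot indirection.
template <typename T>
class DirectHandleMock {
 public:
  explicit DirectHandleMock(Address object) : object_(object) {}
  Address address() const { return object_; }
 private:
  Address object_;
};
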

View file

@ -5,8 +5,10 @@
#ifndef V8_API_API_INL_H_
#define V8_API_API_INL_H_
#include "include/v8-fast-api-calls.h"
#include "src/api/api.h"
// Include the non-inl header before the rest of the headers.
#include "include/v8-fast-api-calls.h"
#include "src/common/assert-scope.h"
#include "src/execution/microtask-queue.h"
#include "src/flags/flags.h"
@ -36,19 +38,19 @@ inline v8::internal::Address ToCData(
}
template <internal::ExternalPointerTag tag, typename T>
inline v8::internal::Handle<i::UnionOf<i::Smi, i::Foreign>> FromCData(
inline v8::internal::DirectHandle<i::UnionOf<i::Smi, i::Foreign>> FromCData(
v8::internal::Isolate* isolate, T obj) {
static_assert(sizeof(T) == sizeof(v8::internal::Address));
if (obj == nullptr) return handle(v8::internal::Smi::zero(), isolate);
if (obj == nullptr) return direct_handle(v8::internal::Smi::zero(), isolate);
return isolate->factory()->NewForeign<tag>(
reinterpret_cast<v8::internal::Address>(obj));
}
template <internal::ExternalPointerTag tag>
inline v8::internal::Handle<i::UnionOf<i::Smi, i::Foreign>> FromCData(
inline v8::internal::DirectHandle<i::UnionOf<i::Smi, i::Foreign>> FromCData(
v8::internal::Isolate* isolate, v8::internal::Address obj) {
if (obj == v8::internal::kNullAddress) {
return handle(v8::internal::Smi::zero(), isolate);
return direct_handle(v8::internal::Smi::zero(), isolate);
}
return isolate->factory()->NewForeign<tag>(obj);
}
@ -60,14 +62,16 @@ inline Local<To> Utils::Convert(v8::internal::DirectHandle<From> obj) {
if (obj.is_null()) return Local<To>();
return Local<To>::FromAddress(obj.address());
#else
return Local<To>::FromSlot(obj.location());
// This simply uses the location of the indirect handle wrapped inside a
// "fake" direct handle.
return Local<To>::FromSlot(indirect_handle(obj).location());
#endif
}
// Implementations of ToLocal
#define MAKE_TO_LOCAL(Name) \
template <template <typename T> typename HandleType, typename T, typename> \
template <template <typename> typename HandleType, typename T, typename> \
inline auto Utils::Name(HandleType<T> obj) { \
return Utils::Name##_helper(v8::internal::DirectHandle<T>(obj)); \
}
@ -121,7 +125,7 @@ TYPED_ARRAYS(MAKE_TO_LOCAL_TYPED_ARRAY)
DCHECK(v8::internal::ValueHelper::IsEmpty(that) || \
Is##To(v8::internal::Tagged<v8::internal::Object>( \
v8::internal::ValueHelper::ValueAsAddress(that)))); \
return v8::internal::DirectHandle<v8::internal::To>( \
return v8::internal::DirectHandle<v8::internal::To>::FromAddress( \
v8::internal::ValueHelper::ValueAsAddress(that)); \
} \
\
@ -289,6 +293,11 @@ bool CopyAndConvertArrayToCppBuffer(Local<Array> src, T* dst,
"array");
uint32_t length = src->Length();
if (length == 0) {
// Early return here to avoid a cast error below, as the EmptyFixedArray
// cannot be cast to a FixedDoubleArray.
return true;
}
if (length > max_length) {
return false;
}
@ -337,9 +346,9 @@ void HandleScopeImplementer::EnterContext(Tagged<NativeContext> context) {
entered_contexts_.push_back(context);
}
Handle<NativeContext> HandleScopeImplementer::LastEnteredContext() {
DirectHandle<NativeContext> HandleScopeImplementer::LastEnteredContext() {
if (entered_contexts_.empty()) return {};
return handle(entered_contexts_.back(), isolate_);
return direct_handle(entered_contexts_.back(), isolate_);
}
} // namespace internal

View file

@ -39,46 +39,44 @@ class V8_NODISCARD InvokeScope {
};
MaybeHandle<JSObject> InstantiateObject(Isolate* isolate,
Handle<ObjectTemplateInfo> data,
Handle<JSReceiver> new_target,
DirectHandle<ObjectTemplateInfo> data,
DirectHandle<JSReceiver> new_target,
bool is_prototype);
MaybeHandle<JSFunction> InstantiateFunction(
Isolate* isolate, Handle<NativeContext> native_context,
Handle<FunctionTemplateInfo> data,
MaybeHandle<Name> maybe_name = MaybeHandle<Name>());
Isolate* isolate, DirectHandle<NativeContext> native_context,
DirectHandle<FunctionTemplateInfo> data,
MaybeDirectHandle<Name> maybe_name = {});
MaybeHandle<JSFunction> InstantiateFunction(
Isolate* isolate, Handle<FunctionTemplateInfo> data,
MaybeHandle<Name> maybe_name = MaybeHandle<Name>()) {
Isolate* isolate, DirectHandle<FunctionTemplateInfo> data,
MaybeDirectHandle<Name> maybe_name = {}) {
return InstantiateFunction(isolate, isolate->native_context(), data,
maybe_name);
}
MaybeHandle<Object> Instantiate(
Isolate* isolate, Handle<Object> data,
MaybeHandle<Name> maybe_name = MaybeHandle<Name>()) {
MaybeDirectHandle<Object> Instantiate(Isolate* isolate,
DirectHandle<Object> data,
MaybeDirectHandle<Name> maybe_name = {}) {
if (IsFunctionTemplateInfo(*data)) {
return InstantiateFunction(isolate, Cast<FunctionTemplateInfo>(data),
maybe_name);
} else if (IsObjectTemplateInfo(*data)) {
return InstantiateObject(isolate, Cast<ObjectTemplateInfo>(data),
Handle<JSReceiver>(), false);
return InstantiateObject(isolate, Cast<ObjectTemplateInfo>(data), {},
false);
} else {
return data;
}
}
MaybeHandle<Object> DefineAccessorProperty(Isolate* isolate,
Handle<JSObject> object,
Handle<Name> name,
Handle<Object> getter,
Handle<Object> setter,
MaybeDirectHandle<Object> DefineAccessorProperty(
Isolate* isolate, DirectHandle<JSObject> object, DirectHandle<Name> name,
DirectHandle<Object> getter, DirectHandle<Object> setter,
PropertyAttributes attributes) {
DCHECK(!IsFunctionTemplateInfo(*getter) ||
Cast<FunctionTemplateInfo>(*getter)->should_cache());
DCHECK(!IsFunctionTemplateInfo(*setter) ||
Cast<FunctionTemplateInfo>(*setter)->should_cache());
DCHECK_IMPLIES(IsFunctionTemplateInfo(*getter),
Cast<FunctionTemplateInfo>(*getter)->is_cacheable());
DCHECK_IMPLIES(IsFunctionTemplateInfo(*setter),
Cast<FunctionTemplateInfo>(*setter)->is_cacheable());
if (IsFunctionTemplateInfo(*getter) &&
Cast<FunctionTemplateInfo>(*getter)->BreakAtEntry(isolate)) {
ASSIGN_RETURN_ON_EXCEPTION(
@ -100,12 +98,12 @@ MaybeHandle<Object> DefineAccessorProperty(Isolate* isolate,
return object;
}
MaybeHandle<Object> DefineDataProperty(Isolate* isolate,
Handle<JSObject> object,
Handle<Name> name,
Handle<Object> prop_data,
MaybeDirectHandle<Object> DefineDataProperty(Isolate* isolate,
DirectHandle<JSObject> object,
DirectHandle<Name> name,
DirectHandle<Object> prop_data,
PropertyAttributes attributes) {
Handle<Object> value;
DirectHandle<Object> value;
ASSIGN_RETURN_ON_EXCEPTION(isolate, value,
Instantiate(isolate, prop_data, name));
@ -129,7 +127,7 @@ MaybeHandle<Object> DefineDataProperty(Isolate* isolate,
}
void DisableAccessChecks(Isolate* isolate, DirectHandle<JSObject> object) {
Handle<Map> old_map(object->map(), isolate);
DirectHandle<Map> old_map(object->map(), isolate);
// Copy map so it won't interfere constructor's initial map.
DirectHandle<Map> new_map =
Map::Copy(isolate, old_map, "DisableAccessChecks");
@ -138,7 +136,7 @@ void DisableAccessChecks(Isolate* isolate, DirectHandle<JSObject> object) {
}
void EnableAccessChecks(Isolate* isolate, DirectHandle<JSObject> object) {
Handle<Map> old_map(object->map(), isolate);
DirectHandle<Map> old_map(object->map(), isolate);
// Copy map so it won't interfere constructor's initial map.
DirectHandle<Map> new_map = Map::Copy(isolate, old_map, "EnableAccessChecks");
new_map->set_is_access_check_needed(true);
@ -148,7 +146,7 @@ void EnableAccessChecks(Isolate* isolate, DirectHandle<JSObject> object) {
class V8_NODISCARD AccessCheckDisableScope {
public:
AccessCheckDisableScope(Isolate* isolate, Handle<JSObject> obj)
AccessCheckDisableScope(Isolate* isolate, DirectHandle<JSObject> obj)
: isolate_(isolate),
disabled_(obj->map()->is_access_check_needed()),
obj_(obj) {
@ -165,11 +163,11 @@ class V8_NODISCARD AccessCheckDisableScope {
private:
Isolate* isolate_;
const bool disabled_;
Handle<JSObject> obj_;
DirectHandle<JSObject> obj_;
};
Tagged<Object> GetIntrinsic(Isolate* isolate, v8::Intrinsic intrinsic) {
Handle<Context> native_context = isolate->native_context();
DirectHandle<Context> native_context = isolate->native_context();
DCHECK(!native_context.is_null());
switch (intrinsic) {
#define GET_INTRINSIC_VALUE(name, iname) \
@ -183,7 +181,7 @@ Tagged<Object> GetIntrinsic(Isolate* isolate, v8::Intrinsic intrinsic) {
template <typename TemplateInfoT>
MaybeHandle<JSObject> ConfigureInstance(Isolate* isolate, Handle<JSObject> obj,
Handle<TemplateInfoT> data) {
DirectHandle<TemplateInfoT> data) {
RCS_SCOPE(isolate, RuntimeCallCounterId::kConfigureInstance);
HandleScope scope(isolate);
// Disable access checks while instantiating the object.
@ -203,26 +201,27 @@ MaybeHandle<JSObject> ConfigureInstance(Isolate* isolate, Handle<JSObject> obj,
if (max_number_of_properties > 0) {
int valid_descriptors = 0;
// Use a temporary FixedArray to accumulate unique accessors.
Handle<FixedArray> array =
DirectHandle<FixedArray> array =
isolate->factory()->NewFixedArray(max_number_of_properties);
// TODO(leszeks): Avoid creating unnecessary handles for cases where we
// don't need to append anything.
for (Handle<TemplateInfoT> temp(*data, isolate); !(*temp).is_null();
temp = handle(temp->GetParent(isolate), isolate)) {
for (DirectHandle<TemplateInfoT> temp(*data, isolate); !(*temp).is_null();
temp = direct_handle(temp->GetParent(isolate), isolate)) {
// Accumulate accessors.
Tagged<Object> maybe_properties = temp->property_accessors();
if (!IsUndefined(maybe_properties, isolate)) {
valid_descriptors = AccessorInfo::AppendUnique(
isolate, handle(maybe_properties, isolate), array,
isolate, direct_handle(maybe_properties, isolate), array,
valid_descriptors);
}
}
// Install accumulated accessors.
for (int i = 0; i < valid_descriptors; i++) {
Handle<AccessorInfo> accessor(Cast<AccessorInfo>(array->get(i)), isolate);
Handle<Name> name(Cast<Name>(accessor->name()), isolate);
DirectHandle<AccessorInfo> accessor(Cast<AccessorInfo>(array->get(i)),
isolate);
DirectHandle<Name> name(Cast<Name>(accessor->name()), isolate);
JSObject::SetAccessor(obj, name, accessor,
accessor->initial_property_attributes())
.Assert();
@ -237,7 +236,7 @@ MaybeHandle<JSObject> ConfigureInstance(Isolate* isolate, Handle<JSObject> obj,
int i = 0;
for (int c = 0; c < data->number_of_properties(); c++) {
auto name = handle(Cast<Name>(properties->get(i++)), isolate);
auto name = direct_handle(Cast<Name>(properties->get(i++)), isolate);
Tagged<Object> bit = properties->get(i++);
if (IsSmi(bit)) {
PropertyDetails details(Cast<Smi>(bit));
@ -249,8 +248,8 @@ MaybeHandle<JSObject> ConfigureInstance(Isolate* isolate, Handle<JSObject> obj,
RETURN_ON_EXCEPTION(isolate, DefineDataProperty(isolate, obj, name,
prop_data, attributes));
} else {
auto getter = handle(properties->get(i++), isolate);
auto setter = handle(properties->get(i++), isolate);
auto getter = direct_handle(properties->get(i++), isolate);
auto setter = direct_handle(properties->get(i++), isolate);
RETURN_ON_EXCEPTION(
isolate, DefineAccessorProperty(isolate, obj, name, getter, setter,
attributes));
@ -286,12 +285,12 @@ bool IsSimpleInstantiation(Isolate* isolate, Tagged<ObjectTemplateInfo> info,
}
MaybeHandle<JSObject> InstantiateObject(Isolate* isolate,
Handle<ObjectTemplateInfo> info,
Handle<JSReceiver> new_target,
DirectHandle<ObjectTemplateInfo> info,
DirectHandle<JSReceiver> new_target,
bool is_prototype) {
RCS_SCOPE(isolate, RuntimeCallCounterId::kInstantiateObject);
Handle<JSFunction> constructor;
bool should_cache = info->should_cache();
DirectHandle<JSFunction> constructor;
bool should_cache = info->is_cacheable();
if (!new_target.is_null()) {
if (IsSimpleInstantiation(isolate, *info, *new_target)) {
constructor = Cast<JSFunction>(new_target);
@ -302,9 +301,9 @@ MaybeHandle<JSObject> InstantiateObject(Isolate* isolate,
}
// Fast path.
Handle<JSObject> result;
if (should_cache && info->is_cached()) {
if (should_cache) {
if (TemplateInfo::ProbeInstantiationsCache<JSObject>(
isolate, isolate->native_context(), info->serial_number(),
isolate, isolate->native_context(), info,
TemplateInfo::CachingMode::kLimited)
.ToHandle(&result)) {
return isolate->factory()->CopyJSObject(result);
@ -318,9 +317,9 @@ MaybeHandle<JSObject> InstantiateObject(Isolate* isolate,
} else {
// Enter a new scope. Recursion could otherwise create a lot of handles.
HandleScope scope(isolate);
Handle<FunctionTemplateInfo> cons_templ(
DirectHandle<FunctionTemplateInfo> cons_templ(
Cast<FunctionTemplateInfo>(maybe_constructor_info), isolate);
Handle<JSFunction> tmp_constructor;
DirectHandle<JSFunction> tmp_constructor;
ASSIGN_RETURN_ON_EXCEPTION(isolate, tmp_constructor,
InstantiateFunction(isolate, cons_templ));
constructor = scope.CloseAndEscape(tmp_constructor);
@ -337,15 +336,14 @@ MaybeHandle<JSObject> InstantiateObject(Isolate* isolate,
Handle<JSObject> object;
ASSIGN_RETURN_ON_EXCEPTION(
isolate, object,
JSObject::New(constructor, new_target, Handle<AllocationSite>::null(),
new_js_object_type));
JSObject::New(constructor, new_target, {}, new_js_object_type));
if (is_prototype) JSObject::OptimizeAsPrototype(object);
ASSIGN_RETURN_ON_EXCEPTION(isolate, result,
ConfigureInstance(isolate, object, info));
if (info->immutable_proto()) {
JSObject::SetImmutableProto(object);
JSObject::SetImmutableProto(isolate, object);
}
if (!is_prototype) {
// Keep prototypes in slow-mode. Let them be lazily turned fast later on.
@ -353,7 +351,7 @@ MaybeHandle<JSObject> InstantiateObject(Isolate* isolate,
JSObject::MigrateSlowToFast(result, 0, "ApiNatives::InstantiateObject");
// Don't cache prototypes.
if (should_cache) {
TemplateInfo::CacheTemplateInstantiation<JSObject, ObjectTemplateInfo>(
TemplateInfo::CacheTemplateInstantiation(
isolate, isolate->native_context(), info,
TemplateInfo::CachingMode::kLimited, result);
result = isolate->factory()->CopyJSObject(result);
@ -364,11 +362,11 @@ MaybeHandle<JSObject> InstantiateObject(Isolate* isolate,
}
namespace {
MaybeHandle<Object> GetInstancePrototype(Isolate* isolate,
Handle<Object> function_template) {
MaybeDirectHandle<Object> GetInstancePrototype(
Isolate* isolate, DirectHandle<Object> function_template) {
// Enter a new scope. Recursion could otherwise create a lot of handles.
HandleScope scope(isolate);
Handle<JSFunction> parent_instance;
DirectHandle<JSFunction> parent_instance;
ASSIGN_RETURN_ON_EXCEPTION(
isolate, parent_instance,
InstantiateFunction(isolate,
@ -384,28 +382,29 @@ MaybeHandle<Object> GetInstancePrototype(Isolate* isolate,
} // namespace
MaybeHandle<JSFunction> InstantiateFunction(
Isolate* isolate, Handle<NativeContext> native_context,
Handle<FunctionTemplateInfo> data, MaybeHandle<Name> maybe_name) {
Isolate* isolate, DirectHandle<NativeContext> native_context,
DirectHandle<FunctionTemplateInfo> info,
MaybeDirectHandle<Name> maybe_name) {
RCS_SCOPE(isolate, RuntimeCallCounterId::kInstantiateFunction);
bool should_cache = data->should_cache();
if (should_cache && data->is_cached()) {
bool should_cache = info->is_cacheable();
if (should_cache) {
Handle<JSObject> result;
if (TemplateInfo::ProbeInstantiationsCache<JSObject>(
isolate, native_context, data->serial_number(),
isolate, native_context, info,
TemplateInfo::CachingMode::kUnlimited)
.ToHandle(&result)) {
return Cast<JSFunction>(result);
}
}
Handle<Object> prototype;
if (!data->remove_prototype()) {
Handle<Object> prototype_templ(data->GetPrototypeTemplate(), isolate);
DirectHandle<Object> prototype;
if (!info->remove_prototype()) {
DirectHandle<Object> prototype_templ(info->GetPrototypeTemplate(), isolate);
if (IsUndefined(*prototype_templ, isolate)) {
Handle<Object> protoype_provider_templ(
data->GetPrototypeProviderTemplate(), isolate);
DirectHandle<Object> protoype_provider_templ(
info->GetPrototypeProviderTemplate(), isolate);
if (IsUndefined(*protoype_provider_templ, isolate)) {
prototype = isolate->factory()->NewJSObject(
handle(native_context->object_function(), isolate));
direct_handle(native_context->object_function(), isolate));
} else {
ASSIGN_RETURN_ON_EXCEPTION(
isolate, prototype,
@ -415,99 +414,103 @@ MaybeHandle<JSFunction> InstantiateFunction(
ASSIGN_RETURN_ON_EXCEPTION(
isolate, prototype,
InstantiateObject(isolate, Cast<ObjectTemplateInfo>(prototype_templ),
Handle<JSReceiver>(), true));
DirectHandle<JSReceiver>(), true));
}
Handle<Object> parent(data->GetParentTemplate(), isolate);
DirectHandle<Object> parent(info->GetParentTemplate(), isolate);
if (!IsUndefined(*parent, isolate)) {
Handle<Object> parent_prototype;
DirectHandle<Object> parent_prototype;
ASSIGN_RETURN_ON_EXCEPTION(isolate, parent_prototype,
GetInstancePrototype(isolate, parent));
CHECK(IsHeapObject(*parent_prototype));
DirectHandle<JSPrototype> checked_parent_prototype;
CHECK(TryCast(parent_prototype, &checked_parent_prototype));
JSObject::ForceSetPrototype(isolate, Cast<JSObject>(prototype),
Cast<HeapObject>(parent_prototype));
checked_parent_prototype);
}
}
InstanceType function_type = JS_SPECIAL_API_OBJECT_TYPE;
if (!data->needs_access_check() &&
IsUndefined(data->GetNamedPropertyHandler(), isolate) &&
IsUndefined(data->GetIndexedPropertyHandler(), isolate)) {
function_type = v8_flags.embedder_instance_types ? data->GetInstanceType()
if (!info->needs_access_check() &&
IsUndefined(info->GetNamedPropertyHandler(), isolate) &&
IsUndefined(info->GetIndexedPropertyHandler(), isolate)) {
function_type = v8_flags.experimental_embedder_instance_types
? info->GetInstanceType()
: JS_API_OBJECT_TYPE;
DCHECK(InstanceTypeChecker::IsJSApiObject(function_type));
}
Handle<JSFunction> function = ApiNatives::CreateApiFunction(
isolate, native_context, data, prototype, function_type, maybe_name);
isolate, native_context, info, prototype, function_type, maybe_name);
if (should_cache) {
// Cache the function.
TemplateInfo::CacheTemplateInstantiation<JSObject, FunctionTemplateInfo>(
isolate, native_context, data, TemplateInfo::CachingMode::kUnlimited,
TemplateInfo::CacheTemplateInstantiation(
isolate, native_context, info, TemplateInfo::CachingMode::kUnlimited,
function);
}
MaybeHandle<JSObject> result = ConfigureInstance(isolate, function, data);
MaybeDirectHandle<JSObject> result =
ConfigureInstance(isolate, function, info);
if (result.is_null()) {
// Uncache on error.
TemplateInfo::UncacheTemplateInstantiation<FunctionTemplateInfo>(
isolate, native_context, data, TemplateInfo::CachingMode::kUnlimited);
return MaybeHandle<JSFunction>();
TemplateInfo::UncacheTemplateInstantiation(
isolate, native_context, info, TemplateInfo::CachingMode::kUnlimited);
return {};
}
data->set_published(true);
info->set_published(true);
return function;
}
void AddPropertyToPropertyList(Isolate* isolate,
DirectHandle<TemplateInfo> templ, int length,
Handle<Object>* data) {
Tagged<Object> maybe_list = templ->property_list();
Handle<ArrayList> list;
DirectHandle<TemplateInfoWithProperties> info,
base::Vector<DirectHandle<Object>> data) {
Tagged<Object> maybe_list = info->property_list();
DirectHandle<ArrayList> list;
if (IsUndefined(maybe_list, isolate)) {
list = ArrayList::New(isolate, length, AllocationType::kOld);
list = ArrayList::New(isolate, static_cast<int>(data.size()),
AllocationType::kOld);
} else {
list = handle(Cast<ArrayList>(maybe_list), isolate);
list = direct_handle(Cast<ArrayList>(maybe_list), isolate);
}
templ->set_number_of_properties(templ->number_of_properties() + 1);
for (int i = 0; i < length; i++) {
DirectHandle<Object> value =
data[i].is_null() ? Cast<Object>(isolate->factory()->undefined_value())
: data[i];
info->set_number_of_properties(info->number_of_properties() + 1);
for (DirectHandle<Object> value : data) {
if (value.is_null())
value = Cast<Object>(isolate->factory()->undefined_value());
list = ArrayList::Add(isolate, list, value);
}
templ->set_property_list(*list);
info->set_property_list(*list);
}
} // namespace
// static
i::Handle<i::FunctionTemplateInfo>
DirectHandle<FunctionTemplateInfo>
ApiNatives::CreateAccessorFunctionTemplateInfo(
i::Isolate* i_isolate, FunctionCallback callback, int length,
Isolate* i_isolate, FunctionCallback callback, int length,
SideEffectType side_effect_type) {
// TODO(v8:5962): move FunctionTemplateNew() from api.cc here.
auto isolate = reinterpret_cast<v8::Isolate*>(i_isolate);
Local<FunctionTemplate> func_template = FunctionTemplate::New(
isolate, callback, v8::Local<Value>{}, v8::Local<v8::Signature>{}, length,
v8::ConstructorBehavior::kThrow, side_effect_type);
return Utils::OpenHandle(*func_template);
return Utils::OpenDirectHandle(*func_template);
}
MaybeHandle<JSFunction> ApiNatives::InstantiateFunction(
Isolate* isolate, Handle<NativeContext> native_context,
Handle<FunctionTemplateInfo> data, MaybeHandle<Name> maybe_name) {
Isolate* isolate, DirectHandle<NativeContext> native_context,
DirectHandle<FunctionTemplateInfo> data,
MaybeDirectHandle<Name> maybe_name) {
InvokeScope invoke_scope(isolate);
return ::v8::internal::InstantiateFunction(isolate, native_context, data,
maybe_name);
}
MaybeHandle<JSFunction> ApiNatives::InstantiateFunction(
Isolate* isolate, Handle<FunctionTemplateInfo> data,
MaybeHandle<Name> maybe_name) {
Isolate* isolate, DirectHandle<FunctionTemplateInfo> data,
MaybeDirectHandle<Name> maybe_name) {
InvokeScope invoke_scope(isolate);
return ::v8::internal::InstantiateFunction(isolate, data, maybe_name);
}
MaybeHandle<JSObject> ApiNatives::InstantiateObject(
Isolate* isolate, Handle<ObjectTemplateInfo> data,
Handle<JSReceiver> new_target) {
Isolate* isolate, DirectHandle<ObjectTemplateInfo> data,
DirectHandle<JSReceiver> new_target) {
InvokeScope invoke_scope(isolate);
return ::v8::internal::InstantiateObject(isolate, data, new_target, false);
}
@ -538,64 +541,64 @@ MaybeHandle<JSObject> ApiNatives::InstantiateRemoteObject(
}
void ApiNatives::AddDataProperty(Isolate* isolate,
DirectHandle<TemplateInfo> info,
Handle<Name> name, Handle<Object> value,
DirectHandle<TemplateInfoWithProperties> info,
DirectHandle<Name> name,
DirectHandle<Object> value,
PropertyAttributes attributes) {
PropertyDetails details(PropertyKind::kData, attributes,
PropertyConstness::kMutable);
auto details_handle = handle(details.AsSmi(), isolate);
Handle<Object> data[] = {name, details_handle, value};
AddPropertyToPropertyList(isolate, info, arraysize(data), data);
DirectHandle<Object> data[] = {name, direct_handle(details.AsSmi(), isolate),
value};
AddPropertyToPropertyList(isolate, info, base::VectorOf(data));
}
void ApiNatives::AddDataProperty(Isolate* isolate,
DirectHandle<TemplateInfo> info,
Handle<Name> name, v8::Intrinsic intrinsic,
DirectHandle<TemplateInfoWithProperties> info,
DirectHandle<Name> name,
v8::Intrinsic intrinsic,
PropertyAttributes attributes) {
auto value = handle(Smi::FromInt(intrinsic), isolate);
auto value = direct_handle(Smi::FromInt(intrinsic), isolate);
auto intrinsic_marker = isolate->factory()->true_value();
PropertyDetails details(PropertyKind::kData, attributes,
PropertyConstness::kMutable);
auto details_handle = handle(details.AsSmi(), isolate);
Handle<Object> data[] = {name, intrinsic_marker, details_handle, value};
AddPropertyToPropertyList(isolate, info, arraysize(data), data);
DirectHandle<Object> data[] = {
name, intrinsic_marker, direct_handle(details.AsSmi(), isolate), value};
AddPropertyToPropertyList(isolate, info, base::VectorOf(data));
}
void ApiNatives::AddAccessorProperty(Isolate* isolate,
DirectHandle<TemplateInfo> info,
Handle<Name> name,
Handle<FunctionTemplateInfo> getter,
Handle<FunctionTemplateInfo> setter,
PropertyAttributes attributes) {
void ApiNatives::AddAccessorProperty(
Isolate* isolate, DirectHandle<TemplateInfoWithProperties> info,
DirectHandle<Name> name, DirectHandle<FunctionTemplateInfo> getter,
DirectHandle<FunctionTemplateInfo> setter, PropertyAttributes attributes) {
if (!getter.is_null()) getter->set_published(true);
if (!setter.is_null()) setter->set_published(true);
PropertyDetails details(PropertyKind::kAccessor, attributes,
PropertyConstness::kMutable);
auto details_handle = handle(details.AsSmi(), isolate);
Handle<Object> data[] = {name, details_handle, getter, setter};
AddPropertyToPropertyList(isolate, info, arraysize(data), data);
DirectHandle<Object> data[] = {name, direct_handle(details.AsSmi(), isolate),
getter, setter};
AddPropertyToPropertyList(isolate, info, base::VectorOf(data));
}
void ApiNatives::AddNativeDataProperty(Isolate* isolate,
DirectHandle<TemplateInfo> info,
void ApiNatives::AddNativeDataProperty(
Isolate* isolate, DirectHandle<TemplateInfoWithProperties> info,
DirectHandle<AccessorInfo> property) {
Tagged<Object> maybe_list = info->property_accessors();
Handle<ArrayList> list;
DirectHandle<ArrayList> list;
if (IsUndefined(maybe_list, isolate)) {
list = ArrayList::New(isolate, 1, AllocationType::kOld);
} else {
list = handle(Cast<ArrayList>(maybe_list), isolate);
list = direct_handle(Cast<ArrayList>(maybe_list), isolate);
}
list = ArrayList::Add(isolate, list, property);
info->set_property_accessors(*list);
}
Handle<JSFunction> ApiNatives::CreateApiFunction(
Isolate* isolate, Handle<NativeContext> native_context,
DirectHandle<FunctionTemplateInfo> obj, Handle<Object> prototype,
InstanceType type, MaybeHandle<Name> maybe_name) {
Isolate* isolate, DirectHandle<NativeContext> native_context,
DirectHandle<FunctionTemplateInfo> obj, DirectHandle<Object> prototype,
InstanceType type, MaybeDirectHandle<Name> maybe_name) {
RCS_SCOPE(isolate, RuntimeCallCounterId::kCreateApiFunction);
Handle<SharedFunctionInfo> shared =
DirectHandle<SharedFunctionInfo> shared =
FunctionTemplateInfo::GetOrCreateSharedFunctionInfo(isolate, obj,
maybe_name);
// To simplify things, API functions always have shared name.
@ -617,7 +620,8 @@ Handle<JSFunction> ApiNatives::CreateApiFunction(
DCHECK(result->has_prototype_slot());
if (obj->read_only_prototype()) {
result->set_map(*isolate->sloppy_function_with_readonly_prototype_map());
result->set_map(isolate,
*isolate->sloppy_function_with_readonly_prototype_map());
}
if (IsTheHole(*prototype, isolate)) {
@ -642,7 +646,7 @@ Handle<JSFunction> ApiNatives::CreateApiFunction(
int instance_size = JSObject::GetHeaderSize(type) +
kEmbedderDataSlotSize * embedder_field_count;
Handle<Map> map = isolate->factory()->NewContextfulMap(
DirectHandle<Map> map = isolate->factory()->NewContextfulMap(
native_context, type, instance_size, TERMINAL_FAST_ELEMENTS_KIND);
// Mark as undetectable if needed.
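
AddPropertyToPropertyList above now takes a base::Vector<DirectHandle<Object>> built with base::VectorOf instead of a separate length/pointer pair. The same idea in stand-alone form, using std::span as a stand-in for V8's base::Vector view (not the V8 type itself):

#include <cstddef>
#include <span>

// Old shape: length and pointer travel separately and can drift apart.
int SumAll(std::size_t length, const int* data) {
  int total = 0;
  for (std::size_t i = 0; i < length; ++i) total += data[i];
  return total;
}

// New shape: one view carries both, as base::VectorOf(data) does for the
// DirectHandle<Object> arrays in the hunks above.
int SumAll(std::span<const int> data) {
  int total = 0;
  for (int value : data) total += value;
  return total;
}

// Usage: int values[] = {1, 2, 3}; SumAll(std::span<const int>(values));
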

View file

@ -25,48 +25,51 @@ class ApiNatives {
// A convenient internal wrapper around FunctionTemplate::New() for creating
// getter/setter callback function templates.
static Handle<FunctionTemplateInfo> CreateAccessorFunctionTemplateInfo(
static DirectHandle<FunctionTemplateInfo> CreateAccessorFunctionTemplateInfo(
Isolate* isolate, FunctionCallback callback, int length,
v8::SideEffectType side_effect_type);
V8_WARN_UNUSED_RESULT static MaybeHandle<JSFunction> InstantiateFunction(
Isolate* isolate, Handle<NativeContext> native_context,
Handle<FunctionTemplateInfo> data,
MaybeHandle<Name> maybe_name = MaybeHandle<Name>());
Isolate* isolate, DirectHandle<NativeContext> native_context,
DirectHandle<FunctionTemplateInfo> data,
MaybeDirectHandle<Name> maybe_name = {});
V8_WARN_UNUSED_RESULT static MaybeHandle<JSFunction> InstantiateFunction(
Isolate* isolate, Handle<FunctionTemplateInfo> data,
MaybeHandle<Name> maybe_name = MaybeHandle<Name>());
Isolate* isolate, DirectHandle<FunctionTemplateInfo> data,
MaybeDirectHandle<Name> maybe_name = {});
V8_WARN_UNUSED_RESULT static MaybeHandle<JSObject> InstantiateObject(
Isolate* isolate, Handle<ObjectTemplateInfo> data,
Handle<JSReceiver> new_target = Handle<JSReceiver>());
Isolate* isolate, DirectHandle<ObjectTemplateInfo> data,
DirectHandle<JSReceiver> new_target = {});
V8_WARN_UNUSED_RESULT static MaybeHandle<JSObject> InstantiateRemoteObject(
DirectHandle<ObjectTemplateInfo> data);
static Handle<JSFunction> CreateApiFunction(
Isolate* isolate, Handle<NativeContext> native_context,
DirectHandle<FunctionTemplateInfo> obj, Handle<Object> prototype,
InstanceType type, MaybeHandle<Name> name = MaybeHandle<Name>());
Isolate* isolate, DirectHandle<NativeContext> native_context,
DirectHandle<FunctionTemplateInfo> obj, DirectHandle<Object> prototype,
InstanceType type, MaybeDirectHandle<Name> name = {});
static void AddDataProperty(Isolate* isolate, DirectHandle<TemplateInfo> info,
Handle<Name> name, Handle<Object> value,
static void AddDataProperty(Isolate* isolate,
DirectHandle<TemplateInfoWithProperties> info,
DirectHandle<Name> name,
DirectHandle<Object> value,
PropertyAttributes attributes);
static void AddDataProperty(Isolate* isolate, DirectHandle<TemplateInfo> info,
Handle<Name> name, v8::Intrinsic intrinsic,
static void AddDataProperty(Isolate* isolate,
DirectHandle<TemplateInfoWithProperties> info,
DirectHandle<Name> name, v8::Intrinsic intrinsic,
PropertyAttributes attributes);
static void AddAccessorProperty(Isolate* isolate,
DirectHandle<TemplateInfo> info,
Handle<Name> name,
Handle<FunctionTemplateInfo> getter,
Handle<FunctionTemplateInfo> setter,
DirectHandle<TemplateInfoWithProperties> info,
DirectHandle<Name> name,
DirectHandle<FunctionTemplateInfo> getter,
DirectHandle<FunctionTemplateInfo> setter,
PropertyAttributes attributes);
static void AddNativeDataProperty(Isolate* isolate,
DirectHandle<TemplateInfo> info,
static void AddNativeDataProperty(
Isolate* isolate, DirectHandle<TemplateInfoWithProperties> info,
DirectHandle<AccessorInfo> property);
};

2329
deps/v8/src/api/api.cc vendored

File diff suppressed because it is too large.

56
deps/v8/src/api/api.h vendored
View file

@ -57,12 +57,12 @@ inline v8::internal::Address ToCData(
v8::internal::Tagged<v8::internal::Object> obj);
template <internal::ExternalPointerTag tag, typename T>
inline v8::internal::Handle<
inline v8::internal::DirectHandle<
v8::internal::UnionOf<v8::internal::Smi, v8::internal::Foreign>>
FromCData(v8::internal::Isolate* isolate, T obj);
template <internal::ExternalPointerTag tag>
inline v8::internal::Handle<
inline v8::internal::DirectHandle<
v8::internal::UnionOf<v8::internal::Smi, v8::internal::Foreign>>
FromCData(v8::internal::Isolate* isolate, v8::internal::Address obj);
@ -119,7 +119,7 @@ class RegisteredExtension {
V(SignatureToLocal, FunctionTemplateInfo, Signature) \
V(MessageToLocal, Object, Message) \
V(PromiseToLocal, JSObject, Promise) \
V(StackTraceToLocal, FixedArray, StackTrace) \
V(StackTraceToLocal, StackTraceInfo, StackTrace) \
V(StackFrameToLocal, StackFrameInfo, StackFrame) \
V(NumberToLocal, Object, Number) \
V(IntegerToLocal, Object, Integer) \
@ -131,6 +131,7 @@ class RegisteredExtension {
V(FixedArrayToLocal, FixedArray, FixedArray) \
V(PrimitiveArrayToLocal, FixedArray, PrimitiveArray) \
V(ToLocal, ScriptOrModule, ScriptOrModule) \
IF_WASM(V, ToLocal, WasmMemoryMapDescriptor, WasmMemoryMapDescriptor) \
IF_WASM(V, ToLocal, WasmModuleObject, WasmModuleObject)
#define TO_LOCAL_NAME_LIST(V) \
@ -151,7 +152,7 @@ class RegisteredExtension {
V(PrimitiveArrayToLocal)
#define OPEN_HANDLE_LIST(V) \
V(Template, TemplateInfo) \
V(Template, TemplateInfoWithProperties) \
V(FunctionTemplate, FunctionTemplateInfo) \
V(ObjectTemplate, ObjectTemplateInfo) \
V(DictionaryTemplate, DictionaryTemplateInfo) \
@ -190,7 +191,7 @@ class RegisteredExtension {
V(Message, JSMessageObject) \
V(Context, NativeContext) \
V(External, Object) \
V(StackTrace, FixedArray) \
V(StackTrace, StackTraceInfo) \
V(StackFrame, StackFrameInfo) \
V(Proxy, JSProxy) \
V(debug::GeneratorObject, JSGeneratorObject) \
@ -205,6 +206,7 @@ class RegisteredExtension {
V(ScriptOrModule, ScriptOrModule) \
V(FixedArray, FixedArray) \
V(ModuleRequest, ModuleRequest) \
IF_WASM(V, WasmMemoryMapDescriptor, WasmMemoryMapDescriptor) \
IF_WASM(V, WasmMemoryObject, WasmMemoryObject)
class Utils {
@ -226,7 +228,7 @@ class Utils {
// TODO(42203211): Use C++20 concepts instead of the enable_if trait, when
// they are fully supported in V8.
#define DECLARE_TO_LOCAL(Name) \
template <template <typename T> typename HandleType, typename T, \
template <template <typename> typename HandleType, typename T, \
typename = std::enable_if_t<std::is_convertible_v< \
HandleType<T>, v8::internal::DirectHandle<T>>>> \
static inline auto Name(HandleType<T> obj);
@ -263,7 +265,7 @@ class Utils {
}
template <class T>
static inline v8::internal::Handle<v8::internal::Object> OpenPersistent(
static inline v8::internal::DirectHandle<v8::internal::Object> OpenPersistent(
v8::Persistent<T>* persistent) {
return OpenPersistent(*persistent);
}
@ -291,11 +293,6 @@ class Utils {
#undef DECLARE_TO_LOCAL_PRIVATE
};
template <class T>
inline T* ToApi(v8::internal::Handle<v8::internal::Object> obj) {
return reinterpret_cast<T*>(obj.location());
}
template <class T>
inline v8::Local<T> ToApiHandle(
v8::internal::DirectHandle<v8::internal::Object> obj) {
@ -303,9 +300,9 @@ inline v8::Local<T> ToApiHandle(
}
template <class T>
inline bool ToLocal(v8::internal::MaybeHandle<v8::internal::Object> maybe,
inline bool ToLocal(v8::internal::MaybeDirectHandle<v8::internal::Object> maybe,
Local<T>* local) {
v8::internal::Handle<v8::internal::Object> handle;
v8::internal::DirectHandle<v8::internal::Object> handle;
if (maybe.ToHandle(&handle)) {
*local = Utils::Convert<v8::internal::Object, T>(handle);
return true;
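The ToLocal helper above keeps its bool-returning shape after the switch to MaybeDirectHandle: probe the maybe value with ToHandle, and only write to *local when something was actually there. A reduced sketch of that calling convention with an invented maybe type:

#include <optional>
#include <string>
#include <utility>

// Hypothetical stand-in for a maybe-handle: ToHandle() reports emptiness
// through its return value instead of crashing on access.
template <typename T>
class ToyMaybe {
 public:
  ToyMaybe() = default;
  explicit ToyMaybe(T value) : value_(std::move(value)) {}
  bool ToHandle(T* out) const {
    if (!value_) return false;
    *out = *value_;
    return true;
  }
 private:
  std::optional<T> value_;
};

// Mirrors the pattern in the hunk above: the output parameter is left
// untouched on failure, so callers can simply propagate `false`.
bool ToLocalString(const ToyMaybe<std::string>& maybe, std::string* local) {
  std::string handle;
  if (!maybe.ToHandle(&handle)) return false;
  *local = std::move(handle);
  return true;
}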
@ -345,9 +342,7 @@ class HandleScopeImplementer {
};
explicit HandleScopeImplementer(Isolate* isolate)
: isolate_(isolate),
spare_(nullptr),
last_handle_before_deferred_block_(nullptr) {}
: isolate_(isolate), spare_(nullptr) {}
~HandleScopeImplementer() { DeleteArray(spare_); }
@ -375,7 +370,7 @@ class HandleScopeImplementer {
// Returns the last entered context or an empty handle if no
// contexts have been entered.
inline Handle<NativeContext> LastEnteredContext();
inline DirectHandle<NativeContext> LastEnteredContext();
inline void SaveContext(Tagged<Context> context);
inline Tagged<Context> RestoreContext();
@ -398,7 +393,7 @@ class HandleScopeImplementer {
entered_contexts_.detach();
saved_contexts_.detach();
spare_ = nullptr;
last_handle_before_deferred_block_ = nullptr;
last_handle_before_persistent_block_.reset();
}
void Free() {
@ -416,7 +411,13 @@ class HandleScopeImplementer {
DCHECK(isolate_->thread_local_top()->CallDepthIsZero());
}
void BeginDeferredScope();
void BeginPersistentScope() {
DCHECK(!last_handle_before_persistent_block_.has_value());
last_handle_before_persistent_block_ = isolate()->handle_scope_data()->next;
}
bool HasPersistentScope() const {
return last_handle_before_persistent_block_.has_value();
}
std::unique_ptr<PersistentHandles> DetachPersistent(Address* first_block);
Isolate* isolate_;
@ -428,7 +429,7 @@ class HandleScopeImplementer {
// Used as a stack to keep track of saved contexts.
DetachableVector<Tagged<Context>> saved_contexts_;
Address* spare_;
Address* last_handle_before_deferred_block_;
std::optional<Address*> last_handle_before_persistent_block_;
// This is only used for threading support.
HandleScopeData handle_scope_data_;
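The hunks above replace the raw nullptr sentinel (last_handle_before_deferred_block_) with a std::optional<Address*>, so an open persistent block is tracked by has_value() rather than by comparing against nullptr, and BeginPersistentScope can assert that at most one block is open. A reduced, invented sketch of that bookkeeping:

#include <cassert>
#include <cstdint>
#include <optional>

using Address = std::uintptr_t;  // stand-in for v8::internal::Address

class ToyScopeTracker {
 public:
  void BeginPersistentScope(Address* next_handle) {
    assert(!marker_.has_value());  // at most one persistent block at a time
    marker_ = next_handle;         // a nullptr marker still counts as "open"
  }
  bool HasPersistentScope() const { return marker_.has_value(); }
  Address* DetachPersistent() {
    assert(marker_.has_value());
    Address* first = *marker_;
    marker_.reset();  // mirrors the .reset() in the cleanup hunk above
    return first;
  }
 private:
  std::optional<Address*> marker_;  // empty == no open persistent block
};

One plausible motivation for the change is that an empty optional is unambiguous, whereas a nullptr Address* could in principle collide with a legitimate block boundary.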
@ -481,20 +482,20 @@ void HandleScopeImplementer::DeleteExtensions(internal::Address* prev_limit) {
internal::Address* block_limit = block_start + kHandleBlockSize;
// SealHandleScope may make the prev_limit to point inside the block.
// Cast possibly-unrelated pointers to plain Addres before comparing them
// Cast possibly-unrelated pointers to plain Address before comparing them
// to avoid undefined behavior.
if (reinterpret_cast<Address>(block_start) <
reinterpret_cast<Address>(prev_limit) &&
reinterpret_cast<Address>(prev_limit) <=
reinterpret_cast<Address>(block_limit)) {
#ifdef ENABLE_HANDLE_ZAPPING
#ifdef ENABLE_LOCAL_HANDLE_ZAPPING
internal::HandleScope::ZapRange(prev_limit, block_limit);
#endif
break;
}
blocks_.pop_back();
#ifdef ENABLE_HANDLE_ZAPPING
#ifdef ENABLE_LOCAL_HANDLE_ZAPPING
internal::HandleScope::ZapRange(block_start, block_limit);
#endif
if (spare_ != nullptr) {
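The block-release loop above also picks up the renamed zapping guard: ranges of freed local-handle slots are overwritten only when ENABLE_LOCAL_HANDLE_ZAPPING is defined. A self-contained sketch of that debug technique, with an invented sentinel value (V8 has its own zap constants):

#include <cstdint>

using Address = std::uintptr_t;

// Invented sentinel; any recognizable poison pattern works for the sketch.
constexpr Address kToyZapValue = static_cast<Address>(0xdeadbeefdeadbeefULL);

// Overwrite a released range of handle slots so a stale read shows up
// unmistakably in a crash dump instead of silently reusing old data.
inline void ToyZapRange(Address* start, Address* end) {
  for (Address* p = start; p < end; ++p) *p = kToyZapValue;
}

inline void ReleaseBlock(Address* block_start, Address* block_limit) {
#ifdef ENABLE_LOCAL_HANDLE_ZAPPING
  // Guarded at the call site, as in the hunk above: the extra writes are
  // debug-only overhead, so release builds skip them entirely.
  ToyZapRange(block_start, block_limit);
#endif
  // ... hand the block back to the spare/free list ...
}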
@ -528,9 +529,8 @@ void InvokeFunctionCallbackOptimized(
const v8::FunctionCallbackInfo<v8::Value>& info);
void InvokeFinalizationRegistryCleanupFromTask(
Handle<NativeContext> native_context,
Handle<JSFinalizationRegistry> finalization_registry,
Handle<Object> callback);
DirectHandle<NativeContext> native_context,
DirectHandle<JSFinalizationRegistry> finalization_registry);
template <typename T>
EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE)
@ -544,7 +544,9 @@ template <typename T>
EXPORT_TEMPLATE_DECLARE(V8_EXPORT_PRIVATE)
bool ValidateCallbackInfo(const PropertyCallbackInfo<T>& info);
#ifdef ENABLE_SLOW_DCHECKS
DECLARE_CONTEXTUAL_VARIABLE_WITH_DEFAULT(StackAllocatedCheck, const bool, true);
#endif
} // namespace internal
} // namespace v8
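The last hunk puts the StackAllocatedCheck contextual variable behind ENABLE_SLOW_DCHECKS. As a loose analogue (not the real macro expansion, and with hypothetical names), this is the usual shape of such gating: in slow-DCHECK builds the flag is a real variable, otherwise it folds to a constant so call sites compile identically in both configurations:

#ifdef ENABLE_SLOW_DCHECKS
// Real, mutable flag in checked builds.
inline thread_local bool g_stack_allocated_check = true;
inline bool StackAllocatedCheckEnabled() { return g_stack_allocated_check; }
#else
// Constant-folded away in release builds; callers need no #ifdefs.
constexpr bool StackAllocatedCheckEnabled() { return true; }
#endif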

Some files were not shown because too many files have changed in this diff.