src,test: add V8 API to test the hash seed
PR-URL: https://github.com/nodejs/node/pull/58070
Reviewed-By: Antoine du Hamel <duhamelantoine1995@gmail.com>
Reviewed-By: Darshan Sen <raisinten@gmail.com>
Reviewed-By: Joyee Cheung <joyeec9h3@gmail.com>
Reviewed-By: Rafael Gonzaga <rafael.nunu@hotmail.com>
parent 742c0eb126
commit 20feebb452
5 changed files with 38 additions and 210 deletions
Makefile (8 changed lines)
@@ -654,10 +654,6 @@ test-internet: all ## Run internet tests.
 test-tick-processor: all ## Run tick processor tests.
 	$(PYTHON) tools/test.py $(PARALLEL_ARGS) tick-processor
 
-.PHONY: test-hash-seed
-test-hash-seed: all ## Verify that the hash seed used by V8 for hashing is random.
-	$(NODE) test/pummel/test-hash-seed.js
-
 .PHONY: test-doc
 test-doc: doc-only lint-md ## Build, lint, and verify the docs.
 	@if [ "$(shell $(node_use_openssl_and_icu))" != "true" ]; then \
@@ -751,8 +747,6 @@ test-v8: v8 ## Run the V8 test suite on deps/v8.
 	mjsunit cctest debugger inspector message preparser \
 	$(TAP_V8)
 	$(call convert_to_junit,$(TAP_V8_JSON))
-	$(info Testing hash seed)
-	$(MAKE) test-hash-seed
 
 test-v8-intl: v8 ## Run the v8 test suite, intl tests.
 	export PATH="$(NO_BIN_OVERRIDE_PATH)" && \
@@ -768,7 +762,7 @@ test-v8-benchmarks: v8 ## Run the v8 test suite, benchmarks.
 	$(TAP_V8_BENCHMARKS)
 	$(call convert_to_junit,$(TAP_V8_BENCHMARKS_JSON))
 
-test-v8-updates: ## Run the v8 test suite, updates.
+test-v8-updates: all ## Run the v8 test suite, updates.
 	$(PYTHON) tools/test.py $(PARALLEL_ARGS) --mode=$(BUILDTYPE_LOWER) v8-updates
 
 test-v8-all: test-v8 test-v8-intl test-v8-benchmarks test-v8-updates ## Run the entire V8 test suite, including intl, benchmarks, and updates.

src/node_v8.cc (10 changed lines)
@@ -32,6 +32,7 @@
 namespace node {
 namespace v8_utils {
 using v8::Array;
+using v8::BigInt;
 using v8::CFunction;
 using v8::Context;
 using v8::FunctionCallbackInfo;
@@ -260,6 +261,12 @@ static bool FastIsStringOneByteRepresentation(Local<Value> receiver,
 CFunction fast_is_string_one_byte_representation_(
     CFunction::Make(FastIsStringOneByteRepresentation));
 
+void GetHashSeed(const FunctionCallbackInfo<Value>& args) {
+  Isolate* isolate = args.GetIsolate();
+  uint64_t hash_seed = isolate->GetHashSeed();
+  args.GetReturnValue().Set(BigInt::NewFromUnsigned(isolate, hash_seed));
+}
+
 static const char* GetGCTypeName(v8::GCType gc_type) {
   switch (gc_type) {
     case v8::GCType::kGCTypeScavenge:
@@ -694,6 +701,8 @@ void Initialize(Local<Object> target,
                             IsStringOneByteRepresentation,
                             &fast_is_string_one_byte_representation_);
 
+  SetMethodNoSideEffect(context, target, "getHashSeed", GetHashSeed);
+
   // GCProfiler
   Local<FunctionTemplate> t =
       NewFunctionTemplate(env->isolate(), GCProfiler::New);
@@ -721,6 +730,7 @@ void RegisterExternalReferences(ExternalReferenceRegistry* registry) {
   registry->Register(UpdateHeapCodeStatisticsBuffer);
   registry->Register(UpdateHeapSpaceStatisticsBuffer);
   registry->Register(SetFlagsFromString);
+  registry->Register(GetHashSeed);
   registry->Register(SetHeapSnapshotNearHeapLimit);
   registry->Register(GCProfiler::New);
   registry->Register(GCProfiler::Start);
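
For context, a minimal sketch (not part of the commit) of how the new binding surfaces in JavaScript. It assumes a build that includes this change, run with --expose-internals; the require path is the one the new test below uses, and the file name is hypothetical.

// get-hash-seed-demo.js (hypothetical); run with: node --expose-internals get-hash-seed-demo.js
'use strict';
const { internalBinding } = require('internal/test/binding');

const seed = internalBinding('v8').getHashSeed();
// BigInt::NewFromUnsigned() in the C++ above means the value arrives as a BigInt.
console.log(typeof seed);                      // 'bigint'
console.log(seed >= 0n && seed < 2n ** 64n);   // true: an unsigned 64-bit value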

test/fixtures/guess-hash-seed.js (vendored, deleted, 165 lines)
@@ -1,165 +0,0 @@
'use strict';
function run_repeated(n, fn) {
  const res = [];
  for (let i = 0; i < n; i++) res.push(fn());
  return res;
}

const INT_MAX = 0x7fffffff;

// from src/js/collection.js
// key must be a signed 32-bit number!
function ComputeIntegerHash(key/*, seed*/) {
  let hash = key;
  hash = hash ^ 0/*seed*/;
  hash = ~hash + (hash << 15);  // hash = (hash << 15) - hash - 1;
  hash = hash ^ (hash >>> 12);
  hash = hash + (hash << 2);
  hash = hash ^ (hash >>> 4);
  hash = (hash * 2057) | 0;  // hash = (hash + (hash << 3)) + (hash << 11);
  hash = hash ^ (hash >>> 16);
  return hash & 0x3fffffff;
}

const kNofHashBitFields = 2;
const kHashShift = kNofHashBitFields;
const kHashBitMask = 0xffffffff >>> kHashShift;
const kZeroHash = 27;

function string_to_array(str) {
  const res = new Array(str.length);
  for (let i = 0; i < str.length; i++) {
    res[i] = str.charCodeAt(i);
  }
  return res;
}

function gen_specialized_hasher(str) {
  const str_arr = string_to_array(str);
  return Function('seed', `
    var running_hash = seed;
    ${str_arr.map((c) => `
      running_hash += ${c};
      running_hash &= 0xffffffff;
      running_hash += (running_hash << 10);
      running_hash &= 0xffffffff;
      running_hash ^= (running_hash >>> 6);
      running_hash &= 0xffffffff;
    `).join('')}
    running_hash += (running_hash << 3);
    running_hash &= 0xffffffff;
    running_hash ^= (running_hash >>> 11);
    running_hash &= 0xffffffff;
    running_hash += (running_hash << 15);
    running_hash &= 0xffffffff;
    if ((running_hash & ${kHashBitMask}) == 0) {
      return ${kZeroHash};
    }
    return running_hash;
  `);
}

// adapted from HashToEntry
function hash_to_bucket(hash, numBuckets) {
  return (hash & ((numBuckets) - 1));
}

function time_set_lookup(set, value) {
  const t1 = process.hrtime();
  for (let i = 0; i < 100; i++) {
    set.has(value);
  }
  const t = process.hrtime(t1);
  const secs = t[0];
  const nanos = t[1];
  return secs * 1e9 + nanos;
}

// Prevent optimization of SetHas().
%NeverOptimizeFunction(time_set_lookup);

// Set with 256 buckets; bucket 0 full, others empty
const tester_set_buckets = 256;
const tester_set = new Set();
let tester_set_treshold;
(function() {
  // fill bucket 0 and find extra numbers mapping to bucket 0 and a different
  // bucket `capacity == numBuckets * 2`
  let needed = Math.floor(tester_set_buckets * 1.5) + 1;
  let positive_test_value;
  let negative_test_value;
  for (let i = 0; true; i++) {
    if (i > INT_MAX) throw new Error('i too high');
    if (hash_to_bucket(ComputeIntegerHash(i), tester_set_buckets) !== 0) {
      negative_test_value = i;
      break;
    }
  }
  for (let i = 0; needed > 0; i++) {
    if (i > INT_MAX) throw new Error('i too high');
    if (hash_to_bucket(ComputeIntegerHash(i), tester_set_buckets) === 0) {
      needed--;
      if (needed == 0) {
        positive_test_value = i;
      } else {
        tester_set.add(i);
      }
    }
  }

  // calibrate Set access times for accessing the full bucket / an empty bucket
  const pos_time =
    Math.min(...run_repeated(10000, time_set_lookup.bind(null, tester_set,
                                                         positive_test_value)));
  const neg_time =
    Math.min(...run_repeated(10000, time_set_lookup.bind(null, tester_set,
                                                         negative_test_value)));
  tester_set_treshold = (pos_time + neg_time) / 2;
  // console.log(`pos_time: ${pos_time}, neg_time: ${neg_time},`,
  //             `threshold: ${tester_set_treshold}`);
})();

// determine hash seed
const slow_str_gen = (function*() {
  let strgen_i = 0;
  outer:
  while (1) {
    const str = `#${strgen_i++}`;
    for (let i = 0; i < 1000; i++) {
      if (time_set_lookup(tester_set, str) < tester_set_treshold)
        continue outer;
    }
    yield str;
  }
})();

const first_slow_str = slow_str_gen.next().value;
// console.log('first slow string:', first_slow_str);
const first_slow_str_special_hasher = gen_specialized_hasher(first_slow_str);
let seed_candidates = [];
//var t_before_first_seed_brute = performance.now();
for (let seed_candidate = 0; seed_candidate < 0x100000000; seed_candidate++) {
  if (hash_to_bucket(first_slow_str_special_hasher(seed_candidate),
                     tester_set_buckets) == 0) {
    seed_candidates.push(seed_candidate);
  }
}
// console.log(`got ${seed_candidates.length} candidates`);
// after ${performance.now()-t_before_first_seed_brute}
while (seed_candidates.length > 1) {
  const slow_str = slow_str_gen.next().value;
  const special_hasher = gen_specialized_hasher(slow_str);
  const new_seed_candidates = [];
  for (const seed_candidate of seed_candidates) {
    if (hash_to_bucket(special_hasher(seed_candidate), tester_set_buckets) ==
        0) {
      new_seed_candidates.push(seed_candidate);
    }
  }
  seed_candidates = new_seed_candidates;
  // console.log(`reduced to ${seed_candidates.length} candidates`);
}
if (seed_candidates.length != 1)
  throw new Error('no candidates remaining');
const seed = seed_candidates[0];
console.log(seed);
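
Aside (not part of the commit): the timing oracle in the deleted fixture above rests on the integer-hash/bucket math near its top. A small standalone sketch, using the same constants as the fixture and a hypothetical file name, shows how it finds integer keys that all fall into bucket 0 of a 256-bucket table; filling that one bucket is what made Set lookups hitting it measurably slower. Integer keys ignore the seed (the "hash ^ 0" step), so this part is deterministic.

'use strict';
// Copy of the fixture's ComputeIntegerHash, minus the unused seed parameter.
function computeIntegerHash(key) {
  let hash = key ^ 0;                  // seed is effectively 0 for integer keys
  hash = ~hash + (hash << 15);
  hash = hash ^ (hash >>> 12);
  hash = hash + (hash << 2);
  hash = hash ^ (hash >>> 4);
  hash = (hash * 2057) | 0;
  hash = hash ^ (hash >>> 16);
  return hash & 0x3fffffff;
}

const numBuckets = 256;                // tester_set_buckets in the fixture
const bucket0 = [];
for (let i = 0; bucket0.length < 8; i++) {
  if ((computeIntegerHash(i) & (numBuckets - 1)) === 0) bucket0.push(i);
}
console.log(bucket0);                  // first few integers whose hash maps to bucket 0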

test/parallel/test-hash-seed.mjs (new file, 27 lines)
@@ -0,0 +1,27 @@
import '../common/index.mjs';

import assert from 'node:assert';
import { execFile } from 'node:child_process';
import { promisify, debuglog } from 'node:util';

// This test verifies that the V8 hash seed is random
// and unique between child processes.

const execFilePromise = promisify(execFile);
const debug = debuglog('test');

const kRepetitions = 3;

const seeds = await Promise.all(Array.from({ length: kRepetitions }, generateSeed));
debug(`Seeds: ${seeds}`);
assert.strictEqual(new Set(seeds).size, seeds.length);
assert.strictEqual(seeds.length, kRepetitions);

async function generateSeed() {
  const output = await execFilePromise(process.execPath, [
    '--expose-internals',
    '--print',
    'require("internal/test/binding").internalBinding("v8").getHashSeed()',
  ]);
  return output.stdout.trim();
}

test/pummel/test-hash-seed.js (deleted, 38 lines)

@@ -1,38 +0,0 @@
'use strict';

// Check that spawn child doesn't create duplicated entries
const common = require('../common');

if (common.isPi()) {
  common.skip('Too slow for Raspberry Pi devices');
}

const kRepetitions = 2;
const assert = require('assert');
const fixtures = require('../common/fixtures');
const { promisify, debuglog } = require('util');
const debug = debuglog('test');

const { execFile } = require('child_process');
const execFilePromise = promisify(execFile);
const targetScript = fixtures.path('guess-hash-seed.js');

const requiredCallback = common.mustCall((results) => {
  const seeds = results.map((val) => val.stdout.trim());
  debug(`Seeds: ${seeds}`);
  assert.strictEqual(new Set(seeds).size, seeds.length);
  assert.strictEqual(seeds.length, kRepetitions);
});

function generateSeed() {
  return execFilePromise(process.execPath, [
    // Needed for %NeverOptimizeFunction.
    '--allow-natives-syntax',
    targetScript,
  ]);
}

const subprocesses = [...new Array(kRepetitions)].map(generateSeed);

Promise.all(subprocesses)
  .then(requiredCallback);