sea: add support for V8 bytecode-only caching
Refs: nodejs/single-executable#73
Signed-off-by: Darshan Sen <[email protected]>
PR-URL: #48191
Fixes: nodejs/single-executable#73
Reviewed-By: Yagiz Nizipli <[email protected]>
Reviewed-By: Stephen Belanger <[email protected]>
Reviewed-By: Joyee Cheung <[email protected]>
RaisinTen authored Jul 26, 2023
1 parent d246536 commit 6cd6789
Showing 15 changed files with 347 additions and 41 deletions.
18 changes: 17 additions & 1 deletion doc/api/single-executable-applications.md
@@ -10,6 +10,9 @@ changes:
- version: REPLACEME
pr-url: https://github.com/nodejs/node/pull/46824
description: Added support for "useSnapshot".
- version: REPLACEME
pr-url: https://github.com/nodejs/node/pull/48191
description: Added support for "useCodeCache".
-->

> Stability: 1 - Experimental: This feature is being designed and will change.
@@ -174,7 +177,8 @@ The configuration currently reads the following top-level fields:
"main": "/path/to/bundled/script.js",
"output": "/path/to/write/the/generated/blob.blob",
"disableExperimentalSEAWarning": true, // Default: false
"useSnapshot": false // Default: false
"useSnapshot": false, // Default: false
"useCodeCache": true // Default: false
}
```
@@ -213,6 +217,18 @@ and the main script can use the [`v8.startupSnapshot` API][] to adapt to
these constraints. See
[documentation about startup snapshot support in Node.js][].
### V8 code cache support

When `useCodeCache` is set to `true` in the configuration, during the generation
of the single executable preparation blob, Node.js will compile the `main`
script to generate the V8 code cache. The generated code cache would be part of
the preparation blob and get injected into the final executable. When the single
executable application is launched, instead of compiling the `main` script from
scratch, Node.js would use the code cache to speed up the compilation, then
execute the script, which would improve the startup performance.

**Note:** `import()` does not work when `useCodeCache` is `true`.
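A minimal stand-alone sketch of the produce-and-reuse cycle described above, using the public `node:vm` API rather than the SEA internals; the source string and filename are illustrative placeholders, not part of this commit:

```js
'use strict';
const vm = require('node:vm');

const source = 'globalThis.answer = 40 + 2;';

// "Build" step: compile the script once and capture the V8 code cache,
// analogous to what the SEA blob generation does for the `main` script.
const first = new vm.Script(source, { filename: 'demo.js' });
const codeCache = first.createCachedData();

// "Launch" step: hand the cache back to V8 so full compilation can be skipped
// when the cache is still valid for this source and Node.js/V8 version.
const second = new vm.Script(source, {
  filename: 'demo.js',
  cachedData: codeCache,
});
console.log(second.cachedDataRejected); // false when the cache was accepted
second.runInThisContext();
console.log(globalThis.answer); // 42
```

In the SEA case the cache lives inside the preparation blob rather than a variable, but the accept/reject semantics are the same.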
## Notes

### `require(id)` in the injected module is not file based
10 changes: 9 additions & 1 deletion lib/internal/modules/cjs/loader.js
@@ -1123,7 +1123,7 @@ Module.prototype.require = function(id) {
let resolvedArgv;
let hasPausedEntry = false;
let Script;
function wrapSafe(filename, content, cjsModuleInstance) {
function wrapSafe(filename, content, cjsModuleInstance, codeCache) {
if (patched) {
const wrapper = Module.wrap(content);
if (Script === undefined) {
@@ -1158,13 +1158,21 @@ function wrapSafe(filename, content, cjsModuleInstance) {
'__dirname',
], {
filename,
cachedData: codeCache,
importModuleDynamically(specifier, _, importAssertions) {
const cascadedLoader = getCascadedLoader();
return cascadedLoader.import(specifier, normalizeReferrerURL(filename),
importAssertions);
},
});

// The code cache is used for SEAs only.
if (codeCache &&
result.cachedDataRejected !== false &&
internalBinding('sea').isSea()) {
process.emitWarning('Code cache data rejected.');
}

// Cache the source map for the module if present.
if (result.sourceMapURL) {
maybeCacheSourceMap(filename, content, this, false, undefined, result.sourceMapURL);
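The `cachedDataRejected` check added above guards against a stale or mismatched cache. Here is a hedged stand-alone sketch of that failure mode using the public `node:vm` API (not the internal `wrapSafe`): when the cached data does not correspond to the source being compiled, V8 falls back to a full compile and flags the rejection, which is the condition the loader now reports as a warning inside an SEA.

```js
'use strict';
const vm = require('node:vm');

// Produce a cache for one source...
const cache = new vm.Script('1 + 1', { filename: 'a.js' }).createCachedData();

// ...then try to reuse it for different source. V8 still compiles the script,
// but marks the supplied cached data as rejected instead of using it.
const script = new vm.Script('2 + 2', { filename: 'a.js', cachedData: cache });
console.log(script.cachedDataRejected); // true — the case the loader now warns about
```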
9 changes: 7 additions & 2 deletions lib/internal/util/embedding.js
@@ -1,7 +1,8 @@
'use strict';
const { codes: { ERR_UNKNOWN_BUILTIN_MODULE } } = require('internal/errors');
const { BuiltinModule: { normalizeRequirableId } } = require('internal/bootstrap/realm');
const { Module, wrapSafe } = require('internal/modules/cjs/loader');
const { codes: { ERR_UNKNOWN_BUILTIN_MODULE } } = require('internal/errors');
const { getCodeCache, getCodePath, isSea } = internalBinding('sea');

// This is roughly the same as:
//
@@ -15,7 +16,11 @@ const { Module, wrapSafe } = require('internal/modules/cjs/loader');

function embedderRunCjs(contents) {
const filename = process.execPath;
const compiledWrapper = wrapSafe(filename, contents);
const compiledWrapper = wrapSafe(
isSea() ? getCodePath() : filename,
contents,
undefined,
getCodeCache());

const customModule = new Module(filename, null);
customModule.filename = filename;
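The truncated `// This is roughly the same as:` comment in this file points at a user-land equivalent of `embedderRunCjs`. Below is a hedged approximation of the updated flow using the public `node:vm` and `node:module` APIs; `bundledSource` and `blobCodeCache` are hypothetical placeholders for what the real code obtains from the injected blob via `getCodeCache()` and `getCodePath()`, and the real implementation substitutes a `require` that only resolves built-in modules.

```js
'use strict';
const path = require('node:path');
const vm = require('node:vm');
const Module = require('node:module');

// Rough approximation of embedderRunCjs(): compile the bundled script with the
// CommonJS wrapper parameters, reusing the code cache from the blob if present,
// then run it as if it were the entry module of the executable. The real code
// uses the code path recorded in the blob as the compilation filename for SEAs.
function runBundledScript(bundledSource, blobCodeCache /* Buffer | undefined */) {
  const filename = process.execPath;
  const wrapper = vm.compileFunction(
    bundledSource,
    ['exports', 'require', 'module', '__filename', '__dirname'],
    { filename, cachedData: blobCodeCache });

  const mod = new Module(filename, null);
  mod.filename = filename;
  // Semi-internal helper; mirrors how the injected module gets its lookup paths.
  mod.paths = Module._nodeModulePaths(path.dirname(filename));

  wrapper.call(mod.exports,
               mod.exports, require, mod, filename, path.dirname(filename));
  return mod.exports;
}
```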
21 changes: 1 addition & 20 deletions src/json_parser.cc
@@ -4,34 +4,15 @@
#include "util-inl.h"

namespace node {
using v8::ArrayBuffer;
using v8::Context;
using v8::Isolate;
using v8::Local;
using v8::Object;
using v8::String;
using v8::Value;

static Isolate* NewIsolate(v8::ArrayBuffer::Allocator* allocator) {
Isolate* isolate = Isolate::Allocate();
CHECK_NOT_NULL(isolate);
per_process::v8_platform.Platform()->RegisterIsolate(isolate,
uv_default_loop());
Isolate::CreateParams params;
params.array_buffer_allocator = allocator;
Isolate::Initialize(isolate, params);
return isolate;
}

void JSONParser::FreeIsolate(Isolate* isolate) {
per_process::v8_platform.Platform()->UnregisterIsolate(isolate);
isolate->Dispose();
}

JSONParser::JSONParser()
: allocator_(ArrayBuffer::Allocator::NewDefaultAllocator()),
isolate_(NewIsolate(allocator_.get())),
handle_scope_(isolate_.get()),
: handle_scope_(isolate_.get()),
context_(isolate_.get(), Context::New(isolate_.get())),
context_scope_(context_.Get(isolate_.get())) {}

4 changes: 1 addition & 3 deletions src/json_parser.h
@@ -24,9 +24,7 @@ class JSONParser {
private:
// We might want a lighter-weight JSON parser for this use case. But for now
// using V8 is good enough.
static void FreeIsolate(v8::Isolate* isolate);
std::unique_ptr<v8::ArrayBuffer::Allocator> allocator_;
DeleteFnPtr<v8::Isolate, FreeIsolate> isolate_;
RAIIIsolate isolate_;
v8::HandleScope handle_scope_;
v8::Global<v8::Context> context_;
v8::Context::Scope context_scope_;
16 changes: 16 additions & 0 deletions src/node_contextify.cc
@@ -935,6 +935,22 @@ Maybe<bool> StoreCodeCacheResult(
return Just(true);
}

// TODO(RaisinTen): Reuse in ContextifyContext::CompileFunction().
MaybeLocal<Function> CompileFunction(Local<Context> context,
Local<String> filename,
Local<String> content,
std::vector<Local<String>>* parameters) {
ScriptOrigin script_origin(context->GetIsolate(), filename, 0, 0, true);
ScriptCompiler::Source script_source(content, script_origin);

return ScriptCompiler::CompileFunction(context,
&script_source,
parameters->size(),
parameters->data(),
0,
nullptr);
}

bool ContextifyScript::InstanceOf(Environment* env,
const Local<Value>& value) {
return !value.IsEmpty() &&
6 changes: 6 additions & 0 deletions src/node_contextify.h
@@ -210,6 +210,12 @@ v8::Maybe<bool> StoreCodeCacheResult(
bool produce_cached_data,
std::unique_ptr<v8::ScriptCompiler::CachedData> new_cached_data);

v8::MaybeLocal<v8::Function> CompileFunction(
v8::Local<v8::Context> context,
v8::Local<v8::String> filename,
v8::Local<v8::String> content,
std::vector<v8::Local<v8::String>>* parameters);

} // namespace contextify
} // namespace node
