// worker.js

const DRYRUN_NUM_FRAMES = 100;    // Frames pushed through the encoder during a dry run.
const MAX_ENCODE_QUEUE_SIZE = 3;  // Throttle submission when the encoder queue grows beyond this.
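
// All traffic back to the page goes through send(); log() and fatal() are
// thin wrappers that tag the payload as an info line or an error message.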
function send(topic, payload) {
  self.postMessage({
    topic,
    direction: 'response',
    payload
  });
}

function log(line) {
  send('log', {severity: 'info', line});
}

function fatal(message) {
  send('error', {message});
}

function optionallyCreateCanvas(userConfig, useWebGl) {
  if (useWebGl) {
    const canvas = new OffscreenCanvas(userConfig.width, userConfig.height);
    const context = canvas.getContext('webgl2');
    return context;
  }
}
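
// Resolves once the encoder's internal queue has drained back to
// MAX_ENCODE_QUEUE_SIZE or below; returns immediately if it is not backed up.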
async function waitForIdle(encoder) {
  if (encoder.encodeQueueSize > MAX_ENCODE_QUEUE_SIZE) {
    log(`Encoder queue clogged up with ${encoder.encodeQueueSize} frames, waiting to reduce to ${MAX_ENCODE_QUEUE_SIZE} or less.`);
    return new Promise(resolve => {
      encoder.ondequeue = () => {
        log(`Dequeue event received, encoder queue size is now: ${encoder.encodeQueueSize}`);
        if (encoder.encodeQueueSize <= MAX_ENCODE_QUEUE_SIZE) {
          log(`Encoder queue size reduced to ${encoder.encodeQueueSize}, resuming.`);
          resolve();
        }
      };
    });
  }
}
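
// I420 stores full-resolution luma plus two quarter-resolution chroma planes,
// i.e. 12 bits per pixel, hence width * height * 12 / 8 bytes per frame.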
function randomFrameData({width, height}) {
  return new Uint8Array(width * height * 12 / 8).map(() => Math.round(Math.random() * 255));
}
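
// Produces the frameNo-th VideoFrame: a solid random color rendered via WebGL
// when a context is available, otherwise CPU-side I420 white noise.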
async function getNextFrame(frameNo, userConfig, webGlContext) {
  const duration = 1000000 / userConfig.framerate;  // Microseconds per frame.
  const timestamp = Math.ceil(frameNo * duration);
  if (webGlContext) {
    // Fill with solid, random color.
    webGlContext.viewport(0, 0, webGlContext.canvas.width, webGlContext.canvas.height);
    webGlContext.clearColor(Math.random(), Math.random(), Math.random(), 1);
    webGlContext.clear(webGlContext.COLOR_BUFFER_BIT);
    const bitmap = await createImageBitmap(webGlContext.canvas);
    return new VideoFrame(bitmap, {
      timestamp,
      duration,
      alpha: userConfig.alpha,
    });
  } else {
    // We assume a tightly packed layout, fill with white noise.
    const planes = randomFrameData(userConfig);
    return new VideoFrame(planes, {
      format: 'I420',
      codedWidth: userConfig.width,
      codedHeight: userConfig.height,
      timestamp,
      duration
    });
  }
}
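
// Dry run: configure a VideoEncoder, feed it DRYRUN_NUM_FRAMES frames while
// applying back-pressure, flush, and report how many encoded chunks came out.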
async function exerciseEncoder(userConfig, userWebGl) {
  const webGlContext = optionallyCreateCanvas(userConfig, userWebGl);
  log(`Now performing dry-run of VideoEncoder API, using ${webGlContext ? 'GPU-backed' : 'in-memory'} input frames.`);
  let chunkNo = 0;
  const encoder = new VideoEncoder({
    output: chunk => {
      log(`<-- Chunk ${chunkNo++} (type ${chunk.type}) of ${chunk.byteLength} bytes at timestamp ${chunk.timestamp} micros produced.`);
    },
    error: error => {
      fatal('Error during encoder dryrun: ' + error);
    }
  });
  log(`VideoEncoder instance created, encoder status is: ${encoder.state}`);
  encoder.configure(userConfig);
  log(`VideoEncoder configured, encoder status is: ${encoder.state}`);
  for (let frameNo = 0; frameNo < DRYRUN_NUM_FRAMES; ++frameNo) {
    await waitForIdle(encoder);
    log(`Constructing ${frameNo}-th frame.`);
    const frame = await getNextFrame(frameNo, userConfig, webGlContext);
    log(`--> Passing ${frameNo}-th frame to encoder.`);
    encoder.encode(frame);
    log(`Closing ${frameNo}-th frame.`);
    frame.close();
  }
  log('Flushing encoder.');
  await encoder.flush();
  log(`Closing encoder, encoder state is: ${encoder.state}`);
  encoder.close();
  if (chunkNo === DRYRUN_NUM_FRAMES) {
    log(`All ${DRYRUN_NUM_FRAMES} frames were encoded into H.264 bitstream packages.`);
  } else {
    fatal(`Only ${chunkNo} H.264 bitstream packages were generated out of ${DRYRUN_NUM_FRAMES} frames.`);
  }
  log('Done.');
  send('done', {
    chunks: chunkNo
  });
}
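
// Probes VideoEncoder.isConfigSupported() and reports either the normalized
// config or a failure back to the page.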
async function isConfigSupported(userConfig) {
  try {
    const { supported, config } = await VideoEncoder.isConfigSupported(userConfig);
    if (supported) {
      send('done', {
        config
      });
    } else {
      fatal('Configuration not supported');
    }
  } catch (error) {
    fatal('An error occurred when calling VideoEncoder.isConfigSupported:\n' + error);
  }
}
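
// Request dispatcher: the page posts {topic, direction: 'request', payload}
// messages; responses travel back with direction 'response' via send().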
self.onmessage = event => {
  if (event.data.direction === 'request') {
    switch (event.data.topic) {
      case 'start':
        exerciseEncoder(event.data.payload.userConfig, event.data.payload.useWebGl);
        break;
      case 'support':
        isConfigSupported(event.data.payload.userConfig);
        break;
      default:
        console.error('Do not understand command', event.data);
    }
  }
};
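
// A minimal sketch of how a page might drive this worker, assuming it is
// loaded with new Worker('worker.js'). The VideoEncoderConfig fields shown
// here (codec string, dimensions, bitrate, framerate) are illustrative
// assumptions, not values taken from this file:
//
//   const worker = new Worker('worker.js');
//   worker.onmessage = ({data}) => {
//     if (data.direction !== 'response') return;
//     if (data.topic === 'log') console.log(data.payload.line);
//     if (data.topic === 'error') console.error(data.payload.message);
//     if (data.topic === 'done') console.log('Done:', data.payload);
//   };
//   const userConfig = {codec: 'avc1.42001E', width: 640, height: 480,
//                       bitrate: 1_000_000, framerate: 30};
//   worker.postMessage({topic: 'support', direction: 'request', payload: {userConfig}});
//   worker.postMessage({topic: 'start', direction: 'request', payload: {userConfig, useWebGl: false}});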