NorskTransform.videoCompose() method
Compose multiple video streams together into a single output
Signature:
videoCompose<Pins extends string>(settings: VideoComposeSettings<Pins>): Promise<VideoComposeNode<Pins>>;
Parameters
| Parameter | Type | Description |
| --- | --- | --- |
| `settings` | `VideoComposeSettings<Pins>` | Composition settings |
Returns:
Promise<VideoComposeNode<Pins>>
Example [14_browser_compose.ts]
Add various browser overlays to an SRT source and cycle between them
// Overlay a headless-browser source on an SRT input, cycling between two
// browser configurations (URL, resolution, placement) every 22 seconds, then
// encode the composition as a three-rung ABR ladder and package it as CMAF.
export async function main() {
// SRT listener on 127.0.0.1:5001 — supplies both the background video and
// the programme audio.
const srtSettings: SrtInputSettings = {
id: "srtInput",
ip: "127.0.0.1",
port: 5001,
mode: "listener",
sourceName: "srtInput1",
};
// Set up some configurations to cycle through
// Overlay rects: `chopped` selects a strip of the browser page and `topLeft`
// places it; `full` covers the whole frame when paired with the 100x100
// referenceResolution (i.e. percentage-based coordinates).
const chopped = { x: 0, y: 0, width: 674, height: 55 };
const topLeft = { x: 5, y: 0, width: 674, height: 55 };
const full = { x: 0, y: 0, width: 100, height: 100 };
// Each entry pairs a browser source (url + render resolution) with the
// source/dest rects the overlay part should use while that entry is active.
const configs = [
{
browser: {
url: "https://app.singular.live/output/6CTPhPRe7yc5lkxgUixA5q/Default?aspect=16:9",
resolution: { width: 1280, height: 720 },
},
sourceRect: full,
destRect: full,
referenceResolution: { width: 100, height: 100 },
},
{
browser: {
// Updates ~1fps
url: "https://observablehq.com/embed/@mbostock/pixel-clock?cells=display",
resolution: { width: 720, height: 400 },
},
sourceRect: chopped,
destRect: topLeft,
// NOTE(review): presumably undefined falls back to a default reference
// (e.g. the part's own resolution) — confirm against VideoComposeSettings.
referenceResolution: undefined,
},
];
let currentConfig = 0;
let config = configs[currentConfig];
// Headless browser input rendering the overlay page at 25 fps.
const browserSettings: BrowserInputSettings = {
url: config.browser.url,
resolution: config.browser.resolution,
sourceName: "browserOverlay",
frameRate: { frames: 25, seconds: 1 },
onBrowserEvent: (event) => {
console.log(event);
},
};
// Full-frame background part (bottom of the z-order).
const background: ComposePart<"background"> = {
pin: "background",
opacity: 1.0,
zIndex: 0,
sourceRect: { x: 0, y: 0, width: 100, height: 100 },
destRect: { x: 0, y: 0, width: 100, height: 100 },
referenceResolution: { width: 100, height: 100 },
};
// Overlay part; its rects are mutated in the interval below and the same
// object is re-submitted via updateConfig.
const overlay: ComposePart<"overlay"> = {
pin: "overlay",
opacity: 1.0,
zIndex: 1,
sourceRect: config.sourceRect,
destRect: config.destRect,
referenceResolution: config.referenceResolution,
};
const parts = [background, overlay];
const composeSettings: VideoComposeSettings<"background" | "overlay"> = {
id: "compose",
referenceStream: background.pin,
outputResolution: { width: 1280, height: 720 },
outputPixelFormat: "bgra",
parts,
};
const norsk = await Norsk.connect({
onShutdown: () => {
console.log("Norsk has shutdown");
process.exit(1);
},
});
let input1 = await norsk.input.srt(srtSettings);
let input2 = await norsk.input.browser(browserSettings);
let compose = await norsk.processor.transform.videoCompose(composeSettings);
// Route each input's video stream to its named compose pin.
compose.subscribeToPins([
{ source: input1, sourceSelector: videoToPin(background.pin) },
{ source: input2, sourceSelector: videoToPin(overlay.pin) },
]);
// Every 22s: point the browser at the next config and move/resize the
// overlay. `parts` still references the same `overlay` object, so mutating
// it and re-sending { parts } applies the new rects.
// NOTE(review): the first tick re-applies configs[0] (post-increment reads
// the current index) before advancing — confirm that is intended.
setInterval(() => {
let config = configs[currentConfig++ % configs.length];
input2.updateConfig(config.browser);
overlay.sourceRect = config.sourceRect;
overlay.destRect = config.destRect;
compose.updateConfig({ parts });
}, 22000);
// Re-key the composed video and the source audio onto a single stable
// programme ("input", program 1) so the downstream outputs see one logical
// source regardless of upstream stream keys.
let videoStreamKeyConfig = {
id: "video_stream_key",
streamKey: {
programNumber: 1,
renditionName: "video",
streamId: 256,
sourceName: "input",
},
};
let audioStreamKeyConfig = {
id: "audio_stream_key",
streamKey: {
programNumber: 1,
renditionName: "audio",
streamId: 257,
sourceName: "input",
},
};
let videoInput = await norsk.processor.transform.streamKeyOverride(videoStreamKeyConfig);
let audioInputKeyed = await norsk.processor.transform.streamKeyOverride(audioStreamKeyConfig);
// Advertise a 20 kbps audio bitrate in the stream metadata.
let audioInput = await norsk.processor.transform.streamMetadataOverride({ audio: { bitrate: 20_000 } });
videoInput.subscribe([{ source: compose, sourceSelector: selectVideo }]);
audioInputKeyed.subscribe([{ source: input1, sourceSelector: selectAudio }]);
audioInput.subscribe([
{ source: audioInputKeyed, sourceSelector: selectAudio },
]);
// Build one x264 ladder rung; per-rung overrides are spread over the shared
// low-latency defaults (2s GOP at 25 fps, no scene-cut, no b-frames).
function mkRung(name: string, width: number, height: number, bitrate: number, rungSpecificX264Settings?: Partial<X264Codec>): VideoEncodeRung {
return {
name,
width,
height,
frameRate: { frames: 25, seconds: 1 },
codec: {
type: "x264",
bitrateMode: { value: bitrate, mode: "abr" },
keyFrameIntervalMax: 50,
keyFrameIntervalMin: 50,
sceneCut: 0,
bframes: 0,
tune: "zerolatency",
...rungSpecificX264Settings,
},
};
}
let ladderRungs: VideoEncodeRung[] = [
mkRung("high", 1280, 720, 8000000,
{ // Override some of the default x264 settings for the high rung
bframes: 3,
profile: "high",
level: 4.1,
preset: "veryfast",
}),
mkRung("medium", 640, 360, 250000), // default x264 settings
mkRung("low", 320, 180, 150000), // default x264 settings
];
let abrLadder = await norsk.processor.transform.videoEncode({ id: "ladder", rungs: ladderRungs });
abrLadder.subscribe([{ source: videoInput, sourceSelector: selectVideo }]);
// CMAF packaging: 4s segments split into 1s parts, retained locally for 10s.
let segmentSettings = {
partDurationSeconds: 1.0,
segmentDurationSeconds: 4.0,
};
let destinations: CMAFDestinationSettings[] = [
{ type: "local", retentionPeriodSeconds: 10 },
];
let masterOutput = await norsk.output.cmafMaster({
id: "master",
playlistName: "master",
destinations,
});
let audioOutput = await norsk.output.cmafAudio({
id: "audio",
destinations,
...segmentSettings,
});
let highOutput = await norsk.output.cmafVideo({
id: "high",
destinations,
...segmentSettings,
});
let mediumOutput = await norsk.output.cmafVideo({
id: "medium",
destinations,
...segmentSettings,
});
let lowOutput = await norsk.output.cmafVideo({
id: "low",
destinations,
...segmentSettings,
});
// Wire up the ladder
// Selector factory: pick exactly one rung by rendition name, but only once
// every rung is publishing (otherwise select nothing).
let ladderItem =
(desiredRendition: string) => (streams: StreamMetadata[]) => {
const video = videoStreamKeys(streams);
// Don't subscribe at all till there is media from every rung in the ladder
if (video.length == ladderRungs.length) {
// Just select the one with the desired rendition name
return video.filter((k) => k.renditionName == desiredRendition);
}
return [];
};
highOutput.subscribe([
{ source: abrLadder, sourceSelector: ladderItem("high") },
]);
mediumOutput.subscribe([
{ source: abrLadder, sourceSelector: ladderItem("medium") },
]);
lowOutput.subscribe([
{ source: abrLadder, sourceSelector: ladderItem("low") },
]);
audioOutput.subscribe([{ source: audioInput, sourceSelector: selectAudio }]);
// The master playlist references all three video renditions plus the audio.
masterOutput.subscribe([
{ source: abrLadder, sourceSelector: selectAllVideos(ladderRungs.length) },
{ source: audioInput, sourceSelector: selectAudio },
]);
console.log(`Local player: ${masterOutput.playlistUrl}`);
}
Run the following command to generate example input at url `srt://127.0.0.1:5001`:
ffmpeg -v error -re -f lavfi -i "sine=frequency=220:sample_rate=48000" -loop 1 -i data/test-src-still.png -vf drawtext=fontfile=Arial.ttf:text="%{frame_num}":start_number=1:x=980:y=330:fontcolor=black:fontsize=40:box=1:boxcolor=white:boxborderw=5,scale=1280:720 -vcodec h264 -b:v 150000 -b:a 20000 -aspect 1280:720 -x264opts "keyint=25:min-keyint=25:no-scenecut:bframes=0" -bluray-compat true -tune stillimage -preset fast -pix_fmt yuv420p -acodec aac -metadata language=en -f mpegts -flush_packets 0 'srt://127.0.0.1:5001'
Example [15_compose.ts]
Compose multiple sources into a single output and vary how they are arranged
/**
 * Picture-in-picture demo: an SRT feed fills the frame, an RTMP feed sits in
 * a corner, and a PNG logo is pinned near the top-left. Audio from both live
 * feeds is mixed into one track, everything is served over WebRTC, and every
 * two seconds the embedded feed moves to another corner (or is dropped).
 */
export async function main() {
  // The two live inputs: an SRT listener and an RTMP server.
  const srtSettings: SrtInputSettings = {
    id: "srtInput",
    ip: "127.0.0.1",
    port: 5001,
    mode: "listener",
    sourceName: "srtInput1",
  };
  const rtmpSettings = { id: "rtmpInput", port: 5002 };

  // Corner rectangles, expressed as percentages (see referenceResolution).
  const topRight = { x: 50, y: 5, width: 45, height: 45 };
  const bottomRight = { x: 50, y: 50, width: 45, height: 45 };
  const bottomLeft = { x: 5, y: 50, width: 45, height: 45 };

  // Compose parts, bottom to top: full-frame background, embedded PiP, logo.
  const background: ComposePart<"background"> = {
    pin: "background",
    opacity: 1.0,
    zIndex: 0,
    sourceRect: { x: 0, y: 0, width: 100, height: 100 },
    destRect: { x: 0, y: 0, width: 100, height: 100 },
  };
  const embedded: ComposePart<"embedded"> = {
    pin: "embedded",
    opacity: 1.0,
    zIndex: 1,
    sourceRect: { x: 0, y: 0, width: 100, height: 100 },
    destRect: topRight,
  };
  const logo: ComposePart<"logo"> = {
    pin: "logo",
    opacity: 1.0,
    zIndex: 2,
    sourceRect: { x: 0, y: 0, width: 100, height: 100 },
    destRect: { x: 5, y: 5, width: 10, height: 8 },
  };
  const parts = [background, embedded, logo];
  const composeSettings: VideoComposeSettings<
    "background" | "embedded" | "logo"
  > = {
    id: "compose",
    referenceStream: background.pin,
    referenceResolution: { width: 100, height: 100 }, // make it % based
    outputResolution: { width: 1280, height: 720 },
    parts,
    outputPixelFormat: "rgba",
    onError: () => process.exit(), // interval keeps this script alive after nodes close
  };

  // Still-image input used for the logo part.
  const fileName = await fs.realpath("./data/Norsk.png");
  const fileSettings: LocalFileInputSettings = {
    fileName,
    sourceName: "logoInput",
    id: "logoInput"
  };

  const norsk = await Norsk.connect();
  const srtInput = await norsk.input.srt(srtSettings);
  const rtmpInput = await norsk.input.rtmpServer(rtmpSettings);
  const logoInput = await norsk.input.fileImage(fileSettings);
  const composer = await norsk.processor.transform.videoCompose(composeSettings);
  const browserOutput = await norsk.duplex.webRtcBrowser({ id: "webrtc" });

  // Feed each source's video into its named compose pin.
  composer.subscribeToPins([
    { source: srtInput, sourceSelector: videoToPin(background.pin) },
    { source: rtmpInput, sourceSelector: videoToPin(embedded.pin) },
    { source: logoInput, sourceSelector: videoToPin(logo.pin) },
  ]);

  // Mix the audio of the two live feeds into a single output track.
  const mixerSettings: AudioMixSettings<"input1" | "input2"> = {
    id: "mixer",
    onError: (err) => console.log("MIXER ERR", err),
    sampleRate: 48000,
    sources: [
      { pin: "input1" },
      { pin: "input2" }
    ],
    outputSource: "output",
  };
  const mixer = await norsk.processor.transform.audioMix(mixerSettings);
  mixer.subscribeToPins([
    { source: srtInput, sourceSelector: audioToPin('input1') },
    { source: rtmpInput, sourceSelector: audioToPin('input2') }
  ]);

  // Composite video plus mixed audio go to the WebRTC player.
  browserOutput.subscribe([
    { source: composer, sourceSelector: selectVideo },
    { source: mixer, sourceSelector: selectAudio },
  ]);
  console.log(`Local player: ${browserOutput.playerUrl}`);

  // Walk the embedded feed around the corners, removing it entirely on every
  // fourth step; each tick submits a freshly built parts list.
  const corners = [topRight, bottomRight, bottomLeft];
  let tick = 0;
  setInterval(() => {
    const phase = tick % 4;
    const nextParts =
      phase < corners.length
        ? [background, { ...embedded, destRect: corners[phase] }, logo]
        : [background, logo];
    composer.updateConfig({ parts: nextParts });
    tick += 1;
  }, 2000);
}
Run the following commands together to generate example inputs at urls `srt://127.0.0.1:5001` and `rtmp://127.0.0.1:5002/acme/high`:
ffmpeg -v error -re -f lavfi -i "sine=frequency=220:sample_rate=48000" -loop 1 -i data/test-src-still.png -vf drawtext=fontfile=Arial.ttf:text="%{frame_num}":start_number=1:x=980:y=330:fontcolor=black:fontsize=40:box=1:boxcolor=white:boxborderw=5,scale=1280:720 -vcodec h264 -b:v 150000 -b:a 20000 -aspect 1280:720 -x264opts "keyint=25:min-keyint=25:no-scenecut:bframes=0" -bluray-compat true -tune stillimage -preset fast -pix_fmt yuv420p -acodec aac -metadata language=en -f mpegts -flush_packets 0 'srt://127.0.0.1:5001'
ffmpeg -v error -re -f lavfi -i "sine=frequency=220:sample_rate=48000" -loop 1 -i data/test-src-still.png -vf drawtext=fontfile=Arial.ttf:text="%{frame_num}":start_number=1:x=980:y=330:fontcolor=black:fontsize=40:box=1:boxcolor=white:boxborderw=5,scale=1280:720 -vcodec h264 -b:v 150000 -b:a 20000 -aspect 1280:720 -x264opts "keyint=25:min-keyint=25:no-scenecut:bframes=0" -bluray-compat true -tune stillimage -preset fast -pix_fmt yuv420p -acodec aac -metadata language=en -f flv 'rtmp://127.0.0.1:5002/acme/high'
Example [16_compose_smooth.ts]
Another picture in picture example, with smooth transitions between compositions
/**
 * Smooth picture-in-picture: an SRT background hosts an RTMP overlay, as in
 * the previous example, but both parts carry stable ids and a transition so
 * layout changes animate over one second instead of cutting. Every two
 * seconds the overlay moves, fills the frame, fades out, or swaps z-order
 * with the background.
 */
export async function main() {
  const srtSettings: SrtInputSettings = {
    id: "srtInput",
    ip: "127.0.0.1",
    port: 5001,
    mode: "listener",
    sourceName: "srtInput1",
  };
  const rtmpSettings = { id: "rtmpInput", port: 5002 };

  // Layout rectangles, expressed as percentages (see referenceResolution).
  const topRight = { x: 50, y: 5, width: 45, height: 45 };
  const bottomRight = { x: 50, y: 50, width: 45, height: 45 };
  const full = { x: 0, y: 0, width: 100, height: 100 };
  const bottomLeft = { x: 5, y: 50, width: 45, height: 45 };

  // Each part has an id plus a transition so config updates animate.
  const background: ComposePart<"background"> = {
    pin: "background",
    opacity: 1.0,
    zIndex: 0,
    sourceRect: { x: 0, y: 0, width: 100, height: 100 },
    destRect: { x: 0, y: 0, width: 100, height: 100 },
    id: "background",
    transition: { durationMs: 1000.0 },
  };
  const embedded: ComposePart<"embedded"> = {
    pin: "embedded",
    opacity: 1.0,
    zIndex: 1,
    sourceRect: { x: 0, y: 0, width: 100, height: 100 },
    destRect: topRight,
    id: "embed",
    transition: { durationMs: 1000.0, easing: "ease_in" },
  };
  const parts = [background, embedded];
  const composeSettings: VideoComposeSettings<"background" | "embedded"> = {
    id: "compose",
    referenceStream: background.pin,
    referenceResolution: { width: 100, height: 100 }, // make it % based
    outputResolution: { width: 1280, height: 720 },
    parts,
    onError: (_err) => process.exit(), // interval keeps this script alive after nodes close
  };

  const norsk = await Norsk.connect({
    onShutdown: () => {
      console.log("Norsk has shutdown");
      process.exit(1)
    }
  });
  const srtInput = await norsk.input.srt(srtSettings);
  const rtmpInput = await norsk.input.rtmpServer(rtmpSettings);
  const composer = await norsk.processor.transform.videoCompose(composeSettings);
  const browserOutput = await norsk.duplex.webRtcBrowser({ id: "webrtc" });

  // Video to the compose pins; composed video plus SRT audio to WebRTC.
  composer.subscribeToPins([
    { source: srtInput, sourceSelector: videoToPin(background.pin) },
    { source: rtmpInput, sourceSelector: videoToPin(embedded.pin) },
  ]);
  browserOutput.subscribe([
    { source: composer, sourceSelector: selectVideo },
    { source: srtInput, sourceSelector: selectAudio },
  ]);
  console.log(`Local player: ${browserOutput.playerUrl}`);

  // One layout builder per phase; each tick applies the next one in turn.
  const phases = [
    // Overlay slides to the bottom-right corner.
    () => [background, { ...embedded, destRect: bottomRight }],
    // Overlay grows to cover the whole frame.
    () => [background, { ...embedded, destRect: full }],
    // Full-frame overlay fades out.
    () => [background, { ...embedded, destRect: full, opacity: 0.0 }],
    // Overlay reappears instantly (its transition disabled) while the
    // background jumps above it in the z-order.
    () => [
      { ...embedded, destRect: full, opacity: 1.0, transition: undefined },
      { ...background, zIndex: 2 },
    ],
    // Roles reversed: the background shrinks into the bottom-left corner.
    () => [
      { ...embedded, destRect: full, opacity: 1.0 },
      { ...background, zIndex: 2, destRect: bottomLeft },
    ],
  ];
  let tick = 0;
  setInterval(() => {
    composer.updateConfig({ parts: phases[tick % phases.length]() });
    tick += 1;
  }, 2000);
}
Run the following commands together to generate example inputs at urls `srt://127.0.0.1:5001` and `rtmp://127.0.0.1:5002/acme/high`:
ffmpeg -v error -re -f lavfi -i "sine=frequency=220:sample_rate=48000" -loop 1 -i data/test-src-still.png -vf drawtext=fontfile=Arial.ttf:text="%{frame_num}":start_number=1:x=980:y=330:fontcolor=black:fontsize=40:box=1:boxcolor=white:boxborderw=5,scale=1280:720 -vcodec h264 -b:v 150000 -b:a 20000 -aspect 1280:720 -x264opts "keyint=25:min-keyint=25:no-scenecut:bframes=0" -bluray-compat true -tune stillimage -preset fast -pix_fmt yuv420p -acodec aac -metadata language=en -f mpegts -flush_packets 0 'srt://127.0.0.1:5001'
ffmpeg -v error -re -f lavfi -i "rgbtestsrc[out0];sine=frequency=220:sample_rate=48000[out1]" -vcodec h264 -b:v 150000 -b:a 20000 -aspect 1280:720 -x264opts "keyint=25:min-keyint=25:no-scenecut:bframes=0" -bluray-compat true -tune stillimage -preset fast -pix_fmt yuv420p -acodec aac -metadata language=en -f flv 'rtmp://127.0.0.1:5002/acme/high'
Find Examples
Search for examples using videoCompose in our examples repo.