NorskTransform.videoEncode() method
Encodes a video stream into one or more renditions, using either software or appropriate hardware acceleration if available.
Signature:
videoEncode(settings: VideoEncodeSettings): Promise<VideoEncodeNode>;
Parameters
settings: VideoEncodeSettings — the configuration for the encode node, including its id and the ladder rungs to produce.
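As a minimal, hedged sketch of calling videoEncode (not taken from the examples repo, and assuming the usual @norskvideo/norsk-sdk imports): a single software (x264) rung encoding an RTMP input. The node id, rung name and settings values are illustrative only, mirroring the rung fields used in the examples below.
import { Norsk, selectVideo, VideoEncodeRung } from "@norskvideo/norsk-sdk";

export async function main(): Promise<void> {
  const norsk = await Norsk.connect();
  const input = await norsk.input.rtmpServer({ id: "rtmp" });

  // One 480p x264 rung; values mirror the "low" rung in the
  // remote-commentary example below and are purely illustrative.
  const rungs: VideoEncodeRung[] = [
    {
      name: "preview",
      width: 854,
      height: 480,
      frameRate: { frames: 25, seconds: 1 },
      codec: {
        type: "x264",
        keyFrameIntervalMax: 50,
        keyFrameIntervalMin: 50,
        sceneCut: 0,
        bitrateMode: { value: 800, mode: "abr" },
      },
    },
  ];

  const encode = await norsk.processor.transform.videoEncode({ id: "encode", rungs });
  encode.subscribe([{ source: input, sourceSelector: selectVideo }]);
}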
Example [16_rtmp_to_ladder.ts]
Build an ABR ladder from an RTMP source and publish it as HLS and WebRTC.
export async function main(): Promise<void> {
const cmafRenditions: AppRenditions = ["low", "medium"];
const whepRenditions: AppRenditions = ["LL-preview"];
// Use a Set to dedupe any renditions shared between the two lists
const allRenditions: AppRenditions = [
...new Set([...cmafRenditions, ...whepRenditions]),
];
const norsk = await Norsk.connect();
const input = await norsk.input.rtmpServer({ id: "rtmp" });
const abrLadder = await norsk.processor.transform.videoEncode({
id: "ladder",
rungs: mkRungs(allRenditions),
});
const sources = { videoLadder: abrLadder, audio: input };
const cmafOutputs = await mkCmafOutputs(
norsk,
cmafSettings,
sources,
cmafRenditions
);
const whepOutputs = await mkWhepOutputs(
norsk,
whepSettings,
sources,
whepRenditions
);
abrLadder.subscribe([{ source: input, sourceSelector: selectVideo }]);
console.log(`Multi variant playlist: ${cmafOutputs.multivariant.url}`);
cmafRenditions.forEach((k) => {
void cmafOutputs.videos[k]
.url()
.then((url) => console.log(`HLS ${k} Playlist: ${url}`));
});
void cmafOutputs.audio
.url()
.then((url) => console.log(`HLS Audio Playlist: ${url}`));
whepRenditions.forEach((k) => {
console.log(`WebRTC PlayerUrl ${k}: ${whepOutputs[k].playerUrl}`);
});
}
Run the following command to generate example input at the URL rtmp://127.0.0.1:1935/norsk/source:
ffmpeg -v error -re -stream_loop -1 -i data/InkDrop.ts -vcodec copy -codec copy -f flv 'rtmp://127.0.0.1:1935/norsk/source'
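The mkRungs, mkCmafOutputs and mkWhepOutputs helpers (and the cmafSettings / whepSettings values) come from the shared modules in the examples repo and are not reproduced here. Purely as a hedged sketch of the kind of helper mkRungs might be, assuming rungs shaped like those in the remote-commentary example further down (the names, sizes and bitrates below are illustrative, and the "mkRungsSketch" name is hypothetical):
import { VideoEncodeRung } from "@norskvideo/norsk-sdk";

// Hypothetical stand-in for the repo's mkRungs helper: map rendition
// names to x264 rungs, skipping any name we have no preset for.
function mkRungsSketch(renditions: string[]): VideoEncodeRung[] {
  const presets: Record<string, { width: number; height: number; bitrate: number }> = {
    high: { width: 1280, height: 720, bitrate: 2000 },
    medium: { width: 640, height: 360, bitrate: 750 },
    low: { width: 320, height: 180, bitrate: 500 },
  };
  return renditions.flatMap((name): VideoEncodeRung[] => {
    const preset = presets[name];
    if (!preset) return [];
    return [
      {
        name,
        width: preset.width,
        height: preset.height,
        frameRate: { frames: 25, seconds: 1 },
        codec: {
          type: "x264",
          bitrateMode: { value: preset.bitrate, mode: "abr" },
          keyFrameIntervalMax: 50,
          keyFrameIntervalMin: 50,
          sceneCut: 0,
          tune: "zerolatency",
        },
      },
    ];
  });
}
The real helper also has to cover the "LL-preview" rendition and the hardware ("ma35d") variant used by the MA35D example below, so treat this only as a shape reference.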
Example [19_remote_commentary.ts]
Provide a low-latency view of a stream to a commentator over WebRTC, mix their commentary with the source audio, and publish the result as an HLS ladder.
export async function main() {
const norsk = await Norsk.connect();
let connected = false;
const srtInputSettings: SrtInputSettings = {
id: "srtInput",
host: "0.0.0.0",
port: 5001,
mode: "listener",
sourceName: "camera1",
onConnection: (_) => {
// Accept only one SRT connection
if (connected) {
return { accept: false };
} else {
connected = true;
return { accept: true, sourceName: "source" };
}
}
};
const input = await norsk.input.srt(srtInputSettings);
const previewLadder: VideoEncodeRung[] = [
{
name: "low",
width: 854,
height: 480,
frameRate: { frames: 25, seconds: 1 },
codec: {
type: "x264",
keyFrameIntervalMax: 50,
keyFrameIntervalMin: 50,
sceneCut: 0,
tune: "zerolatency",
bitrateMode: { value: 800, mode: "abr" }
},
},
];
const previewEncode = await norsk.processor.transform.videoEncode({
id: "preview_ladder",
rungs: previewLadder,
});
previewEncode.subscribe([
{ source: input, sourceSelector: selectVideo }
]);
// Preview WebRTC node: subscribed to the preview-quality video encode and the input audio,
// and returning media (the commentator's microphone) from the connected browser
const previewRtc = await norsk.duplex.webRtcBrowser({
id: "previewRtc",
...webRtcServerConfig()
});
previewRtc.subscribe([
{ source: previewEncode, sourceSelector: selectVideo },
{ source: input, sourceSelector: selectAudio }
]);
console.log(`Commentary WebRTC client: ${previewRtc.playerUrl}`);
const mixerSettings: AudioMixSettings<"source" | "comms"> = {
id: "mixer",
onError: (err) => console.log("MIXER ERR", err),
sampleRate: 48000,
sources: [
{ pin: "source" },
{ pin: "comms" }
],
outputSource: "source",
channelLayout: "stereo"
};
const mixer = await norsk.processor.transform.audioMix(mixerSettings);
mixer.subscribeToPins([
{ source: input, sourceSelector: audioToPin('source') },
{ source: previewRtc, sourceSelector: audioToPin('comms') }
]);
const whep2 = await norsk.output.whep({ id: "duplexOut", ...webRtcServerConfig() });
whep2.subscribe([{ source: previewRtc, sourceSelector: selectVideo }]);
console.log(`Commentary Whep client: ${whep2.playerUrl}`);
const finalLadder: VideoEncodeRung[] = [
{
name: "high",
width: 1280,
height: 720,
frameRate: { frames: 25, seconds: 1 },
codec: {
type: "x264",
bitrateMode: { value: 2000, mode: "abr" },
keyFrameIntervalMax: 50,
keyFrameIntervalMin: 50,
bframes: 3,
sceneCut: 0,
profile: "high",
level: 4.1,
preset: "veryfast",
tune: "zerolatency",
},
},
{
name: "medium",
width: 640,
height: 360,
frameRate: { frames: 25, seconds: 1 },
codec: {
type: "x264",
bitrateMode: { value: 750, mode: "abr" },
keyFrameIntervalMax: 50,
keyFrameIntervalMin: 50,
bframes: 0,
sceneCut: 0,
tune: "zerolatency",
},
},
{
name: "low",
width: 320,
height: 180,
frameRate: { frames: 25, seconds: 1 },
codec: {
type: "x264",
bitrateMode: { value: 500, mode: "abr" },
keyFrameIntervalMax: 50,
keyFrameIntervalMin: 50,
bframes: 0,
sceneCut: 0,
tune: "zerolatency",
},
}
];
const finalEncode = await norsk.processor.transform.videoEncode({
id: "final_ladder",
rungs: finalLadder,
});
finalEncode.subscribe([{ source: input, sourceSelector: selectVideo }]);
const destinations: CmafDestinationSettings[] = [{ type: "local", retentionPeriodSeconds: 10, id: "local" }];
const multiVariantPlaylistSettings = { id: "multi-variant", playlistName: "multi-variant", destinations };
const mediaSettings = {
partDurationSeconds: 1.0,
segmentDurationSeconds: 4.0,
destinations,
};
const multiVariantOutput = await norsk.output.cmafMultiVariant(multiVariantPlaylistSettings);
const audioOutput = await norsk.output.cmafAudio({ id: "audio", ...mediaSettings });
const highOutput = await norsk.output.cmafVideo({ id: "high", ...mediaSettings });
const mediumOutput = await norsk.output.cmafVideo({ id: "medium", ...mediaSettings });
const lowOutput = await norsk.output.cmafVideo({ id: "low", ...mediaSettings });
highOutput.subscribe([
{ source: finalEncode, sourceSelector: selectVideoRendition("high") },
]);
mediumOutput.subscribe([
{ source: finalEncode, sourceSelector: selectVideoRendition("medium") },
]);
lowOutput.subscribe([
{ source: finalEncode, sourceSelector: selectVideoRendition("low") },
]);
audioOutput.subscribe([
{ source: mixer, sourceSelector: selectAudio },
]);
multiVariantOutput.subscribe([
{ source: highOutput, sourceSelector: selectPlaylist },
{ source: mediumOutput, sourceSelector: selectPlaylist },
{ source: lowOutput, sourceSelector: selectPlaylist },
{ source: audioOutput, sourceSelector: selectPlaylist },
]);
console.log(`HLS Multi Variant Playlist: ${multiVariantOutput.url}`);
}
Run the following command to generate example input at the URL srt://127.0.0.1:5001?pkt_size=1316:
ffmpeg -v error -re -stream_loop -1 -i data/Weaving.ts -vcodec copy -codec copy -f mpegts -flush_packets 0 'srt://127.0.0.1:5001?pkt_size=1316'
Example [21_ezdrm_ladder.ts]
DASH and HLS ladders, protected with EZDRM.
export async function main(): Promise<void> {
if (
!process.env["EZDRM_TOKEN"] &&
(!process.env["EZDRM_USERNAME"] || !process.env["EZDRM_PASSWORD"])
) {
const envvar = (k: string) =>
"$" + k + " " + (process.env[k] ? "\u2713" : "\u2717");
console.error(
"Error: This example integration requires these environment variables to be set:\n ",
`${envvar("EZDRM_TOKEN")}, or ${envvar("EZDRM_USERNAME")} and ${envvar(
"EZDRM_PASSWORD"
)}\n `,
" From your EZDRM account (see EZDRM's documentation on methods for authentication)\n ",
`${envvar("EZDRM_WV_PX")} (optional, for playback)\n `,
" The last six digits of your Widevine Profile ID\n ",
`${envvar("EZDRM_PR_PX")} (optional, for playback)\n `,
" The last six digits of your PlayReady Profile ID"
);
return process.exit(1);
}
console.log("For testing use the following configuration:");
console.log(
" DRM > Custom License Server URL:\n ",
"https://widevine-dash.ezdrm.com/widevine-php/widevine-foreignkey.php?pX=" +
(process.env["EZDRM_WV_PX"] || "$EZDRM_WV_PX")
);
console.log(
" OR\n ",
"https://playready.ezdrm.com/cency/preauth.aspx?pX=" +
(process.env["EZDRM_PR_PX"] || "$EZDRM_PR_PX")
);
console.log();
const drmResponse = await ezdrm.ezdrmCpix();
const cryptoDetails = cpix.parseCpix(drmResponse);
const cmafSettings = { id: "multi-variant", segmentSettings, cryptoDetails };
const cmafRenditions: AppRenditions = ["low", "medium"];
const norsk = await Norsk.connect();
const input = await norsk.input.rtmpServer({ id: "rtmp" });
const abrLadder = await norsk.processor.transform.videoEncode({
id: "ladder",
rungs: mkRungs(cmafRenditions),
});
const sources = { videoLadder: abrLadder, audio: input };
const cmafOutputs = await mkCmafOutputs(
norsk,
cmafSettings,
sources,
cmafRenditions
);
abrLadder.subscribe([{ source: input, sourceSelector: selectVideo }]);
console.log(`Multi variant playlist: ${cmafOutputs.multivariant.url}`);
cmafRenditions.forEach((k) => {
void cmafOutputs.videos[k]
.url()
.then((url) => console.log(`HLS ${k} Playlist: ${url}`));
});
void cmafOutputs.audio
.url()
.then((url) => console.log(`HLS Audio Playlist: ${url}`));
}
Run the following command to generate example input at the URL rtmp://127.0.0.1:1935/norsk/source:
ffmpeg -v error -re -stream_loop -1 -i data/InkDrop.ts -vcodec copy -codec copy -f flv 'rtmp://127.0.0.1:1935/norsk/source'
Example [22_axinom_ladder.ts]
DASH and HLS ladders, protected with Axinom.
export async function main(): Promise<void> {
const seed: cpix.AVKeyIds = {
audio: "216a1281-c95a-488f-9b5c-0d4f6066e04d",
video: "957f2917-e9ad-41fd-b6d6-0e304170342b",
};
axinom.checkEnv(seed);
const drmResponse = await axinom.axinomCpix(seed);
const cryptoDetails = cpix.parseCpix(drmResponse);
const cmafSettings = { id: "multi-variant", segmentSettings, cryptoDetails };
const cmafRenditions: AppRenditions = ["low", "medium"]; // or ["high", "medium", "low"]
const norsk = await Norsk.connect();
const input = await norsk.input.rtmpServer({ id: "rtmp" });
const abrLadder = await norsk.processor.transform.videoEncode({
id: "ladder",
rungs: mkRungs(cmafRenditions),
});
const sources = { videoLadder: abrLadder, audio: input };
const cmafOutputs = await mkCmafOutputs(
norsk,
cmafSettings,
sources,
cmafRenditions
);
abrLadder.subscribe([{ source: input, sourceSelector: selectVideo }]);
console.log(`Multi variant playlist: ${cmafOutputs.multivariant.url}`);
cmafRenditions.forEach((k) => {
void cmafOutputs.videos[k]
.url()
.then((url) => console.log(`HLS ${k} Playlist: ${url}`));
});
void cmafOutputs.audio
.url()
.then((url) => console.log(`HLS Audio Playlist: ${url}`));
}
Run the following command to generate example input at the URL rtmp://127.0.0.1:1935/norsk/source:
ffmpeg -v error -re -stream_loop -1 -i data/InkDrop.ts -vcodec copy -codec copy -f flv 'rtmp://127.0.0.1:1935/norsk/source'
Example [23_ma35d_ladder.ts]
Build an AMD MA35D-powered ABR ladder from an RTMP source and publish it as HLS and WebRTC.
export async function main(): Promise<void> {
const cmafRenditions: AppRenditions = ["low", "medium", "high", "hevc"];
const whepRenditions: AppRenditions = ["LL-preview"];
// Use a Set to dedupe any renditions shared between the two lists
const allRenditions: AppRenditions = [
...new Set([...cmafRenditions, ...whepRenditions]),
];
const norsk = await Norsk.connect({
onAmdMA35DLoad: (_load) => {
// Hook for monitoring the MA35D device load (unused here; see the sketch after this example)
}
});
const input = await norsk.input.rtmpServer({ id: "rtmp" });
const abrLadder = await norsk.processor.transform.videoEncode({
id: "ladder",
rungs: mkRungs(allRenditions, "ma35d"),
});
const sources = { videoLadder: abrLadder, audio: input };
const cmafOutputs = await mkCmafOutputs(
norsk,
cmafSettings,
sources,
cmafRenditions
);
const whepOutputs = await mkWhepOutputs(
norsk,
whepSettings,
sources,
whepRenditions
);
abrLadder.subscribe([{ source: input, sourceSelector: selectVideo }]);
console.log(`Multi variant playlist: ${cmafOutputs.multivariant.url}`);
cmafRenditions.forEach((k) => {
void cmafOutputs.videos[k]
.url()
.then((url) => console.log(`HLS ${k} Playlist: ${url}`));
});
void cmafOutputs.audio
.url()
.then((url) => console.log(`HLS Audio Playlist: ${url}`));
whepRenditions.forEach((k) => {
console.log(`WebRTC PlayerUrl ${k}: ${whepOutputs[k].playerUrl}`);
});
}
Run the following command to generate example input at the URL rtmp://127.0.0.1:1935/norsk/source:
ffmpeg -v error -re -stream_loop -1 -i data/InkDrop.ts -vcodec copy -codec copy -f flv 'rtmp://127.0.0.1:1935/norsk/source'
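In this example the onAmdMA35DLoad callback is left as an empty hook. If you simply want to observe the card's load while the ladder runs, a minimal sketch (just logging whatever load value the callback is given) could look like this:
const norsk = await Norsk.connect({
  // Invoked with the load reported for the MA35D device; here we only
  // log it so utilisation can be watched while the ladder is running.
  onAmdMA35DLoad: (load) => {
    console.log("MA35D load:", load);
  },
});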
Find Examples
Search for examples using videoEncode in our examples repo.