NorskInput.rtmpServer() method

Create an RTMP server to receive RTMP streams into your application.

Signature:

rtmpServer(settings: RtmpServerInputSettings): Promise<RtmpServerInputNode>;

Parameters

Parameter   Type                      Description
settings    RtmpServerInputSettings   Configuration for the RTMP server

Returns:

Promise<RtmpServerInputNode>

A promise that resolves to the created RTMP server input node.
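
The later examples configure the server with callbacks (onConnection, onStream, onConnectionStatusChange) as well as an id and port. A minimal sketch of that shape, using only fields that appear in the examples on this page (norsk is assumed to be an existing connection from Norsk.connect(); see RtmpServerInputSettings for the full set of options and OnStreamResult for the full shape of the onStream return value):

let input = await norsk.input.rtmpServer({
  id: "rtmpInput", // identifies this node
  port: 5001,      // TCP port the RTMP listener binds to
  onConnection: (app: string, url: string) => {
    // accept or reject the incoming connection
    return { accept: true };
  },
  onStream: (app: string, url: string, streamId: number, publishingName: string) => {
    // accept the published stream; explicit stream keys can also be returned here,
    // as shown in the fuller server example below
    return { accept: true };
  },
});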

Example [01_rtmp_to_webrtc.ts]

Subscribe to an RTMP source and generate local WebRTC output from it

let input = await norsk.input.rtmpServer({ id: "rtmpInput", port: 5001 }); 
let output = await norsk.duplex.webRtcBrowser({ id: "webrtc" }); 

output.subscribe([{ source: input, sourceSelector: selectAV }]); 
console.log(`Local player: ${output.playerUrl}`);

Run the following command to generate example input at the URL rtmp://127.0.0.1:5001/norsk/high:

ffmpeg -v error -re -f lavfi -i "sine=frequency=220:sample_rate=48000" -loop 1 -i data/test-src-still.png -vf drawtext=fontfile=Arial.ttf:text="%{frame_num}":start_number=1:x=980:y=330:fontcolor=black:fontsize=40:box=1:boxcolor=white:boxborderw=5,scale=1280:720 -vcodec h264 -b:v 150000 -b:a 20000 -aspect 1280:720 -x264opts "keyint=25:min-keyint=25:no-scenecut:bframes=0" -bluray-compat true -tune stillimage -preset fast -pix_fmt yuv420p -acodec aac -metadata language=en -f flv 'rtmp://127.0.0.1:5001/norsk/high'

Example [03_rtmp_to_hls.ts]

Package an inbound RTMP stream into LL-HLS

export async function main() {
  const norsk = await Norsk.connect();

  let input = await norsk.input.rtmpServer({ id: "rtmpInput", port: 5001 });
  let destinations: CMAFDestinationSettings[] = [{ type: "local", retentionPeriodSeconds: 10 }];

  let audioOutput = await norsk.output.cmafAudio({ id: "audio", destinations, ...segmentSettings });
  let videoOutput = await norsk.output.cmafVideo({ id: "video", destinations, ...segmentSettings });
  let masterOutput = await norsk.output.cmafMaster({ id: "master", playlistName: "master", destinations });

  audioOutput.subscribe([{ source: input, sourceSelector: selectAudio }]);
  videoOutput.subscribe([{ source: input, sourceSelector: selectVideo }]);
  masterOutput.subscribe([{ source: input, sourceSelector: selectAV }]);

  console.log(`Master playlist: ${masterOutput.playlistUrl}`);
  audioOutput.url().then(logMediaPlaylist("audio"));
  videoOutput.url().then(logMediaPlaylist("video"));
}

const segmentSettings = {
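  // 1.0-second LL-HLS parts within 4.0-second segments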
  partDurationSeconds: 1.0,
  segmentDurationSeconds: 4.0,
};

function logMediaPlaylist(name: string): (url: string) => void {
  return (
    url => { console.log(`${name} playlistUrl: ${url}`); }
  );
}

Run the following command to generate example input at the URL rtmp://127.0.0.1:5001/norsk/default:

ffmpeg -v error -re -f lavfi -i "sine=frequency=220:sample_rate=48000" -loop 1 -i data/test-src-still.png -vf drawtext=fontfile=Arial.ttf:text="%{frame_num}":start_number=1:x=980:y=330:fontcolor=black:fontsize=40:box=1:boxcolor=white:boxborderw=5,scale=1280:720 -vcodec h264 -b:v 150000 -b:a 20000 -aspect 1280:720 -x264opts "keyint=25:min-keyint=25:no-scenecut:bframes=0" -bluray-compat true -tune stillimage -preset fast -pix_fmt yuv420p -acodec aac -metadata language=en -f flv 'rtmp://127.0.0.1:5001/norsk/default'

Example [04_srt_to_hls.ts]

Package an SRT stream into LL-HLS

export async function main() {
  const norsk = await Norsk.connect();

  let input = await norsk.input.srt(srtInputSettings);
  let destinations: CMAFDestinationSettings[] = [{ type: "local", retentionPeriodSeconds: 10 }];

  let audioOutput = await norsk.output.cmafAudio({ id: "audio", destinations, ...segmentSettings });
  let videoOutput = await norsk.output.cmafVideo({ id: "video", destinations, ...segmentSettings });
  let masterOutput = await norsk.output.cmafMaster({ id: "master", playlistName: "master", destinations });

  // Inbound SRT streams may not declare their bitrates, so override the stream metadata
  // with nominal values for the downstream HLS outputs to use.
  let streamMetadataOverride = await norsk.processor.transform.streamMetadataOverride({
    id: "setBitrate",
    video: { bitrate: 150_000 },
    audio: { bitrate: 20_000 },
  });
  streamMetadataOverride.subscribe([
    { source: input, sourceSelector: selectAV },
  ]);

  audioOutput.subscribe([{ source: streamMetadataOverride, sourceSelector: selectAudio }]);
  videoOutput.subscribe([{ source: streamMetadataOverride, sourceSelector: selectVideo }]);
  masterOutput.subscribe([{ source: streamMetadataOverride, sourceSelector: selectAV }]);

  console.log(`Master playlist: ${masterOutput.playlistUrl}`);
  audioOutput.url().then(logMediaPlaylist("audio"));
  videoOutput.url().then(logMediaPlaylist("video"));
}

const segmentSettings = {
  partDurationSeconds: 1.0,
  segmentDurationSeconds: 4.0,
};

const srtInputSettings: SrtInputSettings = {
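  // Listen on 127.0.0.1:5001 for an inbound SRT caller (see the ffmpeg command below)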
  id: "srtInput",
  ip: "127.0.0.1",
  port: 5001,
  mode: "listener",
  sourceName: "camera1",
};

function logMediaPlaylist(name: string): (url: string) => void {
  return (
    url => { console.log(`${name} playlistUrl: ${url}`); }
  );
}

Run the following command to generate example input at the URL srt://127.0.0.1:5001:

ffmpeg -v error -re -f lavfi -i "sine=frequency=220:sample_rate=48000" -loop 1 -i data/test-src-still.png -vf drawtext=fontfile=Arial.ttf:text="%{frame_num}":start_number=1:x=980:y=330:fontcolor=black:fontsize=40:box=1:boxcolor=white:boxborderw=5,scale=1280:720 -vcodec h264 -b:v 150000 -b:a 20000 -aspect 1280:720 -x264opts "keyint=25:min-keyint=25:no-scenecut:bframes=0" -bluray-compat true -tune stillimage -preset fast -pix_fmt yuv420p -acodec aac -metadata language=en -f mpegts -flush_packets 0 'srt://127.0.0.1:5001'

Example [08_pid_normalization.ts]

Receive an RTMP stream and package it in a transport stream with explicit PID mappings

let input = await norsk.input.rtmpServer({ id: "rtmpInput", port: 5001 });
let videoPidNormalizer = await norsk.processor.transform.streamKeyOverride(videoStreamKeyConfig);
let audioPidNormalizer = await norsk.processor.transform.streamKeyOverride(audioStreamKeyConfig);
let output1 = await norsk.duplex.webRtcBrowser({ id: "webrtc" });
let output2 = await norsk.output.fileTs(tsFileOutputSettings);

videoPidNormalizer.subscribe([{ source: input, sourceSelector: selectVideo }]);
audioPidNormalizer.subscribe([{ source: input, sourceSelector: selectAudio }]);

let normalizedSources = [{ source: videoPidNormalizer, sourceSelector: selectVideo }, { source: audioPidNormalizer, sourceSelector: selectAudio }];
output1.subscribe(normalizedSources);
output2.subscribe(normalizedSources);

console.log(`Local player: ${output1.playerUrl}`);

Run the following command to generate example input at the URL rtmp://127.0.0.1:5001/norsk/high:

ffmpeg -v error -re -f lavfi -i "sine=frequency=220:sample_rate=48000" -loop 1 -i data/test-src-still.png -vf drawtext=fontfile=Arial.ttf:text="%{frame_num}":start_number=1:x=980:y=330:fontcolor=black:fontsize=40:box=1:boxcolor=white:boxborderw=5,scale=1280:720 -vcodec h264 -b:v 150000 -b:a 20000 -aspect 1280:720 -x264opts "keyint=25:min-keyint=25:no-scenecut:bframes=0" -bluray-compat true -tune stillimage -preset fast -pix_fmt yuv420p -acodec aac -metadata language=en -f flv 'rtmp://127.0.0.1:5001/norsk/high'

Example [18_rtmp_server.ts]

A more fully featured RTMP server showing the use of callbacks for securing and managing incoming connections and streams

export async function main() {
  const norsk = await Norsk.connect();

  let input = await norsk.input.rtmpServer({
    id: "rtmp",
    port: 5001,

    onConnection: (app: string, url: string) => {
      console.log("Got RTMP connection", app, url);
      return { accept: true }; // accept all!!!
    },

    onStream: (
      app: string,
      url: string,
      streamId: number,
      publishingName: string
    ) => {
      if (!(publishingName in allowedRenditions)) {
        return {
          accept: false,
          reason: "only known rendition names are accepted around here",
        };
      }

      console.log("Got RTMP stream", app, url, streamId, publishingName);
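      // This callback returns its accept decision synchronously, so the output setup is
      // done in an async function that is started (but not awaited) further down.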
      let onStream = async () => {
        const destinations: CMAFDestinationSettings[] = [{ type: "local", retentionPeriodSeconds: 10 }]
        // Register this app if we've not seen it before, and start up a master playlist for it
        if (!knownApps[app]) {
          let settings: CmafMasterOutputSettings = {
            id: "hls-master-" + app,
            playlistName: app,
            destinations,
          };
          let masterPlaylist = await norsk.output.cmafMaster(settings);
          knownApps[app] = { master: masterPlaylist, sources: [], webrtc: [] };
          console.log(`Local player: ${masterPlaylist.playlistUrl}`);
        }
        // Create a single WebRTC output for this new stream
        let webRtcOutput = await norsk.duplex.webRtcBrowser({
          id: "webrtc-" + app + "-" + publishingName,
        });
        webRtcOutput.subscribe([subscribeAV(input, app, publishingName)]);
        knownApps[app].webrtc.push(webRtcOutput);
        console.log(`Local player: ${webRtcOutput.playerUrl}`);

        // Create a single audio HLS output for this new stream
        let audioOutput = await norsk.output.cmafAudio({
          id: "hls-" + app + "-" + publishingName + "-audio",
          partDurationSeconds,
          segmentDurationSeconds,
          destinations
        });
        audioOutput.subscribe([subscribeAudio(input, app, publishingName)]);

        // Create a single video HLS output for this new stream
        let videoOutput = await norsk.output.cmafVideo({
          id: "hls-" + app + "-" + publishingName + "-video",
          partDurationSeconds,
          segmentDurationSeconds,
          destinations,
        });
        videoOutput.subscribe([subscribeVideo(input, app, publishingName)]);

        // Add this to the list of renditions we know about
        knownApps[app].sources.push(publishingName);

        // And re-subscribe the master playlist to all of the known about renditions
        knownApps[app].master.subscribe(
          knownApps[app].sources.map((r) => subscribeAV(input, app, r))
        );
      };
      onStream();

      return {
        accept: true,
        // These are in fact the defaults
        audioStreamKey: {
          programNumber: 1,
          streamId: 1,
          sourceName: app,
          renditionName: publishingName,
        },
        videoStreamKey: {
          programNumber: 1,
          streamId: 2,
          sourceName: app,
          renditionName: publishingName,
        },
      };
    },
  });
}

Run the following command to generate example input at the URL rtmp://127.0.0.1:5001/norsk/high:

ffmpeg -v error -re -f lavfi -i "sine=frequency=220:sample_rate=48000" -loop 1 -i data/test-src-still.png -vf drawtext=fontfile=Arial.ttf:text="%{frame_num}":start_number=1:x=980:y=330:fontcolor=black:fontsize=40:box=1:boxcolor=white:boxborderw=5,scale=1280:720 -vcodec h264 -b:v 150000 -b:a 20000 -aspect 1280:720 -x264opts "keyint=25:min-keyint=25:no-scenecut:bframes=0" -bluray-compat true -tune stillimage -preset fast -pix_fmt yuv420p -acodec aac -metadata language=en -f flv 'rtmp://127.0.0.1:5001/norsk/high'

Example [19_rtmp_mosaic.ts]

Create an RTMP server and dynamically compose any inbound streams into a tiled mosaic, publishing the result as Transport Stream-based HLS

export async function main() {
  const norsk = await Norsk.connect();

  let audioSignalInput = await norsk.input.audioSignal(audioInputSettings());

  let mosaic = new Mosaic(norsk, audioSignalInput);

  await mosaic.run();
}

class Mosaic {
  norsk: Norsk;
  audioSignalInput: AudioSignalGeneratorNode;
  rtmpInput: RtmpServerInputNode | undefined;
  compose: VideoComposeNode<string> | undefined = undefined;
  streams: string[] = [];

  constructor(norsk: Norsk, audioSignalInput: AudioSignalGeneratorNode) {
    this.norsk = norsk;
    this.audioSignalInput = audioSignalInput;
  }

  async run() {
    this.rtmpInput = await this.norsk.input.rtmpServer({
      id: "rtmp",
      port: 1935,
      onConnection: this.onConnection.bind(this),
      onStream: this.onStream.bind(this),
      onConnectionStatusChange: this.onConnectionStatusChange.bind(this)
    });
  }

  onConnection(app: string, _url: string) {
    if (app === "mosaic") {
      return { accept: true };
    } else {
      return { accept: false, reason: "App name must be mosaic" };
    }
  }

  onStream(_app: string, _url: string, _streamId: number, publishingName: string): OnStreamResult {
    this.streams.push(publishingName);
    this.handleNewStream();

    return {
      accept: true,
      videoStreamKey: {
        renditionName: "default",
        sourceName: publishingName,
      },
      audioStreamKey: {
        renditionName: "default",
        sourceName: publishingName,
      },
    };
  }

  onConnectionStatusChange(status: string, streamKeys: RtmpServerStreamKeys) {
    if (status !== "disconnected") {
      // We only handle the "disconnected" state
      return;
    }
    for (let key of streamKeys) {
      let stream = key.videoStreamKey?.sourceName?.sourceName;
      this.streams = this.streams.filter((x) => x !== stream);
      console.log(`Stream disconnected: ${stream}`);
      this.handleNewStream();
    }
  }

  handleNewStream() {
    if (this.compose === undefined && this.streams.length > 0) {
      this.norsk.processor.transform
        .videoCompose({
          id: "compose",
          referenceStream: this.streams[0],
          referenceResolution: { width: 100, height: 100 }, // make it % based
          outputResolution: { width: 1280, height: 720 },
          parts: createParts(this.streams),
        })
        .then(async (x) => {
          this.compose = x;
          this.compose?.subscribeToPins([
            {
              source: this.rtmpInput!,
              sourceSelector: (streamMetadata: StreamMetadata[]) => {
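                // Route each known stream's video onto the compose pin named after that stream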
                let pins: PinToKey<string> = {};
                for (let stream of this.streams) {
                  pins[stream] = videoStreamKeys(streamMetadata).filter(
                    (x) => x?.sourceName == stream
                  );
                }
                return pins;
              },
            },
          ]);

          let encode = await this.norsk.processor.transform.videoEncode({
            id: "ladder1",
            rungs: [ mkRung("high", 854, 480, 800000) ]
          });
          encode.subscribe([
            { source: this.compose, sourceSelector: videoStreamKeys },
          ]);

          let output = await this.norsk.output.hlsTsVideo({
            id: "video",
            segmentDurationSeconds: 4.0,
          });
          output.subscribe([
            { source: encode, sourceSelector: videoStreamKeys },
          ]);
          console.log(
            "Media playlist",
            "http://localhost:8080/localHls/file/stream/2-high/playlist.m3u8"
          );

          let rtcOutput = await this.norsk.duplex.webRtcBrowser({ id: "webrtc" });
          rtcOutput.subscribe([
            { source: encode, sourceSelector: videoStreamKeys },
            { source: this.audioSignalInput, sourceSelector: audioStreamKeys },
          ]);
          console.log("Local player: " + rtcOutput.playerUrl);
        });
    } else if (this.streams.length > 0) {
      this.compose?.updateConfig({ parts: createParts(this.streams) });
    }
  };
}

function createParts(streams: string[]) {
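  // Lay the streams out on a roughly square grid: ceil(sqrt(N)) columns, so for example
  // 3 or 4 streams give a 2x2 grid where each tile covers 50% x 50% of the output.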
  let division = Math.ceil(Math.sqrt(streams.length));
  return streams.map((stream, i) => ({
    destRect: {
      width: 100 / division,
      height: 100 / division,
      x: (100 / division) * (i % division),
      y: (100 / division) * Math.floor(i / division),
    },
    opacity: 1.0,
    pin: stream,
    sourceRect: { x: 0, y: 0, width: 100, height: 100 },
    zIndex: 1,
  }));
}

function audioInputSettings(): AudioSignalGeneratorSettings {
  return {
    sourceName: "wave1",
    channelLayout: "stereo",
    sampleRate: 48000,
    sampleFormat: "s16p",
    wave: mkSine(220),
  };
}

function mkRung(name: string, width: number, height: number, bitrate: number): VideoEncodeRung {
  return {
    name,
    width,
    height,
    frameRate: { frames: 25, seconds: 1 },
    codec: {
      type: "x264",
      bitrateMode: { value: bitrate, mode: "abr" },
      keyFrameIntervalMax: 50,
      keyFrameIntervalMin: 50,
      sceneCut: 0,
      bframes: 0,
      tune: "zerolatency",
    },
  };
}

Run the following command to generate example input at the URL rtmp://127.0.0.1:1935/mosaic/high:

ffmpeg -v error -re -f lavfi -i "sine=frequency=220:sample_rate=48000" -loop 1 -i data/test-src-still.png -vf drawtext=fontfile=Arial.ttf:text="%{frame_num}":start_number=1:x=980:y=330:fontcolor=black:fontsize=40:box=1:boxcolor=white:boxborderw=5,scale=1280:720 -vcodec h264 -b:v 150000 -b:a 20000 -aspect 1280:720 -x264opts "keyint=25:min-keyint=25:no-scenecut:bframes=0" -bluray-compat true -tune stillimage -preset fast -pix_fmt yuv420p -acodec aac -metadata language=en -f flv 'rtmp://127.0.0.1:1935/mosaic/high'
