Skip to content

Commit

Permalink
Modernize RTSP server
Browse files Browse the repository at this point in the history
  • Loading branch information
ngraziano committed Jan 18, 2024
1 parent 56ef07a commit 3174378
Show file tree
Hide file tree
Showing 8 changed files with 183 additions and 181 deletions.
8 changes: 4 additions & 4 deletions RTSP/UdpSocket.cs
Original file line number Diff line number Diff line change
Expand Up @@ -144,17 +144,17 @@ private static async Task DoWorkerJobAsync(UdpClient socket, Action<RtspDataEven
/// <summary>
/// Write to the RTP Data Port
/// </summary>
public void WriteToDataPort(byte[] data, string hostname, int port)
public void WriteToDataPort(ReadOnlySpan<byte> data, string hostname, int port)
{
dataSocket.Send(data, data.Length, hostname, port);
dataSocket.Send(data.ToArray(), data.Length, hostname, port);
}

/// <summary>
/// Write to the RTP Control Port
/// </summary>
public void WriteToControlPort(byte[] data, string hostname, int port)
public void WriteToControlPort(ReadOnlySpan<byte> data, string hostname, int port)
{
dataSocket.Send(data, data.Length, hostname, port);
dataSocket.Send(data.ToArray(), data.Length, hostname, port);
}
}
}
27 changes: 14 additions & 13 deletions RtspCameraExample/CJOCh264bitstream.cs
Original file line number Diff line number Diff line change
Expand Up @@ -94,10 +94,6 @@ private void AddBitToStream(int nVal)
nLastBitInBuffer++;
}

//!
/*!
\param
*/
/// <summary>
/// Adds 8 bit to the end of h264 bitstream (it is optimized for byte aligned situations)
/// </summary>
Expand Down Expand Up @@ -160,7 +156,8 @@ rbsp_data shall be in the following way
*/

//Check if emulation prevention is needed (emulation prevention is byte align defined)
if ((BufferAt(0) == 0x00)
if (nLastBitInBuffer / 8 >= 3
&& (BufferAt(0) == 0x00)
&& (BufferAt(1) == 0x00)
&& ((BufferAt(1) == 0x00) || (BufferAt(2) == 0x01) || (BufferAt(2) == 0x02) || (BufferAt(2) == 0x03)))
{
Expand Down Expand Up @@ -191,7 +188,7 @@ rbsp_data shall be in the following way
/// <exception cref="Exception"></exception>
public void Add4BytesNoEmulationPrevention(uint value, bool doAlign = false)
{
ObjectDisposedException.ThrowIf(disposedValue, GetType());
ObjectDisposedException.ThrowIf(disposedValue,typeof(CJOCh264bitstream));

//Used to add NAL header stream
//Remember: NAL header is byte oriented
Expand Down Expand Up @@ -233,7 +230,7 @@ public void Add4BytesNoEmulationPrevention(uint value, bool doAlign = false)
/// <exception cref="ArgumentOutOfRangeException">if nNumbits is too large</exception>
public void AddBits(uint lval, int nNumbits)
{
ObjectDisposedException.ThrowIf(disposedValue, GetType());
ObjectDisposedException.ThrowIf(disposedValue,typeof(CJOCh264bitstream));

if ((nNumbits <= 0) || (nNumbits > 64))
{
Expand All @@ -256,7 +253,7 @@ public void AddBits(uint lval, int nNumbits)
/// <param name="lval">value to add at the end of the h264 stream</param>
public void AddExpGolombUnsigned(uint lval)
{
ObjectDisposedException.ThrowIf(disposedValue, GetType());
ObjectDisposedException.ThrowIf(disposedValue,typeof(CJOCh264bitstream));

//it implements unsigned exp golomb coding
uint lvalint = lval + 1;
Expand All @@ -275,7 +272,7 @@ public void AddExpGolombUnsigned(uint lval)
/// <param name="lval">value to add at the end of the h264 stream</param>
public void AddExpGolombSigned(int lval)
{
ObjectDisposedException.ThrowIf(disposedValue, GetType());
ObjectDisposedException.ThrowIf(disposedValue,typeof(CJOCh264bitstream));

//it implements a signed exp golomb coding

Expand All @@ -286,10 +283,12 @@ public void AddExpGolombSigned(int lval)
AddExpGolombUnsigned(lvalint);
}

//! Adds 0 to the end of h264 bitstream in order to leave a byte aligned stream (It will insert seven 0 maximum)
/// <summary>
/// Adds 0 to the end of h264 bitstream in order to leave a byte aligned stream (It will insert seven 0 maximum)
/// </summary>
public void DoByteAlign()
{
ObjectDisposedException.ThrowIf(disposedValue, GetType());
ObjectDisposedException.ThrowIf(disposedValue,typeof(CJOCh264bitstream));

//Check if the last bit in buffer is multiple of 8
int nr = nLastBitInBuffer % 8;
Expand All @@ -305,7 +304,7 @@ public void DoByteAlign()
/// <param name="cByte">value to add at the end of the h264 stream (from 0 to 255)</param>
public void AddByte(byte cByte)
{
ObjectDisposedException.ThrowIf(disposedValue, GetType());
ObjectDisposedException.ThrowIf(disposedValue,typeof(CJOCh264bitstream));

//Byte alignment optimization
if ((nLastBitInBuffer % 8) == 0)
Expand All @@ -318,7 +317,9 @@ public void AddByte(byte cByte)
}
}

//! Close the h264 stream saving to disk the last remaining bits in buffer
/// <summary>
/// Close the h264 stream saving to disk the last remaining bits in buffer
/// </summary>
public void Flush()
{
//Flush the data in stream buffer
Expand Down
103 changes: 44 additions & 59 deletions RtspCameraExample/CJOCh264encoder.cs
Original file line number Diff line number Diff line change
Expand Up @@ -23,9 +23,8 @@ public enum SampleFormat
SAMPLE_FORMAT_YUV420p //!< SAMPLE_FORMAT_YUV420p
}

public byte[]? sps;
public byte[]? pps;
public byte[]? nal;
public readonly byte[] sps;
public readonly byte[] pps;

/*!Set the used Y macroblock size for I PCM in YUV420p */
private const int MACROBLOCK_Y_WIDTH = 16;
Expand All @@ -51,7 +50,7 @@ private class Frame
}

/*! The frame var*/
private readonly Frame frame = new();
private readonly Frame frame;

/*! The frames per second var*/
private uint m_nFps;
Expand Down Expand Up @@ -230,11 +229,8 @@ private void CreateSliceFooter()
*/

//Creates & saves a macroblock (coded INTRA 16x16)
private void CreateMacroblock(int nYpos, int nXpos, Span<byte> frameBuffer)
private void CreateMacroblock(int nYpos, int nXpos, ReadOnlySpan<byte> frameBuffer)
{



CreateMacroblockHeader();

stream.DoByteAlign();
Expand All @@ -245,98 +241,87 @@ private void CreateMacroblock(int nYpos, int nXpos, Span<byte> frameBuffer)
{
for (int x = nXpos * frame.nYmbwidth; x < (nXpos + 1) * frame.nYmbwidth; x++)
{
stream.AddByte(frameBuffer[y * frame.nYwidth + x]);
stream.AddByte(frameBuffer[(y * frame.nYwidth) + x]);
}
}

//Cb
int nCsize = frame.nCwidth * frame.nCheight;
var bufferCb = frameBuffer[nYsize..];
for (int y = nYpos * frame.nCmbheight; y < (nYpos + 1) * frame.nCmbheight; y++)
{
for (int x = nXpos * frame.nCmbwidth; x < (nXpos + 1) * frame.nCmbwidth; x++)
{
stream.AddByte(frameBuffer[nYsize + (y * frame.nCwidth + x)]);
stream.AddByte(bufferCb[(y * frame.nCwidth) + x]);
}
}

//Cr
var bufferCr = frameBuffer[(nYsize + nCsize)..];
for (int y = nYpos * frame.nCmbheight; y < (nYpos + 1) * frame.nCmbheight; y++)
{
for (int x = nXpos * frame.nCmbwidth; x < (nXpos + 1) * frame.nCmbwidth; x++)
{
stream.AddByte(frameBuffer[nYsize + nCsize + (y * frame.nCwidth + x)]);
stream.AddByte(bufferCr[(y * frame.nCwidth) + x]);
}
}
}


public CJOCh264encoder()


/// <summary>
/// Initializes the h264 coder (mini-coder)
/// </summary>
/// <param name="nImW">Frame width in pixels</param>
/// <param name="nImH">Frame height in pixels</param>
/// <param name="nImFps">Desired frames per second of the output file (typical values are: 25, 30, 50, etc)</param>
/// <param name="sampleFormat">Sample format of the input file. In this implementation only SAMPLE_FORMAT_YUV420p is allowed</param>
/// <param name="nSARw">Indicates the horizontal size of the sample aspect ratio (typical values are:1, 4, 16, etc)</param>
/// <param name="nSARh">Indicates the vertical size of the sample aspect ratio (typical values are:1, 3, 9, etc)</param>
/// <exception cref="ArgumentException"></exception>
/// <exception cref="Exception"></exception>
public CJOCh264encoder(int nImW, int nImH, uint nImFps, SampleFormat sampleFormat, uint nSARw = 1, uint nSARh = 1)
{
m_lNumFramesAdded = 0;
stream = new CJOCh264bitstream(baseStream);
m_nFps = 25;
}

//! Initializes the coder
/*!
\param nImW Frame width in pixels
\param nImH Frame height in pixels
\param nFps Desired frames per second of the output file (typical values are: 25, 30, 50, etc)
\param SampleFormat Sample format of the input file. In this implementation only SAMPLE_FORMAT_YUV420p is allowed
\param nSARw Indicates the horizontal size of the sample aspect ratio (typical values are:1, 4, 16, etc)
\param nSARh Indicates the vertical size of the sample aspect ratio (typical values are:1, 3, 9, etc)
*/

//public functions

//Initializes the h264 coder (mini-coder)
public void IniCoder(int nImW, int nImH, uint nImFps, SampleFormat sampleFormat, uint nSARw = 1, uint nSARh = 1)
{


m_lNumFramesAdded = 0;

if (sampleFormat != SampleFormat.SAMPLE_FORMAT_YUV420p)
{
throw new ArgumentException("Error: SAMPLE FORMAT not allowed. Only yuv420p is allowed in this version", nameof(sampleFormat));
}
//In this implementation only picture sizes multiples of macroblock size (16x16) are allowed
if (((nImW % MACROBLOCK_Y_WIDTH) != 0) || ((nImH % MACROBLOCK_Y_HEIGHT) != 0))
{
throw new Exception("Error: size not allowed. Only multiples of macroblock are allowed (macroblock size is: 16x16)");
}

//Ini vars
frame.sampleformat = sampleFormat;
frame.nYwidth = nImW;
frame.nYheight = nImH;
if (sampleFormat == SampleFormat.SAMPLE_FORMAT_YUV420p)
frame = new Frame
{
//Ini vars
sampleformat = sampleFormat,
nYwidth = nImW,
nYheight = nImH,
//Set macroblock Y size
frame.nYmbwidth = MACROBLOCK_Y_WIDTH;
frame.nYmbheight = MACROBLOCK_Y_HEIGHT;
nYmbwidth = MACROBLOCK_Y_WIDTH,
nYmbheight = MACROBLOCK_Y_HEIGHT,

//Set macroblock C size (in YUV420 is 1/2 of Y)
frame.nCmbwidth = MACROBLOCK_Y_WIDTH / 2;
frame.nCmbheight = MACROBLOCK_Y_HEIGHT / 2;

nCmbwidth = MACROBLOCK_Y_WIDTH / 2,
nCmbheight = MACROBLOCK_Y_HEIGHT / 2,
//Set C size
frame.nCwidth = frame.nYwidth / 2;
frame.nCheight = frame.nYheight / 2;

//In this implementation only picture sizes multiples of macroblock size (16x16) are allowed
if (((nImW % MACROBLOCK_Y_WIDTH) != 0) || ((nImH % MACROBLOCK_Y_HEIGHT) != 0))
{
throw new Exception("Error: size not allowed. Only multiples of macroblock are allowed (macroblock size is: 16x16)");
}
}
nCwidth = nImW / 2,
nCheight = nImW / 2
};
m_nFps = nImFps;



//Create h264 SPS & PPS
CreateSps(frame.nYwidth, frame.nYheight, frame.nYmbwidth, frame.nYmbheight, nImFps, nSARw, nSARh);
stream.Flush(); // Flush data to the List<byte>
stream.Flush();
sps = baseStream.ToArray();
baseStream.SetLength(0);

CreatePPS();
stream.Flush(); // Flush data to the List<byte>
stream.Flush();
pps = baseStream.ToArray();
baseStream.SetLength(0);
}
Expand All @@ -346,7 +331,7 @@ public void IniCoder(int nImW, int nImH, uint nImFps, SampleFormat sampleFormat,
//! It codes the frame that is in frame memory a it saves the coded data to disc

//Codifies & saves the video frame (it only uses 16x16 intra PCM -> NO COMPRESSION!)
public void CodeAndSaveFrame(Span<byte> frameBuffer)
public byte[] CodeAndSaveFrame(ReadOnlySpan<byte> frameBuffer)
{
baseStream.SetLength(0);

Expand All @@ -369,7 +354,7 @@ public void CodeAndSaveFrame(Span<byte> frameBuffer)

// flush
stream.Flush();
nal = baseStream.ToArray();
return baseStream.ToArray();
}

//! Returns number of coded frames
Expand Down
8 changes: 4 additions & 4 deletions RtspCameraExample/Program.cs
Original file line number Diff line number Diff line change
Expand Up @@ -122,16 +122,16 @@ public Demo(ILoggerFactory loggerFactory)
}


private void Video_source_ReceivedYUVFrame(uint timestamp_ms, int width, int height, Span<byte> yuv_data)
private void Video_source_ReceivedYUVFrame(uint timestamp_ms, int width, int height, ReadOnlySpan<byte> yuv_data)
{

// Compress the YUV and feed into the RTSP Server
byte[] raw_video_nal = h264Encoder.CompressFrame(yuv_data);
var raw_video_nal = h264Encoder.CompressFrame(yuv_data);
bool isKeyframe = true; // the Simple/Tiny H264 Encoders only return I-Frames for every video frame.


// Put the NALs into a List
List<byte[]> nal_array = [];
List<ReadOnlyMemory<byte>> nal_array = [];

// We may want to add the SPS and PPS to the H264 stream as in-band data.
// This may be of use if the client did not parse the SPS/PPS in the SDP or if the H264 encoder
Expand All @@ -153,7 +153,7 @@ private void Video_source_ReceivedYUVFrame(uint timestamp_ms, int width, int hei
rtspServer.FeedInRawNAL(timestamp_ms, nal_array);
}

private void Audio_source_ReceivedAudioFrame(uint timestamp_ms, short[] audio_frame)
private void Audio_source_ReceivedAudioFrame(uint timestamp_ms, ReadOnlySpan<short> audio_frame)
{
// Compress the audio into G711 and feed into the RTSP Server
byte[] g711_data = ulaw_encoder.EncodeULaw(audio_frame);
Expand Down
Loading

0 comments on commit 3174378

Please sign in to comment.