Encoding live video as H265 and streaming it over RTP
Anonymous » 13 Mar 2025, 23:40
I am trying to encode live video as H265 and send the encoded stream over RTP. The images are grabbed from a Basler GigE camera using the Pylon API, encoded as H265 with the x265 library, and then sent over RTP with the ccRTP library. I am using a GStreamer client to test reception. I run gst-launch-1.0 -v udpsrc address=239.0.0.1 port=5004 ! application/x-rtp,encoding-name=H265 ! fakesink dump=true as the client and get this after starting the server:
Code:
gst-launch-1.0 -v udpsrc address=239.0.0.1 port=5004 ! application/x-rtp,encoding-name=H265 ! fakesink dump=true
Setting pipeline to PAUSED ...
Pipeline is live and does not need PREROLL ...
Pipeline is PREROLLED ...
Setting pipeline to PLAYING ...
New clock: GstSystemClock
/GstPipeline:pipeline0/GstCapsFilter:capsfilter0.GstPad:src: caps = application/x-rtp, encoding-name=(string)H265
/GstPipeline:pipeline0/GstFakeSink:fakesink0.GstPad:sink: caps = application/x-rtp, encoding-name=(string)H265
Redistribute latency...
00000000 (0x7fa31c0069b0): 80 60 04 c1 87 73 a4 fb a1 36 c7 9e 00 00 00 01 .`...s...6......
00000010 (0x7fa31c0069c0): 42 01 01 01 60 00 00 03 00 90 00 00 03 00 00 03 B...`...........
00000020 (0x7fa31c0069d0): 00 5a a0 05 a2 00 88 7d e5 ba 4a 4c 2e 01 00 00 .Z.....}..JL....
00000030 (0x7fa31c0069e0): 03 00 01 00 00 03 00 1e 08 .........
00000000 (0x7fa31c019b00): 80 60 04 c2 87 75 04 8b a1 36 c7 9e 00 00 00 01 .`...u...6......
00000010 (0x7fa31c019b10): 44 01 c0 71 83 12 D..q..
00000000 (0x7fa31c01a180): 80 60 04 c3 87 76 64 1b a1 36 c7 9e 00 00 01 4e .`...vd..6.....N
00000010 (0x7fa31c01a190): 01 05 ff ff ff ff ff ff ff ff f8 2c a2 de 09 b5 ...........,....
00000020 (0x7fa31c01a1a0): 17 47 db bb 55 a4 fe 7f c2 fc 4e 78 32 36 35 20 .G..U.....Nx265
00000030 (0x7fa31c01a1b0): 28 62 75 69 6c 64 20 32 31 35 29 20 2d 20 34 2e (build 215) - 4.
00000040 (0x7fa31c01a1c0): 31 2b 31 2d 31 64 31 31 37 62 65 3a 5b 4c 69 6e 1+1-1d117be:[Lin
00000050 (0x7fa31c01a1d0): 75 78 5d 5b 47 43 43 20 31 32 2e 32 2e 30 5d 5b ux][GCC 12.2.0][
00000060 (0x7fa31c01a1e0): 36 34 20 62 69 74 5d 5b 6e 6f 61 73 6d 5d 20 38 64 bit][noasm] 8
00000070 (0x7fa31c01a1f0): 62 69 74 20 2d 20 48 2e 32 36 35 2f 48 45 56 43 bit - H.265/HEVC
00000080 (0x7fa31c01a200): 20 63 6f 64 65 63 20 2d 20 43 6f 70 79 72 69 67 codec - Copyrig
00000090 (0x7fa31c01a210): 68 74 20 32 30 31 33 2d 32 30 31 38 20 28 63 29 ht 2013-2018 (c)
000000a0 (0x7fa31c01a220): 20 4d 75 6c 74 69 63 6f 72 65 77 61 72 65 2c 20 Multicoreware,
000000b0 (0x7fa31c01a230): 49 6e 63 20 2d 20 68 74 74 70 3a 2f 2f 78 32 36 Inc - http://x26
000000c0 (0x7fa31c01a240): 35 2e 6f 72 67 20 2d 20 6f 70 74 69 6f 6e 73 3a 5.org - options:
etc.
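For comparison, a receiving pipeline that also depayloads, parses and decodes the stream (rather than just dumping it) would look roughly like this, assuming the avdec_h265 decoder from gst-libav is available:
Code:
gst-launch-1.0 -v udpsrc address=239.0.0.1 port=5004 ! application/x-rtp,media=video,clock-rate=90000,encoding-name=H265,payload=96 ! rtph265depay ! h265parse ! avdec_h265 ! videoconvert ! autovideosink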
The problem is that if I add an rtph265depay element to the client, i.e. gst-launch-1.0 -v udpsrc address=239.0.0.1 port=5004 ! application/x-rtp,encoding-name=H265 ! rtph265depay ! fakesink dump=true, then this is all I get after starting the server:
Code:
gst-launch-1.0 -v udpsrc address=239.0.0.1 port=5004 ! application/x-rtp,encoding-name=H265 ! rtph265depay ! fakesink dump=true
Setting pipeline to PAUSED ...
Pipeline is live and does not need PREROLL ...
Pipeline is PREROLLED ...
Setting pipeline to PLAYING ...
New clock: GstSystemClock
/GstPipeline:pipeline0/GstCapsFilter:capsfilter0.GstPad:src: caps = application/x-rtp, encoding-name=(string)H265, media=(string)video, clock-rate=(int)90000
/GstPipeline:pipeline0/GstRtpH265Depay:rtph265depay0.GstPad:sink: caps = application/x-rtp, encoding-name=(string)H265, media=(string)video, clock-rate=(int)90000
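One way to see why rtph265depay produces no output would be to raise its debug level, for example:
Code:
GST_DEBUG=rtph265*:7 gst-launch-1.0 -v udpsrc address=239.0.0.1 port=5004 ! application/x-rtp,encoding-name=H265 ! rtph265depay ! fakesink dump=true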
If I start a test video server using gst-launch-1.0 videotestsrc ! video/x-raw ! x265enc ! h265parse ! rtph265pay config-interval=1 ! udpsink host=239.0.0.1 port=5004
and use gst-launch-1.0 -v udpsrc address=239.0.0.1 port=5004 ! application/x-rtp,encoding-name=H265 ! rtph265depay ! fakesink dump=true as the client, then this is printed:
Code:
gst-launch-1.0 -v udpsrc address=239.0.0.1 port=5004 ! application/x-rtp,encoding-name=H265 ! rtph265depay ! fakesink dump=true
Setting pipeline to PAUSED ...
Pipeline is live and does not need PREROLL ...
Pipeline is PREROLLED ...
Setting pipeline to PLAYING ...
New clock: GstSystemClock
/GstPipeline:pipeline0/GstCapsFilter:capsfilter0.GstPad:src: caps = application/x-rtp, encoding-name=(string)H265, media=(string)video, clock-rate=(int)90000
/GstPipeline:pipeline0/GstRtpH265Depay:rtph265depay0.GstPad:sink: caps = application/x-rtp, encoding-name=(string)H265, media=(string)video, clock-rate=(int)90000
/GstPipeline:pipeline0/GstRtpH265Depay:rtph265depay0.GstPad:src: caps = video/x-h265, stream-format=(string)hvc1, alignment=(string)au, codec_data=(buffer)01420101040800000300980800f000fcfff9f800000f03200001001740010c01ffff040800000300980800000300003f9598092100010030420101040800000300980800000300003f90014101e2294b2b3492657ff80008000b506060604000000300400000078222000100074401c172b46240
/GstPipeline:pipeline0/GstFakeSink:fakesink0.GstPad:sink: caps = video/x-h265, stream-format=(string)hvc1, alignment=(string)au, codec_data=(buffer)01420101040800000300980800f000fcfff9f800000f03200001001740010c01ffff040800000300980800000300003f9598092100010030420101040800000300980800000300003f90014101e2294b2b3492657ff80008000b506060604000000300400000078222000100074401c172b46240
Redistribute latency...
00000000 (0x7f7680023330): 00 00 09 0f 4e 01 05 ff ff ff ff ff ff ff ff ff ....N...........
00000010 (0x7f7680023340): 0a 2c a2 de 09 b5 17 47 db bb 55 a4 fe 7f c2 fc .,.....G..U.....
00000020 (0x7f7680023350): 4e 78 32 36 35 20 28 62 75 69 6c 64 20 31 39 39 Nx265 (build 199
00000030 (0x7f7680023360): 29 20 2d 20 33 2e 35 2b 31 2d 66 30 63 31 30 32 ) - 3.5+1-f0c102
00000040 (0x7f7680023370): 32 62 36 3a 5b 4c 69 6e 75 78 5d 5b 47 43 43 20 2b6:[Linux][GCC
00000050 (0x7f7680023380): 31 32 2e 32 2e 30 5d 5b 36 34 20 62 69 74 5d 20 12.2.0][64 bit]
00000060 (0x7f7680023390): 31 32 62 69 74 20 2d 20 48 2e 32 36 35 2f 48 45 12bit - H.265/HE
00000070 (0x7f76800233a0): 56 43 20 63 6f 64 65 63 20 2d 20 43 6f 70 79 72 VC codec - Copyr
00000080 (0x7f76800233b0): 69 67 68 74 20 32 30 31 33 2d 32 30 31 38 20 28 ight 2013-2018 (
00000090 (0x7f76800233c0): 63 29 20 4d 75 6c 74 69 63 6f 72 65 77 61 72 65 c) Multicoreware
000000a0 (0x7f76800233d0): 2c 20 49 6e 63 20 2d 20 68 74 74 70 3a 2f 2f 78 , Inc - http://x
000000b0 (0x7f76800233e0): 32 36 35 2e 6f 72 67 20 2d 20 6f 70 74 69 6f 6e 265.org - option
000000c0 (0x7f76800233f0): 73 3a 20 63 70 75 69 64 3d 31 31 31 31 30 33 39 s: cpuid=1111039
etc.
This is the code I am trying to debug:
// The original include lines lost their header names in formatting; they are
// presumably something like the following (Pylon, x265, ccRTP and stdio are used below):
#include <pylon/PylonIncludes.h>
#include <pylon/BaslerUniversalInstantCamera.h>
#include <x265.h>
#include <ccrtp/rtp.h>
#include <cstdio>
int main(int argc, char *argv[]) {
// Pylon initialization
Pylon::PylonAutoInitTerm autoInitTerm;
Pylon::CTlFactory& tlFactory = Pylon::CTlFactory::GetInstance();
Pylon::CDeviceInfo info;
//info.SetSerialNumber();
Pylon::DeviceInfoList_t filter;
filter.push_back(info);
Pylon::CBaslerUniversalInstantCamera camera;
int64_t gev_scps_packet_size = 1500;
int64_t gev_scpd = 50000;
// X265 initialization
printf("x265_max_bit_depth: %i\n", x265_max_bit_depth);
printf("x265_version_str: %s\n", x265_version_str);
printf("x265_build_info_str: %s\n", x265_build_info_str);
// RTP initialization
COMMONCPP_NAMESPACE::RTPSession* session = new COMMONCPP_NAMESPACE::RTPSession(COMMONCPP_NAMESPACE::InetHostAddress("0.0.0.0"));
session->setSchedulingTimeout(10000);
session->setExpireTimeout(1000000);
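// Declares dynamic RTP payload type 96 with the 90 kHz clock used for H265.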
session->setPayloadFormat(COMMONCPP_NAMESPACE::DynamicPayloadFormat(96, 90000));
if (!session->addDestination(COMMONCPP_NAMESPACE::InetMcastAddress("239.0.0.1"), 5004)) {
printf("Could not connect to port.\n");
return -1;
}
session->startRunning();
if (session->isActive()) {
printf("The queue is active.\n");
}
else {
printf("The queue is not active.\n");
}
uint32 time_stamp = 0;
// Opens a file for testing the X265 encoding.
FILE *fp_dst = NULL;
fp_dst = fopen("tmp.h265", "wb");
if (fp_dst == NULL) {
return -1;
}
while (1) {
try {
Pylon::DeviceInfoList_t devices;
while (!(tlFactory.EnumerateDevices(devices, filter) > 0)) {
printf("The camera is not connected.\n");
Pylon::WaitObject::Sleep(5000);
}
printf("The camera is connected.\n");
printf("Creating device.\n");
Pylon::IPylonDevice* device_ptr = tlFactory.CreateFirstDevice(info);
if (device_ptr == nullptr) {
printf("tlFactory.CreateFirstDevice returned a null pointer.\n");
continue;
}
printf("Attaching device.\n");
camera.Attach(device_ptr);
printf("Opening camera.\n");
camera.Open();
int64_t width;
if (camera.Width.IsReadable()) {
width = camera.Width.GetValue();
printf("Width: %i\n", width);
}
else {
printf("Width is not readable.\n");
}
int64_t height;
if (camera.Height.IsReadable()) {
height = camera.Height.GetValue();
printf("Height: %i\n", height);
}
else {
printf("Height is not readable.\n");
}
Basler_UniversalCameraParams::PixelFormatEnums pixel_format;
if (camera.PixelFormat.IsReadable()) {
pixel_format = camera.PixelFormat.GetValue();
printf("Pixel Format: %s\n", camera.PixelFormat.ToString().c_str());
}
else {
printf("Pixel Format is not readable.\n");
}
Basler_UniversalCameraParams::PixelSizeEnums pixel_size;
if (camera.PixelSize.IsReadable()) {
pixel_size = camera.PixelSize.GetValue();
printf("Pixel Size: %s\n", camera.PixelSize.ToString().c_str());
}
else {
printf("Pixel Size is not readable.\n");
}
// Writes properties of the camera using the generic interface for cameras standard (GenICam).
if (camera.GevSCPSPacketSize.TrySetValue(gev_scps_packet_size)) {
printf("Set Gev SCPS Packet Size to %i.\n", gev_scps_packet_size);
}
else {
printf("Could not set Gev SCPS Packet Size.\n");
}
if (camera.GevSCPD.TrySetValue(gev_scpd)) {
printf("Set Gev SCPD (Inter-Packet Delay) to %i.\n", gev_scpd);
}
else {
printf("Could not set Gev SCPD (Inter-Packet Delay).\n");
}
// Configures the X265 encoder.
printf("Configuring the encoder.\n");
x265_param* param_ptr = x265_param_alloc();
if (param_ptr == nullptr) {
printf("x265_param_alloc returned a null pointer.\n");
continue;
}
x265_param_default(param_ptr);
if (x265_param_default_preset(param_ptr, "ultrafast", "zerolatency") < 0) {
printf("Error calling x265_param_default_preset.\n");
return -1;
}
/*
if (x265_param_apply_profile(param_ptr, "main") < 0) {
printf("Error calling x265_param_apply_profile.\n");
return -1;
}
*/
// Maps the camera pixel size to the x265 source bit depth.
switch (pixel_size) {
case Basler_UniversalCameraParams::PixelSizeEnums::PixelSize_Bpp1 : param_ptr->sourceBitDepth = 1; break;
case Basler_UniversalCameraParams::PixelSizeEnums::PixelSize_Bpp2 : param_ptr->sourceBitDepth = 2; break;
case Basler_UniversalCameraParams::PixelSizeEnums::PixelSize_Bpp4 : param_ptr->sourceBitDepth = 4; break;
case Basler_UniversalCameraParams::PixelSizeEnums::PixelSize_Bpp8 : param_ptr->sourceBitDepth = 8; break;
case Basler_UniversalCameraParams::PixelSizeEnums::PixelSize_Bpp10 : param_ptr->sourceBitDepth = 10; break;
case Basler_UniversalCameraParams::PixelSizeEnums::PixelSize_Bpp12 : param_ptr->sourceBitDepth = 12; break;
case Basler_UniversalCameraParams::PixelSizeEnums::PixelSize_Bpp14 : param_ptr->sourceBitDepth = 14; break;
case Basler_UniversalCameraParams::PixelSizeEnums::PixelSize_Bpp16 : param_ptr->sourceBitDepth = 16; break;
case Basler_UniversalCameraParams::PixelSizeEnums::PixelSize_Bpp24 : param_ptr->sourceBitDepth = 24; break;
case Basler_UniversalCameraParams::PixelSizeEnums::PixelSize_Bpp32 : param_ptr->sourceBitDepth = 32; break;
case Basler_UniversalCameraParams::PixelSizeEnums::PixelSize_Bpp36 : param_ptr->sourceBitDepth = 36; break;
case Basler_UniversalCameraParams::PixelSizeEnums::PixelSize_Bpp48 : param_ptr->sourceBitDepth = 48; break;
case Basler_UniversalCameraParams::PixelSizeEnums::PixelSize_Bpp64 : param_ptr->sourceBitDepth = 64; break;
}
param_ptr->sourceWidth = width;
param_ptr->sourceHeight = height;
param_ptr->fpsNum = 30;
param_ptr->fpsDenom = 1;
param_ptr->internalCsp = X265_CSP_I420;
param_ptr->bRepeatHeaders = true;
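// bRepeatHeaders makes x265 emit VPS/SPS/PPS in-band with every keyframe,
// which should be why those NAL units appear in the RTP dump above.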
// param_ptr->keyframeMin = 25;
// param_ptr->keyframeMax = 250;
// param_ptr->scenecutThreshold = 40;
// param_ptr->bHistBasedSceneCut = 0;
// param_ptr->bEnableEndOfBitstream = false;
// param_ptr->bEnableEndOfSequence = false;
x265_picture* pic_in_ptr = x265_picture_alloc();
if (pic_in_ptr == nullptr) {
printf("x265_picture_alloc returned a null pointer.\n");
}
x265_picture_init(param_ptr, pic_in_ptr);
x265_picture* pic_out_ptr = x265_picture_alloc();
if (pic_out_ptr == nullptr) {
printf("x265_picture_alloc returned a null pointer.\n");
}
x265_encoder* encoder_ptr = x265_encoder_open(param_ptr);
if (encoder_ptr == nullptr) {
printf("x265_encoder_open returned a null pointer.\n");
}
x265_param_free(param_ptr);
x265_nal* pp_nal_ptr = nullptr;
uint32_t pi_nal = 0;
printf("Finished configuring the encoder.\n");
// Starts the loop for grabbing, encoding and sending images from the camera.
printf("Starting grabbing.\n");
camera.StartGrabbing(Pylon::GrabStrategy_LatestImageOnly);
Pylon::CBaslerUniversalGrabResultPtr ptrGrabResult;
while (1) {
try {
printf("Retrieving result.\n");
camera.RetrieveResult((unsigned int) 5000, ptrGrabResult, Pylon::TimeoutHandling_ThrowException);
}
catch (const Pylon::TimeoutException& e) {
printf("Timeout exception calling RetrieveResult.\n");
printf("%s\n", e.GetDescription());
Pylon::WaitObject::Sleep(1000);
if (camera.IsCameraDeviceRemoved()) {
printf("The connection to the camera has been lost.\n");
camera.DestroyDevice();
while (x265_encoder_encode(encoder_ptr, &pp_nal_ptr, &pi_nal, 0, nullptr) > 0) {
printf("Flushing.\n");
}
x265_picture_free(pic_in_ptr);
x265_picture_free(pic_out_ptr);
x265_encoder_close(encoder_ptr);
x265_cleanup();
break;
}
}
if (ptrGrabResult.IsValid() && ptrGrabResult->GrabSucceeded()) {
printf("Grab succeeded.\n");
Pylon::CImageFormatConverter image_format_converter;
image_format_converter.OutputPixelFormat = Pylon::PixelType_YUV420planar;
image_format_converter.OutputBitAlignment = Pylon::OutputBitAlignment_MsbAligned;
Pylon::CPylonImage image;
printf("Converting image.\n");
image_format_converter.Convert(
image,
ptrGrabResult->GetBuffer(),
ptrGrabResult->GetBufferSize(),
ptrGrabResult->GetPixelType(),
ptrGrabResult->GetWidth(),
ptrGrabResult->GetHeight(),
ptrGrabResult->GetPaddingX(),
Pylon::EImageOrientation::ImageOrientation_TopDown
);
size_t plane_0_stride;
Pylon::CPylonImage plane_0;
plane_0.CopyImage(image.GetPlane(0));
plane_0.GetStride(plane_0_stride);
size_t plane_1_stride;
Pylon::CPylonImage plane_1;
plane_1.CopyImage(image.GetPlane(1));
plane_1.GetStride(plane_1_stride);
size_t plane_2_stride;
Pylon::CPylonImage plane_2;
plane_2.CopyImage(image.GetPlane(2));
plane_2.GetStride(plane_2_stride);
pic_in_ptr->stride[0] = plane_0_stride;
pic_in_ptr->planes[0] = (void*) plane_0.GetBuffer();
pic_in_ptr->stride[1] = plane_1_stride;
pic_in_ptr->planes[1] = (void*) plane_1.GetBuffer();
pic_in_ptr->stride[2] = plane_2_stride;
pic_in_ptr->planes[2] = (void*) plane_2.GetBuffer();
printf("Encoding.\n");
int code = x265_encoder_encode(encoder_ptr, &pp_nal_ptr, &pi_nal, pic_in_ptr, pic_out_ptr);
if (code < 0) {
printf("Error encoding.\n");
return -1;
}
else if (code == 0) {
printf("The encoder pipeline is still filling or is empty after flushing.\n");
}
else {
for (uint32_t j = 0; j < pi_nal; j++) {
fwrite(pp_nal_ptr[j].payload, pp_nal_ptr[j].sizeBytes, 1, fp_dst);
printf("%u %u %u\n", j, pp_nal_ptr[j].type, pp_nal_ptr[j].sizeBytes);
session->putData(time_stamp, pp_nal_ptr[j].payload, pp_nal_ptr[j].sizeBytes);
// const char* buffer = "test";
// session->putData(time_stamp, (const unsigned char *) buffer, strlen((char*) buffer) + 1);
time_stamp += session->getCurrentRTPClockRate();
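// Note: this sends each NAL unit verbatim, including the Annex-B start code
// that x265 prepends, as one RTP payload, and advances the timestamp by a full
// clock rate (one second) per NAL rather than once per frame. rtph265depay
// expects RFC 7798 payloads (NAL header first, no start codes, large NALs
// split into fragmentation units), so this may be what it is rejecting.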
}
}
printf("Finished encoding.\n");
}
else {
printf("The grab failed.\n");
printf("%s\n", ptrGrabResult->GetErrorDescription().c_str());
}
}
}
catch (const Pylon::GenericException& e) {
printf("Generic exception.\n");
printf("%s\n", e.GetDescription());
}
}
return -1;
}
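For reference, rtph265depay expects the RTP payload format from RFC 7798: each packet's payload starts with the two-byte HEVC NAL unit header (no Annex-B start code), and NAL units larger than the MTU are carried in fragmentation units. A minimal sketch of sending a single-NAL packet by stripping the start code before putData might look like this (the helper name is mine; fragmentation, aggregation and the RTP marker bit are not handled, and this is not a tested fix):
Code:
// Sketch: strip the Annex-B start code so the payload begins with the NAL unit header.
static void put_single_nal(COMMONCPP_NAMESPACE::RTPSession* session, uint32 time_stamp,
                           const unsigned char* payload, uint32_t size_bytes) {
    uint32_t offset = 0;
    if (size_bytes >= 4 && payload[0] == 0 && payload[1] == 0 && payload[2] == 0 && payload[3] == 1) {
        offset = 4; // 00 00 00 01 start code
    }
    else if (size_bytes >= 3 && payload[0] == 0 && payload[1] == 0 && payload[2] == 1) {
        offset = 3; // 00 00 01 start code
    }
    session->putData(time_stamp, payload + offset, size_bytes - offset);
}
In the encode loop this would replace the direct putData call, with the same timestamp reused for all NAL units of one frame and advanced by 90000 / fps once per frame.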
I have verified that the generated test file can be played with the VLC media player and looks fine.
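Besides VLC, the raw file should also be playable with GStreamer directly, e.g.:
Code:
gst-launch-1.0 filesrc location=tmp.h265 ! h265parse ! avdec_h265 ! videoconvert ! autovideosink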
More details here:
https://stackoverflow.com/questions/79507568/encoding-live-video-as-h265-and-streaming-it-over-rtp