我使用的是一塊定製的德州儀器OMAP-L138電路板,其SoC包含ARM9核心和DSP處理器,並連接到相機鏡頭。我想要做的是捕獲由DSP處理器完成H264編碼後、經由uPP以8192字節數據包發送過來的實時視頻流。我想使用Live555提供的testH264VideoStreamer,通過RTSP實時傳輸這路H264編碼視頻。我已經按如下方式修改了代碼:使用Live555從連接到H264編碼器的IP攝像頭流式傳輸實時視頻
#include <liveMedia.hh>
#include <BasicUsageEnvironment.hh>
#include <GroupsockHelper.hh>
#include <stdio.h>
#include <unistd.h>
#include <stdlib.h>
#include <fcntl.h>
#include <string.h>
#include <errno.h>
#include <string.h>
#include <unistd.h> //to allow read() function
UsageEnvironment* env;
H264VideoStreamFramer* videoSource;
RTPSink* videoSink;
//-------------------------------------------------------------------------------
// Open the uPP character device once at startup, failing fast with a clear
// diagnostic if the device node is missing or inaccessible.  The original
// unchecked global open() would silently yield -1, and every subsequent
// read() in play() would then fail without explanation.
static int openUppDevice()
{
    int fd = open("/dev/upp", O_RDONLY);
    if (fd < 0)
    {
        perror("Failed to open /dev/upp");
        exit(1);
    }
    return fd;
}
/* File descriptor for the uPP device carrying the H264 elementary stream. */
int stream = openUppDevice();
/* 8192-byte staging buffer (one uPP packet); reused by every play() call. */
static uint8_t buf[8192];
//------------------------------------------------------------------------------
//------------------------------------------------------------------------------
// Execute play function as a forwarding mechanism
//------------------------------------------------------------------------------
void play(); // forward
//------------------------------------------------------------------------------
// MAIN FUNCTION/ENTRY POINT
//------------------------------------------------------------------------------
int main(int argc, char** argv)
{
// Begin by setting up our live555 usage environment:
TaskScheduler* scheduler = BasicTaskScheduler::createNew();
env = BasicUsageEnvironment::createNew(*scheduler);
// Create 'groupsocks' for RTP and RTCP:
struct in_addr destinationAddress;
destinationAddress.s_addr = chooseRandomIPv4SSMAddress(*env);
// Note: This is a multicast address. If you wish instead to stream
// using unicast, then you should use the "testOnDemandRTSPServer"
// test program - not this test program - as a model.
const unsigned short rtpPortNum = 18888;
const unsigned short rtcpPortNum = rtpPortNum+1;
const unsigned char ttl = 255;
const Port rtpPort(rtpPortNum);
const Port rtcpPort(rtcpPortNum);
Groupsock rtpGroupsock(*env, destinationAddress, rtpPort, ttl);
rtpGroupsock.multicastSendOnly(); // we're a SSM source
Groupsock rtcpGroupsock(*env, destinationAddress, rtcpPort, ttl);
rtcpGroupsock.multicastSendOnly(); // we're a SSM source
// Create a 'H264 Video RTP' sink from the RTP 'groupsock':
OutPacketBuffer::maxSize = 1000000;
videoSink = H264VideoRTPSink::createNew(*env, &rtpGroupsock, 96);
// Create (and start) a 'RTCP instance' for this RTP sink:
const unsigned estimatedSessionBandwidth = 500; // in kbps; for RTCP b/w share
const unsigned maxCNAMElen = 100;
unsigned char CNAME[maxCNAMElen+1];
gethostname((char*)CNAME, maxCNAMElen);
CNAME[maxCNAMElen] = '\0'; // just in case
RTCPInstance* rtcp
= RTCPInstance::createNew(*env, &rtcpGroupsock,
estimatedSessionBandwidth, CNAME,
videoSink, NULL /* we're a server */,
True /* we're a SSM source */);
// Note: This starts RTCP running automatically
/*Create RTSP SERVER*/
RTSPServer* rtspServer = RTSPServer::createNew(*env, 8554);
if (rtspServer == NULL)
{
*env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
exit(1);
}
ServerMediaSession* sms
= ServerMediaSession::createNew(*env, "IPCAM @ TeReSol","UPP Buffer" ,
"Session streamed by \"testH264VideoStreamer\"",
True /*SSM*/);
sms->addSubsession(PassiveServerMediaSubsession::createNew(*videoSink, rtcp));
rtspServer->addServerMediaSession(sms);
char* url = rtspServer->rtspURL(sms);
*env << "Play this stream using the URL \"" << url << "\"\n";
delete[] url;
// Start the streaming:
*env << "Beginning streaming...\n";
play();
env->taskScheduler().doEventLoop(); // does not return
return 0; // only to prevent compiler warning
}
//----------------------------------------------------------------------------------
// afterPlaying() -> Defines what to do once a buffer is streamed
//----------------------------------------------------------------------------------
void afterPlaying(void* /*clientData*/)
{
*env << "...done reading from upp buffer\n";
//videoSink->stopPlaying();
//Medium::close(videoSource);
// Note that this also closes the input file that this source read from.
// Start playing once again to get the next stream
play();
/* We don't need to close the dev as long as we're reading from it. But if we do, use: close("/dev/upp", O_RDWR);*/
}
//----------------------------------------------------------------------------------------------
// play() Method -> Defines how to read and what to make of the input stream
//----------------------------------------------------------------------------------------------
//----------------------------------------------------------------------------------------------
// play() Method -> Reads one uPP packet and streams it through a fresh framer chain.
//----------------------------------------------------------------------------------------------
void play()
{
    /* Pull the next uPP packet into the staging buffer.  read() may return
       fewer than sizeof buf bytes (a short read) or -1 on error, so the
       actual count must be checked and forwarded.  The original ignored the
       return value and always passed sizeof buf, which could feed stale or
       uninitialized bytes to the framer. */
    ssize_t bytesRead = read(stream, buf, sizeof buf);
    if (bytesRead <= 0)
    {
        *env << "Failed to read from /dev/upp: "
             << (bytesRead < 0 ? strerror(errno) : "end of stream") << "\n";
        exit(1);
    }
    printf("Read %zd bytes from UPP into buffer\n", (ssize_t)bytesRead);
    /* Wrap the freshly read bytes as a 'byte-stream' source, passing only the
       number of bytes actually read.  'False' = do NOT delete the buffer on
       close: buf is a static array reused by every invocation. */
    ByteStreamMemoryBufferSource* buffSource
      = ByteStreamMemoryBufferSource::createNew(*env, buf, (u_int64_t)bytesRead,
                                                False /* don't delete buf on close */);
    if (buffSource == NULL)
    {
        *env << "Unable to read from\"" << "Buffer"
             << "\" as a byte-stream source\n";
        exit(1);
    }
    FramedSource* videoES = buffSource;
    // Create a framer for the Video Elementary Stream.
    // NOTE(review): the framer locates NAL units via Annex-B start codes
    // (00 00 00 01) in the input; confirm the DSP emits them.  The final
    // 'False' asks the framer to omit start codes from its output, which is
    // the form H264VideoRTPSink consumes.
    videoSource = H264VideoStreamFramer::createNew(*env, videoES, False);
    // Finally, start playing:
    *env << "Beginning to read from UPP...\n";
    videoSink->startPlaying(*videoSource, afterPlaying, videoSink);
}
我的問題是:雖然代碼編譯成功,但我無法得到所需的輸出。VLC播放器上的RTSP流處於播放狀態,但我看不到任何視頻。非常感謝在這件事上提供的任何幫助。如果我的描述有含糊之處,我很樂意進一步解釋任何需要說明的部分。