// forked from SonsOfTone/RaspberrIP-Camera -- H264Streamer.cpp (212 lines, 178 loc, 6.44 KB)
//Written By Antoine DUBOST-BULARD 25/10/2014
//Based on testH264VideoStreamer.cpp from Live55 RTSP library
//Get H264 Buffers from Raspivid and send them to H264 RTSP Streamer
#include <pthread.h>
#include <unistd.h>    // gethostname()
#include <sys/time.h>  // gettimeofday()

#include <cstdio>      // printf(), fprintf()
#include <cstdlib>     // exit(), system()
#include <cstring>     // memcpy(), memcmp()

#include <liveMedia.hh>
#include <BasicUsageEnvironment.hh>
#include <GroupsockHelper.hh>
#include <FramedSource.hh>

#include "MyDeviceSource.hh"
void play(); // forward
void ThreadRTSP(); // forward
UsageEnvironment* env;
H264VideoStreamFramer* videoSource;
RTPSink* videoSink;
H264VideoStreamDiscreteFramer* videoDeviceSource;
MyDeviceSource* LiveVideoSource;
pthread_t pThreadRTSP;
int iretThreadRTSP;
unsigned int ImageBuffer_Lenght = 0;
char ImageBuffer[1*1024*1024];
static struct timeval tm1;
typedef struct
{
unsigned int Port;
char * URL;
}RTSP_Parameters;
RTSP_Parameters RTSP_Params;
static inline void start()
{
gettimeofday(&tm1, NULL);
}
static inline void stop()
{
struct timeval tm2;
gettimeofday(&tm2, NULL);
unsigned long long t = 1000 * (tm2.tv_sec - tm1.tv_sec) + (tm2.tv_usec - tm1.tv_usec) / 1000;
printf("%llu ms\n", t);
}
void WaitStreamingDone()
{
LiveVideoSource->WaitStreamingDone();
}
void CaptureDone()
{
LiveVideoSource->CaptureDone();
}
char GetH264ImageType(char * Buffer)
{
if((Buffer[0] == 0x00) && (Buffer[1] == 0x00) && (Buffer[2] == 0x00) && (Buffer[3] == 0x01) && (Buffer[4] == 0x27)) return 's';//SPS Image Start Code
else if((Buffer[0] == 0x00) && (Buffer[1] == 0x00) && (Buffer[2] == 0x00) && (Buffer[3] == 0x01) && (Buffer[4] == 0x28)) return 'p';//PPS Image Start Code
else if((Buffer[0] == 0x00) && (Buffer[1] == 0x00) && (Buffer[2] == 0x00) && (Buffer[3] == 0x01) && (Buffer[4] == 0x25)) return 'I';//I Image Start Code
else if((Buffer[0] == 0x00) && (Buffer[1] == 0x00) && (Buffer[2] == 0x00) && (Buffer[3] == 0x01) && (Buffer[4] == 0x21)) return 'P';//P Image Start Code
else return 0;
}
extern "C" void FeedStreamerAndRecordingManager(char * Buffer, unsigned int BufferSize)
{
if((GetH264ImageType(Buffer) == 'I') || (GetH264ImageType(Buffer) == 'P') || (GetH264ImageType(Buffer) == 's') || (GetH264ImageType(Buffer) == 'p')) //Start Code
{
//Image Start
if(ImageBuffer_Lenght) //New image comes, Send last image
{
//Framerate measure - Uncomment above two lines
//stop();
//start();
LiveVideoSource->FeedStreamer(ImageBuffer, ImageBuffer_Lenght);
ImageBuffer_Lenght = 0;
CaptureDone();
WaitStreamingDone();
}
if(sizeof(ImageBuffer) >= ImageBuffer_Lenght + BufferSize)
{
memcpy(ImageBuffer + ImageBuffer_Lenght, Buffer, BufferSize);
ImageBuffer_Lenght += BufferSize;
}
else
{
fprintf(stderr, "Buffer Memory Overflow\n");
exit(0);
}
}
else
{
//Image fragment
if(sizeof(ImageBuffer) >= ImageBuffer_Lenght + BufferSize)
{
memcpy(ImageBuffer + ImageBuffer_Lenght, Buffer, BufferSize);
ImageBuffer_Lenght += BufferSize;
}
else
{
fprintf(stderr, "Buffer Memory Overflow\n");
exit(0);
}
}
}
void * ThreadRTSP(void * Parameters) {
// Begin by setting up our usage environment:
TaskScheduler* scheduler = BasicTaskScheduler::createNew();
env = BasicUsageEnvironment::createNew(*scheduler);
// Create 'groupsocks' for RTP and RTCP:
struct in_addr destinationAddress;
destinationAddress.s_addr = chooseRandomIPv4SSMAddress(*env);
// Note: This is a multicast address. If you wish instead to stream
// using unicast, then you should use the "testOnDemandRTSPServer"
// test program - not this test program - as a model.
const unsigned short rtpPortNum = 18888;
const unsigned short rtcpPortNum = rtpPortNum+1;
const unsigned char ttl = 255;
const Port rtpPort(rtpPortNum);
const Port rtcpPort(rtcpPortNum);
RTSP_Parameters * RTSP_Params = (RTSP_Parameters *)Parameters;
Groupsock rtpGroupsock(*env, destinationAddress, rtpPort, ttl);
rtpGroupsock.multicastSendOnly(); // we're a SSM source
Groupsock rtcpGroupsock(*env, destinationAddress, rtcpPort, ttl);
rtcpGroupsock.multicastSendOnly(); // we're a SSM source
// Create a 'H264 Video RTP' sink from the RTP 'groupsock':
OutPacketBuffer::maxSize = 500000;
videoSink = H264VideoRTPSink::createNew(*env, &rtpGroupsock, 96);
// Create (and start) a 'RTCP instance' for this RTP sink:
const unsigned estimatedSessionBandwidth = 15000; // in kbps; for RTCP b/w share
const unsigned maxCNAMElen = 100;
unsigned char CNAME[maxCNAMElen+1];
gethostname((char*)CNAME, maxCNAMElen);
CNAME[maxCNAMElen] = '\0'; // just in case
RTCPInstance* rtcp
= RTCPInstance::createNew(*env, &rtcpGroupsock,
estimatedSessionBandwidth, CNAME,
videoSink, NULL ,
True );
// Note: This starts RTCP running automatically
/*UserAuthenticationDatabase* authDB = NULL;
authDB = new UserAuthenticationDatabase;
char * Login = "ADMIN";
char * Password = "ADMIN";
authDB->addUserRecord(Login, Password); */
RTSPServer* rtspServer = RTSPServer::createNew(*env, RTSP_Params->Port/*, authDB*/);
if (rtspServer == NULL) {
*env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
exit(1);
}
ServerMediaSession* sms
= ServerMediaSession::createNew(*env, RTSP_Params->URL, NULL,
"Session streamed by \"testH264VideoStreamer\"",
True );
sms->addSubsession(PassiveServerMediaSubsession::createNew(*videoSink, rtcp));
rtspServer->addServerMediaSession(sms);
char* url = rtspServer->rtspURL(sms);
*env << "Play this stream using the URL \"" << url << "\"\n";
delete[] url;
// Start the streaming:
*env << "Beginning streaming...\n";
play();
env->taskScheduler().doEventLoop(); // does not return
}
extern "C" void Start_Streaming(char * URL, unsigned int Port)
{
pthread_t ThreadHndl;
RTSP_Params.Port = Port;
RTSP_Params.URL = URL;
//Subscribe to Multicast Group via eth0
system("sudo ip route add 224.0.0.0/4 dev eth0");
iretThreadRTSP = pthread_create(&ThreadHndl, NULL, (void *(*)(void *))&ThreadRTSP, (void *)&RTSP_Params);
}
void afterPlaying(void* /*clientData*/) {
*env << "...done reading from file\n";
videoSink->stopPlaying();
Medium::close(videoSource);
// Note that this also closes the input file that this source read from.
// Start playing once again:
play();
}
void play() {
MyDeviceParameters params;
LiveVideoSource = MyDeviceSource::createNew(*env,params);
videoDeviceSource = H264VideoStreamDiscreteFramer::createNew(*env,LiveVideoSource);
*env << "Start Streaming from RaspberrIP Camera...\n";
videoSink->startPlaying(*videoDeviceSource, afterPlaying, videoSink);
}