allow to not start v4l2 capture

pull/296/head
mpromonet 3 years ago
parent 13c61161fd
commit 90dfcb1ec0

@@ -18,26 +18,26 @@
 class DeviceSourceFactory {
 public:
-static FramedSource* createFramedSource(UsageEnvironment* env, int format, DeviceInterface* devCapture, int queueSize = 5, bool useThread = true, int outfd = -1, bool repeatConfig = true) {
+static FramedSource* createFramedSource(UsageEnvironment* env, int format, DeviceInterface* devCapture, int queueSize = 5, V4L2DeviceSource::CaptureMode captureMode = V4L2DeviceSource::CAPTURE_INTERNAL_THREAD, int outfd = -1, bool repeatConfig = true) {
 FramedSource* source = NULL;
 if (format == V4L2_PIX_FMT_H264)
 {
-source = H264_V4L2DeviceSource::createNew(*env, devCapture, outfd, queueSize, useThread, repeatConfig, false);
+source = H264_V4L2DeviceSource::createNew(*env, devCapture, outfd, queueSize, captureMode, repeatConfig, false);
 }
 else if (format == V4L2_PIX_FMT_HEVC)
 {
-source = H265_V4L2DeviceSource::createNew(*env, devCapture, outfd, queueSize, useThread, repeatConfig, false);
+source = H265_V4L2DeviceSource::createNew(*env, devCapture, outfd, queueSize, captureMode, repeatConfig, false);
 }
 else
 {
-source = V4L2DeviceSource::createNew(*env, devCapture, outfd, queueSize, useThread);
+source = V4L2DeviceSource::createNew(*env, devCapture, outfd, queueSize, captureMode);
 }
 return source;
 }
-static StreamReplicator* createStreamReplicator(UsageEnvironment* env, int format, DeviceInterface* devCapture, int queueSize = 5, bool useThread = true, int outfd = -1, bool repeatConfig = true) {
+static StreamReplicator* createStreamReplicator(UsageEnvironment* env, int format, DeviceInterface* devCapture, int queueSize = 5, V4L2DeviceSource::CaptureMode captureMode = V4L2DeviceSource::CAPTURE_INTERNAL_THREAD, int outfd = -1, bool repeatConfig = true) {
 StreamReplicator* replicator = NULL;
-FramedSource* framedSource = DeviceSourceFactory::createFramedSource(env, format, devCapture, queueSize, useThread, outfd, repeatConfig);
+FramedSource* framedSource = DeviceSourceFactory::createFramedSource(env, format, devCapture, queueSize, captureMode, outfd, repeatConfig);
 if (framedSource != NULL)
 {
 // extend buffer size if needed
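For orientation (not part of this commit): a caller of the updated factory now passes a V4L2DeviceSource::CaptureMode where the old bool useThread used to go. A minimal sketch, assuming env (a UsageEnvironment*) and devCapture (a DeviceInterface*) from the surrounding code:

// Hypothetical caller: the old useThread=true maps to CAPTURE_INTERNAL_THREAD (still the default),
// useThread=false maps to CAPTURE_LIVE555_THREAD.
StreamReplicator* replicator = DeviceSourceFactory::createStreamReplicator(
    env,                                       // UsageEnvironment*
    V4L2_PIX_FMT_H264,                         // stream format
    devCapture,                                // DeviceInterface* wrapping the capture device
    5,                                         // queueSize
    V4L2DeviceSource::CAPTURE_LIVE555_THREAD,  // capture mode (was: useThread = false)
    -1,                                        // outfd: no dump file descriptor
    true);                                     // repeatConfig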

@@ -24,8 +24,8 @@ const char H264shortmarker[] = {0,0,1};
 class H26X_V4L2DeviceSource : public V4L2DeviceSource
 {
 protected:
-H26X_V4L2DeviceSource(UsageEnvironment& env, DeviceInterface * device, int outputFd, unsigned int queueSize, bool useThread, bool repeatConfig, bool keepMarker)
-: V4L2DeviceSource(env, device, outputFd, queueSize, useThread), m_repeatConfig(repeatConfig), m_keepMarker(keepMarker) {}
+H26X_V4L2DeviceSource(UsageEnvironment& env, DeviceInterface * device, int outputFd, unsigned int queueSize, CaptureMode captureMode, bool repeatConfig, bool keepMarker)
+: V4L2DeviceSource(env, device, outputFd, queueSize, captureMode), m_repeatConfig(repeatConfig), m_keepMarker(keepMarker) {}
 virtual ~H26X_V4L2DeviceSource() {}
@@ -41,13 +41,13 @@ class H26X_V4L2DeviceSource : public V4L2DeviceSource
 class H264_V4L2DeviceSource : public H26X_V4L2DeviceSource
 {
 public:
-static H264_V4L2DeviceSource* createNew(UsageEnvironment& env, DeviceInterface * device, int outputFd, unsigned int queueSize, bool useThread, bool repeatConfig, bool keepMarker) {
-return new H264_V4L2DeviceSource(env, device, outputFd, queueSize, useThread, repeatConfig, keepMarker);
+static H264_V4L2DeviceSource* createNew(UsageEnvironment& env, DeviceInterface * device, int outputFd, unsigned int queueSize, CaptureMode captureMode, bool repeatConfig, bool keepMarker) {
+return new H264_V4L2DeviceSource(env, device, outputFd, queueSize, captureMode, repeatConfig, keepMarker);
 }
 protected:
-H264_V4L2DeviceSource(UsageEnvironment& env, DeviceInterface * device, int outputFd, unsigned int queueSize, bool useThread, bool repeatConfig, bool keepMarker)
-: H26X_V4L2DeviceSource(env, device, outputFd, queueSize, useThread, repeatConfig, keepMarker) {}
+H264_V4L2DeviceSource(UsageEnvironment& env, DeviceInterface * device, int outputFd, unsigned int queueSize, CaptureMode captureMode, bool repeatConfig, bool keepMarker)
+: H26X_V4L2DeviceSource(env, device, outputFd, queueSize, captureMode, repeatConfig, keepMarker) {}
 // overide V4L2DeviceSource
 virtual std::list< std::pair<unsigned char*,size_t> > splitFrames(unsigned char* frame, unsigned frameSize);
@@ -56,13 +56,13 @@ class H264_V4L2DeviceSource : public H26X_V4L2DeviceSource
 class H265_V4L2DeviceSource : public H26X_V4L2DeviceSource
 {
 public:
-static H265_V4L2DeviceSource* createNew(UsageEnvironment& env, DeviceInterface * device, int outputFd, unsigned int queueSize, bool useThread, bool repeatConfig, bool keepMarker) {
-return new H265_V4L2DeviceSource(env, device, outputFd, queueSize, useThread, repeatConfig, keepMarker);
+static H265_V4L2DeviceSource* createNew(UsageEnvironment& env, DeviceInterface * device, int outputFd, unsigned int queueSize, CaptureMode captureMode, bool repeatConfig, bool keepMarker) {
+return new H265_V4L2DeviceSource(env, device, outputFd, queueSize, captureMode, repeatConfig, keepMarker);
 }
 protected:
-H265_V4L2DeviceSource(UsageEnvironment& env, DeviceInterface * device, int outputFd, unsigned int queueSize, bool useThread, bool repeatConfig, bool keepMarker)
-: H26X_V4L2DeviceSource(env, device, outputFd, queueSize, useThread, repeatConfig, keepMarker) {}
+H265_V4L2DeviceSource(UsageEnvironment& env, DeviceInterface * device, int outputFd, unsigned int queueSize, CaptureMode captureMode, bool repeatConfig, bool keepMarker)
+: H26X_V4L2DeviceSource(env, device, outputFd, queueSize, captureMode, repeatConfig, keepMarker) {}
 // overide V4L2DeviceSource
 virtual std::list< std::pair<unsigned char*,size_t> > splitFrames(unsigned char* frame, unsigned frameSize);

@@ -64,14 +64,26 @@ class V4L2DeviceSource: public FramedSource
 const std::string m_msg;
 };
+// ---------------------------------
+// Capture Mode
+// ---------------------------------
+enum CaptureMode
+{
+CAPTURE_LIVE555_THREAD = 0,
+CAPTURE_INTERNAL_THREAD,
+NOCAPTURE
+};
 public:
-static V4L2DeviceSource* createNew(UsageEnvironment& env, DeviceInterface * device, int outputFd, unsigned int queueSize, bool useThread) ;
+static V4L2DeviceSource* createNew(UsageEnvironment& env, DeviceInterface * device, int outputFd, unsigned int queueSize, CaptureMode captureMode) ;
 std::string getAuxLine() { return m_auxLine; }
 void setAuxLine(const std::string auxLine) { m_auxLine = auxLine; }
-DeviceInterface* getDevice() { return m_device; }
+DeviceInterface* getDevice() { return m_device; }
+void postFrame(char * frame, int frameSize, const timeval &ref);
 protected:
-V4L2DeviceSource(UsageEnvironment& env, DeviceInterface * device, int outputFd, unsigned int queueSize, bool useThread);
+V4L2DeviceSource(UsageEnvironment& env, DeviceInterface * device, int outputFd, unsigned int queueSize, CaptureMode captureMode);
 virtual ~V4L2DeviceSource();
 protected:
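The new CaptureMode enum replaces the former useThread boolean: CAPTURE_LIVE555_THREAD drives capture from the LIVE555 event loop through background read handling (the old useThread == false path), CAPTURE_INTERNAL_THREAD starts a dedicated capture thread (the old useThread == true path, and still the default), and NOCAPTURE starts no capture at all, so the application can inject frames itself through the new public postFrame(). A minimal sketch of creating such a source, assuming env (a UsageEnvironment&) and devCapture (a DeviceInterface*) from the surrounding code:

// Sketch, not part of this diff: build a source that starts no capture loop;
// frames are then expected to be pushed in by the application (see postFrame below).
V4L2DeviceSource* source = V4L2DeviceSource::createNew(
    env, devCapture, /*outputFd*/ -1, /*queueSize*/ 5,
    V4L2DeviceSource::NOCAPTURE);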

@@ -117,13 +117,13 @@ class V4l2RTSPServer {
 // -----------------------------------------
 StreamReplicator* CreateVideoReplicator(
 const V4L2DeviceParameters& inParam,
-int queueSize, int useThread, int repeatConfig,
+int queueSize, V4L2DeviceSource::CaptureMode captureMode, int repeatConfig,
 const std::string& outputFile, V4l2IoType ioTypeOut, V4l2Output*& out);
 #ifdef HAVE_ALSA
 StreamReplicator* CreateAudioReplicator(
 const std::string& audioDev, const std::list<snd_pcm_format_t>& audioFmtList, int audioFreq, int audioNbChannels, int verbose,
-int queueSize, int useThread);
+int queueSize, V4L2DeviceSource::CaptureMode captureMode);
 #endif
 // -----------------------------------------

@@ -142,7 +142,7 @@ int main(int argc, char** argv)
 std::string url = "unicast";
 std::string murl = "multicast";
 std::string tsurl = "ts";
-bool useThread = true;
+V4L2DeviceSource::CaptureMode captureMode = V4L2DeviceSource::CAPTURE_INTERNAL_THREAD;
 std::string maddr;
 bool repeatConfig = true;
 int timeout = 65;
@@ -192,7 +192,7 @@ int main(int argc, char** argv)
 case 'r': ioTypeIn = IOTYPE_READWRITE; break;
 case 'w': ioTypeOut = IOTYPE_READWRITE; break;
 case 'B': openflags = O_RDWR; break;
-case 's': useThread = false; break;
+case 's': captureMode = V4L2DeviceSource::CAPTURE_LIVE555_THREAD; break;
 case 'f': format = V4l2Device::fourcc(optarg); if (format) {videoformatList.push_back(format);}; break;
 case 'F': fps = atoi(optarg); break;
 case 'W': width = atoi(optarg); break;
@@ -333,7 +333,7 @@ int main(int argc, char** argv)
 V4L2DeviceParameters inParam(videoDev.c_str(), videoformatList, width, height, fps, ioTypeIn, verbose, openflags);
 StreamReplicator* videoReplicator = rtspServer.CreateVideoReplicator(
 inParam,
-queueSize, useThread, repeatConfig,
+queueSize, captureMode, repeatConfig,
 output, ioTypeOut, out);
 if (out != NULL) {
 outList.push_back(out);
@@ -344,7 +344,7 @@ int main(int argc, char** argv)
 #ifdef HAVE_ALSA
 audioReplicator = rtspServer.CreateAudioReplicator(
 audioDev, audioFmtList, audioFreq, audioNbChannels, verbose,
-queueSize, useThread);
+queueSize, captureMode);
 #endif

@@ -37,18 +37,18 @@ int V4L2DeviceSource::Stats::notify(int tv_sec, int framesize)
 // ---------------------------------
 // V4L2 FramedSource
 // ---------------------------------
-V4L2DeviceSource* V4L2DeviceSource::createNew(UsageEnvironment& env, DeviceInterface * device, int outputFd, unsigned int queueSize, bool useThread)
+V4L2DeviceSource* V4L2DeviceSource::createNew(UsageEnvironment& env, DeviceInterface * device, int outputFd, unsigned int queueSize, CaptureMode captureMode)
 {
 V4L2DeviceSource* source = NULL;
 if (device)
 {
-source = new V4L2DeviceSource(env, device, outputFd, queueSize, useThread);
+source = new V4L2DeviceSource(env, device, outputFd, queueSize, captureMode);
 }
 return source;
 }
 // Constructor
-V4L2DeviceSource::V4L2DeviceSource(UsageEnvironment& env, DeviceInterface * device, int outputFd, unsigned int queueSize, bool useThread)
+V4L2DeviceSource::V4L2DeviceSource(UsageEnvironment& env, DeviceInterface * device, int outputFd, unsigned int queueSize, CaptureMode captureMode)
 : FramedSource(env),
 m_in("in"),
 m_out("out") ,
@@ -59,16 +59,19 @@ V4L2DeviceSource::V4L2DeviceSource(UsageEnvironment& env, DeviceInterface * devi
 m_eventTriggerId = envir().taskScheduler().createEventTrigger(V4L2DeviceSource::deliverFrameStub);
 memset(&m_thid, 0, sizeof(m_thid));
 memset(&m_mutex, 0, sizeof(m_mutex));
+pthread_mutex_init(&m_mutex, NULL);
 if (m_device)
 {
-if (useThread)
-{
-pthread_mutex_init(&m_mutex, NULL);
-pthread_create(&m_thid, NULL, threadStub, this);
-}
-else
-{
-envir().taskScheduler().turnOnBackgroundReadHandling( m_device->getFd(), V4L2DeviceSource::incomingPacketHandlerStub, this);
+switch (captureMode) {
+case CAPTURE_INTERNAL_THREAD:
+pthread_create(&m_thid, NULL, threadStub, this);
+break;
+case CAPTURE_LIVE555_THREAD:
+envir().taskScheduler().turnOnBackgroundReadHandling( m_device->getFd(), V4L2DeviceSource::incomingPacketHandlerStub, this);
+break;
+case NOCAPTURE:
+default:
+break;
 }
 }
 }
@@ -205,22 +208,28 @@ int V4L2DeviceSource::getNextFrame()
 }
 else
 {
-timeval tv;
-gettimeofday(&tv, NULL);
-timeval diff;
-timersub(&tv,&ref,&diff);
-m_in.notify(tv.tv_sec, frameSize);
-LOG(DEBUG) << "getNextFrame\ttimestamp:" << ref.tv_sec << "." << ref.tv_usec << "\tsize:" << frameSize <<"\tdiff:" << (diff.tv_sec*1000+diff.tv_usec/1000) << "ms";
-processFrame(buffer,frameSize,ref);
-if (m_outfd != -1)
-{
-write(m_outfd, buffer, frameSize);
-}
+this->postFrame(buffer,frameSize,ref);
 }
 return frameSize;
 }
+// post frame to queue
+void V4L2DeviceSource::postFrame(char * frame, int frameSize, const timeval &ref)
+{
+timeval tv;
+gettimeofday(&tv, NULL);
+timeval diff;
+timersub(&tv,&ref,&diff);
+m_in.notify(tv.tv_sec, frameSize);
+LOG(DEBUG) << "getNextFrame\ttimestamp:" << ref.tv_sec << "." << ref.tv_usec << "\tsize:" << frameSize <<"\tdiff:" << (diff.tv_sec*1000+diff.tv_usec/1000) << "ms";
+processFrame(frame,frameSize,ref);
+if (m_outfd != -1)
+{
+write(m_outfd, frame, frameSize);
+}
+}
 void V4L2DeviceSource::processFrame(char * frame, int frameSize, const timeval &ref)
 {
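getNextFrame() now delegates to the new public postFrame(), so the statistics update, processFrame() call and the optional write to m_outfd are shared between internally captured frames and frames handed in by the application; note that the constructor also initialises the mutex unconditionally now, which keeps the queue usable when no capture thread or background handler is started. A hedged sketch of an application-side producer feeding a source created with NOCAPTURE (the helper name and the frame acquisition are assumptions, not part of this commit):

#include <sys/time.h>

// Hypothetical producer: push an already-encoded frame into a source that was
// created with V4L2DeviceSource::NOCAPTURE.
void pushEncodedFrame(V4L2DeviceSource* source, char* data, int size)
{
    timeval now;
    gettimeofday(&now, NULL);            // capture timestamp passed as 'ref'
    source->postFrame(data, size, now);  // updates stats, runs processFrame(), optional dump to m_outfd
}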

@@ -26,7 +26,7 @@
 StreamReplicator* V4l2RTSPServer::CreateVideoReplicator(
 const V4L2DeviceParameters& inParam,
-int queueSize, int useThread, int repeatConfig,
+int queueSize, V4L2DeviceSource::CaptureMode captureMode, int repeatConfig,
 const std::string& outputFile, V4l2IoType ioTypeOut, V4l2Output*& out) {
 StreamReplicator* videoReplicator = NULL;
@@ -60,7 +60,7 @@ StreamReplicator* V4l2RTSPServer::CreateVideoReplicator(
 delete videoCapture;
 } else {
 LOG(NOTICE) << "Create Source ..." << videoDev;
-videoReplicator = DeviceSourceFactory::createStreamReplicator(this->env(), videoCapture->getFormat(), new VideoCaptureAccess(videoCapture), queueSize, useThread, outfd, repeatConfig);
+videoReplicator = DeviceSourceFactory::createStreamReplicator(this->env(), videoCapture->getFormat(), new VideoCaptureAccess(videoCapture), queueSize, captureMode, outfd, repeatConfig);
 if (videoReplicator == NULL)
 {
 LOG(FATAL) << "Unable to create source for device " << videoDev;
@@ -173,7 +173,7 @@ std::string getV4l2Alsa(const std::string& v4l2device) {
 StreamReplicator* V4l2RTSPServer::CreateAudioReplicator(
 const std::string& audioDev, const std::list<snd_pcm_format_t>& audioFmtList, int audioFreq, int audioNbChannels, int verbose,
-int queueSize, int useThread) {
+int queueSize, V4L2DeviceSource::CaptureMode captureMode) {
 StreamReplicator* audioReplicator = NULL;
 if (!audioDev.empty())
 {
@@ -187,7 +187,7 @@ StreamReplicator* V4l2RTSPServer::CreateAudioReplicator(
 ALSACapture* audioCapture = ALSACapture::createNew(param);
 if (audioCapture)
 {
-audioReplicator = DeviceSourceFactory::createStreamReplicator(this->env(), 0, audioCapture, queueSize, useThread);
+audioReplicator = DeviceSourceFactory::createStreamReplicator(this->env(), 0, audioCapture, queueSize, captureMode);
 if (audioReplicator == NULL)
 {
 LOG(FATAL) << "Unable to create source for device " << audioDevice;