/*
 * QTKit input device
 * Copyright (c) 2013 Vadim Kalinsky <vadim@kalinsky.ru>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * QTKit input device
 * @author Vadim Kalinsky <vadim@kalinsky.ru>
 */
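/*
 * Example invocations (illustrative; they assume the option names defined in
 * the options[] table below and the standard ffmpeg command-line tool):
 *
 *   ffmpeg -f qtkit -list_devices true -i ""              list available devices
 *   ffmpeg -f qtkit -i "0" out.mpg                        capture from device index 0
 *   ffmpeg -f qtkit -i "default" out.mpg                  capture from the default device
 *   ffmpeg -f qtkit -video_device_index 0 -i "" out.mpg   select the device by option
 */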
#if defined(__clang__)
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
#endif
#import <QTKit/QTKit.h>
#include <pthread.h>

#include "libavutil/pixdesc.h"
#include "libavutil/opt.h"
#include "libavformat/internal.h"
#include "libavutil/internal.h"
#include "libavutil/time.h"
#include "avdevice.h"
#define QTKIT_TIMEBASE 100
static const AVRational kQTKitTimeBase_q = {
    .num = 1,
    .den = QTKIT_TIMEBASE
};
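/* Private context shared between the demuxer callbacks and the
 * QTCaptureSession delegate; frame_lock/frame_wait_cond protect
 * current_frame, which always holds the most recently captured frame. */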
typedef struct
{
    AVClass*        class;

    float           frame_rate;
    int             frames_captured;
    int64_t         first_pts;
    pthread_mutex_t frame_lock;
    pthread_cond_t  frame_wait_cond;
    id              qt_delegate;

    int             list_devices;
    int             video_device_index;

    QTCaptureSession*                 capture_session;
    QTCaptureDecompressedVideoOutput* video_output;
    CVImageBufferRef                  current_frame;
} CaptureContext;
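/* Small wrappers around the context mutex; the capture delegate and the
 * demuxer thread both go through these when touching current_frame. */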
static void lock_frames(CaptureContext* ctx)
{
    pthread_mutex_lock(&ctx->frame_lock);
}

static void unlock_frames(CaptureContext* ctx)
{
    pthread_mutex_unlock(&ctx->frame_lock);
}
/** FrameReceiver class - delegate for QTCaptureSession
 */
@interface FFMPEG_FrameReceiver : NSObject
{
    CaptureContext* _context;
}

- (id)initWithContext:(CaptureContext*)context;

- (void)captureOutput:(QTCaptureOutput *)captureOutput
  didOutputVideoFrame:(CVImageBufferRef)videoFrame
     withSampleBuffer:(QTSampleBuffer *)sampleBuffer
       fromConnection:(QTCaptureConnection *)connection;

@end

@implementation FFMPEG_FrameReceiver

- (id)initWithContext:(CaptureContext*)context
{
    if (self = [super init]) {
        _context = context;
    }
    return self;
}

- (void)captureOutput:(QTCaptureOutput *)captureOutput
  didOutputVideoFrame:(CVImageBufferRef)videoFrame
     withSampleBuffer:(QTSampleBuffer *)sampleBuffer
       fromConnection:(QTCaptureConnection *)connection
{
    lock_frames(_context);

    if (_context->current_frame != nil) {
        CVBufferRelease(_context->current_frame);
    }

    _context->current_frame = CVBufferRetain(videoFrame);

    pthread_cond_signal(&_context->frame_wait_cond);

    unlock_frames(_context);

    ++_context->frames_captured;
}

@end
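/* Tear down the capture session and release everything the context owns;
 * called from both the error path of read_header and from read_close. */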
static void destroy_context(CaptureContext* ctx)
{
    [ctx->capture_session stopRunning];

    [ctx->capture_session release];
    [ctx->video_output    release];
    [ctx->qt_delegate     release];

    ctx->capture_session = NULL;
    ctx->video_output    = NULL;
    ctx->qt_delegate     = NULL;

    pthread_mutex_destroy(&ctx->frame_lock);
    pthread_cond_destroy(&ctx->frame_wait_cond);

    if (ctx->current_frame)
        CVBufferRelease(ctx->current_frame);
}
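/* Open the capture device named or indexed by the input "filename", attach a
 * decompressed-video output that delivers 24-bit RGB frames to the delegate,
 * and create a single raw-video stream sized from the first captured frame. */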
static int qtkit_read_header(AVFormatContext *s)
{
    NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init];

    CaptureContext* ctx = (CaptureContext*)s->priv_data;

    ctx->first_pts = av_gettime();

    pthread_mutex_init(&ctx->frame_lock, NULL);
    pthread_cond_init(&ctx->frame_wait_cond, NULL);
    // List devices if requested
    if (ctx->list_devices) {
        av_log(ctx, AV_LOG_INFO, "QTKit video devices:\n");
        NSArray *devices = [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo];
        for (QTCaptureDevice *device in devices) {
            const char *name = [[device localizedDisplayName] UTF8String];
            int index        = [devices indexOfObject:device];
            av_log(ctx, AV_LOG_INFO, "[%d] %s\n", index, name);
        }
        goto fail;
    }

    // Find capture device
    QTCaptureDevice *video_device = nil;

    // check for device index given in filename
    if (ctx->video_device_index == -1) {
        sscanf(s->filename, "%d", &ctx->video_device_index);
    }

    if (ctx->video_device_index >= 0) {
        NSArray *devices = [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo];

        if (ctx->video_device_index >= [devices count]) {
            av_log(ctx, AV_LOG_ERROR, "Invalid device index\n");
            goto fail;
        }

        video_device = [devices objectAtIndex:ctx->video_device_index];
    } else if (strncmp(s->filename, "", 1) &&
               strncmp(s->filename, "default", 7)) {
        NSArray *devices = [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo];

        for (QTCaptureDevice *device in devices) {
            if (!strncmp(s->filename, [[device localizedDisplayName] UTF8String], strlen(s->filename))) {
                video_device = device;
                break;
            }
        }
        if (!video_device) {
            av_log(ctx, AV_LOG_ERROR, "Video device not found\n");
            goto fail;
        }
    } else {
        video_device = [QTCaptureDevice defaultInputDeviceWithMediaType:QTMediaTypeMuxed];
    }
    BOOL success = [video_device open:nil];

    // Video capture device not found, looking for QTMediaTypeVideo
    if (!success) {
        video_device = [QTCaptureDevice defaultInputDeviceWithMediaType:QTMediaTypeVideo];
        success      = [video_device open:nil];

        if (!success) {
            av_log(s, AV_LOG_ERROR, "No QT capture device found\n");
            goto fail;
        }
    }

    NSString* dev_display_name = [video_device localizedDisplayName];
    av_log(s, AV_LOG_DEBUG, "'%s' opened\n", [dev_display_name UTF8String]);

    // Initialize capture session
    ctx->capture_session = [[QTCaptureSession alloc] init];

    QTCaptureDeviceInput* capture_dev_input = [[[QTCaptureDeviceInput alloc] initWithDevice:video_device] autorelease];
    success = [ctx->capture_session addInput:capture_dev_input error:nil];

    if (!success) {
        av_log(s, AV_LOG_ERROR, "Failed to add QT capture device to session\n");
        goto fail;
    }

    // Attaching output
    // FIXME: Allow for a user defined pixel format
    ctx->video_output = [[QTCaptureDecompressedVideoOutput alloc] init];

    NSDictionary *captureDictionary = [NSDictionary dictionaryWithObject:
                                       [NSNumber numberWithUnsignedInt:kCVPixelFormatType_24RGB]
                                       forKey:(id)kCVPixelBufferPixelFormatTypeKey];

    [ctx->video_output setPixelBufferAttributes:captureDictionary];

    ctx->qt_delegate = [[FFMPEG_FrameReceiver alloc] initWithContext:ctx];
    [ctx->video_output setDelegate:ctx->qt_delegate];
    [ctx->video_output setAutomaticallyDropsLateVideoFrames:YES];
    [ctx->video_output setMinimumVideoFrameInterval:1.0/ctx->frame_rate];

    success = [ctx->capture_session addOutput:ctx->video_output error:nil];

    if (!success) {
        av_log(s, AV_LOG_ERROR, "can't add video output to capture session\n");
        goto fail;
    }

    [ctx->capture_session startRunning];

    // Take stream info from the first frame.
    while (ctx->frames_captured < 1) {
        CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.1, YES);
    }
    lock_frames(ctx);

    AVStream* stream = avformat_new_stream(s, NULL);

    if (!stream) {
        unlock_frames(ctx);
        goto fail;
    }
    avpriv_set_pts_info(stream, 64, 1, QTKIT_TIMEBASE);
    stream->codec->codec_id   = AV_CODEC_ID_RAWVIDEO;
    stream->codec->codec_type = AVMEDIA_TYPE_VIDEO;
    stream->codec->width      = (int)CVPixelBufferGetWidth (ctx->current_frame);
    stream->codec->height     = (int)CVPixelBufferGetHeight(ctx->current_frame);
    stream->codec->pix_fmt    = AV_PIX_FMT_RGB24;

    CVBufferRelease(ctx->current_frame);
    ctx->current_frame = nil;

    unlock_frames(ctx);

    [pool release];

    return 0;

fail:
    [pool release];

    destroy_context(ctx);

    return AVERROR(EIO);
}
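/* Copy the most recent frame into a packet, blocking on frame_wait_cond until
 * the delegate has delivered one. PTS/DTS come from the wallclock offset since
 * first_pts, rescaled from AV_TIME_BASE to the QTKIT_TIMEBASE stream timebase. */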
static int qtkit_read_packet(AVFormatContext *s, AVPacket *pkt)
{
    CaptureContext* ctx = (CaptureContext*)s->priv_data;

    do {
        lock_frames(ctx);

        if (ctx->current_frame != nil) {
            if (av_new_packet(pkt, (int)CVPixelBufferGetDataSize(ctx->current_frame)) < 0) {
                unlock_frames(ctx);
                return AVERROR(EIO);
            }
            pkt->pts = pkt->dts = av_rescale_q(av_gettime() - ctx->first_pts, AV_TIME_BASE_Q, kQTKitTimeBase_q);
            pkt->stream_index = 0;
            pkt->flags |= AV_PKT_FLAG_KEY;

            CVPixelBufferLockBaseAddress(ctx->current_frame, 0);

            void* data = CVPixelBufferGetBaseAddress(ctx->current_frame);
            memcpy(pkt->data, data, pkt->size);

            CVPixelBufferUnlockBaseAddress(ctx->current_frame, 0);
            CVBufferRelease(ctx->current_frame);
            ctx->current_frame = nil;
        } else {
            pkt->data = NULL;
            pthread_cond_wait(&ctx->frame_wait_cond, &ctx->frame_lock);
        }

        unlock_frames(ctx);
    } while (!pkt->data);

    return 0;
}
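/* Release the capture session and per-context resources when the input is closed. */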
static int qtkit_close(AVFormatContext *s)
{
    CaptureContext* ctx = (CaptureContext*)s->priv_data;

    destroy_context(ctx);

    return 0;
}
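/* Per-instance options exposed through the AVClass below; all of them are
 * input (decoding) parameters. */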
static const AVOption options[] = {
    { "frame_rate", "set frame rate", offsetof(CaptureContext, frame_rate), AV_OPT_TYPE_FLOAT, { .dbl = 30.0 }, 0.1, 30.0, AV_OPT_FLAG_DECODING_PARAM, NULL },
{ "list_devices" , "list available devices" , offsetof ( CaptureContext , list_devices ) , AV_OPT _TYPE _INT , { . i64 = 0 } , 0 , 1 , AV_OPT _FLAG _DECODING _PARAM , "list_devices" } ,
{ "true" , "" , 0 , AV_OPT _TYPE _CONST , { . i64 = 1 } , 0 , 0 , AV_OPT _FLAG _DECODING _PARAM , "list_devices" } ,
{ "false" , "" , 0 , AV_OPT _TYPE _CONST , { . i64 = 0 } , 0 , 0 , AV_OPT _FLAG _DECODING _PARAM , "list_devices" } ,
{ "video_device_index" , "select video device by index for devices with same name (starts at 0)" , offsetof ( CaptureContext , video_device _index ) , AV_OPT _TYPE _INT , { . i64 = -1 } , -1 , INT_MAX , AV_OPT _FLAG _DECODING _PARAM } ,
    { NULL },
};
static const AVClass qtkit_class = {
    .class_name = "QTKit input device",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
    .category   = AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT,
};
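/* Demuxer definition registered with libavdevice; AVFMT_NOFILE marks the
 * "filename" as a device specifier handled here rather than a file to open. */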
AVInputFormat ff_qtkit_demuxer = {
    .name           = "qtkit",
    .long_name      = NULL_IF_CONFIG_SMALL("QTKit input device"),
    .priv_data_size = sizeof(CaptureContext),
    .read_header    = qtkit_read_header,
    .read_packet    = qtkit_read_packet,
    .read_close     = qtkit_close,
    .flags          = AVFMT_NOFILE,
    .priv_class     = &qtkit_class,
};