Commit 8519db8e authored by Alexander Alekhin

Merge pull request #15464 from tallestorange:3.4

parents f05d5888 17402254
@@ -35,6 +35,10 @@
 #import <AVFoundation/AVFoundation.h>
 #import <Foundation/NSException.h>
 
+#define CV_CAP_MODE_BGR CV_FOURCC_MACRO('B','G','R','3')
+#define CV_CAP_MODE_RGB CV_FOURCC_MACRO('R','G','B','3')
+#define CV_CAP_MODE_GRAY CV_FOURCC_MACRO('G','R','E','Y')
+#define CV_CAP_MODE_YUYV CV_FOURCC_MACRO('Y', 'U', 'Y', 'V')
 
 /********************** Declaration of class headers ************************/
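These four mode constants are FOURCC codes: CV_FOURCC_MACRO is OpenCV's standard macro that packs four 8-bit characters into a 32-bit integer, low byte first, so a mode value survives the round trip through the double-typed get/set property interface used below. A minimal sketch of the packing; the standalone main() driver is illustrative only, not part of this commit:

    #include <cstdint>
    #include <cstdio>

    // Stock OpenCV definition: four characters packed low byte first.
    #define CV_FOURCC_MACRO(c1, c2, c3, c4) \
        (((c1) & 255) + (((c2) & 255) << 8) + (((c3) & 255) << 16) + (((c4) & 255) << 24))

    int main() {
        uint32_t grey = CV_FOURCC_MACRO('G', 'R', 'E', 'Y');
        std::printf("GREY packs to 0x%08X\n", (unsigned)grey);  // prints 0x59455247
        return 0;
    }
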
@@ -125,37 +129,36 @@ class CvCaptureCAM : public CvCapture {
 *****************************************************************************/
 
 class CvCaptureFile : public CvCapture {
 public:
     CvCaptureFile(const char* filename) ;
     ~CvCaptureFile();
     virtual bool grabFrame();
     virtual IplImage* retrieveFrame(int);
-    virtual IplImage* queryFrame();
     virtual double getProperty(int property_id) const;
     virtual bool setProperty(int property_id, double value);
     virtual int didStart();
 
 private:
-    AVAssetReader *mMovieReader;
-    char* imagedata;
-    IplImage* image;
-    char* bgr_imagedata;
-    IplImage* bgr_image;
+    AVAsset *mAsset;
+    AVAssetTrack *mAssetTrack;
+    AVAssetReader *mAssetReader;
+    AVAssetReaderTrackOutput *mTrackOutput;
+
+    CMSampleBufferRef mCurrentSampleBuffer;
+    CVImageBufferRef mGrabbedPixels;
+    IplImage *mDeviceImage;
+    uint8_t *mOutImagedata;
+    IplImage *mOutImage;
     size_t currSize;
+    uint32_t mMode;
+    int mFormat;
 
+    bool setupReadingAt(CMTime position);
     IplImage* retrieveFramePixelBuffer();
-    double getFPS();
 
-    int movieWidth;
-    int movieHeight;
-    double movieFPS;
-    double currentFPS;
-    double movieDuration;
-    int changedPos;
+    CMTime mFrameTimestamp;
+    size_t mFrameNum;
 
     int started;
 };
@@ -766,114 +769,128 @@ fromConnection:(AVCaptureConnection *)connection{
 *****************************************************************************/
 
 CvCaptureFile::CvCaptureFile(const char* filename) {
-    NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];
+    NSAutoreleasePool *localpool = [[NSAutoreleasePool alloc] init];
 
-    mMovieReader = nil;
-    image = NULL;
-    bgr_image = NULL;
-    imagedata = NULL;
-    bgr_imagedata = NULL;
+    mAsset = nil;
+    mAssetTrack = nil;
+    mAssetReader = nil;
+    mTrackOutput = nil;
+    mDeviceImage = NULL;
+    mOutImage = NULL;
+    mOutImagedata = NULL;
     currSize = 0;
-
-    movieWidth = 0;
-    movieHeight = 0;
-    movieFPS = 0;
-    currentFPS = 0;
-    movieDuration = 0;
-    changedPos = 0;
+    mMode = CV_CAP_MODE_BGR;
+    mFormat = CV_8UC3;
+    mCurrentSampleBuffer = NULL;
+    mGrabbedPixels = NULL;
+    mFrameTimestamp = kCMTimeZero;
+    mFrameNum = 0;
 
     started = 0;
 
-    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:
-                         [NSURL fileURLWithPath: [NSString stringWithUTF8String:filename]]
-                         options:nil];
-
-    AVAssetTrack* videoTrack = nil;
-    NSArray* tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
-    if ([tracks count] == 1)
-    {
-        videoTrack = [tracks objectAtIndex:0];
-
-        movieWidth = videoTrack.naturalSize.width;
-        movieHeight = videoTrack.naturalSize.height;
-        movieFPS = videoTrack.nominalFrameRate;
-        currentFPS = movieFPS; //Debugging !! should be getFPS();
-        //Debugging. need to be checked
-
-        // In ms
-        movieDuration = videoTrack.timeRange.duration.value/videoTrack.timeRange.duration.timescale * 1000;
-
-        started = 1;
-        NSError* error = nil;
-        mMovieReader = [[AVAssetReader alloc] initWithAsset:asset error:&error];
-        if (error)
-            NSLog(@"%@", [error localizedDescription]);
-
-        NSDictionary* videoSettings =
-        [NSDictionary dictionaryWithObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA]
-                                    forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
-
-        [mMovieReader addOutput:[AVAssetReaderTrackOutput
-                                 assetReaderTrackOutputWithTrack:videoTrack
-                                 outputSettings:videoSettings]];
-        [mMovieReader startReading];
-    }
-
-    /*
-    // Asynchronously open the video in another thread. Always fail.
-    [asset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"tracks"] completionHandler:
-     ^{
-         // The completion block goes here.
-         dispatch_async(dispatch_get_main_queue(),
-                        ^{
-                            AVAssetTrack* ::videoTrack = nil;
-                            NSArray* ::tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
-                            if ([tracks count] == 1)
-                            {
-                                videoTrack = [tracks objectAtIndex:0];
-
-                                movieWidth = videoTrack.naturalSize.width;
-                                movieHeight = videoTrack.naturalSize.height;
-                                movieFPS = videoTrack.nominalFrameRate;
-                                currentFPS = movieFPS; //Debugging !! should be getFPS();
-                                //Debugging. need to be checked
-                                movieDuration = videoTrack.timeRange.duration.value/videoTrack.timeRange.duration.timescale * 1000;
-
-                                started = 1;
-
-                                NSError* ::error = nil;
-                                // mMovieReader is a member variable
-                                mMovieReader = [[AVAssetReader alloc] initWithAsset:asset error:&error];
-                                if (error)
-                                    NSLog(@"%@", [error localizedDescription]);
-
-                                NSDictionary* ::videoSettings =
-                                [NSDictionary dictionaryWithObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA]
-                                                            forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
-
-                                [mMovieReader addOutput:[AVAssetReaderTrackOutput
-                                                         assetReaderTrackOutputWithTrack:videoTrack
-                                                         outputSettings:videoSettings]];
-                                [mMovieReader startReading];
-                            }
-                        });
-     }];
-     */
+    mAsset = [[AVAsset assetWithURL:[NSURL fileURLWithPath: @(filename)]] retain];
+
+    if ( mAsset == nil ) {
+        fprintf(stderr, "OpenCV: Couldn't read movie file \"%s\"\n", filename);
+        [localpool drain];
+        started = 0;
+        return;
+    }
+
+    NSArray *tracks = [mAsset tracksWithMediaType:AVMediaTypeVideo];
+    if ([tracks count] == 0) {
+        fprintf(stderr, "OpenCV: Couldn't read video stream from file \"%s\"\n", filename);
+        [localpool drain];
+        started = 0;
+        return;
+    }
+
+    mAssetTrack = [tracks[0] retain];
+
+    if ( ! setupReadingAt(kCMTimeZero) ) {
+        fprintf(stderr, "OpenCV: Couldn't read movie file \"%s\"\n", filename);
+        [localpool drain];
+        started = 0;
+        return;
+    }
+
+    started = 1;
 
     [localpool drain];
 }
 
 CvCaptureFile::~CvCaptureFile() {
-    NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];
-    if (imagedata != NULL) free(imagedata);
-    if (bgr_imagedata != NULL) free(bgr_imagedata);
-    cvReleaseImage(&image);
-    cvReleaseImage(&bgr_image);
-    [mMovieReader release];
+    NSAutoreleasePool *localpool = [[NSAutoreleasePool alloc] init];
+
+    free(mOutImagedata);
+    cvReleaseImage(&mOutImage);
+    cvReleaseImage(&mDeviceImage);
+    [mAssetReader release];
+    [mTrackOutput release];
+    [mAssetTrack release];
+    [mAsset release];
+    CVBufferRelease(mGrabbedPixels);
+    if ( mCurrentSampleBuffer ) {
+        CFRelease(mCurrentSampleBuffer);
+    }
+
     [localpool drain];
 }
+
+bool CvCaptureFile::setupReadingAt(CMTime position) {
+    if (mAssetReader) {
+        if (mAssetReader.status == AVAssetReaderStatusReading) {
+            [mAssetReader cancelReading];
+        }
+        [mAssetReader release];
+        mAssetReader = nil;
+    }
+    if (mTrackOutput) {
+        [mTrackOutput release];
+        mTrackOutput = nil;
+    }
+
+    // Capture in a pixel format that can be converted efficiently to the output mode.
+    OSType pixelFormat;
+    if (mMode == CV_CAP_MODE_BGR || mMode == CV_CAP_MODE_RGB) {
+        pixelFormat = kCVPixelFormatType_32BGRA;
+        mFormat = CV_8UC3;
+    } else if (mMode == CV_CAP_MODE_GRAY) {
+        pixelFormat = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
+        mFormat = CV_8UC1;
+    } else if (mMode == CV_CAP_MODE_YUYV) {
+        pixelFormat = kCVPixelFormatType_422YpCbCr8;
+        mFormat = CV_8UC2;
+    } else {
+        fprintf(stderr, "VIDEOIO ERROR: AVF Mac: Unsupported mode: %d\n", mMode);
+        return false;
+    }
+
+    NSDictionary *settings =
+        @{
+            (id)kCVPixelBufferPixelFormatTypeKey: @(pixelFormat)
+        };
+    mTrackOutput = [[AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack: mAssetTrack
+                                                               outputSettings: settings] retain];
+
+    if ( !mTrackOutput ) {
+        fprintf(stderr, "OpenCV: error in [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:outputSettings:]\n");
+        return false;
+    }
+
+    NSError *error = nil;
+    mAssetReader = [[AVAssetReader assetReaderWithAsset: mAsset
+                                                  error: &error] retain];
+    if ( error ) {
+        fprintf(stderr, "OpenCV: error in [AVAssetReader assetReaderWithAsset:error:]\n");
+        NSLog(@"OpenCV: %@", error.localizedDescription);
+        return false;
+    }
+
+    mAssetReader.timeRange = CMTimeRangeMake(position, kCMTimePositiveInfinity);
+    mFrameTimestamp = position;
+    mFrameNum = round((mFrameTimestamp.value * mAssetTrack.nominalFrameRate) / double(mFrameTimestamp.timescale));
+    [mAssetReader addOutput: mTrackOutput];
+    return [mAssetReader startReading];
+}
 
 int CvCaptureFile::didStart() {
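AVAssetReader is a forward-only reader: once startReading has been called it cannot rewind, which is why setupReadingAt() tears down the current reader and track output and rebuilds them with a timeRange starting at the requested position, resetting mFrameTimestamp and mFrameNum to match. From user code that path is reached through VideoCapture::set(); a minimal sketch, assuming this AVFoundation backend serves the file and "movie.mov" is a placeholder path:

    #include <opencv2/opencv.hpp>

    int main() {
        cv::VideoCapture cap("movie.mov");      // placeholder path
        if (!cap.isOpened()) return 1;

        // Each reposition rebuilds the AVAssetReader behind the scenes,
        // since the reader itself cannot seek once reading has started.
        cap.set(cv::CAP_PROP_POS_MSEC, 5000);   // -> setupReadingAt(5 s)
        cap.set(cv::CAP_PROP_POS_FRAMES, 120);  // -> setupReadingAt(120 / nominalFrameRate)

        cv::Mat frame;
        return cap.read(frame) ? 0 : 1;         // first frame at/after the seek target
    }
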
@@ -881,101 +898,191 @@ int CvCaptureFile::didStart() {
 }
 bool CvCaptureFile::grabFrame() {
-    //everything is done in queryFrame;
-    currentFPS = movieFPS;
-    return 1;
-
-/*
-    double t1 = getProperty(CV_CAP_PROP_POS_MSEC);
-    [mCaptureSession stepForward];
-    double t2 = getProperty(CV_CAP_PROP_POS_MSEC);
-    if (t2>t1 && !changedPos) {
-        currentFPS = 1000.0/(t2-t1);
-    } else {
-        currentFPS = movieFPS;
-    }
-    changedPos = 0;
-*/
+    NSAutoreleasePool *localpool = [[NSAutoreleasePool alloc] init];
+
+    CVBufferRelease(mGrabbedPixels);
+    if ( mCurrentSampleBuffer ) {
+        CFRelease(mCurrentSampleBuffer);
+    }
+    mCurrentSampleBuffer = [mTrackOutput copyNextSampleBuffer];
+    mGrabbedPixels = CMSampleBufferGetImageBuffer(mCurrentSampleBuffer);
+    CVBufferRetain(mGrabbedPixels);
+    mFrameTimestamp = CMSampleBufferGetOutputPresentationTimeStamp(mCurrentSampleBuffer);
+    mFrameNum++;
+
+    bool isReading = (mAssetReader.status == AVAssetReaderStatusReading);
+    [localpool drain];
+    return isReading;
 }
 
 IplImage* CvCaptureFile::retrieveFramePixelBuffer() {
-    NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];
-
-    if (mMovieReader.status != AVAssetReaderStatusReading){
-        return NULL;
-    }
-
-    AVAssetReaderOutput * output = [mMovieReader.outputs objectAtIndex:0];
-    CMSampleBufferRef sampleBuffer = [output copyNextSampleBuffer];
-    if (!sampleBuffer) {
-        [localpool drain];
-        return NULL;
-    }
-    CVPixelBufferRef frame = CMSampleBufferGetImageBuffer(sampleBuffer);
-    CVPixelBufferRef pixels = CVBufferRetain(frame);
-
-    CVPixelBufferLockBaseAddress(pixels, 0);
-
-    uint32_t* baseaddress = (uint32_t*)CVPixelBufferGetBaseAddress(pixels);
-    size_t width = CVPixelBufferGetWidth(pixels);
-    size_t height = CVPixelBufferGetHeight(pixels);
-    size_t rowBytes = CVPixelBufferGetBytesPerRow(pixels);
-
-    if (rowBytes != 0) {
-
-        if (currSize != rowBytes*height*sizeof(char)) {
-            currSize = rowBytes*height*sizeof(char);
-            if (imagedata != NULL) free(imagedata);
-            if (bgr_imagedata != NULL) free(bgr_imagedata);
-            imagedata = (char*)malloc(currSize);
-            bgr_imagedata = (char*)malloc(currSize);
-        }
-
-        memcpy(imagedata, baseaddress, currSize);
-
-        if (image == NULL) {
-            image = cvCreateImageHeader(cvSize((int)width,(int)height), IPL_DEPTH_8U, 4);
-        }
-
-        image->width = (int)width;
-        image->height = (int)height;
-        image->nChannels = 4;
-        image->depth = IPL_DEPTH_8U;
-        image->widthStep = (int)rowBytes;
-        image->imageData = imagedata;
-        image->imageSize = (int)currSize;
-
-        if (bgr_image == NULL) {
-            bgr_image = cvCreateImageHeader(cvSize((int)width,(int)height), IPL_DEPTH_8U, 3);
-        }
-
-        bgr_image->width = (int)width;
-        bgr_image->height = (int)height;
-        bgr_image->nChannels = 3;
-        bgr_image->depth = IPL_DEPTH_8U;
-        bgr_image->widthStep = (int)rowBytes;
-        bgr_image->imageData = bgr_imagedata;
-        bgr_image->imageSize = (int)currSize;
-
-        cvCvtColor(image, bgr_image,CV_BGRA2BGR);
-    }
-
-    CVPixelBufferUnlockBaseAddress(pixels, 0);
-    CVBufferRelease(pixels);
-    CMSampleBufferInvalidate(sampleBuffer);
-    CFRelease(sampleBuffer);
+    if ( ! mGrabbedPixels ) {
+        return 0;
+    }
+
+    NSAutoreleasePool *localpool = [[NSAutoreleasePool alloc] init];
+
+    CVPixelBufferLockBaseAddress(mGrabbedPixels, 0);
+    void *baseaddress;
+    size_t width, height, rowBytes;
+
+    OSType pixelFormat = CVPixelBufferGetPixelFormatType(mGrabbedPixels);
+
+    if (CVPixelBufferIsPlanar(mGrabbedPixels)) {
+        baseaddress = CVPixelBufferGetBaseAddressOfPlane(mGrabbedPixels, 0);
+        width = CVPixelBufferGetWidthOfPlane(mGrabbedPixels, 0);
+        height = CVPixelBufferGetHeightOfPlane(mGrabbedPixels, 0);
+        rowBytes = CVPixelBufferGetBytesPerRowOfPlane(mGrabbedPixels, 0);
+    } else {
+        baseaddress = CVPixelBufferGetBaseAddress(mGrabbedPixels);
+        width = CVPixelBufferGetWidth(mGrabbedPixels);
+        height = CVPixelBufferGetHeight(mGrabbedPixels);
+        rowBytes = CVPixelBufferGetBytesPerRow(mGrabbedPixels);
+    }
+
+    if ( rowBytes == 0 ) {
+        fprintf(stderr, "OpenCV: error: rowBytes == 0\n");
+        CVPixelBufferUnlockBaseAddress(mGrabbedPixels, 0);
+        CVBufferRelease(mGrabbedPixels);
+        mGrabbedPixels = NULL;
+        return 0;
+    }
+
+    int outChannels;
+    if (mMode == CV_CAP_MODE_BGR || mMode == CV_CAP_MODE_RGB) {
+        outChannels = 3;
+    } else if (mMode == CV_CAP_MODE_GRAY) {
+        outChannels = 1;
+    } else if (mMode == CV_CAP_MODE_YUYV) {
+        outChannels = 2;
+    } else {
+        fprintf(stderr, "VIDEOIO ERROR: AVF Mac: Unsupported mode: %d\n", mMode);
+        CVPixelBufferUnlockBaseAddress(mGrabbedPixels, 0);
+        CVBufferRelease(mGrabbedPixels);
+        mGrabbedPixels = NULL;
+        return 0;
+    }
+
+    if ( currSize != width*outChannels*height ) {
+        currSize = width*outChannels*height;
+        free(mOutImagedata);
+        mOutImagedata = reinterpret_cast<uint8_t*>(malloc(currSize));
+    }
+
+    if (mOutImage == NULL) {
+        mOutImage = cvCreateImageHeader(cvSize((int)width,(int)height), IPL_DEPTH_8U, outChannels);
+    }
+    mOutImage->width = int(width);
+    mOutImage->height = int(height);
+    mOutImage->nChannels = outChannels;
+    mOutImage->depth = IPL_DEPTH_8U;
+    mOutImage->widthStep = int(width*outChannels);
+    mOutImage->imageData = reinterpret_cast<char *>(mOutImagedata);
+    mOutImage->imageSize = int(currSize);
+
+    int deviceChannels;
+    int cvtCode;
+
+    if ( pixelFormat == kCVPixelFormatType_32BGRA ) {
+        deviceChannels = 4;
+
+        if (mMode == CV_CAP_MODE_BGR) {
+            cvtCode = CV_BGRA2BGR;
+        } else if (mMode == CV_CAP_MODE_RGB) {
+            cvtCode = CV_BGRA2RGB;
+        } else if (mMode == CV_CAP_MODE_GRAY) {
+            cvtCode = CV_BGRA2GRAY;
+        } else {
+            CVPixelBufferUnlockBaseAddress(mGrabbedPixels, 0);
+            CVBufferRelease(mGrabbedPixels);
+            mGrabbedPixels = NULL;
+            fprintf(stderr, "OpenCV: unsupported pixel conversion mode\n");
+            return 0;
+        }
+    } else if ( pixelFormat == kCVPixelFormatType_24RGB ) {
+        deviceChannels = 3;
+
+        if (mMode == CV_CAP_MODE_BGR) {
+            cvtCode = CV_RGB2BGR;
+        } else if (mMode == CV_CAP_MODE_RGB) {
+            cvtCode = 0;
+        } else if (mMode == CV_CAP_MODE_GRAY) {
+            cvtCode = CV_RGB2GRAY;
+        } else {
+            CVPixelBufferUnlockBaseAddress(mGrabbedPixels, 0);
+            CVBufferRelease(mGrabbedPixels);
+            mGrabbedPixels = NULL;
+            fprintf(stderr, "OpenCV: unsupported pixel conversion mode\n");
+            return 0;
+        }
+    } else if ( pixelFormat == kCVPixelFormatType_422YpCbCr8 ) {    // 422 (2vuy, UYVY)
+        deviceChannels = 2;
+
+        if (mMode == CV_CAP_MODE_BGR) {
+            cvtCode = CV_YUV2BGR_UYVY;
+        } else if (mMode == CV_CAP_MODE_RGB) {
+            cvtCode = CV_YUV2RGB_UYVY;
+        } else if (mMode == CV_CAP_MODE_GRAY) {
+            cvtCode = CV_YUV2GRAY_UYVY;
+        } else if (mMode == CV_CAP_MODE_YUYV) {
+            cvtCode = -1;    // Copy
+        } else {
+            CVPixelBufferUnlockBaseAddress(mGrabbedPixels, 0);
+            CVBufferRelease(mGrabbedPixels);
+            mGrabbedPixels = NULL;
+            fprintf(stderr, "OpenCV: unsupported pixel conversion mode\n");
+            return 0;
+        }
+    } else if ( pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange ||   // 420v
+                pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange ) {   // 420f
+        height = height * 3 / 2;
+        deviceChannels = 1;
+
+        if (mMode == CV_CAP_MODE_BGR) {
+            cvtCode = CV_YUV2BGR_YV12;
+        } else if (mMode == CV_CAP_MODE_RGB) {
+            cvtCode = CV_YUV2RGB_YV12;
+        } else if (mMode == CV_CAP_MODE_GRAY) {
+            cvtCode = CV_YUV2GRAY_420;
+        } else {
+            CVPixelBufferUnlockBaseAddress(mGrabbedPixels, 0);
+            CVBufferRelease(mGrabbedPixels);
+            mGrabbedPixels = NULL;
+            fprintf(stderr, "OpenCV: unsupported pixel conversion mode\n");
+            return 0;
+        }
+    } else {
+        char pfBuf[] = { (char)pixelFormat, (char)(pixelFormat >> 8),
+                         (char)(pixelFormat >> 16), (char)(pixelFormat >> 24), '\0' };
+        fprintf(stderr, "OpenCV: unsupported pixel format '%s'\n", pfBuf);
+        CVPixelBufferUnlockBaseAddress(mGrabbedPixels, 0);
+        CVBufferRelease(mGrabbedPixels);
+        mGrabbedPixels = NULL;
+        return 0;
+    }
+
+    if (mDeviceImage == NULL) {
+        mDeviceImage = cvCreateImageHeader(cvSize(int(width),int(height)), IPL_DEPTH_8U, deviceChannels);
+    }
+    mDeviceImage->width = int(width);
+    mDeviceImage->height = int(height);
+    mDeviceImage->nChannels = deviceChannels;
+    mDeviceImage->depth = IPL_DEPTH_8U;
+    mDeviceImage->widthStep = int(rowBytes);
+    mDeviceImage->imageData = reinterpret_cast<char *>(baseaddress);
+    mDeviceImage->imageSize = int(rowBytes*height);
+
+    if (cvtCode == -1) {
+        // Copy.
+        cv::cvarrToMat(mDeviceImage).copyTo(cv::cvarrToMat(mOutImage));
+    } else {
+        cvCvtColor(mDeviceImage, mOutImage, cvtCode);
+    }
+
+    CVPixelBufferUnlockBaseAddress(mGrabbedPixels, 0);
 
     [localpool drain];
-    return bgr_image;
+
+    return mOutImage;
 }
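grabFrame() now does only the minimum needed to advance the stream: it releases the previous sample buffer, copies the next one from the track output, retains its pixel buffer, and records the presentation timestamp; all unpacking and color conversion is deferred to retrieveFramePixelBuffer(). That matches OpenCV's usual grab/retrieve split, which user code can drive directly; a short sketch with a placeholder path:

    #include <opencv2/opencv.hpp>

    int main() {
        cv::VideoCapture cap("movie.mov");   // placeholder path
        cv::Mat frame;
        // grab()     -> CvCaptureFile::grabFrame(): copyNextSampleBuffer + retain
        // retrieve() -> retrieveFramePixelBuffer(): lock, dispatch on format, convert
        while (cap.grab()) {
            if (!cap.retrieve(frame)) break;
            // ... process frame; 3-channel BGR in the default CV_CAP_MODE_BGR mode ...
        }
        return 0;
    }
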
@@ -983,123 +1090,88 @@ IplImage* CvCaptureFile::retrieveFrame(int) {
     return retrieveFramePixelBuffer();
 }
-IplImage* CvCaptureFile::queryFrame() {
-    grabFrame();
-    return retrieveFrame(0);
-}
-
-double CvCaptureFile::getFPS() {
-/*
-    if (mCaptureSession == nil) return 0;
-    NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];
-    double now = getProperty(CV_CAP_PROP_POS_MSEC);
-    double retval = 0;
-    if (now == 0) {
-        [mCaptureSession stepForward];
-        double t2 = getProperty(CV_CAP_PROP_POS_MSEC);
-        [mCaptureSession stepBackward];
-        retval = 1000.0 / (t2-now);
-    } else {
-        [mCaptureSession stepBackward];
-        double t2 = getProperty(CV_CAP_PROP_POS_MSEC);
-        [mCaptureSession stepForward];
-        retval = 1000.0 / (now-t2);
-    }
-    [localpool drain];
-    return retval;
-*/
-    return 30.0; //TODO: Debugging
-}
-
-double CvCaptureFile::getProperty(int /*property_id*/) const{
-/*
-    if (mCaptureSession == nil) return 0;
-
-    NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];
-
-    double retval;
-    QTTime t;
-
-    switch (property_id) {
-        case CV_CAP_PROP_POS_MSEC:
-            [[mCaptureSession attributeForKey:QTMovieCurrentTimeAttribute] getValue:&t];
-            retval = t.timeValue * 1000.0 / t.timeScale;
-            break;
-        case CV_CAP_PROP_POS_FRAMES:
-            retval = movieFPS * getProperty(CV_CAP_PROP_POS_MSEC) / 1000;
-            break;
-        case CV_CAP_PROP_POS_AVI_RATIO:
-            retval = (getProperty(CV_CAP_PROP_POS_MSEC)) / (movieDuration );
-            break;
-        case CV_CAP_PROP_FRAME_WIDTH:
-            retval = movieWidth;
-            break;
-        case CV_CAP_PROP_FRAME_HEIGHT:
-            retval = movieHeight;
-            break;
-        case CV_CAP_PROP_FPS:
-            retval = currentFPS;
-            break;
-        case CV_CAP_PROP_FOURCC:
-        default:
-            retval = 0;
-    }
-
-    [localpool drain];
-    return retval;
-*/
-    return 1.0; //Debugging
-}
+double CvCaptureFile::getProperty(int property_id) const{
+    if (mAsset == nil) return 0;
+
+    CMTime t;
+
+    switch (property_id) {
+        case CV_CAP_PROP_POS_MSEC:
+            return mFrameTimestamp.value * 1000.0 / mFrameTimestamp.timescale;
+        case CV_CAP_PROP_POS_FRAMES:
+            return mAssetTrack.nominalFrameRate > 0 ? mFrameNum : 0;
+        case CV_CAP_PROP_POS_AVI_RATIO:
+            t = [mAsset duration];
+            return (mFrameTimestamp.value * t.timescale) / double(mFrameTimestamp.timescale * t.value);
+        case CV_CAP_PROP_FRAME_WIDTH:
+            return mAssetTrack.naturalSize.width;
+        case CV_CAP_PROP_FRAME_HEIGHT:
+            return mAssetTrack.naturalSize.height;
+        case CV_CAP_PROP_FPS:
+            return mAssetTrack.nominalFrameRate;
+        case CV_CAP_PROP_FRAME_COUNT:
+            t = [mAsset duration];
+            return round((t.value * mAssetTrack.nominalFrameRate) / double(t.timescale));
+        case CV_CAP_PROP_FORMAT:
+            return mFormat;
+        case CV_CAP_PROP_FOURCC:
+            return mMode;
+        default:
+            break;
+    }
+
+    return 0;
+}
 
-bool CvCaptureFile::setProperty(int /*property_id*/, double /*value*/) {
-/*
-    if (mCaptureSession == nil) return false;
-
-    NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];
-
-    bool retval = false;
-    QTTime t;
-
-    double ms;
-
-    switch (property_id) {
-        case CV_CAP_PROP_POS_MSEC:
-            [[mCaptureSession attributeForKey:QTMovieCurrentTimeAttribute] getValue:&t];
-            t.timeValue = value * t.timeScale / 1000;
-            [mCaptureSession setCurrentTime:t];
-            changedPos = 1;
-            retval = true;
-            break;
-        case CV_CAP_PROP_POS_FRAMES:
-            ms = (value*1000.0 -5)/ currentFPS;
-            retval = setProperty(CV_CAP_PROP_POS_MSEC, ms);
-            break;
-        case CV_CAP_PROP_POS_AVI_RATIO:
-            ms = value * movieDuration;
-            retval = setProperty(CV_CAP_PROP_POS_MSEC, ms);
-            break;
-        case CV_CAP_PROP_FRAME_WIDTH:
-            //retval = movieWidth;
-            break;
-        case CV_CAP_PROP_FRAME_HEIGHT:
-            //retval = movieHeight;
-            break;
-        case CV_CAP_PROP_FPS:
-            //etval = currentFPS;
-            break;
-        case CV_CAP_PROP_FOURCC:
-        default:
-            retval = false;
-    }
-
-    [localpool drain];
-
-    return retval;
-*/
-    return true;
-}
+bool CvCaptureFile::setProperty(int property_id, double value) {
+    if (mAsset == nil) return false;
+
+    NSAutoreleasePool* localpool = [[NSAutoreleasePool alloc] init];
+
+    bool retval = false;
+    CMTime t;
+
+    switch (property_id) {
+        case CV_CAP_PROP_POS_MSEC:
+            t = mAsset.duration;
+            t.value = value * t.timescale / 1000;
+            retval = setupReadingAt(t);
+            break;
+        case CV_CAP_PROP_POS_FRAMES:
+            retval = mAssetTrack.nominalFrameRate > 0 ? setupReadingAt(CMTimeMake(value, mAssetTrack.nominalFrameRate)) : false;
+            break;
+        case CV_CAP_PROP_POS_AVI_RATIO:
+            t = mAsset.duration;
+            t.value = round(t.value * value);
+            retval = setupReadingAt(t);
+            break;
+        case CV_CAP_PROP_FOURCC:
+            uint32_t mode;
+            mode = cvRound(value);
+            if (mMode == mode) {
+                retval = true;
+            } else {
+                switch (mode) {
+                    case CV_CAP_MODE_BGR:
+                    case CV_CAP_MODE_RGB:
+                    case CV_CAP_MODE_GRAY:
+                    case CV_CAP_MODE_YUYV:
+                        mMode = mode;
+                        retval = setupReadingAt(mFrameTimestamp);
+                        break;
+                    default:
+                        fprintf(stderr, "VIDEOIO ERROR: AVF iOS: Unsupported mode: %d\n", mode);
+                        retval=false;
+                        break;
+                }
+            }
+            break;
+        default:
+            break;
+    }
+
+    [localpool drain];
+    return retval;
+}
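Together, getProperty() and setProperty() expose the new mode machinery: writing one of the four CV_CAP_MODE_* FOURCCs to CV_CAP_PROP_FOURCC switches the output mode and re-opens the reader at the current timestamp with a matching CoreVideo pixel format, while reading CV_CAP_PROP_FOURCC and CV_CAP_PROP_FORMAT back reports the active mode and element type. A hedged user-level sketch (placeholder path; it assumes this AVFoundation backend is the one that opened the file):

    #include <opencv2/opencv.hpp>

    int main() {
        cv::VideoCapture cap("movie.mov");   // placeholder path
        int grey = cv::VideoWriter::fourcc('G', 'R', 'E', 'Y');

        // Request single-channel output: the backend rebuilds its reader with a
        // 420 YpCbCr pixel format and switches mFormat to CV_8UC1.
        cap.set(cv::CAP_PROP_FOURCC, grey);

        cv::Mat gray;
        cap.read(gray);                      // gray should now be CV_8UC1
        return (int)cap.get(cv::CAP_PROP_FOURCC) == grey ? 0 : 1;
    }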