0
是否可以使用ASIFormDataRequest上傳拍攝視頻的幀?使用ASIFormDataRequest上傳視頻幀?
代碼我有:
- (void)viewDidLoad
{
[super viewDidLoad];
frameCount = 0;
[myQueue cancelAllOperations];
[myQueue setDelegate:self];
NSString *urlString = @"url";
NSURL *url = [NSURL URLWithString:urlString];
request = [ASIFormDataRequest requestWithURL:url];
objReturner = [[ReturnerClass alloc] init];
NSError *deviceError;
AVCaptureDevice *cameraDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
AVCaptureDeviceInput *inputDevice = [AVCaptureDeviceInput deviceInputWithDevice:cameraDevice error:&deviceError];
// make output device
AVCaptureVideoDataOutput *outputDevice = [[AVCaptureVideoDataOutput alloc] init];
outputDevice.videoSettings = [NSDictionary dictionaryWithObject: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey: (id)kCVPixelBufferPixelFormatTypeKey];
[outputDevice setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
// initialize capture session
self.captureSession = [[AVCaptureSession alloc] init];
[self.captureSession addInput:inputDevice];
[self.captureSession addOutput:outputDevice];
// make preview layer and add so that camera's view is displayed on screen
AVCaptureVideoPreviewLayer *previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
previewLayer.frame = self.previewLayer.bounds;
[self.previewLayer.layer addSublayer:previewLayer];
[self.captureSession startRunning];
}
-(void) captureOutput:(AVCaptureOutput*)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection*)connection
{
//[connection setVideoMinFrameDuration:CMTimeMake(1, 30)];
/*We create an autorelease pool because as we are not in the main_queue our code is
not executed in the main thread. So we have to create an autorelease pool for the thread we are in*/
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
/*Lock the image buffer*/
CVPixelBufferLockBaseAddress(imageBuffer,0);
/*Get information about the image*/
uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
size_t width = CVPixelBufferGetWidth(imageBuffer);
size_t height = CVPixelBufferGetHeight(imageBuffer);
/*Create a CGImageRef from the CVImageBufferRef*/
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
CGImageRef newImage = CGBitmapContextCreateImage(newContext);
/*We release some components*/
CGContextRelease(newContext);
CGColorSpaceRelease(colorSpace);
/*We display the result on the custom layer. All the display stuff must be done in the main thread because
UIKit is no thread safe, and as we are not in the main thread (remember we didn't use the main_queue)
we use performSelectorOnMainThread to call our CALayer and tell it to display the CGImage.*/
//[self.customLayer performSelectorOnMainThread:@selector(setContents:) withObject: (id) newImage waitUntilDone:YES];
/*We display the result on the image view (We need to change the orientation of the image so that the video is displayed correctly).
Same thing as for the CALayer we are not in the main thread so ...*/
UIImage *image= [UIImage imageWithCGImage:newImage scale:1.0 orientation:UIImageOrientationRight];
/*We relase the CGImageRef*/
CGImageRelease(newImage);
//[self.imageView performSelectorOnMainThread:@selector(setImage:) withObject:image waitUntilDone:YES];
NSData *imageData = UIImageJPEGRepresentation(image,0.1); //change Image to NSData
[self sendFrameToServer:imageData];
/*We unlock the image buffer*/
CVPixelBufferUnlockBaseAddress(imageBuffer,0);
}
-(void)sendFrameToServer:(NSData *)imageData{
[request setData:imageData withFileName:[NSString stringWithFormat:@"%i_%@.png",frameCount,[objReturner returnTimeStamp]] andContentType:@"image/png" forKey:@"imagetoupload"];
[request startSynchronous];
[myQueue addOperation:request];
NSLog(@"Value: %@",[request responseString]);
frameCount++;
}
我聲明myQueue中,並請求對象在.H
我這裏的問題是,NSLog的響應總是顯示被髮送到第一圖像服務器,它永遠不會改變例如「價值:文件0_2013-03-14 19:27:34.png已上傳」保持這樣的整個時間,而frameCount正在增加,日期秒甚至沒有上升。
有沒有解決方案呢?
僅使用[標籤:Xcode]標籤直接關於工具本身的問題。謝謝! – Undo 2013-03-14 18:56:36