
I'm trying to learn Metal, Apple's 3D development framework for iOS. I've looked at several short articles online, but they only cover the basics of Metal. There is also the book "Metal by Example", but it doesn't go into much depth on Metal either. I also checked a similar question that was asked here on Stack Overflow, but the answers just pointed me to the same articles. Does anyone know a good resource (book, video, or web tutorial) that can take me from basic to expert-level knowledge of Metal?


Last month I started a series about using MetalKit (http://mhorga.org), based on the great "Metal by Example" book. My series is planned to go into as much detail as possible, but you may need to wait patiently for the next parts, which will be published over the coming weeks. There aren't many Metal resources out there yet! – Marius


Thanks! I'll definitely check out your tutorials. I also like that you did them in Swift. If you post this as an answer, I'll be sure to accept it. –

Answers


It's hard to find a comprehensive resource guide for Metal because it's still in its infancy. There is some material out there, but not enough to get really good at it.

Here is my advice: bite the bullet and learn some OpenGL ES first. There are plenty of tutorials online, as well as books. Just do a little of it; enough to draw a textured object. But make sure you understand the fundamental concepts behind those basics. After that, pick up a good book on general computer graphics concepts. A very good one is Real-Time Rendering (Möller, Haines, and Hoffman). That will build up your knowledge of graphics as a science, and Metal is just a tool for implementing those concepts. For the implementation itself, Apple's official documentation and the WWDC videos are enough.

Metal and OpenGL ES share a lot of terminology, which is why I recommend learning OpenGL ES first. They are both computer graphics, after all. So your goal should be to learn computer graphics; Metal will then be the tool for your work.
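
To make that shared vocabulary concrete, here is a minimal sketch (my own, not taken from any of the resources above) of the boilerplate a Metal frame starts with: the device plays roughly the role of a GL context, the command queue the role of the GL command stream, and the render pass the role of binding a framebuffer and clearing it. The frame size and clear color are arbitrary:

@import MetalKit; 

// device ~ GL context, command queue ~ the GL command stream 
id<MTLDevice> device = MTLCreateSystemDefaultDevice(); 
id<MTLCommandQueue> queue = [device newCommandQueue]; 

MTKView *view = [[MTKView alloc] initWithFrame:CGRectMake(0, 0, 320, 480)]; 
view.device = device; 
view.clearColor = MTLClearColorMake(0.0, 0.5, 1.0, 1.0); 

// one frame: the render pass here does what glClear + framebuffer binding do in GL 
MTLRenderPassDescriptor *pass = view.currentRenderPassDescriptor; 
if (pass) { 
    id<MTLCommandBuffer> buffer = [queue commandBuffer]; 
    id<MTLRenderCommandEncoder> encoder = [buffer renderCommandEncoderWithDescriptor:pass]; 
    // draw calls would be encoded here 
    [encoder endEncoding]; 
    [buffer presentDrawable:view.currentDrawable]; 
    [buffer commit]; 
} 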


Start with MetalKit and build something practical, like a camera app that processes the video output with one of the Metal Performance Shaders:

@import UIKit; 
@import AVFoundation; 
@import CoreMedia; 
#import <MetalKit/MetalKit.h> 
#import <Metal/Metal.h> 
#import <MetalPerformanceShaders/MetalPerformanceShaders.h> 

@interface ViewController : UIViewController <MTKViewDelegate, AVCaptureVideoDataOutputSampleBufferDelegate> 

@property (strong, nonatomic) AVCaptureSession *avSession; 

@end 

#import "ViewController.h" 

@interface ViewController() { 
    MTKView *_metalView; 

    id<MTLDevice> _device; 
    id<MTLCommandQueue> _commandQueue; 
    id<MTLTexture> _texture; 

    CVMetalTextureCacheRef _textureCache; 
} 

@property (strong, nonatomic) AVCaptureDevice *videoDevice; 
@property (nonatomic) dispatch_queue_t sessionQueue; 

@end 

@implementation ViewController 

- (void)viewDidLoad { 
    NSLog(@"%s", __PRETTY_FUNCTION__); 
    [super viewDidLoad]; 

    _device = MTLCreateSystemDefaultDevice(); 
    // create the command queue once here, rather than once per frame 
    _commandQueue = [_device newCommandQueue]; 
    _metalView = [[MTKView alloc] initWithFrame:self.view.bounds]; 
    [_metalView setContentMode:UIViewContentModeScaleAspectFit]; 
    _metalView.device = _device; 
    _metalView.delegate = self; 
    _metalView.clearColor = MTLClearColorMake(1, 1, 1, 1); 
    _metalView.colorPixelFormat = MTLPixelFormatBGRA8Unorm; 
    _metalView.framebufferOnly = NO; 
    _metalView.autoResizeDrawable = NO; 

    CVMetalTextureCacheCreate(NULL, NULL, _device, NULL, &_textureCache); 

    [self.view addSubview:_metalView]; 

    self.sessionQueue = dispatch_queue_create("session queue", DISPATCH_QUEUE_SERIAL); 

    if ([self setupCamera]) { 
     // -startRunning blocks, so keep it off the main thread 
     dispatch_async(self.sessionQueue, ^{ 
      [self.avSession startRunning]; 
     }); 
    } 
} 

- (BOOL)setupCamera { 
    NSLog(@"%s", __PRETTY_FUNCTION__); 
    @try { 
      NSError *error; 

      _avSession = [[AVCaptureSession alloc] init]; 
      [_avSession beginConfiguration]; 
      [_avSession setSessionPreset:AVCaptureSessionPreset640x480]; 

      // get the default video device; bail out if there is no camera 
      self.videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; 
      if (self.videoDevice == nil) return NO; 

      AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:self.videoDevice error:&error]; 
      if (input == nil) return NO; 
      [_avSession addInput:input]; 

      dispatch_queue_t sampleBufferQueue = dispatch_queue_create("CameraMulticaster", DISPATCH_QUEUE_SERIAL); 

      AVCaptureVideoDataOutput *dataOutput = [[AVCaptureVideoDataOutput alloc] init]; 
      [dataOutput setAlwaysDiscardsLateVideoFrames:YES]; 
      // BGRA matches the MTKView's pixel format, so no conversion is needed later 
      [dataOutput setVideoSettings:@{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA)}]; 
      [dataOutput setSampleBufferDelegate:self queue:sampleBufferQueue]; 

      [_avSession addOutput:dataOutput]; 
      [_avSession commitConfiguration]; 
      return YES; // do not return from @finally: it would override the NO returned on failure 
    } @catch (NSException *exception) { 
      NSLog(@"%s - %@", __PRETTY_FUNCTION__, exception.description); 
      return NO; 
    } 
} 

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection 
{ 
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); 
    size_t width = CVPixelBufferGetWidth(pixelBuffer); 
    size_t height = CVPixelBufferGetHeight(pixelBuffer); 

    // wrap the camera pixel buffer in a Metal texture via the texture cache 
    CVMetalTextureRef texture = NULL; 
    CVReturn status = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _textureCache, pixelBuffer, NULL, MTLPixelFormatBGRA8Unorm, width, height, 0, &texture); 
    if (status == kCVReturnSuccess) 
    { 
     _metalView.drawableSize = CGSizeMake(width, height); 
     _texture = CVMetalTextureGetTexture(texture); 
     CFRelease(texture); 
    } 
} 

- (void)drawInMTKView:(MTKView *)view { 
    // only draw once we have a camera frame and a drawable to render into 
    if (_texture && view.currentDrawable) { 
     id<MTLCommandBuffer> commandBuffer = [_commandQueue commandBuffer]; 
     id<MTLTexture> drawingTexture = view.currentDrawable.texture; 

     // encode a Gaussian blur from the camera texture into the drawable; 
     // MPS kernels create their own command encoders internally 
     MPSImageGaussianBlur *filter = [[MPSImageGaussianBlur alloc] initWithDevice:_device sigma:5]; 

     [filter encodeToCommandBuffer:commandBuffer sourceTexture:_texture destinationTexture:drawingTexture]; 

     // present and commit the frame 
     [commandBuffer presentDrawable:view.currentDrawable]; 
     [commandBuffer commit]; 
     _texture = nil; 
    } 
} 

- (void)mtkView:(MTKView *)view drawableSizeWillChange:(CGSize)size { 
    // required by MTKViewDelegate; the drawable size is set manually above 
} 

@end
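
One caveat about the code above: Metal Performance Shaders are not available on every device that runs Metal (as far as I know, the framework requires an A8 GPU or newer, so A7 devices like the iPhone 5s are excluded). It is worth guarding the filter with the framework's MPSSupportsMTLDevice() check, for example in viewDidLoad; a minimal sketch:

// verify MPS support before relying on MPSImageGaussianBlur 
if (!MPSSupportsMTLDevice(_device)) { 
    NSLog(@"MetalPerformanceShaders is not supported on this device"); 
    // fall back to a plain pass-through (e.g. a blit) instead of the blur 
} 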