
Answer


I don't think you can configure which depth range gets recorded, but what you can do is process the information you read back from the .oni recording:

import SimpleOpenNI.*;

SimpleOpenNI context;
boolean recordFlag = true;   // true = record to .oni, false = play back .oni and export .ply frames
boolean saving = false;
int frames = 0;
int savedFrames = 0;
// change these two values as you wish (depth range in millimetres):
float minZ = 100;
float maxZ = 500;

void setup() {
  context = new SimpleOpenNI(this);

  if (!recordFlag) {
    // playback: open the existing recording
    if (!context.openFileRecording("test.oni")) {
      println("can't find recording !!!!");
      exit();
    }
    context.enableDepth();
  } else {
    // recording
    context.enableDepth();
    // setup the recording
    context.enableRecorder(SimpleOpenNI.RECORD_MEDIUM_FILE, "test.oni");
    // select the recording channels
    context.addNodeToRecording(SimpleOpenNI.NODE_DEPTH, SimpleOpenNI.CODEC_16Z_EMB_TABLES);
  }
  // set window size
  if ((context.nodes() & SimpleOpenNI.NODE_DEPTH) != 0)
    size(context.depthWidth(), context.depthHeight());
  else
    exit();
}

void draw() {
  background(0);
  context.update();
  if ((context.nodes() & SimpleOpenNI.NODE_DEPTH) != 0) image(context.depthImage(), 0, 0);
  if (recordFlag) frames++;
  if (saving && savedFrames < frames) {
    delay(3000); // hack: give playback time to advance to the next frame
    int i = savedFrames;
    int w = context.depthWidth();
    int h = context.depthHeight();
    // collect the points inside the depth range first, so the PLY header
    // can declare the correct vertex count
    PVector[] realWorld = context.depthMapRealWorld();
    StringBuilder points = new StringBuilder();
    int numPoints = 0;
    for (int y = 0; y < h; y++) {
      for (int x = 0; x < w; x++) {
        PVector p = realWorld[x + y * w];
        if (p.z > minZ && p.z < maxZ) {
          points.append(p.x).append(' ').append(p.y).append(' ').append(p.z).append('\n');
          numPoints++;
        }
      }
    }
    PrintWriter output = createWriter(dataPath("frame_" + i + ".ply"));
    output.println("ply");
    output.println("format ascii 1.0");
    output.println("element vertex " + numPoints);
    output.println("property float x");
    output.println("property float y");
    output.println("property float z");
    output.println("end_header");
    output.print(points.toString());
    output.flush();
    output.close();
    rect(random(width), random(height), 100, 100); // visual feedback that a frame was written
    println("saved " + (i + 1) + " of " + frames);
    savedFrames++;
  }
}

void keyPressed() {
  if (key == ' ') {
    if (recordFlag) {
      // store how many frames were recorded, then quit
      saveStrings(dataPath("frames.txt"), new String[]{ str(frames) });
      exit();
    } else saveONIToPLY();
  }
}

void saveONIToPLY() {
  frames = int(loadStrings(dataPath("frames.txt"))[0]);
  saving = true;
  println("saving " + frames + " frames");
}
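
To sanity-check the exported files, a minimal viewer sketch along these lines can parse one of the ASCII PLY frames back in and draw the filtered points. This is only a sketch under assumptions: the file name frame_0.ply and the y/z flip are mine, not part of the answer above, and the file is expected to sit in the viewer sketch's data folder.

// minimal viewer for one exported frame (assumes data/frame_0.ply exists)
ArrayList<PVector> points = new ArrayList<PVector>();

void setup() {
  size(640, 480, P3D);
  String[] lines = loadStrings("frame_0.ply");
  // skip the header: everything up to and including "end_header"
  int start = 0;
  while (!lines[start].trim().equals("end_header")) start++;
  // every remaining line is "x y z" in millimetres
  for (int i = start + 1; i < lines.length; i++) {
    String[] xyz = splitTokens(lines[i]);
    if (xyz.length == 3)
      points.add(new PVector(float(xyz[0]), float(xyz[1]), float(xyz[2])));
  }
  println("loaded " + points.size() + " points");
}

void draw() {
  background(0);
  stroke(255);
  translate(width/2, height/2, 0);
  // flip y and z so the cloud roughly matches the depth image orientation
  for (PVector p : points) point(p.x, -p.y, -p.z);
}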

I think you're right. I found an openFrameworks codebase that lets me take the ONI file and export it at a certain z depth, but this code is great for visualizing it. Thanks! – mheavers 2012-07-24 16:18:35


Cool, glad you can use openFrameworks; it's much faster for Kinect stuff. For more advanced point cloud processing you could also take a look at the [Point Cloud Library](http://pointclouds.org/), but bear in mind it isn't very straightforward to set up (it has a lot of dependencies). Your project sounds interesting; it would be cool to see it online at some point :) Good luck! – 2012-07-24 16:24:03


Sure, I'll send you some of what I'm working on as soon as I can. – mheavers 2012-07-25 00:05:40