2012-09-14 27 views
0

我試圖從android device.my java方法中調用一些java方法,我通過axis2 web服務訪問。從android中調用使用外部Jar的web服務方法時出錯

這裏是我完整的Java類,我寫了兩個方法從Android設備調用。不知怎的,它只是調用get_wav_byte()方法成功,但另一種方法在java端給我的錯誤如下

java.lang.Thread.run(Thread.java:662) Caused by: java.lang.NoClassDefFoundError: edu/cmu/sphinx/util/props/ConfigurationManager

這是我的外部jar庫中的類,我正在使用recognize_wave(String wavePath)方法。 我也檢查過,edu/cmu/sphinx/util/props/ConfigurationManager在我包含的jar文件中確實存在,但它仍然給我錯誤信息。我向你提供了我的完整Java和Android代碼,如下所示

JAVA METHOD :

package edu.cmu.sphinx.demo.transcriber; 




import edu.cmu.sphinx.frontend.util.AudioFileDataSource; 
import edu.cmu.sphinx.recognizer.Recognizer; 
import edu.cmu.sphinx.result.Result; 
//import edu.cmu.sphinx.util.props.ConfigurationManager; 
import edu.cmu.sphinx.util.props.ConfigurationManager; 


import java.io.File; 
import java.io.FileOutputStream; 
import java.io.IOException; 

import java.net.MalformedURLException; 
import java.net.URL; 

import javax.sound.sampled.UnsupportedAudioFileException; 

/** A simple example that shows how to transcribe a continuous audio file that has multiple utterances in it. */ 
/** A simple example that shows how to transcribe a continuous audio file that has multiple utterances in it. */
public class Transcriber {

    /**
     * Runs Sphinx-4 recognition over the WAV file addressed by the given URL string.
     *
     * @param wavePath a URL string (e.g. {@code "file:///D:/Sound/foo.wav"}) pointing at the audio
     * @return the text of the last decoded utterance, or {@code ""} if nothing was recognized
     * @throws MalformedURLException if {@code wavePath} is not a valid URL
     */
    public static String recognize_wave(String wavePath) throws MalformedURLException {
        String resultText = "";
        URL audioURL = new URL(wavePath);

        // config.xml must be on the classpath next to this class. NOTE(review): the reported
        // NoClassDefFoundError for ConfigurationManager means the sphinx jar is missing from the
        // Axis2 service's runtime classpath (WEB-INF/lib or the service .aar's lib/ folder) —
        // having it on the compile-time build path alone is not enough.
        URL configURL = Transcriber.class.getResource("config.xml");

        ConfigurationManager cm = new ConfigurationManager(configURL);
        Recognizer recognizer = (Recognizer) cm.lookup("recognizer");

        // Allocate the acoustic/language model resources needed by the recognizer.
        recognizer.allocate();
        try {
            // Point the recognizer's front end at the audio file.
            AudioFileDataSource dataSource = (AudioFileDataSource) cm.lookup("audioFileDataSource");
            dataSource.setAudioFile(audioURL, null);

            // Loop until the last utterance in the audio file has been decoded,
            // in which case the recognizer returns null.
            Result result;
            while ((result = recognizer.recognize()) != null) {
                resultText = result.getBestResultNoFiller();
                System.out.println(resultText);
            }
        } finally {
            // BUG FIX: the original never released the recognizer, leaking model
            // resources on every call in a long-lived Axis2 service.
            recognizer.deallocate();
        }
        return resultText;
    }

    /**
     * Web-service entry point: persists the uploaded WAV bytes to {@code path} and
     * returns the recognized text.
     *
     * @param wavbite raw bytes of a WAV file uploaded by the client
     * @param path    server-local filesystem path to write the audio to
     * @return recognized text, or {@code "null"} if writing/recognition failed
     * @throws IOException declared for API compatibility; write errors are logged, not thrown
     */
    public String get_wav_byte(byte[] wavbite, String path) throws IOException {
        String result1 = "null";

        FileOutputStream out = null;
        try {
            File dstFile = new File(path);
            out = new FileOutputStream(dstFile);
            out.write(wavbite, 0, wavbite.length);
        } catch (IOException e) {
            System.out.println("IOException : " + e);
        } finally {
            // BUG FIX: close in finally — the original leaked the stream when write() threw.
            if (out != null) {
                try {
                    out.close();
                } catch (IOException ignored) {
                    // best-effort close; the write outcome was already reported above
                }
            }
        }

        try {
            // BUG FIX: recognize_wave() feeds its argument to new URL(...), but 'path' is a
            // bare filesystem path ("D:\\sound\\..."), which always threw
            // MalformedURLException. Convert it to a proper file: URL first.
            result1 = recognize_wave(new File(path).toURI().toString());
        } catch (MalformedURLException e) {
            e.printStackTrace();
        }

        return result1;
    }
}

和我用KSOAP2調用該方法的Android代碼如下:

package com.varma.samples.audiorecorder; 

import java.io.BufferedInputStream; 
import java.io.ByteArrayOutputStream; 
import java.io.File; 
import java.io.FileInputStream; 
import java.io.FileNotFoundException; 
import java.io.FileOutputStream; 
import java.io.IOException; 
import java.io.InputStream; 

import org.ksoap2.SoapEnvelope; 
import org.ksoap2.SoapFault; 
import org.ksoap2.serialization.MarshalBase64; 
import org.ksoap2.serialization.SoapObject; 
import org.ksoap2.serialization.SoapSerializationEnvelope; 
import org.ksoap2.transport.HttpTransportSE; 

import android.annotation.SuppressLint; 
import android.app.Activity; 
import android.media.AudioFormat; 
import android.media.AudioRecord; 
import android.media.MediaRecorder; 
import android.os.Bundle; 
import android.os.Environment; 
import android.util.Base64; 
import android.util.Log; 
import android.view.View; 
import android.view.ViewDebug.FlagToString; 
import android.widget.Button; 
import android.widget.TextView; 

public class RecorderActivity extends Activity { 
    private static final int RECORDER_BPP =16; 
    private static final String AUDIO_RECORDER_FILE_EXT_WAV = ".wav"; 
    private static final String AUDIO_RECORDER_FOLDER = "AudioRecorder"; 
    private static final String AUDIO_RECORDER_TEMP_FILE = "record_temp.raw"; 
    private static String AUDIO_WAV_FILE = ""; 
    private static final int RECORDER_SAMPLERATE = 16000; 
    private static final int RECORDER_CHANNELS = AudioFormat.CHANNEL_CONFIGURATION_MONO; 
    private static final int RECORDER_AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT; 

    private AudioRecord recorder = null; 
    private int bufferSize = 0; 
    private Thread recordingThread = null; 
    private boolean isRecording = false; 

    @SuppressLint("NewApi") 
    @Override 
    public void onCreate(Bundle savedInstanceState) { 
     super.onCreate(savedInstanceState); 
     setContentView(R.layout.main); 

     setButtonHandlers(); 
     enableButtons(false); 







     bufferSize = AudioRecord.getMinBufferSize(RECORDER_SAMPLERATE,RECORDER_CHANNELS,RECORDER_AUDIO_ENCODING); 
    } 

    private void setButtonHandlers() { 
     ((Button)findViewById(R.id.btnStart)).setOnClickListener(btnClick); 
     ((Button)findViewById(R.id.btnStop)).setOnClickListener(btnClick); 
    } 

    private void enableButton(int id,boolean isEnable){ 
     ((Button)findViewById(id)).setEnabled(isEnable); 
    } 

    private void enableButtons(boolean isRecording) { 
     enableButton(R.id.btnStart,!isRecording); 
     enableButton(R.id.btnStop,isRecording); 
    } 

    private String getFilename(){ 
     String filepath = Environment.getExternalStorageDirectory().getPath(); 
     File file = new File(filepath,AUDIO_RECORDER_FOLDER); 

     if(!file.exists()){ 
      file.mkdirs(); 
     } 

     return (file.getAbsolutePath() + "/" + System.currentTimeMillis() + AUDIO_RECORDER_FILE_EXT_WAV); 
    } 

    private String getTempFilename(){ 
     String filepath = Environment.getExternalStorageDirectory().getPath(); 
     File file = new File(filepath,AUDIO_RECORDER_FOLDER); 

     if(!file.exists()){ 
      file.mkdirs(); 
     } 

     File tempFile = new File(filepath,AUDIO_RECORDER_TEMP_FILE); 

     if(tempFile.exists()) 
      tempFile.delete(); 

     return (file.getAbsolutePath() + "/" + AUDIO_RECORDER_TEMP_FILE); 
    } 

    @SuppressLint({ "NewApi", "NewApi" }) 
    private void startRecording(){ 
     recorder = new AudioRecord(MediaRecorder.AudioSource.MIC, 
         RECORDER_SAMPLERATE, RECORDER_CHANNELS,RECORDER_AUDIO_ENCODING, bufferSize); 

     recorder.startRecording(); 

     isRecording = true; 

     recordingThread = new Thread(new Runnable() { 

      @Override 
      public void run() { 
       writeAudioDataToFile(); 
      } 
     },"AudioRecorder Thread"); 

     recordingThread.start(); 
    } 

    @SuppressLint({ "NewApi", "NewApi", "NewApi" }) 
    private void writeAudioDataToFile(){ 
     byte data[] = new byte[bufferSize]; 
     String filename = getTempFilename(); 
     FileOutputStream os = null; 

     try { 
      os = new FileOutputStream(filename); 
     } catch (FileNotFoundException e) { 
      // TODO Auto-generated catch block 
      e.printStackTrace(); 
     } 

     int read = 0; 

     if(null != os){ 
      while(isRecording){ 
       read = recorder.read(data, 0, bufferSize); 

       if(AudioRecord.ERROR_INVALID_OPERATION != read){ 
        try { 
         os.write(data); 
        } catch (IOException e) { 
         e.printStackTrace(); 
        } 
       } 
      } 

      try { 
       os.close(); 
      } catch (IOException e) { 
       e.printStackTrace(); 
      } 
     } 
    } 

    @SuppressLint({ "NewApi", "NewApi" }) 
    private void stopRecording(){ 
     if(null != recorder){ 
      isRecording = false; 

      recorder.stop(); 
      recorder.release(); 

      recorder = null; 
      recordingThread = null; 
     } 

     copyWaveFile(getTempFilename(),getFilename()); 
     deleteTempFile(); 
    } 

    private void deleteTempFile() { 
     File file = new File(getTempFilename()); 

     file.delete(); 
    } 

    @SuppressLint("NewApi") 
    private void copyWaveFile(String inFilename,String outFilename){ 
     FileInputStream in = null; 
     FileOutputStream out = null; 
     long totalAudioLen = 0; 
     long totalDataLen = totalAudioLen + 36; 
     long longSampleRate = 16000; 
     int channels = 1; 
     long byteRate = RECORDER_BPP * RECORDER_SAMPLERATE * channels/8; 
    /// long byteRate = 256; 

     byte[] data = new byte[bufferSize]; 

     try { 
      in = new FileInputStream(inFilename); 
      out = new FileOutputStream(outFilename); 
      totalAudioLen = in.getChannel().size(); 
      totalDataLen = totalAudioLen + 36; 

      AppLog.logString("File size: " + totalDataLen); 

      WriteWaveFileHeader(out, totalAudioLen, totalDataLen, 
        longSampleRate, channels, byteRate); 

      while(in.read(data) != -1){ 
       out.write(data); 
      } 



      in.close(); 
      out.close(); 
      ////////////////// 


      AUDIO_WAV_FILE=outFilename; 
      ///////////////// 


     } catch (FileNotFoundException e) { 
      e.printStackTrace(); 
     } catch (IOException e) { 
      e.printStackTrace(); 
     } 
    } 
/////////////read wav file and convert to byte//////////////////// 
    public static byte[] getBytesFromFile(File file) throws IOException { 

     /* 
     InputStream is = new FileInputStream(file); 

     // Get the size of the file 
     long length = file.length(); 

     // You cannot create an array using a long type. 
     // It needs to be an int type. 
     // Before converting to an int type, check 
     // to ensure that file is not larger than Integer.MAX_VALUE. 
     if (length > Integer.MAX_VALUE) { 
      // File is too large 
     } 

     // Create the byte array to hold the data 
     byte[] bytes = new byte[(int)length]; 

     // Read in the bytes 
     int offset = 0; 
     int numRead = 0; 
     while (offset < bytes.length 
       && (numRead=is.read(bytes, offset, bytes.length-offset)) >= 0) { 
      offset += numRead; 
     } 

     // Ensure all the bytes have been read in 
     if (offset < bytes.length) { 
      throw new IOException("Could not completely read file "+file.getName()); 
     } 

     // Close the input stream and return bytes 
     is.close(); 
     return bytes; 
     */ 
     ByteArrayOutputStream out = new ByteArrayOutputStream(); 
     BufferedInputStream in = new BufferedInputStream(new FileInputStream(file)); 

     int read; 
     byte[] buff = new byte[1024]; 
     while ((read = in.read(buff)) > 0) 
     { 
      out.write(buff, 0, read); 
     } 
     out.flush(); 
     byte[] audioBytes = out.toByteArray(); 


     return audioBytes; 



    } 

    ////////////////////////////////////// 
    private void WriteWaveFileHeader(
      FileOutputStream out, long totalAudioLen, 
      long totalDataLen, long longSampleRate, int channels, 
      long byteRate) throws IOException { 

     byte[] header = new byte[44]; 

     header[0] = 'R'; // RIFF/WAVE header 
     header[1] = 'I'; 
     header[2] = 'F'; 
     header[3] = 'F'; 
     header[4] = (byte) (totalDataLen & 0xff); 
     header[5] = (byte) ((totalDataLen >> 8) & 0xff); 
     header[6] = (byte) ((totalDataLen >> 16) & 0xff); 
     header[7] = (byte) ((totalDataLen >> 24) & 0xff); 
     header[8] = 'W'; 
     header[9] = 'A'; 
     header[10] = 'V'; 
     header[11] = 'E'; 
     header[12] = 'f'; // 'fmt ' chunk 
     header[13] = 'm'; 
     header[14] = 't'; 
     header[15] = ' '; 
     header[16] = 16; // 4 bytes: size of 'fmt ' chunk 
     header[17] = 0; 
     header[18] = 0; 
     header[19] = 0; 
     header[20] = 1; // format = 1 
     header[21] = 0; 
     header[22] = (byte) channels; 
     header[23] = 0; 
     header[24] = (byte) (longSampleRate & 0xff); 
     header[25] = (byte) ((longSampleRate >> 8) & 0xff); 
     header[26] = (byte) ((longSampleRate >> 16) & 0xff); 
     header[27] = (byte) ((longSampleRate >> 24) & 0xff); 
     header[28] = (byte) (byteRate & 0xff); 
     header[29] = (byte) ((byteRate >> 8) & 0xff); 
     header[30] = (byte) ((byteRate >> 16) & 0xff); 
     header[31] = (byte) ((byteRate >> 24) & 0xff); 
     header[32] = (byte) (2 * 16/8); // block align 
     header[33] = 0; 
     header[34] = RECORDER_BPP; // bits per sample 
     header[35] = 0; 
     header[36] = 'd'; 
     header[37] = 'a'; 
     header[38] = 't'; 
     header[39] = 'a'; 
     header[40] = (byte) (totalAudioLen & 0xff); 
     header[41] = (byte) ((totalAudioLen >> 8) & 0xff); 
     header[42] = (byte) ((totalAudioLen >> 16) & 0xff); 
     header[43] = (byte) ((totalAudioLen >> 24) & 0xff); 

     out.write(header, 0, 44); 


    } 

    private View.OnClickListener btnClick = new View.OnClickListener() { 
     @Override 
     public void onClick(View v) { 
      switch(v.getId()){ 
       case R.id.btnStart:{ 
        AppLog.logString("Start Recording"); 





        enableButtons(true); 





        startRecording(); 

        break; 
       } 
       case R.id.btnStop:{ 
        AppLog.logString("Start Recording"); 

        enableButtons(false); 
        stopRecording(); 

        File source_for_byte=new File(AUDIO_WAV_FILE); 
         byte[] temp = new byte[(int) source_for_byte.length()]; 




         try { 
          temp=getBytesFromFile(source_for_byte); 
         } catch (IOException e) { 
          // TODO Auto-generated catch block 
          e.printStackTrace(); 
         } 



         //byte[] strBase64 = Base64.encode(temp, Base64.NO_WRAP); 
         //Request.addProperty("image", strBase64); 
         //////////////////////WebService Activity /////////////////////// 

         String METHOD_NAME = ""; 
         // our webservice method name 
         String NAMESPACE = "http://test.com"; 
         String SOAP_ACTION = NAMESPACE + METHOD_NAME; 
         // NAMESPACE + method name 


         //final String URL = "http://192.168.3.106:8080/axis2/services/speechmain?wsdl"; 

         final String URL="http://192.168.3.106:8080/axis2/services/VoiceService?wsdl"; 

          METHOD_NAME = "get_wav_byte"; 
          try { 
           SoapObject request = new SoapObject(NAMESPACE, METHOD_NAME); 


          request.addProperty("wavbite", temp); 

          request.addProperty("path", "D:\\sound\\latest_recognizer.wav"); 
          SoapSerializationEnvelope envelope = new SoapSerializationEnvelope(
            SoapEnvelope.VER11); 

          new MarshalBase64().register(envelope); // serialization 

          envelope.encodingStyle = SoapEnvelope.ENC; 
           envelope.dotNet = true; 
           envelope.setOutputSoapObject(request); 
           HttpTransportSE androidHttpTransport = new HttpTransportSE(URL); 
           androidHttpTransport.call(SOAP_ACTION, envelope); 
           Object result = envelope.getResponse(); 

           // Object result = (SoapObject) envelope.bodyIn; 







           ((TextView) findViewById(R.id.gettext1)).setText("NUMBER IS :-> " 
             + result.toString()); 

          } catch (Exception E) { 
           E.printStackTrace(); 
           ((TextView) findViewById(R.id.gettext1)).setText("ERROR:" 
             + E.getClass().getName() + ":" + E.getMessage()); 
          } 




         ///////////////////////// 












        break; 
       } 
      } 
     } 
    }; 
} 

我所理解的是,它不知怎的不起作用,因爲我在公共靜態方法String recognize_wave(String wavePath)中使用了外部jar文件。

我搜索了很多,但仍然沒有很好的指導又是可利用的。

希望你能幫助我..

在此先感謝..

+0

把jar(庫)文件放到libs文件夾中應用... – Shiva

+0

感謝您的迴應,但我已經完成了這部分,沒有得到任何東西。 –

回答

0

檢查以確保您的外部JAR文件包含在您的構建路徑中以及位於您的libs文件夾中。如果您沒有libs文件夾,您可以在項目的根目錄中創建一個文件夾。然後複製並粘貼你的jar文件到這個位置。然後在Eclipse中右鍵單擊jar文件並轉至Build Path,然後單擊Add To Build Path。如果你在你的libs文件夾中有jar文件,你不會得到任何編譯器錯誤。但那只是編譯時間。如果jar不在構建路徑中,它將在運行時失敗。

相關問題