2013-12-13 55 views
0

編輯: 我已更改我的服務代碼以實現作爲啓動服務,而不是IntentService作爲更新StreamService.java低於 現在,我收到錯誤有關權限拒絕錯誤,如StreamService後logcat消息中所述的.java權限拒絕錯誤 - SpeechRecognizer作爲連續服務? (android.permission.INTERACT_ACROSS_USERS_FULL)

編輯:

As mentioned on the Android Developer site, the SpeechRecognizer API must be invoked from the application's main thread. Is there any workaround with which I can get it working?

我已經實現了具有所有UI組件MainActivity類別。類是如下

CODE - MainActivity.java

package com.example.speechsensorservice; 

import android.app.Activity; 
import android.content.BroadcastReceiver; 
import android.content.Context; 
import android.content.Intent; 
import android.content.IntentFilter; 
import android.os.Bundle; 
import android.util.Log; 
import android.view.Menu; 
import android.view.View; 
import android.widget.ImageButton; 
import android.widget.TextView; 
import android.widget.Toast; 

/**
 * UI entry point for the speech-sensor demo.
 *
 * <p>Shows the most recently recognized text and a button that starts
 * {@link StreamService}, the started service that runs continuous speech
 * recognition. While resumed, the activity listens for two broadcasts:
 * <ul>
 *   <li>{@link Intent#ACTION_HEADSET_PLUG} — starts the service when a
 *       headset is plugged in;</li>
 *   <li>{@code com.example.speechsensorservice.TEXT} — carries the
 *       recognized text (or {@code "NA"} when recognition is unsupported
 *       on the device) in the {@code "Identity"} extra.</li>
 * </ul>
 */
public class MainActivity extends Activity {

    private static final String TAG = "SpeechSensor";

    /** Broadcast action used by StreamService to deliver recognized text. */
    private static final String TEXT_ACTION = "com.example.speechsensorservice.TEXT";

    /** Extra key carrying the recognized text in a TEXT_ACTION broadcast. */
    private static final String EXTRA_IDENTITY = "Identity";

    /** Tracks headset state so the service is started only on a plug-in edge. */
    private boolean headsetConnected = false;

    public TextView txtText;

    private BroadcastReceiver mReceiver;
    private ImageButton btnSpeak;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        txtText = (TextView) findViewById(R.id.txtText);
        btnSpeak = (ImageButton) findViewById(R.id.btnSpeak);

        btnSpeak.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // Reuse the single service-start helper instead of
                // duplicating the Intent construction here.
                startNoiseProcessService();
            }
        });
    }

    @Override
    protected void onResume() {
        super.onResume();

        IntentFilter sIF = new IntentFilter();
        sIF.addAction(Intent.ACTION_HEADSET_PLUG);
        sIF.addAction(TEXT_ACTION);
        mReceiver = new BroadcastReceiver() {

            @Override
            public void onReceive(Context context, Intent intent) {
                String action = intent.getAction();
                Log.d(TAG, "Received Action = " + action);
                if (Intent.ACTION_HEADSET_PLUG.equals(action)) {
                    // "state" extra: 1 = plugged in, 0 = unplugged.
                    if (intent.hasExtra("state")
                            && !headsetConnected
                            && intent.getIntExtra("state", 0) == 1) {
                        headsetConnected = true;
                        txtText.setText("Headset Plugged in");
                        startNoiseProcessService();
                    }
                } else if (TEXT_ACTION.equals(action)) {
                    if (intent.hasExtra(EXTRA_IDENTITY)) {
                        String s = intent.getStringExtra(EXTRA_IDENTITY);
                        if ("NA".equals(s)) {
                            // Service signals "NA" when SpeechRecognizer is
                            // unavailable on this device.
                            Toast.makeText(getApplicationContext(),
                                    "Your device does not support Speech to Text",
                                    Toast.LENGTH_SHORT).show();
                        } else {
                            txtText.setText(s);
                        }
                    }
                }
            }

        };

        this.registerReceiver(mReceiver, sIF);
    }

    @Override
    protected void onPause() {
        super.onPause();
        // Guard against a double-unregister if onPause ever runs without a
        // matching onResume (e.g. after process restore edge cases).
        if (mReceiver != null) {
            this.unregisterReceiver(mReceiver);
            mReceiver = null;
        }
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.main, menu);
        return true;
    }

    /** Starts {@link StreamService}, which begins speech recognition. */
    public void startNoiseProcessService() {
        Intent intent = new Intent(this, StreamService.class);
        startService(intent);
    }

}

,我已經實現了啓動語音識別服務是通過繼承IntentService類後臺任務另一類。具體的實現是下面

碼 - StreamService.java

package com.example.speechsensorservice; 

import java.util.ArrayList; 

import android.app.Service; 
import android.content.BroadcastReceiver; 
import android.content.Context; 
import android.content.Intent; 
import android.content.IntentFilter; 
import android.os.Bundle; 
import android.os.IBinder; 
import android.speech.RecognitionListener; 
import android.speech.RecognizerIntent; 
import android.speech.SpeechRecognizer; 
import android.util.Log; 

/**
 * Started (non-bound) service that runs Android speech recognition and
 * broadcasts results back to the UI.
 *
 * <p>Lifecycle: {@code onStartCommand} calls {@link #startStreaming()}
 * directly. A plain {@link Service}'s {@code onStartCommand} runs on the
 * main thread, which is required — {@link SpeechRecognizer} throws
 * {@code RuntimeException} when used from a worker thread (this is why the
 * earlier IntentService/worker-thread approach failed).
 *
 * <p>Results are broadcast with action
 * {@code com.example.speechsensorservice.TEXT} and the text in the
 * {@code "Identity"} extra ({@code "NA"} when recognition is unavailable).
 * The service stops itself when the headset is unplugged.
 */
public class StreamService extends Service {
    private static final String TAG = "SpeechSensor";
    /** Broadcast action used to deliver recognized text to the UI. */
    private static final String ACTION = "com.example.speechsensorservice.TEXT";
    /** Extra key for the recognized text; must match what MainActivity reads. */
    private static final String EXTRA_IDENTITY = "Identity";

    private SpeechRecognizer sr;

    private BroadcastReceiver sReceiver;

    /** Assume a headset is attached at start; cleared on the unplug event. */
    private boolean headsetConnected = true;

    String text;

    @Override
    public IBinder onBind(Intent arg0) {
        // Started service only; binding is not supported.
        return null;
    }

    @Override
    public void onCreate() {
        Log.d(TAG, "onCreate() StreamService Method");
        super.onCreate();
        // Stop recognizing as soon as the headset is unplugged.
        sReceiver = new BroadcastReceiver() {
            @Override
            public void onReceive(Context context, Intent intent) {
                if (Intent.ACTION_HEADSET_PLUG.equals(intent.getAction())
                        && intent.hasExtra("state")
                        && headsetConnected
                        && intent.getIntExtra("state", 0) == 0) {
                    headsetConnected = false;
                    stopStreaming();
                }
            }
        };
        this.registerReceiver(sReceiver, new IntentFilter(Intent.ACTION_HEADSET_PLUG));
    }

    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        Log.d(TAG, "Inside onStartCommand()");
        // Must run on the main thread — SpeechRecognizer rejects worker
        // threads. onStartCommand of a plain Service already runs there.
        startStreaming();
        return Service.START_STICKY;
    }

    @Override
    public void onDestroy() {
        Log.d(TAG, "onDestroy() StreamService Method");
        super.onDestroy();
        this.unregisterReceiver(this.sReceiver);
    }

    /**
     * Creates the recognizer (if available) and starts listening.
     * Broadcasts {@code "NA"} when the device has no recognition support.
     */
    public void startStreaming() {
        Log.d(TAG, "Inside startStreaming()");
        text = "";
        if (!SpeechRecognizer.isRecognitionAvailable(this)) {
            Log.d(TAG, "Not Applicable with your device");
            text = "NA";
            Intent result = new Intent(ACTION);
            result.putExtra(EXTRA_IDENTITY, text);
            sendBroadcast(result);
        } else {
            Log.d(TAG, "started taking input");
            sr = SpeechRecognizer.createSpeechRecognizer(this.getApplicationContext());

            Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
            intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, "en-US");

            sr.setRecognitionListener(new mylistener());
            sr.startListening(intent);
        }
    }

    /** Cancels and releases the recognizer, then stops the service. */
    public void stopStreaming() {
        if (sr == null) return;
        Log.d(TAG, "stopped taking input");
        sr.cancel();
        sr.destroy();
        sr = null;
        this.stopSelf();
    }

    /** @return true while a recognizer instance is alive. */
    public boolean isStreaming() {
        Log.d(TAG, "isStreaming : YES");
        return sr != null;
    }

    class mylistener implements RecognitionListener {

        @Override
        public void onBeginningOfSpeech() {
            Log.d(TAG, "onBeginningOfSpeech");
        }

        @Override
        public void onBufferReceived(byte[] buffer) {
            // Not used.
        }

        @Override
        public void onEndOfSpeech() {
            Log.d(TAG, "onEndOfSpeech");
        }

        @Override
        public void onError(int error) {
            // Log instead of silently swallowing the failure code
            // (see SpeechRecognizer.ERROR_* constants).
            Log.d(TAG, "onError code=" + error);
        }

        @Override
        public void onEvent(int eventType, Bundle params) {
            // Not used.
        }

        @Override
        public void onPartialResults(Bundle partialResults) {
            // Not used.
        }

        @Override
        public void onReadyForSpeech(Bundle params) {
            Log.d(TAG, "onReadyForSpeech");
        }

        @Override
        public void onResults(Bundle results) {
            Log.d(TAG, "Got Results");
            ArrayList<String> al =
                    results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
            // Guard against a null/empty result list (would NPE otherwise).
            if (al == null || al.isEmpty()) {
                Log.d(TAG, "onResults: empty result list");
                return;
            }
            text = al.get(0);
            for (int i = 0; i < al.size(); i++) {
                Log.d(TAG, "result=" + al.get(i));
            }
            // BUG FIX: was putExtra("Identifier", ...) — MainActivity and the
            // NA path both use "Identity", so results were never displayed.
            Intent intent = new Intent(ACTION);
            intent.putExtra(EXTRA_IDENTITY, text);
            sendBroadcast(intent);
        }

        @Override
        public void onRmsChanged(float rmsdB) {
            // Not used.
        }

    }

}

在這裏,我得到錯誤java.lang.RuntimeException: SpeechRecognizer should be used only from the application's main thread

碼流是這樣的:

ImageButton -> onClick() -> start StreamService.class -> onCreate() -> onStartCommand() -> call startStreaming() -> error is thrown inside the service

LogCat消息:

12-13 17:03:24.822 794 7381 E DatabaseUtils: Writing exception to parcel 
12-13 17:03:24.822 794 7381 E DatabaseUtils: java.lang.SecurityException: Permission Denial: get/set setting for user asks to run as user -2 but is calling from user 0; this requires android.permission.INTERACT_ACROSS_USERS_FULL 
12-13 17:03:24.822 794 7381 E DatabaseUtils:  at com.android.server.am.ActivityManagerService.handleIncomingUser(ActivityManagerService.java:12754) 
12-13 17:03:24.822 794 7381 E DatabaseUtils:  at android.app.ActivityManager.handleIncomingUser(ActivityManager.java:1998) 
12-13 17:03:24.822 794 7381 E DatabaseUtils:  at com.android.providers.settings.SettingsProvider.call(SettingsProvider.java:574) 
12-13 17:03:24.822 794 7381 E DatabaseUtils:  at android.content.ContentProvider$Transport.call(ContentProvider.java:256) 
12-13 17:03:24.822 794 7381 E DatabaseUtils:  at android.content.ContentProviderNative.onTransact(ContentProviderNative.java:256) 
12-13 17:03:24.822 794 7381 E DatabaseUtils:  at android.os.Binder.execTransact(Binder.java:351) 
12-13 17:03:24.822 794 7381 E DatabaseUtils:  at dalvik.system.NativeStart.run(Native Method) 
+0

[IntentService#onHandleIntent()](http://developer.android.com/reference/android/app/IntentService.html#onHandleIntent%28android.content.Intent%29)文件說, 「工作線程」 。使用常規服務並覆蓋'onStartCommand' – zapl

+0

仍然無法正常工作。有什麼方法可以讓主線程上下文運行speechrecognizer實例。即UI線程 – Aki008

+0

「IntentService」中除「onHandleIntent」之外的所有「Service」方法都在主線程中執行。 – zapl

回答

-2

很好的問題是自我解釋的，在logcat中的第一行告訴你，當前線程不具有執行用戶任務的權限，因此只需在清單中添加以下權限，看看它是否成功。

<uses-permission android:name="android.permission.INTERACT_ACROSS_USERS_FULL" /> 

還是讓我知道,如果我理解正確的問題

+0

不適用這個調整。 – Aki008

+1

什麼錯誤日誌現在可以發佈嗎? – Khay

+4

此權限是一個簽名級別權​​限,我們只能在我的應用程序的簽名與系統相同時使用它。 – Aki008

2

有些時候,這個特殊的錯誤實際上是一種誤導,是由其他運行問題引起的。

我記錄了一個這樣的例子here - 拋出一個NullPointerException異常報告爲相同的錯誤,儘管它與跨用戶權限無關。

在我的特殊情況下,ProGuard正在剝離出我需要的方法,導致引發NullPointerException。堆棧跟蹤是這樣的:

Permission Denial: get/set setting for user asks to run as user -2 but is calling from user 0; this requires android.permission.INTERACT_ACROSS_USERS_FULL 
java.lang.NullPointerException 
at java.lang.Enum$1.create(Enum.java:43) 
at java.lang.Enum$1.create(Enum.java:35) 
at libcore.util.BasicLruCache.get(BasicLruCache.java:54) 
at java.lang.Enum.getSharedConstants(Enum.java:209) 
at java.lang.Enum.valueOf(Enum.java:189) 
at com.my.app.package.b.c.a(Unknown Source) 
at com.my.app.package.b.a.onCreate(Unknown Source) 
at android.support.v4.app.FragmentManagerImpl.moveToState(Unknown Source) 
at android.support.v4.app.FragmentManagerImpl.moveToState(Unknown Source) 
at android.support.v4.app.BackStackRecord.run(Unknown Source) 
at android.support.v4.app.FragmentManagerImpl.execPendingActions(Unknown Source) 
at android.support.v4.app.FragmentManagerImpl$1.run(Unknown Source) 
at android.os.Handler.handleCallback(Handler.java:730) 
at android.os.Handler.dispatchMessage(Handler.java:92) 
at android.os.Looper.loop(Looper.java:137) 
at android.app.ActivityThread.main(ActivityThread.java:5455) 
at java.lang.reflect.Method.invokeNative(Native Method) 
at java.lang.reflect.Method.invoke(Method.java:525) 
at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:1187) 
at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:1003) 
at dalvik.system.NativeStart.main(Native Method) 

我還沒有在世界上的線索,爲什麼Android的翻空指針異常進入android.permission.INTERACT_ACROSS_USERS_FULL錯誤,但顯而易見的解決辦法就是調整ProGuard的配置,以便方法沒有被剝離。

我打電話的方法不是在枚舉上有「valueOf」方法。事實證明,有一些有趣的反思(我在上面的鏈接進入),但我的解決方案是將以下內容添加到我的ProGuard配置。

-keepclassmembers enum * { 
    public static **[] values(); 
    public static ** valueOf(java.lang.String); 
} 
+0

請注意,[只有鏈接的答案](http://meta.stackoverflow.com/tags/link-only-answers/info)不鼓勵,所以答案應該是搜索解決方案的終點(vs.而另一個引用的中途停留時間往往會隨着時間推移而過時)。請考慮在此添加獨立的摘要,並將鏈接保留爲參考。 – kleopatra