(1)从HeadsetClientService的connectAudio开始
HeadsetClientService指的是打电话时蓝牙需要的服务,connectAudio在这里指的是连接设备的语音
path:my22q_main\packages\apps\Bluetooth\src\com\android\bluetooth\hfpclient\HeadsetClientService.java
/**
 * Initiates an audio (SCO/voice) connection to an already-connected HFP AG device.
 * Entry point of the audio-connection flow: validates preconditions, then hands the
 * work to the device's state machine asynchronously via a CONNECT_AUDIO message.
 *
 * @param device the remote Bluetooth device to open the audio path to
 * @return true if the CONNECT_AUDIO request was queued; false if no state machine
 *         exists for the device, the device is not connected, or audio is already on
 */
boolean connectAudio(BluetoothDevice device) {
enforceCallingOrSelfPermission(BLUETOOTH_ADMIN_PERM, "Need BLUETOOTH_ADMIN permission");
HeadsetClientStateMachine sm = getStateMachine(device);
if (sm == null) {
Log.e(TAG, "Cannot allocate SM for device " + device);
return false;
}
if (!sm.isConnected()) {// not connected to the device yet -> cannot open audio, return false
return false;
}
if (sm.isAudioOn()) {// audio path is already established -> nothing to do, return false
return false;
}
// Asynchronous: actually handled in the state machine's Connected state.
sm.sendMessage(HeadsetClientStateMachine.CONNECT_AUDIO);
return true;
}
2,来到HeadsetClientStateMachine里面
它在sendMessage()发送消息,然后在各个状态里的processMessage()处理消息
在 StateMachine 中,只有处于 Connected 状态时,CONNECT_AUDIO 消息才会被处理
path:my22q_main\packages\apps\Bluetooth\src\com\android\bluetooth\hfpclient\HeadsetClientStateMachine.java
// Handled in the Connected state's processMessage(): forward the audio-connect
// request down to the native stack through the JNI bridge.
case CONNECT_AUDIO:
if (!NativeInterface.connectAudioNative(getByteAddress(mCurrentDevice))) {
Log.e(TAG, "ERROR: Couldn't connect Audio for device " + mCurrentDevice);
// No state transition is involved, fire broadcast immediately
broadcastAudioState(mCurrentDevice,
BluetoothHeadsetClient.STATE_AUDIO_DISCONNECTED,
BluetoothHeadsetClient.STATE_AUDIO_DISCONNECTED);
} else { // We have successfully sent a connect request!
// Final CONNECTED/DISCONNECTED result arrives later via the native callback
// (onAudioStateChanged); until then we are in the CONNECTING state.
mAudioState = BluetoothHeadsetClient.STATE_AUDIO_CONNECTING;
}
break;
4,来到 NativeInterface 这里,它是 Java 层与 JNI 层交流的接口
path:my22q_main\packages\apps\Bluetooth\src\com\android\bluetooth\hfpclient\NativeInterface.java
static native boolean connectAudioNative(byte[] address);
通过 NativeInterface.connectAudioNative(getByteAddress(mCurrentDevice)) 去连接蓝牙设备的语音
这个时候有了native方法,那么就去找相应的jni包,然后在找到这个方法
ps:
在NativeInterface方法中,还有其他的一些native方法,也是java通向c++层的接口
// JNI declarations; implemented in com_android_bluetooth_hfpclient.cpp.
// Lifecycle of the native layer.
static native void classInitNative();
native void initializeNative();
native void cleanupNative();
// Service-level connection (SLC) management.
static native boolean connectNative(byte[] address);
static native boolean disconnectNative(byte[] address);
// Audio (SCO) path management.
static native boolean connectAudioNative(byte[] address);
static native boolean disconnectAudioNative(byte[] address);
// Voice recognition control on the remote AG.
static native boolean startVoiceRecognitionNative(byte[] address);
static native boolean stopVoiceRecognitionNative(byte[] address);
static native boolean setVolumeNative(byte[] address, int volumeType, int volume);
// Call control: dial, redial-from-memory, accept/hold/reject actions.
static native boolean dialNative(byte[] address, String number);
static native boolean dialMemoryNative(byte[] address, int location);
static native boolean handleCallActionNative(byte[] address, int action, int index);
// Queries against the AG: current calls, operator, subscriber info.
static native boolean queryCurrentCallsNative(byte[] address);
static native boolean queryCurrentOperatorNameNative(byte[] address);
static native boolean retrieveSubscriberInfoNative(byte[] address);
static native boolean sendDtmfNative(byte[] address, byte code);
static native boolean requestLastVoiceTagNumberNative(byte[] address);
// Raw AT command escape hatch.
static native boolean sendATCmdNative(byte[] address, int atCmd, int val1, int val2,
String arg);
还有一些给c++层回调java层的接口,比如当连接蓝牙设备的audio成功后,c++层要通知java层audio状态改变。就用下面这个onAudioStateChanged()方法
/**
 * Called from native code when the audio (SCO) connection state changes.
 * Wraps the state into a StackEvent and forwards it to HeadsetClientService,
 * which routes it to the device's state machine.
 *
 * @param state   new audio state reported by the stack
 * @param address remote device address as raw bytes
 */
private void onAudioStateChanged(int state, byte[] address) {
StackEvent event = new StackEvent(StackEvent.EVENT_TYPE_AUDIO_STATE_CHANGED);
event.valueInt = state;
event.device = getDevice(address);
if (DBG) {
// NOTE(review): logging a byte[] prints the array's identity hash, not the
// MAC address; event.device would be more readable — pre-existing quirk.
Log.d(TAG, "onAudioStateChanged: address " + address + " event " + event);
}
HeadsetClientService service = HeadsetClientService.getHeadsetClientService();
if (service != null) {
service.messageFromNative(event);
} else {
// Service may already be torn down (e.g. Bluetooth turning off); drop the event.
Log.w(TAG, "onAudioStateChanged: Ignoring message because service not available: "
+ event);
}
}
也还有其他一些c++回调java层的方法,比如
/**
 * Called from native code when the HFP service-level connection state changes.
 * Packs the state plus the peer/call-hold feature masks into a StackEvent and
 * forwards it to HeadsetClientService.
 *
 * @param state    new connection state
 * @param peerFeat peer feature bitmask reported by the AG
 * @param chldFeat three-way-calling (AT+CHLD) feature bitmask
 * @param address  remote device address as raw bytes
 */
private void onConnectionStateChanged(int state, int peerFeat, int chldFeat, byte[] address) {
StackEvent event = new StackEvent(StackEvent.EVENT_TYPE_CONNECTION_STATE_CHANGED);
event.valueInt = state;
event.valueInt2 = peerFeat;
event.valueInt3 = chldFeat;
event.device = getDevice(address);
// BluetoothAdapter.getDefaultAdapter().getRemoteDevice(Utils.getAddressStringFromByte
// (address));
if (DBG) {
// NOTE(review): would NPE here if getDevice(address) returned null — presumably
// the stack only reports known addresses; TODO confirm.
Log.d(TAG, "Device addr " + event.device.getAddress() + " State " + state);
}
HeadsetClientService service = HeadsetClientService.getHeadsetClientService();
if (service != null) {
service.messageFromNative(event);
} else {
// Service may already be torn down; drop the event.
Log.w(TAG, "Ignoring message because service not available: " + event);
}
}
/**
 * Native callback for audio (SCO) state changes (listed again here among the
 * other native-to-Java callbacks): wraps the new state in a StackEvent and
 * hands it to HeadsetClientService for dispatch to the state machine.
 *
 * @param state   new audio state reported by the stack
 * @param address remote device address as raw bytes
 */
private void onAudioStateChanged(int state, byte[] address) {
StackEvent event = new StackEvent(StackEvent.EVENT_TYPE_AUDIO_STATE_CHANGED);
event.valueInt = state;
event.device = getDevice(address);
if (DBG) {
// NOTE(review): byte[] logs as an identity hash, not a readable MAC address.
Log.d(TAG, "onAudioStateChanged: address " + address + " event " + event);
}
HeadsetClientService service = HeadsetClientService.getHeadsetClientService();
if (service != null) {
service.messageFromNative(event);
} else {
Log.w(TAG, "onAudioStateChanged: Ignoring message because service not available: "
+ event);
}
}
/**
 * Native callback for voice-recognition state changes on the AG; forwards the
 * state as a StackEvent to HeadsetClientService.
 *
 * @param state   new voice-recognition state
 * @param address remote device address as raw bytes
 */
private void onVrStateChanged(int state, byte[] address) {
StackEvent event = new StackEvent(StackEvent.EVENT_TYPE_VR_STATE_CHANGED);
event.valueInt = state;
event.device = getDevice(address);
if (DBG) {
// NOTE(review): byte[] logs as an identity hash, not a readable MAC address.
Log.d(TAG, "onVrStateChanged: address " + address + " event " + event);
}
HeadsetClientService service = HeadsetClientService.getHeadsetClientService();
if (service != null) {
service.messageFromNative(event);
} else {
Log.w(TAG,
"onVrStateChanged: Ignoring message because service not available: " + event);
}
}
/**
 * Native callback for cellular-network availability changes reported by the AG;
 * forwards the state as a StackEvent to HeadsetClientService.
 *
 * @param state   new network state
 * @param address remote device address as raw bytes
 */
private void onNetworkState(int state, byte[] address) {
StackEvent event = new StackEvent(StackEvent.EVENT_TYPE_NETWORK_STATE);
event.valueInt = state;
event.device = getDevice(address);
if (DBG) {
// NOTE(review): byte[] logs as an identity hash, not a readable MAC address.
Log.d(TAG, "onNetworkStateChanged: address " + address + " event " + event);
}
HeadsetClientService service = HeadsetClientService.getHeadsetClientService();
if (service != null) {
service.messageFromNative(event);
} else {
Log.w(TAG,
"onNetworkStateChanged: Ignoring message because service not available: "
+ event);
}
}
这些方法中都调用了 service.messageFromNative(event),通知 HeadsetClientService 状态改变,再去通知相应的 UI 更新
《---------------------------------------------------------------------------------------------------------------------------------------》
好了,继续往下层走
5,来到对应的jni里面
path:my22q_main\packages\apps\Bluetooth\jni\com_android_bluetooth_hfpclient.cpp
java层对应的native方法就在这里
// JNI implementation of NativeInterface.connectAudioNative(): converts the Java
// byte[] address into a RawAddress and asks the HFP-client HAL interface to open
// the audio connection. Returns JNI_TRUE only if the HAL reports success.
static jboolean connectAudioNative(JNIEnv* env, jobject object,
                                   jbyteArray address) {
  // HAL interface not initialized (e.g. stack not up) -> fail fast.
  if (!sBluetoothHfpClientInterface) return JNI_FALSE;
  jbyte* addr = env->GetByteArrayElements(address, NULL);
  if (!addr) {
    jniThrowIOException(env, EINVAL);
    return JNI_FALSE;
  }
  bt_status_t status =
      sBluetoothHfpClientInterface->connect_audio((const RawAddress*)addr);
  if (status != BT_STATUS_SUCCESS) {
    ALOGE("Failed AG audio connection, status: %d", status);
  }
  // Release the pinned array; mode 0 copies back and frees the buffer.
  env->ReleaseByteArrayElements(address, addr, 0);
  return (status == BT_STATUS_SUCCESS) ? JNI_TRUE : JNI_FALSE;
}
ps:
在这个jni方法里,除了实现了java层定义的Native方法,也会在这里进行对java层的回调,比如当c++里完成了对蓝牙设备audio的连接,底层的消息也会经过这里,然后回调java层的方法,通知java层任务的完成情况,如果语音连接成功了,就通知java层进行audio状态的更新
// Stack-to-Java callback: invoked by the native HFP-client stack when the audio
// (SCO) state changes. Marshals the device address into a jbyteArray and calls
// the Java method NativeInterface.onAudioStateChanged(state, address).
static void audio_state_cb(const RawAddress* bd_addr,
                           bthf_client_audio_state_t state) {
  // No Java callback object registered (native layer not initialized) -> drop.
  if (mCallbacksObj == NULL) return;
  // CallbackEnv attaches this stack thread to the JVM for the call's duration.
  CallbackEnv sCallbackEnv(__func__);
  if (!sCallbackEnv.valid()) return;
  // ScopedLocalRef auto-deletes the local ref when this function returns.
  ScopedLocalRef<jbyteArray> addr(sCallbackEnv.get(), marshall_bda(bd_addr));
  if (!addr.get()) return;
  sCallbackEnv->CallVoidMethod(mCallbacksObj, method_onAudioStateChanged,
                               (jint)state, addr.get());
}
其他的方法原理类似。
6,继续往c++层下面走
sBluetoothHfpClientInterface->connect_audio((const RawAddress*)addr);
在这里,跳转进去是bt_hf_client.h,它相当于一个接口。
// Callback table the upper layer (the JNI glue) registers with the HFP-client
// HAL: one function pointer per event the stack can report upward.
typedef struct {
/** set to sizeof(BtHfClientCallbacks) */
size_t size;
// Connection/audio lifecycle events.
bthf_client_connection_state_callback connection_state_cb;
bthf_client_audio_state_callback audio_state_cb;
bthf_client_vr_cmd_callback vr_cmd_cb;
// Network status indicators from the AG (registration, roaming, signal).
bthf_client_network_state_callback network_state_cb;
bthf_client_network_roaming_callback network_roaming_cb;
bthf_client_network_signal_callback network_signal_cb;
bthf_client_battery_level_callback battery_level_cb;
bthf_client_current_operator_callback current_operator_cb;
// Call-state indicators (call, callsetup, callheld, response-and-hold).
bthf_client_call_callback call_cb;
bthf_client_callsetup_callback callsetup_cb;
bthf_client_callheld_callback callheld_cb;
bthf_client_resp_and_hold_callback resp_and_hold_cb;
// Caller id / call waiting / current-calls query results.
bthf_client_clip_callback clip_cb;
bthf_client_call_waiting_callback call_waiting_cb;
bthf_client_current_calls current_calls_cb;
// Misc: volume sync, AT command completion, subscriber info, in-band ring,
// voice-tag number, RING indication.
bthf_client_volume_change_callback volume_change_cb;
bthf_client_cmd_complete_callback cmd_complete_cb;
bthf_client_subscriber_info_callback subscriber_info_cb;
bthf_client_in_band_ring_tone_callback in_band_ring_tone_cb;
bthf_client_last_voice_tag_number_callback last_voice_tag_number_callback;
bthf_client_ring_indication_callback ring_indication_cb;
} bthf_client_callbacks_t;
/** Represents the standard BT-HF interface. */
// Downward-facing HAL interface: each member is a function pointer the upper
// layer calls to drive the stack (the counterpart of the callback table above).
typedef struct {
/** set to sizeof(BtHfClientInterface) */
size_t size;
/**
* Register the BtHf callbacks
*/
bt_status_t (*init)(bthf_client_callbacks_t* callbacks);
/** connect to audio gateway */
bt_status_t (*connect)(RawAddress* bd_addr);
/** disconnect from audio gateway */
bt_status_t (*disconnect)(const RawAddress* bd_addr);
/** create an audio connection */
bt_status_t (*connect_audio)(const RawAddress* bd_addr);
/** close the audio connection */
bt_status_t (*disconnect_audio)(const RawAddress* bd_addr);
/** start voice recognition */
bt_status_t (*start_voice_recognition)(const RawAddress* bd_addr);
/** stop voice recognition */
bt_status_t (*stop_voice_recognition)(const RawAddress* bd_addr);
/** volume control */
bt_status_t (*volume_control)(const RawAddress* bd_addr,
                              bthf_client_volume_type_t type, int volume);
/** place a call with number a number
* if number is NULL last called number is called (aka re-dial)*/
/* NOTE: excerpt truncated here — the remaining operations and the closing
 * "} bthf_client_interface_t;" are omitted in this document. */
在这里定义了往底层调用的函数,同样也定义了往上层回调的函数,但是却没有实现这些函数
所以我们要去找那些地方引用bt_hf_client.h并且实现了这些函数
在 OpenGrok 上查到 btif_hf_client.cc 实现了连接语音的方法
path:my22q_main\system\bt\btif\src\btif_hf_client.cc
// btif-layer implementation of the HAL's connect_audio op: validates that an
// SLC exists for the device, then either starts codec negotiation (AT+BCC, so
// the AG initiates the SCO link) or opens the SCO connection directly.
static bt_status_t connect_audio(const RawAddress* bd_addr) {
  btif_hf_client_cb_t* cb = btif_hf_client_get_cb_by_bda(*bd_addr);
  // No control block or no service-level connection -> cannot open audio.
  if (cb == NULL || !is_connected(cb)) return BT_STATUS_FAIL;
  CHECK_BTHF_CLIENT_SLC_CONNECTED(cb);
  if ((BTIF_HF_CLIENT_FEATURES & BTA_HF_CLIENT_FEAT_CODEC) &&
      (cb->peer_feat & BTA_HF_CLIENT_PEER_CODEC)) {
    // Both sides support codec negotiation: send AT+BCC and let the AG
    // set up the audio connection.
    BTA_HfClientSendAT(cb->handle, BTA_HF_CLIENT_AT_CMD_BCC, 0, 0, NULL);
  } else {
    // No codec negotiation: open the SCO link ourselves.
    BTA_HfClientAudioOpen(cb->handle);
  }
  /* Inform the application that the audio connection has been initiated
   * successfully */
  btif_transfer_context(btif_in_hf_client_generic_evt,
                        BTIF_HF_CLIENT_CB_AUDIO_CONNECTING, (char*)bd_addr,
                        sizeof(RawAddress), NULL);
  return BT_STATUS_SUCCESS;
}
然后在bta_hf_client_api.cc中继续
path:my22q_main\system\bt\bta\hf_client\bta_hf_client_api.cc
// BTA API: posts a BTA_HF_CLIENT_API_AUDIO_OPEN_EVT message to the BTA system
// message loop for the connection identified by |handle|. Asynchronous; the
// message (and its buffer) is consumed by the BTA HF-client event handler.
void BTA_HfClientAudioOpen(uint16_t handle) {
  BT_HDR* p_buf = (BT_HDR*)osi_malloc(sizeof(BT_HDR));
  p_buf->event = BTA_HF_CLIENT_API_AUDIO_OPEN_EVT;
  p_buf->layer_specific = handle;  // which HF-client connection this targets
  bta_sys_sendmsg(p_buf);
}
再往下:
path:my22q_main\system\bt\bta\sys\bta_sys_main.cc
// Queues a BTA message onto the stack's main thread: binds the message to
// bta_sys_event (the BTA dispatcher) and posts the closure via
// do_in_main_thread. Failure is only logged; the caller is not notified.
void bta_sys_sendmsg(void* p_msg) {
  if (do_in_main_thread(
          FROM_HERE, base::Bind(&bta_sys_event, static_cast<BT_HDR*>(p_msg))) !=
      BT_STATUS_SUCCESS) {
    LOG(ERROR) << __func__ << ": do_in_main_thread failed";
  }
}
再往下:让任务执行在main_thread里面
path:my22q_main\system\bt\stack\btu\btu_task.cc
// Posts |task| to the Bluetooth stack's main thread for execution.
// |from_here| records the posting call site for diagnostics.
// Returns BT_STATUS_SUCCESS if the task was enqueued, BT_STATUS_FAIL otherwise
// (note: success means queued, not yet executed).
bt_status_t do_in_main_thread(const base::Location& from_here,
                              base::OnceClosure task) {
  if (!main_thread.DoInThread(from_here, std::move(task))) {
    LOG(ERROR) << __func__ << ": failed from " << from_here.ToString();
    return BT_STATUS_FAIL;
  }
  return BT_STATUS_SUCCESS;
}
再往下,任务被投递到 main_thread 的消息循环中执行,由 bta_sys_event 把消息分发给 BTA HF Client 的状态机处理,最终由协议栈完成 SCO 语音链路的建立,这里不再继续展开。