// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.media;

import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.pm.PackageManager;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioRecord;
import android.media.AudioTrack;
import android.os.Build;
import android.util.Log;

import org.chromium.base.CalledByNative;
import org.chromium.base.JNINamespace;

@JNINamespace("media")
class AudioManagerAndroid {
    private static final String TAG = "AudioManagerAndroid";

    // Most Google lead devices use 44.1 kHz as the default sampling rate, and
    // 44.1 kHz is also widely used on other Android devices.
    private static final int DEFAULT_SAMPLING_RATE = 44100;
    // Default frame size, chosen to be close to the value returned on a
    // Nexus 4. Used when getProperty(PROPERTY_OUTPUT_FRAMES_PER_BUFFER) fails.
    private static final int DEFAULT_FRAME_PER_BUFFER = 256;

    private final AudioManager mAudioManager;
    private final Context mContext;

    private BroadcastReceiver mReceiver;
    private boolean mOriginalSpeakerStatus;

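    /**
     * Sets the audio mode (one of the AudioManager.MODE_* values) on behalf of
     * the native side. A SecurityException from the underlying call is caught
     * and logged together with device info instead of being propagated.
     */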
    @CalledByNative
    public void setMode(int mode) {
        try {
            mAudioManager.setMode(mode);
        } catch (SecurityException e) {
            Log.e(TAG, "setMode exception: " + e.getMessage());
            logDeviceInfo();
        }
    }

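    /** Constructs an AudioManagerAndroid instance on behalf of the native side. */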
    @CalledByNative
    private static AudioManagerAndroid createAudioManagerAndroid(Context context) {
        return new AudioManagerAndroid(context);
    }

    private AudioManagerAndroid(Context context) {
        mContext = context;
        mAudioManager = (AudioManager) mContext.getSystemService(Context.AUDIO_SERVICE);
    }

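    /**
     * Registers a receiver for ACTION_HEADSET_PLUG and routes output to the
     * speakerphone while no wired headset is plugged in. The current
     * speakerphone state is saved so it can be restored when the receiver is
     * unregistered.
     */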
    @CalledByNative
    public void registerHeadsetReceiver() {
        if (mReceiver != null) {
            return;
        }

        mOriginalSpeakerStatus = mAudioManager.isSpeakerphoneOn();
        IntentFilter filter = new IntentFilter(Intent.ACTION_HEADSET_PLUG);

        mReceiver = new BroadcastReceiver() {
            @Override
            public void onReceive(Context context, Intent intent) {
                if (Intent.ACTION_HEADSET_PLUG.equals(intent.getAction())) {
                    try {
                        mAudioManager.setSpeakerphoneOn(
                                intent.getIntExtra("state", 0) == 0);
                    } catch (SecurityException e) {
                        Log.e(TAG, "setSpeakerphoneOn exception: " + e.getMessage());
                        logDeviceInfo();
                    }
                }
            }
        };
        mContext.registerReceiver(mReceiver, filter);
    }

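    /**
     * Unregisters the headset receiver and restores the speakerphone state
     * captured when the receiver was registered.
     */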
    @CalledByNative
    public void unregisterHeadsetReceiver() {
        if (mReceiver == null) {
            return;
        }
        mContext.unregisterReceiver(mReceiver);
        mReceiver = null;
        mAudioManager.setSpeakerphoneOn(mOriginalSpeakerStatus);
    }

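    /** Logs basic device identifiers to help diagnose device-specific audio failures. */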
    private void logDeviceInfo() {
        Log.i(TAG, "Manufacturer: " + Build.MANUFACTURER +
                " Board: " + Build.BOARD + " Device: " + Build.DEVICE +
                " Model: " + Build.MODEL + " Product: " + Build.PRODUCT);
    }

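    /**
     * Returns the native output sample rate in Hz. Queried from AudioManager
     * on JELLY_BEAN_MR1 and later; older releases fall back to
     * DEFAULT_SAMPLING_RATE.
     */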
    @CalledByNative
    private int getNativeOutputSampleRate() {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
            String sampleRateString = mAudioManager.getProperty(
                    AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
            return (sampleRateString == null ?
                    DEFAULT_SAMPLING_RATE : Integer.parseInt(sampleRateString));
        } else {
            return DEFAULT_SAMPLING_RATE;
        }
    }

    /**
     * Returns the minimum frame size required for audio input.
     *
     * @param sampleRate sampling rate in Hz
     * @param channels number of channels (1 for mono, 2 for stereo)
     * @return the minimum buffer size in frames, or -1 if the channel count
     *         is unsupported
     */
    @CalledByNative
    private static int getMinInputFrameSize(int sampleRate, int channels) {
        int channelConfig;
        if (channels == 1) {
            channelConfig = AudioFormat.CHANNEL_IN_MONO;
        } else if (channels == 2) {
            channelConfig = AudioFormat.CHANNEL_IN_STEREO;
        } else {
            return -1;
        }
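        // getMinBufferSize() returns a size in bytes; divide by two bytes per
        // 16-bit sample and by the channel count to convert to frames.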
        return AudioRecord.getMinBufferSize(
                sampleRate, channelConfig, AudioFormat.ENCODING_PCM_16BIT) / 2 / channels;
    }

    /**
     * Returns the minimum frame size required for audio output.
     *
     * @param sampleRate sampling rate in Hz
     * @param channels number of channels (1 for mono, 2 for stereo)
     * @return the minimum buffer size in frames, or -1 if the channel count
     *         is unsupported
     */
    @CalledByNative
    private static int getMinOutputFrameSize(int sampleRate, int channels) {
        int channelConfig;
        if (channels == 1) {
            channelConfig = AudioFormat.CHANNEL_OUT_MONO;
        } else if (channels == 2) {
            channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
        } else {
            return -1;
        }
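        // getMinBufferSize() returns a size in bytes; divide by two bytes per
        // 16-bit sample and by the channel count to convert to frames.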
        return AudioTrack.getMinBufferSize(
                sampleRate, channelConfig, AudioFormat.ENCODING_PCM_16BIT) / 2 / channels;
    }

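    /** Returns true if the device advertises FEATURE_AUDIO_LOW_LATENCY. */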
    @CalledByNative
    private boolean isAudioLowLatencySupported() {
        return mContext.getPackageManager().hasSystemFeature(
                PackageManager.FEATURE_AUDIO_LOW_LATENCY);
    }

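    /**
     * Returns the platform's preferred output frames-per-buffer value, or
     * DEFAULT_FRAME_PER_BUFFER when the property is unavailable.
     */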
    @CalledByNative
    private int getAudioLowLatencyOutputFrameSize() {
        // AudioManager.getProperty() requires JELLY_BEAN_MR1; fall back to the
        // default frame size on older releases.
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR1) {
            return DEFAULT_FRAME_PER_BUFFER;
        }
        String framesPerBuffer =
                mAudioManager.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
        return (framesPerBuffer == null ?
                DEFAULT_FRAME_PER_BUFFER : Integer.parseInt(framesPerBuffer));
    }
}