upload android base code part7
parent 4e516ec6ed
commit 841ae54672
25229 changed files with 1709508 additions and 0 deletions
@@ -0,0 +1,293 @@
/*
 * Copyright (C) 2015 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.example.android.wearable.speaker;

import android.Manifest;
import android.content.Context;
import android.content.pm.PackageManager;
import android.media.AudioDeviceInfo;
import android.media.AudioManager;
import android.media.MediaPlayer;
import android.os.Build;
import android.os.Bundle;
import android.os.CountDownTimer;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;
import android.support.wearable.activity.WearableActivity;
import android.util.Log;
import android.view.View;
import android.widget.ImageView;
import android.widget.ProgressBar;
import android.widget.Toast;

import java.util.concurrent.TimeUnit;

/**
 * We first get the required permission to use the MIC. If it is granted, we continue with
 * the application and present the UI with three icons: a MIC icon (if pressed, the user can
 * record up to 10 seconds), a Play icon (if clicked, it will play back the recorded audio file)
 * and a music note icon (if clicked, it plays an MP3 file that is included in the app).
 */
public class MainActivity extends WearableActivity implements UIAnimation.UIStateListener,
        SoundRecorder.OnVoicePlaybackStateChangedListener {

    private static final String TAG = "MainActivity";
    private static final int PERMISSIONS_REQUEST_CODE = 100;
    private static final long COUNT_DOWN_MS = TimeUnit.SECONDS.toMillis(10);
    private static final long MILLIS_IN_SECOND = TimeUnit.SECONDS.toMillis(1);
    private static final String VOICE_FILE_NAME = "audiorecord.pcm";

    private MediaPlayer mMediaPlayer;
    private AppState mState = AppState.READY;
    private UIAnimation.UIState mUiState = UIAnimation.UIState.HOME;
    private SoundRecorder mSoundRecorder;

    private UIAnimation mUIAnimation;
    private ProgressBar mProgressBar;
    private CountDownTimer mCountDownTimer;

    enum AppState {
        READY, PLAYING_VOICE, PLAYING_MUSIC, RECORDING
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.main_activity);
        mProgressBar = (ProgressBar) findViewById(R.id.progress);
        mProgressBar.setMax((int) (COUNT_DOWN_MS / MILLIS_IN_SECOND));
        setAmbientEnabled();
    }

    private void setProgressBar(long progressInMillis) {
        mProgressBar.setProgress((int) (progressInMillis / MILLIS_IN_SECOND));
    }

    @Override
    public void onUIStateChanged(UIAnimation.UIState state) {
        Log.d(TAG, "UI State is: " + state);
        if (mUiState == state) {
            return;
        }
        switch (state) {
            case MUSIC_UP:
                mState = AppState.PLAYING_MUSIC;
                mUiState = state;
                playMusic();
                break;
            case MIC_UP:
                mState = AppState.RECORDING;
                mUiState = state;
                mSoundRecorder.startRecording();
                setProgressBar(COUNT_DOWN_MS);
                mCountDownTimer = new CountDownTimer(COUNT_DOWN_MS, MILLIS_IN_SECOND) {
                    @Override
                    public void onTick(long millisUntilFinished) {
                        mProgressBar.setVisibility(View.VISIBLE);
                        setProgressBar(millisUntilFinished);
                        Log.d(TAG, "Time Left: " + millisUntilFinished / MILLIS_IN_SECOND);
                    }

                    @Override
                    public void onFinish() {
                        mProgressBar.setProgress(0);
                        mProgressBar.setVisibility(View.INVISIBLE);
                        mSoundRecorder.stopRecording();
                        mUIAnimation.transitionToHome();
                        mUiState = UIAnimation.UIState.HOME;
                        mState = AppState.READY;
                        mCountDownTimer = null;
                    }
                };
                mCountDownTimer.start();
                break;
            case SOUND_UP:
                mState = AppState.PLAYING_VOICE;
                mUiState = state;
                mSoundRecorder.startPlay();
                break;
            case HOME:
                switch (mState) {
                    case PLAYING_MUSIC:
                        mState = AppState.READY;
                        mUiState = state;
                        stopMusic();
                        break;
                    case PLAYING_VOICE:
                        mState = AppState.READY;
                        mUiState = state;
                        mSoundRecorder.stopPlaying();
                        break;
                    case RECORDING:
                        mState = AppState.READY;
                        mUiState = state;
                        mSoundRecorder.stopRecording();
                        if (mCountDownTimer != null) {
                            mCountDownTimer.cancel();
                            mCountDownTimer = null;
                        }
                        mProgressBar.setVisibility(View.INVISIBLE);
                        setProgressBar(COUNT_DOWN_MS);
                        break;
                }
                break;
        }
    }

    /**
     * Plays back the MP3 file embedded in the application.
     */
    private void playMusic() {
        if (mMediaPlayer == null) {
            mMediaPlayer = MediaPlayer.create(this, R.raw.sound);
            mMediaPlayer.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
                @Override
                public void onCompletion(MediaPlayer mp) {
                    // we need to transition to the READY/Home state
                    Log.d(TAG, "Music Finished");
                    mUIAnimation.transitionToHome();
                }
            });
        }
        mMediaPlayer.start();
    }

    /**
     * Stops the playback of the MP3 file.
     */
    private void stopMusic() {
        if (mMediaPlayer != null) {
            mMediaPlayer.stop();
            mMediaPlayer.release();
            mMediaPlayer = null;
        }
    }

    /**
     * Checks the permission that this app needs and, if it has not been granted, prompts the
     * user to grant it; otherwise it starts the main flow of the app.
     */
    private void checkPermissions() {
        boolean recordAudioPermissionGranted =
                ContextCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO)
                        == PackageManager.PERMISSION_GRANTED;

        if (recordAudioPermissionGranted) {
            start();
        } else {
            ActivityCompat.requestPermissions(this, new String[] {Manifest.permission.RECORD_AUDIO},
                    PERMISSIONS_REQUEST_CODE);
        }
    }

    @Override
    public void onRequestPermissionsResult(int requestCode,
            String[] permissions, int[] grantResults) {
        if (requestCode == PERMISSIONS_REQUEST_CODE) {
            if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
                start();
            } else {
                // The permission has been denied. At this point we should show a dialog to the
                // user that explains why this permission is needed and directs them to the app's
                // Permissions screen in the System settings. For this sample, we simply exit to
                // get to the important part.
                Toast.makeText(this, R.string.exiting_for_permissions, Toast.LENGTH_LONG).show();
                finish();
            }
        }
    }

    /**
     * Starts the main flow of the application.
     */
    private void start() {
        mSoundRecorder = new SoundRecorder(this, VOICE_FILE_NAME, this);
        int[] thumbResources = new int[] {R.id.mic, R.id.play, R.id.music};
        ImageView[] thumbs = new ImageView[3];
        for (int i = 0; i < 3; i++) {
            thumbs[i] = (ImageView) findViewById(thumbResources[i]);
        }
        View containerView = findViewById(R.id.container);
        ImageView expandedView = (ImageView) findViewById(R.id.expanded);
        int animationDuration = getResources().getInteger(android.R.integer.config_shortAnimTime);
        mUIAnimation = new UIAnimation(containerView, thumbs, expandedView, animationDuration,
                this);
    }

    @Override
    protected void onStart() {
        super.onStart();
        if (speakerIsSupported()) {
            checkPermissions();
        } else {
            findViewById(R.id.container2).setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    Toast.makeText(MainActivity.this, R.string.no_speaker_supported,
                            Toast.LENGTH_SHORT).show();
                }
            });
        }
    }

    @Override
    protected void onStop() {
        if (mSoundRecorder != null) {
            mSoundRecorder.cleanup();
            mSoundRecorder = null;
        }
        if (mCountDownTimer != null) {
            mCountDownTimer.cancel();
        }

        if (mMediaPlayer != null) {
            mMediaPlayer.release();
            mMediaPlayer = null;
        }
        super.onStop();
    }

    @Override
    public void onPlaybackStopped() {
        mUIAnimation.transitionToHome();
        mUiState = UIAnimation.UIState.HOME;
        mState = AppState.READY;
    }

    /**
     * Determines if the Wear device has a built-in speaker and if it is supported. A speaker,
     * even if physically present, is only supported on Android M+ Wear devices.
     */
    public final boolean speakerIsSupported() {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
            PackageManager packageManager = getPackageManager();
            // The results from AudioManager.getDevices can't be trusted unless the device
            // advertises FEATURE_AUDIO_OUTPUT.
            if (!packageManager.hasSystemFeature(PackageManager.FEATURE_AUDIO_OUTPUT)) {
                return false;
            }
            AudioManager audioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
            AudioDeviceInfo[] devices = audioManager.getDevices(AudioManager.GET_DEVICES_OUTPUTS);
            for (AudioDeviceInfo device : devices) {
                if (device.getType() == AudioDeviceInfo.TYPE_BUILTIN_SPEAKER) {
                    return true;
                }
            }
        }
        return false;
    }
}
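The recording countdown above maps COUNT_DOWN_MS onto a ProgressBar whose range is expressed in whole seconds. A minimal standalone sketch of that arithmetic, kept separate from the sample (the class name and the 7300 ms value are illustrative only):

import java.util.concurrent.TimeUnit;

// Illustrative sketch (not part of the sample): how MainActivity's COUNT_DOWN_MS and
// MILLIS_IN_SECOND translate into ProgressBar units of whole seconds.
public class CountdownMathSketch {

    private static final long COUNT_DOWN_MS = TimeUnit.SECONDS.toMillis(10);
    private static final long MILLIS_IN_SECOND = TimeUnit.SECONDS.toMillis(1);

    public static void main(String[] args) {
        // ProgressBar max: ten ticks, one per second of allowed recording time.
        int max = (int) (COUNT_DOWN_MS / MILLIS_IN_SECOND);       // 10
        // Value shown by setProgressBar() when, say, 7300 ms remain.
        int remaining = (int) (7300 / MILLIS_IN_SECOND);          // 7
        System.out.println("max=" + max + " remaining=" + remaining);
    }
}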
@@ -0,0 +1,263 @@
/*
 * Copyright (C) 2015 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.example.android.wearable.speaker;

import android.content.Context;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioRecord;
import android.media.AudioTrack;
import android.media.MediaRecorder;
import android.os.AsyncTask;
import android.os.Handler;
import android.os.Looper;
import android.util.Log;

import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;

/**
 * A helper class to provide methods to record audio input from the MIC to the internal storage
 * and to play back the same recorded audio file.
 */
public class SoundRecorder {

    private static final String TAG = "SoundRecorder";
    private static final int RECORDING_RATE = 8000; // can go up to 44K, if needed
    private static final int CHANNEL_IN = AudioFormat.CHANNEL_IN_MONO;
    private static final int CHANNELS_OUT = AudioFormat.CHANNEL_OUT_MONO;
    private static final int FORMAT = AudioFormat.ENCODING_PCM_16BIT;
    private static int BUFFER_SIZE = AudioRecord
            .getMinBufferSize(RECORDING_RATE, CHANNEL_IN, FORMAT);

    private final String mOutputFileName;
    private final AudioManager mAudioManager;
    private final Handler mHandler;
    private final Context mContext;
    private State mState = State.IDLE;

    private OnVoicePlaybackStateChangedListener mListener;
    private AsyncTask<Void, Void, Void> mRecordingAsyncTask;
    private AsyncTask<Void, Void, Void> mPlayingAsyncTask;

    enum State {
        IDLE, RECORDING, PLAYING
    }

    public SoundRecorder(Context context, String outputFileName,
            OnVoicePlaybackStateChangedListener listener) {
        mOutputFileName = outputFileName;
        mListener = listener;
        mAudioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
        mHandler = new Handler(Looper.getMainLooper());
        mContext = context;
    }

    /**
     * Starts recording from the MIC.
     */
    public void startRecording() {
        if (mState != State.IDLE) {
            Log.w(TAG, "Requesting to start recording while state was not IDLE");
            return;
        }

        mRecordingAsyncTask = new AsyncTask<Void, Void, Void>() {

            private AudioRecord mAudioRecord;

            @Override
            protected void onPreExecute() {
                mState = State.RECORDING;
            }

            @Override
            protected Void doInBackground(Void... params) {
                mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
                        RECORDING_RATE, CHANNEL_IN, FORMAT, BUFFER_SIZE * 3);
                BufferedOutputStream bufferedOutputStream = null;
                try {
                    bufferedOutputStream = new BufferedOutputStream(
                            mContext.openFileOutput(mOutputFileName, Context.MODE_PRIVATE));
                    byte[] buffer = new byte[BUFFER_SIZE];
                    mAudioRecord.startRecording();
                    while (!isCancelled()) {
                        int read = mAudioRecord.read(buffer, 0, buffer.length);
                        bufferedOutputStream.write(buffer, 0, read);
                    }
                } catch (IOException | NullPointerException | IndexOutOfBoundsException e) {
                    Log.e(TAG, "Failed to record data: " + e);
                } finally {
                    if (bufferedOutputStream != null) {
                        try {
                            bufferedOutputStream.close();
                        } catch (IOException e) {
                            // ignore
                        }
                    }
                    mAudioRecord.release();
                    mAudioRecord = null;
                }
                return null;
            }

            @Override
            protected void onPostExecute(Void aVoid) {
                mState = State.IDLE;
                mRecordingAsyncTask = null;
            }

            @Override
            protected void onCancelled() {
                if (mState == State.RECORDING) {
                    Log.d(TAG, "Stopping the recording ...");
                    mState = State.IDLE;
                } else {
                    Log.w(TAG, "Requesting to stop recording while state was not RECORDING");
                }
                mRecordingAsyncTask = null;
            }
        };

        mRecordingAsyncTask.execute();
    }

    public void stopRecording() {
        if (mRecordingAsyncTask != null) {
            mRecordingAsyncTask.cancel(true);
        }
    }

    public void stopPlaying() {
        if (mPlayingAsyncTask != null) {
            mPlayingAsyncTask.cancel(true);
        }
    }

    /**
     * Starts playback of the recorded audio file.
     */
    public void startPlay() {
        if (mState != State.IDLE) {
            Log.w(TAG, "Requesting to play while state was not IDLE");
            return;
        }

        if (!new File(mContext.getFilesDir(), mOutputFileName).exists()) {
            // there is no recording to play
            if (mListener != null) {
                mHandler.post(new Runnable() {
                    @Override
                    public void run() {
                        mListener.onPlaybackStopped();
                    }
                });
            }
            return;
        }
        final int intSize = AudioTrack.getMinBufferSize(RECORDING_RATE, CHANNELS_OUT, FORMAT);

        mPlayingAsyncTask = new AsyncTask<Void, Void, Void>() {

            private AudioTrack mAudioTrack;

            @Override
            protected void onPreExecute() {
                mAudioManager.setStreamVolume(AudioManager.STREAM_MUSIC,
                        mAudioManager.getStreamMaxVolume(AudioManager.STREAM_MUSIC), 0 /* flags */);
                mState = State.PLAYING;
            }

            @Override
            protected Void doInBackground(Void... params) {
                try {
                    mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, RECORDING_RATE,
                            CHANNELS_OUT, FORMAT, intSize, AudioTrack.MODE_STREAM);
                    byte[] buffer = new byte[intSize * 2];
                    FileInputStream in = null;
                    BufferedInputStream bis = null;
                    mAudioTrack.setVolume(AudioTrack.getMaxVolume());
                    mAudioTrack.play();
                    try {
                        in = mContext.openFileInput(mOutputFileName);
                        bis = new BufferedInputStream(in);
                        int read;
                        while (!isCancelled() && (read = bis.read(buffer, 0, buffer.length)) > 0) {
                            mAudioTrack.write(buffer, 0, read);
                        }
                    } catch (IOException e) {
                        Log.e(TAG, "Failed to read the sound file into a byte array", e);
                    } finally {
                        try {
                            if (in != null) {
                                in.close();
                            }
                            if (bis != null) {
                                bis.close();
                            }
                        } catch (IOException e) { /* ignore */ }

                        mAudioTrack.release();
                    }
                } catch (IllegalStateException e) {
                    Log.e(TAG, "Failed to start playback", e);
                }
                return null;
            }

            @Override
            protected void onPostExecute(Void aVoid) {
                cleanup();
            }

            @Override
            protected void onCancelled() {
                cleanup();
            }

            private void cleanup() {
                if (mListener != null) {
                    mListener.onPlaybackStopped();
                }
                mState = State.IDLE;
                mPlayingAsyncTask = null;
            }
        };

        mPlayingAsyncTask.execute();
    }

    public interface OnVoicePlaybackStateChangedListener {

        /**
         * Called when the playback of the audio file ends. This should be called on the UI thread.
         */
        void onPlaybackStopped();
    }

    /**
     * Cleans up some resources related to {@link AudioTrack} and {@link AudioRecord}.
     */
    public void cleanup() {
        Log.d(TAG, "cleanup() is called");
        stopPlaying();
        stopRecording();
    }
}
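SoundRecorder stores the capture as headerless PCM (16-bit mono at 8 kHz) streamed straight from AudioRecord into app-private storage, which is why playback has to reuse exactly the same rate and format constants. A rough, illustrative size check for a full-length recording, using the constants above (not part of the sample itself):

// Illustrative sketch (not part of the sample): approximate size of a full 10-second
// recording written by SoundRecorder as raw PCM with no WAV header.
public class PcmSizeSketch {

    public static void main(String[] args) {
        int sampleRateHz = 8000;   // RECORDING_RATE
        int bytesPerSample = 2;    // ENCODING_PCM_16BIT
        int channels = 1;          // CHANNEL_IN_MONO / CHANNEL_OUT_MONO
        int seconds = 10;          // MainActivity's recording limit

        long bytes = (long) sampleRateHz * bytesPerSample * channels * seconds;
        System.out.println("audiorecord.pcm ~ " + bytes + " bytes");   // ~160000 bytes (~156 KiB)
    }
}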
@@ -0,0 +1,220 @@
/*
 * Copyright (C) 2015 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.example.android.wearable.speaker;

import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.animation.AnimatorSet;
import android.animation.ObjectAnimator;
import android.graphics.Point;
import android.graphics.Rect;
import android.view.View;
import android.view.animation.DecelerateInterpolator;
import android.widget.ImageView;

/**
 * A helper class to provide a simple animation when the user selects any of the three icons on
 * the main UI.
 */
public class UIAnimation {

    private AnimatorSet mCurrentAnimator;
    private final int[] mLargeDrawables = new int[]{R.drawable.ic_mic_120dp,
            R.drawable.ic_play_arrow_120dp, R.drawable.ic_audiotrack_120dp};
    private final ImageView[] mThumbs;
    private ImageView expandedImageView;
    private final View mContainerView;
    private final int mAnimationDurationTime;

    private UIStateListener mListener;
    private UIState mState = UIState.HOME;

    public UIAnimation(View containerView, ImageView[] thumbs, ImageView expandedView,
            int animationDuration, UIStateListener listener) {
        mContainerView = containerView;
        mThumbs = thumbs;
        expandedImageView = expandedView;
        mAnimationDurationTime = animationDuration;
        mListener = listener;

        mThumbs[0].setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                zoomImageFromThumb(0);
            }
        });

        mThumbs[1].setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                zoomImageFromThumb(1);
            }
        });

        mThumbs[2].setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                zoomImageFromThumb(2);
            }
        });
    }

    private void zoomImageFromThumb(final int index) {
        int imageResId = mLargeDrawables[index];
        final ImageView thumbView = mThumbs[index];
        if (mCurrentAnimator != null) {
            return;
        }

        expandedImageView.setImageResource(imageResId);

        final Rect startBounds = new Rect();
        final Rect finalBounds = new Rect();
        final Point globalOffset = new Point();
        thumbView.getGlobalVisibleRect(startBounds);
        mContainerView.getGlobalVisibleRect(finalBounds, globalOffset);
        startBounds.offset(-globalOffset.x, -globalOffset.y);
        finalBounds.offset(-globalOffset.x, -globalOffset.y);
        float startScale;
        if ((float) finalBounds.width() / finalBounds.height()
                > (float) startBounds.width() / startBounds.height()) {
            startScale = (float) startBounds.height() / finalBounds.height();
            float startWidth = startScale * finalBounds.width();
            float deltaWidth = (startWidth - startBounds.width()) / 2;
            startBounds.left -= deltaWidth;
            startBounds.right += deltaWidth;
        } else {
            startScale = (float) startBounds.width() / finalBounds.width();
            float startHeight = startScale * finalBounds.height();
            float deltaHeight = (startHeight - startBounds.height()) / 2;
            startBounds.top -= deltaHeight;
            startBounds.bottom += deltaHeight;
        }

        for (int k = 0; k < 3; k++) {
            mThumbs[k].setAlpha(0f);
        }
        expandedImageView.setVisibility(View.VISIBLE);

        expandedImageView.setPivotX(0f);
        expandedImageView.setPivotY(0f);

        AnimatorSet zoomInAnimator = new AnimatorSet();
        zoomInAnimator.play(ObjectAnimator
                .ofFloat(expandedImageView, View.X, startBounds.left, finalBounds.left)).with(
                ObjectAnimator.ofFloat(expandedImageView, View.Y, startBounds.top, finalBounds
                        .top)).with(
                ObjectAnimator.ofFloat(expandedImageView, View.SCALE_X, startScale, 1f))
                .with(ObjectAnimator.ofFloat(expandedImageView, View.SCALE_Y, startScale, 1f));
        zoomInAnimator.setDuration(mAnimationDurationTime);
        zoomInAnimator.setInterpolator(new DecelerateInterpolator());
        zoomInAnimator.addListener(new AnimatorListenerAdapter() {
            @Override
            public void onAnimationEnd(Animator animation) {
                mCurrentAnimator = null;
                if (mListener != null) {
                    mState = UIState.getUIState(index);
                    mListener.onUIStateChanged(mState);
                }
            }

            @Override
            public void onAnimationCancel(Animator animation) {
                mCurrentAnimator = null;
            }
        });
        zoomInAnimator.start();
        mCurrentAnimator = zoomInAnimator;

        final float startScaleFinal = startScale;
        expandedImageView.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                if (mCurrentAnimator != null) {
                    return;
                }
                AnimatorSet zoomOutAnimator = new AnimatorSet();
                zoomOutAnimator.play(ObjectAnimator
                        .ofFloat(expandedImageView, View.X, startBounds.left))
                        .with(ObjectAnimator
                                .ofFloat(expandedImageView,
                                        View.Y, startBounds.top))
                        .with(ObjectAnimator
                                .ofFloat(expandedImageView,
                                        View.SCALE_X, startScaleFinal))
                        .with(ObjectAnimator
                                .ofFloat(expandedImageView,
                                        View.SCALE_Y, startScaleFinal));
                zoomOutAnimator.setDuration(mAnimationDurationTime);
                zoomOutAnimator.setInterpolator(new DecelerateInterpolator());
                zoomOutAnimator.addListener(new AnimatorListenerAdapter() {
                    @Override
                    public void onAnimationEnd(Animator animation) {
                        for (int k = 0; k < 3; k++) {
                            mThumbs[k].setAlpha(1f);
                        }
                        expandedImageView.setVisibility(View.GONE);
                        mCurrentAnimator = null;
                        if (mListener != null) {
                            mState = UIState.HOME;
                            mListener.onUIStateChanged(mState);
                        }
                    }

                    @Override
                    public void onAnimationCancel(Animator animation) {
                        thumbView.setAlpha(1f);
                        expandedImageView.setVisibility(View.GONE);
                        mCurrentAnimator = null;
                    }
                });
                zoomOutAnimator.start();
                mCurrentAnimator = zoomOutAnimator;
            }
        });
    }

    public enum UIState {
        MIC_UP(0), SOUND_UP(1), MUSIC_UP(2), HOME(3);

        private int mState;

        UIState(int state) {
            mState = state;
        }

        static UIState getUIState(int state) {
            for (UIState uiState : values()) {
                if (uiState.mState == state) {
                    return uiState;
                }
            }
            return null;
        }
    }

    public interface UIStateListener {
        void onUIStateChanged(UIState state);
    }

    public void transitionToHome() {
        if (mState == UIState.HOME) {
            return;
        }
        expandedImageView.callOnClick();
    }
}
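zoomImageFromThumb follows the usual thumbnail-zoom recipe: compare the aspect ratios of the thumbnail and the container, derive startScale from the limiting dimension, pad the start rectangle to match, then animate X, Y, SCALE_X and SCALE_Y between the two. A small worked example of the scale selection with hypothetical bounds (the real values come from getGlobalVisibleRect()):

// Illustrative sketch (not part of the sample): the startScale decision from
// UIAnimation.zoomImageFromThumb, applied to made-up thumbnail/container sizes.
public class ZoomScaleSketch {

    public static void main(String[] args) {
        float startWidth = 64f, startHeight = 64f;     // hypothetical thumbView bounds
        float finalWidth = 320f, finalHeight = 290f;   // hypothetical containerView bounds

        float startScale;
        if (finalWidth / finalHeight > startWidth / startHeight) {
            // Container is relatively wider: match heights and let the start rect widen.
            startScale = startHeight / finalHeight;    // 64 / 290, about 0.22
        } else {
            // Container is relatively taller: match widths instead.
            startScale = startWidth / finalWidth;
        }
        // The expanded ImageView then animates from startScale up to 1.0 over
        // config_shortAnimTime, exactly as the zoom-in AnimatorSet does above.
        System.out.println("startScale = " + startScale);
    }
}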