2016-08-16 10 views
2

オーディオファイルの再生中に色を変化させたい WaveformView があります。オーディオの再生が進むにつれて、色付けも前進し続ける必要があります（一時停止したらその位置で止まる）。自分のコードでどう実装すればよいか分かりません。描画済みの View のキャンバス上で、色を徐々に塗りつぶしていく方法を教えてください。

enter image description here

これが私の生成された波形のスクリーンショットです。再生ボタンをクリックすると、波形の色が徐々に赤色(最初から最後までゆっくり)に変わるはずです。

波形ビューを描画するコードを示します。

WaveFormView.class

/**
 * Custom View that renders the waveform of a loaded {@link SoundFile} at one
 * of several precomputed zoom levels, draws a one-second time grid, highlights
 * the selected region, and marks the current playback position.
 *
 * Rendering pipeline: setSoundFile() smooths and normalizes the per-frame
 * gains into heights in [0, 1] for all zoom levels
 * (computeDoublesForAllZoomLevels), then computeIntsForThisZoomLevel()
 * converts the current level's values into pixel heights, which onDraw()
 * paints as one vertical line per column.
 */
public class WaveformView extends View {

    /** Callbacks invoked by this view; implemented by the hosting activity. */
    public interface WaveformListener {

        /** Called on a horizontal fling gesture; x is the fling velocity. */
        public void waveformFling(float x);

        /** Called at the end of every onDraw() pass. */
        public void waveformDraw();
    }

    // Paints for the visual elements; colors are loaded from resources.
    private Paint mGridPaint;
    private Paint mSelectedLinePaint;
    private Paint mUnselectedLinePaint;
    private Paint mUnselectedBkgndLinePaint;
    private Paint mBorderLinePaint;
    private Paint mPlaybackLinePaint;
    private Paint mTimecodePaint;

    private SoundFile mSoundFile;
    private int[] mLenByZoomLevel;          // drawable column count per zoom level
    private double[][] mValuesByZoomLevel;  // normalized heights in [0, 1] per zoom level
    private double[] mZoomFactorByZoomLevel;
    private int[] mHeightsAtThisZoomLevel;  // pixel heights for the current zoom level
    private int mZoomLevel;
    private int mNumZoomLevels;
    private int mSampleRate;
    private int mSamplesPerFrame;
    private int mOffset;          // leftmost visible column
    private int mSelectionStart;  // selection bounds, in columns at the current zoom level
    private int mSelectionEnd;
    private int mPlaybackPos;     // current playback column, or -1 when not playing
    private float mDensity;
    private float mInitialScaleSpan;
    private WaveformListener mListener;
    private GestureDetector mGestureDetector;
    private ScaleGestureDetector mScaleGestureDetector;
    private boolean mInitialized;
    // NOTE(review): never assigned; only referenced by commented-out code in onDraw().
    Color color;

    public WaveformView(Context context, AttributeSet attrs) {
        super(context, attrs);

        // We don't want keys, the markers get these
        setFocusable(false);

        mGridPaint = new Paint();
        mGridPaint.setAntiAlias(false);
        mGridPaint.setColor(
            getResources().getColor(R.color.grid_line));
        mSelectedLinePaint = new Paint();
        mSelectedLinePaint.setAntiAlias(false);
        mSelectedLinePaint.setColor(
            getResources().getColor(R.color.waveform_selected));
        mUnselectedLinePaint = new Paint();
        mUnselectedLinePaint.setAntiAlias(false);
        mUnselectedLinePaint.setColor(
            getResources().getColor(R.color.waveform_unselected));
        mUnselectedBkgndLinePaint = new Paint();
        mUnselectedBkgndLinePaint.setAntiAlias(false);
        mUnselectedBkgndLinePaint.setColor(
            getResources().getColor(
                R.color.selection_border));
        mBorderLinePaint = new Paint();
        mBorderLinePaint.setAntiAlias(true);
        mBorderLinePaint.setStrokeWidth(1.5f);
        mBorderLinePaint.setPathEffect(
            new DashPathEffect(new float[]{3.0f, 2.0f}, 0.0f));
        mBorderLinePaint.setColor(
            getResources().getColor(R.color.selection_border));
        mPlaybackLinePaint = new Paint();
        mPlaybackLinePaint.setAntiAlias(false);
        mPlaybackLinePaint.setColor(
            getResources().getColor(R.color.playback_indicator));
        mTimecodePaint = new Paint();
        mTimecodePaint.setTextSize(12);
        mTimecodePaint.setAntiAlias(true);
        mTimecodePaint.setColor(
            getResources().getColor(R.color.timecode));
        mTimecodePaint.setShadowLayer(
            2, 1, 1,
            getResources().getColor(R.color.timecode_shadow));

        mGestureDetector = new GestureDetector(
            context,
            new GestureDetector.SimpleOnGestureListener() {
                public boolean onFling(
                    MotionEvent e1, MotionEvent e2, float vx, float vy) {
                    mListener.waveformFling(vx);
                    return true;
                }
            });

        mSoundFile = null;
        mLenByZoomLevel = null;
        mValuesByZoomLevel = null;
        mHeightsAtThisZoomLevel = null;
        mOffset = 0;
        mPlaybackPos = -1;
        mSelectionStart = 0;
        mSelectionEnd = 0;
        mDensity = 1.0f;
        mInitialized = false;
    }

    public boolean hasSoundFile() {
        return mSoundFile != null;
    }

    /**
     * Attaches a decoded sound file and precomputes the normalized waveform
     * values for all zoom levels. Pixel heights are recomputed lazily on the
     * next draw (mHeightsAtThisZoomLevel is reset to null).
     */
    public void setSoundFile(SoundFile soundFile) {
        mSoundFile = soundFile;
        mSampleRate = mSoundFile.getSampleRate();
        mSamplesPerFrame = mSoundFile.getSamplesPerFrame();
        computeDoublesForAllZoomLevels();
        mHeightsAtThisZoomLevel = null;
    }

    /**
     * Called once when a new sound file is added.
     *
     * Smooths the per-frame gains with a 3-tap average, rescales them into
     * 0..255, recalibrates min/max against a histogram (min at the 5th
     * percentile, max at the 99th), squares the normalized value for contrast,
     * and finally builds the 5 zoom-level arrays (level 0 doubled with
     * interpolation, level 1 as-is, levels 2-4 each halved).
     */
    private void computeDoublesForAllZoomLevels() {
        int numFrames = mSoundFile.getNumFrames();
        int[] frameGains = mSoundFile.getFrameGains();
        double[] smoothedGains = new double[numFrames];
        if (numFrames == 1) {
            smoothedGains[0] = frameGains[0];
        } else if (numFrames == 2) {
            smoothedGains[0] = frameGains[0];
            smoothedGains[1] = frameGains[1];
        } else if (numFrames > 2) {
            smoothedGains[0] = (double)(
                (frameGains[0]/2.0) +
                    (frameGains[1]/2.0));
            for (int i = 1; i < numFrames - 1; i++) {
                smoothedGains[i] = (double)(
                    (frameGains[i - 1]/3.0) +
                        (frameGains[i ]/3.0) +
                        (frameGains[i + 1]/3.0));
            }
            smoothedGains[numFrames - 1] = (double)(
                (frameGains[numFrames - 2]/2.0) +
                    (frameGains[numFrames - 1]/2.0));
        }

        // Make sure the range is no more than 0 - 255
        double maxGain = 1.0;
        for (int i = 0; i < numFrames; i++) {
            if (smoothedGains[i] > maxGain) {
                maxGain = smoothedGains[i];
            }
        }
        double scaleFactor = 1.0;
        if (maxGain > 255.0) {
            scaleFactor = 255/maxGain;
        }

        // Build histogram of 256 bins and figure out the new scaled max
        maxGain = 0;
        int gainHist[] = new int[256];
        for (int i = 0; i < numFrames; i++) {
            int smoothedGain = (int)(smoothedGains[i] * scaleFactor);
            if (smoothedGain < 0)
                smoothedGain = 0;
            if (smoothedGain > 255)
                smoothedGain = 255;

            if (smoothedGain > maxGain)
                maxGain = smoothedGain;

            gainHist[smoothedGain]++;
        }

        // Re-calibrate the min to be 5%
        double minGain = 0;
        int sum = 0;
        while (minGain < 255 && sum < numFrames/20) {
            sum += gainHist[(int)minGain];
            minGain++;
        }

        // Re-calibrate the max to be 99%
        sum = 0;
        while (maxGain > 2 && sum < numFrames/100) {
            sum += gainHist[(int)maxGain];
            maxGain--;
        }

        // Compute the heights: normalize into [0, 1] and square for contrast.
        double[] heights = new double[numFrames];
        double range = maxGain - minGain;
        for (int i = 0; i < numFrames; i++) {
            double value = (smoothedGains[i] * scaleFactor - minGain)/range;
            if (value < 0.0)
                value = 0.0;
            if (value > 1.0)
                value = 1.0;
            heights[i] = value * value;
        }

        mNumZoomLevels = 5;
        mLenByZoomLevel = new int[5];
        mZoomFactorByZoomLevel = new double[5];
        mValuesByZoomLevel = new double[5][];

        // Level 0 is doubled, with interpolated values
        mLenByZoomLevel[0] = numFrames * 2;
        mZoomFactorByZoomLevel[0] = 2.0;
        mValuesByZoomLevel[0] = new double[mLenByZoomLevel[0]];
        if (numFrames > 0) {
            mValuesByZoomLevel[0][0] = 0.5 * heights[0];
            mValuesByZoomLevel[0][1] = heights[0];
        }
        for (int i = 1; i < numFrames; i++) {
            mValuesByZoomLevel[0][2 * i] = 0.5 * (heights[i - 1] + heights[i]);
            mValuesByZoomLevel[0][2 * i + 1] = heights[i];
        }

        // Level 1 is normal
        mLenByZoomLevel[1] = numFrames;
        mValuesByZoomLevel[1] = new double[mLenByZoomLevel[1]];
        mZoomFactorByZoomLevel[1] = 1.0;
        for (int i = 0; i < mLenByZoomLevel[1]; i++) {
            mValuesByZoomLevel[1][i] = heights[i];
        }

        // 3 more levels are each halved
        for (int j = 2; j < 5; j++) {
            mLenByZoomLevel[j] = mLenByZoomLevel[j - 1]/2;
            mValuesByZoomLevel[j] = new double[mLenByZoomLevel[j]];
            mZoomFactorByZoomLevel[j] = mZoomFactorByZoomLevel[j - 1]/2.0;
            for (int i = 0; i < mLenByZoomLevel[j]; i++) {
                mValuesByZoomLevel[j][i] =
                    0.5 * (mValuesByZoomLevel[j - 1][2 * i] +
                        mValuesByZoomLevel[j - 1][2 * i + 1]);
            }
        }

        // Pick an initial zoom level appropriate for the file length.
        if (numFrames > 5000) {
            mZoomLevel = 3;
        } else if (numFrames > 1000) {
            mZoomLevel = 2;
        } else if (numFrames > 300) {
            mZoomLevel = 1;
        } else {
            mZoomLevel = 0;
        }

        mInitialized = true;
    }

    public boolean canZoomIn() {
        return (mZoomLevel > 0);
    }

    /** Zooms in one level, keeping the visible center roughly stable. */
    public void zoomIn() {
        if (canZoomIn()) {
            mZoomLevel--;
            mSelectionStart *= 2;
            mSelectionEnd *= 2;
            mHeightsAtThisZoomLevel = null;
            int offsetCenter = mOffset + getMeasuredWidth()/2;
            offsetCenter *= 2;
            mOffset = offsetCenter - getMeasuredWidth()/2;
            if (mOffset < 0)
                mOffset = 0;
            invalidate();
        }
    }

    public boolean canZoomOut() {
        return (mZoomLevel < mNumZoomLevels - 1);
    }

    /** Zooms out one level, keeping the visible center roughly stable. */
    public void zoomOut() {
        if (canZoomOut()) {
            mZoomLevel++;
            mSelectionStart /= 2;
            mSelectionEnd /= 2;
            int offsetCenter = mOffset + getMeasuredWidth()/2;
            offsetCenter /= 2;
            mOffset = offsetCenter - getMeasuredWidth()/2;
            if (mOffset < 0)
                mOffset = 0;
            mHeightsAtThisZoomLevel = null;
            invalidate();
        }
    }

    /** Converts a horizontal pixel distance to seconds at the current zoom level. */
    public double pixelsToSeconds(int pixels) {
        double z = mZoomFactorByZoomLevel[mZoomLevel];
        return (pixels * (double)mSamplesPerFrame/(mSampleRate * z));
    }

    public void setListener(WaveformListener listener) {
        mListener = listener;
    }

    /** Re-scales text/heights for a new screen density and triggers a redraw. */
    public void recomputeHeights(float density) {
        mHeightsAtThisZoomLevel = null;
        mDensity = density;
        mTimecodePaint.setTextSize((int)(12 * density));

        invalidate();
    }

    /** Draws one vertical waveform bar at column x, from y0 to y1. */
    protected void drawWaveformLine(Canvas canvas,
                                    int x, int y0, int y1,
                                    Paint paint) {
        canvas.drawLine(x, y0, x, y1, paint);
    }

    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);
        if (mSoundFile == null)
            return;

        if (mHeightsAtThisZoomLevel == null)
            computeIntsForThisZoomLevel();
        DisplayMetrics displaymetrics = getContext().getResources().getDisplayMetrics();
        int height = displaymetrics.heightPixels;
        int widths = displaymetrics.widthPixels;
        // Draw waveform
        int measuredWidth = getMeasuredWidth();
        int measuredHeight = getMeasuredHeight();
        int start = mOffset;
        int width = mHeightsAtThisZoomLevel.length - start;
        int ctr = measuredHeight/2;

        Log.e("wid", String.valueOf(width));
        Log.e("widCal", String.valueOf(mHeightsAtThisZoomLevel.length));
        Log.e("widstart", String.valueOf(start));

        if (width > measuredWidth)
            width = measuredWidth;

        Log.e("measured", String.valueOf(measuredWidth));

        // BUGFIX: when the offset is at or past the end of the data, width is
        // <= 0 and the (widths/width) division below would throw
        // ArithmeticException. Skip drawing but still notify the listener.
        if (width > 0) {
            // Draw grid: one vertical line per second boundary (or every five
            // seconds when a pixel covers more than 1/50 s).
            double onePixelInSecs = pixelsToSeconds(1);
            boolean onlyEveryFiveSecs = (onePixelInSecs > 1.0/50.0);
            double fractionalSecs = mOffset * onePixelInSecs;
            int integerSecs = (int) fractionalSecs;
            int i = 0;
            while (i < width) {
                i++;
                fractionalSecs += onePixelInSecs;
                int integerSecsNew = (int) fractionalSecs;
                if (integerSecsNew != integerSecs) {
                    integerSecs = integerSecsNew;
                    if (!onlyEveryFiveSecs || 0 == (integerSecs % 5)) {
                        canvas.drawLine(i, 0, i, measuredHeight, mGridPaint);
                    }
                }
            }

            // Draw waveform, one vertical bar per column, centered on ctr.
            // NOTE(review): (widths/width) is integer division against the
            // SCREEN width, which stretches/quantizes the bar spacing; confirm
            // this is intentional rather than using the view's own width.
            for (i = 0; i < width; i++) {
                Paint paint;

                if (i + start >= mSelectionStart &&
                    i + start < mSelectionEnd) {
                    paint = mSelectedLinePaint;
                    // paint.setColor(color);
                } else {
                    drawWaveformLine(canvas, ((widths/width)*i), 0, measuredHeight,
                        mUnselectedBkgndLinePaint);
                    paint = mUnselectedLinePaint;
                }
                drawWaveformLine(
                    canvas, ((widths/width)*i),
                    ctr - mHeightsAtThisZoomLevel[start + i],
                    ctr + 1 + mHeightsAtThisZoomLevel[start + i],
                    paint);

                // Playback indicator: a full-height line at the playback column.
                if (i + start == mPlaybackPos) {
                    canvas.drawLine(i, 0, i, measuredHeight, mPlaybackLinePaint);
                }
            }
        }

        if (mListener != null) {
            mListener.waveformDraw();
        }
    }

    /** Converts the current zoom level's normalized values into pixel heights. */
    private void computeIntsForThisZoomLevel() {
        int halfHeight = (getMeasuredHeight()/2) - 1;
        mHeightsAtThisZoomLevel = new int[mLenByZoomLevel[mZoomLevel]];
        for (int i = 0; i < mLenByZoomLevel[mZoomLevel]; i++) {
            mHeightsAtThisZoomLevel[i] =
                (int)(mValuesByZoomLevel[mZoomLevel][i] * halfHeight);
        }
    }

}

MainActivity.class

public class MainActivity extends AppCompatActivity implements WaveformView.WaveformListener { 


    WaveformView mWaveformView; 
    SoundFile mSoundFile; 
    private float mDensity; 
    private File mFile; 
    private String mFilename; 
    private long mLoadingLastUpdateTime; 
    boolean mLoadingKeepGoing; 
    boolean mFinishActivity; 
    private ProgressDialog mProgressDialog; 
    String mTitle,mArtist; 
    private Thread mLoadSoundFileThread; 
    private Thread mRecordAudioThread; 
    private Thread mSaveSoundFileThread; 

    private boolean mIsPlaying; 
    private SamplePlayer mPlayer; 
    private String mInfoContent; 
    private int mWidth; 
    private int mMaxPos; 
    private int mStartPos; 
    private int mEndPos; 
    private boolean mStartVisible; 
    private boolean mEndVisible; 
    private int mLastDisplayedStartPos; 
    private int mLastDisplayedEndPos; 
    private int mOffset; 
    private int mOffsetGoal; 
    private int mFlingVelocity; 
    private int mPlayStartMsec; 
    private int mPlayEndMsec; 
    private Handler mHandler; 

    Button pla; 
    MediaPlayer mediaPlayer; 
    boolean ismIsPlaying; 


    @Override 
    protected void onCreate(Bundle savedInstanceState) { 
     super.onCreate(savedInstanceState); 
     setContentView(R.layout.activity_main); 

     pla = (Button)findViewById(R.id.play); 
     mWaveformView = (WaveformView)findViewById(R.id.waveform); 
     mWaveformView.setListener(this); 
     mHandler = new Handler(); 

     Uri uri = Uri.parse("/sdcard/audio_file.mp3"); 

     mediaPlayer = new MediaPlayer(); 
     mediaPlayer = MediaPlayer.create(getApplicationContext(),uri); 

     loadGui(); 
     loadFromFile(); 

    } 


    /** 
    * Called from both onCreate and onConfigurationChanged 
    * (if the user switched layouts) 
    */ 
    private void loadGui() { 
     // Inflate our UI from its XML layout description. 
     setContentView(R.layout.activity_main); 

     DisplayMetrics metrics = new DisplayMetrics(); 
     getWindowManager().getDefaultDisplay().getMetrics(metrics); 
     mDensity = metrics.density; 



     mWaveformView = (WaveformView)findViewById(R.id.waveform); 
     mWaveformView.setListener(this); 



     if (mSoundFile != null && !mWaveformView.hasSoundFile()) { 
      mWaveformView.setSoundFile(mSoundFile); 
      mWaveformView.recomputeHeights(mDensity); 

     } 

    } 

    private void loadFromFile() { 

     mFilename = "/sdcard/audio_file.mp3"; 

     mFile = new File(mFilename); 

     SongMetadataReader metadataReader = new SongMetadataReader(
       this, mFilename); 
     mTitle = metadataReader.mTitle; 
     mArtist = metadataReader.mArtist; 

     String titleLabel = mTitle; 
     if (mArtist != null && mArtist.length() > 0) { 
      titleLabel += " - " + mArtist; 
     } 
     setTitle(titleLabel); 

     mLoadingLastUpdateTime = getCurrentTime(); 
     mLoadingKeepGoing = true; 
     mFinishActivity = false; 
     mProgressDialog = new ProgressDialog(MainActivity.this); 
     mProgressDialog.setProgressStyle(ProgressDialog.STYLE_HORIZONTAL); 
     mProgressDialog.setTitle("Loading..."); 
     mProgressDialog.setCancelable(true); 
     mProgressDialog.setOnCancelListener(
       new DialogInterface.OnCancelListener() { 
        public void onCancel(DialogInterface dialog) { 
         mLoadingKeepGoing = false; 
         mFinishActivity = true; 
        } 
       }); 
     mProgressDialog.show(); 

     final SoundFile.ProgressListener listener = 
       new SoundFile.ProgressListener() { 
        public boolean reportProgress(double fractionComplete) { 
         long now = getCurrentTime(); 
         if (now - mLoadingLastUpdateTime > 100) { 
          mProgressDialog.setProgress(
            (int) (mProgressDialog.getMax() * fractionComplete)); 
          mLoadingLastUpdateTime = now; 
         } 
         return mLoadingKeepGoing; 
        } 
       }; 

     // Load the sound file in a background thread 
     mLoadSoundFileThread = new Thread() { 
      public void run() { 
       try { 
        mSoundFile = SoundFile.create(mFile.getAbsolutePath(), listener); 

        if (mSoundFile == null) { 
         mProgressDialog.dismiss(); 
         String name = mFile.getName().toLowerCase(); 
         String[] components = name.split("\\."); 
         String err; 
         if (components.length < 2) { 
          err = getResources().getString(
            R.string.no_extension_error); 
         } else { 
          err = getResources().getString(
            R.string.bad_extension_error) + " " + 
            components[components.length - 1]; 
         } 
         final String finalErr = err; 
         Runnable runnable = new Runnable() { 
          public void run() { 
           showFinalAlert(new Exception(), finalErr); 
          } 
         }; 
         mHandler.post(runnable); 
         return; 
        } 
        mPlayer = new SamplePlayer(mSoundFile); 
       } catch (final Exception e) { 
        mProgressDialog.dismiss(); 
        e.printStackTrace(); 
        mInfoContent = e.toString(); 
        runOnUiThread(new Runnable() { 
         public void run() { 

         } 
        }); 

        Runnable runnable = new Runnable() { 
         public void run() { 
          showFinalAlert(e, getResources().getText(R.string.read_error)); 
         } 
        }; 
        mHandler.post(runnable); 
        return; 
       } 
       mProgressDialog.dismiss(); 
       if (mLoadingKeepGoing) { 
        Runnable runnable = new Runnable() { 
         public void run() { 
          finishOpeningSoundFile(); 

          } 
        }; 
        mHandler.post(runnable); 
       } else if (mFinishActivity){ 
        MainActivity.this.finish(); 
       } 
      } 
     }; 
     mLoadSoundFileThread.start(); 

    } 

    private void finishOpeningSoundFile() { 
     mWaveformView.setSoundFile(mSoundFile); 
     mWaveformView.recomputeHeights(mDensity); 

     Log.e("sound file",mFilename); 
     Log.e("sound", String.valueOf(mSoundFile)); 


    } 



    /** 
    * Show a "final" alert dialog that will exit the activity 
    * after the user clicks on the OK button. If an exception 
    * is passed, it's assumed to be an error condition, and the 
    * dialog is presented as an error, and the stack trace is 
    * logged. If there's no exception, it's a success message. 
    */ 
    private void showFinalAlert(Exception e, CharSequence message) { 
     CharSequence title; 
     if (e != null) { 
      Log.e("Ringdroid", "Error: " + message); 
      Log.e("Ringdroid", getStackTrace(e)); 
      title = getResources().getText(R.string.alert_title_failure); 
      setResult(RESULT_CANCELED, new Intent()); 
     } else { 
      Log.v("Ringdroid", "Success: " + message); 
      title = getResources().getText(R.string.alert_title_success); 
     } 

     new AlertDialog.Builder(MainActivity.this) 
       .setTitle(title) 
       .setMessage(message) 
       .setPositiveButton(
         R.string.alert_ok_button, 
         new DialogInterface.OnClickListener() { 
          public void onClick(DialogInterface dialog, 
               int whichButton) { 
           finish(); 
          } 
         }) 
       .setCancelable(false) 
       .show(); 
    } 

    private void showFinalAlert(Exception e, int messageResourceId) { 
     showFinalAlert(e, getResources().getText(messageResourceId)); 
    } 

    @Override 
    public void waveformTouchStart(float x) { 

    } 

    @Override 
    public void waveformTouchMove(float x) { 

    } 

    @Override 
    public void waveformTouchEnd() { 

    } 

    @Override 
    public void waveformFling(float x) { 

    } 

    @Override 
    public void waveformDraw() { 
     mWidth = mWaveformView.getMeasuredWidth(); 
     if (mOffsetGoal != mOffset) { 
      // updateDisplay(); 
     } 
     else if (mIsPlaying) { 
     // updateDisplay(); 
     } else if (mFlingVelocity != 0) { 
     //  updateDisplay(); 
     } 
    } 

    private long getCurrentTime() { 
     return System.nanoTime()/1000000; 
    } 

    private String getStackTrace(Exception e) { 
     StringWriter writer = new StringWriter(); 
     e.printStackTrace(new PrintWriter(writer)); 
     return writer.toString(); 
    } 


    public void buttonClick(View view) { 

     Toast.makeText(MainActivity.this, "test", Toast.LENGTH_SHORT).show(); 
     mediaPlayer.start(); 
     ismIsPlaying = true; 


    } 
} 
+0

「現在どう見えているか」「どう見えるべきか」が分かるスクリーンショットを用意するか、期待どおりに動かない挙動や、変更したい点を説明してください。ありがとうございました。 –

+0

@MichaelSpitsin更新をご確認ください。 –

+0

新しい Paint オブジェクトを赤色で作成します。あなたの onDraw() の中で、すでに再生された波形の領域にある線に対して、この新しい Paint を使用してください。 – Chris623

答えて

0

// Highlights the already-played portion of the waveform: every bar whose
// column (i + start) is at or before the current playback position is
// redrawn in red on top of the normal bar.
// NOTE(review): allocating a new Paint per bar inside onDraw() is costly;
// as the answer itself suggests, hoist a single red Paint to a field that is
// initialized once.
if (i + start <= mPlaybackPos) { 
    Paint mPaint = new Paint(paint); 
    mPaint.setColor(Color.RED); 
    // Same geometry as the normal bar: centered vertically, height taken
    // from mHeightsAtThisZoomLevel.
    drawWaveformLine(
      canvas, ((widths/width)*i), 
      ctr - mHeightsAtThisZoomLevel[start + i], 
      ctr + 1 + mHeightsAtThisZoomLevel[start + i], 
      mPaint); 
} 

このコードを、次の行の直前に追加してください:

if (i + start == mPlaybackPos) { 

そして、これが動作することを確認したら、Paint オブジェクトの生成を onDraw() メソッドの外に移すことを検討してください。

+0

これで何も起こらなかった –

+0

もう一度やり直してみてください – Chris623

+0

私はそれが働いていると思いますが、再生を設定することができません –

関連する問題