
Android Audio Editing (Trimming and Merging) (1)

The company has recently been building its own project involving audio/video editing and image editing. I searched around online and, honestly, it's a headache: most solutions use FFmpeg to encode and decode before doing the actual operations. There are also people abroad who have packaged this up as jar libraries; a quick search turns up plenty, so I won't go into that here. For format conversion, trimming and so on with FFmpeg, you can also search GitHub for pre-compiled Android demos (most of them drive FFmpeg through its command line).

I had originally hoped to get a bit more time from the company to study this area properly, but since ready-made code already exists, I pulled it in and adapted it. (I have always disliked just grabbing other people's work, but apparently I've fallen into that trap myself; there simply wasn't enough time.)

Audio Recording

The result looks like this:
(screenshot)

The audio editor has two parts: 1. recording; 2. audio editing.

The recording screen looks like this (pretty flashy, right? haha):
(It is based on a demo I found on GitHub; it took quite a while of modification to get it to this point.)

Main features:

(1) Audio recording
(2) Adding markers while recording
(3) Pausing and resuming a recording
(4) Finishing a recording (converting the PCM data to WAV)

Below is a short walk-through of how the screen is put together.
Note: the original source is not mine either; I only modified and extended it (please be kind).

The canvas class:

package com.jwzt.jwzt_procaibian.widget;

import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Rect;
import android.util.AttributeSet;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

/**
 * A thin wrapper that initializes the SurfaceView used as the drawing canvas.
 * @author tcx
 */
public class WaveSurfaceView extends SurfaceView implements SurfaceHolder.Callback {
    private SurfaceHolder holder;
    private int line_off;// distance reserved at the top and bottom

    public int getLine_off() {
        return line_off;
    }

    public void setLine_off(int line_off) {
        this.line_off = line_off;
    }

    public WaveSurfaceView(Context context, AttributeSet attrs) {
        super(context, attrs);
        this.holder = getHolder();
        holder.addCallback(this);
    }

    /**
     * Draw the initial (empty) waveform background.
     * @author tcx
     */
    public void initSurfaceView(final SurfaceView sfv) {
        new Thread() {
            public void run() {
                Canvas canvas = sfv.getHolder().lockCanvas(
                        new Rect(0, 0, sfv.getWidth(), sfv.getHeight()));// key step: lock the canvas
                if (canvas == null) {
                    return;
                }
                canvas.drawARGB(255, 42, 53, 82);// clear the background

                int height = sfv.getHeight() - line_off;
                Paint paintLine = new Paint();
                Paint centerLine = new Paint();
                Paint circlePaint = new Paint();
                circlePaint.setColor(Color.rgb(246, 131, 126));
                paintLine.setColor(Color.rgb(255, 255, 255));
                paintLine.setStrokeWidth(2);
                circlePaint.setAntiAlias(true);
                canvas.drawLine(sfv.getWidth() / 2, 0, sfv.getWidth() / 2, sfv.getHeight(), circlePaint);// vertical cursor line
                centerLine.setColor(Color.rgb(39, 199, 175));
                canvas.drawLine(0, line_off / 2, sfv.getWidth(), line_off / 2, paintLine);// top border line
                canvas.drawLine(0, sfv.getHeight() - line_off / 2 - 1, sfv.getWidth(), sfv.getHeight() - line_off / 2 - 1, paintLine);// bottom border line
                canvas.drawLine(0, height * 0.5f + line_off / 2, sfv.getWidth(), height * 0.5f + line_off / 2, centerLine);// center line
                sfv.getHolder().unlockCanvasAndPost(canvas);// unlock the canvas and post the drawing
            }
        }.start();
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        initSurfaceView(this);
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
    }
}

The class above is quite simple: it just turns a SurfaceView into a drawing canvas.

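Since line_off is only ever set from outside the view, here is a minimal sketch of how the view might be wired up from the recording screen before recording starts; the Activity, layout id and margin value are assumptions for illustration, not code from the original project.

```java
// Hypothetical wiring sketch; RecordActivity, R.layout.activity_record and R.id.wavesfv are assumed names.
public class RecordActivity extends android.app.Activity {
    private WaveSurfaceView waveSfv;

    @Override
    protected void onCreate(android.os.Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_record);              // layout containing a <WaveSurfaceView/> element
        waveSfv = (WaveSurfaceView) findViewById(R.id.wavesfv);
        waveSfv.setLine_off(42);                                // top/bottom margin in pixels; the value here is arbitrary
    }
}
```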
Next we start doing things on that canvas.
First, initialize the recorder:

/**
 * Initialize the recorder.
 */
private void initAudio(){
    recBufSize = AudioRecord.getMinBufferSize(FREQUENCY,
            CHANNELCONGIFIGURATION, AUDIOENCODING);// recording buffer size (roughly 20 ms, 1280)
    audioRecord = new AudioRecord(AUDIO_SOURCE,// audio source: the microphone
            FREQUENCY,              // 16000 Hz sample rate
            CHANNELCONGIFIGURATION, // recording channel configuration
            AUDIOENCODING,          // recording encoding format (the original snippet passed AUDIO_SOURCE here by mistake)
            recBufSize);
    waveCanvas = new WaveCanvas();// see below
    waveCanvas.baseLine = waveSfv.getHeight() / 2;
    waveCanvas.Start(audioRecord, recBufSize, waveSfv, mFileName, U.DATA_DIRECTORY, new Handler.Callback() {
        @Override
        public boolean handleMessage(Message msg) {
            return true;
        }
    }, (swidth - DensityUtil.dip2px(10)) / 2, this);
}

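The constants referenced above (FREQUENCY, CHANNELCONGIFIGURATION, AUDIOENCODING, AUDIO_SOURCE) are not shown in the post; a plausible set of definitions, assuming the 16 kHz mono 16-bit microphone setup described in the comments, would be:

```java
// Assumed definitions matching the comments above, not copied from the original project.
// Needs: import android.media.AudioFormat; import android.media.MediaRecorder;
private static final int FREQUENCY = 16000;                                     // 16000 Hz sample rate
private static final int CHANNELCONGIFIGURATION = AudioFormat.CHANNEL_IN_MONO;  // single channel
private static final int AUDIOENCODING = AudioFormat.ENCODING_PCM_16BIT;        // 16-bit PCM samples
private static final int AUDIO_SOURCE = MediaRecorder.AudioSource.MIC;          // record from the microphone
```

The RECORD_AUDIO permission also has to be declared in the manifest, or AudioRecord will fail to initialize.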
Now let's see how the waveform gets drawn.
**Here is the WaveCanvas class:**

package com.jwzt.jwzt_procaibian.widget;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Paint.FontMetricsInt;
import android.graphics.Paint.Style;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.Rect;
import android.media.AudioRecord;
import android.os.AsyncTask;
import android.os.Handler.Callback;
import android.os.Message;
import android.util.Log;
import android.view.SurfaceView;

import com.jwzt.jwzt_procaibian.R;
import com.jwzt.jwzt_procaibian.inter.CurrentPosInterface;
import com.jwzt.jwzt_procaibian.utils.Pcm2Wav;

/**
 * Recording and file writing run on two separate threads so the UI does not stutter.
 * Draws the recording waveform.
 * @author tcx
 *
 */
public class WaveCanvas {

    private ArrayList<Short> inBuf = new ArrayList<Short>();// buffered samples for drawing
    private ArrayList<byte[]> write_data = new ArrayList<byte[]>();// data queued for writing to file
    public boolean isRecording = false;// recording thread control flag
    private boolean isWriting = false;// writing thread control flag

    private int line_off;// distance reserved at the top and bottom
    public int rateX = 30;// keep one sample out of every rateX
    public int rateY = 1;// Y-axis scale-down factor, 1 by default
    public int baseLine = 0;// Y-axis baseline
    private AudioRecord audioRecord;
    int recBufSize;
    private int marginRight = 30;// gap between the waveform and the right edge
    private int draw_time = 1000 / 200;// minimum time between two draws (ms)
    private float divider = 0.1f;// to save drawing time, plot one sample every 0.1 pixels
    long c_time;
    private String savePcmPath;// path of the saved pcm file
    private String saveWavPath;// path of the saved wav file
    private Paint circlePaint;
    private Paint center;
    private Paint paintLine;
    private Paint mPaint;
    private Context mContext;
    private ArrayList<Float> markList = new ArrayList<Float>();
    private int readsize;

    private Map<Integer, Integer> markMap = new HashMap<Integer, Integer>();
    private boolean isPause = false;
    private CurrentPosInterface mCurrentPosInterface;
    private Paint progressPaint;
    private Paint paint;
    private Paint bottomHalfPaint;
    private Paint darkPaint;
    private Paint markTextPaint;
    private Bitmap markIcon;
    private int bitWidth;
    private int bitHeight;
    private int start;

    /**
     * Start recording.
     * @param audioRecord
     * @param recBufSize
     * @param sfv
     * @param audioName
     */
    public void Start(AudioRecord audioRecord, int recBufSize, SurfaceView sfv
            , String audioName, String path, Callback callback, int width, Context context) {
        this.audioRecord = audioRecord;
        isRecording = true;
        isWriting = true;
        this.recBufSize = recBufSize;
        savePcmPath = path + audioName + ".pcm";
        saveWavPath = path + audioName + ".wav";
        this.mContext = context;
        init();
        new Thread(new WriteRunnable()).start();// write the file on its own thread
        new RecordTask(audioRecord, recBufSize, sfv, mPaint, callback).execute();
        this.marginRight = width;
    }

    public void init() {
        circlePaint = new Paint();// circle paint
        circlePaint.setColor(Color.rgb(246, 131, 126));// color of the top circle
        center = new Paint();
        center.setColor(Color.rgb(39, 199, 175));// center-line color
        center.setStrokeWidth(1);// stroke width
        center.setAntiAlias(true);
        center.setFilterBitmap(true);
        center.setStyle(Style.FILL);
        paintLine = new Paint();
        paintLine.setColor(Color.rgb(255, 255, 255));
        paintLine.setStrokeWidth(2);// stroke width

        mPaint = new Paint();
        mPaint.setColor(Color.rgb(39, 199, 175));// waveform color
        mPaint.setStrokeWidth(1);// stroke width
        mPaint.setAntiAlias(true);
        mPaint.setFilterBitmap(true);
        mPaint.setStyle(Paint.Style.FILL);

        // paints for the marker part
        progressPaint = new Paint();
        progressPaint.setColor(mContext.getResources().getColor(R.color.vine_green));
        paint = new Paint();
        bottomHalfPaint = new Paint();
        darkPaint = new Paint();
        darkPaint.setColor(mContext.getResources().getColor(R.color.dark_black));
        bottomHalfPaint.setColor(mContext.getResources().getColor(R.color.hui));
        markTextPaint = new Paint();
        markTextPaint.setColor(mContext.getResources().getColor(R.color.hui));
        markTextPaint.setTextSize(18);
        paint.setAntiAlias(true);
        paint.setDither(true);
        paint.setFilterBitmap(true);
        markIcon = ((BitmapDrawable) mContext.getResources().getDrawable(R.drawable.edit_mark)).getBitmap();
        bitWidth = markIcon.getWidth();
        bitHeight = markIcon.getHeight();
    }




    /**
     * Stop recording.
     */
    public void Stop() {
        isRecording = false;
        isPause = true;
        audioRecord.stop();
    }

    /**
     * Pause recording audio.
     */
    public void pause() {
        isPause = true;
    }

    /**
     * Resume recording audio.
     */
    public void reStart() {
        isPause = false;
    }

    /**
     * Clear the buffered data.
     */
    public void clear() {
        inBuf.clear();// clear the drawing buffer
    }

    /**
     * Asynchronous recording task.
     * @author cokus
     *
     */
    class RecordTask extends AsyncTask<Object, Object, Object> {
        private int recBufSize;
        private AudioRecord audioRecord;
        private SurfaceView sfv;// drawing surface
        private Paint mPaint;// paint
        private Callback callback;
        private boolean isStart = false;
        private Rect srcRect;
        private Rect destRect;
        private Rect bottomHalfBgRect;

        public RecordTask(AudioRecord audioRecord, int recBufSize,
                SurfaceView sfv, Paint mPaint, Callback callback) {
            this.audioRecord = audioRecord;
            this.recBufSize = recBufSize;
            this.sfv = sfv;
            line_off = ((WaveSurfaceView) sfv).getLine_off();
            this.mPaint = mPaint;
            this.callback = callback;
            inBuf.clear();// clear any leftover samples
        }

        @Override
        protected Object doInBackground(Object... params) {
            try {
                short[] buffer = new short[recBufSize];
                audioRecord.startRecording();// start capturing
                while (isRecording) {

                    while (!isPause) {
                        // read data from the mic into the buffer
                        readsize = audioRecord.read(buffer, 0,
                                recBufSize);
                        synchronized (inBuf) {
                            for (int i = 0; i < readsize; i += rateX) {
                                inBuf.add(buffer[i]);
                            }
                        }
                        publishProgress();// update the UI on the main thread
                        if (AudioRecord.ERROR_INVALID_OPERATION != readsize) {
                            synchronized (write_data) {
                                byte bys[] = new byte[readsize * 2];
                                // write each 16-bit sample as two bytes, low byte first
                                for (int i = 0; i < readsize; i++) {
                                    byte ss[] = getBytes(buffer[i]);
                                    bys[i * 2] = ss[0];
                                    bys[i * 2 + 1] = ss[1];
                                }
                                write_data.add(bys);
                            }
                        }
                    }
                }
                isWriting = false;

            } catch (Throwable t) {
                Message msg = new Message();
                msg.arg1 = -2;
                msg.obj = t.getMessage();
                callback.handleMessage(msg);
            }
            return null;
        }

        @Override
        protected void onProgressUpdate(Object... values) {
            long time = new Date().getTime();
            if (time - c_time >= draw_time) {
                ArrayList<Short> buf = new ArrayList<Short>();
                synchronized (inBuf) {
                    if (inBuf.size() == 0)
                        return;
                    while (inBuf.size() > (sfv.getWidth() - marginRight) / divider) {
                        inBuf.remove(0);
                    }
                    buf = (ArrayList<Short>) inBuf.clone();// copy the buffer
                }
                SimpleDraw(buf, sfv.getHeight() / 2);// draw the buffered samples
                c_time = new Date().getTime();
            }
            super.onProgressUpdate(values);
        }


        /** Split a short into two bytes, low byte first (little-endian). */
        public byte[] getBytes(short s) {
            byte[] buf = new byte[2];
            for (int i = 0; i < buf.length; i++) {
                buf[i] = (byte) (s & 0x00ff);
                s >>= 8;
            }
            return buf;
        }







        /**
         * Draw the waveform area.
         *
         * @param buf
         *            sample buffer
         * @param baseLine
         *            Y-axis baseline
         */
        void SimpleDraw(ArrayList<Short> buf, int baseLine) {
            if (!isRecording)
                return;
            rateY = (65535 / 2 / (sfv.getHeight() - line_off));

            for (int i = 0; i < buf.size(); i++) {
                byte bus[] = getBytes(buf.get(i));
                buf.set(i, (short) ((0x0000 | bus[1]) << 8 | bus[0]));// swap high and low bytes
            }
            Canvas canvas = sfv.getHolder().lockCanvas(
                    new Rect(0, 0, sfv.getWidth(), sfv.getHeight()));// key step: lock the canvas
            if (canvas == null)
                return;
            canvas.drawARGB(255, 42, 53, 82);// clear the background

            start = (int) ((buf.size()) * divider);
            float py = baseLine;
            float y;

            if (sfv.getWidth() - start <= marginRight) {// past the reserved right margin
                start = sfv.getWidth() - marginRight;// x coordinate to draw at
            }
            //TODO
            canvas.drawLine(marginRight, 0, marginRight, sfv.getHeight(), circlePaint);// vertical cursor line

            int height = sfv.getHeight() - line_off;
            canvas.drawLine(0, line_off / 2, sfv.getWidth(), line_off / 2, paintLine);// top border line

            if (mCurrentPosInterface != null) {// avoid a crash when no listener has been set
                mCurrentPosInterface.onCurrentPosChanged(start);
            }

            canvas.drawLine(0, height * 0.5f + line_off / 2, sfv.getWidth(), height * 0.5f + line_off / 2, center);// center line
            canvas.drawLine(0, sfv.getHeight() - line_off / 2 - 1, sfv.getWidth(), sfv.getHeight() - line_off / 2 - 1, paintLine);// bottom border line

            Map<Integer, Integer> newMarkMap = new HashMap<Integer, Integer>();
            Iterator<Integer> iterator = markMap.keySet().iterator();
            while (iterator.hasNext()) {
                int key = iterator.next();
                int pos = markMap.get(key);

                destRect = new Rect((int) (pos - bitWidth / 4), 0, (int) (pos - bitWidth / 4) + bitWidth / 2, bitHeight / 2);
                canvas.drawBitmap(markIcon, null, destRect, null);
                String text = (key + 1) + "";
                float textWidth = markTextPaint.measureText(text);
                FontMetricsInt fontMetricsInt = markTextPaint.getFontMetricsInt();
                int fontHeight = fontMetricsInt.bottom - fontMetricsInt.top;
                canvas.drawText(text, (pos - textWidth / 2), fontHeight - 8, markTextPaint);
                canvas.drawLine(pos - bitWidth / 16 + 2, bitHeight / 2 - 2, pos - bitWidth / 16 + 2, sfv.getWidth() - bitHeight / 2, bottomHalfPaint);
                newMarkMap.put(key, pos - 3);// shift markers left along with the waveform
            }
            markMap = newMarkMap;

            for (int i = 0; i < buf.size(); i++) {
                y = buf.get(i) / rateY + baseLine;// scale the sample and shift it to the baseline
                float x = (i) * divider;
                if (sfv.getWidth() - (i - 1) * divider <= marginRight) {
                    x = sfv.getWidth() - marginRight;
                }

                canvas.drawLine(x, y, x, sfv.getHeight() - y, mPaint);// draw the waveform mirrored around the center
            }
            sfv.getHolder().unlockCanvasAndPost(canvas);// unlock the canvas and post the drawing
        }
}


    /**
     * Add a marker at the current audio position.
     */
    public void addCurrentPostion() {
        markMap.put(markMap.size(), start);
    }

    /**
     * Clear all marker positions.
     */
    public void clearMarkPosition() {
        markMap.clear();
    }

    /**
     * Register a listener for changes of the current position.
     */
    public void setCurrentPostChangerLisener(CurrentPosInterface currentPosInterface) {
        mCurrentPosInterface = currentPosInterface;
    }

    /**
     * Write the file asynchronously.
     * @author cokus
     *
     */
class WriteRunnable implements Runnable {
    @Override
    public void run() {
        try {
            FileOutputStream fos2wav = null;
            File file2wav = null;
            try {                                       
                file2wav = new File(savePcmPath);
                if (file2wav.exists()) {
                    file2wav.delete();
                }
                fos2wav = new FileOutputStream(file2wav);// create the output file for the raw bytes
            } catch (Exception e) {
                e.printStackTrace();
            }
            while (isWriting || write_data.size() > 0) {

                    byte[] buffer = null;
                    synchronized (write_data) {
                        if(write_data.size() > 0){
                            buffer = write_data.get(0);
                            write_data.remove(0);
                        }
                    }
                    try {
                        if(buffer != null){
                            fos2wav.write(buffer);
                            fos2wav.flush();
                        }
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
            }
            fos2wav.close();
            Pcm2Wav p2w = new Pcm2Wav();// convert the pcm file to wav (really just prepends a 44-byte header)
            p2w.convertAudioFiles(savePcmPath, saveWavPath);
        } catch (Throwable t) {
        }
    }

}
}

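To tie the pieces together, here is a minimal sketch of how the recording screen might drive WaveCanvas from its button handlers; the WaveCanvas calls are the ones defined above, while the handler names are made up for illustration. Note that setCurrentPostChangerLisener(...) should be hooked up before Start(...), since SimpleDraw reports the current draw position through that listener.

```java
// Hypothetical button handlers; only the waveCanvas calls come from the class above.
public void onPauseClicked() {
    waveCanvas.pause();             // stop consuming samples but keep the recorder ready
}

public void onResumeClicked() {
    waveCanvas.reStart();           // continue recording after a pause
}

public void onMarkClicked() {
    waveCanvas.addCurrentPostion(); // drop a numbered marker at the current draw position
}

public void onStopClicked() {
    waveCanvas.Stop();              // stops AudioRecord; the write thread then drains and runs Pcm2Wav
    waveCanvas.clear();
    waveCanvas.clearMarkPosition();
}
```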
OK! That wraps up the audio recording feature.
Oh, I almost forgot: there is also a class that converts PCM to WAV. Here it is:

public void convertAudioFiles(String src, String target) throws Exception
{
    FileInputStream fis = new FileInputStream(src);
    FileOutputStream fos = new FileOutputStream(target);

    byte[] buf = new byte[1024 * 1000];
    int size = fis.read(buf);
    int PCMSize = 0;
    while (size != -1)
    {
        PCMSize += size;
        size = fis.read(buf);
    }
    fis.close();


    WaveHeader header = new WaveHeader();
    header.fileLength = PCMSize + (44 - 8);
    header.FmtHdrLeth = 16;
    header.BitsPerSample = 16;
    header.Channels = 1;
    header.FormatTag = 0x0001;
    header.SamplesPerSec = 16000;
    header.BlockAlign = (short) (header.Channels * header.BitsPerSample / 8);
    header.AvgBytesPerSec = header.BlockAlign * header.SamplesPerSec;
    header.DataHdrLeth = PCMSize;

    byte[] h = header.getHeader();

    assert h.length == 44;
    //write header
    fos.write(h, 0, h.length);
    //write data stream
    fis = new FileInputStream(src);
    size = fis.read(buf);
    while (size != -1)
    {
        fos.write(buf, 0, size);
        size = fis.read(buf);
    }
    fis.close();
    fos.close();
}
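
For the parameters used here (16000 Hz, mono, 16 bits per sample) this gives BlockAlign = 1 * 16 / 8 = 2 bytes and AvgBytesPerSec = 2 * 16000 = 32000 bytes per second, so each second of recording contributes 32000 bytes of PCM data, and fileLength ends up as PCMSize + 36.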

And here is the class for the corresponding 44-byte header:

package com.jwzt.jwzt_procaibian.utils;

import java.io.ByteArrayOutputStream;
import java.io.IOException;

public class WaveHeader
{
public final char fileID[] = { 'R', 'I', 'F', 'F' };

public int fileLength;

public char wavTag[] = { 'W', 'A', 'V', 'E' };

public char FmtHdrID[] = { 'f', 'm', 't', ' ' };

public int FmtHdrLeth;

public short FormatTag;

public short Channels;

public int SamplesPerSec;

public int AvgBytesPerSec;

public short BlockAlign;

public short BitsPerSample;

public char DataHdrID[] = { 'd', 'a', 't', 'a' };

public int DataHdrLeth;

public byte[] getHeader() throws IOException
{
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    WriteChar(bos, fileID);
    WriteInt(bos, fileLength);
    WriteChar(bos, wavTag);
    WriteChar(bos, FmtHdrID);
    WriteInt(bos, FmtHdrLeth);
    WriteShort(bos, FormatTag);
    WriteShort(bos, Channels);
    WriteInt(bos, SamplesPerSec);
    WriteInt(bos, AvgBytesPerSec);
    WriteShort(bos, BlockAlign);
    WriteShort(bos, BitsPerSample);
    WriteChar(bos, DataHdrID);
    WriteInt(bos, DataHdrLeth);
    bos.flush();
    byte[] r = bos.toByteArray();
    bos.close();
    return r;
}

private void WriteShort(ByteArrayOutputStream bos, int s)
        throws IOException
{
    byte[] mybyte = new byte[2];
    mybyte[1] = (byte) ((s << 16) >> 24);
    mybyte[0] = (byte) ((s << 24) >> 24);
    bos.write(mybyte);
}

private void WriteInt(ByteArrayOutputStream bos, int n) throws IOException
{
    byte[] buf = new byte[4];
    buf[3] = (byte) (n >> 24);
    buf[2] = (byte) ((n << 8) >> 24);
    buf[1] = (byte) ((n << 16) >> 24);
    buf[0] = (byte) ((n << 24) >> 24);
    bos.write(buf);
}

private void WriteChar(ByteArrayOutputStream bos, char[] id)
{
    for (int i = 0; i < id.length; i++)
    {
        char c = id[i];
        bos.write(c);
    }
}

}

That about covers the recording side.
Converting PCM to WAV really just means prepending 44 header bytes. When merging clips, we have to patch the header fields that record the file and data lengths, and can then hand the result to FFmpeg for any further format conversion. Trimming is a bit more involved: we have to work in frames. As the comments noted, with mono recording at 16000 Hz a 20 ms frame is roughly 640 bytes, so for a cut we work out the total number of frames in the audio and trim on frame boundaries; the error should stay under 100 ms. A small sketch of that arithmetic follows.
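
As a rough illustration of that frame arithmetic, here is a minimal sketch, assuming the 16000 Hz / mono / 16-bit parameters above; the class and method names are made up, and the header patching mirrors the fields set in convertAudioFiles():

```java
// Hypothetical helper illustrating the cut arithmetic described above.
public class PcmCutMath {

    private static final int SAMPLE_RATE = 16000;  // Hz
    private static final int BYTES_PER_SAMPLE = 2; // 16-bit samples
    private static final int CHANNELS = 1;         // mono
    private static final int FRAME_MS = 20;        // one "frame" as used in this post
    private static final int BYTES_PER_FRAME =
            SAMPLE_RATE * BYTES_PER_SAMPLE * CHANNELS * FRAME_MS / 1000; // = 640 bytes

    /** Byte offset into the raw PCM data for a given time, snapped to a frame boundary. */
    public static long byteOffsetForMs(long ms) {
        return (ms / FRAME_MS) * BYTES_PER_FRAME;  // snapping keeps the error well under 100 ms
    }

    /** Number of PCM bytes between startMs and endMs. */
    public static long cutLengthBytes(long startMs, long endMs) {
        return byteOffsetForMs(endMs) - byteOffsetForMs(startMs);
    }

    public static void main(String[] args) {
        // Cutting from 1.5 s to 4.25 s: copy dataLen bytes starting at offset,
        // then write a fresh 44-byte header with fileLength = dataLen + 36 and DataHdrLeth = dataLen
        // (the same fields convertAudioFiles() fills in).
        long offset = byteOffsetForMs(1500);        // 48000
        long dataLen = cutLengthBytes(1500, 4250);  // 87680
        System.out.println(offset + " / " + dataLen);
    }
}
```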
I won't post the full source for that part; the project is still in development and testing, hasn't been adapted to different devices and screens yet, and certainly still has plenty of bugs to fix. I hope this at least gives those of you working in this area some ideas; a lot of it still needs your own thought and study.

If anything here is wrong, please point it out so we can all improve together.

The next post will cover the UI and implementation of the trimming part.