Part 1: Notes on encoding and decoding with MediaCodec on Android
This article covers how to use the MediaCodec class on Android to hardware-decode a video file, and how to hardware-encode image data stored as byte[] arrays into a video file.

1. How MediaCodec encoding and decoding works
Reference: https://developer.android.com/reference/android/media/MediaCodec.html
The workflow, taking encoding as the example, looks like this. First you initialize the hardware encoder, configuring the format to encode to: the video's width and height, bitrate, frame rate, key-frame interval, and so on. This step is called configure. Then you start the encoder, which puts it into a usable state, ready to accept data at any time. Next comes the running phase of encoding. During this phase two buffer queues are maintained, InputBuffer and OutputBuffer. The client repeatedly dequeues an input buffer (dequeueInputBuffer), fills it with the image data to be encoded, and queues it back for processing. The hardware encoder processes it asynchronously; as soon as processing finishes it puts the result into an output buffer and notifies the client that output is available. The client then dequeues an output buffer, copies the data out, and releases the buffer. Termination is driven by setting the end-of-stream flag. When encoding is done, call the encoder's stop function, then call release to free the encoder completely, which ends the whole flow.
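As a rough sketch of that running loop (not from the original article): the snippet below drives an H.264 encoder through the synchronous API on API 21+. getNextFrame(), computePtsUs(), and writeSample() are hypothetical placeholders for the frame source, the timestamping, and the output sink (for example a MediaMuxer); the imports match the listing in section 2.

MediaFormat format = MediaFormat.createVideoFormat("video/avc", 1280, 720);
format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
        MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);
format.setInteger(MediaFormat.KEY_BIT_RATE, 2000000);
format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);

MediaCodec encoder = MediaCodec.createEncoderByType("video/avc");
encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
encoder.start();

MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
boolean inputDone = false, outputDone = false;
while (!outputDone) {
    if (!inputDone) {
        int inId = encoder.dequeueInputBuffer(10000);
        if (inId >= 0) {
            byte[] frame = getNextFrame(); // hypothetical frame source
            if (frame == null) {
                // no more frames: signal end-of-stream on the input side
                encoder.queueInputBuffer(inId, 0, 0, 0L, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                inputDone = true;
            } else {
                ByteBuffer inputBuffer = encoder.getInputBuffer(inId);
                inputBuffer.put(frame);
                encoder.queueInputBuffer(inId, 0, frame.length, computePtsUs(), 0);
            }
        }
    }
    int outId = encoder.dequeueOutputBuffer(info, 10000);
    if (outId >= 0) {
        ByteBuffer outputBuffer = encoder.getOutputBuffer(outId);
        writeSample(outputBuffer, info); // hypothetical sink, e.g. MediaMuxer.writeSampleData
        encoder.releaseOutputBuffer(outId, false);
        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
            outputDone = true;
        }
    }
}
encoder.stop();
encoder.release();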
2. A sample video-decoding program
The code comes from the post "Android: MediaCodec video file hardware decoding"; all of the code below can be downloaded there.
package com.example.guoheng_iri.helloworld;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.media.Image;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.util.Log;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.LinkedBlockingQueue;
public class VideoDecode {
    private static final String TAG = "VideoToFrames";
    private static final boolean VERBOSE = true;
    private static final long DEFAULT_TIMEOUT_US = 10000;
    private static final int COLOR_FormatI420 = 1;
    private static final int COLOR_FormatNV21 = 2;
    public static final int FILE_TypeI420 = 1;
    public static final int FILE_TypeNV21 = 2;
    public static final int FILE_TypeJPEG = 3;
    private final int decodeColorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible;
    private int outputImageFileType = -1;
    private String OUTPUT_DIR;
    public int ImageWidth = 0;
    public int ImageHeight = 0;
    MediaExtractor extractor = null;
    MediaCodec decoder = null;
    MediaFormat mediaFormat;
    public void setSaveFrames(String dir, int fileType) throws IOException {
        if (fileType != FILE_TypeI420 && fileType != FILE_TypeNV21 && fileType != FILE_TypeJPEG) {
            throw new IllegalArgumentException("only support FILE_TypeI420 " + "and FILE_TypeNV21 " + "and FILE_TypeJPEG");
        }
        outputImageFileType = fileType;
        File theDir = new File(dir);
        if (!theDir.exists()) {
            theDir.mkdirs();
        } else if (!theDir.isDirectory()) {
            throw new IOException("Not a directory");
        }
        OUTPUT_DIR = theDir.getAbsolutePath() + "/";
    }
    public void VideoDecodePrepare(String videoFilePath) {
        extractor = null;
        decoder = null;
        try {
            File videoFile = new File(videoFilePath);
            extractor = new MediaExtractor();
            extractor.setDataSource(videoFile.toString());
            int trackIndex = selectTrack(extractor);
            if (trackIndex < 0) {
                throw new RuntimeException("No video track found in " + videoFilePath);
            }
            extractor.selectTrack(trackIndex);
            mediaFormat = extractor.getTrackFormat(trackIndex);
            String mime = mediaFormat.getString(MediaFormat.KEY_MIME);
            decoder = MediaCodec.createDecoderByType(mime);
            showSupportedColorFormat(decoder.getCodecInfo().getCapabilitiesForType(mime));
            if (isColorFormatSupported(decodeColorFormat, decoder.getCodecInfo().getCapabilitiesForType(mime))) {
                mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, decodeColorFormat);
                Log.i(TAG, "set decode color format to type " + decodeColorFormat);
            } else {
                Log.i(TAG, "unable to set decode color format, color format type " + decodeColorFormat + " not supported");
            }
            decoder.configure(mediaFormat, null, null, 0);
            decoder.start();
        } catch (IOException ioe) {
            throw new RuntimeException("failed to init decoder", ioe);
        }
    }
    public void close() {
        decoder.stop();
        decoder.release();
        if (extractor != null) {
            extractor.release();
            extractor = null;
        }
    }

    public void execute() {
        try {
            decodeFramesToImage(decoder, extractor, mediaFormat);
        } finally {
            // release the decoder and extractor
            close();
        }
    }
    private void showSupportedColorFormat(MediaCodecInfo.CodecCapabilities caps) {
        System.out.print("supported color format: ");
        for (int c : caps.colorFormats) {
            System.out.print(c + "\t");
        }
        System.out.println();
    }

    private boolean isColorFormatSupported(int colorFormat, MediaCodecInfo.CodecCapabilities caps) {
        for (int c : caps.colorFormats) {
            if (c == colorFormat) {
                return true;
            }
        }
        return false;
    }
    public void decodeFramesToImage(MediaCodec decoder, MediaExtractor extractor, MediaFormat mediaFormat) {
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        boolean sawInputEOS = false;
        boolean sawOutputEOS = false;
        final int width = mediaFormat.getInteger(MediaFormat.KEY_WIDTH);
        final int height = mediaFormat.getInteger(MediaFormat.KEY_HEIGHT);
        ImageWidth = width;
        ImageHeight = height;
        int outputFrameCount = 0;
        while (!sawOutputEOS) {
            if (!sawInputEOS) {
                int inputBufferId = decoder.dequeueInputBuffer(DEFAULT_TIMEOUT_US);
                if (inputBufferId >= 0) {
                    ByteBuffer inputBuffer = decoder.getInputBuffer(inputBufferId);
                    // read one sample of compressed video into the input buffer; sampleSize is its length in bytes
                    int sampleSize = extractor.readSampleData(inputBuffer, 0);
                    if (sampleSize < 0) {
                        decoder.queueInputBuffer(inputBufferId, 0, 0, 0L, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        sawInputEOS = true;
                    } else {
                        long presentationTimeUs = extractor.getSampleTime();
                        decoder.queueInputBuffer(inputBufferId, 0, sampleSize, presentationTimeUs, 0);
                        extractor.advance(); // move on to the next sample in the file
                    }
                }
            }
            int outputBufferId = decoder.dequeueOutputBuffer(info, DEFAULT_TIMEOUT_US);
            if (outputBufferId >= 0) {
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    sawOutputEOS = true;
                }
                boolean doRender = (info.size != 0);
                if (doRender) {
                    outputFrameCount++;
                    Image image = decoder.getOutputImage(outputBufferId);
                    System.out.println("image format: " + image.getFormat());
                    if (outputImageFileType != -1) {
                        String fileName;
                        switch (outputImageFileType) {
                            case FILE_TypeI420:
                                fileName = OUTPUT_DIR + String.format("frame_%05d_I420_%dx%d.yuv", outputFrameCount, width, height);
                                dumpFile(fileName, getDataFromImage(image, COLOR_FormatI420));
                                break;
                            case FILE_TypeNV21:
                                fileName = OUTPUT_DIR + String.format("frame_%05d_NV21_%dx%d.yuv", outputFrameCount, width, height);
                                dumpFile(fileName, getDataFromImage(image, COLOR_FormatNV21));
                                break;
                            case FILE_TypeJPEG:
                                fileName = OUTPUT_DIR + String.format("frame_%05d.jpg", outputFrameCount);
                                compressToJpeg(fileName, image);
                                break;
                        }
                    }
                    image.close();
                    decoder.releaseOutputBuffer(outputBufferId, true);
                }
            }
        }
    }
    private static int selectTrack(MediaExtractor extractor) {
        int numTracks = extractor.getTrackCount();
        for (int i = 0; i < numTracks; i++) {
            MediaFormat format = extractor.getTrackFormat(i);
            String mime = format.getString(MediaFormat.KEY_MIME);
            if (mime.startsWith("video/")) {
                if (VERBOSE) {
                    Log.d(TAG, "Extractor selected track " + i + " (" + mime + "): " + format);
                }
                return i;
            }
        }
        return -1;
    }
    private static boolean isImageFormatSupported(Image image) {
        int format = image.getFormat();
        switch (format) {
            case ImageFormat.YUV_420_888:
            case ImageFormat.NV21:
            case ImageFormat.YV12:
                return true;
        }
        return false;
    }

    public static byte[] getGrayFromData(Image image, int colorFormat) {
        if (colorFormat != COLOR_FormatI420 && colorFormat != COLOR_FormatNV21) {
            throw new IllegalArgumentException("only support COLOR_FormatI420 " + "and COLOR_FormatNV21");
        }
        if (!isImageFormatSupported(image)) {
            throw new RuntimeException("can't convert Image to byte array, format " + image.getFormat());
        }
        Image.Plane[] planes = image.getPlanes();
        int i = 0; // plane 0 is the Y (luma) plane, i.e. the grayscale image
        ByteBuffer buffer = planes[i].getBuffer();
        byte[] data = new byte[buffer.remaining()];
        buffer.get(data, 0, data.length);
        if (VERBOSE) Log.v(TAG, "Finished reading data from plane " + i);
        return data;
    }
    public static byte[] getDataFromImage(Image image, int colorFormat) {
        if (colorFormat != COLOR_FormatI420 && colorFormat != COLOR_FormatNV21) {
            throw new IllegalArgumentException("only support COLOR_FormatI420 " + "and COLOR_FormatNV21");
        }
        if (!isImageFormatSupported(image)) {
            throw new RuntimeException("can't convert Image to byte array, format " + image.getFormat());
        }
        Rect crop = image.getCropRect();
        int format = image.getFormat();
        int width = crop.width();
        int height = crop.height();
        Image.Plane[] planes = image.getPlanes();
        byte[] data = new byte[width * height * ImageFormat.getBitsPerPixel(format) / 8];
        byte[] rowData = new byte[planes[0].getRowStride()];
        int channelOffset = 0;
        int outputStride = 1;
        for (int i = 0; i < planes.length; i++) {
            switch (i) {
                case 0: // Y plane
                    channelOffset = 0;
                    outputStride = 1;
                    break;
                case 1: // U plane
                    if (colorFormat == COLOR_FormatI420) {
                        channelOffset = width * height;
                        outputStride = 1;
                    } else if (colorFormat == COLOR_FormatNV21) {
                        channelOffset = width * height + 1;
                        outputStride = 2;
                    }
                    break;
                case 2: // V plane
                    if (colorFormat == COLOR_FormatI420) {
                        channelOffset = (int) (width * height * 1.25);
                        outputStride = 1;
                    } else if (colorFormat == COLOR_FormatNV21) {
                        channelOffset = width * height;
                        outputStride = 2;
                    }
                    break;
            }
            // The scraped excerpt is cut off in the middle of this method; from here
            // on the body is restored to match the original post's plane-copy loop.
            ByteBuffer buffer = planes[i].getBuffer();
            int rowStride = planes[i].getRowStride();
            int pixelStride = planes[i].getPixelStride();
            int shift = (i == 0) ? 0 : 1; // chroma planes are subsampled by 2
            int w = width >> shift;
            int h = height >> shift;
            buffer.position(rowStride * (crop.top >> shift) + pixelStride * (crop.left >> shift));
            for (int row = 0; row < h; row++) {
                int length;
                if (pixelStride == 1 && outputStride == 1) {
                    // packed plane: copy a whole row at once
                    length = w;
                    buffer.get(data, channelOffset, length);
                    channelOffset += length;
                } else {
                    // strided plane: copy pixel by pixel through rowData
                    length = (w - 1) * pixelStride + 1;
                    buffer.get(rowData, 0, length);
                    for (int col = 0; col < w; col++) {
                        data[channelOffset] = rowData[col * pixelStride];
                        channelOffset += outputStride;
                    }
                }
                if (row < h - 1) {
                    buffer.position(buffer.position() + rowStride - length);
                }
            }
        }
        return data;
    }
}
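The excerpt also never reaches the two helpers the class calls, dumpFile and compressToJpeg. The sketches below belong inside the VideoDecode class above and are reconstructions matching the behavior described earlier — a raw write for YUV, YuvImage-based compression for JPEG — rather than the author's verbatim code:

    private static void dumpFile(String fileName, byte[] data) {
        FileOutputStream outStream;
        try {
            outStream = new FileOutputStream(fileName);
            outStream.write(data); // raw YUV bytes, one file per frame
            outStream.close();
        } catch (IOException ioe) {
            throw new RuntimeException("failed writing data to file " + fileName, ioe);
        }
    }

    private void compressToJpeg(String fileName, Image image) {
        FileOutputStream outStream;
        try {
            outStream = new FileOutputStream(fileName);
        } catch (IOException ioe) {
            throw new RuntimeException("unable to create output file " + fileName, ioe);
        }
        Rect rect = image.getCropRect();
        // YuvImage expects NV21, so convert first, then compress
        YuvImage yuvImage = new YuvImage(getDataFromImage(image, COLOR_FormatNV21),
                ImageFormat.NV21, rect.width(), rect.height(), null);
        yuvImage.compressToJpeg(rect, 100, outStream);
    }

Driving the class then looks roughly like this (both paths are placeholders, and setSaveFrames throws IOException):

VideoDecode vd = new VideoDecode();
vd.setSaveFrames("/sdcard/frames", VideoDecode.FILE_TypeJPEG); // placeholder output dir
vd.VideoDecodePrepare("/sdcard/test.mp4");                     // placeholder input file
vd.execute(); // decodes the file and writes one image per frame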
Part 2: Notes on logging on Android
I. When building inside the platform source tree
1: Include the header — with libcutils this is cutils/log.h:
#include <cutils/log.h>
2: Define the LOG_TAG macro:
#define LOG_TAG "MY LOG TAG"
3: Link against the log library by adding this to Android.mk:
LOCAL_SHARED_LIBRARIES += libcutils
You can then call LOGD directly to print log messages.

II. When developing with the NDK
1: Include the header — the NDK logging header is android/log.h:
#include <android/log.h>
2: Define the LOG_TAG macro and a convenience macro:
#define LOG_TAG "MY LOG TAG"
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__)
3: Link against the log library by adding this to Android.mk:
LOCAL_LDLIBS := -llog
You can then call LOGD directly to print log messages.

III. In Java code
1: Import the package:
import android.util.Log;
2: Use it:
private static final String TAG = "your_tag";
Log.d(TAG, "show something");
While the program runs, you can view the output with the logcat command in an adb shell, or in the LogCat window of ADT in Eclipse.
Part 3: Android notes
The four major Android components:
Activity — the on-screen user interface
Service — background services
BroadcastReceiver — the broadcast mechanism
ContentProvider — structured data sharing between apps
Intent: the class used to start components and pass information between them.
It is used for interaction among Activities, Receivers, and Services. Create one with the no-argument constructor and attach information through its action, category, data, and extra fields. Then: startActivity(Intent intent) in an Activity switches screens; startService(Intent intent) on a Context starts a service; registerReceiver() on a Context registers a broadcast receiver, after which sendBroadcast() sends an unordered broadcast and sendOrderedBroadcast() sends an ordered one.
Handler:
Used to send and process messages and to update the UI in cooperation with the main thread. Messages (Message or Runnable objects) travel through a MessageQueue — a first-in, first-out queue — and are dispatched by a Looper; a posted item can be a Message object or a Runnable. The receiving side gets them in handleMessage(Message msg).
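For example, a minimal sketch (textView and doSomeWork() are placeholders; imports are android.os.Handler, Looper, and Message):

final Handler uiHandler = new Handler(Looper.getMainLooper()) {
    @Override
    public void handleMessage(Message msg) {
        if (msg.what == 1) {
            textView.setText((String) msg.obj); // runs on the main thread
        }
    }
};

new Thread(new Runnable() {
    @Override
    public void run() {
        String result = doSomeWork();                      // placeholder background work
        uiHandler.obtainMessage(1, result).sendToTarget(); // hand the result to the UI thread
    }
}).start();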
SharedPreferences:
Typically used for first-launch settings or small per-screen preferences such as fonts. It is a small local key-value store. Obtain one through Context's getSharedPreferences(); every value is stored as a key-value pair. Call edit() on the SharedPreferences object to get a SharedPreferences.Editor, add data with putString() and the like, then submit it with commit(); read data back with getString() on the SharedPreferences object.
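In code:

SharedPreferences prefs = context.getSharedPreferences("settings", Context.MODE_PRIVATE);
prefs.edit()                       // edit() returns a SharedPreferences.Editor
     .putString("font", "serif")
     .commit();                    // or apply() for an asynchronous write
String font = prefs.getString("font", "default");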
ViewPager: the class that implements swipeable screens.
Listen for page changes by attaching a ViewPager.OnPageChangeListener.
Implementation steps (a short adapter sketch follows the list):
① Declare the ViewPager control in the layout file;
② Bind the control in code;
③ Create the adapter by subclassing the abstract PagerAdapter class and hand it the data source;
④ In the adapter, implement the two abstract methods and override two more: getCount() returns the number of swipeable pages, and isViewFromObject() reports whether a view corresponds to the object returned by instantiateItem(); the two overrides are destroyItem(), which tears down the view at a given position, and instantiateItem(), which installs the view for a given position.
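A minimal PagerAdapter sketch (pages is a placeholder List<View>; PagerAdapter comes from android.support.v4.view in this era):

class MyPagerAdapter extends PagerAdapter {
    private final List<View> pages;
    MyPagerAdapter(List<View> pages) { this.pages = pages; }

    @Override
    public int getCount() { return pages.size(); }           // number of pages

    @Override
    public boolean isViewFromObject(View view, Object object) { return view == object; }

    @Override
    public Object instantiateItem(ViewGroup container, int position) {
        View page = pages.get(position);
        container.addView(page);                             // attach the page view
        return page;                                         // the "object" key for this page
    }

    @Override
    public void destroyItem(ViewGroup container, int position, Object object) {
        container.removeView((View) object);                 // drop the off-screen page
    }
}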
Timer and TimerTask:
Timer is the timing class; create one with the no-argument constructor. Timer.schedule(TimerTask task, long time) runs a task after the given delay, and Timer.cancel() turns the timer off once the work is done. TimerTask is abstract, so instantiating one means overriding its run() method; everything to execute goes into run(), and it already executes on a background thread.
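For example:

Timer timer = new Timer();
timer.schedule(new TimerTask() {
    @Override
    public void run() {
        doPeriodicWork(); // placeholder; runs on the Timer's background thread
    }
}, 1000);                 // run once, one second from now
// ... later, when the task is finished:
timer.cancel();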
Custom View: the classes involved include Paint, Canvas, Rect, RectF, Path, View.MeasureSpec, Timer, and TimerTask.
View is abstract, so you subclass it to get an instance. After binding it in the layout file, set the custom view's attributes from code. Inside the view, override onMeasure to measure the dimensions given by the layout file and save them with View's setMeasuredDimension(); override onDraw to paint. For animated drawings, use Timer's schedule(TimerTask, long delay, long period): after the initial delay it re-runs the task's run() method every period. Put the time-consuming work inside run(), then trigger a redraw with View's invalidate() when on the main thread, or postInvalidate() from a background thread.
Databases:
Commonly used databases include Oracle, a large commercial database that must be installed and configured; MySQL, a mid-size database, also installed and configured but free; and SQLite, a small free embedded database with a low memory footprint, now at major version 3. An SQLite database is created through an SQLiteOpenHelper and manipulated through an SQLiteDatabase. The helper class is abstract: subclass it, override its two methods, and in the subclass constructor call the helper's constructor (Context context, String name, SQLiteDatabase.CursorFactory factory, int version) to create the database; create the tables in onCreate, and handle version upgrades in onUpgrade. On the database object, execute SQL statements with execSQL() and friends, for example:
create table student(_id integer primary key autoincrement, name text);
insert into student(_id, name) values(1, 'zx');
delete from student where _id = 1;
update student set _id = 2 where name = 'zx';
select * from student;
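A minimal helper sketch (database name and schema are illustrative):

class StudentDbHelper extends SQLiteOpenHelper {
    StudentDbHelper(Context context) {
        super(context, "school.db", null, 1);   // (context, name, cursorFactory, version)
    }
    @Override
    public void onCreate(SQLiteDatabase db) {
        db.execSQL("create table student(_id integer primary key autoincrement, name text)");
    }
    @Override
    public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
        db.execSQL("drop table if exists student"); // simplistic upgrade policy
        onCreate(db);
    }
}

// usage:
SQLiteDatabase db = new StudentDbHelper(context).getWritableDatabase();
db.execSQL("insert into student(name) values('zx')");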
Optimizing ListView and GridView adapters:
Wrap the item layout's controls in a holder class. When a view is requested, check whether a recycled view exists: if not, inflate the view from the layout file, create the holder, and stash its address on the view with setTag(); if it does exist, reuse the address with getTag().
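The classic getView() shape (layout, ids, inflater, and data are placeholders):

@Override
public View getView(int position, View convertView, ViewGroup parent) {
    ViewHolder holder;
    if (convertView == null) {                        // no recycled view: inflate and cache
        convertView = inflater.inflate(R.layout.item, parent, false);
        holder = new ViewHolder();
        holder.name = (TextView) convertView.findViewById(R.id.name);
        convertView.setTag(holder);                   // stash the holder on the view
    } else {
        holder = (ViewHolder) convertView.getTag();   // reuse: no findViewById calls
    }
    holder.name.setText(data.get(position));
    return convertView;
}

static class ViewHolder { TextView name; }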
Reflection:
Ways of storing data:
SharedPreferences, databases, files, the network, and ContentProviders.
Broadcasts:
A broadcast needs receivers, a sender, and a channel. By how the sender sends, broadcasts are either ordered or unordered. In an ordered broadcast the receivers receive in sequence, ranked by their declared priority; each receiver is also a forwarder that passes the broadcast on to those behind it, and along the way it can add information or abort the broadcast entirely. In an unordered broadcast the receivers are unrelated and each gets the message straight from the sender; nothing can be added in transit and the broadcast cannot be stopped. Before a broadcast is sent, the receiver must be registered, in one of two ways: dynamically or statically. Dynamic registration happens in code through a Context method; every kind of broadcast can be registered dynamically, the registration lives and dies with the application, and it is lighter on memory than static registration. Static registration happens in the manifest file; some system broadcasts cannot be received this way. Its lifetime follows the system: once the system boots, the receiver is live, which costs more memory. A receiver subclasses BroadcastReceiver and implements the abstract onReceive() method, passing data onward through callback interfaces. Note: receivers must be registered before the broadcast is sent, and with an explicit target no intent filter is needed.
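A dynamic-registration sketch:

BroadcastReceiver receiver = new BroadcastReceiver() {
    @Override
    public void onReceive(Context context, Intent intent) {
        // handle the broadcast; the Intent carries the data
    }
};
IntentFilter filter = new IntentFilter(Intent.ACTION_BATTERY_CHANGED); // example system action
registerReceiver(receiver, filter);  // lives with this component
// ...
unregisterReceiver(receiver);        // always unregister to avoid leaks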
Android layouts: the nine layouts
Linear layout: horizontal or vertical; its defining feature is weight, which fixes the proportion of the view each control occupies.
Relative layout: controls are positioned relative to the parent or to sibling controls; each control states its relative position.
Absolute layout: each control is given an absolute position in the view; hardly used any more.
Table layout: children sit in rows (TableRow), with columns represented by the controls.
Frame layout: each child stacks over the previous ones; typically used for overlays such as pause buttons.
Grid layout: cells can span rows and columns, wrapping when a row is full.
Side-sliding menus: left and right slide-out panels, built from a main menu plus secondary menus.
Pull-to-refresh: pull down to refresh, pull up to load more.
Drawer layout.
Android versions and their API levels:
1.6 — 4; 2.1 — 7; 3.0 — 11; 4.0.3 — 15; 4.3 — 18; 5.0 — 21; 5.1 — 22; 6.0 — 23; 7.0 — 24; 7.1 — 25.
The four layers of the Android stack:
Application layer: Java; mainly app development.
Runtime library layer: Java and C; views, manager classes, and similar development.
Framework layer: C and Linux; the various frameworks, browser engine, and so on.
Kernel layer: Linux and C; the device drivers.
The four major components, in more detail:
Activity: a screen; it mediates between the program and the user, and has its own lifecycle with seven callbacks, plus 4 launch modes.
Service:
BroadcastReceiver: three elements — sender, receivers, and the channel (the Intent). Types: ordered (received in sequence, data passed along, receivers can intercept it) and unordered (the opposite). Registration: static registration is persistent listening, costs more memory, and its lifetime follows the system; dynamic registration (in code) works for all broadcasts — indeed some system broadcasts can only be registered dynamically — is temporary listening, costs less memory, and its lifetime follows the application.
ContentProvider: does not itself store data; it fronts one of the five ways of storing data. Its traits: ① it puts a uniform interface in front of data-access operations; ② it enables data access across applications; ③ it backs access to Android's contacts, messages, audio, video, and so on. Data is accessed through a ContentResolver, which supports insert, delete, update, and query.
Animation: IO streams: Serialization: AlertDialog:
Set implementations: Phone battery monitoring:
Custom SurfaceView:
Custom View: how its three constructors differ
Message: Handler.obtainMessage() / new Message() / Message.obtain()
Accessing the network with HttpURLConnection
GridView; async tasks; animation
Abstract classes vs. interfaces; reflection; cloning; serialization; implementing side-sliding; databases; Socket:
Gson parsing
AsyncTask vs. plain worker threads; WebView; version updates; rounding photo corners
Collection vs. Collections; SQL statements
MVP vs. MVC: TCP vs. UDP: the one-tap share flow: understanding HTTP: accessing the network without a framework: List vs. Set: the custom-View workflow: traits of linear layout: how ViewPager works: ways to start a Service:
Ways to start an Activity: parsing XML data:
Part 4: Android multimedia framework notes (22): MediaCodec in C++, from creation through start, and its state transitions
Starting with this installment we dig into the C++ source. The agenda:
mediacodec.h; CreateByType; init; the BufferInfo inner structure of MediaCodec; the configure step; start.
In MediaCodec.h, BufferInfo is a struct:
//create by 逆流的魚yuiop on 2016/12/11 //blog: http://blog.csdn.net/hejjunlin
// NOTE: the scraped page stripped everything between angle brackets, so the
// sp<> template arguments here and below are restored from AOSP's
// frameworks/av MediaCodec.h of that era — read them as a best-effort reconstruction.
struct BufferInfo {
    uint32_t mBufferID;
    sp<ABuffer> mData;
    sp<ABuffer> mEncryptedData;
    sp<IMemory> mSharedEncryptedBuffer;
    sp<AMessage> mNotify;
    sp<AMessage> mFormat;
    bool mOwnedByClient;
};

The declarations in mediacodec.h live under frameworks/av/include/media/stagefright.
//create by 逆流的魚yuiop on 2016/12/11 //blog: http://blog.csdn.net/hejjunlin
// (template arguments throughout this header are reconstructed, as noted above)
namespace android {

struct ABuffer; struct AMessage; struct AReplyToken; struct AString;
struct CodecBase; struct IBatteryStats; struct ICrypto; class IMemory;
struct MemoryDealer; class IResourceManagerClient; class IResourceManagerService;
struct PersistentSurface; struct SoftwareRenderer; struct Surface;

struct MediaCodec : public AHandler {
    enum ConfigureFlags {
        CONFIGURE_FLAG_ENCODE = 1,
    };

    enum BufferFlags {
        BUFFER_FLAG_SYNCFRAME   = 1,
        BUFFER_FLAG_CODECCONFIG = 2,
        BUFFER_FLAG_EOS         = 4,
    };

    enum {
        CB_INPUT_AVAILABLE = 1,
        CB_OUTPUT_AVAILABLE = 2,
        CB_ERROR = 3,
        CB_OUTPUT_FORMAT_CHANGED = 4,
        CB_RESOURCE_RECLAIMED = 5,
    };

    static const pid_t kNoPid = -1;

    static sp<MediaCodec> CreateByType(
            const sp<ALooper> &looper, const char *mime, bool encoder,
            status_t *err = NULL, pid_t pid = kNoPid);

    static sp<MediaCodec> CreateByComponentName(
            const sp<ALooper> &looper, const char *name,
            status_t *err = NULL, pid_t pid = kNoPid);

    static sp<PersistentSurface> CreatePersistentInputSurface();

    status_t configure(
            const sp<AMessage> &format,
            const sp<Surface> &nativeWindow,
            const sp<ICrypto> &crypto,
            uint32_t flags);

    status_t setCallback(const sp<AMessage> &callback);

    status_t setOnFrameRenderedNotification(const sp<AMessage> &notify);

    status_t createInputSurface(sp<IGraphicBufferProducer>* bufferProducer);

    status_t setInputSurface(const sp<PersistentSurface> &surface);
    status_t start();

    // Returns to a state in which the component remains allocated but
    // unconfigured.
    status_t stop();

    // Resets the codec to the INITIALIZED state. Can be called after an error
    // has occurred to make the codec usable.
    status_t reset();

    // Client MUST call release before releasing final reference to this
    // object.
    status_t release();

    status_t flush();

    status_t queueInputBuffer(
            size_t index, size_t offset, size_t size,
            int64_t presentationTimeUs, uint32_t flags,
            AString *errorDetailMsg = NULL);

    status_t queueSecureInputBuffer(
            size_t index, size_t offset,
            const CryptoPlugin::SubSample *subSamples, size_t numSubSamples,
            const uint8_t key[16], const uint8_t iv[16], CryptoPlugin::Mode mode,
            int64_t presentationTimeUs, uint32_t flags,
            AString *errorDetailMsg = NULL);

    status_t dequeueInputBuffer(size_t *index, int64_t timeoutUs = 0ll);

    status_t dequeueOutputBuffer(
            size_t *index, size_t *offset, size_t *size,
            int64_t *presentationTimeUs, uint32_t *flags,
            int64_t timeoutUs = 0ll);

    status_t renderOutputBufferAndRelease(size_t index, int64_t timestampNs);
    status_t renderOutputBufferAndRelease(size_t index);
    status_t releaseOutputBuffer(size_t index);

    status_t signalEndOfInputStream();

    status_t getOutputFormat(sp<AMessage> *format) const;
    status_t getInputFormat(sp<AMessage> *format) const;

    status_t getWidevineLegacyBuffers(Vector<sp<ABuffer> > *buffers) const;
    status_t getInputBuffers(Vector<sp<ABuffer> > *buffers) const;
    status_t getOutputBuffers(Vector<sp<ABuffer> > *buffers) const;

    status_t getOutputBuffer(size_t index, sp<ABuffer> *buffer);
    status_t getOutputFormat(size_t index, sp<AMessage> *format);
    status_t getInputBuffer(size_t index, sp<ABuffer> *buffer);

    status_t setSurface(const sp<Surface> &surface);

    status_t requestIDRFrame();

    // Notification will be posted once there "is something to do", i.e.
    // an input/output buffer has become available, a format change is
    // pending, an error is pending.
    void requestActivityNotification(const sp<AMessage> &notify);

    status_t getName(AString *componentName) const;

    status_t setParameters(const sp<AMessage> &params);

    // Create a MediaCodec notification message from a list of rendered or dropped render infos
    // by adding rendered frame information to a base notification message. Returns the number
    // of frames that were rendered.
    static size_t CreateFramesRenderedMessage(
            std::list<FrameRenderTracker::Info> done, sp<AMessage> &msg);

    virtual ~MediaCodec();

    virtual void onMessageReceived(const sp<AMessage> &msg);

    // used by ResourceManagerClient
    status_t reclaim(bool force = false);
    friend struct ResourceManagerClient;

private:
    enum State {
        UNINITIALIZED,
        INITIALIZING,
        INITIALIZED,
        CONFIGURING,
        CONFIGURED,
        STARTING,
        STARTED,
        FLUSHING,
        FLUSHED,
        STOPPING,
        RELEASING,
    };

    enum {
        kPortIndexInput  = 0,
        kPortIndexOutput = 1,
    };

    enum {
        kWhatInit                        = 'init',
        kWhatConfigure                   = 'conf',
        kWhatSetSurface                  = 'sSur',
        kWhatCreateInputSurface          = 'cisf',
        kWhatSetInputSurface             = 'sisf',
        kWhatStart                       = 'strt',
        kWhatStop                        = 'stop',
        kWhatRelease                     = 'rele',
        kWhatDequeueInputBuffer          = 'deqI',
        kWhatQueueInputBuffer            = 'queI',
        kWhatDequeueOutputBuffer         = 'deqO',
        kWhatReleaseOutputBuffer         = 'relO',
        kWhatSignalEndOfInputStream      = 'eois',
        kWhatGetBuffers                  = 'getB',
        kWhatFlush                       = 'flus',
        kWhatGetOutputFormat             = 'getO',
        kWhatGetInputFormat              = 'getI',
        kWhatDequeueInputTimedOut        = 'dITO',
        kWhatDequeueOutputTimedOut       = 'dOTO',
        kWhatCodecNotify                 = 'codc',
        kWhatRequestIDRFrame             = 'ridr',
        kWhatRequestActivityNotification = 'racN',
        kWhatGetName                     = 'getN',
        kWhatSetParameters               = 'setP',
        kWhatSetCallback                 = 'setC',
        kWhatSetNotification             = 'setN',
    };

    enum {
        kFlagUsesSoftwareRenderer       = 1,
        kFlagOutputFormatChanged        = 2,
        kFlagOutputBuffersChanged       = 4,
        kFlagStickyError                = 8,
        kFlagDequeueInputPending        = 16,
        kFlagDequeueOutputPending       = 32,
        kFlagIsSecure                   = 64,
        kFlagSawMediaServerDie          = 128,
        kFlagIsEncoder                  = 256,
        kFlagGatherCodecSpecificData    = 512,
        kFlagIsAsync                    = 1024,
        kFlagIsComponentAllocated       = 2048,
        kFlagPushBlankBuffersOnShutdown = 4096,
    };
    struct BufferInfo {
        uint32_t mBufferID;
        sp<ABuffer> mData;
        sp<ABuffer> mEncryptedData;
        sp<IMemory> mSharedEncryptedBuffer;
        sp<AMessage> mNotify;
        sp<AMessage> mFormat;
        bool mOwnedByClient;
    };
    struct ResourceManagerServiceProxy : public IBinder::DeathRecipient {
        ResourceManagerServiceProxy(pid_t pid);
        ~ResourceManagerServiceProxy();

        void init();

        // implements DeathRecipient
        virtual void binderDied(const wp<IBinder>& who);

        void addResource(int64_t clientId, const sp<IResourceManagerClient> &client,
                const Vector<MediaResource> &resources);
        void removeResource(int64_t clientId);
        bool reclaimResource(const Vector<MediaResource> &resources);

    private:
        Mutex mLock;
        sp<IResourceManagerService> mService;
        pid_t mPid;
    };
    State mState;
    bool mReleasedByResourceManager;
    sp<ALooper> mLooper;
    sp<ALooper> mCodecLooper;
    sp<CodecBase> mCodec;
    AString mComponentName;
    sp<AReplyToken> mReplyID;
    uint32_t mFlags;
    status_t mStickyError;
    sp<Surface> mSurface;
    SoftwareRenderer *mSoftRenderer;

    sp<AMessage> mOutputFormat;
    sp<AMessage> mInputFormat;
    sp<AMessage> mCallback;
    sp<AMessage> mOnFrameRenderedNotification;
    sp<MemoryDealer> mDealer;

    sp<IResourceManagerClient> mResourceManagerClient;
    sp<ResourceManagerServiceProxy> mResourceManagerService;

    bool mBatteryStatNotified;
    bool mIsVideo;
    int32_t mVideoWidth;
    int32_t mVideoHeight;
    int32_t mRotationDegrees;

    // initial create parameters
    AString mInitName;
    bool mInitNameIsType;
    bool mInitIsEncoder;

    // configure parameter
    sp<AMessage> mConfigureMsg;

    // Used only to synchronize asynchronous getBufferAndFormat
    // across all the other (synchronous) buffer state change
    // operations, such as de/queueIn/OutputBuffer, start and
    // stop/flush/reset/release.
    Mutex mBufferLock;

    List<size_t> mAvailPortBuffers[2];
    Vector<BufferInfo> mPortBuffers[2];

    int32_t mDequeueInputTimeoutGeneration;
    sp<AReplyToken> mDequeueInputReplyID;

    int32_t mDequeueOutputTimeoutGeneration;
    sp<AReplyToken> mDequeueOutputReplyID;

    sp<ICrypto> mCrypto;

    List<sp<ABuffer> > mCSD;

    sp<AMessage> mActivityNotify;

    bool mHaveInputSurface;
    bool mHavePendingInputBuffers;
    MediaCodec(const sp<ALooper> &looper, pid_t pid);

    static status_t PostAndAwaitResponse(
            const sp<AMessage> &msg, sp<AMessage> *response);

    void PostReplyWithError(const sp<AReplyToken> &replyID, int32_t err);

    status_t init(const AString &name, bool nameIsType, bool encoder);

    void setState(State newState);
    void returnBuffersToCodec();
    void returnBuffersToCodecOnPort(int32_t portIndex);
    size_t updateBuffers(int32_t portIndex, const sp<AMessage> &msg);
    status_t onQueueInputBuffer(const sp<AMessage> &msg);
    status_t onReleaseOutputBuffer(const sp<AMessage> &msg);
    ssize_t dequeuePortBuffer(int32_t portIndex);

    status_t getBufferAndFormat(
            size_t portIndex, size_t index,
            sp<ABuffer> *buffer, sp<AMessage> *format);

    bool handleDequeueInputBuffer(const sp<AReplyToken> &replyID, bool newRequest = false);
    bool handleDequeueOutputBuffer(const sp<AReplyToken> &replyID, bool newRequest = false);
    void cancelPendingDequeueOperations();

    void extractCSD(const sp<AMessage> &format);
    status_t queueCSDInputBuffer(size_t bufferIndex);

    status_t handleSetSurface(const sp<Surface> &surface);
    status_t connectToSurface(const sp<Surface> &surface);
    status_t disconnectFromSurface();

    void postActivityNotificationIfPossible();

    void onInputBufferAvailable();
    void onOutputBufferAvailable();
    void onError(status_t err, int32_t actionCode, const char *detail = NULL);
    void onOutputFormatChanged();

    status_t onSetParameters(const sp<AMessage> &params);

    status_t amendOutputFormatWithCodecSpecificData(const sp<ABuffer> &buffer);
    void updateBatteryStat();
    bool isExecuting() const;

    uint64_t getGraphicBufferSize();
    void addResource(const String8 &type, const String8 &subtype, uint64_t value);

    bool hasPendingBuffer(int portIndex);
    bool hasPendingBuffer();

    /* called to get the last codec error when the sticky flag is set.
     * if no such codec error is found, returns UNKNOWN_ERROR.
     */
    inline status_t getStickyError() const {
        return mStickyError != 0 ? mStickyError : UNKNOWN_ERROR;
    }

    inline void setStickyError(status_t err) {
        mFlags |= kFlagStickyError;
        mStickyError = err;
    }

    DISALLOW_EVIL_CONSTRUCTORS(MediaCodec);
};

}  // namespace android

Now the CreateByType step:
//create by 逆流的魚yuiop on 2016/12/11 //blog: http://blog.csdn.net/hejjunlin
// static
sp<MediaCodec> MediaCodec::CreateByType(
        const sp<ALooper> &looper, const char *mime, bool encoder, status_t *err, pid_t pid) {
    sp<MediaCodec> codec = new MediaCodec(looper, pid);

    const status_t ret = codec->init(mime, true /* nameIsType */, encoder);
    if (err != NULL) {
        *err = ret;
    }
    return ret == OK ? codec : NULL; // NULL deallocates codec.
}

Then comes the init step:
//create by 逆流的魚yuiop on 2016/12/11 //blog: http://blog.csdn.net/hejjunlin
status_t MediaCodec::init(const AString &name, bool nameIsType, bool encoder) {
    mResourceManagerService->init();

    // save the initial parameters; they are reused by reset()
    mInitName = name;
    mInitNameIsType = nameIsType;
    mInitIsEncoder = encoder;

    // Current video decoders do not return from OMX_FillThisBuffer quickly,
    // violating the OpenMAX spec; until that is remedied we need an extra
    // looper so codec events do not tie up the main event queue.
    if (nameIsType || !strncasecmp(name.c_str(), "omx.", 4)) { // name matches "omx."
        mCodec = new ACodec; // instantiate ACodec
    } else if (!nameIsType
            && !strncasecmp(name.c_str(), "android.filter.", 15)) {
        mCodec = new MediaFilter; // instantiate MediaFilter
    } else {
        return NAME_NOT_FOUND;
    }

    bool secureCodec = false;
    if (nameIsType && !strncasecmp(name.c_str(), "video/", 6)) {
        mIsVideo = true;
    } else {
        AString tmp = name;
        if (tmp.endsWith(".secure")) {
            secureCodec = true;
            tmp.erase(tmp.size() - 7, 7);
        }
        const sp<IMediaCodecList> mcl = MediaCodecList::getInstance();
        if (mcl == NULL) {
            mCodec = NULL;  // remove the codec.
            return NO_INIT; // if called from Java should raise IOException
        }
        ssize_t codecIdx = mcl->findCodecByName(tmp.c_str());
        if (codecIdx >= 0) {
            const sp<MediaCodecInfo> info = mcl->getCodecInfo(codecIdx);
            Vector<AString> mimes;
            info->getSupportedMimes(&mimes);
            for (size_t i = 0; i < mimes.size(); i++) {
                if (mimes[i].startsWith("video/")) {
                    mIsVideo = true;
                    break;
                }
            }
        }
    }

    if (mIsVideo) {
        // video codec needs dedicated looper
        if (mCodecLooper == NULL) {
            mCodecLooper = new ALooper;
            mCodecLooper->setName("CodecLooper"); // name it CodecLooper
            mCodecLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
        }
        mCodecLooper->registerHandler(mCodec);
    } else {
        mLooper->registerHandler(mCodec);
    }

    mLooper->registerHandler(this);

    mCodec->setNotificationMessage(new AMessage(kWhatCodecNotify, this));

    sp<AMessage> msg = new AMessage(kWhatInit, this);
    msg->setString("name", name);
    msg->setInt32("nameIsType", nameIsType);
    if (nameIsType) {
        msg->setInt32("encoder", encoder);
    }

    status_t err;
    Vector<MediaResource> resources;
    const char *type = secureCodec ? kResourceSecureCodec : kResourceNonSecureCodec;
    const char *subtype = mIsVideo ? kResourceVideoCodec : kResourceAudioCodec;
    resources.push_back(MediaResource(String8(type), String8(subtype), 1));
    for (int i = 0; i <= kMaxRetry; ++i) {
        if (i > 0) {
            // Don't try to reclaim resource for the first time.
            if (!mResourceManagerService->reclaimResource(resources)) {
                break;
            }
        }

        sp<AMessage> response;
        err = PostAndAwaitResponse(msg, &response);
        if (!isResourceError(err)) {
            break;
        }
    }
    return err;
}

Next, the configure step:
//create by 逆流的魚yuiop on 2016/12/11 //blog: http://blog.csdn.net/hejjunlin
status_t MediaCodec::configure(
        const sp<AMessage> &format,
        const sp<Surface> &surface,
        const sp<ICrypto> &crypto,
        uint32_t flags) {
    sp<AMessage> msg = new AMessage(kWhatConfigure, this);

    if (mIsVideo) {
        format->findInt32("width", &mVideoWidth);
        format->findInt32("height", &mVideoHeight);
        if (!format->findInt32("rotation-degrees", &mRotationDegrees)) {
            mRotationDegrees = 0;
        }
    }

    msg->setMessage("format", format);
    msg->setInt32("flags", flags);
    msg->setObject("surface", surface);

    if (crypto != NULL) {
        msg->setPointer("crypto", crypto.get());
    }

    // save msg for reset
    mConfigureMsg = msg;

    status_t err;
    Vector<MediaResource> resources;
    const char *type = (mFlags & kFlagIsSecure) ?
            kResourceSecureCodec : kResourceNonSecureCodec;
    const char *subtype = mIsVideo ? kResourceVideoCodec : kResourceAudioCodec;
    resources.push_back(MediaResource(String8(type), String8(subtype), 1));
    // Don't know the buffer size at this point, but it's fine to use 1 because
    // the reclaimResource call doesn't consider the requester's buffer size for now.
    resources.push_back(MediaResource(String8(kResourceGraphicMemory), 1));
    for (int i = 0; i <= kMaxRetry; ++i) {
        if (i > 0) {
            // Don't try to reclaim resource for the first time.
            if (!mResourceManagerService->reclaimResource(resources)) {
                break;
            }
        }

        sp<AMessage> response;
        err = PostAndAwaitResponse(msg, &response);
        if (err != OK && err != INVALID_OPERATION) {
            // MediaCodec now set state to UNINITIALIZED upon any fatal error.
            // To maintain backward-compatibility, do a reset() to put codec
            // back into INITIALIZED state.
            // But don't reset if the err is INVALID_OPERATION, which means
            // the configure failure is due to wrong state.
            ALOGE("configure failed with err 0x%08x, resetting...", err);
            reset();
        }
        if (!isResourceError(err)) {
            break;
        }
    }
    return err;
}

Then the start step:
//create by 逆流的魚yuiop on 2016/12/11 //blog: http://blog.csdn.net/hejjunlin
status_t MediaCodec::start() {
    sp<AMessage> msg = new AMessage(kWhatStart, this);

    status_t err;
    Vector<MediaResource> resources;
    const char *type = (mFlags & kFlagIsSecure) ?
            kResourceSecureCodec : kResourceNonSecureCodec;
    const char *subtype = mIsVideo ? kResourceVideoCodec : kResourceAudioCodec;
    resources.push_back(MediaResource(String8(type), String8(subtype), 1));
    // Don't know the buffer size at this point, but it's fine to use 1 because
    // the reclaimResource call doesn't consider the requester's buffer size for now.
    resources.push_back(MediaResource(String8(kResourceGraphicMemory), 1));
    for (int i = 0; i <= kMaxRetry; ++i) {
        if (i > 0) {
            // Don't try to reclaim resource for the first time.
            if (!mResourceManagerService->reclaimResource(resources)) {
                break;
            }
            // Recover codec from previous error before retry start.
            err = reset();
            if (err != OK) {
                ALOGE("retrying start: failed to reset codec");
                break;
            }
            sp<AMessage> response;
            err = PostAndAwaitResponse(mConfigureMsg, &response);
            if (err != OK) {
                ALOGE("retrying start: failed to configure codec");
                break;
            }
        }

        sp<AMessage> response;
        err = PostAndAwaitResponse(msg, &response);
        if (!isResourceError(err)) {
            break;
        }
    }
    return err;
}

And the stop step:
//create by 逆流的魚yuiop on 2016/12/11 //blog: http://blog.csdn.net/hejjunlin
status_t MediaCodec::stop() {
    sp<AMessage> msg = new AMessage(kWhatStop, this);

    sp<AMessage> response;
    return PostAndAwaitResponse(msg, &response);
}

AMessage.cpp has matching AHandler.cpp and ALooper.cpp implementations; together they form the same looper/handler machinery in C++, with interfaces and method names kept consistent with the Java layer.
Every message is handled in onMessageReceived, which is where MediaCodec's state transitions happen. The excerpt below is the error-handling branch, showing how each in-flight state degrades when an error arrives.
void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
    // ... (error-handling branch; brace structure restored from AOSP MediaCodec.cpp)
    switch (mState) {
        case INITIALIZING: // while initializing
        {
            setState(UNINITIALIZED);
            break;
        }
        case CONFIGURING: // while configuring
        {
            setState(actionCode == ACTION_CODE_FATAL ?
                    UNINITIALIZED : INITIALIZED);
            break;
        }
        case STARTING: // while starting
        {
            setState(actionCode == ACTION_CODE_FATAL ?
                    UNINITIALIZED : CONFIGURED);
            break;
        }
        case STOPPING:  // while stopping
        case RELEASING: // while releasing
        {
            // Ignore the error, assuming we'll still get
            // the shutdown complete notification.
            sendErrorResponse = false;
            if (mFlags & kFlagSawMediaServerDie) {
                // MediaServer died, there definitely won't
                // be a shutdown complete notification after
                // all.
                // note that we're directly going from
                // STOPPING->UNINITIALIZED, instead of the
                // usual STOPPING->INITIALIZED state.
                setState(UNINITIALIZED);
                if (mState == RELEASING) {
                    mComponentName.clear();
                }
                (new AMessage)->postReply(mReplyID);
            }
            break;
        }
        case FLUSHING: // while flushing
        {
            if (actionCode == ACTION_CODE_FATAL) {
                setState(UNINITIALIZED);
            } else {
                setState((mFlags & kFlagIsAsync) ? FLUSHED : STARTED);
            }
            break;
        }
        case FLUSHED:
        case STARTED:
        {
            sendErrorResponse = false;
            setStickyError(err);
            postActivityNotificationIfPossible();
            cancelPendingDequeueOperations();
            if (mFlags & kFlagIsAsync) {
                onError(err, actionCode);
            }
            switch (actionCode) {
                case ACTION_CODE_TRANSIENT:
                    break;
                case ACTION_CODE_RECOVERABLE:
                    setState(INITIALIZED);
                    break;
                default:
                    setState(UNINITIALIZED);
                    break;
            }
            break;
        }
        default:
        {
            sendErrorResponse = false;
            setStickyError(err);
            postActivityNotificationIfPossible();
            // actionCode in an uninitialized state is always fatal.
            if (mState == UNINITIALIZED) {
                actionCode = ACTION_CODE_FATAL;
            }
            if (mFlags & kFlagIsAsync) {
                onError(err, actionCode);
            }
            switch (actionCode) {
                case ACTION_CODE_TRANSIENT:
                    break;
                case ACTION_CODE_RECOVERABLE:
                    setState(INITIALIZED);
                    break;
                default:
                    setState(UNINITIALIZED);
                    break;
            }
            break;
        }
    }
    // ... (remainder of onMessageReceived omitted)
}
Part 5: Android WebView notes
1. Add the permission: AndroidManifest.xml must declare android.permission.INTERNET, or pages fail with a "web page not available" error.
2. Create a WebView component in the target Activity: WebView webView = new WebView(this);
3. Set up the WebView's basic settings:
If the pages you visit use JavaScript, the WebView must enable JavaScript support:
webView.getSettings().setJavaScriptEnabled(true);
Make touch focus work:
requestFocus();
Hide the scrollbars:
this.setScrollBarStyle(SCROLLBARS_OUTSIDE_OVERLAY);
4. Point the WebView at the page to display:
For an internet page, pass the URL to webView.loadUrl(); local pages live in the assets directory and are loaded via a file:///android_asset/ path.
5. If you want link taps handled by your own WebView rather than opened in Android's system browser, attach an event-listener object, a WebViewClient, to the WebView and override some of its methods:
shouldOverrideUrlLoading: responds to hyperlinks in the page; when a link is tapped, the WebViewClient calls this method and passes in the tapped URL.
onLoadResource
onPageStarted
onPageFinished
onReceivedError
onReceivedHttpAuthRequest
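A minimal client sketch (shown with the pre-API-24 shouldOverrideUrlLoading signature, which matches this article's era):

webView.setWebViewClient(new WebViewClient() {
    @Override
    public boolean shouldOverrideUrlLoading(WebView view, String url) {
        view.loadUrl(url); // keep navigation inside this WebView
        return true;       // true = we consumed the tap
    }

    @Override
    public void onReceivedError(WebView view, int errorCode, String description, String failingUrl) {
        // e.g. show a local error page instead of the default one
    }
});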
6. After following links several pages deep, pressing the system Back key would otherwise finish() the whole browser Activity. To make Back step the WebView's history backwards instead of exiting, handle and consume the key event in the current Activity by overriding Activity's onKeyDown(int keyCode, KeyEvent event):
public boolean onKeyDown(int keyCode, KeyEvent event) {
    if (webView.canGoBack() && keyCode == KeyEvent.KEYCODE_BACK) {
        webView.goBack(); // goBack() steps back to the WebView's previous page
        return true;
    }
    return super.onKeyDown(keyCode, event);
}