|
上次靜下心來看opencore大概是半年前,看得云里霧里的,最近為了一個在線的視頻調試再次看看opencore,感覺明朗了很多,下面的東西雖然大部分都是代碼,但是代碼中的注釋和對整個opencore框架的解釋我覺得都是合情合理的,有興趣的人可以參考一下。 Opencore小結 在android框架層,它認識的就是MediaPlayerInterface。對于一個能夠播放音樂的東西,我們看google是怎么抽象的。 首先是音頻設備的抽象: // AudioSink: abstraction layer for audio output class AudioSink : public RefBase { public: typedef void (*AudioCallback)(
AudioSink *audioSink, void *buffer, size_t size, void *cookie); virtual ~AudioSink() {} //析構函數(shù)一般為虛函數(shù) virtual bool ready() const = 0; // audio output is open and ready virtual bool realtime() const = 0; // audio output is real-time output virtual ssize_t bufferSize() const = 0; virtual ssize_t frameCount() const = 0; virtual ssize_t channelCount() const = 0; virtual ssize_t frameSize() const = 0; virtual uint32_t latency() const = 0; virtual float msecsPerFrame() const = 0; // If no callback is specified, use the "write" API below to submit // audio data. Otherwise return a full buffer of audio data on each // callback. 播放音樂的兩種方式,一個是write輸出 //一個是通過callback,當?shù)讓佑X得音頻不夠的時候就會調用這個callback //這個時候,callback返回一個緩存給下面就行了 //打開一個設備 virtual status_t open(
uint32_t sampleRate, int channelCount,
int format=AudioSystem::PCM_16_BIT,
int bufferCount=DEFAULT_AUDIOSINK_BUFFERCOUNT,
AudioCallback cb = NULL,
void *cookie = NULL) = 0; //開始播放 virtual void start() = 0; //寫入數(shù)據(jù) virtual ssize_t write(const void* buffer, size_t size) = 0; //停止 virtual void stop() = 0; //釋放掉緩存 virtual void flush() = 0; //暫停 virtual void pause() = 0; //關閉 virtual void close() = 0; }; 然后是播放一個文件的接口
MediaPlayerBase() : mCookie(0), mNotify(0) {} virtual ~MediaPlayerBase() {} virtual status_t initCheck() = 0; //初始化檢查 virtual bool hardwareOutput() = 0; //是否直接音頻硬件輸出 或者使用audioFlinger virtual status_t setDataSource(const char *url) = 0; //通過字符串設置播放源 virtual status_t setDataSource(int fd, int64_t offset, int64_t length) = 0;//通過文件句柄和偏移量來設置 virtual status_t setVideoSurface(const sp<ISurface>& surface) = 0;
//設置我們視頻輸出的surface
virtual status_t prepare() = 0; //開始準備 virtual status_t prepareAsync() = 0; //異步準備,這個準備是可以直接返回,準備好了通過消息機制回調上層,這個接口是為了兼容那些網(wǎng)絡媒體文件,準備時間比較長 virtual status_t start() = 0; //開始播放 virtual status_t stop() = 0;//停止播放 virtual status_t pause() = 0;//暫停 virtual bool isPlaying() = 0; //是否在播放 virtual status_t seekTo(int msec) = 0;//跳轉 virtual status_t getCurrentPosition(int *msec) = 0;//獲得當期位置
virtual status_t getDuration(int *msec) = 0;//獲得總長度 virtual status_t reset() = 0;//重置
virtual status_t setLooping(int loop) = 0; //設置循環(huán) virtual player_type playerType() = 0; //播放類型 應該是2.1新加函數(shù) virtual void setNotifyCallback(void* cookie, notify_callback_f notifyFunc) {
mCookie = cookie; mNotify = notifyFunc; }//回調函數(shù) // Invoke a generic method on the player by using opaque parcels // for the request and reply. 通過不透明的包裹調用播放器的通用方法
// // @param request Parcel that is positioned at the start of the // data sent by the java layer. //來自java層的數(shù)據(jù) // @param[out] reply Parcel to hold the reply data. Cannot be null. // @return OK if the call was successful. virtual status_t invoke(const Parcel& request, Parcel *reply) = 0; // The Client in the MetadataPlayerService calls this method on // the native player to retrieve all or a subset of metadata.
// // @param ids SortedList of metadata ID to be fetch. If empty, all // the known metadata should be returned. // @param[inout] records Parcel where the player appends its metadata. // @return OK if the call was successful. virtual status_t getMetadata(const
media::Metadata::Filter& ids, Parcel *records) { return
INVALID_OPERATION; }; protected: //這個是一個保護的函數(shù),只能子類和自己可以使用 virtual void
sendEvent(int msg, int ext1=0, int ext2=0) { if (mNotify)
mNotify(mCookie, msg, ext1, ext2); } void* mCookie; notify_callback_f
mNotify; }; 看完了android對其的封裝,我們看看opencore是如何實現(xiàn)的。 首先簡單的說明幾個概念,opencore下面會有很多的編解碼方式,文件的播放流程類似于DShow的流水線,讀取 解碼
顯示等幾個不同的模塊,每一個模塊都稱為node,播放之前,有一個文件類型識別的子模塊,識別之后才能搭建整個流水線。 在opencore下面有這樣的一個類: class PVPlayer : public MediaPlayerInterface { public: PVPlayer(); virtual ~PVPlayer(); virtual status_t initCheck(); virtual status_t setDataSource(const char *url); virtual status_t
setDataSource(int fd, int64_t offset, int64_t length); virtual status_t
setVideoSurface(const sp<ISurface>& surface); virtual status_t prepare(); virtual status_t prepareAsync(); virtual status_t start(); virtual
status_t stop(); virtual status_t pause(); virtual bool isPlaying(); virtual status_t seekTo(int msec); virtual status_t
getCurrentPosition(int *msec); virtual status_t getDuration(int *msec); virtual status_t reset(); virtual status_t setLooping(int loop); virtual player_type playerType() { return PV_PLAYER; } virtual status_t
invoke(const Parcel& request, Parcel *reply); virtual status_t
getMetadata( const SortedVector<media::Metadata::Type>& ids, Parcel *records); // make
available to PlayerDriver void sendEvent(int msg, int ext1=0, int
ext2=0) { MediaPlayerBase::sendEvent(msg, ext1, ext2); } //上面的函數(shù)都不用看,我們主要看看它有這樣的 幾個私有的靜態(tài)函數(shù) //靜態(tài)函數(shù)在類中,一般都是作為線程函數(shù)的,或者做一些與本身關系不大的操作。 private: static void
do_nothing(status_t s, void *cookie, bool cancelled) { } //真的什么都沒有做 static void run_init(status_t s, void *cookie, bool cancelled); static
void run_set_video_surface(status_t s, void *cookie, bool cancelled); static void run_set_audio_output(status_t s, void *cookie, bool
cancelled); static void run_prepare(status_t s, void *cookie, bool
cancelled); static void check_for_live_streaming(status_t s, void
*cookie, bool cancelled); //注意下面的幾個東東 PlayerDriver* mPlayerDriver; //這個是一個中間機制很重要,維護著一個消息隊列,后面會講述 char * mDataSourcePath; //記錄著播放的源 bool
mIsDataSourceSet; //這個不是很清楚 下面見到再說 sp<ISurface> mSurface; //一般用opencore基本上都會有視頻 int mSharedFd; //文件句柄 status_t mInit; //狀態(tài) int mDuration; //長度 #ifdef
MAX_OPENCORE_INSTANCES static volatile int32_t sNumInstances; //同時可以有幾個實例存在 #endif }; //這里cookie是一個PVPlayer的指針,通過PVPlayer得到PlayerDriver的指針,然后往這個消息隊列中發(fā)送一個消息。 注意
我們發(fā)送的是一個PlayerInit的消息,這個消息析構的時候,傳遞了兩個變量,后面的cookie是PVPlayer的指針,前面的是一個函數(shù)指
針,當處理完這個消息的時候,如果發(fā)現(xiàn)這個回調的函數(shù)指針還存在,就會繼續(xù)調用這個函數(shù)。 void
PVPlayer::run_init(status_t s, void *cookie, bool cancelled) { LOGV("run_init s=%d, cancelled=%d", s, cancelled); if (s == NO_ERROR
&& !cancelled) { PVPlayer *p = (PVPlayer*)cookie; p->mPlayerDriver->enqueueCommand(new
PlayerInit(run_set_video_surface, cookie)); } } 我們接著看run_set_video_surface,當我們的消息隊列處理了PlayerInit這個消息,然后就會跳入這個函數(shù)。注意這個處理過程
都是在另外一個線程,往消息隊列添加的線程和處理消息的線程一般是不一樣的線程。 void
PVPlayer::run_set_video_surface(status_t s, void *cookie, bool
cancelled) { LOGV("run_set_video_surface s=%d, cancelled=%d", s,
cancelled); if (s == NO_ERROR && !cancelled) { // If we don't
have a video surface, just skip to the next step. PVPlayer *p =
(PVPlayer*)cookie; if (p->mSurface == NULL) { //如果只有音頻,注意這里在前面的初始化的時候,我們就應該知道這個視頻的具體的信息了,是否有音頻或者視頻。 run_set_audio_output(s, cookie, false); } else { //如果有視頻就會先初始化視頻然后跳轉到音頻處理,注意這里的每一個函數(shù)都不是實際的處理地方,只是發(fā)送一條消息。 p->mPlayerDriver->enqueueCommand(new PlayerSetVideoSurface(p->mSurface,
run_set_audio_output, cookie)); } } } 可以看到這個函數(shù),往自身隊列中又加入了一個消息PlayerSetVideoSurface,這個消息是如何處理的我們后面再看如何初始化音頻。 void PVPlayer::run_set_audio_output(status_t s, void *cookie, bool
cancelled) { LOGV("run_set_audio_output s=%d, cancelled=%d", s,
cancelled); if (s == NO_ERROR && !cancelled) { PVPlayer *p =
(PVPlayer*)cookie; p->mPlayerDriver->enqueueCommand(new
PlayerSetAudioSink(p->mAudioSink, run_prepare, cookie)); } } 等音頻設備也初始化之后,我們跳進run_prepare,表示視頻播放鏈路已經(jīng)搭建成功,下面你可以選擇開始播放或者暫停了。 void
PVPlayer::run_prepare(status_t s, void *cookie, bool cancelled) {
LOGV("run_prepare s=%d, cancelled=%d", s, cancelled); if (s == NO_ERROR
&& !cancelled) { PVPlayer *p = (PVPlayer*)cookie; p->mPlayerDriver->enqueueCommand(new
PlayerPrepare(check_for_live_streaming, cookie)); } }
//如果是流媒體文件,這里我們還要有多余的處理,這些處理都是發(fā)送消息到隊列。 void
PVPlayer::check_for_live_streaming(status_t s, void *cookie, bool
cancelled) { LOGV("check_for_live_streaming s=%d, cancelled=%d", s,
cancelled); if (s == NO_ERROR && !cancelled) { PVPlayer *p =
(PVPlayer*)cookie; if ( (p->mPlayerDriver->getFormatType() ==
PVMF_MIME_DATA_SOURCE_RTSP_URL) ||
(p->mPlayerDriver->getFormatType() ==
PVMF_MIME_DATA_SOURCE_MS_HTTP_STREAMING_URL) ) {
p->mPlayerDriver->enqueueCommand(new PlayerCheckLiveStreaming(
do_nothing, NULL)); } } } 這個完成之后就是donothing,表示我們的一條處理鏈路完成。 我們發(fā)現(xiàn)其實這個run_init就完成文件識別和鏈路構架,音視頻輸出設備的基本工作,我們要看的重點就在這幾個消息的處理。那么這個run_init
上面是怎么調用下來的呢?上層的調用肯定是通過基類的函數(shù),因為android只認識基類。找了半天可以看到: status_t
PVPlayer::prepareAsync() { LOGV("prepareAsync"); status_t ret = OK; //如果發(fā)現(xiàn)還沒有初始化視頻源,首先初始化視頻源,當你都不知道要播放什么后面的工作基本上是無效的。一般我們的run_init是緊接著設置
PlayerSetDataSource這個消息后面的。如果發(fā)現(xiàn),視頻源已經(jīng)被設置,那么我們也知道我們的init功能基本已經(jīng)被調用過。 if
(!mIsDataSourceSet) { // If data source has NOT been set. // Set our
data source as cached in setDataSource() above. LOGV(" data source =
%s", mDataSourcePath); ret = mPlayerDriver->enqueueCommand(new
PlayerSetDataSource(mDataSourcePath,run_init,this)); mIsDataSourceSet =
true; } else { // If data source has been already set. // No need to
run a sequence of commands. // The only code needed to run is
PLAYER_PREPARE. //這個時候只用讓視頻開始準備,然后check是否為流媒體 ret =
mPlayerDriver->enqueueCommand(new
PlayerPrepare(check_for_live_streaming, this)); } return ret; } 初始化音頻設備和視頻設備只能調用一次,后面的prepare 和 live_stream可以多次的調用。 好了
這個類幾個私有函數(shù)基本說完,我們回到正軌,上層的調用。 首先看構造函數(shù): PVPlayer::PVPlayer() { LOGV("PVPlayer constructor"); mDataSourcePath = NULL; mSharedFd = -1; mIsDataSourceSet = false; mDuration = -1; mPlayerDriver = NULL;
//連這個都是空的 消息隊列線程都不存在 #ifdef MAX_OPENCORE_INSTANCES
//如果有最大實例的限制,那么在構造的時候就會有一個判斷,然后將mInit 設置為繁忙 if
(android_atomic_inc(&sNumInstances) >= MAX_OPENCORE_INSTANCES) { LOGW("Exceeds maximum number of OpenCore instances"); mInit = -EBUSY;
return; } #endif //然后就是一些初始化的工作 LOGV("construct PlayerDriver"); //Driver的初始化,實際上就是構建了一條消息隊列 mPlayerDriver = new PlayerDriver(this); LOGV("send PLAYER_SETUP"); //往這個消息隊列發(fā)送的第一個消息就是setup PlayerSetup* setup
= new PlayerSetup(0,0); mInit =
mPlayerDriver->enqueueCommand(setup); if (mInit == NO_INIT) { delete setup; } } //Setup之后調用這個函數(shù) 看是否初始化成功 這個函數(shù)在opencore中不重要 //只是為了照顧接口的完整性 status_t PVPlayer::initCheck() { return mInit; } 下面先看看析構,當我們的播放結束之后,是如何析構的, PVPlayer::~PVPlayer() { LOGV("PVPlayer
destructor"); if (mPlayerDriver != NULL) { PlayerQuit quit =
PlayerQuit(0,0); mPlayerDriver->enqueueCommand(&quit); // will
wait on mSyncSem, signaled by player thread //這個消息會產(chǎn)生等待結束工作完成。 } free(mDataSourcePath); if (mSharedFd >= 0) { close(mSharedFd); } #ifdef MAX_OPENCORE_INSTANCES android_atomic_dec(&sNumInstances); #endif } 然后就是setdatabase,這兩個函數(shù)都是比較簡單的,只是簡單的賦值。 status_t PVPlayer::setDataSource(const char *url) { LOGV("setDataSource(%s)", url); if (mSharedFd >= 0) {
close(mSharedFd); mSharedFd = -1; } free(mDataSourcePath); mDataSourcePath = NULL; // Don't let somebody trick us in to reading
some random block of memory if (strncmp("sharedfd://", url, 11) == 0) return android::UNKNOWN_ERROR; mDataSourcePath = strdup(url); return
OK; } status_t PVPlayer::setDataSource(int fd, int64_t offset, int64_t
length) { // This is all a big hack to allow PV to play from a file
descriptor. // Eventually we'll fix PV to use a file descriptor
directly instead // of using mmap(). LOGV("setDataSource(%d, %lld,
%lld)", fd, offset, length); if (mSharedFd >= 0) { close(mSharedFd);
mSharedFd = -1; } free(mDataSourcePath); mDataSourcePath = NULL; char
buf[80]; mSharedFd = dup(fd); sprintf(buf, "sharedfd://%d:%lld:%lld",
mSharedFd, offset, length); mDataSourcePath = strdup(buf); return OK; } status_t PVPlayer::setVideoSurface(const sp<ISurface>& surface) { LOGV("setVideoSurface(%p)", surface.get()); mSurface = surface; return
OK; } 有人會問為什么沒有設置音頻,因為音頻對于開發(fā)者來說過于簡單,一個設備就一個喇叭,音頻只能往那里輸出,但是對于這么大的一塊屏幕,視頻是怎么輸出,甚
至是輸出到overlay上面,所以非常有必要有上面的一個函數(shù)。 下面是一個較為重要的函數(shù): status_t
PVPlayer::prepare() { status_t ret; // We need to differentiate the two
valid use cases for prepare(): // 1. new
PVPlayer/reset()->setDataSource()->prepare() // 2. new
PVPlayer/reset()->setDataSource()->prepare()/prepareAsync() //
->start()->...->stop()->prepare() // If data source has
already been set previously, no need to run // a sequence of commands
and only the PLAYER_PREPARE code needs // to be run. //首先
我們的視頻源有沒有被初始化過,如果沒有這個時候就要初始化,注意這個時候Cmmand的callback參數(shù)為空,表示這個調用是阻塞的。因為我們的
prepare本身就是阻塞的。 if (!mIsDataSourceSet) { // set data source LOGV("prepare"); LOGV(" data source = %s", mDataSourcePath); ret = mPlayerDriver->enqueueCommand(new PlayerSetDataSource(mDataSourcePath,0,0)); //這里等到消息處理之后再返回 阻塞 if (ret !=
OK) return ret; // init LOGV(" init"); ret =
mPlayerDriver->enqueueCommand(new PlayerInit(0,0)); //同樣 有一個阻塞調用 if
(ret != OK) return ret; // set video surface, if there is one if
(mSurface != NULL) { LOGV(" set video surface"); //阻塞調用設置視頻 ret =
mPlayerDriver->enqueueCommand(new
PlayerSetVideoSurface(mSurface,0,0)); if (ret != OK) return ret; } //
set audio output // If we ever need to expose selectable audio output
setup, this can be broken // out. In the meantime, however, system
audio routing APIs should suffice. LOGV(" set audio sink"); //阻塞調用設置視頻 ret = mPlayerDriver->enqueueCommand(new
PlayerSetAudioSink(mAudioSink,0,0)); if (ret != OK) return ret; // New
data source has been set successfully. mIsDataSourceSet = true; } //
prepare 最后一個是非阻塞,無論PlayerPrepare有沒有處理,我們這個函數(shù)都返回 LOGV(" prepare"); return mPlayerDriver->enqueueCommand(new
PlayerPrepare(check_for_live_streaming, this)); } 然后使一些播放的控制接口start stop pause,這里這些函數(shù)不是很難,簡單的省略。 status_t PVPlayer::start() { LOGV("start"); return mPlayerDriver->enqueueCommand(new
PlayerStart(0,0)); } status_t PVPlayer::stop() { LOGV("stop"); return
mPlayerDriver->enqueueCommand(new PlayerStop(0,0)); } status_t
PVPlayer::pause() { LOGV("pause"); return
mPlayerDriver->enqueueCommand(new PlayerPause(0,0)); } bool
PVPlayer::isPlaying() { int status = 0; if
(mPlayerDriver->enqueueCommand(new PlayerGetStatus(&status,0,0))
== NO_ERROR) { return (status == PVP_STATE_STARTED); } return false; } status_t PVPlayer::getCurrentPosition(int *msec) { return
mPlayerDriver->enqueueCommand(new PlayerGetPosition(msec,0,0)); } status_t PVPlayer::getDuration(int *msec) { status_t ret =
mPlayerDriver->enqueueCommand(new PlayerGetDuration(msec,0,0)); if
(ret == NO_ERROR) mDuration = *msec; return ret; } 上面的函數(shù)全部阻塞。
但是這個跳轉函數(shù)是非阻塞的,因為可能跳轉會很耗時 status_t PVPlayer::seekTo(int msec) { LOGV("seekTo(%d)", msec); // can't always seek to end of streams - so
we fudge a little if ((msec == mDuration) && (mDuration >
0)) { msec--; LOGV("Seek adjusted 1 msec from end"); } return
mPlayerDriver->enqueueCommand(new PlayerSeek(msec,do_nothing,0)); } 播放器重置: //阻塞的調用 status_t PVPlayer::reset() { LOGV("reset"); status_t ret
= mPlayerDriver->enqueueCommand(new PlayerCancelAllCommands(0,0)); // Log failure from CancelAllCommands() and call Reset() regardless. if (ret != NO_ERROR) { LOGE("failed to cancel all exiting PV player engine
commands with error code (%d)", ret); } ret =
mPlayerDriver->enqueueCommand(new PlayerReset(0,0)); // We should
never fail in Reset(), but logs the failure just in case. if (ret !=
NO_ERROR) { LOGE("failed to reset PV player engine with error code
(%d)", ret); } else { ret = mPlayerDriver->enqueueCommand(new
PlayerRemoveDataSource(0,0)); } mSurface.clear(); LOGV("unmap file"); if (mSharedFd >= 0) { close(mSharedFd); mSharedFd = -1; } mIsDataSourceSet = false; return ret; } 上面我們看到最多就是enqueueCommand這個函數(shù),下面我們來簡單的說一下我們的PlayerDriver,這個類是聯(lián)系PVPlayer和下 層的OMX的中間層,主要的作用是降低耦合,上面的控制者(PVPlayer),只需阻塞或者非阻塞的往這個中間層發(fā)送命令,下面具體怎么做我一概不 知,google構架就是牛x。這個類實際上就是一個消息隊列。這個類后面再說。 |
|
|
來自: 開花結果 > 《Opencore》