Question
I'm developing a USB camera streaming desktop application using the Media Foundation SourceReader technique. The camera has USB 3.0 support and delivers 60 fps for the 1080p MJPG video format.
I used the software MJPEG Decoder MFT to convert MJPG to YUY2 frames and then converted those into RGB32 frames to draw on the window. With this software decoder I can render only 30 fps on the window instead of 60 fps. I posted a question on this site and got a suggestion to use the Intel Hardware MJPEG Decoder MFT to solve the frame drop issue.
To use this hardware MJPEG decoder, I followed the asynchronous MFT processing model and configured an asynchronous callback for IMFMediaEventGenerator through the IMFTransform interface.
After calling MFT_MESSAGE_NOTIFY_START_OF_STREAM using the ProcessMessage method, I received the METransformNeedInput event twice, but I did not receive the METransformHaveOutput event from the MFT.
I have shared my code here for your reference:
IMFTransform* m_pTransform = NULL;

HRESULT EnumDecoderMFT()
{
    HRESULT hr;
    IMFActivate** ppActivate = NULL;
    IMFAttributes* pAttributes = NULL;
    UINT32 numDecodersMJPG = 0;
    LPWSTR lpMFTName = NULL;
    UINT32 cchName = 0;
    MFT_REGISTER_TYPE_INFO inputFilter  = { MFMediaType_Video, MFVideoFormat_MJPG };
    MFT_REGISTER_TYPE_INFO outputFilter = { MFMediaType_Video, MFVideoFormat_YUY2 };
    UINT32 unFlags = MFT_ENUM_FLAG_SYNCMFT | MFT_ENUM_FLAG_ASYNCMFT | MFT_ENUM_FLAG_LOCALMFT | MFT_ENUM_FLAG_HARDWARE | MFT_ENUM_FLAG_SORTANDFILTER;

    hr = MFTEnumEx(MFT_CATEGORY_VIDEO_DECODER, unFlags, &inputFilter, &outputFilter, &ppActivate, &numDecodersMJPG);
    if (FAILED(hr)) return hr;

    // Friendly name of the first matching decoder (e.g. the Intel hardware MJPEG decoder).
    hr = ppActivate[0]->GetAllocatedString(MFT_FRIENDLY_NAME_Attribute, &lpMFTName, &cchName);
    if (FAILED(hr)) return hr;

    // Activate transform
    hr = ppActivate[0]->ActivateObject(__uuidof(IMFTransform), (void**)&m_pTransform);
    if (FAILED(hr)) return hr;

    hr = m_pTransform->GetAttributes(&pAttributes);
    if (SUCCEEDED(hr))
    {
        // Unlock the asynchronous processing model.
        hr = pAttributes->SetUINT32(MF_TRANSFORM_ASYNC_UNLOCK, TRUE);
        if (FAILED(hr)) return hr;
        hr = pAttributes->SetUINT32(MFT_SUPPORT_DYNAMIC_FORMAT_CHANGE, TRUE);
        if (FAILED(hr)) return hr;
        hr = pAttributes->SetUINT32(MF_READWRITE_ENABLE_HARDWARE_TRANSFORMS, TRUE);
        if (FAILED(hr)) return hr;

        // Subscribe to the MFT's event queue (METransformNeedInput / METransformHaveOutput).
        hr = m_pTransform->QueryInterface(IID_IMFMediaEventGenerator, (void**)&m_pEventGenerator);
        if (FAILED(hr)) return hr;
        hr = m_pEventGenerator->BeginGetEvent((IMFAsyncCallback*)this, NULL);
        if (FAILED(hr)) return hr;
        pAttributes->Release();
    }
    SafeRelease(&ppActivate[0]);
    CoTaskMemFree(ppActivate);
    return hr;
}
HRESULT Invoke(IMFAsyncResult *pResult)
{
    HRESULT hr = S_OK, hrStatus = S_OK;
    MediaEventType meType = MEUnknown; // Event type
    IMFMediaEvent *pEvent = NULL;

    // Get the event from the event queue.
    hr = m_pEventGenerator->EndGetEvent(pResult, &pEvent); // Completes the asynchronous request for the next event in the queue.
    if (FAILED(hr)) goto done;

    // Get the event type.
    hr = pEvent->GetType(&meType);
    if (FAILED(hr)) goto done;
    hr = pEvent->GetStatus(&hrStatus);
    if (FAILED(hr)) goto done;

    if (SUCCEEDED(hrStatus))
    {
        if (meType == METransformNeedInput)
        {
            SetEvent(m_hNeedInputEvent);
        }
        else if (meType == METransformHaveOutput)
        {
            SetEvent(m_hHaveOutputEvent);
        }
        else if (meType == METransformDrainComplete)
        {
            hr = m_pTransform->ProcessMessage(MFT_MESSAGE_COMMAND_FLUSH, 0);
            if (FAILED(hr)) goto done;
        }
        else if (meType == MEError)
        {
            PROPVARIANT pValue;
            hr = pEvent->GetValue(&pValue);
            if (FAILED(hr)) goto done;
        }

        // Request the next event.
        hr = m_pEventGenerator->BeginGetEvent((IMFAsyncCallback*)this, NULL);
        if (FAILED(hr)) goto done;
    }
done:
    SafeRelease(&pEvent);
    return hr;
}
HRESULT CMFSourceReader::OnReadSample(
    HRESULT hrStatus,
    DWORD dwStreamIndex,
    DWORD dwStreamFlags,
    LONGLONG llTimestamp,
    IMFSample *pSample // Can be NULL
    )
{
    HRESULT hr = S_OK;
    IMFMediaBuffer *pBuffer = NULL;
    DWORD dwcbTotLen = 0;
    IMFSample *mftOutSample = NULL;

    EnterCriticalSection(&m_critsec);

    if (FAILED(hrStatus))
    {
        hr = hrStatus;
    }
    if (SUCCEEDED(hr))
    {
        if (pSample != NULL)
        {
            if (dwStreamIndex == 0) // VideoStream
            {
                if (m_pTransform)
                {
                    hr = m_pTransform->ProcessMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0);
                    if (FAILED(hr)) return hr;

                    m_dwWaitObj = WaitForSingleObject(m_hNeedInputEvent, INFINITE);
                    if (m_dwWaitObj == WAIT_OBJECT_0)
                    {
                        hr = ProcessInputSample(pSample);
                        if (FAILED(hr)) return hr;
                    }
                    m_dwWaitObj = WaitForSingleObject(m_hHaveOutputEvent, INFINITE);
                    if (m_dwWaitObj == WAIT_OBJECT_0)
                    {
                        hr = ProcessOutputSample(&mftOutSample);
                        if (FAILED(hr)) return hr;
                    }
                }
            }
        }
    }
    if (SUCCEEDED(hr))
    {
        if (m_pReader != NULL)
        {
            hr = m_pReader->ReadSample(
                (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM,
                0,
                NULL, // actual
                NULL, // flags
                NULL, // timestamp
                NULL  // sample
                );
            if (FAILED(hr)) return hr;
        }
    }
    SafeRelease(&mftOutSample);
    LeaveCriticalSection(&m_critsec);
    return hr;
}
HRESULT ProcessOutputSample(IMFSample **pOutSample)
{
    HRESULT hr = S_OK;
    MFT_OUTPUT_DATA_BUFFER outputDataBuffer;
    DWORD processOutputStatus = 0, mftOutFlags = 0;
    MFT_OUTPUT_STREAM_INFO StreamInfo;
    IMFSample *mftOutSample = NULL;
    IMFMediaBuffer *pOutBuffer = NULL;

    if (m_pTransform != NULL)
    {
        hr = m_pTransform->GetOutputStreamInfo(0, &StreamInfo);
        if (FAILED(hr)) return hr;

        DWORD status = 0;
        hr = m_pTransform->GetOutputStatus(&status);
        if (FAILED(hr)) return hr;

        hr = MFCreateSample(&mftOutSample);
        if (FAILED(hr)) return hr;
        hr = MFCreateMemoryBuffer(StreamInfo.cbSize, &pOutBuffer);
        if (FAILED(hr)) return hr;
        hr = mftOutSample->AddBuffer(pOutBuffer);
        if (FAILED(hr)) return hr;

        outputDataBuffer.dwStreamID = 0;
        outputDataBuffer.dwStatus = 0;
        outputDataBuffer.pEvents = NULL;
        outputDataBuffer.pSample = mftOutSample;

        hr = m_pTransform->ProcessOutput(0, 1, &outputDataBuffer, &processOutputStatus);
        if (FAILED(hr)) return hr;

        hr = m_pTransform->ProcessMessage(MFT_MESSAGE_NOTIFY_END_OF_STREAM, 0);
        if (FAILED(hr)) return hr;
        hr = m_pTransform->ProcessMessage(MFT_MESSAGE_COMMAND_DRAIN, 0);
        if (FAILED(hr)) return hr;

        if (mftOutSample)
        {
            *pOutSample = mftOutSample;
            (*pOutSample)->AddRef();
        }
        ResetEvent(m_hHaveOutputEvent);
    }
    SafeRelease(&mftOutSample);
    SafeRelease(&pOutBuffer);
    return hr;
}
HRESULT ProcessInputSample(IMFSample *pInputSample)
{
    HRESULT hr = S_OK;
    if (m_pTransform != NULL)
    {
        hr = m_pTransform->ProcessInput(0, pInputSample, 0);
        if (FAILED(hr)) return hr;
        hr = m_pTransform->ProcessMessage(MFT_MESSAGE_NOTIFY_END_OF_STREAM, 0);
        if (FAILED(hr)) return hr;
        ResetEvent(m_hNeedInputEvent);
    }
    return hr;
}
When I commented out the ProcessOutputSample() call in my code and checked, the MFT kept sending the METransformNeedInput event continuously. When I called the ProcessOutput method right after processing an input sample, it returned an E_UNEXPECTED error. I read about this error on MSDN; it says I should not call IMFTransform::ProcessOutput without having received the METransformHaveOutput event.
Am I missing anything? Can I use the Intel Hardware MJPEG Decoder MFT inside Media Foundation? Can someone provide a sample that uses this decoder? I've been struggling with this issue for the past 4 days.
Thanks in advance.
Answer 1:
First, you don't need to call this:
hr = pAttributes->SetUINT32(MFT_SUPPORT_DYNAMIC_FORMAT_CHANGE,TRUE);
The MFT is responsible for this, and because it is asynchronous, you can assume it is TRUE. You can simply check that it really is TRUE by calling GetUINT32.
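For example, a small sketch that only queries the attribute, reusing the pAttributes pointer from the question's EnumDecoderMFT:

UINT32 dynamicFormatChange = FALSE;
if (SUCCEEDED(pAttributes->GetUINT32(MFT_SUPPORT_DYNAMIC_FORMAT_CHANGE, &dynamicFormatChange)))
{
    // An asynchronous hardware MFT is expected to report TRUE here already.
}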
Second:
hr = pAttributes->SetUINT32(MF_READWRITE_ENABLE_HARDWARE_TRANSFORMS, TRUE);
This attribute is not intended for the MFT. It is meant for the source reader or sink writer: see the MF_READWRITE_ENABLE_HARDWARE_TRANSFORMS attribute.
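A minimal sketch of where that attribute belongs, on the reader creation path rather than on the MFT (pMediaSource here is hypothetical):

IMFAttributes* pReaderAttributes = NULL;
HRESULT hrReader = MFCreateAttributes(&pReaderAttributes, 2);
if (SUCCEEDED(hrReader))
{
    // Let the source reader itself load hardware transforms.
    pReaderAttributes->SetUINT32(MF_READWRITE_ENABLE_HARDWARE_TRANSFORMS, TRUE);
    // (The async callback attribute and similar reader settings would also go here.)
    hrReader = MFCreateSourceReaderFromMediaSource(pMediaSource, pReaderAttributes, &m_pReader);
    pReaderAttributes->Release();
}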
From your code, the problem I can see is that you always call hr = m_pTransform->ProcessMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0);
inside OnReadSample, whereas you should call it once, at the beginning.
The same applies to ProcessInputSample and ProcessOutputSample: there you call hr = m_pTransform->ProcessMessage(MFT_MESSAGE_NOTIFY_END_OF_STREAM, 0);
which tells the MFT that the stream has ended...
Your code should handle things like this:
- Begin decode
- MFT_MESSAGE_NOTIFY_START_OF_STREAM
- Process Input as needed
- Process Output as needed
- ...
- ...
- Process Input as needed
- Process Output as needed
- MFT_MESSAGE_NOTIFY_END_OF_STREAM
- End decode
You never receive an output sample because you tell the MFT that the stream has ended right after processing the first input sample.
Read this: MFT_MESSAGE_NOTIFY_START_OF_STREAM
Notifies a Media Foundation transform (MFT) that the first sample is about to be processed.
Yes, the first sample, not all samples.
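A minimal sketch of that sequence, using the same m_pTransform as in the question (error handling omitted; the ordering is the point):

// 1. Once, before feeding any samples (this is what unblocks METransformNeedInput):
hr = m_pTransform->ProcessMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0);

// 2. For every METransformNeedInput event:  m_pTransform->ProcessInput(0, pSample, 0);
//    For every METransformHaveOutput event: m_pTransform->ProcessOutput(...);
//    No MFT_MESSAGE_NOTIFY_END_OF_STREAM in between.

// 3. Only when capture stops and no more samples will arrive:
hr = m_pTransform->ProcessMessage(MFT_MESSAGE_NOTIFY_END_OF_STREAM, 0);
hr = m_pTransform->ProcessMessage(MFT_MESSAGE_COMMAND_DRAIN, 0);
// ... then wait for METransformDrainComplete before flushing or shutting down.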
EDIT
There is another problem in CMFSourceReader::OnReadSample:
m_dwWaitObj = WaitForSingleObject(m_hNeedInputEvent, INFINITE);
if (m_dwWaitObj == WAIT_OBJECT_0)
{
    hr = ProcessInputSample(pSample);
    if (FAILED(hr)) return hr;
}
m_dwWaitObj = WaitForSingleObject(m_hHaveOutputEvent, INFINITE);
if (m_dwWaitObj == WAIT_OBJECT_0)
{
    hr = ProcessOutputSample(&mftOutSample);
    if (FAILED(hr)) return hr;
}
You first wait for m_hNeedInputEvent, then for m_hHaveOutputEvent. But what happens if you receive m_hNeedInputEvent twice before m_hHaveOutputEvent? This code is not correct: you don't handle Invoke correctly, and OnReadSample should only be called once ProcessInput has completed. The general design does not seem correct.
UPDATE
When you receive a sample in CMFSourceReader::OnReadSample, you just need to queue the sample in a list (Queue(Sample)). To manage the sample list, you can use this type of code: SamplePool/ThreadSafeQueue.
In CMFSourceReader::Invoke, when you receive METransformNeedInput, just DeQueue(Sample) and call ProcessInputSample.
In CMFSourceReader::Invoke, when you receive METransformHaveOutput, call ProcessOutputSample.
Two things:
- You can call m_pReader->ReadSample three times at the beginning of the program and wait until you have three samples in the list. Once you have three samples you can start decoding; that way you can be sure that when METransformNeedInput occurs, you have a sample ready to process. Call m_pReader->ReadSample again at that moment, after ProcessInputSample, to maintain three samples in the list.
- It's possible that the decoder processes samples faster than the source reader can read them, or the opposite. So check that when METransformNeedInput occurs there are still samples in the list. The strategy is to maintain a reasonable sample count, say three, throughout the decoding process (see the sketch below).
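A minimal sketch of this Invoke-driven design, assuming a hypothetical thread-safe queue member m_SampleQueue (filled from OnReadSample) and reusing the question's helper methods:

HRESULT CMFSourceReader::Invoke(IMFAsyncResult* pResult)
{
    IMFMediaEvent* pEvent = NULL;
    MediaEventType meType = MEUnknown;
    HRESULT hr = m_pEventGenerator->EndGetEvent(pResult, &pEvent);
    if (SUCCEEDED(hr)) hr = pEvent->GetType(&meType);
    if (SUCCEEDED(hr))
    {
        if (meType == METransformNeedInput)
        {
            IMFSample* pSample = NULL;
            if (m_SampleQueue.DeQueue(&pSample))   // hypothetical thread-safe queue
            {
                m_pTransform->ProcessInput(0, pSample, 0);
                pSample->Release();
                // Ask the reader for the next sample to keep roughly three queued.
                m_pReader->ReadSample((DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, 0, NULL, NULL, NULL, NULL);
            }
        }
        else if (meType == METransformHaveOutput)
        {
            IMFSample* pDecoded = NULL;
            ProcessOutputSample(&pDecoded);        // no END_OF_STREAM / DRAIN inside
            // ... hand pDecoded to the renderer, then release it.
            SafeRelease(&pDecoded);
        }
        hr = m_pEventGenerator->BeginGetEvent((IMFAsyncCallback*)this, NULL); // request the next event
    }
    SafeRelease(&pEvent);
    return hr;
}

With this design, OnReadSample only queues the incoming sample and returns, so the reader callback never blocks waiting for decoder events.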
Answer 2:
I too had the same E_UNEXPECTED ("Unspecified Error") error returned by the transform's event generator immediately after feeding the first input sample on Intel hardware; further calls just returned "transform needs more input", but no output was ever produced. The same code worked fine on Nvidia machines, though. After a lot of experimenting and researching, I figured out that I was creating too many D3D11 device instances: in my case I created 2 to 3 devices for capturing, color conversion, and the hardware encoder respectively, whereas I could simply have reused a single D3D11 device instance. Creating multiple D3D11 device instances might work on high-end machines, though. This is not documented anywhere; I was unable to find even a clue about the cause of the E_UNEXPECTED error, and it's mentioned nowhere. There are many similar Stack Overflow threads left unanswered, and even Microsoft people were unable to point out the problem given the complete source code.
Reusing a single D3D11 device instance solved the problem.
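For reference, a minimal sketch (not from the original answer; variable names are illustrative and error handling is trimmed) of sharing one D3D11 device across Media Foundation components through a single DXGI device manager:

UINT resetToken = 0;
IMFDXGIDeviceManager* pDeviceManager = NULL;
ID3D11Device* pD3DDevice = NULL;

// Create a single D3D11 device with video support and hand it to one shared manager.
HRESULT hr = D3D11CreateDevice(NULL, D3D_DRIVER_TYPE_HARDWARE, NULL,
                               D3D11_CREATE_DEVICE_VIDEO_SUPPORT,
                               NULL, 0, D3D11_SDK_VERSION,
                               &pD3DDevice, NULL, NULL);
if (SUCCEEDED(hr)) hr = MFCreateDXGIDeviceManager(&resetToken, &pDeviceManager);
if (SUCCEEDED(hr)) hr = pDeviceManager->ResetDevice(pD3DDevice, resetToken);

// Give the same manager to every component instead of creating new devices, e.g. to an MFT:
if (SUCCEEDED(hr)) hr = m_pTransform->ProcessMessage(MFT_MESSAGE_SET_D3D_MANAGER, (ULONG_PTR)pDeviceManager);
// A source reader can receive it through the MF_SOURCE_READER_D3D_MANAGER attribute.

Every component then works against the same underlying device, which avoids the multiple-device situation described above.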
Source: https://stackoverflow.com/questions/42277350/asynchronous-mft-is-not-sending-mftransformhaveoutput-eventintel-hardware-mjpeg