我正在尝试编写一个使用 Media Foundation 的 Win32 C++ 程序，用于把桌面捕获为 MP4 视频。我发现在屏幕分辨率超过 1920x1080 的 PC 上，捕获会失败：调用 SetInputMediaType() 时返回错误 0xC00D36B4（为媒体类型指定的数据无效、不一致或不受此对象支持）。
有什么解决办法吗?
// Creates a windowed Direct3D9 device sized to the current desktop display
// mode, plus an offscreen system-memory ARGB surface that serves as the
// destination for GetFrontBufferData() screen grabs.
// ppDevice / ppSurface : receive caller-owned COM objects on success.
// uiWidth / uiHeight   : receive the desktop width/height in pixels.
// Returns S_OK on success, otherwise the failing HRESULT.
HRESULT InitializeDirect3D9(IDirect3DDevice9** ppDevice, IDirect3DSurface9** ppSurface, UINT32& uiWidth, UINT32& uiHeight)
{
    IDirect3D9* d3d = Direct3DCreate9(D3D_SDK_VERSION);
    if (d3d == NULL)
        return E_POINTER;

    // The desktop resolution determines both the back buffer and capture size.
    D3DDISPLAYMODE mode;
    HRESULT hr = d3d->GetAdapterDisplayMode(D3DADAPTER_DEFAULT, &mode);
    if (FAILED(hr))
    {
        SafeRelease(&d3d);
        return hr;
    }

    D3DPRESENT_PARAMETERS parameters = { 0 };
    parameters.Windowed = TRUE;
    parameters.BackBufferCount = 1;
    uiHeight = parameters.BackBufferHeight = mode.Height;
    uiWidth = parameters.BackBufferWidth = mode.Width;
    parameters.SwapEffect = D3DSWAPEFFECT_DISCARD;
    parameters.hDeviceWindow = NULL;

    // BUGFIX: the posted snippet contained the mojibake "¶meters" here --
    // the address-of operator was lost to HTML entity mangling ("&para" +
    // "meters"). Restored to &parameters.
    hr = d3d->CreateDevice(D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, NULL, D3DCREATE_SOFTWARE_VERTEXPROCESSING, &parameters, ppDevice);
    if (FAILED(hr))
    {
        SafeRelease(&d3d);
        return hr;
    }

    // System-memory surface so GetFrontBufferData() can copy the front buffer
    // into CPU-readable memory (D3DFMT_A8R8G8B8 = 32-bit BGRA).
    hr = (*ppDevice)->CreateOffscreenPlainSurface(mode.Width, mode.Height, D3DFMT_A8R8G8B8, D3DPOOL_SYSTEMMEM, ppSurface, nullptr);
    SafeRelease(&d3d);
    return hr;
}
// Truncated duplicate of SGCaptureDecktopVideo::InitializeSinkWriter() as
// first posted in the question (the complete version appears further below in
// this document). Creates the MP4 sink writer for VideoPath and begins
// configuring the encoded output media type.
HRESULT SGCaptureDecktopVideo::InitializeSinkWriter(std::wstring VideoPath, IMFSinkWriter **ppWriter, DWORD *pStreamIndex, const UINT32 uiWidth, const UINT32 uiHeight)
{
// Clear the caller's outputs before doing any work.
*ppWriter = NULL;
*pStreamIndex = NULL;
IMFSinkWriter *pSinkWriter = NULL;
// pMediaTypeOut: encoded (output) type; pMediaTypeIn: uncompressed (input) type.
IMFMediaType *pMediaTypeOut = NULL;
IMFMediaType *pMediaTypeIn = NULL;
DWORD streamIndex;
// Create a sink writer that encodes/muxes into the target file path.
HRESULT hr = MFCreateSinkWriterFromURL(VideoPath.c_str(), NULL, NULL, &pSinkWriter);
// Set the output media type.
if (SUCCEEDED(hr))
{
hr = MFCreateMediaType(&pMediaTypeOut);
}
if (SUCCEEDED(hr))
{
hr = pMediaTypeOut->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
}
// The encoded subtype comes from the recorder's capture parameters.
if (SUCCEEDED(hr))
{
hr = pMediaTypeOut->SetGUID(MF_MT_SUBTYPE, myVideoRecorder.CaptureParams.VIDEO_ENCODING_FORMAT);
}
// (snippet truncated at this point in the original post)
（原文此处代码片段被截断，完整的 InitializeSinkWriter() 代码见下文的“更新”部分。）
// Capture-thread entry point: initializes COM and Media Foundation, creates
// the D3D9 capture device and the MP4 sink writer, then writes one frame per
// iteration until bStopRecording is set or WriteFrame() fails.
// NOTE(review): `result`, `bStopRecording` and `myVideoRecorder` are globals
// defined elsewhere in this project; `args` is currently unused here.
void vidThread(vidThreadArgs *args)
{
    WriteLogFile(L"Thread started");
    HRESULT hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED);
    if (SUCCEEDED(hr))
    {
        WriteLogFile(L"CoInitializeEx - success");
        hr = MFStartup(MF_VERSION);
        if (SUCCEEDED(hr))
        {
            WriteLogFile(L"MFStartup - success");
            UINT32 uiWidth = 0;
            UINT32 uiHeight = 0;
            IDirect3DDevice9* pDevice = NULL;
            IDirect3DSurface9* pSurface = NULL;
            hr = myVideoRecorder.InitializeDirect3D9(&pDevice, &pSurface, uiWidth, uiHeight);
            if (SUCCEEDED(hr))
            {
                WriteLogFile(L"InitializeDirect3D9 - success");
                IMFSinkWriter *pSinkWriter = NULL;
                DWORD stream;
                hr = myVideoRecorder.InitializeSinkWriter(myVideoRecorder.CaptureParams.VideoPath, &pSinkWriter, &stream, uiWidth, uiHeight);
                if (SUCCEEDED(hr))
                {
                    WriteLogFile(L"InitializeSinkWriter - success");
                    LONGLONG rtStart = 0;                      // sample timestamp, 100-ns units
                    unsigned long long int frameCounter = 0;
                    while (true)
                    {
                        hr = myVideoRecorder.WriteFrame(pDevice, pSurface, pSinkWriter, stream, rtStart, uiWidth, uiHeight);
                        frameCounter++;
                        if (FAILED(hr))
                        {
                            // BUGFIX: COM calls report failure via the returned
                            // HRESULT, not GetLastError(); also, %d for an
                            // unsigned long long was undefined behavior -> %llu.
                            WriteLogFile(L"Internal error 0x%x Frame %llu\n",
                                hr, frameCounter);
                            break;
                        }
                        rtStart += myVideoRecorder.CaptureParams.VIDEO_FRAME_DURATION;
                        if (bStopRecording)
                        {
                            WriteLogFile(L"Finalizing recording of file %s. Frame %llu",
                                myVideoRecorder.CaptureParams.VideoPath.c_str(),
                                frameCounter);
                            break;
                        }
                    }
                }
                else
                {
                    // BUGFIX: log the failing HRESULT instead of GetLastError(),
                    // which is meaningless after a COM/MF failure.
                    WriteLogFile(L"failed here. Error 0x%x\n", hr);
                }
                if (SUCCEEDED(hr))
                {
                    // Flush pending samples and write the MP4 headers/index.
                    hr = pSinkWriter->Finalize();
                }
                SafeRelease(&pSinkWriter);
            }
            else
            {
                // BUGFIX: log hr rather than GetLastError() (see above).
                WriteLogFile(L"InitializeDirect3D9 - failed Error 0x%x", hr);
                result = STATUS_ERROR_INIT_DIRECT3D9;
            }
            SafeRelease(&pDevice);
            SafeRelease(&pSurface);
            MFShutdown();
            WriteLogFile(L"MFShutDown");
        }
        else
        {
            result = STATUS_ERROR_MFSSTARTUP;
            WriteLogFile(L"MFStartup failed");
        }
        CoUninitialize();
    }
    else
    {
        // BUGFIX: log hr rather than GetLastError() (see above).
        result = STATUS_ERROR_COINITILIZE;
        WriteLogFile(L"CoInitilize - failed Error 0x%x", hr);
    }
} // BUGFIX: the function's closing brace was missing from the posted snippet
// Captures one desktop frame from the D3D9 front buffer and submits it to the
// sink writer as a single uncompressed 32-bit sample.
// pDevice / pSurface : device and system-memory surface from InitializeDirect3D9().
// rtStart            : presentation time of the sample in 100-ns units.
// Returns the first failing HRESULT encountered, or S_OK.
HRESULT WriteFrame(IDirect3DDevice9* pDevice, IDirect3DSurface9*
    pSurface, IMFSinkWriter* pWriter, DWORD streamIndex, const
    LONGLONG& rtStart, const UINT32 uiWidth, const UINT32 uiHeight){
    // Copy the visible desktop into the CPU-readable surface.
    HRESULT hr = pDevice->GetFrontBufferData(0, pSurface);
    if(FAILED(hr)){
        return hr;
    }
    D3DLOCKED_RECT rc;
    hr = pSurface->LockRect(&rc, NULL, 0);
    if(FAILED(hr)){
        return hr;
    }
    IMFSample *pSample = NULL;
    IMFMediaBuffer *pBuffer = NULL;
    const LONG cbWidth = 4 * uiWidth;           // packed bytes per destination row
    const DWORD cbBuffer = cbWidth * uiHeight;  // total frame size in bytes
    BYTE *pData = NULL;
    // Create a new memory buffer.
    hr = MFCreateMemoryBuffer(cbBuffer, &pBuffer);
    // Lock the buffer and copy the video frame to the buffer.
    if(SUCCEEDED(hr)){
        hr = pBuffer->Lock(&pData, NULL, NULL);
    }
    if(SUCCEEDED(hr)){
#ifdef REVERSE_IMAGE
        // Copy rows bottom-up to flip the image vertically.
        // BUGFIX: use unsigned loop indices (the dimensions are UINT32), and
        // index source rows with rc.Pitch -- the surface pitch can be wider
        // than 4*uiWidth, notably at non-standard/high resolutions.
        for(UINT32 i = 0, j = uiHeight - 1; i < uiHeight; i++, j--)
            for(LONG k = 0; k < cbWidth; k++)
                pData[(i * cbWidth) + k] = ((BYTE*)rc.pBits)[(j *
                    rc.Pitch) + k];
#else
        // MFCopyImage copes with a source pitch that differs from the packed
        // destination stride.
        hr = MFCopyImage(pData, cbWidth, (BYTE*)rc.pBits, rc.Pitch,
            cbWidth, uiHeight);
#endif
    }
    if(pBuffer){
        pBuffer->Unlock();
    }
    // Set the data length of the buffer.
    if(SUCCEEDED(hr)){
        hr = pBuffer->SetCurrentLength(cbBuffer);
    }
    // Create a media sample and add the buffer to the sample.
    if(SUCCEEDED(hr)){
        hr = MFCreateSample(&pSample);
    }
    if(SUCCEEDED(hr)){
        hr = pSample->AddBuffer(pBuffer);
    }
    // Set the time stamp and the duration.
    if(SUCCEEDED(hr)){
        hr = pSample->SetSampleTime(rtStart);
    }
    if(SUCCEEDED(hr)){
        hr = pSample->SetSampleDuration(VIDEO_FRAME_DURATION);
    }
    // Send the sample to the Sink Writer.
    if(SUCCEEDED(hr)){
        hr = pWriter->WriteSample(streamIndex, pSample);
    }
    // BUGFIX: previously "hr = pSurface->UnlockRect();" overwrote any earlier
    // failure code with S_OK; preserve the first error encountered.
    const HRESULT hrUnlock = pSurface->UnlockRect();
    if(SUCCEEDED(hr)){
        hr = hrUnlock;
    }
    SafeRelease(&pSample);
    SafeRelease(&pBuffer);
    return hr;
}
代码在 5 台不同的机器上进行了测试,全部使用 Windows 10 x64,发现如果我将 uiWidth 和 uiHeight 的值存储在日志文件中,只需在以下行之前:
hr = MFSetAttributeSize(pMediaTypeOut, MF_MT_FRAME_SIZE, uiWidth, uiHeight);
当它们的值低于 1920 和 1080 时，代码可以完美运行；而当这两个值中的一个（或两个）超过 1920x1080 时，得到的将是大小为 0 的视频文件。
更新: 我还发布了 InitializeSinkWriter()。在第 17 步,SetInputMediaType() 失败并出现错误 c00d36b4。
// Creates an MP4 sink writer for VideoPath and configures one video stream:
// the output type is the encoded format (CaptureParams.VIDEO_ENCODING_FORMAT)
// and the input type is uncompressed frames (CaptureParams.VIDEO_INPUT_FORMAT)
// of uiWidth x uiHeight at CaptureParams.VIDEO_FPS.
// On success *ppWriter receives an AddRef'd writer with BeginWriting() already
// called, and *pStreamIndex the video stream index. Returns the first failing
// HRESULT otherwise.
// NOTE(review): per the answer below, SetInputMediaType() failing with
// 0xC00D36B4 (MF_E_INVALIDMEDIATYPE) for large frame sizes indicates the
// selected video encoder rejects the resolution -- not a defect in this code.
HRESULT SGCaptureDecktopVideo::InitializeSinkWriter(std::wstring VideoPath, IMFSinkWriter **ppWriter, DWORD *pStreamIndex, const UINT32 uiWidth, const UINT32 uiHeight)
{
    *ppWriter = NULL;
    *pStreamIndex = 0;   // BUGFIX: NULL is a pointer constant; the target is a DWORD
    IMFSinkWriter *pSinkWriter = NULL;
    IMFMediaType *pMediaTypeOut = NULL;  // encoded (output) media type
    IMFMediaType *pMediaTypeIn = NULL;   // uncompressed (input) media type
    DWORD streamIndex;
    // Step 1 - MFCreateSinkWriterFromURL
    // (All checks below use FAILED(hr) instead of "SUCCEEDED(hr) == 0".)
    HRESULT hr = MFCreateSinkWriterFromURL(VideoPath.c_str(), NULL, NULL, &pSinkWriter);
    if (FAILED(hr))
    {
        WriteLogFile(L"Step 1 - MFCreateSinkWriterFromURL() failed. %x", hr);
        goto cleanup;
    }
    // Step 2 - MFCreateMediaType
    hr = MFCreateMediaType(&pMediaTypeOut);
    if (FAILED(hr))
    {
        WriteLogFile(L"Step 2 - MFCreateMediaType() failed. %x", hr);
        goto cleanup;
    }
    // Step 3 - SetGUID (MF_MT_MAJOR_TYPE)
    hr = pMediaTypeOut->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
    if (FAILED(hr))
    {
        WriteLogFile(L"Step 3 - SetGUID (MF_MT_MAJOR_TYPE) failed. %x", hr);
        goto cleanup;
    }
    // Step 4 - SetGUID (MF_MT_SUBTYPE): the encoded format, from capture params.
    hr = pMediaTypeOut->SetGUID(MF_MT_SUBTYPE, myVideoRecorder.CaptureParams.VIDEO_ENCODING_FORMAT);
    if (FAILED(hr))
    {
        WriteLogFile(L"Step 4 - SetGUID (MF_MT_SUBTYPE) failed. %x", hr);
        goto cleanup;
    }
    // Step 5 - SetUINT32 (MF_MT_AVG_BITRATE)
    hr = pMediaTypeOut->SetUINT32(MF_MT_AVG_BITRATE, myVideoRecorder.CaptureParams.VIDEO_BIT_RATE);
    if (FAILED(hr))
    {
        WriteLogFile(L"Step 5 - SetUINT32 (MF_MT_AVG_BITRATE) failed. %x", hr);
        goto cleanup;
    }
    // Step 6 - SetUINT32(MF_MT_INTERLACE_MODE)
    hr = pMediaTypeOut->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
    if (FAILED(hr))
    {
        WriteLogFile(L"Step 6 - SetUINT32(MF_MT_INTERLACE_MODE) failed. %x", hr);
        goto cleanup;
    }
    // Step 7 - MFSetAttributeSize: output frame size. The size itself is
    // accepted here; an unsupported size surfaces later at Step 17.
    WriteLogFile(L"w %d h %d",uiWidth, uiHeight);
    hr = MFSetAttributeSize(pMediaTypeOut, MF_MT_FRAME_SIZE, uiWidth, uiHeight);
    if (FAILED(hr))
    {
        WriteLogFile(L"Step 7 - MFSetAttributeSize() failed. %x", hr);
        goto cleanup;
    }
    // Step 8 - MFSetAttributeRatio - MF_MT_FRAME_RATE
    hr = MFSetAttributeRatio(pMediaTypeOut, MF_MT_FRAME_RATE, myVideoRecorder.CaptureParams.VIDEO_FPS, 1);
    if (FAILED(hr))
    {
        WriteLogFile(L"Step 8 - MFSetAttributeRatio(MF_MT_FRAME_RATE) failed. %x", hr);
        goto cleanup;
    }
    // Step 9 - MFSetAttributeRatio - MF_MT_PIXEL_ASPECT_RATIO (square pixels)
    hr = MFSetAttributeRatio(pMediaTypeOut, MF_MT_PIXEL_ASPECT_RATIO, 1, 1);
    if (FAILED(hr))
    {
        WriteLogFile(L"Step 9 - MFSetAttributeRatio(MF_MT_PIXEL_ASPECT_RATIO) failed. %x", hr);
        goto cleanup;
    }
    // Step 10 - AddStream: registers the encoded output type with the writer.
    hr = pSinkWriter->AddStream(pMediaTypeOut, &streamIndex);
    if (FAILED(hr))
    {
        WriteLogFile(L"Step 10 - AddStream() failed. %x", hr);
        goto cleanup;
    }
    // Step 11 - MFCreateMediaType (input/uncompressed type)
    hr = MFCreateMediaType(&pMediaTypeIn);
    if (FAILED(hr) || pMediaTypeIn == NULL)
    {
        WriteLogFile(L"Step 11 - MFCreateMediaType() failed. %x", hr);
        goto cleanup;
    }
    // Step 12 - SetGUID (MF_MT_MAJOR_TYPE)
    hr = pMediaTypeIn->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
    if (FAILED(hr))
    {
        WriteLogFile(L"Step 12 - SetGUID (MF_MT_MAJOR_TYPE) failed. %x", hr);
        goto cleanup;
    }
    // Step 13 - SetGUID (MF_MT_SUBTYPE): the uncompressed input format.
    hr = pMediaTypeIn->SetGUID(MF_MT_SUBTYPE, myVideoRecorder.CaptureParams.VIDEO_INPUT_FORMAT);
    if (FAILED(hr))
    {
        WriteLogFile(L"Step 13 - SetGUID (MF_MT_SUBTYPE) failed. %x", hr);
        goto cleanup;
    }
    // Step 13b - SetUINT32 (MF_MT_INTERLACE_MODE)
    // (the original comments numbered two different steps "13")
    hr = pMediaTypeIn->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
    if (FAILED(hr))
    {
        WriteLogFile(L"SetUINT32 (MF_MT_INTERLACE_MODE). %x", hr);
        goto cleanup;
    }
    // Step 14 - MFSetAttributeSize (input frame size; fixed "SMFSetAttributeSize" typo)
    hr = MFSetAttributeSize(pMediaTypeIn, MF_MT_FRAME_SIZE, uiWidth, uiHeight);
    if (FAILED(hr))
    {
        WriteLogFile(L"Step 14 - MFSetAttributeSize() failed. %x", hr);
        goto cleanup;
    }
    // Step 15 - MFSetAttributeRatio(MF_MT_FRAME_RATE)
    hr = MFSetAttributeRatio(pMediaTypeIn, MF_MT_FRAME_RATE, myVideoRecorder.CaptureParams.VIDEO_FPS, 1);
    if (FAILED(hr))
    {
        WriteLogFile(L"Step 15 - MFSetAttributeRatio(MF_MT_FRAME_RATE) failed. %x", hr);
        goto cleanup;
    }
    // Step 16 - MFSetAttributeRatio(MF_MT_PIXEL_ASPECT_RATIO)
    hr = MFSetAttributeRatio(pMediaTypeIn, MF_MT_PIXEL_ASPECT_RATIO, 1, 1);
    if (FAILED(hr))
    {
        WriteLogFile(L"Step 16 - MFSetAttributeRatio(MF_MT_PIXEL_ASPECT_RATIO) failed. %x", hr);
        goto cleanup;
    }
    // Step 17 - SetInputMediaType: this is where the encoder validates the
    // input type; 0xC00D36B4 here means it rejects the combination (the
    // resolution, in the question's scenario).
    hr = pSinkWriter->SetInputMediaType(streamIndex, pMediaTypeIn, NULL);
    if (FAILED(hr))
    {
        WriteLogFile(L"Step 17 - SetInputMediaType() failed. %x", hr);
        goto cleanup;
    }
    // Step 18 - BeginWriting() Tell the sink writer to start accepting data.
    hr = pSinkWriter->BeginWriting();
    if (FAILED(hr))
    {
        WriteLogFile(L"Step 18 - BeginWriting() failed. %x", hr);
        goto cleanup;
    }
    // Return the pointer to the caller.
    WriteLogFile(L"Success");
    *ppWriter = pSinkWriter;
    (*ppWriter)->AddRef();
    *pStreamIndex = streamIndex;
cleanup:;
    // Releasing pSinkWriter is safe on the success path because the caller's
    // copy holds its own reference via AddRef() above.
    SafeRelease(&pSinkWriter);
    SafeRelease(&pMediaTypeOut);
    SafeRelease(&pMediaTypeIn);
    return hr;
}
在分辨率更高的机器上,我仍然在这部分遇到错误。 CreateDevice() 失败,错误为 0x7E。
// (Fragment from the question update: the logging-instrumented portion of
// InitializeDirect3D9(); `d3d`, `ppDevice`, `uiWidth`, `uiHeight` are
// declared in the enclosing function, not shown here.)
D3DDISPLAYMODE mode;
HRESULT hr = d3d->GetAdapterDisplayMode(D3DADAPTER_DEFAULT, &mode);
if (FAILED(hr))
{
    // BUGFIX: COM/D3D APIs report failure via the returned HRESULT;
    // GetLastError() is unrelated here and yields misleading codes
    // (such as the 0x7E the question reports).
    WriteLogFile(L"GetAdapterDisplayMode() failed. Error =%x", hr);
    SafeRelease(&d3d);
    return hr;
}
WriteLogFile(L"Fetched mode. Width=%d Height=%d", mode.Width, mode.Height);
D3DPRESENT_PARAMETERS parameters = { 0 };
parameters.Windowed = TRUE;
parameters.BackBufferCount = 1;
uiHeight = parameters.BackBufferHeight = mode.Height;
uiWidth = parameters.BackBufferWidth = mode.Width;
parameters.SwapEffect = D3DSWAPEFFECT_DISCARD;
parameters.hDeviceWindow = NULL;
// BUGFIX: "&parameters" was mangled to "¶meters" by HTML entity encoding.
hr = d3d->CreateDevice(D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, NULL, D3DCREATE_SOFTWARE_VERTEXPROCESSING, &parameters, ppDevice);
if (FAILED(hr))
{
    // BUGFIX: log hr, not GetLastError() (see above).
    WriteLogFile(L"CreateDevice failed. Error =%x", hr);
    SafeRelease(&d3d);
    return hr;
}
最佳答案
您的瓶颈是视频编码器的分辨率支持。
视频编码器通常对分辨率有限制，但各编码器的限制不尽相同。被广泛支持的最大安全分辨率为 1920x1088。在您的情况下，您很可能正在使用 Microsoft 的软件编码器。我记得过去 MSDN 文档曾直接提到过最大分辨率，但该信息目前已不存在。
无法可靠地确定支持的最大分辨率,它看起来像是一个 API 设计缺陷。
根据记录,Windows 10 版本 1903(2019 年 5 月更新)附带软件 H.264 视频编码器,能够编码至少 3840x2160 视频。
另见:
关于c++ - 当屏幕分辨率高于 1920x1080 时捕获视频,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/57633885/
我的瘦服务器配置了nginx,我的ROR应用程序正在它们上运行。在我发布代码更新时运行thinrestart会给我的应用程序带来一些停机时间。我试图弄清楚如何优雅地重启正在运行的Thin实例,但找不到好的解决方案。有没有人能做到这一点? 最佳答案 #Restartjustthethinserverdescribedbythatconfigsudothin-C/etc/thin/mysite.ymlrestartNginx将继续运行并代理请求。如果您将Nginx设置为使用多个上游服务器,例如server{listen80;server
相信很多人在录制视频的时候都会遇到各种各样的问题,比如录制的视频没有声音。屏幕录制为什么没声音?今天小编就和大家分享一下如何录制音画同步视频的具体操作方法。如果你有录制的视频没有声音,你可以试试这个方法。 一、检查是否打开电脑系统声音相信很多小伙伴在录制视频后会发现录制的视频没有声音,屏幕录制为什么没声音?如果当时没有打开音频录制,则录制好的视频是没有声音的。因此,建议在录制前进行检查。屏幕上没有声音,很可能是因为你的电脑系统的声音被禁止了。您只需打开电脑系统的声音,即可录制音频和图画同步视频。操作方法:步骤1:点击电脑屏幕右下侧的“小喇叭”图案,在上方的选项中,选择“声音”。 步骤2:在“声
如何将send与+=一起使用?a=20;a.send"+=",10undefinedmethod`+='for20:Fixnuma=20;a+=10=>30 最佳答案 恐怕你不能。+=不是方法,而是语法糖。参见http://www.ruby-doc.org/docs/ProgrammingRuby/html/tut_expressions.html它说Incommonwithmanyotherlanguages,Rubyhasasyntacticshortcut:a=a+2maybewrittenasa+=2.你能做的最好的事情是:
📢博客主页:https://blog.csdn.net/weixin_43197380📢欢迎点赞👍收藏⭐留言📝如有错误敬请指正!📢本文由Loewen丶原创,首发于CSDN,转载注明出处🙉📢现在的付出,都会是一种沉淀,只为让你成为更好的人✨文章预览:一.分辨率(Resolution)1、工业相机的分辨率是如何定义的?2、工业相机的分辨率是如何选择的?二.精度(Accuracy)1、像素精度(PixelAccuracy)2、定位精度和重复定位精度(RepeatPrecision)三.公差(Tolerance)四.课后作业(Post-ClassExercises)视觉行业的初学者,甚至是做了1~2年
我对如何计算通过{%assignvar=0%}赋值的变量加一完全感到困惑。这应该是最简单的任务。到目前为止,这是我尝试过的:{%assignamount=0%}{%forvariantinproduct.variants%}{%assignamount=amount+1%}{%endfor%}Amount:{{amount}}结果总是0。也许我忽略了一些明显的东西。也许有更好的方法。我想要存档的只是获取运行的迭代次数。 最佳答案 因为{{incrementamount}}将输出您的变量值并且不会影响{%assign%}定义的变量,我
我有一个数组数组,想将元素附加到子数组。+=做我想做的,但我想了解为什么push不做。我期望的行为(并与+=一起工作):b=Array.new(3,[])b[0]+=["apple"]b[1]+=["orange"]b[2]+=["frog"]b=>[["苹果"],["橙子"],["Frog"]]通过推送,我将推送的元素附加到每个子数组(为什么?):a=Array.new(3,[])a[0].push("apple")a[1].push("orange")a[2].push("frog")a=>[[“苹果”、“橙子”、“Frog”]、[“苹果”、“橙子”、“Frog”]、[“苹果”、“
有没有办法让Ruby能够做这样的事情?classPlane@moved=0@x=0defx+=(v)#thisiserror@x+=v@moved+=1enddefto_s"moved#{@moved}times,currentxis#{@x}"endendplane=Plane.newplane.x+=5plane.x+=10putsplane.to_s#moved2times,currentxis15 最佳答案 您不能在Ruby中覆盖复合赋值运算符。任务在内部处理。您应该覆盖+,而不是+=。plane.a+=b与plane.a=
出于某种原因,heroku尝试要求dm-sqlite-adapter,即使它应该在这里使用Postgres。请注意,这发生在我打开任何URL时-而不是在gitpush本身期间。我构建了一个默认的Facebook应用程序。gem文件:source:gemcuttergem"foreman"gem"sinatra"gem"mogli"gem"json"gem"httparty"gem"thin"gem"data_mapper"gem"heroku"group:productiondogem"pg"gem"dm-postgres-adapter"endgroup:development,:t
我是Ruby和这个网站的新手。下面两个函数是不同的,一个在函数外修改变量,一个不修改。defm1(x)x我想确保我理解正确-当调用m1时,对str的引用被复制并传递给将其视为x的函数。运算符当调用m2时,对str的引用被复制并传递给将其视为x的函数。运算符+创建一个新字符串,赋值x=x+"4"只是将x重定向到新字符串,而原始str变量保持不变。对吧?谢谢 最佳答案 String#+::str+other_str→new_strConcatenation—ReturnsanewStringcontainingother_strconc
我正在使用PostgreSQL9.1.3(x86_64-pc-linux-gnu上的PostgreSQL9.1.3,由gcc-4.6.real(Ubuntu/Linaro4.6.1-9ubuntu3)4.6.1,64位编译)和在ubuntu11.10上运行3.2.2或3.2.1。现在,我可以使用以下命令连接PostgreSQLsupostgres输入密码我可以看到postgres=#我将以下详细信息放在我的config/database.yml中并执行“railsdb”,它工作正常。开发:adapter:postgresqlencoding:utf8reconnect:falsedat