I'm attempting to render video using the Microsoft sample DX11VideoRenderer found at: https://github.com/Microsoft/Windows-classic-samples/tree/master/Samples/DX11VideoRenderer
From my extensive research, it seems that using DirectX 11 with hardware acceleration is the most up-to-date method (least likely to be deprecated) and offers the best performance.
There are 2 similar functions within Presenter.cpp that process frames, but I cannot figure out what the difference is between them. ProcessFrameUsingD3D11() uses VideoProcessorBlt() to actually do the render. The mystery is that ProcessFrameUsingXVP() does not use this function, so how does it actually do the render? Or is it doing something else entirely? Also, it appears that my implementation is using ProcessFrameUsingXVP() based on the value of the variable m_useXVP, which is by default set to '1'. Here is the code sample:
if (m_useXVP)
{
BOOL bInputFrameUsed = FALSE;
hr = ProcessFrameUsingXVP( pCurrentType, pSample, pTexture2D, rcDest, ppOutputSample, &bInputFrameUsed );
if (SUCCEEDED(hr) && !bInputFrameUsed)
{
*pbProcessAgain = TRUE;
}
}
else
{
hr = ProcessFrameUsingD3D11( pTexture2D, pEVTexture2D, dwViewIndex, dwEVViewIndex, rcDest, *punInterlaceMode, ppOutputSample );
LONGLONG hnsDuration = 0;
LONGLONG hnsTime = 0;
DWORD dwSampleFlags = 0;
if (ppOutputSample != NULL && *ppOutputSample != NULL)
{
if (SUCCEEDED(pSample->GetSampleDuration(&hnsDuration)))
{
(*ppOutputSample)->SetSampleDuration(hnsDuration);
}
if (SUCCEEDED(pSample->GetSampleTime(&hnsTime)))
{
(*ppOutputSample)->SetSampleTime(hnsTime);
}
if (SUCCEEDED(pSample->GetSampleFlags(&dwSampleFlags)))
{
(*ppOutputSample)->SetSampleFlags(dwSampleFlags);
}
}
}
The reason for setting m_useXVP is also a mystery to me, and I cannot find an answer in my research. It uses a registry key that does not exist on my particular Windows 10 PC, so the value is not modified.
const TCHAR* lpcszInVP = TEXT("XVP");
const TCHAR* lpcszREGKEY = TEXT("SOFTWARE\\Microsoft\\Scrunch\\CodecPack\\MSDVD");
if(0 == RegOpenKeyEx(HKEY_CURRENT_USER, lpcszREGKEY, 0, KEY_READ, &hk))
{
dwData = 0;
cbData = sizeof(DWORD);
if (0 == RegQueryValueEx(hk, lpcszInVP, 0, &cbType, (LPBYTE)&dwData, &cbData))
{
m_useXVP = dwData;
}
}
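As far as I can tell this key is undocumented, but creating it yourself lets you switch between the two paths without rebuilding. Here is a sketch of the inverse of the query above (my addition, purely for experimentation):
HKEY hk;
if (0 == RegCreateKeyEx(HKEY_CURRENT_USER,
    TEXT("SOFTWARE\\Microsoft\\Scrunch\\CodecPack\\MSDVD"),
    0, NULL, 0, KEY_WRITE, NULL, &hk, NULL))
{
    DWORD dwData = 0; // 0 = take the ProcessFrameUsingD3D11() path, nonzero = XVP
    RegSetValueEx(hk, TEXT("XVP"), 0, REG_DWORD, (const BYTE*)&dwData, sizeof(dwData));
    RegCloseKey(hk);
}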
So since my PC does not have this key, the code is defaulting to using ProcessFrameUsingXVP(). Here is the definition:
HRESULT DX11VideoRenderer::CPresenter::ProcessFrameUsingXVP(IMFMediaType* pCurrentType, IMFSample* pVideoFrame, ID3D11Texture2D* pTexture2D, RECT rcDest, IMFSample** ppVideoOutFrame, BOOL* pbInputFrameUsed)
{
HRESULT hr = S_OK;
ID3D11VideoContext* pVideoContext = NULL;
ID3D11Texture2D* pDXGIBackBuffer = NULL;
IMFSample* pRTSample = NULL;
IMFMediaBuffer* pBuffer = NULL;
IMFAttributes* pAttributes = NULL;
D3D11_VIDEO_PROCESSOR_CAPS vpCaps = { 0 };
do
{
if (!m_pDX11VideoDevice)
{
hr = m_pD3D11Device->QueryInterface(__uuidof(ID3D11VideoDevice), (void**)&m_pDX11VideoDevice);
if (FAILED(hr))
{
break;
}
}
hr = m_pD3DImmediateContext->QueryInterface(__uuidof(ID3D11VideoContext), (void**)&pVideoContext);
if (FAILED(hr))
{
break;
}
// remember the original rectangles
RECT TRectOld = m_rcDstApp;
RECT SRectOld = m_rcSrcApp;
UpdateRectangles(&TRectOld, &SRectOld);
//Update destination rect with current client rect
m_rcDstApp = rcDest;
D3D11_TEXTURE2D_DESC surfaceDesc;
pTexture2D->GetDesc(&surfaceDesc);
BOOL fTypeChanged = FALSE;
if (!m_pVideoProcessorEnum || !m_pSwapChain1 || m_imageWidthInPixels != surfaceDesc.Width || m_imageHeightInPixels != surfaceDesc.Height)
{
SafeRelease(m_pVideoProcessorEnum);
SafeRelease(m_pSwapChain1);
m_imageWidthInPixels = surfaceDesc.Width;
m_imageHeightInPixels = surfaceDesc.Height;
fTypeChanged = TRUE;
D3D11_VIDEO_PROCESSOR_CONTENT_DESC ContentDesc;
ZeroMemory(&ContentDesc, sizeof(ContentDesc));
ContentDesc.InputFrameFormat = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_TOP_FIELD_FIRST;
ContentDesc.InputWidth = surfaceDesc.Width;
ContentDesc.InputHeight = surfaceDesc.Height;
ContentDesc.OutputWidth = surfaceDesc.Width;
ContentDesc.OutputHeight = surfaceDesc.Height;
ContentDesc.Usage = D3D11_VIDEO_USAGE_PLAYBACK_NORMAL;
hr = m_pDX11VideoDevice->CreateVideoProcessorEnumerator(&ContentDesc, &m_pVideoProcessorEnum);
if (FAILED(hr))
{
break;
}
m_rcSrcApp.left = 0;
m_rcSrcApp.top = 0;
m_rcSrcApp.right = m_uiRealDisplayWidth;
m_rcSrcApp.bottom = m_uiRealDisplayHeight;
if (m_b3DVideo)
{
hr = m_pVideoProcessorEnum->GetVideoProcessorCaps(&vpCaps);
if (FAILED(hr))
{
break;
}
if (vpCaps.FeatureCaps & D3D11_VIDEO_PROCESSOR_FEATURE_CAPS_STEREO)
{
m_bStereoEnabled = TRUE;
}
DXGI_MODE_DESC1 modeFilter = { 0 };
modeFilter.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
modeFilter.Width = surfaceDesc.Width;
modeFilter.Height = surfaceDesc.Height;
modeFilter.Stereo = m_bStereoEnabled;
DXGI_MODE_DESC1 matchedMode;
if (m_bFullScreenState)
{
hr = m_pDXGIOutput1->FindClosestMatchingMode1(&modeFilter, &matchedMode, m_pD3D11Device);
if (FAILED(hr))
{
break;
}
}
hr = m_pXVP->GetAttributes(&pAttributes);
if (FAILED(hr))
{
break;
}
hr = pAttributes->SetUINT32(MF_ENABLE_3DVIDEO_OUTPUT, (0 != m_vp3DOutput) ? MF3DVideoOutputType_Stereo : MF3DVideoOutputType_BaseView);
if (FAILED(hr))
{
break;
}
}
}
// now create the input and output media types - these need to reflect
// the src and destination rectangles that we have been given.
RECT TRect = m_rcDstApp;
RECT SRect = m_rcSrcApp;
UpdateRectangles(&TRect, &SRect);
const BOOL fDestRectChanged = !EqualRect(&TRect, &TRectOld);
const BOOL fSrcRectChanged = !EqualRect(&SRect, &SRectOld);
if (!m_pSwapChain1 || fDestRectChanged)
{
hr = UpdateDXGISwapChain();
if (FAILED(hr))
{
break;
}
}
if (fTypeChanged || fSrcRectChanged || fDestRectChanged)
{
// stop streaming to avoid multiple start\stop calls internally in XVP
hr = m_pXVP->ProcessMessage(MFT_MESSAGE_NOTIFY_END_STREAMING, 0);
if (FAILED(hr))
{
break;
}
if (fTypeChanged)
{
hr = SetXVPOutputMediaType(pCurrentType, DXGI_FORMAT_B8G8R8A8_UNORM);
if (FAILED(hr))
{
break;
}
}
if (fDestRectChanged)
{
hr = m_pXVPControl->SetDestinationRectangle(&m_rcDstApp);
if (FAILED(hr))
{
break;
}
}
if (fSrcRectChanged)
{
hr = m_pXVPControl->SetSourceRectangle(&SRect);
if (FAILED(hr))
{
break;
}
}
hr = m_pXVP->ProcessMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0);
if (FAILED(hr))
{
break;
}
}
m_bCanProcessNextSample = FALSE;
// Get Backbuffer
hr = m_pSwapChain1->GetBuffer(0, __uuidof(ID3D11Texture2D), (void**)&pDXGIBackBuffer);
if (FAILED(hr))
{
break;
}
// create the output media sample
hr = MFCreateSample(&pRTSample);
if (FAILED(hr))
{
break;
}
hr = MFCreateDXGISurfaceBuffer(__uuidof(ID3D11Texture2D), pDXGIBackBuffer, 0, FALSE, &pBuffer);
if (FAILED(hr))
{
break;
}
hr = pRTSample->AddBuffer(pBuffer);
if (FAILED(hr))
{
break;
}
if (m_b3DVideo && 0 != m_vp3DOutput)
{
SafeRelease(pBuffer);
hr = MFCreateDXGISurfaceBuffer(__uuidof(ID3D11Texture2D), pDXGIBackBuffer, 1, FALSE, &pBuffer);
if (FAILED(hr))
{
break;
}
hr = pRTSample->AddBuffer(pBuffer);
if (FAILED(hr))
{
break;
}
}
DWORD dwStatus = 0;
MFT_OUTPUT_DATA_BUFFER outputDataBuffer = {};
outputDataBuffer.pSample = pRTSample;
hr = m_pXVP->ProcessOutput(0, 1, &outputDataBuffer, &dwStatus);
if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT)
{
// call ProcessInput on the MFT to deliver the YUV video sample,
// then call ProcessOutput to extract the newly processed frame
hr = m_pXVP->ProcessInput(0, pVideoFrame, 0);
if (FAILED(hr))
{
break;
}
*pbInputFrameUsed = TRUE;
hr = m_pXVP->ProcessOutput(0, 1, &outputDataBuffer, &dwStatus);
if (FAILED(hr))
{
break;
}
}
else
{
*pbInputFrameUsed = FALSE;
}
if (ppVideoOutFrame != NULL)
{
*ppVideoOutFrame = pRTSample;
(*ppVideoOutFrame)->AddRef();
}
} while (FALSE);
SafeRelease(pAttributes);
SafeRelease(pBuffer);
SafeRelease(pRTSample);
SafeRelease(pDXGIBackBuffer);
SafeRelease(pVideoContext);
return hr;
}
And here is the definition of ProcessFrameUsingD3D11() :
HRESULT DX11VideoRenderer::CPresenter::ProcessFrameUsingD3D11( ID3D11Texture2D* pLeftTexture2D, ID3D11Texture2D* pRightTexture2D, UINT dwLeftViewIndex, UINT dwRightViewIndex,
RECT rcDest, UINT32 unInterlaceMode, IMFSample** ppVideoOutFrame )
{
HRESULT hr = S_OK;
ID3D11VideoContext* pVideoContext = NULL;
ID3D11VideoProcessorInputView* pLeftInputView = NULL;
ID3D11VideoProcessorInputView* pRightInputView = NULL;
ID3D11VideoProcessorOutputView* pOutputView = NULL;
ID3D11Texture2D* pDXGIBackBuffer = NULL;
ID3D11RenderTargetView* pRTView = NULL;
IMFSample* pRTSample = NULL;
IMFMediaBuffer* pBuffer = NULL;
D3D11_VIDEO_PROCESSOR_CAPS vpCaps = {0};
LARGE_INTEGER lpcStart,lpcEnd;
do
{
if (!m_pDX11VideoDevice)
{
hr = m_pD3D11Device->QueryInterface(__uuidof(ID3D11VideoDevice), (void**)&m_pDX11VideoDevice);
if (FAILED(hr))
{
break;
}
}
hr = m_pD3DImmediateContext->QueryInterface(__uuidof( ID3D11VideoContext ), (void**)&pVideoContext);
if (FAILED(hr))
{
break;
}
// remember the original rectangles
RECT TRectOld = m_rcDstApp;
RECT SRectOld = m_rcSrcApp;
UpdateRectangles(&TRectOld, &SRectOld);
//Update destination rect with current client rect
m_rcDstApp = rcDest;
D3D11_TEXTURE2D_DESC surfaceDesc;
pLeftTexture2D->GetDesc(&surfaceDesc);
if (!m_pVideoProcessorEnum || !m_pVideoProcessor || m_imageWidthInPixels != surfaceDesc.Width || m_imageHeightInPixels != surfaceDesc.Height)
{
SafeRelease(m_pVideoProcessorEnum);
SafeRelease(m_pVideoProcessor);
m_imageWidthInPixels = surfaceDesc.Width;
m_imageHeightInPixels = surfaceDesc.Height;
D3D11_VIDEO_PROCESSOR_CONTENT_DESC ContentDesc;
ZeroMemory( &ContentDesc, sizeof( ContentDesc ) );
ContentDesc.InputFrameFormat = D3D11_VIDEO_FRAME_FORMAT_INTERLACED_TOP_FIELD_FIRST;
ContentDesc.InputWidth = surfaceDesc.Width;
ContentDesc.InputHeight = surfaceDesc.Height;
ContentDesc.OutputWidth = surfaceDesc.Width;
ContentDesc.OutputHeight = surfaceDesc.Height;
ContentDesc.Usage = D3D11_VIDEO_USAGE_PLAYBACK_NORMAL;
hr = m_pDX11VideoDevice->CreateVideoProcessorEnumerator(&ContentDesc, &m_pVideoProcessorEnum);
if (FAILED(hr))
{
break;
}
UINT uiFlags;
DXGI_FORMAT VP_Output_Format = DXGI_FORMAT_B8G8R8A8_UNORM;
hr = m_pVideoProcessorEnum->CheckVideoProcessorFormat(VP_Output_Format, &uiFlags);
if (FAILED(hr) || 0 == (uiFlags & D3D11_VIDEO_PROCESSOR_FORMAT_SUPPORT_OUTPUT))
{
hr = MF_E_UNSUPPORTED_D3D_TYPE;
break;
}
m_rcSrcApp.left = 0;
m_rcSrcApp.top = 0;
m_rcSrcApp.right = m_uiRealDisplayWidth;
m_rcSrcApp.bottom = m_uiRealDisplayHeight;
DWORD index;
hr = FindBOBProcessorIndex(&index); // GG This may not be needed. BOB is something to do with deinterlacing.
if (FAILED(hr))
{
break;
}
hr = m_pDX11VideoDevice->CreateVideoProcessor(m_pVideoProcessorEnum, index, &m_pVideoProcessor);
if (FAILED(hr))
{
break;
}
if (m_b3DVideo)
{
hr = m_pVideoProcessorEnum->GetVideoProcessorCaps(&vpCaps);
if (FAILED(hr))
{
break;
}
if (vpCaps.FeatureCaps & D3D11_VIDEO_PROCESSOR_FEATURE_CAPS_STEREO)
{
m_bStereoEnabled = TRUE;
}
DXGI_MODE_DESC1 modeFilter = { 0 };
modeFilter.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
modeFilter.Width = surfaceDesc.Width;
modeFilter.Height = surfaceDesc.Height;
modeFilter.Stereo = m_bStereoEnabled;
DXGI_MODE_DESC1 matchedMode;
if (m_bFullScreenState)
{
hr = m_pDXGIOutput1->FindClosestMatchingMode1(&modeFilter, &matchedMode, m_pD3D11Device);
if (FAILED(hr))
{
break;
}
}
}
}
// now create the input and output media types - these need to reflect
// the src and destination rectangles that we have been given.
RECT TRect = m_rcDstApp;
RECT SRect = m_rcSrcApp;
UpdateRectangles(&TRect, &SRect);
const BOOL fDestRectChanged = !EqualRect(&TRect, &TRectOld);
if (!m_pSwapChain1 || fDestRectChanged)
{
hr = UpdateDXGISwapChain();
if (FAILED(hr))
{
break;
}
}
m_bCanProcessNextSample = FALSE;
// Get Backbuffer
hr = m_pSwapChain1->GetBuffer(0, __uuidof(ID3D11Texture2D), (void**)&pDXGIBackBuffer);
if (FAILED(hr))
{
break;
}
// create the output media sample
hr = MFCreateSample(&pRTSample);
if (FAILED(hr))
{
break;
}
hr = MFCreateDXGISurfaceBuffer(__uuidof(ID3D11Texture2D), pDXGIBackBuffer, 0, FALSE, &pBuffer);
if (FAILED(hr))
{
break;
}
hr = pRTSample->AddBuffer(pBuffer);
if (FAILED(hr))
{
break;
}
// GG For 3D - don't need.
if (m_b3DVideo && 0 != m_vp3DOutput)
{
SafeRelease(pBuffer);
hr = MFCreateDXGISurfaceBuffer(__uuidof(ID3D11Texture2D), pDXGIBackBuffer, 1, FALSE, &pBuffer);
if (FAILED(hr))
{
break;
}
hr = pRTSample->AddBuffer(pBuffer);
if (FAILED(hr))
{
break;
}
}
QueryPerformanceCounter(&lpcStart);
QueryPerformanceCounter(&lpcEnd);
//
// Create Output View of Output Surfaces.
//
D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC OutputViewDesc;
ZeroMemory( &OutputViewDesc, sizeof( OutputViewDesc ) );
if (m_b3DVideo && m_bStereoEnabled)
{
OutputViewDesc.ViewDimension = D3D11_VPOV_DIMENSION_TEXTURE2DARRAY;
}
else
{
OutputViewDesc.ViewDimension = D3D11_VPOV_DIMENSION_TEXTURE2D;
}
OutputViewDesc.Texture2D.MipSlice = 0;
OutputViewDesc.Texture2DArray.MipSlice = 0;
OutputViewDesc.Texture2DArray.FirstArraySlice = 0;
if (m_b3DVideo && 0 != m_vp3DOutput)
{
OutputViewDesc.Texture2DArray.ArraySize = 2; // STEREO
}
QueryPerformanceCounter(&lpcStart);
hr = m_pDX11VideoDevice->CreateVideoProcessorOutputView(pDXGIBackBuffer, m_pVideoProcessorEnum, &OutputViewDesc, &pOutputView);
if (FAILED(hr))
{
break;
}
D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC InputLeftViewDesc;
ZeroMemory( &InputLeftViewDesc, sizeof( InputLeftViewDesc ) );
InputLeftViewDesc.FourCC = 0;
InputLeftViewDesc.ViewDimension = D3D11_VPIV_DIMENSION_TEXTURE2D;
InputLeftViewDesc.Texture2D.MipSlice = 0;
InputLeftViewDesc.Texture2D.ArraySlice = dwLeftViewIndex;
hr = m_pDX11VideoDevice->CreateVideoProcessorInputView(pLeftTexture2D, m_pVideoProcessorEnum, &InputLeftViewDesc, &pLeftInputView);
if (FAILED(hr))
{
break;
}
if (m_b3DVideo && MFVideo3DSampleFormat_MultiView == m_vp3DOutput && pRightTexture2D)
{
D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC InputRightViewDesc;
ZeroMemory( &InputRightViewDesc, sizeof( InputRightViewDesc ) );
InputRightViewDesc.FourCC = 0;
InputRightViewDesc.ViewDimension = D3D11_VPIV_DIMENSION_TEXTURE2D;
InputRightViewDesc.Texture2D.MipSlice = 0;
InputRightViewDesc.Texture2D.ArraySlice = dwRightViewIndex;
hr = m_pDX11VideoDevice->CreateVideoProcessorInputView(pRightTexture2D, m_pVideoProcessorEnum, &InputRightViewDesc, &pRightInputView);
if (FAILED(hr))
{
break;
}
}
QueryPerformanceCounter(&lpcEnd);
QueryPerformanceCounter(&lpcStart);
SetVideoContextParameters(pVideoContext, &SRect, &TRect, unInterlaceMode);
// Enable/Disable Stereo
if (m_b3DVideo)
{
pVideoContext->VideoProcessorSetOutputStereoMode(m_pVideoProcessor, m_bStereoEnabled);
D3D11_VIDEO_PROCESSOR_STEREO_FORMAT vpStereoFormat = D3D11_VIDEO_PROCESSOR_STEREO_FORMAT_SEPARATE;
if (MFVideo3DSampleFormat_Packed_LeftRight == m_vp3DOutput)
{
vpStereoFormat = D3D11_VIDEO_PROCESSOR_STEREO_FORMAT_HORIZONTAL;
}
else if (MFVideo3DSampleFormat_Packed_TopBottom == m_vp3DOutput)
{
vpStereoFormat = D3D11_VIDEO_PROCESSOR_STEREO_FORMAT_VERTICAL;
}
pVideoContext->VideoProcessorSetStreamStereoFormat(m_pVideoProcessor,
0, m_bStereoEnabled, vpStereoFormat, TRUE, TRUE, D3D11_VIDEO_PROCESSOR_STEREO_FLIP_NONE, 0);
}
QueryPerformanceCounter(&lpcEnd);
QueryPerformanceCounter(&lpcStart);
D3D11_VIDEO_PROCESSOR_STREAM StreamData;
ZeroMemory( &StreamData, sizeof( StreamData ) );
StreamData.Enable = TRUE;
StreamData.OutputIndex = 0;
StreamData.InputFrameOrField = 0;
StreamData.PastFrames = 0;
StreamData.FutureFrames = 0;
StreamData.ppPastSurfaces = NULL;
StreamData.ppFutureSurfaces = NULL;
StreamData.pInputSurface = pLeftInputView;
StreamData.ppPastSurfacesRight = NULL;
StreamData.ppFutureSurfacesRight = NULL;
if (m_b3DVideo && MFVideo3DSampleFormat_MultiView == m_vp3DOutput && pRightTexture2D)
{
StreamData.pInputSurfaceRight = pRightInputView;
}
hr = pVideoContext->VideoProcessorBlt(m_pVideoProcessor, pOutputView, 0, 1, &StreamData );
if (FAILED(hr))
{
break;
}
QueryPerformanceCounter(&lpcEnd);
if (ppVideoOutFrame != NULL)
{
*ppVideoOutFrame = pRTSample;
(*ppVideoOutFrame)->AddRef();
}
}
while (FALSE);
SafeRelease(pBuffer);
SafeRelease(pRTSample);
SafeRelease(pDXGIBackBuffer);
SafeRelease(pOutputView);
SafeRelease(pLeftInputView);
SafeRelease(pRightInputView);
SafeRelease(pVideoContext);
return hr;
}
One last note, the documentation states that:
Specifically, this sample shows how to:
Decode the video using the Media Foundation APIs
Render the decoded video using the DirectX 11 APIs
Output the video stream to multi-monitor displays
I cannot find anything that does decoding, unless it happens by some MF magic chant phrase that I haven't stumbled across yet. But it's not a showstopper, because I can stick an H.264 decoder MFT in front, no problem. I would just like to clarify the documentation.
Any help would be much appreciated. Thank you!
There are 2 similar functions within Presenter.cpp that process frames, but I cannot figure out what the difference is between them. ProcessFrameUsingD3D11() uses VideoProcessorBlt() to actually do the render.
The functions are not rendering - they are two ways to scale video frames. Scaling might be done with a readily available Media Foundation transform (the XVP) that the renderer's presenter manages internally, or it might be done directly with a Direct3D 11 video processor. Actually, both utilize Direct3D 11, so the two methods are close to one another and are just one step in the rendering process.
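To make the distinction concrete, here is a minimal sketch (my reduction, not literal sample code) of the core call in each path:
// "XVP" path: drive the Video Processor MFT like any other transform;
// the MFT wraps a Direct3D 11 video processor internally.
hr = m_pXVP->ProcessInput(0, pVideoFrame, 0);                   // frame in
hr = m_pXVP->ProcessOutput(0, 1, &outputDataBuffer, &dwStatus); // scaled frame out

// "D3D11" path: talk to the video processor directly and blit the input
// view onto the view of the swap chain's back buffer.
hr = pVideoContext->VideoProcessorBlt(m_pVideoProcessor, pOutputView, 0, 1, &StreamData);
So ProcessFrameUsingXVP() presumably ends in a VideoProcessorBlt() as well - it just happens inside the XVP transform rather than in the presenter's own code.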
I cannot find anything that does decoding, unless it happens by some MF magic chant phrase that I haven't stumbled across yet.
There is no decoding, and the list of sink video formats in StreamSink.cpp suggests as much by listing only uncompressed video formats. The renderer presents frames carried by Direct3D 11 textures, which in turn assumes that a decoder, especially a hardware decoder such as one based on DXVA2, already supplies the decoded textures at the renderer's input.
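If you do put a decoder in front, enumerating one might look roughly like this - a sketch assuming you want a hardware H.264 decoder producing NV12, with error handling omitted (none of this is in the sample):
// Locate an H.264 video decoder MFT to place upstream of the renderer.
MFT_REGISTER_TYPE_INFO inputType  = { MFMediaType_Video, MFVideoFormat_H264 };
MFT_REGISTER_TYPE_INFO outputType = { MFMediaType_Video, MFVideoFormat_NV12 };
IMFActivate** ppActivate = NULL;
UINT32 count = 0;
HRESULT hr = MFTEnumEx(MFT_CATEGORY_VIDEO_DECODER,
    MFT_ENUM_FLAG_HARDWARE | MFT_ENUM_FLAG_SORTANDFILTER,
    &inputType, &outputType, &ppActivate, &count);
if (SUCCEEDED(hr) && count > 0)
{
    IMFTransform* pDecoder = NULL;
    hr = ppActivate[0]->ActivateObject(IID_PPV_ARGS(&pDecoder));
    // ... feed compressed samples in; hand the decoded D3D11 textures to the renderer ...
}
for (UINT32 i = 0; i < count; i++)
    ppActivate[i]->Release();
CoTaskMemFree(ppActivate);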
I have absolutely no experience in programming serial communication, and since I'm stuck with my code I'd really appreciate your help! Thanks in advance!
So now to my problem:
I have a generator with several different sensors that communicate over CAN with a microcontroller. This microcontroller in turn communicates, again over CAN, with a USBTin device. The USBTin is a little board carrying mainly a CAN controller and a microcontroller, both pre-programmed by its developer.
So my task now is to open my COM port, send the right messages to the USBTin (those are "S5" for the baud rate and 'O' for Open CAN) and then receive the data.
First of all, the functions and my problem:
The problem is that my output text field shows something like "PPPPPPPPPP,Râö". There are always these 10 P's and some random characters. I have no idea where the P's or the additional "Râö" come from. The actual output string should be something like "T1E18001F8". I verified that with hTerm, which is a terminal program for serial communication.
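For context, here is the command sequence as I understand the USBTin's LAWICEL/SLCAN protocol (an assumption on my part): each command is terminated with a carriage return, so the sketch below sends "S5\r" rather than a bare "S5".
// Sketch only - protocol details assumed, not verified against the hardware.
Serial com(com_x, 115200, 8, NOPARITY, 1);
if (com.Open() == 0)
{
    com.Write("S5\r");  // set the CAN bitrate (S5 = 250 kbit/s)
    com.Write("O\r");   // open the CAN channel
    // ... read frames such as "T1E18001F8..." one character at a time ...
    com.Write("C\r");   // close the CAN channel
    com.Close();
}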
OPEN:
long Serial::Open()
{
if (IsOpened()) return 0;
#ifdef UNICODE
wstring wtext(port.begin(),port.end());
#else
string wtext = port;
#endif
hComm = CreateFile(wtext.c_str(),
GENERIC_READ | GENERIC_WRITE,
0,
0,
OPEN_EXISTING,
FILE_ATTRIBUTE_NORMAL | FILE_FLAG_OVERLAPPED,
0);
if (hComm == INVALID_HANDLE_VALUE) {return 1;}
if (PurgeComm(hComm, PURGE_TXABORT | PURGE_RXABORT | PURGE_TXCLEAR |
PURGE_RXCLEAR) == 0) {return 2;}//purge
//get initial state
DCB dcbOri;
bool fSuccess;
fSuccess = GetCommState(hComm, &dcbOri);
if (!fSuccess) {return 3;}
DCB dcb1 = dcbOri;
dcb1.BaudRate = baud;
if (parity == 'E') dcb1.Parity = EVENPARITY;
else if (parity == 'O') dcb1.Parity = ODDPARITY;
else if (parity == 'M') dcb1.Parity = MARKPARITY;
else if (parity == 'S') dcb1.Parity = SPACEPARITY;
else if (parity == 'N') dcb1.Parity = NOPARITY;
dcb1.ByteSize = (BYTE)dsize;
if(stopbits==2) dcb1.StopBits = TWOSTOPBITS;
else if (stopbits == 1.5) dcb1.StopBits = ONE5STOPBITS;
else if (stopbits == 1) dcb1.StopBits = ONESTOPBIT;
dcb1.fOutxCtsFlow = false;
dcb1.fOutxDsrFlow = false;
dcb1.fOutX = false;
dcb1.fDtrControl = DTR_CONTROL_DISABLE;
dcb1.fRtsControl = RTS_CONTROL_DISABLE;
fSuccess = SetCommState(hComm, &dcb1);
delay(60);
if (!fSuccess) {return 4;}
fSuccess = GetCommState(hComm, &dcb1);
if (!fSuccess) {return 5;}
osReader = { 0 }; // Create the overlapped event. Must be closed before exiting to avoid a handle leak.
osReader.hEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
if (osReader.hEvent == NULL) {return 6;} // Error creating overlapped event; abort.
fWaitingOnRead = FALSE;
osWrite = { 0 };
osWrite.hEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
if (osWrite.hEvent == NULL) {return 7;}
if (!GetCommTimeouts(hComm, &timeouts_ori)) { return 8; } // Error getting time-outs.
COMMTIMEOUTS timeouts;
timeouts.ReadIntervalTimeout = 20;
timeouts.ReadTotalTimeoutMultiplier = 15;
timeouts.ReadTotalTimeoutConstant = 100;
timeouts.WriteTotalTimeoutMultiplier = 15;
timeouts.WriteTotalTimeoutConstant = 100;
if (!SetCommTimeouts(hComm, &timeouts)) { return 9; } // Error setting time-outs.
return 0;
}
WRITE:
bool Serial::Write(char *data)
{
if (!IsOpened()) {
return false;
}
BOOL fRes;
DWORD dwWritten;
long n = strlen(data);
if (n < 0) n = 0;
else if(n > 1024) n = 1024;
// Issue write.
if (!WriteFile(hComm, data, n, &dwWritten, &osWrite)) {
if (!GetLastError() != ERROR_IO_PENDING) {fRes = FALSE;} // WriteFile failed, but it isn't delayed. Report error and abort.
else {// Write is pending.
if (!GetOverlappedResult(hComm, &osWrite, &dwWritten, TRUE))
fRes = FALSE;
else fRes = TRUE;// Write operation completed successfully.
}
}
else fRes = TRUE;// WriteFile completed immediately.
return fRes;
}
READCHAR:
char Serial::ReadChar(bool& success)
{
success = false;
if (!IsOpened()) {return 0;}
DWORD dwRead;
DWORD length=1;
BYTE* data = (BYTE*)(&rxchar);
//the creation of the overlapped read operation
if (!fWaitingOnRead) {
// Issue read operation.
if (!ReadFile(hComm, data, length, &dwRead, &osReader)) {
if (GetLastError() != ERROR_IO_PENDING) { /*Error*/}
else { fWaitingOnRead = TRUE; /*Waiting*/}
}
else {if(dwRead==length) success = true;}//success
}
//detection of the completion of an overlapped read operation
DWORD dwRes;
if (fWaitingOnRead) {
dwRes = WaitForSingleObject(osReader.hEvent, READ_TIMEOUT);
switch (dwRes)
{
// Read completed.
case WAIT_OBJECT_0:
if (!GetOverlappedResult(hComm, &osReader, &dwRead, FALSE))
{/*Error*/ }
else {
if (dwRead == length) success = true;
fWaitingOnRead = FALSE; // Reset flag so that another operation can be issued.
}// Read completed successfully.
break;
case WAIT_TIMEOUT:
// Operation isn't complete yet.
break;
default:
// Error in the WaitForSingleObject;
break;
}
}
return rxchar;
}
And finally, the excerpt of the wxWidgets main code that displays the received data:
void GUI_1_2Frame::OnConnectButtonClick(wxCommandEvent& (event))
{
char tempString[11] = {0}; /* one spare byte keeps the 10 received chars NUL-terminated */
bool ReadChar_success = true;
char temp_Char;
/* Preset Serial Port setting */
Serial com(com_x, 115200, 8, NOPARITY, 1);
char* buffer;
if(connection_flag)
{
/* Port was connected; Disconnect button was used */
com.Close();
wxMessageBox(_("Port closed"),_("Info!"),wxICON_INFORMATION);
connection_flag = 0;
ConnectButton->SetLabel("Connect");
TextCtrl1->SetValue("");
}
else
{
/* If open() == true -> INVALID HANDLE */
if(com.Open())
{
wxMessageBox(_("Port not available"),_("ERROR!"),wxICON_ERROR);
}
else /* Port Opened */
{
TextCtrl1->SetValue(com.GetPort());
ConnectButton->SetLabel("Disconnect");
connection_flag = 1;
}
if(com.Write("S5"))
{
TextCtrl1->SetValue("Baudrate sent!\n");
delay(100);
if(com.WriteChar('O'))
{
TextCtrl1->SetValue("Baudrate & Open Command sent!");
int i =0;
while(i<10)
{
temp_Char = com.ReadChar(ReadChar_success);
tempString[i] = temp_Char;
i++;
}
com.WriteChar('C');
com.Close();
//com.readSerialPort(data, MAX_DATA_LENGTH);
TextCtrl2->SetValue(tempString);
//wxMessageOutput::Get()->Printf("%s", tempString);
}
else
{
TextCtrl1->SetValue("Open Command Error!"); }
}
else
{
TextCtrl1->SetValue("Error!");
}
}
}
Since I am not a native English speaker, I apologize for my language mistakes.
Thanks a lot everybody - again, I really appreciate every single hint!
Greetings,
MSol
I found a way to enumerate other programs' handles, but now I have a problem: I cannot see handles of type Process. I need to check which programs have open handles to my process.
When I check the output, it says "unnamed", and I don't know how to fix it.
Should I do this via a driver, or is there another way to do it without a driver?
pid = _wtoi(argv[1]);
if (!(processHandle = OpenProcess(PROCESS_DUP_HANDLE, FALSE, pid)))
{
printf("Could not open PID %d! (Don't try to open a system process.)\n", pid);
return 1;
}
handleInfo = (PSYSTEM_HANDLE_INFORMATION)malloc(handleInfoSize);
/* NtQuerySystemInformation won't give us the correct buffer size,
so we guess by doubling the buffer size. */
while ((status = NtQuerySystemInformation(
SystemHandleInformation,
handleInfo,
handleInfoSize,
NULL
)) == STATUS_INFO_LENGTH_MISMATCH)
handleInfo = (PSYSTEM_HANDLE_INFORMATION)realloc(handleInfo, handleInfoSize *= 2);
/* NtQuerySystemInformation stopped giving us STATUS_INFO_LENGTH_MISMATCH. */
if (!NT_SUCCESS(status))
{
printf("NtQuerySystemInformation failed!\n");
return 1;
}
for (i = 0; i < handleInfo->HandleCount; i++)
{
SYSTEM_HANDLE handle = handleInfo->Handles[i];
HANDLE dupHandle = NULL;
POBJECT_TYPE_INFORMATION objectTypeInfo;
PVOID objectNameInfo;
UNICODE_STRING objectName;
ULONG returnLength;
/* Check if this handle belongs to the PID the user specified. */
if (handle.ProcessId != pid)
continue;
/* Duplicate the handle so we can query it. */
if (!NT_SUCCESS(NtDuplicateObject(
processHandle,
handle.Handle,
GetCurrentProcess(),
&dupHandle,
0,
0,
0
)))
{
printf("[%#x] Error!\n", handle.Handle);
continue;
}
/* Query the object type. */
objectTypeInfo = (POBJECT_TYPE_INFORMATION)malloc(0x1000);
if (!NT_SUCCESS(NtQueryObject(
dupHandle,
ObjectTypeInformation,
objectTypeInfo,
0x1000,
NULL
)))
{
printf("[%#x] Error!\n", handle.Handle);
CloseHandle(dupHandle);
continue;
}
/* Query the object name (unless it has an access of
0x0012019f, on which NtQueryObject could hang). */
if (handle.GrantedAccess == 0x0012019f)
{
/* We have the type, so display that. */
printf(
"[%#x] %.*S: (did not get name)\n",
handle.Handle,
objectTypeInfo->Name.Length / 2,
objectTypeInfo->Name.Buffer
);
free(objectTypeInfo);
CloseHandle(dupHandle);
continue;
}
objectNameInfo = malloc(0x1000);
if (!NT_SUCCESS(NtQueryObject(
dupHandle,
ObjectNameInformation,
objectNameInfo,
0x1000,
&returnLength
)))
{
/* Reallocate the buffer and try again. */
objectNameInfo = realloc(objectNameInfo, returnLength);
if (!NT_SUCCESS(NtQueryObject(
dupHandle,
ObjectNameInformation,
objectNameInfo,
returnLength,
NULL
)))
{
/* We have the type name, so just display that. */
printf(
"[%#x] %.*S: (could not get name)\n",
handle.Handle,
objectTypeInfo->Name.Length / 2,
objectTypeInfo->Name.Buffer
);
free(objectTypeInfo);
free(objectNameInfo);
CloseHandle(dupHandle);
continue;
}
}
/* Cast our buffer into an UNICODE_STRING. */
objectName = *(PUNICODE_STRING)objectNameInfo;
/* Print the information! */
if (objectName.Length)
{
/* The object has a name. */
printf(
"[%#x] %.*S: %.*S\n",
handle.Handle,
objectTypeInfo->Name.Length / 2,
objectTypeInfo->Name.Buffer,
objectName.Length / 2,
objectName.Buffer
);
}
else
{
/* Print something else. */
printf(
"[%#x] %.*S: (unnamed)\n",
handle.Handle,
objectTypeInfo->Name.Length / 2,
objectTypeInfo->Name.Buffer
);
}
free(objectTypeInfo);
free(objectNameInfo);
CloseHandle(dupHandle);
}
free(handleInfo);
CloseHandle(processHandle);
return 0;
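Here is an alternative that works the other way around: instead of querying handle names, it finds which other processes hold handles to the current process by matching the kernel object pointer, using SystemExtendedHandleInformation: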
void SearchMyProcessHandles()
{
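// Approach: open a handle to our own process, snapshot the system-wide
// extended handle table, find the kernel object our own handle refers to,
// then rescan the table for handles in other processes that reference the
// same object.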
ULONG UniqueProcessId = GetCurrentProcessId();
if (HANDLE hProcess = OpenProcess(MAXIMUM_ALLOWED, FALSE, UniqueProcessId))
{
NTSTATUS status;
union {
PSYSTEM_HANDLE_INFORMATION_EX pshi;
PVOID buf;
};
ULONG cb = 0x10000;
do
{
status = STATUS_INSUFFICIENT_RESOURCES;
if (buf = new UCHAR[cb += PAGE_SIZE])
{
if (0 <= (status = ZwQuerySystemInformation(SystemExtendedHandleInformation, buf, cb, &cb)))
{
if (ULONG_PTR NumberOfHandles = pshi->NumberOfHandles)
{
SYSTEM_HANDLE_TABLE_ENTRY_INFO_EX* Handles = pshi->Handles;
do
{
if (Handles->UniqueProcessId == UniqueProcessId &&
Handles->HandleValue == (ULONG_PTR)hProcess)
{
PVOID Object = Handles->Object;
Handles = pshi->Handles;
NumberOfHandles = pshi->NumberOfHandles;
do
{
if (Handles->Object == Object &&
Handles->UniqueProcessId != UniqueProcessId)
{
DbgPrint("%p %p %08x\n",
Handles->UniqueProcessId,
Handles->HandleValue,
Handles->GrantedAccess);
}
} while (Handles++, --NumberOfHandles);
break;
}
} while (Handles++, --NumberOfHandles);
}
}
delete [] buf;
}
} while (status == STATUS_INFO_LENGTH_MISMATCH);
CloseHandle(hProcess);
}
}
I want to display some geometries using the SolidWireframe effect; it appears to be an .fx file with multiple techniques, each a single pass.
So I wrote a class to perform what I thought the effect file was doing. The header files are my compiled shaders, which are copies of the NVIDIA ones, compiled with shader model 4.0.
# include "SolidWireFrame.h"
# include "WireGeometryShader.h"
# include "GeometryShader.h"
# include "WirePixelShader.h"
# include "ColorPixelShader.h"
# include "WireVertexShader.h"
namespace DirectX11DrawingAdapter
{
SolidWireFrame::SolidWireFrame()
{
}
bool SolidWireFrame::Init(ID3D11Device* device)
{
m_pdepthLessEqualStencilState = nullptr;
m_pfillRasterizerState = nullptr;
m_pBlendingState = nullptr;
m_pdepthWriteLessStencilState = nullptr;
m_vertexShader = nullptr;
m_geometryShader = nullptr;
m_pixelColorShader = nullptr;
m_geometrySolidWireShader = nullptr;
m_pixelSolidWireShader = nullptr;
m_vertexLayout = nullptr;
if (!InitializeDepthStencilState(device))
return false;
if (!InitializeRaterizerState(device))
return false;
if (!InitializeBlendState(device))
return false;
if (!InitializeVertexShader(device))
return false;
if (!InitializeGeometryShader(device))
return false;
if (!InitializePixelShader(device))
return false;
return true;
}
void SolidWireFrame::ApplySolidWirePattern(ID3D11DeviceContext* deviceContext)
{
SetDepthStencilState(deviceContext, m_pdepthLessEqualStencilState);
SetRasterizerState(deviceContext, m_pfillRasterizerState);
SetBlendState(deviceContext, m_pBlendingState);
deviceContext->VSSetShader(m_vertexShader, NULL, 0);
deviceContext->GSSetShader(m_geometrySolidWireShader, NULL, 0);
deviceContext->PSSetShader(m_pixelSolidWireShader, NULL, 0);
}
void SolidWireFrame::ApplyDepthAndSolid(ID3D11DeviceContext* deviceContext)
{
SetDepthStencilState(deviceContext, m_pdepthWriteLessStencilState);
SetRasterizerState(deviceContext, m_pfillRasterizerState);
SetBlendState(deviceContext, m_pBlendingState);
deviceContext->VSSetShader(m_vertexShader, NULL, 0);
deviceContext->GSSetShader(m_geometryShader, NULL, 0);
deviceContext->PSSetShader(m_pixelColorShader, NULL, 0);
}
void SolidWireFrame::ApplyDepthOnly(ID3D11DeviceContext* deviceContext)
{
SetDepthStencilState(deviceContext, m_pdepthWriteLessStencilState);
SetRasterizerState(deviceContext, m_pfillRasterizerState);
SetBlendState(deviceContext, m_pnoColorBlendingState);
deviceContext->VSSetShader(m_vertexShader, NULL, 0);
deviceContext->GSSetShader(m_geometryShader, NULL, 0);
deviceContext->PSSetShader(m_pixelColorShader, NULL, 0);
}
void SolidWireFrame::ApplySolidOnly(ID3D11DeviceContext* deviceContext)
{
SetDepthStencilState(deviceContext, m_pdepthLessEqualStencilState);
SetRasterizerState(deviceContext, m_pfillRasterizerState);
SetBlendState(deviceContext, m_pBlendingState);
deviceContext->VSSetShader(m_vertexShader, NULL, 0);
deviceContext->GSSetShader(m_geometryShader, NULL, 0);
deviceContext->PSSetShader(m_pixelColorShader, NULL, 0);
}
bool SolidWireFrame::InitializeBlendState(ID3D11Device* device)
{
D3D11_BLEND_DESC blendStateDescription;
// Clear the blend state description.
ZeroMemory(&blendStateDescription, sizeof(D3D11_BLEND_DESC));
blendStateDescription.RenderTarget[0].BlendEnable = TRUE;
blendStateDescription.RenderTarget[0].SrcBlend = D3D11_BLEND_SRC_ALPHA;
blendStateDescription.RenderTarget[0].DestBlend = D3D11_BLEND_INV_SRC_ALPHA;
blendStateDescription.RenderTarget[0].BlendOp = D3D11_BLEND_OP_ADD;
blendStateDescription.RenderTarget[0].SrcBlendAlpha = D3D11_BLEND_SRC_ALPHA;
blendStateDescription.RenderTarget[0].DestBlendAlpha = D3D11_BLEND_DEST_ALPHA;
blendStateDescription.RenderTarget[0].BlendOpAlpha = D3D11_BLEND_OP_ADD;
blendStateDescription.RenderTarget[0].RenderTargetWriteMask = D3D11_COLOR_WRITE_ENABLE_ALL;
// Create the blend state using the description.
HRESULT result = device->CreateBlendState(&blendStateDescription, &m_pBlendingState);
if (FAILED(result))
{
return false;
}
//ZeroMemory(&blendStateDescription, sizeof(D3D11_BLEND_DESC));
blendStateDescription.RenderTarget[0].BlendEnable = FALSE;
result = device->CreateBlendState(&blendStateDescription, &m_pnoColorBlendingState);
if (FAILED(result))
{
return false;
}
return true;
}
bool SolidWireFrame::InitializeDepthStencilState(ID3D11Device* device)
{
D3D11_DEPTH_STENCIL_DESC depthStencilDesc;
// Initialize the description of the stencil state.
ZeroMemory(&depthStencilDesc, sizeof(depthStencilDesc));
// Set up the description of the stencil state.
depthStencilDesc.DepthEnable = TRUE;
depthStencilDesc.DepthWriteMask = D3D11_DEPTH_WRITE_MASK_ZERO;
depthStencilDesc.DepthFunc = D3D11_COMPARISON_LESS_EQUAL;
depthStencilDesc.StencilEnable = FALSE;
depthStencilDesc.StencilReadMask = 255;
depthStencilDesc.StencilWriteMask = 255;
depthStencilDesc.FrontFace.StencilFailOp = D3D11_STENCIL_OP_KEEP;
depthStencilDesc.FrontFace.StencilDepthFailOp = D3D11_STENCIL_OP_KEEP;
depthStencilDesc.FrontFace.StencilPassOp = D3D11_STENCIL_OP_KEEP;
depthStencilDesc.FrontFace.StencilFunc = D3D11_COMPARISON_ALWAYS;
depthStencilDesc.BackFace.StencilFailOp = D3D11_STENCIL_OP_KEEP;
depthStencilDesc.BackFace.StencilDepthFailOp = D3D11_STENCIL_OP_KEEP;
depthStencilDesc.BackFace.StencilPassOp = D3D11_STENCIL_OP_KEEP;
depthStencilDesc.BackFace.StencilFunc = D3D11_COMPARISON_ALWAYS;
// Create the depth stencil state.
HRESULT result = device->CreateDepthStencilState(&depthStencilDesc, &m_pdepthLessEqualStencilState);
if (FAILED(result))
{
return false;
}
//ZeroMemory(&depthStencilDesc, sizeof(depthStencilDesc));
depthStencilDesc.DepthEnable = true;
depthStencilDesc.DepthWriteMask = D3D11_DEPTH_WRITE_MASK_ALL;
depthStencilDesc.DepthFunc = D3D11_COMPARISON_LESS;
result = device->CreateDepthStencilState(&depthStencilDesc, &m_pdepthWriteLessStencilState);
if (FAILED(result))
{
return false;
}
return true;
}
bool SolidWireFrame::InitializeRaterizerState(ID3D11Device* device)
{
D3D11_RASTERIZER_DESC rasterizerDescription;
ZeroMemory(&rasterizerDescription, sizeof(D3D11_RASTERIZER_DESC));
rasterizerDescription.FillMode = D3D11_FILL_SOLID;
rasterizerDescription.CullMode = D3D11_CULL_NONE;
rasterizerDescription.DepthBias = FALSE;
rasterizerDescription.MultisampleEnable = TRUE;
rasterizerDescription.FrontCounterClockwise = FALSE;
rasterizerDescription.DepthBiasClamp = 0.000000000;
rasterizerDescription.SlopeScaledDepthBias = 0.000000000;
rasterizerDescription.DepthClipEnable = TRUE;
rasterizerDescription.ScissorEnable = FALSE;
rasterizerDescription.AntialiasedLineEnable = FALSE;
HRESULT hr = device->CreateRasterizerState(&rasterizerDescription, &m_pfillRasterizerState);
if (FAILED(hr))
return false;
return true;
}
bool SolidWireFrame::SetDepthStencilState(ID3D11DeviceContext* deviceContext, ID3D11DepthStencilState* depthStencilState)
{
deviceContext->OMSetDepthStencilState(depthStencilState, 0);
return false;
}
bool SolidWireFrame::SetRasterizerState(ID3D11DeviceContext* deviceContext, ID3D11RasterizerState* raterizerState)
{
deviceContext->RSSetState(raterizerState);
return false;
}
bool SolidWireFrame::SetBlendState(ID3D11DeviceContext* deviceContext, ID3D11BlendState* blendState)
{
float blendFactor[4];
// Setup the blend factor.
blendFactor[0] = 0.0f;
blendFactor[1] = 0.0f;
blendFactor[2] = 0.0f;
blendFactor[3] = 0.0f;
// Turn off the alpha blending.
deviceContext->OMSetBlendState(blendState, blendFactor, 0xffffffff);
return false;
}
bool SolidWireFrame::InitializeVertexShader(ID3D11Device* device)
{
// Create the vertex shader
HRESULT hr = device->CreateVertexShader(m_pwireVertexShader, sizeof(m_pwireVertexShader), NULL, &m_vertexShader);
if (FAILED(hr))
{
return false;
}
// Define the input layout
D3D11_INPUT_ELEMENT_DESC layout[] =
{
{ "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 },
{ "NORMAL", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 12, D3D11_INPUT_PER_VERTEX_DATA, 0 },
{ "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 24, D3D11_INPUT_PER_VERTEX_DATA, 0 },
};
UINT numElements = ARRAYSIZE(layout);
// Create the input layout
hr = device->CreateInputLayout(layout, numElements, m_pwireVertexShader, sizeof(m_pwireVertexShader), &m_vertexLayout);
if (FAILED(hr))
return false;
return true;
}
bool SolidWireFrame::InitializeGeometryShader(ID3D11Device* device)
{
// Create the geometry shader
HRESULT hr = device->CreateGeometryShader(m_pwireGeometryShader, sizeof(m_pwireGeometryShader), NULL, &m_geometrySolidWireShader);
if (FAILED(hr))
return false;
hr = device->CreateGeometryShader(m_pgeometryShader, sizeof(m_pgeometryShader), NULL, &m_geometryShader);
if (FAILED(hr))
return false;
return true;
}
bool SolidWireFrame::InitializePixelShader(ID3D11Device* device)
{
// Create the pixel shader
HRESULT hr = device->CreatePixelShader(m_pwirePixelShader, sizeof(m_pwirePixelShader), NULL, &m_pixelSolidWireShader);
if (FAILED(hr))
return false;
hr = device->CreatePixelShader(m_pcolorPixelShader, sizeof(m_pcolorPixelShader), NULL, &m_pixelColorShader);
if (FAILED(hr))
return false;
return true;
}
}
Now in my application, at the end of initializing the device and context etc., I call:
SolidWireFrame* solidWireFrame = new SolidWireFrame();
solidWireFrame->Init(m_pd3dDevice.Get());
m_solidWireFrame.reset(solidWireFrame);
And my render loop:
//m_solidWireFrame->ApplyDepthAndSolid(m_pImmediateContext);
//m_solidWireFrame->ApplyDepthOnly(m_pImmediateContext);
//m_solidWireFrame->ApplySolidOnly(m_pImmediateContext);
/*for (it_drawingData iterator = dd.begin(); iterator != dd.end(); iterator++)
{
DrawingData *drawingData = iterator->second;
if (drawingData->IsRendered)
{
m_pImmediateContext->IASetVertexBuffers(0, 1, &drawingData->VertexBuffer, &stride, &offset);
m_pImmediateContext->IASetIndexBuffer(drawingData->IndexBuffer, DXGI_FORMAT_R16_UINT, 0);
m_pImmediateContext->DrawIndexed(drawingData->IndexData.size(), 0, 0);
}
}
//not sure if i should call this between renders?
m_pdesignerSwapChain->Present(0, 0);*/
m_solidWireFrame->ApplySolidWirePattern(m_pImmediateContext);
for (it_drawingData iterator = dd.begin(); iterator != dd.end(); iterator++)
{
DrawingData *drawingData = iterator->second;
if (drawingData->IsRendered)
{
m_pImmediateContext->IASetVertexBuffers(0, 1, &drawingData->VertexBuffer, &stride, &offset);
m_pImmediateContext->IASetIndexBuffer(drawingData->IndexBuffer, DXGI_FORMAT_R16_UINT, 0);
m_pImmediateContext->DrawIndexed(drawingData->IndexData.size(), 0, 0);
}
}
Fixed it.
I was not setting the world/view/projection constant buffer for the wireframe vertex shader.
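In case it helps anyone else, a rough sketch of the kind of code that was missing (buffer and matrix names are placeholders, not from my project):
// Upload world*view*projection and bind it to slot b0 before drawing.
// m_pTransformBuffer is assumed to be an ID3D11Buffer* created elsewhere
// with D3D11_BIND_CONSTANT_BUFFER.
struct CBTransforms { DirectX::XMMATRIX worldViewProj; };
CBTransforms cb;
// HLSL cbuffers default to column-major packing, hence the transpose.
cb.worldViewProj = DirectX::XMMatrixTranspose(world * view * projection);
m_pImmediateContext->UpdateSubresource(m_pTransformBuffer, 0, nullptr, &cb, 0, 0);
m_pImmediateContext->VSSetConstantBuffers(0, 1, &m_pTransformBuffer);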
I have searched for a way to retrieve information from a digitally signed PE file. I need the publisher, publisher link, issuer name and subject name. I need WinAPI / C / C++ code (functions), and I need a fast method; I don't need to check whether the signature is valid or not.
Here is code that I wrote for a project of mine that will do this. It returns the details in a struct of type NSIGINFO. Feel free to use it - no attribution necessary, but I would appreciate it if you would leave the copyright intact.
If there are any functions missing (I had to consolidate things from a couple of different places, so I may have missed something), please let me know and I'll make the necessary tweaks.
Let me know how this works for you. Good luck.
The header file, NAuthenticode.h:
// NAuthenticode.h: Functions for checking signatures in files
//
// Copyright (c) 2008-2012, Nikolaos D. Bougalis <nikb#bougalis.net>
#ifndef B82FBB5B_C0F8_43A5_9A31_619BB690706C
#define B82FBB5B_C0F8_43A5_9A31_619BB690706C
#include <wintrust.h>
#include <softpub.h>
#include <imagehlp.h>
struct NSIGINFO
{
LONG lValidationResult;
LPTSTR lpszPublisher;
LPTSTR lpszPublisherEmail;
LPTSTR lpszPublisherUrl;
LPTSTR lpszAuthority;
LPTSTR lpszFriendlyName;
LPTSTR lpszProgramName;
LPTSTR lpszPublisherLink;
LPTSTR lpszMoreInfoLink;
LPTSTR lpszSignature;
LPTSTR lpszSerial;
BOOL bHasSigTime;
SYSTEMTIME stSigTime;
};
VOID NCertFreeSigInfo(NSIGINFO *pSigInfo);
BOOL NVerifyFileSignature(LPCTSTR lpszFileName, NSIGINFO *pSigInfo, HANDLE hHandle = INVALID_HANDLE_VALUE);
BOOL NCertGetNameString(PCCERT_CONTEXT pCertContext, DWORD dwType,
DWORD dwFlags, LPTSTR *lpszNameString);
BOOL NCheckFileCertificates(HANDLE hFile,
VOID (*pCallback)(PCCERT_CONTEXT, LPVOID), PVOID pParam);
#endif
The implementation, NAuthenticode.cpp:
// NAuthenticode.cpp: Various routines related to validating file signatures
//
// Copyright (c) 2008-2012, Nikolaos D. Bougalis <nikb#bougalis.net>
#include "stdafx.h"
#include "NAuthenticode.h"
//////////////////////////////////////////////////////////////////////////
#pragma comment(lib, "crypt32")
#pragma comment(lib, "imagehlp")
#pragma comment(lib, "wintrust")
//////////////////////////////////////////////////////////////////////////
#define SIG_ENCODING (X509_ASN_ENCODING | PKCS_7_ASN_ENCODING)
//////////////////////////////////////////////////////////////////////////
// Some utility functions
LPVOID NHeapAlloc(SIZE_T dwBytes)
{
if(dwBytes == 0)
return NULL;
return HeapAlloc(GetProcessHeap(), HEAP_ZERO_MEMORY, dwBytes);
}
//////////////////////////////////////////////////////////////////////////
LPVOID NHeapFree(LPVOID lpMem)
{
if(lpMem != NULL)
HeapFree(GetProcessHeap(), 0, lpMem);
return NULL;
}
//////////////////////////////////////////////////////////////////////////
LPSTR NConvertW2A(LPCWSTR lpszString, int nLen, UINT nCodePage)
{
ASSERT(lpszString != NULL);
int ret = WideCharToMultiByte(nCodePage, 0, lpszString, nLen, NULL, 0, NULL, NULL);
if(ret <= 0)
return NULL;
LPSTR lpszOutString = (LPSTR)NHeapAlloc((ret + 1) * sizeof(CHAR));
if(lpszOutString == NULL)
return NULL;
ret = WideCharToMultiByte(nCodePage, 0, lpszString, nLen, lpszOutString, ret, NULL, NULL);
if(ret <= 0)
lpszOutString = (LPSTR)NHeapFree(lpszOutString);
return lpszOutString;
}
//////////////////////////////////////////////////////////////////////////
LPWSTR NDupString(LPCWSTR lpszString, int nLen)
{
if(nLen == -1)
nLen = (int)wcslen(lpszString);
LPWSTR lpszOutString = (LPWSTR)NHeapAlloc((2 + nLen) * sizeof(WCHAR));
if((lpszOutString != NULL) && (nLen != 0))
wcsncpy(lpszOutString, lpszString, nLen + 1);
return lpszOutString;
}
//////////////////////////////////////////////////////////////////////////
LPTSTR NConvertW2T(LPCWSTR lpszString, int nLen, UINT nCodePage)
{
ASSERT(lpszString != NULL);
#ifndef UNICODE
return (LPTSTR)NConvertW2A(lpszString, nLen, nCodePage);
#else
return (LPTSTR)NDupString(lpszString, nLen);
#endif
}
//////////////////////////////////////////////////////////////////////////
LPWSTR NConvertA2W(LPCSTR lpszString, int nLen, UINT nCodePage)
{
ASSERT(lpszString != NULL);
int ret = MultiByteToWideChar(nCodePage, 0, lpszString, nLen, NULL, 0);
if(ret <= 0)
return NULL;
LPWSTR lpszOutString = (LPWSTR)NHeapAlloc((ret + 1) * sizeof(WCHAR));
if(lpszOutString == NULL)
return NULL;
ret = MultiByteToWideChar(nCodePage, 0, lpszString, nLen, lpszOutString, ret);
if(ret <= 0)
lpszOutString = (LPWSTR)NHeapFree(lpszOutString);
return lpszOutString;
}
//////////////////////////////////////////////////////////////////////////
LPWSTR NConvertT2W(LPCTSTR lpszString, int nLen, UINT nCodePage)
{
ASSERT(lpszString != NULL);
#ifndef UNICODE
return NConvertA2W((LPCSTR)lpszString, nLen, nCodePage);
#else
return NDupString((LPWSTR)lpszString, nLen);
#endif
}
//////////////////////////////////////////////////////////////////////////
VOID NCertFreeSigInfo(NSIGINFO *pSigInfo)
{
if(pSigInfo == NULL)
return;
__try
{ // Be extra careful
if(pSigInfo->lpszPublisher)
pSigInfo->lpszPublisher = (LPTSTR)NHeapFree(pSigInfo->lpszPublisher);
if(pSigInfo->lpszPublisherEmail)
pSigInfo->lpszPublisherEmail = (LPTSTR)NHeapFree(pSigInfo->lpszPublisherEmail);
if(pSigInfo->lpszPublisherUrl)
pSigInfo->lpszPublisherUrl = (LPTSTR)NHeapFree(pSigInfo->lpszPublisherUrl);
if(pSigInfo->lpszAuthority)
pSigInfo->lpszAuthority = (LPTSTR)NHeapFree(pSigInfo->lpszAuthority);
if(pSigInfo->lpszProgramName)
pSigInfo->lpszProgramName = (LPTSTR)NHeapFree(pSigInfo->lpszProgramName);
if(pSigInfo->lpszPublisherLink)
pSigInfo->lpszPublisherLink = (LPTSTR)NHeapFree(pSigInfo->lpszPublisherLink);
if(pSigInfo->lpszMoreInfoLink)
pSigInfo->lpszMoreInfoLink = (LPTSTR)NHeapFree(pSigInfo->lpszMoreInfoLink);
if(pSigInfo->lpszSignature)
pSigInfo->lpszSignature = (LPTSTR)NHeapFree(pSigInfo->lpszSignature);
if(pSigInfo->lpszSerial)
pSigInfo->lpszSerial = (LPTSTR)NHeapFree(pSigInfo->lpszSerial);
}
__except(EXCEPTION_EXECUTE_HANDLER)
{
}
}
//////////////////////////////////////////////////////////////////////////
static BOOL NCertGetNameString(PCCERT_CONTEXT pCertContext, DWORD dwType, DWORD dwFlags, LPTSTR *lpszNameString)
{
if(pCertContext == NULL)
return FALSE;
DWORD dwData = CertGetNameString(pCertContext, dwType, 0, NULL, NULL, 0);
if(dwData == 0)
return FALSE;
*lpszNameString = (LPTSTR)NHeapAlloc((dwData + 1) * sizeof(TCHAR));
if(*lpszNameString == NULL)
return FALSE;
dwData = CertGetNameString(pCertContext, dwType, dwFlags, NULL, *lpszNameString, dwData);
if(dwData == 0)
{
NHeapFree(*lpszNameString);
return FALSE;
}
return TRUE;
}
//////////////////////////////////////////////////////////////////////////
static BOOL NCryptDecodeObject(__in LPCSTR lpszObjectId, __in_bcount(cbEncoded) const BYTE *pbEncoded, __in DWORD cbEncoded,
__inout DWORD &dwBuffer, __out void *pBuffer = NULL, __in DWORD dwFlags = 0)
{
if(((pBuffer == NULL) && (dwBuffer != 0)) || ((dwBuffer == 0) && (pBuffer != NULL)))
{ // What? You're passing a NULL pointer and a non-zero size? You so crazy!!!!
ASSERT(FALSE);
SetLastError(ERROR_INVALID_PARAMETER);
return FALSE;
}
return CryptDecodeObject(SIG_ENCODING, lpszObjectId, pbEncoded, cbEncoded, dwFlags, pBuffer, &dwBuffer);
}
//////////////////////////////////////////////////////////////////////////
static BOOL NCryptDecodeObject(__in LPCSTR lpszObjectId, __in PCRYPT_ATTR_BLOB pObject,
__inout DWORD &dwBuffer, __out void *pBuffer = NULL, __in DWORD dwFlags = 0)
{
if((pObject == NULL) || ((dwBuffer == 0) && (pBuffer != NULL)) || ((dwBuffer != 0) && (pBuffer == NULL)))
{
SetLastError(ERROR_INVALID_PARAMETER);
return FALSE;
}
return CryptDecodeObject(SIG_ENCODING, lpszObjectId, pObject->pbData, pObject->cbData, dwFlags, pBuffer, &dwBuffer);
}
//////////////////////////////////////////////////////////////////////////
static BOOL WGetSignTimestamp(PCRYPT_ATTRIBUTES pAttributes, SYSTEMTIME &stTime, LPCSTR lpszObjId)
{
if((pAttributes == NULL) || (pAttributes->cAttr == 0) || (lpszObjId == NULL) || (*lpszObjId == 0))
return FALSE;
for(DWORD dwAttr = 0; dwAttr < pAttributes->cAttr; dwAttr++)
{
if(strcmp(lpszObjId, pAttributes->rgAttr[dwAttr].pszObjId) == 0)
{
DWORD dwSize = sizeof(FILETIME);
FILETIME ftCert;
if(NCryptDecodeObject(lpszObjId, &pAttributes->rgAttr[dwAttr].rgValue[0], dwSize, (PVOID)&ftCert))
{
FILETIME ftLocal;
if(FileTimeToLocalFileTime(&ftCert, &ftLocal) && FileTimeToSystemTime(&ftLocal, &stTime))
return TRUE;
}
}
}
return FALSE;
}
//////////////////////////////////////////////////////////////////////////
static BOOL NVerifyFileSignatureWorker(LPWSTR lpszFileName, WINTRUST_DATA &wtData, NSIGINFO *pSigInfo)
{
if(pSigInfo != NULL)
memset(pSigInfo, 0, sizeof(NSIGINFO));
GUID guidAction = WINTRUST_ACTION_GENERIC_VERIFY_V2;
BOOL bVerified = FALSE;
LONG lRet = WinVerifyTrust((HWND)INVALID_HANDLE_VALUE, &guidAction, &wtData);
if(lRet != 0)
{
if(pSigInfo != NULL)
pSigInfo->lValidationResult = lRet;
return FALSE;
}
if(pSigInfo == NULL)
return TRUE;
HCERTSTORE hStore = NULL;
HCRYPTMSG hMsg = NULL;
if(!CryptQueryObject(CERT_QUERY_OBJECT_FILE, lpszFileName, CERT_QUERY_CONTENT_FLAG_PKCS7_SIGNED_EMBED, CERT_QUERY_FORMAT_FLAG_BINARY, 0, NULL, NULL, NULL, &hStore, &hMsg, NULL))
return FALSE;
PCMSG_SIGNER_INFO pSignerInfo = NULL, pCounterSignerInfo = NULL;
DWORD dwSignerInfo = 0, dwCounterSignerInfo = 0;
if(CryptMsgGetParam(hMsg, CMSG_SIGNER_INFO_PARAM, 0, NULL, &dwSignerInfo) && (dwSignerInfo != 0))
pSignerInfo = (PCMSG_SIGNER_INFO)NHeapAlloc(dwSignerInfo);
if((pSignerInfo != NULL) && CryptMsgGetParam(hMsg, CMSG_SIGNER_INFO_PARAM, 0, (PVOID)pSignerInfo, &dwSignerInfo))
{
for(DWORD dwAttr = 0; dwAttr < pSignerInfo->AuthAttrs.cAttr; dwAttr++)
{
if((strcmp(SPC_SP_OPUS_INFO_OBJID, pSignerInfo->AuthAttrs.rgAttr[dwAttr].pszObjId) != 0))
continue;
PSPC_SP_OPUS_INFO pOpus = NULL;
DWORD dwData = 0;
if(NCryptDecodeObject(SPC_SP_OPUS_INFO_OBJID, &pSignerInfo->AuthAttrs.rgAttr[dwAttr].rgValue[0], dwData) && (dwData != 0))
pOpus = (PSPC_SP_OPUS_INFO)NHeapAlloc(dwData);
if((pOpus != NULL) && NCryptDecodeObject(SPC_SP_OPUS_INFO_OBJID, &pSignerInfo->AuthAttrs.rgAttr[dwAttr].rgValue[0], dwData, (PVOID)pOpus))
{
pSigInfo->lpszProgramName = NConvertW2T(pOpus->pwszProgramName);
if(pOpus->pPublisherInfo != NULL)
{
switch(pOpus->pPublisherInfo->dwLinkChoice)
{
case SPC_URL_LINK_CHOICE:
pSigInfo->lpszPublisherLink = NConvertW2T(pOpus->pPublisherInfo->pwszUrl);
break;
case SPC_FILE_LINK_CHOICE:
pSigInfo->lpszPublisherLink = NConvertW2T(pOpus->pPublisherInfo->pwszFile);
break;
}
}
if(pOpus->pMoreInfo != NULL)
{
switch (pOpus->pMoreInfo->dwLinkChoice)
{
case SPC_URL_LINK_CHOICE:
pSigInfo->lpszMoreInfoLink = NConvertW2T(pOpus->pMoreInfo->pwszUrl);
break;
case SPC_FILE_LINK_CHOICE:
pSigInfo->lpszMoreInfoLink = NConvertW2T(pOpus->pMoreInfo->pwszFile);
break;
}
}
}
if(pOpus != NULL)
NHeapFree(pOpus);
break;
}
CERT_INFO ci;
ci.Issuer = pSignerInfo->Issuer;
ci.SerialNumber = pSignerInfo->SerialNumber;
PCCERT_CONTEXT pCertContext = CertFindCertificateInStore(hStore, SIG_ENCODING, 0, CERT_FIND_SUBJECT_CERT, (PVOID)&ci, NULL);
if(pCertContext != NULL)
{
if(pCertContext->pCertInfo->SerialNumber.cbData != 0)
{
pSigInfo->lpszSerial = (LPTSTR)NHeapAlloc(((pCertContext->pCertInfo->SerialNumber.cbData * 2) + 1) * sizeof(TCHAR));
if(pSigInfo->lpszSerial != NULL)
{
LPTSTR lpszPointer = pSigInfo->lpszSerial;
for(DWORD dwCount = pCertContext->pCertInfo->SerialNumber.cbData; dwCount != 0; dwCount--)
lpszPointer += _stprintf(lpszPointer, _T("%02X"), pCertContext->pCertInfo->SerialNumber.pbData[dwCount - 1]);
}
}
if(!NCertGetNameString(pCertContext, CERT_NAME_FRIENDLY_DISPLAY_TYPE, CERT_NAME_ISSUER_FLAG, &pSigInfo->lpszFriendlyName))
pSigInfo->lpszFriendlyName = NULL;
if(!NCertGetNameString(pCertContext, CERT_NAME_SIMPLE_DISPLAY_TYPE, CERT_NAME_ISSUER_FLAG, &pSigInfo->lpszAuthority))
pSigInfo->lpszAuthority = NULL;
if(!NCertGetNameString(pCertContext, CERT_NAME_SIMPLE_DISPLAY_TYPE, 0, &pSigInfo->lpszPublisher))
pSigInfo->lpszPublisher = NULL;
if(!NCertGetNameString(pCertContext, CERT_NAME_URL_TYPE, 0, &pSigInfo->lpszPublisherUrl))
pSigInfo->lpszPublisherUrl = NULL;
if(!NCertGetNameString(pCertContext, CERT_NAME_EMAIL_TYPE, 0, &pSigInfo->lpszPublisherEmail))
pSigInfo->lpszPublisherEmail = NULL;
CertFreeCertificateContext(pCertContext);
}
for(DWORD dwAttr = 0, dwData; dwAttr < pSignerInfo->AuthAttrs.cAttr; dwAttr++)
{
if((strcmp(szOID_RSA_signingTime, pSignerInfo->AuthAttrs.rgAttr[dwAttr].pszObjId) == 0) && (pSignerInfo->AuthAttrs.rgAttr[dwAttr].cValue != 0))
{
FILETIME ftCert;
dwData = sizeof(FILETIME);
if(NCryptDecodeObject(szOID_RSA_signingTime, &pSignerInfo->AuthAttrs.rgAttr[dwAttr].rgValue[0], dwData, (PVOID)&ftCert))
{
FILETIME ftLocal;
if(FileTimeToLocalFileTime(&ftCert, &ftLocal))
{
if(!FileTimeToSystemTime(&ftLocal, &pSigInfo->stSigTime))
memset(&pSigInfo->stSigTime, 0, sizeof(SYSTEMTIME));
}
}
}
}
for(DWORD dwAttr = 0; dwAttr < pSignerInfo->UnauthAttrs.cAttr; dwAttr++)
{
if(strcmp(pSignerInfo->UnauthAttrs.rgAttr[dwAttr].pszObjId, szOID_RSA_counterSign) == 0)
{
if(NCryptDecodeObject(PKCS7_SIGNER_INFO, &pSignerInfo->UnauthAttrs.rgAttr[dwAttr].rgValue[0], dwCounterSignerInfo) && (dwCounterSignerInfo != 0))
pCounterSignerInfo = (PCMSG_SIGNER_INFO)NHeapAlloc(dwCounterSignerInfo);
if((pCounterSignerInfo != NULL) && !NCryptDecodeObject(PKCS7_SIGNER_INFO, &pSignerInfo->UnauthAttrs.rgAttr[dwAttr].rgValue[0], dwCounterSignerInfo, pCounterSignerInfo))
pCounterSignerInfo = (PCMSG_SIGNER_INFO)NHeapFree(pCounterSignerInfo);
break;
}
}
if(pCounterSignerInfo != NULL)
{
pSigInfo->bHasSigTime = WGetSignTimestamp(&pCounterSignerInfo->AuthAttrs, pSigInfo->stSigTime, szOID_RSA_signingTime);
if(!pSigInfo->bHasSigTime)
memset(&pSigInfo->stSigTime, 0, sizeof(SYSTEMTIME));
}
}
if(pSignerInfo != NULL)
NHeapFree(pSignerInfo);
if(pCounterSignerInfo != NULL)
NHeapFree(pCounterSignerInfo);
if(hStore != NULL)
CertCloseStore(hStore, 0);
if(hMsg != NULL)
CryptMsgClose(hMsg);
return TRUE;
}
//////////////////////////////////////////////////////////////////////////
BOOL NVerifyFileSignature(LPCTSTR lpszFileName, NSIGINFO *pSigInfo, HANDLE hHandle)
{
if(pSigInfo != NULL)
memset(pSigInfo, 0, sizeof(NSIGINFO));
if(lpszFileName == NULL)
return FALSE;
if((lpszFileName[0] != 0) && (_tcsnicmp(lpszFileName, _T("\\??\\"), 4) == 0))
lpszFileName += 4;
if(lpszFileName[0] == 0)
return FALSE;
LPWSTR lpwszFileName = NConvertT2W(lpszFileName);
if(lpwszFileName == NULL)
return FALSE;
BOOL bOK = FALSE;
__try
{ // be very careful...
WINTRUST_FILE_INFO wtFileInfo;
memset(&wtFileInfo, 0, sizeof(WINTRUST_FILE_INFO));
wtFileInfo.cbStruct = sizeof(WINTRUST_FILE_INFO);
wtFileInfo.pcwszFilePath = lpwszFileName;
if(hHandle != INVALID_HANDLE_VALUE)
wtFileInfo.hFile = hHandle;
WINTRUST_DATA wtData;
memset(&wtData, 0, sizeof(WINTRUST_DATA));
wtData.cbStruct = sizeof(WINTRUST_DATA);
wtData.dwUIChoice = WTD_UI_NONE;
wtData.fdwRevocationChecks = WTD_REVOKE_WHOLECHAIN;
wtData.dwUnionChoice = WTD_CHOICE_FILE;
wtData.pFile = &wtFileInfo;
if(NVerifyFileSignatureWorker(lpwszFileName, wtData, pSigInfo))
bOK = TRUE;
}
__except(EXCEPTION_EXECUTE_HANDLER)
{
if(pSigInfo != NULL)
{
if(pSigInfo->lpszPublisher)
pSigInfo->lpszPublisher = (LPTSTR)NHeapFree(pSigInfo->lpszPublisher);
if(pSigInfo->lpszAuthority)
pSigInfo->lpszAuthority = (LPTSTR)NHeapFree(pSigInfo->lpszAuthority);
if(pSigInfo->lpszProgramName)
pSigInfo->lpszProgramName = (LPTSTR)NHeapFree(pSigInfo->lpszProgramName);
if(pSigInfo->lpszPublisherLink)
pSigInfo->lpszPublisherLink = (LPTSTR)NHeapFree(pSigInfo->lpszPublisherLink);
if(pSigInfo->lpszMoreInfoLink)
pSigInfo->lpszMoreInfoLink = (LPTSTR)NHeapFree(pSigInfo->lpszMoreInfoLink);
if(pSigInfo->lpszSignature)
pSigInfo->lpszSignature = (LPTSTR)NHeapFree(pSigInfo->lpszSignature);
if(pSigInfo->lpszSerial)
pSigInfo->lpszSerial = (LPTSTR)NHeapFree(pSigInfo->lpszSerial);
}
bOK = FALSE;
}
NHeapFree(lpwszFileName);
return bOK;
}
//////////////////////////////////////////////////////////////////////////
BOOL NCheckFileCertificates(HANDLE hFile, VOID (*pCallback)(PCCERT_CONTEXT, LPVOID), PVOID pParam)
{
DWORD dwCerts = 0;
if(!ImageEnumerateCertificates(hFile, CERT_SECTION_TYPE_ANY, &dwCerts, NULL, 0))
return FALSE;
for(DWORD dwCount = 0; dwCount < dwCerts; dwCount++)
{
WIN_CERTIFICATE wcHdr;
memset(&wcHdr, 0, sizeof(WIN_CERTIFICATE));
wcHdr.dwLength = 0;
wcHdr.wRevision = WIN_CERT_REVISION_1_0;
if(!ImageGetCertificateHeader(hFile, dwCount, &wcHdr))
return FALSE;
DWORD dwLen = sizeof(WIN_CERTIFICATE) + wcHdr.dwLength;
WIN_CERTIFICATE *pWinCert = (WIN_CERTIFICATE *)NHeapAlloc(dwLen);
if(pWinCert == NULL)
return FALSE;
if(!ImageGetCertificateData(hFile, dwCount, pWinCert, &dwLen))
{ // problem getting certificate, return failure
NHeapFree(pWinCert);
return FALSE;
}
// extract the PKCS7 signed data
CRYPT_VERIFY_MESSAGE_PARA cvmp;
memset(&cvmp, 0, sizeof(CRYPT_VERIFY_MESSAGE_PARA));
cvmp.cbSize = sizeof(CRYPT_VERIFY_MESSAGE_PARA);
cvmp.dwMsgAndCertEncodingType = SIG_ENCODING;
PCCERT_CONTEXT pCertContext = NULL;
if(!CryptVerifyMessageSignature(&cvmp, dwCount, pWinCert->bCertificate, pWinCert->dwLength, NULL, NULL, &pCertContext))
{
NHeapFree(pWinCert);
return FALSE;
}
// Now, pass this context on to our callback function (if any)
if(pCallback != NULL)
pCallback(pCertContext, pParam);
if(!CertFreeCertificateContext(pCertContext))
{
NHeapFree(pWinCert);
return FALSE;
}
NHeapFree(pWinCert);
}
return TRUE;
}
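A minimal, hypothetical usage example (my sketch, not part of the original files) - note that NVerifyFileSignature() runs WinVerifyTrust() internally, so it does more than the question strictly asks for:
// Hypothetical driver program for the code above; error handling kept minimal.
#include <windows.h>
#include <tchar.h>
#include <stdio.h>
#include "NAuthenticode.h"

int _tmain(int argc, TCHAR* argv[])
{
    if(argc < 2)
        return 1;
    NSIGINFO si;
    if(NVerifyFileSignature(argv[1], &si))
    {
        if(si.lpszPublisher)
            _tprintf(_T("Publisher: %s\n"), si.lpszPublisher);
        if(si.lpszAuthority)
            _tprintf(_T("Issuer: %s\n"), si.lpszAuthority);
        if(si.lpszPublisherLink)
            _tprintf(_T("Publisher link: %s\n"), si.lpszPublisherLink);
        if(si.lpszSerial)
            _tprintf(_T("Serial: %s\n"), si.lpszSerial);
    }
    NCertFreeSigInfo(&si); // release the strings the helper allocated
    return 0;
}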
Microsoft provides a way to do it in this support link: How To Get Information from Authenticode Signed Executables
You can use the WinVerifyTrust() API to verify an Authenticode signed executable.
Although a signature is verified, a program may also have to do the following:
Determine the details of the certificate that signed the executable.
Determine the date and time that the file was time stamped.
Retrieve the URL link associated with the file.
Retrieve the timestamp certificate.
This article demonstrates how to use the CryptQueryObject() API to retrieve detailed information from an Authenticode signed executable.
I'm trying to recognize two different objects using two different cascade classifiers. I modified the face detection samples, but I have a problem: when I try to recognize the object from object1.xml it draws both rectangles, and when I try to recognize the object from object2.xml it doesn't draw anything...
Where is the problem?
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
// Load native library after(!) OpenCV initialization
System.loadLibrary("detection_based_tracker");
try {
// load cascade file from application resources
InputStream is = getResources().openRawResource(R.raw.object1);
File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);
mCascadeFile = new File(cascadeDir, "object1.xml");
FileOutputStream os = new FileOutputStream(mCascadeFile);
/* By me */
InputStream is2 = getResources().openRawResource(R.raw.object2);
mCascadeFile2 = new File(cascadeDir, "object2.xml");
FileOutputStream os2 = new FileOutputStream(mCascadeFile2);
byte[] buffer2 = new byte[4096];
int bytesRead2;
while ((bytesRead2 = is2.read(buffer2)) != -1) {
os2.write(buffer2, 0, bytesRead2);
}
is2.close();
os2.close();
/* ............... */
byte[] buffer = new byte[4096];
int bytesRead;
while ((bytesRead = is.read(buffer)) != -1) {
os.write(buffer, 0, bytesRead);
}
is.close();
os.close();
mJavaDetector = new CascadeClassifier(mCascadeFile.getAbsolutePath());
/* By me */
mJavaDetector2 = new CascadeClassifier(mCascadeFile2.getAbsolutePath());
/* ........ */
if (mJavaDetector.empty() || mJavaDetector2.empty()) {
Log.e(TAG, "Failed to load cascade classifier");
mJavaDetector = null;
/* By me */
mJavaDetector2 = null;
} else
Log.i(TAG, "Loaded cascade classifier from " + mCascadeFile.getAbsolutePath() + mCascadeFile2.getAbsolutePath());
mNativeDetector = new DetectionBasedTracker(mCascadeFile.getAbsolutePath(), 0);
/**/
mNativeDetector2 = new DetectionBasedTracker(mCascadeFile2.getAbsolutePath(), 0);
cascadeDir.delete();
} catch (IOException e) {
e.printStackTrace();
Log.e(TAG, "Failed to load cascade. Exception thrown: " + e);
}
mOpenCvCameraView.enableView();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
mRgba = inputFrame.rgba();
mGray = inputFrame.gray();
if (mAbsoluteFaceSize == 0) {
int height = mGray.rows();
if (Math.round(height * mRelativeFaceSize) > 0) {
mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
}
mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);
/* ...... */
mNativeDetector2.setMinFaceSize(mAbsoluteFaceSize);
}
MatOfRect faces = new MatOfRect();
/* ....... */
MatOfRect faces2 = new MatOfRect();
if (mDetectorType == JAVA_DETECTOR) {
if (mJavaDetector != null && mJavaDetector2 != null) {
mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2, 2, new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
/* ......... */
mJavaDetector2.detectMultiScale(mGray, faces2, 1.1, 2, 2, new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
}
}
else if (mDetectorType == NATIVE_DETECTOR) {
if (mNativeDetector != null && mNativeDetector2 != null){
mNativeDetector.detect(mGray, faces);
/* ......... */
mNativeDetector2.detect(mGray, faces2);
}
}
else {
Log.e(TAG, "Detection method is not selected!");
}
Rect[] facesArray = faces.toArray();
for (int i = 0; i < facesArray.length; i++){
Core.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);
Core.putText(mRgba, "Obj1", facesArray[i].tl(), 1, 2, new Scalar(255, 0, 0, 255), 3);
}
/* ...... */
Rect[] facesArray2 = faces2.toArray();
for (int i = 0; i < facesArray2.length; i++){
Core.rectangle(mRgba, facesArray2[i].tl(), facesArray2[i].br(), FACE_RECT_COLOR, 3);
Core.putText(mRgba, "Obj2", facesArray2[i].tl(), 1, 2, new Scalar(255, 0, 0, 255), 3);
}
return mRgba;
}
Or is there a way to detect different objects from the same .xml file?
SOLVED
I updated my code as follows (the main changes: the second cascade now gets its own directory, cascadeDir2, and each file is fully written and closed before its classifier is created):
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
// Load native library after(!) OpenCV initialization
System.loadLibrary("detection_based_tracker");
try {
// load cascade file from application resources
InputStream is = getResources().openRawResource(R.raw.lbpcascade_frontalface);
File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);
mCascadeFile = new File(cascadeDir, "lbpcascade_frontalface.xml");
FileOutputStream os = new FileOutputStream(mCascadeFile);
byte[] buffer = new byte[4096];
int bytesRead;
while ((bytesRead = is.read(buffer)) != -1) {
os.write(buffer, 0, bytesRead);
}
is.close();
os.close();
/* By me */
InputStream is2 = getResources().openRawResource(R.raw.haarcascade_frontalface_alt);
File cascadeDir2 = getDir("cascade2", Context.MODE_PRIVATE);
mCascadeFile2 = new File(cascadeDir2, "haarcascade_frontalface_alt.xml");
FileOutputStream os2 = new FileOutputStream(mCascadeFile2);
byte[] buffer2 = new byte[4096];
int bytesRead2;
while ((bytesRead2 = is2.read(buffer2)) != -1) {
os2.write(buffer2, 0, bytesRead2);
}
is2.close();
os2.close();
/* ............... */
mJavaDetector = new CascadeClassifier(mCascadeFile.getAbsolutePath());
if (mJavaDetector.empty()) {
Log.e(TAG, "Failed to load cascade classifier");
mJavaDetector = null;
} else
Log.i(TAG, "Loaded cascade classifier from " + mCascadeFile.getAbsolutePath());
/* By me */
mJavaDetector2 = new CascadeClassifier(mCascadeFile2.getAbsolutePath());
/* ........ */
if (mJavaDetector2.empty()) {
Log.e(TAG, "Failed to load cascade classifier" + mCascadeFile.getAbsolutePath());
mJavaDetector2 = null;
} else
Log.i(TAG, "Loaded cascade classifier from " + mCascadeFile.getAbsolutePath() + mCascadeFile2.getAbsolutePath());
mNativeDetector = new DetectionBasedTracker(mCascadeFile.getAbsolutePath(), 0);
/**/
mNativeDetector2 = new DetectionBasedTracker(mCascadeFile2.getAbsolutePath(), 0);
cascadeDir.delete();
cascadeDir2.delete();
} catch (IOException e) {
e.printStackTrace();
Log.e(TAG, "Failed to load cascade. Exception thrown: " + e);
}
mOpenCvCameraView.enableView();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
mRgba = inputFrame.rgba();
mGray = inputFrame.gray();
if (mAbsoluteFaceSize == 0) {
int height = mGray.rows();
if (Math.round(height * mRelativeFaceSize) > 0) {
mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
}
mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);
/* ...... */
mNativeDetector2.setMinFaceSize(mAbsoluteFaceSize);
}
MatOfRect faces = new MatOfRect();
/* ....... */
MatOfRect faces2 = new MatOfRect();
if (mDetectorType == JAVA_DETECTOR) {
if (mJavaDetector != null && mJavaDetector2 != null) {
mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2, 2, new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
/* ......... */
mJavaDetector2.detectMultiScale(mGray, faces2, 1.1, 2, 2, new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
}
}
else if (mDetectorType == NATIVE_DETECTOR) {
if (mNativeDetector != null && mNativeDetector2 != null){
mNativeDetector.detect(mGray, faces);
/* ......... */
mNativeDetector2.detect(mGray, faces2);
}
}
else {
Log.e(TAG, "Detection method is not selected!");
}
Rect[] facesArray = faces.toArray();
for (int i = 0; i < facesArray.length; i++){
Core.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);
Core.putText(mRgba, "Pista Ciclabile", facesArray[i].tl(), 1, 2, new Scalar(255, 0, 0, 255), 3);
}
/* ...... */
Rect[] facesArray2 = faces2.toArray();
for (int j = 0; j < facesArray2.length; j++){
Core.rectangle(mRgba, facesArray2[j].tl(), facesArray2[j].br(), FACE_RECT_COLOR, 3);
Core.putText(mRgba, "Viso", facesArray2[j].tl(), 1, 2, new Scalar(255, 0, 0, 255), 3);
}
return mRgba;
}
And this is the result: http://i43.tinypic.com/15eqvl.png