DirectX11 drawing simple triangle

I tried to draw a simple triangle using DXUT11, but I can't see the triangle in my window.
I've looked hard but still can't find where the problem is.
I checked every value returned by the D3D functions, and they all returned S_OK.
My window shows up and the background is the color I set when clearing it,
so it seems the D3D device was created correctly and the back buffer is being presented to the window,
but where is the yellow triangle?
My code is as follows (to simplify the problem, I have removed all the S_OK checks):
#define WIN32_LEAN_AND_MEAN
#include <Windows.h>
#include <d3d11.h>
#define DXUT_AUTOLIB
#include <DXUT.h>
#include <DXUTres.h>
#include <D3DX11.h>
#include <xnamath.h>
using namespace std;
#pragma comment(lib,"d3d11.lib")
#pragma comment(lib,"dxut.lib")
ID3D10Blob * g_pVertexShaderBuffer = 0;
ID3D10Blob * g_pPixelShaderBuffer = 0;
ID3D11VertexShader * g_pVertexShader = 0;
ID3D11PixelShader * g_pPixelShader = 0;
ID3D11InputLayout * g_pInputLayout = 0;
ID3D11Buffer * g_pVertexBuffer = 0;
ID3D11Buffer * g_pIndexBuffer = 0;
struct Vertex
{
XMFLOAT3 pos;
};
bool CALLBACK IsD3D11DeviceAcceptable( const CD3D11EnumAdapterInfo *AdapterInfo, UINT Output, const CD3D11EnumDeviceInfo *DeviceInfo,
DXGI_FORMAT BackBufferFormat, bool bWindowed, void* pUserContext )
{
return true;
}
HRESULT CALLBACK onDeviceCreated( ID3D11Device* pd3dDevice, const DXGI_SURFACE_DESC* pBackBufferSurfaceDesc, void* pUserContext )
{
ID3D11DeviceContext * pd3dImmediateContext = DXUTGetD3D11DeviceContext();
ID3D10Blob * err = NULL;
D3DX11CompileFromFile(L"Tutorial03.fx",0,0,"VS","vs_4_0",D3DCOMPILE_DEBUG|D3DCOMPILE_SKIP_OPTIMIZATION,0,0,&g_pVertexShaderBuffer,&err,0);
D3DX11CompileFromFile(L"Tutorial03.fx",0,0,"PS","ps_4_0",D3DCOMPILE_DEBUG|D3DCOMPILE_SKIP_OPTIMIZATION,0,0,&g_pPixelShaderBuffer,&err,0);
HRESULT res = 0;
res = pd3dDevice->CreateVertexShader(g_pVertexShaderBuffer->GetBufferPointer(),g_pVertexShaderBuffer->GetBufferSize(),NULL,&g_pVertexShader);
res = pd3dDevice->CreatePixelShader(g_pPixelShaderBuffer->GetBufferPointer(),g_pPixelShaderBuffer->GetBufferSize(),NULL,&g_pPixelShader);
pd3dImmediateContext->VSSetShader(g_pVertexShader,0,0);
pd3dImmediateContext->PSSetShader(g_pPixelShader,0,0);
D3D11_INPUT_ELEMENT_DESC inputElement[]=
{
{"POSITION",0,DXGI_FORMAT_R32G32B32_FLOAT,0,0, D3D11_INPUT_PER_VERTEX_DATA,0}
};
int numElements = sizeof(inputElement)/sizeof(inputElement[0]);
pd3dDevice->CreateInputLayout(inputElement ,numElements,g_pVertexShaderBuffer->GetBufferPointer(),g_pVertexShaderBuffer->GetBufferSize(),&g_pInputLayout);
SAFE_RELEASE(g_pVertexShaderBuffer);
SAFE_RELEASE(g_pPixelShaderBuffer);
Vertex vertexData[3];
vertexData[0].pos=XMFLOAT3(0.0f, 0.5f, 0.5f);
vertexData[1].pos=XMFLOAT3(0.5f, -0.5f, 0.5f);
vertexData[2].pos=XMFLOAT3( -0.5f, -0.5f, 0.5f );
D3D11_BUFFER_DESC desc;
ZeroMemory(&desc,sizeof(desc));
desc.BindFlags = D3D11_BIND_VERTEX_BUFFER;
desc.ByteWidth = sizeof(Vertex) * 3;
desc.CPUAccessFlags = 0;
desc.MiscFlags = 0;
desc.Usage = D3D11_USAGE_DEFAULT;
D3D11_SUBRESOURCE_DATA dataPointer;
ZeroMemory(&dataPointer,sizeof(dataPointer));
dataPointer.pSysMem = vertexData;
pd3dDevice->CreateBuffer(&desc,&dataPointer,&g_pVertexBuffer);
UINT stride = sizeof(Vertex);
UINT offset = 0;
pd3dImmediateContext->IASetVertexBuffers(0,1,&g_pVertexBuffer,&stride,&offset);
pd3dImmediateContext->IASetInputLayout(g_pInputLayout);
pd3dImmediateContext->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
return 0;
}
void CALLBACK onRender( ID3D11Device* pd3dDevice, ID3D11DeviceContext* pd3dImmediateContext, double fTime, float fElapsedTime, void* pUserContext )
{
float ClearColor[4] = { 0.0f, 0.25f, 0.25f, 0.55f };
ID3D11RenderTargetView* pRTV = DXUTGetD3D11RenderTargetView();
pd3dImmediateContext->ClearRenderTargetView( pRTV, ClearColor );
pd3dImmediateContext->Draw(3,0);
}
void CALLBACK onDestroyed( void* pUserContext )
{
SAFE_RELEASE(g_pIndexBuffer);
SAFE_RELEASE(g_pVertexBuffer);
SAFE_RELEASE(g_pInputLayout);
SAFE_RELEASE(g_pPixelShader);
SAFE_RELEASE(g_pVertexShader);
}
int WINAPI WinMain(HINSTANCE hInstance,HINSTANCE hPrevInstance,LPSTR lpCmdLine,int nShowCmd)
{
DXUTSetCallbackD3D11DeviceAcceptable( IsD3D11DeviceAcceptable );
DXUTSetCallbackD3D11DeviceCreated(onDeviceCreated);
DXUTSetCallbackD3D11FrameRender(onRender);
DXUTSetCallbackD3D11DeviceDestroyed(onDestroyed);
DXUTInit();
DXUTCreateWindow(L"SMD",hInstance);
DXUTCreateDevice(D3D_FEATURE_LEVEL_11_0,true,300,300);
DXUTMainLoop();
return 0;
}
I hope someone has the patience to read through all this code.
And my shader code:
float4 VS( float4 Pos : POSITION ) : SV_POSITION
{
return Pos;
}
float4 PS( float4 Pos : SV_POSITION ) : SV_Target
{
return float4( 1.0f, 1.0f, 0.0f, 1.0f ); // Yellow, with Alpha = 1
}
The program runs fine on its own, but when I tried to trace it with PIX, it crashed and returned the following information:
.PRE: <this=0x06736580>ID3D11Device::CheckMultisampleQualityLevels(DXGI_FORMAT_R8G8B8A8_UNORM_SRGB, 24, 0x008DE4FC)
Frame 000001 ........POST: <S_OK><this=0x06736580> ID3D11Device::CheckMultisampleQualityLevels(DXGI_FORMAT_R8G8B8A8_UNORM_SRGB, 24, 0x008DE4FC)
Frame 000001 ........PRE: <this=0x06736580>ID3D11Device::CheckMultisampleQualityLevels(DXGI_FORMAT_R8G8B8A8_UNORM_SRGB, 25, 0x008DE4FC)
Frame 000001 ........POST: <S_OK><this=0x06736580> ID3D11Device::CheckMultisampleQualityLevels(DXGI_FORMAT_R8G8B8A8_UNORM_SRGB, 25, 0x008DE4FC)
Frame 000001 ........PRE: <this=0x06736580>ID3D11Device::CheckMultisampleQualityLevels(DXGI_FORMAT_R8G8B8A8_UNORM_SRGB, 26, 0x008DE4FC)
Frame 000001 ........POST: <S_OK><this=0x06736580> ID3D11Device::CheckMultisampleQualityLevels(DXGI_FORMAT_R8G8B8A8_UNORM_SRGB, 26, 0x008DE4FC)
Frame 000001 ........PRE: <this=0x06736580>ID3D11Device::CheckMultisampleQualityLevels(DXGI_FORMAT_R8G8B8A8_UNORM_SRGB, 27, 0x008DE4FC)
Frame 000001 ........POST: <S_OK><this=0x06736580> ID3D11Device::CheckMultisampleQualityLevels(DXGI_FORMAT_R8G8B8A8_UNORM_SRGB, 27, 0x008DE4FC)
Frame 000001 ........PRE: <this=0x06736580>ID3D11Device::CheckMultisampleQualityLevels(DXGI_FORMAT_R8G8B8A8_UNORM_SRGB, 28, 0x008DE4FC)
Frame 000001 ........POST: <S_OK><this=0x06736580> ID3D11Device::CheckMultisampleQualityLevels(DXGI_FORMAT_R8G8B8A8_UNORM_SRGB, 28, 0x008DE4FC)
Frame 000001 ........PRE: <this=0x06736580>ID3D11Device::CheckMultisampleQualityLevels(DXGI_FORMAT_R8G8B8A8_UNORM_SRGB, 29, 0x008DE4FC)
Frame 000001 ........POST: <S_OK><this=0x06736580> ID3D11Device::CheckMultisampleQualityLevels(DXGI_FORMAT_R8G8B8A8_UNORM_SRGB, 29, 0x008DE4FC)
Frame 000001 ........PRE: <this=0x06736580>ID3D11Device::CheckMultisampleQualityLevels(DXGI_FORMAT_R8G8B8A8_UNORM_SRGB, 30, 0x008DE4FC)
Frame 000001 ........POST: <S_OK><this=0x06736580> ID3D11Device::CheckMultisampleQualityLevels(DXGI_FORMAT_R8G8B8A8_UNORM_SRGB, 30, 0x008DE4FC)
Frame 000001 ........PRE: <this=0x06736580>ID3D11Device::CheckMultisampleQualityLevels(DXGI_FORMAT_R8G8B8A8_UNORM_SRGB, 31, 0x008DE4FC)
Frame 000001 ........POST: <S_OK><this=0x06736580> ID3D11Device::CheckMultisampleQualityLevels(DXGI_FORMAT_R8G8B8A8_UNORM_SRGB, 31, 0x008DE4FC)
Frame 000001 ........PRE: <this=0x06736580>ID3D11Device::CheckMultisampleQualityLevels(DXGI_FORMAT_R8G8B8A8_UNORM_SRGB, 32, 0x008DE4FC)
Frame 000001 ........POST: <S_OK><this=0x06736580> ID3D11Device::CheckMultisampleQualityLevels(DXGI_FORMAT_R8G8B8A8_UNORM_SRGB, 32, 0x008DE4FC)
Frame 000001 ........PRE: <this=0x06736580>ID3D11Device::Release()
Frame 000001 ........POST: <1><this=0x06736580> ID3D11Device::Release()
Frame 000001 ........PRE: <this=0x067ffa30>ID3D11DeviceContext::Release()
Frame 000001 ............PRE: RemoveObject(D3D11 Device Context, 0x067FFA30, 0x00C5F11C)
Frame 000001 ............POST: <> RemoveObject(D3D11 Device Context, 0x067FFA30, 0x00C5F11C)
Frame 000001 ............PRE: RemoveObject(D3D11 Device, 0x06736580, 0x00C3F384)
Frame 000001 ............POST: <> RemoveObject(D3D11 Device, 0x06736580, 0x00C3F384)
Frame 000001 ............PRE: RemoveObject(DXGI Device, 0x0672AC80, 0x00C3F07C)
Frame 000001 ............POST: <> RemoveObject(DXGI Device, 0x0672AC80, 0x00C3F07C)
Frame 000001 ............PRE: RemoveObject(DXGI Adapter, 0x06736B40, 0x07651C68)
Frame 000001 ............POST: <> RemoveObject(DXGI Adapter, 0x06736B40, 0x07651C68)
Frame 000001 ............PRE: RemoveObject(DXGI Factory, 0x06729D08, 0x00C31828)
Frame 000001 ............POST: <> RemoveObject(DXGI Factory, 0x06729D08, 0x00C31828)
Frame 000001 ........POST: <0><this=0x067ffa30> ID3D11DeviceContext::Release()
Frame 000001 ........PRE: <this=0x06775320>IDXGIFactory::EnumAdapters(2, 0x008DEA44)
Frame 000001 ........POST: <DXGI_ERROR_NOT_FOUND><this=0x06775320> IDXGIFactory::EnumAdapters(2, 0x008DEA44)
Frame 000001 ........PRE: D3DPERF_EndEvent()
Frame 000001 ........POST: <-1> D3DPERF_EndEvent()
Frame 000001 ........PRE: <this=0x0678df30>IDXGIOutput::GetDesc(0x008DEB20)
Frame 000001 ........POST: <S_OK><this=0x0678df30> IDXGIOutput::GetDesc(0x008DEB20)
Frame 000001 ........PRE: <this=0x0678df30>IDXGIOutput::GetDesc(0x008DEB20)
Frame 000001 ........POST: <S_OK><this=0x0678df30> IDXGIOutput::GetDesc(0x008DEB20)
Frame 000001 ........PRE: <this=0x0678df30>IDXGIOutput::GetDesc(0x008DEB20)
Frame 000001 ........POST: <S_OK><this=0x0678df30> IDXGIOutput::GetDesc(0x008DEB20)
Frame 000001 ........PRE: <this=0x0678df30>IDXGIOutput::GetDesc(0x008DEB20)
Frame 000001 ........POST: <S_OK><this=0x0678df30> IDXGIOutput::GetDesc(0x008DEB20)
Frame 000001 ........PRE: <this=0x0678df30>IDXGIOutput::GetDesc(0x008DEB20)
Frame 000001 ........POST: <S_OK><this=0x0678df30> IDXGIOutput::GetDesc(0x008DEB20)
Frame 000001 ........PRE: <this=0x0678df30>IDXGIOutput::GetDesc(0x008DEB20)
Frame 000001 ........POST: <S_OK><this=0x0678df30> IDXGIOutput::GetDesc(0x008DEB20)
Frame 000001 ........PRE: <this=0x06775320>IDXGIFactory::MakeWindowAssociation(0x005305E0, 0)
Frame 000001 ........POST: <S_OK><this=0x06775320> IDXGIFactory::MakeWindowAssociation(0x005305E0, 0)
Frame 000001 ........PRE: <this=0x06775320>IDXGIFactory::EnumAdapters1(0, 0x008DEE18)
Frame 000001 ............PRE: AddObject(DXGI Adapter, 0x067368D8, 0x07651C68)
Frame 000001 ............POST: <TRUE> AddObject(DXGI Adapter, 0x067368D8, 0x07651C68)
Frame 000001 ........POST: <S_OK><this=0x06775320> IDXGIFactory::EnumAdapters1(0, 0x008DEE18)
Frame 000001 ........PRE: D3D11CreateDevice(0x067368D8, D3D_DRIVER_TYPE_UNKNOWN, NULL, 2, 0x00A21AA4, 1, 7, 0x008DEE60, 0x008DEE48, 0x008DEE54)
Frame 000001 ............PRE: AddObject(D3D11 Device, 0x0672AB00, 0x00C3D4D4)
Frame 000001 ............POST: <TRUE> AddObject(D3D11 Device, 0x0672AB00, 0x00C3D4D4)
Frame 000001 ............PRE: AddObject(DXGI Device, 0x0672A6E0, 0x00C3D4D0)
Frame 000001 ............POST: <TRUE> AddObject(DXGI Device, 0x0672A6E0, 0x00C3D4D0)
Frame 000001 ............PRE: AddObject(D3D11 Device Context, 0x06807880, 0x075C6800)
Frame 000001 ............POST: <TRUE> AddObject(D3D11 Device Context, 0x06807880, 0x075C6800)
Frame 000001 ........POST: <S_OK> D3D11CreateDevice(0x067368D8, D3D_DRIVER_TYPE_UNKNOWN, NULL, 2, 0x00A21AA4, 1, 7, 0x008DEE60, 0x008DEE48, 0x008DEE54)
Frame 000001 ........PRE: <this=0x0672ab00>ID3D11Device::QueryInterface(IID_IDXGIDevice1, 0x008DEE00)
Frame 000001 ........POST: <S_OK><this=0x0672ab00> ID3D11Device::QueryInterface(IID_IDXGIDevice1, 0x008DEE00)
Frame 000001 ........PRE: <this=0x0672a6e0>IDXGIDevice::Release()
Frame 000001 ........POST: <2><this=0x0672a6e0> IDXGIDevice::Release()
Frame 000001 ........PRE: <this=0x0672ab00>ID3D11Device::CreateRasterizerState(0x008DEDC4, 0x008DEDB8)
Frame 000001 ............PRE: AddObject(D3D11 Rasterizer State, 0x0672B350, 0x00C14DD4)
Frame 000001 ............POST: <TRUE> AddObject(D3D11 Rasterizer State, 0x0672B350, 0x00C14DD4)
Frame 000001 ........POST: <S_OK><this=0x0672ab00> ID3D11Device::CreateRasterizerState(0x008DEDC4, 0x008DEDB8)
Frame 000001 ........PRE: <this=0x0672b350>ID3D11RasterizerState::SetPrivateData({0x429b8c22, 0x9188, 0x4b0c, 0x87, 0x42, 0xac, 0xb0, 0xbf, 0x85, 0xc2, 0x00}, 12, 0x01114E74)
Frame 000001 ........POST: <S_OK><this=0x0672b350> ID3D11RasterizerState::SetPrivateData({0x429b8c22, 0x9188, 0x4b0c, 0x87, 0x42, 0xac, 0xb0, 0xbf, 0x85, 0xc2, 0x00}, 12, 0x01114E74)
Frame 000001 ........PRE: <this=0x06807880>ID3D11DeviceContext::RSSetState(0x0672B350)
Frame 000001 ........POST: <><this=0x06807880> ID3D11DeviceContext::RSSetState(0x0672B350)
Frame 000001 ........PRE: <this=0x067368d8>IDXGIAdapter::EnumOutputs(0, 0x008DED94)
Frame 000001 ............PRE: AddObject(DXGI Output, 0x06736A90, 0x0764B090)
Frame 000001 ............POST: <TRUE> AddObject(DXGI Output, 0x06736A90, 0x0764B090)
Frame 000001 ........POST: <S_OK><this=0x067368d8> IDXGIAdapter::EnumOutputs(0, 0x008DED94)
Frame 000001 ........PRE: <this=0x06736a90>IDXGIOutput::Release()
Frame 000001 ............PRE: RemoveObject(DXGI Output, 0x06736A90, 0x0764B090)
Frame 000001 ............POST: <> RemoveObject(DXGI Output, 0x06736A90, 0x0764B090)
Frame 000001 ........POST: <0><this=0x06736a90> IDXGIOutput::Release()
Frame 000001 ........PRE: <this=0x067368d8>IDXGIAdapter::EnumOutputs(1, 0x008DED94)
Frame 000001 ........POST: <DXGI_ERROR_NOT_FOUND><this=0x067368d8> IDXGIAdapter::EnumOutputs(1, 0x008DED94)
Frame 000001 ........PRE: <this=0x067368d8>IDXGIAdapter::EnumOutputs(0, 0x00A21BD0)
Frame 000001 ............PRE: AddObject(DXGI Output, 0x06736E00, 0x0764B090)
Frame 000001 ............POST: <TRUE> AddObject(DXGI Output, 0x06736E00, 0x0764B090)
Frame 000001 ........POST: <S_OK><this=0x067368d8> IDXGIAdapter::EnumOutputs(0, 0x00A21BD0)
Frame 000001 ........PRE: <this=0x06775320>IDXGIFactory::CreateSwapChain(0x0672AB00, 0x00A21A54, 0x008DEE3C)
Frame 000001 ............PRE: AddObject(DXGI Swap Chain, 0x0672B3C8, 0x00C3F048)
Frame 000001 ............POST: <TRUE> AddObject(DXGI Swap Chain, 0x0672B3C8, 0x00C3F048)
Frame 000001 ............PRE: AddObject(DXGI Surface, 0x0672ACE0, 0x074C4D80)
Frame 000001 ............POST: <TRUE> AddObject(DXGI Surface, 0x0672ACE0, 0x074C4D80)
Frame 000001 ............PRE: AddObject(D3D10 Texture2D, 0x067755C0, 0x074C4D28)
Frame 000001 ............POST: <TRUE> AddObject(D3D10 Texture2D, 0x067755C0, 0x074C4D28)
Frame 000001 ........POST: <S_OK><this=0x06775320> IDXGIFactory::CreateSwapChain(0x0672AB00, 0x00A21A54, 0x008DEE3C)
Frame 000001 ........PRE: <this=0x0672b3c8>IDXGISwapChain::GetBuffer(0, IID_ID3D11Texture2D, 0x008DEC2C)
Frame 000001 ........POST: <S_OK><this=0x0672b3c8> IDXGISwapChain::GetBuffer(0, IID_ID3D11Texture2D, 0x008DEC2C)
Frame 000001 ........PRE: <this=0x067755c0>ID3D10Texture2D::Map(9300960, Unknown D3D10_MAP, 9301104, NULL)
D3D11 CORRUPTION: ID3D10Texture2D::Map: Fourth parameter is corrupt or NULL. [ MISCELLANEOUS CORRUPTION #16: CORRUPTED_PARAMETER4]
An unhandled exception occurred.

Problem solved, thanks everyone!
I cleared the Z buffer and stencil buffer in the onRender function, and the triangle finally appeared.
Tutorial03 from the June 2010 DirectX SDK doesn't clear the Z buffer in its render function, yet it displays the triangle without problems. The difference is likely that the raw tutorial never creates or binds a depth stencil view, while DXUT creates and binds one by default, so stale depth contents can reject every fragment unless the buffer is cleared each frame.
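For reference, a minimal sketch of the corrected onRender (DXUTGetD3D11DepthStencilView() returns the depth stencil view that DXUT creates by default):
void CALLBACK onRender( ID3D11Device* pd3dDevice, ID3D11DeviceContext* pd3dImmediateContext, double fTime, float fElapsedTime, void* pUserContext )
{
    float ClearColor[4] = { 0.0f, 0.25f, 0.25f, 0.55f };
    ID3D11RenderTargetView* pRTV = DXUTGetD3D11RenderTargetView();
    pd3dImmediateContext->ClearRenderTargetView( pRTV, ClearColor );
    // the missing piece: clear depth (and stencil) every frame, otherwise
    // stale depth values can reject every fragment of the triangle
    ID3D11DepthStencilView* pDSV = DXUTGetD3D11DepthStencilView();
    pd3dImmediateContext->ClearDepthStencilView( pDSV, D3D11_CLEAR_DEPTH | D3D11_CLEAR_STENCIL, 1.0f, 0 );
    pd3dImmediateContext->Draw(3,0);
}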

Related

Why is this set of Fs rotating in this locus?

In this code, I am not able to understand why the rotation does not follow a circular locus. There may be some basic logic behind this, but I can't understand why the locus is so random.
I am rotating the camera in an orbital motion, but it does not follow that orbit.
To my knowledge, I created an orbital-motion camera, and the inverse of that is the view matrix, so the view matrix will transform world space to produce this result. Is there any fault in my thinking process?
"use strict";
const vertexShader = `#version 300 es
in vec4 a_position;
in vec4 a_color;
out vec4 v_color;
uniform mat4 u_matrix;
void main(){
gl_Position = u_matrix*a_position;
v_color = a_color;
}
`;
const fragShader = `#version 300 es
precision highp float;
in vec4 v_color;
out vec4 frag_color;
void main(){
frag_color = v_color;
}
`;
var cameraAngleDegree = 0;
var cameraAngle = 0;
const radius = 100;
var increment = 1;
var numFs = 5;
function main() {
var canvas = document.querySelector("#canvas");
var gl = canvas.getContext("webgl2");
if (!gl) {
return;
}
requestAnimationFrame(function() {
init(gl);
});
}
function init(gl) {
const program = webglUtils.createProgramFromSources(gl, [vertexShader, fragShader]);
const apositionLoc = gl.getAttribLocation(program, 'a_position');
const acolorLoc = gl.getAttribLocation(program, 'a_color');
const umatrixLoc = gl.getUniformLocation(program, 'u_matrix');
let vao = gl.createVertexArray();
gl.bindVertexArray(vao);
let positionBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
setGeometry(gl);
gl.enableVertexAttribArray(apositionLoc);
let size = 3;
let type = gl.FLOAT;
let normalize = false;
let stride = 0;
let offset = 0;
gl.vertexAttribPointer(apositionLoc, size, type, normalize, stride, offset);
let colorBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, colorBuffer);
setColor(gl);
gl.enableVertexAttribArray(acolorLoc);
size = 3;
type = gl.UNSIGNED_BYTE;
normalize = true;
stride = 0;
offset = 0;
gl.vertexAttribPointer(acolorLoc, size, type, normalize, stride, offset);
let fov = degreeToRadian(60);
cameraAngle = degreeToRadian(cameraAngleDegree);
function degreeToRadian(deg) {
return deg * Math.PI / 180;
}
function radToDegree(rad) {
return rad * (180) / Math.PI;
}
drawScene();
// webglLessonsUI.setupSlider("#cameraAngle", { value: radToDegree(cameraAngle), slide: updateCameraAngle, min: -360, max: 360 });
// function updateCameraAngle(event, ui) {
// cameraAngle = degreeToRadian(ui.value);
// drawScene();
// }
function drawScene() {
webglUtils.resizeCanvasToDisplaySize(gl.canvas);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
gl.enable(gl.CULL_FACE);
gl.enable(gl.DEPTH_TEST);
gl.useProgram(program);
let aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
let projection = m4.perspective(fov, aspect, 1, 1000);
const fPosition = [radius, 0, 0];
cameraAngleDegree += increment;
cameraAngle =degreeToRadian(cameraAngleDegree);
let camera = m4.yRotation(cameraAngle);
camera = m4.translate(camera, 0, 100, 300);
let cameraPosition = [camera[12], camera[13], camera[14]];
// let up = [0, 1, 0];
// camera = m4.lookAt(cameraPosition, fPosition, up);
let viewMatrix = m4.inverse(camera);
let viewProjection = m4.multiply(projection, viewMatrix);
for (var ii = 0; ii < numFs; ++ii) {
var angle = ii * Math.PI * 2 / numFs;
var x = Math.cos(angle) * radius;
var z = Math.sin(angle) * radius;
var matrix = m4.translate(viewProjection, x, 0, z);
// Set the matrix.
gl.uniformMatrix4fv(umatrixLoc, false, matrix);
// Draw the geometry.
var primitiveType = gl.TRIANGLES;
var offset = 0;
var count = 16 * 6;
gl.drawArrays(primitiveType, offset, count);
}
// gl.uniformMatrix4fv(umatrixLoc, false, viewProjection);
// var primitives = gl.TRIANGLES;
// var count = 16 * 6;
// var offset = 0;
// gl.drawArrays(primitives, offset, count);
// }
requestAnimationFrame(function() {
init(gl)
});
}
}
function setGeometry(gl) {
let positions = new Float32Array([
0, 0, 0,
0, 150, 0,
30, 0, 0,
0, 150, 0,
30, 150, 0,
30, 0, 0,
// top rung front
30, 0, 0,
30, 30, 0,
100, 0, 0,
30, 30, 0,
100, 30, 0,
100, 0, 0,
// middle rung front
30, 60, 0,
30, 90, 0,
67, 60, 0,
30, 90, 0,
67, 90, 0,
67, 60, 0,
// left column back
0, 0, 30,
30, 0, 30,
0, 150, 30,
0, 150, 30,
30, 0, 30,
30, 150, 30,
// top rung back
30, 0, 30,
100, 0, 30,
30, 30, 30,
30, 30, 30,
100, 0, 30,
100, 30, 30,
// middle rung back
30, 60, 30,
67, 60, 30,
30, 90, 30,
30, 90, 30,
67, 60, 30,
67, 90, 30,
// top
0, 0, 0,
100, 0, 0,
100, 0, 30,
0, 0, 0,
100, 0, 30,
0, 0, 30,
// top rung right
100, 0, 0,
100, 30, 0,
100, 30, 30,
100, 0, 0,
100, 30, 30,
100, 0, 30,
// under top rung
30, 30, 0,
30, 30, 30,
100, 30, 30,
30, 30, 0,
100, 30, 30,
100, 30, 0,
// between top rung and middle
30, 30, 0,
30, 60, 30,
30, 30, 30,
30, 30, 0,
30, 60, 0,
30, 60, 30,
// top of middle rung
30, 60, 0,
67, 60, 30,
30, 60, 30,
30, 60, 0,
67, 60, 0,
67, 60, 30,
// right of middle rung
67, 60, 0,
67, 90, 30,
67, 60, 30,
67, 60, 0,
67, 90, 0,
67, 90, 30,
// bottom of middle rung.
30, 90, 0,
30, 90, 30,
67, 90, 30,
30, 90, 0,
67, 90, 30,
67, 90, 0,
// right of bottom
30, 90, 0,
30, 150, 30,
30, 90, 30,
30, 90, 0,
30, 150, 0,
30, 150, 30,
// bottom
0, 150, 0,
0, 150, 30,
30, 150, 30,
0, 150, 0,
30, 150, 30,
30, 150, 0,
// left side
0, 0, 0,
0, 0, 30,
0, 150, 30,
0, 0, 0,
0, 150, 30,
0, 150, 0,
]);
gl.bufferData(gl.ARRAY_BUFFER, positions, gl.STATIC_DRAW)
}
function setColor(gl) {
gl.bufferData(
gl.ARRAY_BUFFER,
new Uint8Array([
// left column front
200, 70, 120,
200, 70, 120,
200, 70, 120,
200, 70, 120,
200, 70, 120,
200, 70, 120,
// top rung front
200, 70, 120,
200, 70, 120,
200, 70, 120,
200, 70, 120,
200, 70, 120,
200, 70, 120,
// middle rung front
200, 70, 120,
200, 70, 120,
200, 70, 120,
200, 70, 120,
200, 70, 120,
200, 70, 120,
// left column back
80, 70, 200,
80, 70, 200,
80, 70, 200,
80, 70, 200,
80, 70, 200,
80, 70, 200,
// top rung back
80, 70, 200,
80, 70, 200,
80, 70, 200,
80, 70, 200,
80, 70, 200,
80, 70, 200,
// middle rung back
80, 70, 200,
80, 70, 200,
80, 70, 200,
80, 70, 200,
80, 70, 200,
80, 70, 200,
// top
70, 200, 210,
70, 200, 210,
70, 200, 210,
70, 200, 210,
70, 200, 210,
70, 200, 210,
// top rung right
200, 200, 70,
200, 200, 70,
200, 200, 70,
200, 200, 70,
200, 200, 70,
200, 200, 70,
// under top rung
210, 100, 70,
210, 100, 70,
210, 100, 70,
210, 100, 70,
210, 100, 70,
210, 100, 70,
// between top rung and middle
210, 160, 70,
210, 160, 70,
210, 160, 70,
210, 160, 70,
210, 160, 70,
210, 160, 70,
// top of middle rung
70, 180, 210,
70, 180, 210,
70, 180, 210,
70, 180, 210,
70, 180, 210,
70, 180, 210,
// right of middle rung
100, 70, 210,
100, 70, 210,
100, 70, 210,
100, 70, 210,
100, 70, 210,
100, 70, 210,
// bottom of middle rung.
76, 210, 100,
76, 210, 100,
76, 210, 100,
76, 210, 100,
76, 210, 100,
76, 210, 100,
// right of bottom
140, 210, 80,
140, 210, 80,
140, 210, 80,
140, 210, 80,
140, 210, 80,
140, 210, 80,
// bottom
90, 130, 110,
90, 130, 110,
90, 130, 110,
90, 130, 110,
90, 130, 110,
90, 130, 110,
// left side
160, 160, 220,
160, 160, 220,
160, 160, 220,
160, 160, 220,
160, 160, 220,
160, 160, 220,
]),
gl.STATIC_DRAW);
}
main();
<!DOCTYPE html>
<html>
<head>
<title>Triangle Webgl 2</title>
<style type="text/css">
@import url("https://webglfundamentals.org/webgl/resources/webgl-tutorials.css");
body {
margin: 0;
}
button {
position: absolute;
}
canvas {
width: 100vw;
height: 100vh;
display: block;
}
</style>
</head>
<body>
<canvas id="canvas"></canvas>
<div id="uiContainer">
<div id="ui">
<div id="cameraAngle"></div>
</div>
</div>
<!--
for most samples webgl-utils only provides shader compiling/linking and
canvas resizing because why clutter the examples with code that's the same in every sample.
See https://webglfundamentals.org/webgl/lessons/webgl-boilerplate.html
and https://webglfundamentals.org/webgl/lessons/webgl-resizing-the-canvas.html
for webgl-utils, m3, m4, and webgl-lessons-ui.
-->
<script src="https://webglfundamentals.org/webgl/resources/webgl-utils.js"></script>
<script src="https://webglfundamentals.org/webgl/resources/webgl-lessons-ui.js"></script>
<script src="https://webglfundamentals.org/webgl/resources/m4.js"></script>
<script src="https://greggman.github.io/webgl-helpers/webgl-gl-error-check.js"></script>
<script type="text/javascript" src="js/lookat.js"></script>
<!-- <script type="text/javascript" src="js/camera.js"></script> -->
</body>
</html>
If I understand your question, the problem you see is that the camera appears to get closer to and further from the Fs.
The issue is that the vertex data for the Fs is built so the top-left front corner is at 0,0,0; from there the vertices go +X 100 units (so 100 units wide), +Y 150 units (so 150 units tall), and +Z 30 units (so 30 units deep).
So, when you draw them around a 100-unit circle, their origin is the part you are positioning, and you get this:
The image is top down, so the Fs are just rectangles. The green circle is the local origin of each F, its local 0,0,0. The other vertices of the F are relative to that local origin, so they are closer to the outer circle (the orbit of the camera) on one side and further on the other.
You can fix it by moving the Fs -50 in X and -15 in Z. In other words
var angle = ii * Math.PI * 2 / numFs;
var x = Math.cos(angle) * radius - 50;
var z = Math.sin(angle) * radius - 15;
Which gives you this situation:
The local origin of each F is no longer on the circle.
You could also fix it by centering the F vertex data: go through all the vertices and subtract 50 from X and 15 from Z (see the sketch below). That would give you this situation:
Now each F's vertex data is centered and its local origin is on the circle.
Yet another way to fix it would be to compute the extents of the group of Fs, compute the center of those extents, and move the center of the camera orbit there, which would give you this situation:
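For the centering option, a sketch of shifting the F vertex data before uploading it (the offsets assume the 100×150×30 F used above):
function centerGeometry(positions) {
  // shift every vertex by half the F's width (100) and depth (30)
  // so the local origin sits at the center of the model
  for (let i = 0; i < positions.length; i += 3) {
    positions[i]     -= 50;  // x
    positions[i + 2] -= 15;  // z
  }
  return positions;
}
// e.g. gl.bufferData(gl.ARRAY_BUFFER, centerGeometry(positions), gl.STATIC_DRAW)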

OpenCV, how can we normalize a Mat min to max and max to min?

I want to normalize a Mat so that the min value maps to 255 and the max maps to 0 (i.e. normalize the Mat to the 0~255 range, inverted).
For example, given an array like [0.02, 0.002, 0.0002], after normalization I want a result like [3, 26, 255], but with NORM_MINMAX I get [255, 26, 3].
I did not find any function that does the inverse of the NORM_MINMAX operation.
Code used:
cv::Mat mat(10, 10, CV_64F);
mat.setTo(0);
mat.row(0) = 0.02;
mat.row(1) = 0.002;
mat.row(2) = 0.0002;
cv::normalize(mat, mat, 255, 0, cv::NORM_MINMAX);
mat.convertTo(mat, CV_8UC1);
std::cout << mat << std::endl;
Result is:
[255, 255, 255, 255, 255, 255, 255, 255, 255, 255;
26, 26, 26, 26, 26, 26, 26, 26, 26, 26;
3, 3, 3, 3, 3, 3, 3, 3, 3, 3;
0, 0, 0, 0, 0, 0, 0, 0, 0, 0;
0, 0, 0, 0, 0, 0, 0, 0, 0, 0;
0, 0, 0, 0, 0, 0, 0, 0, 0, 0;
0, 0, 0, 0, 0, 0, 0, 0, 0, 0;
0, 0, 0, 0, 0, 0, 0, 0, 0, 0;
0, 0, 0, 0, 0, 0, 0, 0, 0, 0;
0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
But I want the inverse of the above result.
Update: when I subtract the mat from 255, like this:
cv::subtract(255, mat, mat, mat); // the last mat acts as mask
std::cout << mat << std::endl;
Result is:
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0;
229, 229, 229, 229, 229, 229, 229, 229, 229, 229;
252, 252, 252, 252, 252, 252, 252, 252, 252, 252;
0, 0, 0, 0, 0, 0, 0, 0, 0, 0;
0, 0, 0, 0, 0, 0, 0, 0, 0, 0;
0, 0, 0, 0, 0, 0, 0, 0, 0, 0;
0, 0, 0, 0, 0, 0, 0, 0, 0, 0;
0, 0, 0, 0, 0, 0, 0, 0, 0, 0;
0, 0, 0, 0, 0, 0, 0, 0, 0, 0;
0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
I finally found how to calculate it; the steps are below.
Using the inverse-proportion formula, we can easily compute the inverse of NORM_MINMAX:
x = a*b/c
where a = the min value of the mat elements, b = 255 (the max output value), and c = the element we want to map. For example, for c = 0.02: x = 0.0002 * 255 / 0.02 = 2.55, which becomes 3 after conversion to 8 bits.
cv::Mat mat(10, 10, CV_64F);
mat.setTo(0);
mat.row(0) = 0.02;
mat.row(1) = 0.002;
mat.row(2) = 0.0002;
std::cout << mat<< std::endl;
// create a mask
cv::Mat mask(mat.size(), CV_8U);
mask.setTo(0);
mask.row(0) = 255;
mask.row(1) = 255;
mask.row(2) = 255;
// find the min value
double min;
cv::minMaxLoc(mat, &min, nullptr, nullptr, nullptr, mask);
std::cout << "min=" << min << std::endl;
// unfortunately OpenCV's divide operation does not support a mask, so we need a few extra steps
cv::Mat result, maskNeg;
cv::divide(min*255, mat, result); // this is the magic line
cv::bitwise_not(mask, maskNeg);
mat.copyTo(result, maskNeg);
std::cout << result << std::endl;
// convert to 8bit
result.convertTo(result, CV_8UC1);
std::cout << "the final result:" << std::endl;
std::cout << result << std::endl;
And the outputs:
original mat
[0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02, 0.02;
0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002, 0.002;
0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002;
0, 0, 0, 0, 0, 0, 0, 0, 0, 0;
0, 0, 0, 0, 0, 0, 0, 0, 0, 0;
0, 0, 0, 0, 0, 0, 0, 0, 0, 0;
0, 0, 0, 0, 0, 0, 0, 0, 0, 0;
0, 0, 0, 0, 0, 0, 0, 0, 0, 0;
0, 0, 0, 0, 0, 0, 0, 0, 0, 0;
0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
min=0.0002
the calculated min-max
[2.55, 2.55, 2.55, 2.55, 2.55, 2.55, 2.55, 2.55, 2.55, 2.55;
25.5, 25.5, 25.5, 25.5, 25.5, 25.5, 25.5, 25.5, 25.5, 25.5;
255, 255, 255, 255, 255, 255, 255, 255, 255, 255;
0, 0, 0, 0, 0, 0, 0, 0, 0, 0;
0, 0, 0, 0, 0, 0, 0, 0, 0, 0;
0, 0, 0, 0, 0, 0, 0, 0, 0, 0;
0, 0, 0, 0, 0, 0, 0, 0, 0, 0;
0, 0, 0, 0, 0, 0, 0, 0, 0, 0;
0, 0, 0, 0, 0, 0, 0, 0, 0, 0;
0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
the final result:
[ 3, 3, 3, 3, 3, 3, 3, 3, 3, 3;
26, 26, 26, 26, 26, 26, 26, 26, 26, 26;
255, 255, 255, 255, 255, 255, 255, 255, 255, 255;
0, 0, 0, 0, 0, 0, 0, 0, 0, 0;
0, 0, 0, 0, 0, 0, 0, 0, 0, 0;
0, 0, 0, 0, 0, 0, 0, 0, 0, 0;
0, 0, 0, 0, 0, 0, 0, 0, 0, 0;
0, 0, 0, 0, 0, 0, 0, 0, 0, 0;
0, 0, 0, 0, 0, 0, 0, 0, 0, 0;
0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
Yes, this is exactly what I want.

Use FFmpeg to mux FLV and send RTMP on iOS

I would like to use the iPhone camera and microphone to capture data and push it out as an RTMP stream through FFmpeg.
The following function captures the data on iOS:
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
if (connection == videoCaptureConnection)
{
[manager264 encoderToH264:sampleBuffer];
}
else if (connection == audioCaptureConnection)
{
[manager264 encoderToMP3:sampleBuffer];
}
}
FFmpeg initialization:
- (int)setX264Resource
{
Global_Variables_VVV = (AppDelegate *)[[UIApplication sharedApplication] delegate];
avformat_network_init();
av_register_all();
pFormatCtx = avformat_alloc_context();
avformat_alloc_output_context2(&pFormatCtx, NULL, "flv", out_file);
fmt = pFormatCtx->oformat;
//Open output URL
if (avio_open(&pFormatCtx->pb, out_file, AVIO_FLAG_READ_WRITE) < 0)
{
printf("Failed to open output file! \n");
return -1;
}
/* Add the audio and video streams using the default format codecs
* and initialize the codecs. */
video_st = NULL;
audio_st = NULL;
if (fmt->video_codec != AV_CODEC_ID_NONE) {
video_st = add_stream(pFormatCtx, &pCodec, AV_CODEC_ID_H264);
}
if (fmt->audio_codec != AV_CODEC_ID_NONE) {
audio_st = add_stream(pFormatCtx, &aCodec, AV_CODEC_ID_MP3);
}
/* Now that all the parameters are set, we can open the audio and
* video codecs and allocate the necessary encode buffers. */
if (video_st)
[self open_video:pFormatCtx avcodec:pCodec avstream:video_st];
if (audio_st)
[self open_audio:pFormatCtx avcodec:aCodec avstream:audio_st];
// Show some Information
av_dump_format(pFormatCtx, 0, out_file, 1);
//Write File Header
avformat_write_header(pFormatCtx, NULL);
av_new_packet(&pkt, picture_size);
av_new_packet(&pkt2, picture_size);
AVCodecContext *c = video_st->codec;
y_size = c->width * c->height;
if (pFrame)
pFrame->pts = 0;
if(aFrame)
{
aFrame->pts = 0;
}
return 0;
}
static AVStream *add_stream(AVFormatContext *oc, AVCodec **codec, enum AVCodecID codec_id)
{
AVCodecContext *c;
AVStream *st;
/* find the encoder */
*codec = avcodec_find_encoder(codec_id);
if (!(*codec))
{
NSLog(#"Could not find encoder for '%s'\n",
avcodec_get_name(codec_id));
}
st = avformat_new_stream(oc, *codec);
if (!st)
{
NSLog(#"Could not allocate stream\n");
}
st->id = oc->nb_streams-1;
c = st->codec;
switch ((*codec)->type)
{
case AVMEDIA_TYPE_AUDIO:
c->codec_id = AV_CODEC_ID_MP3;
c->codec_type = AVMEDIA_TYPE_AUDIO;
c->channels = 1;
c->sample_fmt = AV_SAMPLE_FMT_S16P;
c->bit_rate = 128000;
c->sample_rate = 44100;
c->channel_layout = AV_CH_LAYOUT_MONO;
break;
case AVMEDIA_TYPE_VIDEO:
c->codec_id = AV_CODEC_ID_H264;
c->codec_type=AVMEDIA_TYPE_VIDEO;
/* Resolution must be a multiple of two. */
c->width = 720;
c->height = 1280;
/* timebase: This is the fundamental unit of time (in seconds) in terms
* of which frame timestamps are represented. For fixed-fps content,
* timebase should be 1/framerate and timestamp increments should be
* identical to 1. */
c->time_base.den = 30;
c->time_base.num = 1;
c->gop_size = 15; /* emit one intra frame every twelve frames at most */
c->pix_fmt = PIX_FMT_YUV420P;
c->max_b_frames = 0;
c->bit_rate = 3000000;
c->qmin = 10;
c->qmax = 51;
break;
default:
break;
}
/* Some formats want stream headers to be separate. */
if (oc->oformat->flags & AVFMT_GLOBALHEADER)
c->flags |= CODEC_FLAG_GLOBAL_HEADER;
return st;
}
The sample buffer is encoded to H.264 and pushed out over the RTMP stream:
- (void)encoderToH264:(CMSampleBufferRef)sampleBuffer
{
CVPixelBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
if (CVPixelBufferLockBaseAddress(imageBuffer, 0) == kCVReturnSuccess)
{
UInt8 *bufferbasePtr = (UInt8 *)CVPixelBufferGetBaseAddress(imageBuffer);
UInt8 *bufferPtr = (UInt8 *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer,0);
UInt8 *bufferPtr1 = (UInt8 *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer,1);
size_t buffeSize = CVPixelBufferGetDataSize(imageBuffer);
size_t width = CVPixelBufferGetWidth(imageBuffer);
size_t height = CVPixelBufferGetHeight(imageBuffer);
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
size_t bytesrow0 = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer,0);
size_t bytesrow1 = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer,1);
size_t bytesrow2 = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer,2);
UInt8 *yuv420_data = (UInt8 *)malloc(width * height *3/ 2); // buffer to store YUV with layout YYYYYYYYUUVV
/* convert NV12 data to YUV420*/
UInt8 *pY = bufferPtr ;
UInt8 *pUV = bufferPtr1;
UInt8 *pU = yuv420_data + width*height;
UInt8 *pV = pU + width*height/4;
for(int i =0;i<height;i++)
{
memcpy(yuv420_data+i*width,pY+i*bytesrow0,width);
}
for(int j = 0;j<height/2;j++)
{
for(int i =0;i<width/2;i++)
{
*(pU++) = pUV[i<<1];
*(pV++) = pUV[(i<<1) + 1];
}
pUV+=bytesrow1;
}
//Read raw YUV data
picture_buf = yuv420_data;
pFrame->data[0] = picture_buf; // Y
pFrame->data[1] = picture_buf+ y_size; // U
pFrame->data[2] = picture_buf+ y_size*5/4; // V
int got_picture = 0;
// Encode
pFrame->width = 720;
pFrame->height = 1280;
pFrame->format = PIX_FMT_YUV420P;
AVCodecContext *c = video_st->codec;
int ret = avcodec_encode_video2(c, &pkt, pFrame, &got_picture);
if(ret < 0)
{
printf("Failed to encode! \n");
}
if (got_picture==1)
{
/* Compute current audio and video time. */
video_time = video_st ? video_st->pts.val * av_q2d(video_st->time_base) : 0.0;
pFrame->pts += av_rescale_q(1, video_st->codec->time_base, video_st->time_base);
if(pkt.size != 0)
{
printf("Succeed to encode frame: %5lld\tsize:%5d\n", pFrame->pts, pkt.size);
pkt.stream_index = video_st->index;
ret = av_write_frame(pFormatCtx, &pkt);
av_free_packet(&pkt);
}
}
free(yuv420_data);
}
CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
}
The sample buffer is encoded to MP3 and pushed out over the RTMP stream:
-(void)encoderToMP3:(CMSampleBufferRef)sampleBuffer
{
CMSampleTimingInfo timing_info;
CMSampleBufferGetSampleTimingInfo(sampleBuffer, 0, &timing_info);
double pts=0;
double dts=0;
AVCodecContext *c;
int got_packet, ret;
c = audio_st->codec;
CMItemCount numSamples = CMSampleBufferGetNumSamples(sampleBuffer);
NSUInteger channelIndex = 0;
CMBlockBufferRef audioBlockBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
size_t audioBlockBufferOffset = (channelIndex * numSamples * sizeof(SInt16));
size_t lengthAtOffset = 0;
size_t totalLength = 0;
SInt16 *samples = NULL;
CMBlockBufferGetDataPointer(audioBlockBuffer, audioBlockBufferOffset, &lengthAtOffset, &totalLength, (char **)(&samples));
const AudioStreamBasicDescription *audioDescription = CMAudioFormatDescriptionGetStreamBasicDescription(CMSampleBufferGetFormatDescription(sampleBuffer));
SwrContext *swr = swr_alloc();
int in_smprt = (int)audioDescription->mSampleRate;
av_opt_set_int(swr, "in_channel_layout", AV_CH_LAYOUT_MONO, 0);
av_opt_set_int(swr, "out_channel_layout", audio_st->codec->channel_layout, 0);
av_opt_set_int(swr, "in_channel_count", audioDescription->mChannelsPerFrame, 0);
av_opt_set_int(swr, "out_channel_count", 1, 0);
av_opt_set_int(swr, "out_channel_layout", audio_st->codec->channel_layout, 0);
av_opt_set_int(swr, "in_sample_rate", audioDescription->mSampleRate,0);
av_opt_set_int(swr, "out_sample_rate", audio_st->codec->sample_rate,0);
av_opt_set_sample_fmt(swr, "in_sample_fmt", AV_SAMPLE_FMT_S16, 0);
av_opt_set_sample_fmt(swr, "out_sample_fmt", audio_st->codec->sample_fmt, 0);
swr_init(swr);
uint8_t **input = NULL;
int src_linesize;
int in_samples = (int)numSamples;
ret = av_samples_alloc_array_and_samples(&input, &src_linesize, audioDescription->mChannelsPerFrame, in_samples, AV_SAMPLE_FMT_S16P, 0);
*input=(uint8_t*)samples;
uint8_t *output=NULL;
int out_samples = av_rescale_rnd(swr_get_delay(swr, in_smprt) +in_samples, (int)audio_st->codec->sample_rate, in_smprt, AV_ROUND_UP);
av_samples_alloc(&output, NULL, audio_st->codec->channels, out_samples, audio_st->codec->sample_fmt, 0);
in_samples = (int)numSamples;
out_samples = swr_convert(swr, &output, out_samples, (const uint8_t **)input, in_samples);
aFrame->nb_samples =(int) out_samples;
ret = avcodec_fill_audio_frame(aFrame, audio_st->codec->channels, audio_st->codec->sample_fmt,
(uint8_t *)output,
(int) out_samples *
av_get_bytes_per_sample(audio_st->codec->sample_fmt) *
audio_st->codec->channels, 1);
if (ret < 0)
{
fprintf(stderr, "Error fill audio frame: %s\n", av_err2str(ret));
}
aFrame->channel_layout = audio_st->codec->channel_layout;
aFrame->channels=audio_st->codec->channels;
aFrame->sample_rate= audio_st->codec->sample_rate;
if (timing_info.presentationTimeStamp.timescale!=0)
pts=(double) timing_info.presentationTimeStamp.value/timing_info.presentationTimeStamp.timescale;
aFrame->pts = pts*audio_st->time_base.den;
aFrame->pts = av_rescale_q(aFrame->pts, audio_st->time_base, audio_st->codec->time_base);
ret = avcodec_encode_audio2(c, &pkt2, aFrame, &got_packet);
if (ret < 0)
{
fprintf(stderr, "Error encoding audio frame: %s\n", av_err2str(ret));
}
swr_free(&swr);
if (got_packet)
{
pkt2.stream_index = audio_st->index;
// Write the compressed frame to the media file.
ret = av_interleaved_write_frame(pFormatCtx, &pkt2);
if (ret != 0)
{
fprintf(stderr, "Error while writing audio frame: %s\n", av_err2str(ret));
av_free_packet(&pkt2);
}
}
}
Soon "Broken pipe" problem occurs after execution.
PTS is currently feeling is not adjusted, but do not know how to adjust the PTS.
2016-03-09 16:57:41.058 PoliceCamPlayer[1004:193465] recordVideo....
[libx264 @ 0x12f8b6e00] using cpu capabilities: ARMv8 NEON
[libx264 @ 0x12f8b6e00] profile Constrained Baseline, level 3.1
[libx264 @ 0x12f8b6e00] 264 - core 148 - H.264/MPEG-4 AVC codec - Copyleft 2003-2016 - http://www.videolan.org/x264.html - options: cabac=0 ref=1 deblock=1:0:0 analyse=0x1:0x111 me=hex subme=2 psy=1 psy_rd=1.00:0.00 mixed_ref=0 me_range=16 chroma_me=1 trellis=0 8x8dct=0 cqm=0 deadzone=21,11 fast_pskip=1 chroma_qp_offset=0 threads=2 lookahead_threads=2 sliced_threads=1 slices=2 nr=0 decimate=1 interlaced=0 bluray_compat=0 constrained_intra=0 bframes=0 weightp=0 keyint=15 keyint_min=1 scenecut=40 intra_refresh=0 rc=abr mbtree=0 bitrate=3000 ratetol=1.0 qcomp=0.60 qpmin=25 qpmax=51 qpstep=4 ip_ratio=1.40 aq=1:1.00
Output #0, flv, to 'rtmp://XXX.XX.XXX.XX/myapp/jackal':
Stream #0:0: Video: h264 (libx264), yuv420p, 720x1280, q=25-51, 3000 kb/s, 23 tbc
Stream #0:1: Audio: mp3 (libmp3lame), 44100 Hz, mono, s16p, 64 kb/s
[flv @ 0x12f8b5400] Using AVStream.codec.time_base as a timebase hint to the muxer is deprecated. Set AVStream.time_base instead.
[flv @ 0x12f8b5400] Using AVStream.codec.time_base as a timebase hint to the muxer is deprecated. Set AVStream.time_base instead.
[libx264 @ 0x12f8b6e00] Provided packet is too small, needs to be 33468
Failed to encode!
Audio_pts:4154432515 pts_time:4.15443e+06 dts:4154432515 dts_time:4.15443e+06 duration:1152 duration_time:1.152 stream_index:1
Video_pts:43 pts_time:0.043 dts:43 dts_time:0.043 duration:0 duration_time:0 stream_index:0
Audio_pts:4154433667 pts_time:4.15443e+06 dts:4154433667 dts_time:4.15443e+06 duration:1152 duration_time:1.152 stream_index:1
Audio_pts:4154434854 pts_time:4.15443e+06 dts:4154434854 dts_time:4.15443e+06 duration:1152 duration_time:1.152 stream_index:1
Video_pts:86 pts_time:0.086 dts:86 dts_time:0.086 duration:0 duration_time:0 stream_index:0
Audio_pts:4154435996 pts_time:4.15444e+06 dts:4154435996 dts_time:4.15444e+06 duration:1152 duration_time:1.152 stream_index:1
Audio_pts:4154437138 pts_time:4.15444e+06 dts:4154437138 dts_time:4.15444e+06 duration:1152 duration_time:1.152 stream_index:1
Video_pts:129 pts_time:0.129 dts:129 dts_time:0.129 duration:0 duration_time:0 stream_index:0
Audio_pts:4154438281 pts_time:4.15444e+06 dts:4154438281 dts_time:4.15444e+06 duration:1152 duration_time:1.152 stream_index:1
Video_pts:172 pts_time:0.172 dts:172 dts_time:0.172 duration:0 duration_time:0 stream_index:0
Audio_pts:4154439467 pts_time:4.15444e+06 dts:4154439467 dts_time:4.15444e+06 duration:1152 duration_time:1.152 stream_index:1
Video_pts:215 pts_time:0.215 dts:215 dts_time:0.215 duration:0 duration_time:0 stream_index:0
Audio_pts:4154440609 pts_time:4.15444e+06 dts:4154440609 dts_time:4.15444e+06 duration:1152 duration_time:1.152 stream_index:1
Audio_pts:4154441752 pts_time:4.15444e+06 dts:4154441752 dts_time:4.15444e+06 duration:1152 duration_time:1.152 stream_index:1
Video_pts:258 pts_time:0.258 dts:258 dts_time:0.258 duration:0 duration_time:0 stream_index:0
Audio_pts:4154442884 pts_time:4.15444e+06 dts:4154442884 dts_time:4.15444e+06 duration:1152 duration_time:1.152 stream_index:1
Audio_pts:4154444071 pts_time:4.15444e+06 dts:4154444071 dts_time:4.15444e+06 duration:1152 duration_time:1.152 stream_index:1
Video_pts:301 pts_time:0.301 dts:301 dts_time:0.301 duration:0 duration_time:0 stream_index:0
Audio_pts:4154445213 pts_time:4.15445e+06 dts:4154445213 dts_time:4.15445e+06 duration:1152 duration_time:1.152 stream_index:1
Audio_pts:4154446355 pts_time:4.15445e+06 dts:4154446355 dts_time:4.15445e+06 duration:1152 duration_time:1.152 stream_index:1
Video_pts:344 pts_time:0.344 dts:344 dts_time:0.344 duration:0 duration_time:0 stream_index:0
Audio_pts:4154447498 pts_time:4.15445e+06 dts:4154447498 dts_time:4.15445e+06 duration:1152 duration_time:1.152 stream_index:1
Video_pts:387 pts_time:0.387 dts:387 dts_time:0.387 duration:0 duration_time:0 stream_index:0
Audio_pts:4154448640 pts_time:4.15445e+06 dts:4154448640 dts_time:4.15445e+06 duration:1152 duration_time:1.152 stream_index:1
Audio_pts:4154449826 pts_time:4.15445e+06 dts:4154449826 dts_time:4.15445e+06 duration:1152 duration_time:1.152 stream_index:1
Video_pts:430 pts_time:0.43 dts:430 dts_time:0.43 duration:0 duration_time:0 stream_index:0
Audio_pts:4154450969 pts_time:4.15445e+06 dts:4154450969 dts_time:4.15445e+06 duration:1152 duration_time:1.152 stream_index:1
Audio_pts:4154452101 pts_time:4.15445e+06 dts:4154452101 dts_time:4.15445e+06 duration:1152 duration_time:1.152 stream_index:1
...................
...................
...................
Video_pts:4343 pts_time:4.343 dts:4343 dts_time:4.343 duration:0 duration_time:0 stream_index:0
Audio_pts:4154622619 pts_time:4.15462e+06 dts:4154622619 dts_time:4.15462e+06 duration:1152 duration_time:1.152 stream_index:1
Video_pts:4386 pts_time:4.386 dts:4386 dts_time:4.386 duration:0 duration_time:0 stream_index:0
Audio_pts:4154623761 pts_time:4.15462e+06 dts:4154623761 dts_time:4.15462e+06 duration:1152 duration_time:1.152 stream_index:1
Audio_pts:4154624903 pts_time:4.15462e+06 dts:4154624903 dts_time:4.15462e+06 duration:1152 duration_time:1.152 stream_index:1
Audio_pts:4154626090 pts_time:4.15463e+06 dts:4154626090 dts_time:4.15463e+06 duration:1152 duration_time:1.152 stream_index:1
Video_pts:4429 pts_time:4.429 dts:4429 dts_time:4.429 duration:0 duration_time:0 stream_index:0
Audio_pts:4154627222 pts_time:4.15463e+06 dts:4154627222 dts_time:4.15463e+06 duration:1152 duration_time:1.152 stream_index:1
Video_pts:4472 pts_time:4.472 dts:4472 dts_time:4.472 duration:0 duration_time:0 stream_index:0
Error while writing audio frame: Broken pipe
Audio_pts:4154628365 pts_time:4.15463e+06 dts:4154628365 dts_time:4.15463e+06 duration:1152 duration_time:1.152 stream_index:1
Error while writing audio frame: Broken pipe
Audio_pts:4154629507 pts_time:4.15463e+06 dts:4154629507 dts_time:4.15463e+06 duration:1152 duration_time:1.152 stream_index:1
Error while writing audio frame: Broken pipe
Audio_pts:4154630693 pts_time:4.15463e+06 dts:4154630693 dts_time:4.15463e+06 duration:1152 duration_time:1.152 stream_index:1
Error while writing audio frame: Broken pipe
Audio_pts:4154631836 pts_time:4.15463e+06 dts:4154631836 dts_time:4.15463e+06 duration:1152 duration_time:1.152 stream_index:1
Error while writing audio frame: Broken pipe
Audio_pts:4154632978 pts_time:4.15463e+06 dts:4154632978 dts_time:4.15463e+06 duration:1152 duration_time:1.152 stream_index:1
.......................
.......................
.......................
2016-03-09 16:57:49.345 PoliceCamPlayer[1004:193465] stopRecord!!!
Video_pts:7783 pts_time:7.783 dts:7783 dts_time:7.783 duration:0 duration_time:0 stream_index:0
[flv @ 0x12f8b5400] Failed to update header with correct duration.
[flv @ 0x12f8b5400] Failed to update header with correct filesize.
[libx264 @ 0x12f8b6e00] frame I:28 Avg QP:25.36 size: 24181
[libx264 @ 0x12f8b6e00] frame P:154 Avg QP:25.34 size: 6603
[libx264 @ 0x12f8b6e00] mb I I16..4: 80.9% 0.0% 19.1%
[libx264 @ 0x12f8b6e00] mb P I16..4: 5.9% 0.0% 0.2% P16..4: 28.2% 4.4% 1.0% 0.0% 0.0% skip:60.2%
[libx264 @ 0x12f8b6e00] final ratefactor: 16.70
[libx264 @ 0x12f8b6e00] coded y,uvDC,uvAC intra: 35.8% 9.3% 0.4% inter: 8.8% 1.6% 0.0%
[libx264 @ 0x12f8b6e00] i16 v,h,dc,p: 28% 26% 26% 21%
[libx264 @ 0x12f8b6e00] i4 v,h,dc,ddl,ddr,vr,hd,vl,hu: 13% 26% 25% 3% 7% 4% 5% 3% 13%
[libx264 @ 0x12f8b6e00] i8c dc,h,v,p: 85% 9% 5% 0%
[libx264 @ 0x12f8b6e00] kb/s:1712.63
Increase the PTS by the number of samples you send to the encoder.
Also, don't forget to rescale timings from your audio stream to the output format context.
So, the fixes are:
audioFrame->pts = audioSamplesCounter; // starting from zero
Then, after encoding (avcodec_encode_audio2), increase the counter by the number of samples in the frame you sent to the encoder (in your case this is not the amount you got from the CMSampleBuffer, but the amount after resampling by SWR, i.e. "out_samples"):
audioSamplesCounter += audioFrame->nb_samples;
And right before writing to the media output, rescale the timings:
av_packet_rescale_ts(&audioPacket,
audioStream->codec->time_base,
outputFormatContext->streams[audioStream->index]->time_base);
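Put together, a minimal sketch of the audio path using the variable names from the question (audioSamplesCounter is a new int64_t you would keep alongside the other globals, starting at 0):
aFrame->pts = audioSamplesCounter; // PTS in codec time_base units (1/sample_rate)
ret = avcodec_encode_audio2(c, &pkt2, aFrame, &got_packet);
audioSamplesCounter += aFrame->nb_samples; // advance by the samples actually sent
if (got_packet)
{
    pkt2.stream_index = audio_st->index;
    // rescale from the codec time_base to the muxer's stream time_base
    av_packet_rescale_ts(&pkt2, c->time_base, audio_st->time_base);
    ret = av_interleaved_write_frame(pFormatCtx, &pkt2);
}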
I'd also recommend optimizing how you use the device's resources:
Create the rescaling/resampling contexts once and reuse them (see the sketch after this list).
Allocate the audio and video buffers as soon as your stream starts, or when the first CMSampleBufferRef arrives. The sizes will not change until the stream/session restarts, and this is a big win for performance and memory consumption.
Use hardware acceleration where possible.
Do not forget to free any allocated arrays and contexts.
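A sketch of the first point: create the resampler once (for example in setX264Resource) and reuse it for every sample buffer, instead of calling swr_alloc()/swr_free() per buffer as encoderToMP3 does now. The static here is just for illustration; in practice you would use an ivar:
static SwrContext *swr = NULL;
if (!swr)
{
    swr = swr_alloc();
    av_opt_set_int(swr, "in_channel_layout", AV_CH_LAYOUT_MONO, 0);
    av_opt_set_int(swr, "out_channel_layout", audio_st->codec->channel_layout, 0);
    av_opt_set_int(swr, "in_sample_rate", in_smprt, 0);
    av_opt_set_int(swr, "out_sample_rate", audio_st->codec->sample_rate, 0);
    av_opt_set_sample_fmt(swr, "in_sample_fmt", AV_SAMPLE_FMT_S16, 0);
    av_opt_set_sample_fmt(swr, "out_sample_fmt", audio_st->codec->sample_fmt, 0);
    swr_init(swr);
}
// ...use swr as before; call swr_free(&swr) once when the stream stops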
Hope it helps you :)

Depth testing not working in OpenGL ES

I have a cube which I'm drawing on screen. I have enabled depth testing so that it is not transparent, and from the front and back the cube looks good,
but when translating and rotating the cube the results are strange. Here are some photos to demonstrate:
front:
sides:
This see-through effect is not welcome, of course.
Here is some relevant code:
Indices & vertices:
const Vertex Vertices[] = {
{{1, -1, 0}, {1, 1, 1, 1}, {1, 0}},
{{1, 1, 0}, {1, 1, 1, 1}, {1, 1}},
{{-1, 1, 0}, {1, 1, 1, 1}, {0, 1}},
{{-1, -1, 0}, {1, 1, 1, 1}, {0, 0}},
{{1, -1, -1}, {1, 1, 1, 1}, {1, 0}},
{{1, 1, -1}, {1, 1, 1, 1}, {1, 1}},
{{-1, 1, -1}, {1, 1, 1, 1}, {0, 1}},
{{-1, -1, -1}, {1, 1, 1, 1}, {0, 0}}
};
const GLubyte Indices[] = {
// Front
0, 1, 2
,2, 3, 0,
// Back
4, 6, 5,
4, 7, 6,
// Left
2, 7, 3,
7, 6, 2,
// Right
0, 4, 1,
4, 1, 5,
// Top
6, 2, 1,
1, 6, 5,
// Bottom
0, 3, 7,
0, 7, 4,
};
Layer setup:
- (void)setupLayer {
_eaglLayer = (CAEAGLLayer*) self.layer;
_eaglLayer.opaque = YES;
glEnable(GL_DEPTH_TEST);
glDepthFunc(GL_NEVER);
}
Depth buffer setup:
- (void)setupDepthBuffer {
glGenRenderbuffers(1, &_depthRenderBuffer);
glBindRenderbuffer(GL_RENDERBUFFER, _depthRenderBuffer);
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT16, self.frame.size.width, self.frame.size.height);
}
Rendering:
- (void)render:(CADisplayLink*)displayLink {
glClearColor(0, 0, 0, 1.0);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glEnable(GL_DEPTH_TEST);
CC3GLMatrix *projection = [CC3GLMatrix matrix];
float h = 4.0f * self.frame.size.height / self.frame.size.width;
[projection populateFromFrustumLeft:-2 andRight:2 andBottom:-h/2 andTop:h/2 andNear:4 andFar:10];
glUniformMatrix4fv(_projectionUniform, 1, 0, projection.glMatrix);
CC3GLMatrix *modelView = [CC3GLMatrix matrix];
[modelView populateFromTranslation:CC3VectorMake(sin(CACurrentMediaTime()), 0, -5)];
_currentRotation += displayLink.duration * 90;
[modelView rotateBy:CC3VectorMake(_currentRotation, _currentRotation, 0)];
glUniformMatrix4fv(_modelViewUniform, 1, 0, modelView.glMatrix);
glViewport(0, 0, self.frame.size.width, self.frame.size.height);
glVertexAttribPointer(_positionSlot, 3, GL_FLOAT, GL_FALSE,
sizeof(Vertex), 0);
glVertexAttribPointer(_colorSlot, 4, GL_FLOAT, GL_FALSE,
sizeof(Vertex), (GLvoid*) (sizeof(float)* 3));
glVertexAttribPointer(_texCoordSlot, 2, GL_FLOAT, GL_FALSE,
sizeof(Vertex), (GLvoid*) (sizeof(float) * 7));
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, _floorTexture);
glUniform1i(_textureUniform, 0);
glDrawElements(GL_TRIANGLE_STRIP, sizeof(Indices)/sizeof(Indices[0]),
GL_UNSIGNED_BYTE, 0);
[_context presentRenderbuffer:GL_RENDERBUFFER];
}
EDIT:
adding the texture mapping function:
- (GLuint)setupTexture:(NSString *)fileName {
// 1
CGImageRef spriteImage = [UIImage imageNamed:fileName].CGImage;
if (!spriteImage) {
NSLog(#"Failed to load image %#", fileName);
exit(1);
}
// 2
size_t width = CGImageGetWidth(spriteImage);
size_t height = CGImageGetHeight(spriteImage);
GLubyte * spriteData = (GLubyte *) calloc(width*height*4, sizeof(GLubyte));
CGContextRef spriteContext = CGBitmapContextCreate(spriteData, width, height, 8, width*4,
CGImageGetColorSpace(spriteImage), kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big);
// 3
CGContextDrawImage(spriteContext, CGRectMake(0, 0, width, height), spriteImage);
CGContextRelease(spriteContext);
// 4
GLuint texName;
glGenTextures(1, &texName);
glBindTexture(GL_TEXTURE_2D, texName);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, spriteData);
free(spriteData);
return texName;
}
What am I doing wrong here?
glDepthFunc(GL_NEVER); means the depth test never passes. At the very least, replace this with GL_LEQUAL.
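A minimal sketch of the corrected setup, keeping the rest of the method from the question:
- (void)setupLayer {
    _eaglLayer = (CAEAGLLayer*) self.layer;
    _eaglLayer.opaque = YES;
    glEnable(GL_DEPTH_TEST);
    glDepthFunc(GL_LEQUAL); // pass fragments that are nearer or equal; GL_NEVER rejects everything
}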

KNN find nearest error

I'm doing KNN classification of static gestures and I get this error:
ERROR: Unhandled exception at 0x01213aa2 in NUIGHR.exe: 0xC0000005:
Access violation reading location 0x00000000.
CvMat* GetFeatures(CvSeq* contour, CvSeq* hull, double boundingRectArea){
CvMoments moments;
CvHuMoments humoments;
cvMoments(contour, &moments, 0);
cvGetHuMoments(&moments, &humoments);
int cCont;
double cArea, cPerimeter, cDiameter, cExtent, cCompactness, cEccentricity, cCircularity;
cCont = contour->total;
cArea = fabs(cvContourArea(contour));
cPerimeter = cvContourPerimeter(contour);
cDiameter = sqrt( 4 * cArea / CV_PI);
cExtent = cArea / (boundingRectArea * boundingRectArea);
cCompactness = (4 * cArea * CV_PI) / cPerimeter;
cEccentricity = pow( (moments.m20 - moments.m02), 2) - (4 * pow(moments.m11, 2)) / ( pow(moments.m20 + moments.m02, 2) );
cCircularity = pow(cPerimeter, 2) / cArea;
cvmSet( featureVector, 0, 0, boundingRectArea);
cvmSet( featureVector, 0, 1, cCont);
cvmSet( featureVector, 0, 2, cArea);
cvmSet( featureVector, 0, 3, cPerimeter);
cvmSet( featureVector, 0, 4, cDiameter);
cvmSet( featureVector, 0, 5, cExtent);
cvmSet( featureVector, 0, 6, cCompactness);
cvmSet( featureVector, 0, 7, cEccentricity);
cvmSet( featureVector, 0, 8, cCircularity);
cvmSet( featureVector, 0, 9, humoments.hu1);
cvmSet( featureVector, 0, 10, humoments.hu2);
cvmSet( featureVector, 0, 11, humoments.hu3);
cvmSet( featureVector, 0, 12, humoments.hu4);
cvmSet( featureVector, 0, 13, humoments.hu5);
cvmSet( featureVector, 0, 14, humoments.hu6);
cvmSet( featureVector, 0, 15, humoments.hu7);
return featureVector;
}
int main(){
...
const int K = 10;
CvKNearest *knn = NULL;
float resultNode = 0;
CvMat* featVector = cvCreateMat(1, NUMBER_OF_FEATURES, CV_32FC1 );
CvMat* nearest = cvCreateMat(1, K, CV_32FC1);
...
resultNode = knn->find_nearest(&featVector, K, 0, 0, nearest, 0);
}
I think I need to convert CvMat* to CvMat.
How do I do it?
You cannot pass 0 as the 3rd and 4th arguments to the find_nearest function, not if you pass something as the 5th argument. OpenCV tries to populate the results and neighborResponses (see the docs), but cannot read/write through the NULL pointers.
featureVector is a NULL pointer.
Sorry guys... I'm a beginner :(
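A minimal sketch of a corrected call based on the points above, using the legacy CvKNearest interface from the question. featVector is already a CvMat*, so it is passed directly rather than as &featVector, and the output buffers are allocated instead of passing NULL:
CvMat* results = cvCreateMat(1, 1, CV_32FC1); // predicted label for the sample
const float* neighbors[K];                    // pointers to the K nearest training samples
resultNode = knn->find_nearest(featVector, K, results, neighbors, nearest, 0);
// and inside GetFeatures, featureVector must actually be allocated first:
// CvMat* featureVector = cvCreateMat(1, NUMBER_OF_FEATURES, CV_32FC1);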
