Direct3D 9 CreateDevice method fails - directx

I am trying to create a Direct3D 9 device. I am setting everything up with this piece of code:
LPDIRECT3D9 pD3D = Direct3DCreate9(D3D_SDK_VERSION);
D3DPRESENT_PARAMETERS d3dpp;
ZeroMemory(&d3dpp, sizeof(d3dpp));
d3dpp.Windowed = TRUE;
d3dpp.SwapEffect = D3DSWAPEFFECT_DISCARD;
d3dpp.hDeviceWindow = getGameHWND();
LPDIRECT3DDEVICE9 device;
HRESULT res;
res = pD3D->CreateDevice(D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, getGameHWND(), D3DCREATE_SOFTWARE_VERTEXPROCESSING, &d3dpp, &device);
if (res != D3D_OK) {
char buff[100];
sprintf_s(buff, 100, "%X", res);
MessageBoxA(0, buff, "ERROR", 0);
sprintf_s(buff, 100, "%p", (void*)getGameHWND());
MessageBoxA(0, buff, "ERROR", 0);
return;
}
But this call fails every time I execute it. It returns the error code 0x80070057, which according to Google is E_INVALIDARG. But which argument is wrong here? I followed a tutorial that has the exact same code as mine.
I also checked if getGameHWND() returns 0, which is not the case.
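A non-zero handle can still be stale or belong to a window that has since been destroyed, so it is also worth validating it with the Win32 IsWindow call before CreateDevice. A minimal check, assuming the same getGameHWND() helper from above:
HWND hwnd = getGameHWND();
// IsWindow verifies the handle refers to an existing window.
if (!IsWindow(hwnd)) {
MessageBoxA(0, "getGameHWND() is not a live window handle", "ERROR", 0);
return;
}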
Here is my updated code:
LPDIRECT3D9 pD3D = Direct3DCreate9(D3D_SDK_VERSION);
if (pD3D == NULL) {
MessageBoxA(0, "ERROR", "pD3D", 0);
}
D3DPRESENT_PARAMETERS d3dpp;
ZeroMemory(&d3dpp, sizeof(d3dpp));
d3dpp.Windowed = TRUE;
d3dpp.SwapEffect = D3DSWAPEFFECT_DISCARD;
d3dpp.hDeviceWindow = getGameHWND();
d3dpp.BackBufferFormat = D3DFMT_UNKNOWN;
d3dpp.BackBufferCount = 1;
d3dpp.EnableAutoDepthStencil = 0;
d3dpp.BackBufferWidth = 0;
d3dpp.BackBufferHeight = 0;
d3dpp.FullScreen_RefreshRateInHz = 0;
d3dpp.PresentationInterval = D3DPRESENT_INTERVAL_IMMEDIATE;
d3dpp.MultiSampleType = D3DMULTISAMPLE_NONE;
d3dpp.MultiSampleQuality = 0;
LPDIRECT3DDEVICE9 device;
HRESULT res;
res = pD3D->CreateDevice(D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, getGameHWND(), D3DCREATE_SOFTWARE_VERTEXPROCESSING, &d3dpp, &device);
if (res != D3D_OK) {
res = pD3D->CreateDevice(D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, getGameHWND(), D3DCREATE_HARDWARE_VERTEXPROCESSING, &d3dpp, &device);
}
if (res != D3D_OK) {
res = pD3D->CreateDevice(D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, getGameHWND(), D3DCREATE_MIXED_VERTEXPROCESSING, &d3dpp, &device);
}
if (res != D3D_OK) {
char buff[100];
sprintf_s(buff, 100, "%X", res);
MessageBoxA(0, buff, "ERROR", 0);
sprintf_s(buff, 100, "%d", getPID());
MessageBoxA(0, buff, "ERROR", 0);
return;
}
But I still get the same error message.

Related

Why is there a video memory leak when using CopyResource to share a texture across ID3D11Devices?

I created two devices, A and B, where A is the producer and B is the consumer. The texture is shared between the two through D3D11_RESOURCE_MISC_SHARED_KEYEDMUTEX. Now I find that every time B calls CopyResource to copy the shared texture to a local texture, video memory keeps increasing. This problem has troubled me for a long time; I searched GitHub and other code sources but couldn't find a solution. My machine runs Windows 10 with a GeForce GTX 1060.
The most streamlined code to reproduce the problem is as follows:
#include <dxgi.h>
#include <d3d11.h>
#include <atlbase.h>
#pragma comment(lib, "D3D11.lib")
#define RETURN_ON_FAIL(hr) if(FAILED(hr)) return hr;
namespace SharedTextureLeakTest
{
const UINT32 texture_width = 320;
const UINT32 texture_height = 240;
ATL::CComPtr<ID3D11Device> device_1 = nullptr;
ATL::CComPtr<ID3D11DeviceContext> context_1 = nullptr;
ATL::CComPtr<ID3D11Device> device_2 = nullptr;
ATL::CComPtr<ID3D11DeviceContext> context_2 = nullptr;
ATL::CComPtr<ID3D11Texture2D> shared_texture_dev1 = nullptr; //owner by device1
ATL::CComPtr<ID3D11Texture2D> dst_texture_dev2 = nullptr; //owner by device2
HANDLE shared_handle = nullptr;
HRESULT CheckEnvironmentValid()
{
HRESULT hr = S_OK;
//Create Device1
if (!device_1)
{
D3D_FEATURE_LEVEL pFeatureLevel;
UINT Flags = D3D11_CREATE_DEVICE_DEBUG;
hr = ::D3D11CreateDevice(nullptr, D3D_DRIVER_TYPE_HARDWARE, nullptr, Flags, nullptr, 0, D3D11_SDK_VERSION, &device_1, &pFeatureLevel, &context_1);
if (FAILED(hr))
{
device_1 = nullptr;
return hr;
}
}
//Create Shared Texture
if (!shared_texture_dev1)
{
D3D11_TEXTURE2D_DESC desc;
desc.ArraySize = 1;
desc.BindFlags = D3D11_BIND_RENDER_TARGET | D3D11_BIND_SHADER_RESOURCE;
desc.CPUAccessFlags = 0;
desc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
desc.Width = texture_width;
desc.Height = texture_height;
desc.MipLevels = 1;
desc.Usage = D3D11_USAGE_DEFAULT;
desc.MiscFlags = D3D11_RESOURCE_MISC_SHARED_KEYEDMUTEX;
desc.SampleDesc.Count = 1;
desc.SampleDesc.Quality = 0;
hr = device_1->CreateTexture2D(&desc, nullptr, &shared_texture_dev1);
RETURN_ON_FAIL(hr);
}
//Get Handle of Shared Texture
if (!shared_handle)
{
ATL::CComPtr<IDXGIResource> dxgi_res;
hr = shared_texture_dev1->QueryInterface(__uuidof(IDXGIResource), (void**)&dxgi_res);
RETURN_ON_FAIL(hr);
hr = dxgi_res->GetSharedHandle(&shared_handle);
RETURN_ON_FAIL(hr);
}
//Create Device2
if (!device_2)
{
D3D_FEATURE_LEVEL pFeatureLevel;
UINT Flags = D3D11_CREATE_DEVICE_DEBUG;
hr = ::D3D11CreateDevice(nullptr, D3D_DRIVER_TYPE_HARDWARE, nullptr, Flags, nullptr, 0, D3D11_SDK_VERSION, &device_2, &pFeatureLevel, &context_2);
if (FAILED(hr))
{
device_2 = nullptr;
return hr;
}
}
//Create Dst Texture
if (!dst_texture_dev2)
{
D3D11_TEXTURE2D_DESC desc;
desc.ArraySize = 1;
desc.BindFlags = D3D11_BIND_RENDER_TARGET | D3D11_BIND_SHADER_RESOURCE;
desc.CPUAccessFlags = 0;
desc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
desc.Width = texture_width;
desc.Height = texture_height;
desc.MipLevels = 1;
desc.Usage = D3D11_USAGE_DEFAULT;
desc.MiscFlags = 0;
desc.SampleDesc.Count = 1;
desc.SampleDesc.Quality = 0;
hr = device_2->CreateTexture2D(&desc, nullptr, &dst_texture_dev2);
RETURN_ON_FAIL(hr);
}
return hr;
}
HRESULT LoopOnceTest()
{
HRESULT hr = CheckEnvironmentValid();
RETURN_ON_FAIL(hr);
//Open Shared Texture
ATL::CComPtr<ID3D11Texture2D> shared_texture_dev2;
hr = device_2->OpenSharedResource(shared_handle, IID_PPV_ARGS(&shared_texture_dev2));
RETURN_ON_FAIL(hr);
//Copy
do
{
CComPtr<IDXGIKeyedMutex> km;
hr = shared_texture_dev2->QueryInterface(__uuidof(IDXGIKeyedMutex), (void**)&km);
RETURN_ON_FAIL(hr);
INT64 release_key = 0;
hr = km->AcquireSync(0, 10);
RETURN_ON_FAIL(hr);
if (hr == WAIT_OBJECT_0)
{
context_2->CopyResource(dst_texture_dev2, shared_texture_dev2);
// context_2->Flush();
}
hr = km->ReleaseSync(0);
RETURN_ON_FAIL(hr);
} while (FALSE);
return hr;
}
}
void DoTest()
{
for(;;)
{
SharedTextureLeakTest::LoopOnceTest();
Sleep(1000);
}
}
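For reference, the commented-out context_2->Flush() above is the interesting knob: D3D11 destroys objects lazily, and because LoopOnceTest opens a fresh wrapper for the shared texture on every iteration, the released wrappers can sit on the deferred-destruction queue and look exactly like a video memory leak. Here is a sketch of the end of the loop body with an explicit release and flush, offered as an assumption to test rather than a confirmed fix:
// After the copy: drop the per-iteration COM wrappers, then flush so the
// driver actually processes the pending destroys instead of queueing them.
hr = km->ReleaseSync(0);
RETURN_ON_FAIL(hr);
km.Release(); // release the keyed-mutex interface
shared_texture_dev2.Release(); // release the wrapper opened this iteration
context_2->Flush(); // force deferred destruction to run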

Can't set TextureWic C++ DirectX

I am trying to use a texture embedded in a file; it's not a TGA.
Here is my code. I don't know where the logical error is.
ID3D11ShaderResourceView* texturePtr = nullptr;
ID3D11Texture2D* texture2D = nullptr;
ID3D11SamplerState* sampleStatePtr = nullptr;
hr = CoInitialize(NULL);
assert(SUCCEEDED(hr));
devConPtr->PSSetSamplers(0, 1, &sampleStatePtr);
devConPtr->PSSetShaderResources(0, 1, &texturePtr);
Texture2D tex : TEXTURE;
SamplerState mySampler : SAMPLER;
D3D11_SAMPLER_DESC sd;
ZeroMemory(&sd, sizeof(sd));
sd.Filter = D3D11_FILTER_MIN_MAG_MIP_LINEAR;
sd.AddressU = D3D11_TEXTURE_ADDRESS_WRAP;
sd.AddressV = D3D11_TEXTURE_ADDRESS_WRAP;
sd.AddressW = D3D11_TEXTURE_ADDRESS_WRAP;
sd.MipLODBias = 0.0f;
sd.MaxLOD = D3D11_FLOAT32_MAX;
sd.ComparisonFunc = D3D11_COMPARISON_NEVER;
hr = devPtr->CreateSamplerState(&sd, &sampleStatePtr);
DXGI_SAMPLE_DESC sample;
sample.Count = 1;
sample.Quality = 0;
D3D11_TEXTURE2D_DESC textureDesc;
textureDesc.Width = w;
textureDesc.Height = h;
textureDesc.MipLevels = 1;
textureDesc.ArraySize = 1;
textureDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
textureDesc.SampleDesc = sample;
textureDesc.Usage = D3D11_USAGE_DEFAULT;
textureDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE;
textureDesc.CPUAccessFlags = D3D11_CPU_ACCESS_READ;
textureDesc.MiscFlags = 0;
D3D11_SUBRESOURCE_DATA subData;
subData.pSysMem = data;
subData.SysMemPitch = sizeof(*data)*w;
HRESULT hr = devPtr->CreateTexture2D(
&textureDesc,
&subData,
&texture2D
);
assert(SUCCEEDED(hr));
//(ID3D11Texture2D*)texture2D;
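// NOTE: texturePtr is still nullptr at this point, so the QueryInterface
// call below is made through a null pointer.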
texturePtr->QueryInterface(IID_ID3D11Texture2D, (void**)&texture2D);
D3D11_SHADER_RESOURCE_VIEW_DESC shvD;
shvD.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
shvD.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
hr= devPtr->CreateShaderResourceView(texture2D, &shvD, &texturePtr);
assert(SUCCEEDED(hr));
hr = DirectX::CreateWICTextureFromMemory(devPtr, devConPtr, (const uint8_t*)&data, sizeof(*data), nullptr, &texturePtr, NULL);
assert(SUCCEEDED(hr));
unsigned int textureCount = mat->GetTextureCount(aiTextureType_UNKNOWN);
for (UINT j = 0; j < textureCount; j++)
{
aiString* path = nullptr;
mat->GetTexture(aiTextureType_UNKNOWN, j, path);
assert(path->length >= 2);
int index = atoi(&path->C_Str()[1]);
createTexture(scenePtr->mTextures[index]->mWidth, scenePtr-
>mTextures[index]->mHeight, (uint8_t*)scenePtr->mTextures[index]->pcData);
}
If you could spot the logical error or help with the debugging, that would be super helpful. I try to put a breakpoint at my HRESULTs, but I can't inspect the variables; the debugger does say that my resource view pointer is always nullptr despite me trying to use it.
I am using C++, DirectX, and the DirectX Tool Kit.
You are not initializing shvD completely. To fix it, initialize it like this:
D3D11_SHADER_RESOURCE_VIEW_DESC shvD;
shvD.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
shvD.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
shvD.Texture2D.MostDetailedMip = 0;
shvD.Texture2D.MipLevels = 1;
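Equivalently, you can zero-initialize the whole descriptor so any field you don't set explicitly is well defined:
D3D11_SHADER_RESOURCE_VIEW_DESC shvD = {}; // value-initializes every member to zero
shvD.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
shvD.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
shvD.Texture2D.MipLevels = 1; // MostDetailedMip stays 0 from the zero-init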

iOS FFMPEG high-level API

I have a video file with subtitles and I'd like to extract all the subtitles from it. From the terminal this is quite easy to do:
ffmpeg -i video.mkv -map 0:s:0 subs.srt
How can I execute this command on iOS?
Edit
Or maybe you know an easy way to get subtitles from a video file? The code below fails where av_guess_format returns NULL.
+ (void)readSubtitles:(NSString *)videoPath saveFolder:(NSString *)saveFolder {
AVFormatContext *pFormatCtx;
av_register_all();
avcodec_register_all();
avformat_network_init();
pFormatCtx = avformat_alloc_context();
if (avformat_open_input(&pFormatCtx, [videoPath UTF8String], NULL, NULL) != 0) {
return;
}
if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
return;
}
for (int i = 0; i < pFormatCtx->nb_streams; i++) {
if (pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_SUBTITLE) {
NSString *subPath = [saveFolder stringByAppendingPathComponent:[NSString stringWithFormat:@"sub_%d.srt", i]];
[self parseSubtitles:pFormatCtx streamIdx:i savePath:subPath];
}
}
}
+ (void)parseSubtitles:(AVFormatContext *)context streamIdx:(int)idx savePath:(NSString *)savePath {
const char *filename = [savePath UTF8String];
AVStream *avstream = context->streams[idx];
AVCodec *codec = avcodec_find_decoder( avstream->codec->codec_id );
int result = avcodec_open2( avstream->codec, codec, NULL );
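// NOTE: avstream->codec is the deprecated per-stream AVCodecContext; newer
// FFmpeg versions expect avcodec_alloc_context3 plus
// avcodec_parameters_to_context with avstream->codecpar instead.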
AVOutputFormat *outFormat = av_guess_format( NULL, "sub.mp4", NULL );
NSAssert(outFormat != NULL, @"Error finding format"); // !!! fails !!!
NSLog(@"Found output format: %@ (%@)", [NSString stringWithUTF8String:outFormat->name], [NSString stringWithUTF8String:outFormat->long_name]);
AVFormatContext *outFormatContext;
avformat_alloc_output_context2( &outFormatContext, NULL, NULL, filename );
AVCodec *encoder = avcodec_find_encoder( outFormat->subtitle_codec );
// checkResult( encoder != NULL, "Error finding encoder" );
NSLog(#"Found encoder: %#", [NSString stringWithUTF8String:encoder->name]);
AVStream *outStream = avformat_new_stream( outFormatContext, encoder );
AVCodecContext *c = outStream->codec;
result = avcodec_get_context_defaults3( c, encoder );
// outStream->codecpar
NSLog(#"outstream codec: %#", [NSString stringWithUTF8String:outStream->codec]);
NSLog(#"Opened stream %d, codec=%d", outStream->id, outStream->codec->codec_id);
result = avio_open( &outFormatContext->pb, filename, AVIO_FLAG_WRITE );
// checkResult( result == 0, "Error opening out file" );
// cerr << "out file opened correctly" << endl;
result = avformat_write_header( outFormatContext, NULL );
// checkResult(result==0, "Error writing header");
// cerr << "header wrote correctly" << endl;
result = 0;
AVPacket pkt;
av_init_packet( &pkt );
pkt.data = NULL;
pkt.size = 0;
// cerr << "srt codec id: " << AV_CODEC_ID_SUBRIP << endl;
while( av_read_frame( context, &pkt ) >= 0 )
{
if(pkt.stream_index != idx)
continue;
int gotSubtitle = 0;
AVSubtitle subtitle;
result = avcodec_decode_subtitle2( avstream->codec, &subtitle, &gotSubtitle, &pkt );
uint64_t bufferSize = 1024 * 1024 ;
uint8_t *buffer = (uint8_t *)malloc(bufferSize * sizeof(uint8_t));
memset(buffer, 0, bufferSize);
if( result >= 0 )
{
result = avcodec_encode_subtitle( outStream->codec, buffer, bufferSize, &subtitle );
// cerr << "Encode subtitle result: " << result << endl;
}
// cerr << "Encoded subtitle: " << buffer << endl;
free(buffer);
}
}
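Regarding the av_guess_format failure itself: the function can only return muxers that were compiled into this particular FFmpeg build, and guessing from the name "sub.mp4" asks for the MP4 muxer rather than an SRT one. A minimal probe to narrow it down, under the assumption (not verified) that the iOS build may simply lack the muxer:
// Ask for the SRT muxer by short name first, then fall back to guessing from
// a .srt file name. If both return NULL, this FFmpeg build was most likely
// configured without the srt/subrip muxer.
AVOutputFormat *outFormat = av_guess_format("srt", NULL, NULL);
if (outFormat == NULL)
outFormat = av_guess_format(NULL, "subs.srt", NULL);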

Cannot read data sent from mobile. Shows unrecognized text in Wireshark

I am capturing data sent from a mobile device to my local server. I am not getting the expected data on my computer, so I used Wireshark to capture the traffic and see what's going on. Wireshark captures the data successfully, but shows it as unrecognized text.
I have tried the following code to receive the data programmatically:
public class Server {
public static void main(String[] args) {
int port = 9090;
try (ServerSocket serverSocket = new ServerSocket(port)) {
System.out.println("Server is listening on port " + port);
while (true) {
Socket socket = serverSocket.accept();
System.out.println("New client connected");
InputStream inputStreamm = socket.getInputStream();
FileOutputStream mFileOuptutStream = new FileOutputStream("output.txt");
byte[] fileBytes = new byte[100];
int fByte = inputStreamm.read(fileBytes);
int i = 0;
while (fByte > 0) {
mFileOuptutStream.write(fileBytes, 0, fByte);
fByte = inputStreamm.read(fileBytes);
i++;
}
mFileOuptutStream.close();
System.out.println("New file created");
inputStreamm.close();
socket.close();
}
} catch (IOException ex) {
System.out.println("Server exception: " + ex.getMessage());
ex.printStackTrace();
}
}
}
I have used the following code example to send the data to the server:
myhttp.sendtohttp(getrecorddate(upload_id),String.format(Locale.ENGLISH, url, ip, port));
public byte[] getrecorddate(int ID) {
Log.i("getrecorddate", "11111");
GlobalRecord uploadRecord = upload_db.readDbFile(sqlite_cmd.RecordTable, ID);
String jpg_filename = String.format(Locale.ENGLISH, "well%05d.jpg", ID);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
int dataLen = 0;
int recordLen = 0;
byte strFormData[] = new byte[100];
for (int i = 0; i < 100; i++)
strFormData[i] = 0;
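// Fixed 28-byte binary header: 0x68 start byte, address bytes 0x99 0x88 0x77,
// then control/length fields; offsets 9, 10 and 13 are patched with the real
// payload length after the body has been assembled (see data[9..13] below).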
strFormData[0] = 0x68;
strFormData[1] = (byte) 0x99;
strFormData[2] = (byte) 0x88;
strFormData[3] = (byte) 0x77;
strFormData[4] = 0x78;
strFormData[5] = 0x01;
strFormData[6] = 0x09;
strFormData[7] = 0x68;
strFormData[8] = (byte) 0x84;
strFormData[11] = 0x50;
strFormData[12] = 0x00;
strFormData[21] = 0x01;
baos.write(strFormData, 0, 28);
byte[] jpgBuf = null;
int jpgLen = 0;
try {
String fileName = GlobalFinalValues.RECORD_PATH + "Num00/SubNum000/" + jpg_filename;
fileName = uploadRecord.String_Record[34];
FileInputStream fin = new FileInputStream(fileName);
jpgLen = fin.available();
jpgBuf = new byte[jpgLen];
fin.read(jpgBuf);
fin.close();
baos.write(jpgBuf, 0, jpgLen);
} catch (Exception e) {
jpgLen = 0;
e.printStackTrace();
}
if (uploadRecord != null) {
try {
// String jpg_filename="well00001.jpg";
byte[] jpg_name = jpg_filename.getBytes("UTF-8");
int jpg_filelen = jpg_name.length;
baos.write(jpg_filelen);
baos.write(jpg_name, 0, jpg_filelen);
baos.write(0xff);
recordLen = jpg_filelen + 2;
for (int i = 1; i < 15; i++) {
int m;
if (i > 7) m = i + 6;
else m = i;
if (i == 14) m = 35;
String str = uploadRecord.String_Record[m];
if (m == 35) {
str = uploadRecord.String_Record[35] + ";" + uploadRecord.String_Record[36];
}//GPS
if (str.length() > 50) str = str.substring(0, 50);
byte[] srtbyte = str.getBytes("UTF-8");
int len = srtbyte.length;
baos.write(len % 256);
if (len > 0)
baos.write(srtbyte, 0, len);
baos.write(0xff);
recordLen += len + 2;
}
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
}
}
baos.write(recordLen / 256);
baos.write(recordLen % 256);
baos.write(0xff);
recordLen += 3;
baos.write(0);
baos.write(0x16);
byte[] data = baos.toByteArray();
dataLen = data.length;
data[9] = (byte) ((dataLen - 13) % 65536 / 256);
data[10] = (byte) ((dataLen - 13) % 65536 % 256);
data[13] = (byte) ((dataLen - 13) / 65536);
//data[14] = (byte) ((dataLen - 13) / 65536 % 256);
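// One-byte additive checksum over the whole packet except its last two
// bytes, stored just before the trailing 0x16 end byte.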
data[dataLen - 2] = 0x00;
for (int i = 0; i < (dataLen - 2); i++)
data[dataLen - 2] += data[i];
return data;
}
public boolean sendtohttp(byte strFormData[], String url) {
boolean back = false;
HttpURLConnection urlConnection = null;
try {
int dataLen = strFormData.length;
urlConnection = (HttpURLConnection) new URL(url).openConnection();
urlConnection.setDoInput(true);
urlConnection.setDoOutput(true);
urlConnection.setFixedLengthStreamingMode(dataLen);
urlConnection.setRequestMethod("POST");
urlConnection.setRequestProperty("Content-Type",
("application/xml; charset=utf-8").replaceAll("\\s", ""));
OutputStream out = urlConnection.getOutputStream();
out.write(strFormData, 0, dataLen);
out.close();
int responseCode = urlConnection.getResponseCode();
InputStream in = null;
if (responseCode == 200) {
in = new BufferedInputStream(urlConnection.getInputStream());
} else {
in = new BufferedInputStream(urlConnection.getErrorStream());
}
byte readbuf[] = new byte[100];
int readlen = in.read(readbuf, 0, 100);
String str = new String(readbuf, 0, readlen, "UTF-8");
if (str.equals("succ")) {
back = true;
}
} catch (MalformedURLException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} catch (Exception e) {
e.printStackTrace();
} finally {
if (urlConnection != null) {
urlConnection.disconnect();
}
}
return back;
}
I am new to this technique and would really appreciate it if you could guide me.
I am expecting the data to be text, but I am getting the following output:
POST /alky_gps_server/alky/send9_1 HTTP/1.1
Content-Type: application/xml;charset=utf-8
Content-Length: 25925
User-Agent: Dalvik/2.1.0 (Linux; U; Android 5.1.1; AOSP on Drone Build/LMY48G)
Host: 192.168.0.75:9090
Connection: Keep-Alive
Accept-Encoding: gzip
h��wx h�e8P ���� JFIF �� C
[the remainder of the HTTP body is raw binary: the 0x68-prefixed header built by getrecorddate() followed by JFIF/JPEG image data, omitted here]

AudioConverterFillComplexBuffer returns 1852797029 (kAudioCodecIllegalOperationError)

I'm trying to decode AAC data with AudioToolbox in an iOS environment. I consulted this thread.
The AudioConverterNew call succeeds, but AudioConverterFillComplexBuffer returns error code 1852797029, kAudioCodecIllegalOperationError.
I'm trying to find my mistakes. Thank you for reading.
- (void)initAudioToolBox {
HCAudioAsset* asset = [self.provider getAudioAsset];
AudioStreamBasicDescription outFormat;
memset(&outFormat, 0, sizeof(outFormat));
outFormat.mSampleRate = 44100;
outFormat.mFormatID = kAudioFormatLinearPCM;
outFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger;
outFormat.mBytesPerPacket = 2;
outFormat.mFramesPerPacket = 1;
outFormat.mBytesPerFrame = 2;
outFormat.mChannelsPerFrame = 1;
outFormat.mBitsPerChannel = 16;
outFormat.mReserved = 0;
AudioStreamBasicDescription inFormat;
memset(&inFormat, 0, sizeof(inFormat));
inFormat.mSampleRate = [asset sampleRate];
inFormat.mFormatID = kAudioFormatMPEG4AAC;
inFormat.mFormatFlags = kMPEG4Object_AAC_LC;
inFormat.mBytesPerPacket = 0;
inFormat.mFramesPerPacket = (UInt32)[asset framePerPacket];
inFormat.mBytesPerFrame = 0;
inFormat.mChannelsPerFrame = (UInt32)[asset channelCount];
inFormat.mBitsPerChannel = 0;
inFormat.mReserved = 0;
OSStatus status = AudioConverterNew(&inFormat, &outFormat, &audioConverter);
if (status != noErr) {
NSLog(#"setup converter error, status: %i\n", (int)status);
} else {
NSLog(#"Audio Converter is initialized successfully.");
}
}
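// One setup step that may be missing here (an assumption, not verified for
// this stream): decoding AAC usually requires handing the converter the
// codec's magic cookie (the container's AudioSpecificConfig bytes) before the
// first AudioConverterFillComplexBuffer call. Minimal sketch; 'cookie' and
// 'cookieSize' are hypothetical inputs extracted from the container.
static OSStatus setAACMagicCookie(AudioConverterRef converter,
const void *cookie, UInt32 cookieSize)
{
return AudioConverterSetProperty(converter,
kAudioConverterDecompressionMagicCookie,
cookieSize, cookie);
}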
typedef struct _PassthroughUserData PassthroughUserData;
struct _PassthroughUserData {
UInt32 mChannels;
UInt32 mDataSize;
const void* mData;
AudioStreamPacketDescription mPacket;
};
int inInputDataProc(AudioConverterRef aAudioConverter,
UInt32* aNumDataPackets,
AudioBufferList* aData,
AudioStreamPacketDescription** aPacketDesc,
void* aUserData)
{
PassthroughUserData* userData = (PassthroughUserData*)aUserData;
if (!userData->mDataSize) {
*aNumDataPackets = 0;
NSLog(#"inInputDataProc returns -1");
return -1;
}
if (aPacketDesc) {
userData->mPacket.mStartOffset = 0;
userData->mPacket.mVariableFramesInPacket = 0;
userData->mPacket.mDataByteSize = userData->mDataSize;
NSLog(#"mDataSize:%d", userData->mDataSize);
*aPacketDesc = &userData->mPacket;
}
aData->mBuffers[0].mNumberChannels = userData->mChannels;
aData->mBuffers[0].mDataByteSize = userData->mDataSize;
aData->mBuffers[0].mData = (void*)(userData->mData);
NSLog(#"buffer[0] - channel:%d, byte size:%u, data:%p",
aData->mBuffers[0].mNumberChannels,
(unsigned int)aData->mBuffers[0].mDataByteSize,
aData->mBuffers[0].mData);
// No more data to provide following this run.
userData->mDataSize = 0;
NSLog(#"inInputDataProc returns 0");
return 0;
}
- (void)decodeAudioFrame:(NSData *)frame withPts:(NSInteger)pts {
if(!audioConverter){
[self initAudioToolBox];
}
HCAudioAsset* asset = [self.provider getAudioAsset];
PassthroughUserData userData = { (UInt32)[asset channelCount], (UInt32)frame.length, [frame bytes]};
NSMutableData *decodedData = [NSMutableData new];
const uint32_t MAX_AUDIO_FRAMES = 128;
const uint32_t maxDecodedSamples = MAX_AUDIO_FRAMES * 1;
do {
uint8_t *buffer = (uint8_t *)malloc(maxDecodedSamples * sizeof(short int));
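// NOTE: 'buffer' is malloc'd on every pass through this loop and never
// freed, so each iteration leaks maxDecodedSamples * sizeof(short int) bytes.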
AudioBufferList decBuffer;
memset(&decBuffer, 0, sizeof(AudioBufferList));
decBuffer.mNumberBuffers = 1;
decBuffer.mBuffers[0].mNumberChannels = 2;
decBuffer.mBuffers[0].mDataByteSize = maxDecodedSamples * sizeof(short int);
decBuffer.mBuffers[0].mData = buffer;
UInt32 numFrames = MAX_AUDIO_FRAMES;
AudioStreamPacketDescription outPacketDescription;
memset(&outPacketDescription, 0, sizeof(AudioStreamPacketDescription));
outPacketDescription.mDataByteSize = MAX_AUDIO_FRAMES;
outPacketDescription.mStartOffset = 0;
outPacketDescription.mVariableFramesInPacket = 0;
NSLog(#"frame - size:%lu, buffer:%p", [frame length], [frame bytes]);
OSStatus rv = AudioConverterFillComplexBuffer(audioConverter,
inInputDataProc,
&userData,
&numFrames,
&decBuffer,
&outPacketDescription);
NSLog(#"num frames:%d, dec buffer [0] channels:%d, dec buffer [0] data byte size:%d, rv:%d",
numFrames, decBuffer.mBuffers[0].mNumberChannels,
decBuffer.mBuffers[0].mDataByteSize, (int)rv);
if (rv && rv != noErr) {
NSLog(#"Error decoding audio stream: %d\n", rv);
break;
}
if (numFrames) {
[decodedData appendBytes:decBuffer.mBuffers[0].mData length:decBuffer.mBuffers[0].mDataByteSize];
}
} while (true);
//void *pData = (void *)[decodedData bytes];
//audioRenderer->Render(&pData, decodedData.length, pts);
}
