OpenCV camera megapixels on Android - opencv

I am new to OpenCV. I use JavaCameraView,
which runs at a camera resolution of 1280x720.
How can I increase the resolution to 5 megapixels?
Please help me :)

You need to pick a resolution that is allowed by your camera. Call the following code in your onCameraViewStarted function and set listNum so that it corresponds to the resolution you desire:
Camera.Parameters params = mCamera.getParameters();
List<Camera.Size> resList = mCamera.getParameters().getSupportedPictureSizes();
int listNum = 1; // 0 is the maximum resolution
int width = resList.get(listNum).width;
int height = resList.get(listNum).height;
params.setPictureSize(width, height);
mCamera.setParameters(params);
Here mCamera is the underlying android.hardware.Camera of your org.opencv.android.JavaCameraView (it is a protected field, so access it from a subclass).
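If you are unsure which index to use, a minimal sketch like this (assuming the same mCamera reference as above) logs every supported picture size so you can pick listNum:
// Log each supported picture size together with its index and megapixel count.
List<Camera.Size> sizes = mCamera.getParameters().getSupportedPictureSizes();
for (int i = 0; i < sizes.size(); i++) {
Camera.Size s = sizes.get(i);
Log.d("CameraSizes", i + ": " + s.width + "x" + s.height + " (" + (s.width * s.height / 1000000f) + " MP)");
}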
Brian

It seems that the OpenCV camera (JavaCameraView) defaults to a maximum resolution of 1280x720.
We can decrease the resolution using one of the following:
cameraView.setMaxFrameSize(320, 280);
cameraView.setMaxFrameSize(480, 320);
cameraView.setMaxFrameSize(640, 480);
cameraView.setMaxFrameSize(800, 600);
cameraView.setMaxFrameSize(1280, 720);
Now, the question is how to increase the resolution beyond that, if the phone supports more. For that we have to make our own customized JavaCameraView (let's call it MyCameraView).
Just copy and paste JavaCameraView into MyCameraView and adjust it to your needs. I made the following changes in my case:
package com.example.opencvcamera;
import org.opencv.android.CameraBridgeViewBase;
import android.hardware.Camera;
import java.util.List;
import android.content.Context;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.os.Build;
import android.util.AttributeSet;
import android.util.Log;
import android.view.ViewGroup.LayoutParams;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
/**
* Created by Kumar.Vaibhav on 19-04-2017.
*/
public class MyCameraView extends CameraBridgeViewBase implements Camera.PreviewCallback {
private static final int MAGIC_TEXTURE_ID = 10;
private static final String TAG = "MyCameraView";
private byte mBuffer[];
private Mat[] mFrameChain;
private int mChainIdx = 0;
private Thread mThread;
private boolean mStopThread;
protected Camera mCamera;
protected MyCameraView.JavaCameraFrame[] mCameraFrame;
private SurfaceTexture mSurfaceTexture;
public static class JavaCameraSizeAccessor implements ListItemAccessor {
@Override
public int getWidth(Object obj) {
Camera.Size size = (Camera.Size) obj;
return size.width;
}
@Override
public int getHeight(Object obj) {
Camera.Size size = (Camera.Size) obj;
return size.height;
}
}
public MyCameraView(Context context, int cameraId) {
super(context, cameraId);
}
public MyCameraView(Context context, AttributeSet attrs) {
super(context, attrs);
}
protected boolean initializeCamera(int width, int height) {
Log.d(TAG, "Initialize java camera");
boolean result = true;
synchronized (this) {
mCamera = null;
if (mCameraIndex == CAMERA_ID_ANY) {
Log.d(TAG, "Trying to open camera with old open()");
try {
mCamera = Camera.open();
}
catch (Exception e){
Log.e(TAG, "Camera is not available (in use or does not exist): " + e.getLocalizedMessage());
}
if(mCamera == null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
boolean connected = false;
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
Log.d(TAG, "Trying to open camera with new open(" + Integer.valueOf(camIdx) + ")");
try {
mCamera = Camera.open(camIdx);
connected = true;
} catch (RuntimeException e) {
Log.e(TAG, "Camera #" + camIdx + "failed to open: " + e.getLocalizedMessage());
}
if (connected) break;
}
}
} else {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
int localCameraIndex = mCameraIndex;
if (mCameraIndex == CAMERA_ID_BACK) {
Log.i(TAG, "Trying to open back camera");
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
Camera.getCameraInfo( camIdx, cameraInfo );
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
localCameraIndex = camIdx;
break;
}
}
} else if (mCameraIndex == CAMERA_ID_FRONT) {
Log.i(TAG, "Trying to open front camera");
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
Camera.getCameraInfo( camIdx, cameraInfo );
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
localCameraIndex = camIdx;
break;
}
}
}
if (localCameraIndex == CAMERA_ID_BACK) {
Log.e(TAG, "Back camera not found!");
} else if (localCameraIndex == CAMERA_ID_FRONT) {
Log.e(TAG, "Front camera not found!");
} else {
Log.d(TAG, "Trying to open camera with new open(" + Integer.valueOf(localCameraIndex) + ")");
try {
mCamera = Camera.open(localCameraIndex);
} catch (RuntimeException e) {
Log.e(TAG, "Camera #" + localCameraIndex + "failed to open: " + e.getLocalizedMessage());
}
}
}
}
if (mCamera == null)
return false;
/* Now set camera parameters */
try {
Camera.Parameters params = mCamera.getParameters();
Log.d(TAG, "getSupportedPreviewSizes()");
List<android.hardware.Camera.Size> sizes = params.getSupportedPreviewSizes();
if (sizes != null) {
/* Select the largest supported preview size instead of clamping to the surface */
//Size frameSize = calculateCameraFrameSize(sizes, new MyCameraView.JavaCameraSizeAccessor(), width, height);
// Ordering of getSupportedPreviewSizes() differs between devices, so check both ends of the list
Camera.Size mSizePicture = sizes.get(0);
if (mSizePicture.width < 3264 && mSizePicture.height < 2448)
{
int cameraSize = sizes.size();
mSizePicture = sizes.get(cameraSize - 1);
}
params.setPreviewSize(mSizePicture.width, mSizePicture.height); // apply the chosen preview size
/*params.setPreviewFormat(ImageFormat.NV21);
Log.d(TAG, "Set preview size to " + Integer.valueOf((int)frameSize.width) + "x" + Integer.valueOf((int)frameSize.height));
params.setPreviewSize((int)frameSize.width, (int)frameSize.height);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH && !android.os.Build.MODEL.equals("GT-I9100"))
params.setRecordingHint(true);*/
List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes != null && FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
mCamera.setParameters(params);
params = mCamera.getParameters();
mFrameWidth = params.getPreviewSize().width;
mFrameHeight = params.getPreviewSize().height;
if ((getLayoutParams().width == LayoutParams.MATCH_PARENT) && (getLayoutParams().height == LayoutParams.MATCH_PARENT))
mScale = Math.min(((float)height)/mFrameHeight, ((float)width)/mFrameWidth);
else
mScale = 0;
if (mFpsMeter != null) {
mFpsMeter.setResolution(mFrameWidth, mFrameHeight);
}
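// Allocate one callback buffer sized for the preview format (NV21 is 12 bits per pixel, i.e. width * height * 3/2 bytes)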
int size = mFrameWidth * mFrameHeight;
size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
mBuffer = new byte[size];
mCamera.addCallbackBuffer(mBuffer);
mCamera.setPreviewCallbackWithBuffer(this);
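// Two YUV Mats (height * 3/2 rows: the Y plane plus the interleaved VU half-plane) are swapped between the camera callback and the worker thread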
mFrameChain = new Mat[2];
mFrameChain[0] = new Mat(mFrameHeight + (mFrameHeight/2), mFrameWidth, CvType.CV_8UC1);
mFrameChain[1] = new Mat(mFrameHeight + (mFrameHeight/2), mFrameWidth, CvType.CV_8UC1);
AllocateCache();
mCameraFrame = new MyCameraView.JavaCameraFrame[2];
mCameraFrame[0] = new MyCameraView.JavaCameraFrame(mFrameChain[0], mFrameWidth, mFrameHeight);
mCameraFrame[1] = new MyCameraView.JavaCameraFrame(mFrameChain[1], mFrameWidth, mFrameHeight);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
mSurfaceTexture = new SurfaceTexture(MAGIC_TEXTURE_ID);
mCamera.setPreviewTexture(mSurfaceTexture);
} else
mCamera.setPreviewDisplay(null);
/* Finally we are ready to start the preview */
Log.d(TAG, "startPreview");
mCamera.startPreview();
}
else
result = false;
} catch (Exception e) {
result = false;
e.printStackTrace();
}
}
return result;
}
protected void releaseCamera() {
synchronized (this) {
if (mCamera != null) {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.release();
}
mCamera = null;
if (mFrameChain != null) {
mFrameChain[0].release();
mFrameChain[1].release();
}
if (mCameraFrame != null) {
mCameraFrame[0].release();
mCameraFrame[1].release();
}
}
}
private boolean mCameraFrameReady = false;
@Override
protected boolean connectCamera(int width, int height) {
/* 1. We need to instantiate camera
* 2. We need to start thread which will be getting frames
*/
/* First step - initialize camera connection */
Log.d(TAG, "Connecting to camera");
if (!initializeCamera(width, height))
return false;
mCameraFrameReady = false;
/* now we can start update thread */
Log.d(TAG, "Starting processing thread");
mStopThread = false;
mThread = new Thread(new CameraWorker());
mThread.start();
return true;
}
@Override
protected void disconnectCamera() {
/* 1. We need to stop the thread which is updating the frames
* 2. Stop camera and release it
*/
Log.d(TAG, "Disconnecting from camera");
try {
mStopThread = true;
Log.d(TAG, "Notify thread");
synchronized (this) {
this.notify();
}
Log.d(TAG, "Wating for thread");
if (mThread != null)
mThread.join();
} catch (InterruptedException e) {
e.printStackTrace();
} finally {
mThread = null;
}
/* Now release camera */
releaseCamera();
mCameraFrameReady = false;
}
@Override
public void onPreviewFrame(byte[] frame, Camera arg1) {
Log.d(TAG, "Preview Frame received. Frame size: " + frame.length);
synchronized (this) {
mFrameChain[mChainIdx].put(0, 0, frame);
mCameraFrameReady = true;
this.notify();
}
if (mCamera != null)
mCamera.addCallbackBuffer(mBuffer);
}
private class JavaCameraFrame implements CvCameraViewFrame {
@Override
public Mat gray() {
return mYuvFrameData.submat(0, mHeight, 0, mWidth);
}
@Override
public Mat rgba() {
Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGBA_NV21, 4);
return mRgba;
}
public JavaCameraFrame(Mat Yuv420sp, int width, int height) {
super();
mWidth = width;
mHeight = height;
mYuvFrameData = Yuv420sp;
mRgba = new Mat();
}
public void release() {
mRgba.release();
}
private Mat mYuvFrameData;
private Mat mRgba;
private int mWidth;
private int mHeight;
};
private class CameraWorker implements Runnable {
@Override
public void run() {
do {
boolean hasFrame = false;
synchronized (MyCameraView.this) {
try {
while (!mCameraFrameReady && !mStopThread) {
MyCameraView.this.wait();
}
} catch (InterruptedException e) {
e.printStackTrace();
}
if (mCameraFrameReady)
{
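// Swap buffers: the camera callback keeps writing into mFrameChain[mChainIdx] while we deliver the other one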
mChainIdx = 1 - mChainIdx;
mCameraFrameReady = false;
hasFrame = true;
}
}
if (!mStopThread && hasFrame) {
if (!mFrameChain[1 - mChainIdx].empty())
deliverAndDrawFrame(mCameraFrame[1 - mChainIdx]);
}
} while (!mStopThread);
Log.d(TAG, "Finish processing thread");
}
}
}
Also change activity_main.xml as follows:
<com.example.opencvcamera.MyCameraView
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:id="#+id/java_camera_view"
android:visibility="visible"/>
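To wire the new view up, a minimal sketch of the Activity might look like this (assuming the usual OpenCV loader setup; MainActivity and the listener bodies are placeholders for your own code):
import android.app.Activity;
import android.os.Bundle;
import android.view.SurfaceView;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Mat;
// Hypothetical Activity wiring; OpenCV must be initialized before enableView().
public class MainActivity extends Activity implements CameraBridgeViewBase.CvCameraViewListener2 {
private MyCameraView cameraView;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
cameraView = (MyCameraView) findViewById(R.id.java_camera_view);
cameraView.setVisibility(SurfaceView.VISIBLE);
cameraView.setCvCameraViewListener(this);
}
@Override
public void onResume() {
super.onResume();
if (OpenCVLoader.initDebug()) {
cameraView.enableView(); // triggers connectCamera(), which runs our initializeCamera()
}
}
public void onCameraViewStarted(int width, int height) { }
public void onCameraViewStopped() { }
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
return inputFrame.rgba(); // frame at the resolution chosen in initializeCamera()
}
}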

Related

AWS transcribe Unable to load credentials from any of the providers in the chain AwsCredentialsProviderChain

I am running a Java program that uses AWS TranscribeStreaming. I created (from the AWS console) and downloaded AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY, then set the environment variables.
When I run the program I am getting error message:
Unable to load credentials from any of the providers in the chain AwsCredentialsProviderChain.
I don't know why, but this error also occurred while using the Google SDK for speech recognition.
Here is the code I am trying to run:
public class TranscribeStreamingDemoApp {
private static final Region REGION = Region.US_WEST_2;
private static TranscribeStreamingAsyncClient client;
public static void main(String args[]) throws URISyntaxException, ExecutionException, InterruptedException, LineUnavailableException {
client = TranscribeStreamingAsyncClient.builder()
.credentialsProvider(getCredentials())
.region(REGION)
.build();
CompletableFuture<Void> result = client.startStreamTranscription(getRequest(16_000),
new AudioStreamPublisher(getStreamFromMic()),
getResponseHandler());
result.get();
client.close();
}
private static InputStream getStreamFromMic() throws LineUnavailableException {
// Signed PCM AudioFormat with 16kHz, 16 bit sample size, mono
int sampleRate = 16000;
AudioFormat format = new AudioFormat(sampleRate, 16, 1, true, false);
DataLine.Info info = new DataLine.Info(TargetDataLine.class, format);
if (!AudioSystem.isLineSupported(info)) {
System.out.println("Line not supported");
System.exit(0);
}
TargetDataLine line = (TargetDataLine) AudioSystem.getLine(info);
line.open(format);
line.start();
InputStream audioStream = new AudioInputStream(line);
return audioStream;
}
private static AwsCredentialsProvider getCredentials() {
return DefaultCredentialsProvider.create();
}
private static StartStreamTranscriptionRequest getRequest(Integer mediaSampleRateHertz) {
return StartStreamTranscriptionRequest.builder()
.languageCode(LanguageCode.FR_FR.toString())
.mediaEncoding(MediaEncoding.PCM)
.mediaSampleRateHertz(mediaSampleRateHertz)
.build();
}
private static StartStreamTranscriptionResponseHandler getResponseHandler() {
return StartStreamTranscriptionResponseHandler.builder()
.onResponse(r -> {
System.out.println("Received Initial response");
})
.onError(e -> {
System.out.println(e.getMessage());
StringWriter sw = new StringWriter();
e.printStackTrace(new PrintWriter(sw));
System.out.println("Error Occurred: " + sw.toString());
})
.onComplete(() -> {
System.out.println("=== All records stream successfully ===");
})
.subscriber(event -> {
List<software.amazon.awssdk.services.transcribestreaming.model.Result> results = ((TranscriptEvent) event).transcript().results();
if (results.size() > 0) {
if (!results.get(0).alternatives().get(0).transcript().isEmpty()) {
System.out.println(results.get(0).alternatives().get(0).transcript());
}
}
})
.build();
}
private InputStream getStreamFromFile(String audioFileName) {
try {
File inputFile = new File(getClass().getClassLoader().getResource(audioFileName).getFile());
InputStream audioStream = new FileInputStream(inputFile);
return audioStream;
} catch (FileNotFoundException e) {
throw new RuntimeException(e);
}
}
private static class AudioStreamPublisher implements Publisher<AudioStream> {
private final InputStream inputStream;
private static Subscription currentSubscription;
private AudioStreamPublisher(InputStream inputStream) {
this.inputStream = inputStream;
}
@Override
public void subscribe(Subscriber<? super AudioStream> s) {
if (this.currentSubscription == null) {
this.currentSubscription = new SubscriptionImpl(s, inputStream);
} else {
this.currentSubscription.cancel();
this.currentSubscription = new SubscriptionImpl(s, inputStream);
}
s.onSubscribe(currentSubscription);
}
}
public static class SubscriptionImpl implements Subscription {
private static final int CHUNK_SIZE_IN_BYTES = 1024 * 1;
private final Subscriber<? super AudioStream> subscriber;
private final InputStream inputStream;
private ExecutorService executor = Executors.newFixedThreadPool(1);
private AtomicLong demand = new AtomicLong(0);
SubscriptionImpl(Subscriber<? super AudioStream> s, InputStream inputStream) {
this.subscriber = s;
this.inputStream = inputStream;
}
@Override
public void request(long n) {
if (n <= 0) {
subscriber.onError(new IllegalArgumentException("Demand must be positive"));
}
demand.getAndAdd(n);
executor.submit(() -> {
try {
do {
ByteBuffer audioBuffer = getNextEvent();
if (audioBuffer.remaining() > 0) {
AudioEvent audioEvent = audioEventFromBuffer(audioBuffer);
subscriber.onNext(audioEvent);
} else {
subscriber.onComplete();
break;
}
} while (demand.decrementAndGet() > 0);
} catch (Exception e) {
subscriber.onError(e);
}
});
}
@Override
public void cancel() {
executor.shutdown();
}
private ByteBuffer getNextEvent() {
ByteBuffer audioBuffer = null;
byte[] audioBytes = new byte[CHUNK_SIZE_IN_BYTES];
int len = 0;
try {
len = inputStream.read(audioBytes);
if (len <= 0) {
audioBuffer = ByteBuffer.allocate(0);
} else {
audioBuffer = ByteBuffer.wrap(audioBytes, 0, len);
}
} catch (IOException e) {
throw new UncheckedIOException(e);
}
return audioBuffer;
}
private AudioEvent audioEventFromBuffer(ByteBuffer bb) {
return AudioEvent.builder()
.audioChunk(SdkBytes.fromByteBuffer(bb))
.build();
}
}
}
Finally, I solved the problem. The documentation specifies that the AWS credentials provider chain looks for credentials in this order:
1. Java system properties - aws.accessKeyId and aws.secretAccessKey
2. Environment variables - AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY
3. Credential profiles file at the default location (~/.aws/credentials) shared by all AWS SDKs and the AWS CLI
Since setting the environment variables didn't work, I opted to set the credentials as Java system properties, and it works!
Doc ref: https://sdk.amazonaws.com/java/api/latest/software/amazon/awssdk/auth/credentials/DefaultCredentialsProvider.html
System.setProperty("aws.accessKeyId", "**************");
System.setProperty("aws.secretAccessKey", "**************");

How to add a text input field in cocos2d Android (cocos sharp)?

I am trying to get CCTextFieldTTF to work in cocos sharp with Xamarin for an Android application, but can't get hold of this for the life of me. I could not find any documentation on the cocos sharp API either. Does anyone know how to use this class to render a text area in an Android application? I am asking because in a Xamarin forum I saw someone say that this does not work in the API yet. Any help would be highly appreciated. Thanks in advance.
I have this working on Android. Here is the sample code.
First, create a node to track the text field:
CCTextField trackNode;
protected CCTextField TrackNode
{
get { return trackNode; }
set
{
if (value == null)
{
if (trackNode != null)
{
DetachListeners();
trackNode = value;
return;
}
}
if (trackNode != value)
{
DetachListeners();
}
trackNode = value;
AttachListeners();
}
}
//create the actual input textfield
var textField = new CCTextField(string.Empty, "Somefont", 25, CCLabelFormat.SystemFont);
textField.IsColorModifiedByOpacity = false;
textField.Color = new CCColor3B(Theme.TextWhite);
textField.BeginEditing += OnBeginEditing;
textField.EndEditing += OnEndEditing;
textField.Position = new CCPoint (0, 0);
textField.Dimensions = new CCSize(VisibleBoundsWorldspace.Size.Width - (160 * sx), vPadding);
textField.PlaceHolderTextColor = Theme.TextYellow;
textField.PlaceHolderText = Constants.TextHighScoreEnterNamePlaceholder;
textField.AutoEdit = true;
textField.HorizontalAlignment = CCTextAlignment.Center;
textField.VerticalAlignment = CCVerticalTextAlignment.Center;
TrackNode = textField;
TrackNode.Position = pos;
AddChild(textField);
// Register Touch Event
var touchListener = new CCEventListenerTouchOneByOne();
touchListener.OnTouchBegan = OnTouchBegan;
touchListener.OnTouchEnded = OnTouchEnded;
AddEventListener(touchListener);
// The events
bool OnTouchBegan(CCTouch pTouch, CCEvent touchEvent)
{
beginPosition = pTouch.Location;
return true;
}
void OnTouchEnded(CCTouch pTouch, CCEvent touchEvent)
{
if (trackNode == null)
{
return;
}
var endPos = pTouch.Location;
if (trackNode.BoundingBox.ContainsPoint(beginPosition) && trackNode.BoundingBox.ContainsPoint(endPos))
{
OnClickTrackNode(true);
}
else
{
OnClickTrackNode(false);
}
}
public void OnClickTrackNode(bool bClicked)
{
if (bClicked && TrackNode != null)
{
if (!isKeyboardShown)
{
isKeyboardShown = true;
TrackNode.Edit();
}
}
else
{
if (TrackNode != null)
{
TrackNode.EndEdit();
}
}
}
private void OnEndEditing(object sender, ref string text, ref bool canceled)
{
//((CCNode)sender).RunAction(scrollDown);
Console.WriteLine("OnEndEditing text {0}", text);
}
private void OnBeginEditing(object sender, ref string text, ref bool canceled)
{
//((CCNode)sender).RunAction(scrollUp);
Console.WriteLine("OnBeginEditing text {0}", text);
}
void AttachListeners()
{
// Attach our listeners.
var imeImplementation = trackNode.TextFieldIMEImplementation;
imeImplementation.KeyboardDidHide += OnKeyboardDidHide;
imeImplementation.KeyboardDidShow += OnKeyboardDidShow;
imeImplementation.KeyboardWillHide += OnKeyboardWillHide;
imeImplementation.KeyboardWillShow += OnKeyboardWillShow;
imeImplementation.InsertText += InsertText;
}
void DetachListeners()
{
if (TrackNode != null)
{
// Remember to remove our event listeners.
var imeImplementation = TrackNode.TextFieldIMEImplementation;
imeImplementation.KeyboardDidHide -= OnKeyboardDidHide;
imeImplementation.KeyboardDidShow -= OnKeyboardDidShow;
imeImplementation.KeyboardWillHide -= OnKeyboardWillHide;
imeImplementation.KeyboardWillShow -= OnKeyboardWillShow;
imeImplementation.InsertText -= InsertText;
}
}
This is all taken from the link below, but it needed a bit of additional work to get it working on each platform.
https://github.com/mono/cocos-sharp-samples/tree/master/TextField

BlackBerry app works on Wi-Fi but not on packet data/GPRS

I have created a BlackBerry application which works perfectly fine on Wi-Fi. But when I turn off Wi-Fi and use the mobile network, I am not able to run the application; it shows "No Internet connection available".
I had written these lines:
if (DeviceInfo.isSimulator())
{
deviceinfo = deviceinfo.concat(";deviceside=true");
}
Here is my HTTP connection class:
import java.io.IOException;
import java.io.InputStream;
import javax.microedition.io.Connector;
import javax.microedition.io.HttpConnection;
import net.rim.device.api.system.DeviceInfo;
import net.rim.device.api.system.RadioInfo;
import net.rim.device.api.ui.UiApplication;
import net.rim.device.api.ui.component.Dialog;
public class HttpConn
{
public String jsonresponse (String url)
{
String response = null;
HttpConnection httpConnection = null;
InputStream inStream = null;
int code;
StringBuffer stringBuffer = new StringBuffer();
String deviceinfo=url;
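// BlackBerry chooses the transport from a suffix appended to the URL (e.g. ";interface=wifi" or ";deviceside=true")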
try
{
if (DeviceInfo.isSimulator())
{
deviceinfo = deviceinfo.concat(";deviceside=true");
}
/* else if (WLANInfo.getWLANState() == WLANInfo.WLAN_STATE_CONNECTED)
{
deviceinfo = ";interface=wifi";
}*/
else if ( (RadioInfo.getActiveWAFs() & RadioInfo.WAF_WLAN) != 0 && RadioInfo.getSignalLevel( RadioInfo.WAF_WLAN ) != RadioInfo.LEVEL_NO_COVERAGE )
{
deviceinfo = deviceinfo.concat(";interface=wifi");
}
// else if (WLANInfo.getWLANState() == WLANInfo.WLAN_STATE_CONNECTED)
// {
// deviceinfo = ";interface=wifi";//
// }
if ( (RadioInfo.getActiveWAFs() & RadioInfo.WAF_WLAN) != 0 && RadioInfo.getSignalLevel( RadioInfo.WAF_WLAN ) != RadioInfo.LEVEL_NO_COVERAGE )
{
deviceinfo = deviceinfo.concat(";interface=wifi");
}
// else if ((CoverageInfo.getCoverageStatus() & CoverageInfo.COVERAGE_DIRECT) == CoverageInfo.COVERAGE_DIRECT)
// {
// String carrierUid = getCarrierBIBSUid();
//
// if (carrierUid == null)
// {
// // Has carrier coverage, but not BIBS. So use the carrier's TCP
// // network
//
// deviceinfo = ";deviceside=true";
//
// }
// else
// {
// // otherwise, use the Uid to construct a valid carrier BIBS
// // request
//
// deviceinfo = ";deviceside=false;connectionUID="+carrierUid + ";ConnectionType=mds-public";
//
// }
// }
//
// // Check for an MDS connection instead (BlackBerry Enterprise Server)
// else if ((CoverageInfo.getCoverageStatus() & CoverageInfo.COVERAGE_MDS) == CoverageInfo.COVERAGE_MDS)
// {
//
// deviceinfo = ";deviceside=false";
//
// }
//
// // If there is no connection available abort to avoid hassling the user
// // unnecessarily.
// else if (CoverageInfo.getCoverageStatus() == CoverageInfo.COVERAGE_NONE)
// {
// deviceinfo = "none";
//
//
// }
// else
// {
// deviceinfo=";deviceside=true";
// }
//if(CoverageInfo.isCoverageSufficient(CoverageInfo.COVERAGE_DIRECT,RadioInfo.WAF_WLAN,false))
// httpConnection = (HttpConnection) Connector.open(url+";interface=wifi", Connector.READ);
httpConnection = (HttpConnection) Connector.open(deviceinfo, Connector.READ);
httpConnection.setRequestMethod(HttpConnection.GET);
code = httpConnection.getResponseCode();
if(code == HttpConnection.HTTP_OK)
{
inStream=httpConnection.openInputStream();
int c;
while((c=inStream.read())!=-1)
{
stringBuffer.append((char)c);
}
response=stringBuffer.toString();
System.out.println("Response Getting from Server is ================" + response);
}
// Is the carrier network the only way to connect?
// In theory, all bases are covered by now so this shouldn't be reachable. But hey, just in case ...
else
{
UiApplication.getUiApplication().invokeLater(new Runnable()
{
public void run()
{
Dialog.inform("Connection error");
}
});
}
// return deviceinfo;
}
catch (Exception e)
{
System.out.println("caught exception in jsonResponse method"+e.getMessage());
}
finally
{
// if (outputStream != null)
// {
// outputStream.close();
// }
if (inStream != null)
{
try
{
inStream.close();
}
catch (IOException e)
{
// TODO Auto-generated catch block
e.printStackTrace();
}
}
if (httpConnection != null )
{
try
{
httpConnection.close();
}
catch (IOException e)
{
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
return response;
}
// private synchronized static String getCarrierBIBSUid() {
// ServiceRecord[] records = ServiceBook.getSB().getRecords();
// int currentRecord;
//
// for (currentRecord = 0; currentRecord < records.length; currentRecord++) {
// if (records[currentRecord].getCid().toLowerCase().equals("ippp")) {
// if (records[currentRecord].getName().toLowerCase()
// .indexOf("bibs") >= 0) {
// return records[currentRecord].getUid();
// }
// }
// }
//
// return null;
// }
}
Actually, the solution was found. The underlying error was:
java.io.IOException: APN is not specified
So the question became: which APN do I have to set manually on the device for the mobile network?
http://m2m.icpdas.com/download/gtm-201_modem/manual/gprs_apn.pdf
http://www.faqspedia.com/list-of-all-indian-mobile-operators-access-point-names
Refer to these links. We have to set the APN manually on the device; you get the values from your network provider.
I hope someone gets a solution from this answer. Thanks a lot for your support.
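Alternatively, the APN can be passed on the connection string itself instead of being set in the device's TCP settings. A minimal sketch, assuming a carrier APN of "internet" (the URL, APN value, and tunnel credentials are placeholders; use your provider's values):
// Direct TCP over the carrier network with the APN supplied in the URL.
String url = "http://example.com/data"
+ ";deviceside=true"
+ ";apn=internet"
+ ";TunnelAuthUsername="
+ ";TunnelAuthPassword=";
HttpConnection httpConnection = (HttpConnection) Connector.open(url, Connector.READ);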

IllegalArgumentException when trying to convert bytes to Bitmap in BlackBerry

Here is my code, where I am getting the profile image bytes from the Twitter API:
new Thread() {
public void run() {
byte dbata[] = getBitmap(profle_pic_str);
if (dbata != null) {
EncodedImage bitmap_img = EncodedImage.createEncodedImage(dbata, 0, -1);
Bitmap image =bitmap_img.getBitmap();
final Bitmap profle_pic_bmp = image;
final Bitmap scld_bmp = new Bitmap(90, 100);
Application.getApplication().invokeLater(new Runnable() {
public void run() {
if (profle_pic_bmp != null) {
profle_pic_bmp.scaleInto(scld_bmp, Bitmap.FILTER_LANCZOS);
phot_profle.setBitmap(scld_bmp);
} else {
Dialog.alert("null");
}
}
});
// } else {
// Dialog.alert("bytes are null");
}
}
}.start();
Here is the method getBitmap(profle_pic_str), which returns the byte array of the image:
public byte[] getBitmap(String url) {
Bitmap bitmap = null;
String strg = HttpConnector.getConnectionString();
byte b[] = null;
try {
b = getXML(url + strg);
} catch (IOException ie) {
ie.printStackTrace();
}
return b;
}
The URL which I used is this one:
http://api.twitter.com/1/users/profile_image?screen_name=screen_nameof_user&size=bigger
public byte[] getXML(String url) throws IOException {
ContentConnection connection =
(ContentConnection) javax.microedition.io.Connector.open(url);
java.io.DataInputStream iStrm = connection.openDataInputStream();
java.io.ByteArrayOutputStream bStrm = null;
byte xmlData[] = null;
try {
// ContentConnection includes a length method
int length = (int) connection.getLength();
if (length != -1) {
xmlData = new byte[length];
// Read the png into an array
// iStrm.read(imageData);
iStrm.readFully(xmlData);
} else // Length not available...
{
bStrm = new java.io.ByteArrayOutputStream();
int ch;
while ((ch = iStrm.read()) != -1) bStrm.write(ch);
xmlData = bStrm.toByteArray();
bStrm.close();
}
} catch (Exception e) {
e.printStackTrace();
} finally {
// Clean up
if (iStrm != null) iStrm.close();
if (connection != null) connection.close();
if (bStrm != null) bStrm.close();
}
return xmlData;
}
When I am trying to convert the byte array to an EncodedImage,
EncodedImage bitmap_img = EncodedImage.createEncodedImage(dbata, 0, -1);
in this line of code I am getting an IllegalArgumentException.
The same code works for the Facebook profile image. I don't know why this code gives an error for Twitter. Please help me, friends.
Try this:
EncodedImage _encoded_img = EncodedImage.createEncodedImage(dbata, 0, dbata.length);
In your code,
EncodedImage bitmap_img = EncodedImage.createEncodedImage(dbata, 0, -1);
-1 is passed as the length of the data, which is not a valid length. Change -1 to dbata.length.
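As a defensive sketch (using dbata from the code above), it also helps to guard against empty responses before decoding, since a null or zero-length array will fail as well:
// Only attempt to decode when image bytes were actually received.
if (dbata != null && dbata.length > 0) {
EncodedImage encodedImg = EncodedImage.createEncodedImage(dbata, 0, dbata.length);
Bitmap profilePic = encodedImg.getBitmap();
} else {
Dialog.alert("No image data received"); // call this on the UI event thread
}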

Open Shazam from my BlackBerry application

I need code to open Shazam (or another installed application on the phone) if it is installed.
How can I check whether Shazam is installed on a phone, and if it is installed, how can I open it from my app? If anyone has an idea, please help. Thanks in advance.
public static ApplicationDescriptor getApplicationDescriptor(String appName) {
try {
int[] moduleHandles = CodeModuleManager.getModuleHandles();
if (moduleHandles != null && moduleHandles.length > 0) {
for (int i = 0; i < moduleHandles.length; i++) {
ApplicationDescriptor[] applicationDescriptors = CodeModuleManager.getApplicationDescriptors(moduleHandles[i]);
if (applicationDescriptors != null && applicationDescriptors.length > 0) {
for (int j = 0; j < applicationDescriptors.length; j++) {
if (applicationDescriptors[j].getModuleName().toLowerCase().equals(appName.toLowerCase())) {
return applicationDescriptors[j];
}
}
}
}
}
} catch (Exception e) {
System.out.println("error at getApplicationDescriptor" + e);
}
return null;
}
public static int runApplication(String appName) {
int processId = -1;
ApplicationDescriptor appDescriptor = getApplicationDescriptor(appName);
if (appDescriptor != null) {
//is not null Application installed
processId = ApplicationManager.getApplicationManager().getProcessId(appDescriptor);
if (processId == -1) {
// -1 if application has no process (i.e. is not running).
try {
processId = ApplicationManager.getApplicationManager().runApplication(appDescriptor);
} catch (ApplicationManagerException e) {
e.printStackTrace();
}
}
}
return processId;
}
Call runApplication like this:
int pid = -1;
if ((pid = runApplication(appName)) > -1) {
// application is running
System.out.println(appName + " running with process id " + pid);
}
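The appName passed to runApplication must match the module name the device reports. A minimal sketch to discover it lists every installed code module:
// Print the module name of every installed code module,
// so you can find the exact name Shazam is registered under.
int[] handles = CodeModuleManager.getModuleHandles();
for (int i = 0; i < handles.length; i++) {
System.out.println("Installed module: " + CodeModuleManager.getModuleName(handles[i]));
}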
