I want to track objects using the Camshift algorithm. I tried to correct the bugs that I found in the JavaCV translation of the original OpenCV Camshift file.
Here is my code:
package objectTracking;
import com.googlecode.javacv.CanvasFrame;
import com.googlecode.javacv.FrameGrabber;
import com.googlecode.javacv.OpenCVFrameGrabber;
import static com.googlecode.javacv.cpp.opencv_core.*;
import com.googlecode.javacv.cpp.opencv_core.CvBox2D;
import com.googlecode.javacv.cpp.opencv_core.CvPoint;
import com.googlecode.javacv.cpp.opencv_core.CvRect;
import com.googlecode.javacv.cpp.opencv_core.CvScalar;
import com.googlecode.javacv.cpp.opencv_core.IplImage;
import com.googlecode.javacv.cpp.opencv_core.IplImageArray;
import static com.googlecode.javacv.cpp.opencv_imgproc.*;
import com.googlecode.javacv.cpp.opencv_imgproc.CvConnectedComp;
import com.googlecode.javacv.cpp.opencv_imgproc.CvHistogram;
import static com.googlecode.javacv.cpp.opencv_video.*;
import com.sun.jna.ptr.FloatByReference;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
public class CamShifter implements MouseListener{
final static int MOUSE_PRESSED = 1;
final static int MOUSE_RELEASED = 2;
IplImage image, frame, hsv, hue, mask, backproject, histimg;
IplImageArray hueArray;
CvHistogram hist;
CanvasFrame histogram = new CanvasFrame("Histogram"), camshiftDemo = new CanvasFrame("CamshiftDemo");
boolean backproject_mode = false;
boolean select_object = false;
int track_object = 0;
boolean show_hist = true;
boolean paused = false;
CvPoint origin = new CvPoint();
CvRect selection = new CvRect();
OpenCVFrameGrabber grabber = new OpenCVFrameGrabber(0);
CvRect track_window = new CvRect();
CvBox2D track_box = new CvBox2D();
CvConnectedComp track_comp = new CvConnectedComp();
int hsize = 16;
float hranges[] = {0,180};
int[] hdims = {hsize};
float hranges_arr[][] = {hranges};
int vmin = 10, vmax = 256, smin = 30;
public static void main(String args[]) throws Exception {
CamShifter csh = new CamShifter();
csh.work(csh);
System.out.println("CamShiftDetector trial");
}
public CamShifter() throws FrameGrabber.Exception {
grabber.start();
camshiftDemo.getCanvas().addMouseListener(this);
}
public void onMouse(int event, int x, int y) {
if (select_object) {
//get selection
int selX, selY, selW, selH;
selX = Math.min(x, origin.x());
selY = Math.min(y, origin.y());
selW = selX + Math.abs(x - origin.x());
selH = selY + Math.abs(y - origin.y());
selection = cvRect(selX, selY, selW, selH);
System.out.println("Selection : \n("+selX+", "+selY+")\n("+selW+", "+selH+")");
//ensure that selection is enclosed within the image
selX = Math.max(selection.x(), 0);
selY = Math.max(selection.y(), 0);
selW = Math.min(selection.width(), image.width());
selH = Math.min(selection.height(), image.height());
selection = cvRect(selX, selY, selW - selX, selH - selY);
System.out.println("ensure that selection is enclosed within the image");
System.out.println("Selection : \n("+selX+", "+selY+")\n("+selW+", "+selH+")");
}
switch (event) {
case MOUSE_PRESSED:
origin = cvPoint(x, y);
selection = cvRect(x, y, 0, 0);
select_object = true;
break;
case MOUSE_RELEASED:
select_object = false;
if (selection.width() > 0 && selection.height() > 0) {
track_object = -1;
}
break;
}
}
CvScalar hsv2rgb(float hue) {
int[] rgb = new int[3];
int p, sector;
int[][] sector_data = {{0, 2, 1}, {1, 2, 0}, {1, 0, 2}, {2, 0, 1}, {2, 1, 0}, {0, 1, 2}};
hue *= 0.033333333333333333333333333333333f;
sector = (int) Math.floor(hue);
p = Math.round(255 * (hue - sector));
p = p ^ 1;
int temp = 0;
if ((sector & 1) == 1) {
temp = 255;
} else {
temp = 0;
}
p ^= temp;
rgb[sector_data[sector][0]] = 255;
rgb[sector_data[sector][1]] = 0;
rgb[sector_data[sector][2]] = p;
return cvScalar(rgb[2], rgb[1], rgb[0], 0);
}
String coffee;
public void work(CamShifter csh) throws Exception {
IplImage capture = grabber.grab();
System.out.println("paused = "+paused);
if (capture == null) {
System.out.println("Could not initialize capturing...\n");
return;
}
while (true) {
int bin_w;
if (!paused) {
frame = grabber.grab();
if (frame == null) {
return;
}
}
if (image == null) {
image = cvCreateImage(frame.cvSize(), 8, 3);
hsv = cvCreateImage(frame.cvSize(), 8, 3);
hue = cvCreateImage(frame.cvSize(), 8, 1);
mask = cvCreateImage(frame.cvSize(), 8, 1);
backproject = cvCreateImage(frame.cvSize(), 8, 1);
histimg = cvCreateImage(cvSize(320, 200), 8, 3);
cvZero(histimg);
hist = cvCreateHist( 1, hdims, CV_HIST_ARRAY, hranges_arr, 1 );
}
cvCopy(frame, image);
if (!paused)
{
cvCvtColor(image, hsv, CV_BGR2HSV);
if (track_object != 0) {
int _vmin = vmin, _vmax = vmax;
cvInRangeS(hsv, cvScalar(0, smin, Math.min(_vmin, _vmax), 0), cvScalar(180, 256, Math.max(_vmin, _vmax), 0), mask);
cvSplit(hsv, hue, null, null, null);
hueArray = new IplImageArray(hue);
if (track_object < 0) {
float max_val = 0.f;
cvSetImageROI(hue, selection);
cvSetImageROI(mask, selection);
cvCalcHist(hueArray, hist, 0, null);
if (max_val != 0) { // TODO: in C a non-zero integer counts as true; to be verified
max_val = (float) 255. / max_val;
} else {
max_val = 0;
}
FloatByReference fl_ref = new FloatByReference(max_val);
cvConvertScale(hist.bins(), hist.bins(), Float.parseFloat(fl_ref.toString()), 0);
cvResetImageROI(hue);
cvResetImageROI(mask);
track_window = selection;
track_object = 1;
cvZero(histimg);
bin_w = histimg.width() / hsize;
for (int i = 0; i < hsize; i++) {
int val = Math.round((int) (cvGetReal1D(hist.bins(), i) * histimg.height() / 255.));
CvScalar color = hsv2rgb(i * 180.f / hsize);
cvRectangle(histimg, cvPoint(i * bin_w, histimg.height()), cvPoint((i + 1) * bin_w, histimg.height() - val), color, -1, 8, 0);
}
}
cvCalcBackProject(hueArray, backproject, hist);
cvAnd(backproject, mask, backproject, null);
cvCamShift(backproject, track_window, cvTermCriteria(CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1), track_comp, track_box);
track_window = track_comp.rect();
// if (track_window.width()*track_window.height()<=1)
// {
// int cols = backproject.width(), rows = backproject.height(), r = (Math.min(cols, rows)+5)/6;
// track_window = cvRect(
// Math.max(track_window.x()-r,0),
// Math.max(track_window.y()-r,0),
// Math.min(track_window.x()+r,cols),
// Math.min(track_window.y()+r,rows));
// }
if (backproject_mode) {
cvCvtColor(backproject, image, CV_GRAY2BGR);
}
if (image.origin() == 0) {
track_box = track_box.angle(-track_box.angle());
cvEllipseBox(image, track_box, cvScalar(0, 0, 255, 0), 3, CV_AA, 0);
}
}
} else if (track_object < 0) {
paused = false;
}
if (select_object && selection.width() > 0 && selection.height() > 0) {
cvSetImageROI(image, selection);
cvXorS(image, cvScalarAll(255), image, null);
cvResetImageROI(image);
}
camshiftDemo.showImage(image);
histogram.showImage(histimg);
}
}
@Override
public void mouseClicked(MouseEvent e) {
// System.out.println("Mouse Clicked !");
}
@Override
public void mousePressed(MouseEvent e) {
this.onMouse(MOUSE_PRESSED, e.getX(), e.getY());
System.out.println("Mouse Pressed !");
System.out.println("\t e.getX(): "+e.getX());
System.out.println("\t e.getY(): "+e.getY());
}
@Override
public void mouseReleased(MouseEvent e) {
this.onMouse(MOUSE_RELEASED, e.getX(), e.getY());
System.out.println("Mouse Released !");
System.out.println("\t e.getX(): "+e.getX());
System.out.println("\t e.getY(): "+e.getY());
}
@Override
public void mouseEntered(MouseEvent e) {
// System.out.println("Mouse Entered !");
}
@Override
public void mouseExited(MouseEvent e) {
// System.out.println("Mouse Exited !");
}
}
Every time I run the application and select the object to track, the JVM crashes when it reaches the line containing cvCalcHist.
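To narrow it down, the same histogram calls can be exercised on their own, outside the grabbing loop. This is only a sketch that reuses the calls from the code above on a synthetic one-channel image (size and fill value are arbitrary), not a fix:
// Isolated sketch: build the 1-D hue histogram with the same JavaCV calls as above,
// but on a synthetic single-channel image, so cvCalcHist can be tested on its own.
int hsize = 16;
int[] hdims = {hsize};
float[] hranges = {0, 180};
float[][] hranges_arr = {hranges};
IplImage hue = cvCreateImage(cvSize(320, 240), 8, 1);     // single-channel "hue" plane
cvSet(hue, cvScalarAll(90), null);                        // fill it with one constant hue value
CvHistogram hist = cvCreateHist(1, hdims, CV_HIST_ARRAY, hranges_arr, 1);
IplImageArray hueArray = new IplImageArray(hue);
cvCalcHist(hueArray, hist, 0, null);                      // the call that crashes in the loop above
System.out.println("bin 8 = " + cvGetReal1D(hist.bins(), 8));   // with hue 90 all pixels should land in bin 8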
Can anyone tell me what's wrong with my code? I've been working on it for days and I can't figure out the problem. :(
Thank you very much.
I think the following code isn't giving the correct result. What's wrong with it?
public class ImagePadder
{
public static Bitmap Pad(Bitmap image, int newWidth, int newHeight)
{
int width = image.Width;
int height = image.Height;
if (width >= newWidth) throw new Exception("New width must be larger than the old width");
if (height >= newHeight) throw new Exception("New height must be larger than the old height");
Bitmap paddedImage = Grayscale.CreateGrayscaleImage(newWidth, newHeight);
BitmapLocker inputImageLocker = new BitmapLocker(image);
BitmapLocker paddedImageLocker = new BitmapLocker(paddedImage);
inputImageLocker.Lock();
paddedImageLocker.Lock();
//Reading row by row
for (int y = 0; y < image.Height; y++)
{
for (int x = 0; x < image.Width; x++)
{
Color col = inputImageLocker.GetPixel(x, y);
paddedImageLocker.SetPixel(x, y, col);
}
}
string str = string.Empty;
paddedImageLocker.Unlock();
inputImageLocker.Unlock();
return paddedImage;
}
}
Relevant Source Code:
public class BitmapLocker : IDisposable
{
//private properties
Bitmap _bitmap = null;
BitmapData _bitmapData = null;
private byte[] _imageData = null;
//public properties
public bool IsLocked { get; set; }
public IntPtr IntegerPointer { get; private set; }
public int Width { get { return _bitmap.Width; } }
public int Height { get { return _bitmap.Height; } }
public int Stride { get { return _bitmapData.Stride; } }
public int ColorDepth { get { return Bitmap.GetPixelFormatSize(_bitmap.PixelFormat); } }
public int Channels { get { return ColorDepth / 8; } }
public int PaddingOffset { get { return _bitmapData.Stride - (_bitmap.Width * Channels); } }
public PixelFormat ImagePixelFormat { get { return _bitmap.PixelFormat; } }
public bool IsGrayscale { get { return Grayscale.IsGrayscale(_bitmap); } }
//Constructor
public BitmapLocker(Bitmap source)
{
IsLocked = false;
IntegerPointer = IntPtr.Zero;
this._bitmap = source;
}
/// Lock bitmap
public void Lock()
{
if (IsLocked == false)
{
try
{
// Lock bitmap (so that no movement of data by .NET framework) and return bitmap data
_bitmapData = _bitmap.LockBits(
new Rectangle(0, 0, _bitmap.Width, _bitmap.Height),
ImageLockMode.ReadWrite,
_bitmap.PixelFormat);
// Create byte array to copy pixel values
int noOfBitsNeededForStorage = _bitmapData.Stride * _bitmapData.Height;
int noOfBytesNeededForStorage = noOfBitsNeededForStorage / 8;
_imageData = new byte[noOfBytesNeededForStorage * ColorDepth];//# of bytes needed for storage
IntegerPointer = _bitmapData.Scan0;
// Copy data from IntegerPointer to _imageData
Marshal.Copy(IntegerPointer, _imageData, 0, _imageData.Length);
IsLocked = true;
}
catch (Exception)
{
throw;
}
}
else
{
throw new Exception("Bitmap is already locked.");
}
}
/// Unlock bitmap
public void Unlock()
{
if (IsLocked == true)
{
try
{
// Copy data from _imageData to IntegerPointer
Marshal.Copy(_imageData, 0, IntegerPointer, _imageData.Length);
// Unlock bitmap data
_bitmap.UnlockBits(_bitmapData);
IsLocked = false;
}
catch (Exception)
{
throw;
}
}
else
{
throw new Exception("Bitmap is not locked.");
}
}
public Color GetPixel(int x, int y)
{
Color clr = Color.Empty;
// Get color components count
int cCount = ColorDepth / 8;
// Get start index of the specified pixel
int i = (Height - y - 1) * Stride + x * cCount;
int dataLength = _imageData.Length - cCount;
if (i > dataLength)
{
throw new IndexOutOfRangeException();
}
if (ColorDepth == 32) // For 32 bpp get Red, Green, Blue and Alpha
{
byte b = _imageData[i];
byte g = _imageData[i + 1];
byte r = _imageData[i + 2];
byte a = _imageData[i + 3]; // a
clr = Color.FromArgb(a, r, g, b);
}
if (ColorDepth == 24) // For 24 bpp get Red, Green and Blue
{
byte b = _imageData[i];
byte g = _imageData[i + 1];
byte r = _imageData[i + 2];
clr = Color.FromArgb(r, g, b);
}
if (ColorDepth == 8)
// For 8 bpp get color value (Red, Green and Blue values are the same)
{
byte c = _imageData[i];
clr = Color.FromArgb(c, c, c);
}
return clr;
}
public void SetPixel(int x, int y, Color color)
{
// Get color components count
int cCount = ColorDepth / 8;
// Get start index of the specified pixel
int i = (Height - y - 1) * Stride + x * cCount;
try
{
if (ColorDepth == 32) // For 32 bpp set Red, Green, Blue and Alpha
{
_imageData[i] = color.B;
_imageData[i + 1] = color.G;
_imageData[i + 2] = color.R;
_imageData[i + 3] = color.A;
}
if (ColorDepth == 24) // For 24 bpp set Red, Green and Blue
{
_imageData[i] = color.B;
_imageData[i + 1] = color.G;
_imageData[i + 2] = color.R;
}
if (ColorDepth == 8)
// For 8 bpp set color value (Red, Green and Blue values are the same)
{
_imageData[i] = color.B;
}
}
catch (Exception ex)
{
throw new Exception("(" + x + ", " + y + "), " + _imageData.Length + ", " + ex.Message + ", i=" + i);
}
}
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
}
protected virtual void Dispose(bool disposing)
{
if (disposing)
{
// free managed resources
_bitmap = null;
_bitmapData = null;
_imageData = null;
IntegerPointer = IntPtr.Zero;
}
}
}
The layout of a Windows bitmap is different from what you might expect. The bottom line of the image is the first line in memory, and the image continues backwards from there. It can also be laid out the other way when the height is negative, but such bitmaps aren't often encountered.
Your calculation of an offset into the bitmap appears to take that into account, so your problem must be more subtle.
int i = (Height - y - 1) * Stride + x * cCount;
The problem is that the BitmapData class already takes this into account and tries to fix it for you. The bitmap I described above is a bottom-up bitmap. From the documentation for BitmapData.Stride:
The stride is the width of a single row of pixels (a scan line), rounded up to a four-byte boundary. If the stride is positive, the bitmap is top-down. If the stride is negative, the bitmap is bottom-up.
It is intended to be used with the Scan0 property to access the bitmap in a consistent fashion whether it's top-down or bottom-up. In other words, the (Height - y - 1) flip in GetPixel and SetPixel is compensating for something that Scan0 and Stride have already handled for you.
I want to use some code like this in Processing, since I am not familiar with openFrameworks.
https://www.youtube.com/watch?v=pCq7u2TvlxU&list=UUtYM3-7ldtX7kf_sSoHt1Pw&index=1&feature=plcp
Has anyone heard of a project like this for Processing?
Since I am not a programmer, I am trying to mix the CalibrationDemo example with MarkerDetection (both from the OpenCV for Processing library), wondering if I can first get some transformation matrix between the checkerboard plane and the camera...
Most of the examples and tutorials about OpenCV are written in C, so I am having a hard time understanding some definitions without a practical example.
Below is a work-in-progress sketch. It does not do what I want yet. As I said, it is a mix of two examples from the Processing OpenCV library. My first goal is to extract the transformation matrix of the checkerboard plane.
import gab.opencv.*;
import org.opencv.imgproc.Imgproc;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.CvType;
import org.opencv.core.Point;
import org.opencv.core.Size;
import processing.video.*;
//import java.util.list;
OpenCV opencv;
Capture cam;
PImage src, dst, markerImg;
ArrayList<MatOfPoint> contours;
ArrayList<MatOfPoint2f> approximations;
ArrayList<MatOfPoint2f> markers;
boolean[][] markerCells;
void setup() {
size(1000, 365);
//1000 × 730
cam = new Capture(this, 800, 480);
cam.start();
//src = cam.get();//opencv.getInput();
opencv = new OpenCV(this, 800, 480);
}
void update() {
//src = opencv.getInput();
opencv.loadImage(src);
// hold on to this for later, since adaptiveThreshold is destructive
Mat gray = OpenCV.imitate(opencv.getGray());
opencv.getGray().copyTo(gray);
Mat thresholdMat = OpenCV.imitate(opencv.getGray());
opencv.blur(5);
Imgproc.adaptiveThreshold(opencv.getGray(), thresholdMat, 255, Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C, Imgproc.THRESH_BINARY_INV, 451, -65);
contours = new ArrayList<MatOfPoint>();
Imgproc.findContours(thresholdMat, contours, new Mat(), Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_NONE);
image(opencv.getOutput(), 0, 0);
approximations = createPolygonApproximations(contours);
markers = new ArrayList<MatOfPoint2f>();
markers = selectMarkers(approximations);
MatOfPoint2f canonicalMarker = new MatOfPoint2f();
Point[] canonicalPoints = new Point[4];
canonicalPoints[0] = new Point(0, 350);
canonicalPoints[1] = new Point(0, 0);
canonicalPoints[2] = new Point(350, 0);
canonicalPoints[3] = new Point(350, 350);
canonicalMarker.fromArray(canonicalPoints);
if (markers.size() <= 0) return;
println("num points: " + markers.get(0).height());
Mat transform = Imgproc.getPerspectiveTransform(markers.get(0), canonicalMarker);
Mat unWarpedMarker = new Mat(50, 50, CvType.CV_8UC1);
Imgproc.warpPerspective(gray, unWarpedMarker, transform, new Size(350, 350));
Imgproc.threshold(unWarpedMarker, unWarpedMarker, 125, 255, Imgproc.THRESH_BINARY | Imgproc.THRESH_OTSU);
float cellSize = 350/7.0;
markerCells = new boolean[7][7];
for (int row = 0; row < 7; row++) {
for (int col = 0; col < 7; col++) {
int cellX = int(col*cellSize);
int cellY = int(row*cellSize);
Mat cell = unWarpedMarker.submat(cellX, cellX +(int)cellSize, cellY, cellY+ (int)cellSize);
markerCells[row][col] = (Core.countNonZero(cell) > (cellSize*cellSize)/2);
}
}
for (int col = 0; col < 7; col++) {
for (int row = 0; row < 7; row++) {
if (markerCells[row][col]) {
print(1);
} else {
print(0);
}
}
println();
}
dst = createImage(350, 350, RGB);
opencv.toPImage(unWarpedMarker, dst);
}
ArrayList<MatOfPoint2f> selectMarkers(ArrayList<MatOfPoint2f> candidates) {
float minAllowedContourSide = 50;
minAllowedContourSide = minAllowedContourSide * minAllowedContourSide;
ArrayList<MatOfPoint2f> result = new ArrayList<MatOfPoint2f>();
for (MatOfPoint2f candidate : candidates) {
if (candidate.size().height != 4) {
continue;
}
if (!Imgproc.isContourConvex(new MatOfPoint(candidate.toArray()))) {
continue;
}
// eliminate markers where consecutive
// points are too close together
float minDist = src.width * src.width;
Point[] points = candidate.toArray();
for (int i = 0; i < points.length; i++) {
Point side = new Point(points[i].x - points[(i+1)%4].x, points[i].y - points[(i+1)%4].y);
float squaredLength = (float)side.dot(side);
// println("minDist: " + minDist + " squaredLength: " +squaredLength);
minDist = min(minDist, squaredLength);
}
// println(minDist);
if (minDist < minAllowedContourSide) {
continue;
}
result.add(candidate);
}
return result;
}
ArrayList<MatOfPoint2f> createPolygonApproximations(ArrayList<MatOfPoint> cntrs) {
ArrayList<MatOfPoint2f> result = new ArrayList<MatOfPoint2f>();
double epsilon = cntrs.get(0).size().height * 0.01;
println(epsilon);
for (MatOfPoint contour : cntrs) {
MatOfPoint2f approx = new MatOfPoint2f();
Imgproc.approxPolyDP(new MatOfPoint2f(contour.toArray()), approx, epsilon, true);
result.add(approx);
}
return result;
}
void drawContours(ArrayList<MatOfPoint> cntrs) {
for (MatOfPoint contour : cntrs) {
beginShape();
Point[] points = contour.toArray();
for (int i = 0; i < points.length; i++) {
vertex((float)points[i].x, (float)points[i].y);
}
endShape();
}
}
void drawContours2f(ArrayList<MatOfPoint2f> cntrs) {
for (MatOfPoint2f contour : cntrs) {
beginShape();
Point[] points = contour.toArray();
for (int i = 0; i < points.length; i++) {
vertex((float)points[i].x, (float)points[i].y);
}
endShape(CLOSE);
}
}
void draw() {
//VIDEO
if (!cam.available()) {
println("no video available");
return;
}
cam.read();
src = cam.get();
pushMatrix();
background(125);
scale(0.7);
//image(src, 0, 0);
update();
noFill();
smooth();
strokeWeight(5);
stroke(0, 255, 0);
if (markers.size() > 0) drawContours2f(markers);
popMatrix();
if (markers.size() <= 0) {
drawContours2f(markers);
return;
}
pushMatrix();
translate(200 + src.width/2, 0);
strokeWeight(1);
image(dst, 0, 0);
float cellSize = dst.width/7.0;
for (int col = 0; col < 7; col++) {
for (int row = 0; row < 7; row++) {
if (markerCells[row][col]) {
fill(255);
} else {
fill(0);
}
stroke(0, 255, 0);
rect(col*cellSize, row*cellSize, cellSize, cellSize);
}
}
popMatrix();
}
Any help or indication would be great!
B
I want to set focus on the first element (item) of a ListField. The problem is that I am setting focus from another field (not from within the ListField).
protected boolean navigationMovement(int dx, int dy, int status, int time) {
while (dy > 0) {
// How to jump into listfield?
Field f = ???
if (f.isFocusable()) {
f.setFocus();
dy--;
}
}
}
Here is my ListField class.
public class Custom_ListField extends ListField {
private String[] title, category, date, imagepath;
private int[] newsid, catsid;
private List_News newslist;
private Bitmap imagebitmap[], localimage = Config_GlobalFunction
.Bitmap("image_base.png");
private BrowserField webpage;
private boolean islatest;
private Vector content = null, text;
private ListCallback callback = null;
private int currentPosition = 0, j;
private Util_LazyLoader loader;
public Custom_ListField(Vector content, boolean islatest) {
this.content = content;
this.islatest = islatest;
newsid = new int[content.size()];
title = new String[content.size()];
category = new String[content.size()];
date = new String[content.size()];
imagepath = new String[content.size()];
catsid = new int[content.size()];
imagebitmap = new Bitmap[content.size()];
for (int i = 0; i < content.size(); i++) {
newslist = (List_News) content.elementAt(i);
newsid[i] = newslist.getID();
title[i] = newslist.getNtitle();
category[i] = newslist.getNewCatName();
date[i] = newslist.getNArticalD();
imagepath[i] = newslist.getImagePath();
catsid[i] = newslist.getCatID();
if (!imagepath[i].toString().equals("no picture"))
imagebitmap[i] = localimage;
}
initCallbackListening();
Main.getUiApplication().invokeLater(new Runnable() {
public void run() {
for (j = 0; j < imagepath.length; j++) {
if (!imagepath[j].toString().equals("no picture")
&& Config_GlobalFunction.isConnected()) {
loader = new Util_LazyLoader(imagepath[j],
new Util_BitmapDowloadListener() {
public void ImageDownloadCompleted(
Bitmap bmp) {
imagebitmap[j] = bmp;
invalidate();
}
});
loader.run();
}
}
}
}, 500, false);
}
private void initCallbackListening() {
callback = new ListCallback();
this.setCallback(callback);
this.setRowHeight(-2);
}
private class ListCallback implements ListFieldCallback {
public ListCallback() {
}
public void drawListRow(ListField listField, Graphics graphics,
final int index, int y, int width) {
currentPosition = index;
if (!imagepath[index].toString().equals("no picture")) {
float ratio = (float) ((float) localimage.getHeight() / (float) imagebitmap[index]
.getHeight());
Bitmap temp = new Bitmap(
(int) (imagebitmap[index].getWidth() * ratio),
(int) (imagebitmap[index].getHeight() * ratio));
imagebitmap[index].scaleInto(temp, Bitmap.FILTER_BILINEAR,
Bitmap.SCALE_TO_FIT);
imagebitmap[index] = temp;
graphics.drawBitmap(
Display.getWidth()
- localimage.getWidth()
- 5
+ ((localimage.getWidth() - imagebitmap[index]
.getWidth()) / 2),
y
+ (listField.getRowHeight(index) - imagebitmap[index]
.getHeight()) / 2,
imagebitmap[index].getWidth(),
imagebitmap[index].getHeight(), imagebitmap[index], 0,
0);
graphics.setColor(Color.BLACK);
text = Config_GlobalFunction
.wrap(title[index], Display.getWidth()
- imagebitmap[index].getWidth() - 15);
for (int i = 0; i < text.size(); i++) {
int liney = y + (i * Font.getDefault().getHeight());
graphics.drawText(
(String) text.elementAt(i),
5,
liney + 3,
DrawStyle.TOP | DrawStyle.LEFT | DrawStyle.ELLIPSIS,
Display.getWidth() - imagebitmap[index].getWidth()
- 10);
}
} else {
graphics.setColor(Color.BLACK);
text = Config_GlobalFunction.wrap(title[index],
Display.getWidth() - 10);
for (int i = 0; i < text.size(); i++) {
int liney = y + (i * Font.getDefault().getHeight());
graphics.drawText(
(String) text.elementAt(i),
5,
liney + 3,
DrawStyle.TOP | DrawStyle.LEFT | DrawStyle.ELLIPSIS,
Display.getWidth() - 10);
}
}
if (text.size() == 2) {
graphics.setColor(Color.GRAY);
graphics.drawText(date[index], 5, y
+ Font.getDefault().getHeight() + 3);
if (islatest) {
graphics.setColor(Color.RED);
graphics.drawText(category[index], Font.getDefault()
.getAdvance(date[index]) + 15, y
+ Font.getDefault().getHeight() + 3);
}
} else if (text.size() == 3) {
graphics.setColor(Color.GRAY);
graphics.drawText(date[index], 5, y
+ Font.getDefault().getHeight() * 2 + 3);
if (islatest) {
graphics.setColor(Color.RED);
graphics.drawText(category[index], Font.getDefault()
.getAdvance(date[index]) + 15, y
+ Font.getDefault().getHeight() * 2 + 3);
}
}
if (!imagepath[index].toString().equals("no picture"))
setRowHeight(index, imagebitmap[index].getHeight() + 10);
else {
if (text.size() == 2)
setRowHeight(index, getRowHeight() + 9);
else if (text.size() == 3) {
setRowHeight(index, getRowHeight() * 15 / 10 + 9);
}
}
graphics.setColor(Color.WHITE);
graphics.drawRect(0, y, width, listField.getRowHeight(index));
}
public Object get(ListField listField, int index) {
return content.elementAt(index);
}
public int getPreferredWidth(ListField listField) {
return Display.getWidth();
}
public int indexOfList(ListField listField, String prefix, int start) {
return content.indexOf(prefix, start);
}
}
public int getCurrentPosition() {
return currentPosition;
}
protected boolean navigationClick(int status, int time) {
final int index = getCurrentPosition();
Main.getUiApplication().pushScreen(new Custom_LoadingScreen(1));
Main.getUiApplication().invokeLater(new Runnable() {
public void run() {
if (catsid[index] == 9) {
if (Config_GlobalFunction.isConnected()) {
webpage = new BrowserField();
MainScreen aboutus = new Menu_Aboutus();
aboutus.add(webpage);
Main.getUiApplication().pushScreen(aboutus);
webpage.requestContent("http://www.orientaldaily.com.my/index.php?option=com_k2&view=item&id="
+ newsid[index]
+ ":&Itemid=223"
+ Database_Webservice.ht_params);
} else
Config_GlobalFunction.Message(
Config_GlobalFunction.nowifi, 1);
} else {
Main.getUiApplication().pushScreen(
new Main_NewsDetail(newsid[index]));
}
}
}, 1 * 1000, false);
return true;
}
}
I tried to use .get(), but when I check isFocusable() it throws a NullPointerException.
I'm not sure how to get the correct field.
Finally I found the solution.
It involves two layers of fields: the outer field (the ListField itself) gets the focus first, and then the first element inside it is selected.
while (dy > 0) {
Field f = listfield;
if (f.isFocusable()) {
f.setFocus(); //outer field
listfield.setSelectedIndex(0); //inner field
dy--;
}
}
With my current code, the images are all downloaded first and only then is the data displayed, which makes the device feel like it is lagging.
public Custom_ListField(Vector content, boolean islatest) {
this.content = content;
this.islatest = islatest;
newsid = new int[content.size()];
title = new String[content.size()];
category = new String[content.size()];
date = new String[content.size()];
imagepath = new String[content.size()];
catsid = new int[content.size()];
imagebitmap = new Bitmap[content.size()];
ischeck = new boolean[content.size()];
for (int i = 0; i < content.size(); i++) {
newslist = (List_News) content.elementAt(i);
newsid[i] = newslist.getID();
title[i] = newslist.getNtitle();
category[i] = newslist.getNewCatName();
date[i] = newslist.getNArticalD();
imagepath[i] = newslist.getImagePath();
catsid[i] = newslist.getCatID();
ischeck[i] = false;
if (!imagepath[i].toString().equals("no picture")) {
if (Config_GlobalFunction.isConnected())
imagebitmap[i] = Util_ImageLoader.loadImage(imagepath[i]);
else
imagebitmap[i] = localimage;
}
}
initCallbackListening();
}
private void initCallbackListening() {
callback = new ListCallback();
this.setCallback(callback);
this.setRowHeight(-2);
}
private class ListCallback implements ListFieldCallback {
public ListCallback() {
}
public void drawListRow(ListField listField, Graphics graphics,
final int index, int y, int width) {
currentPosition = index;
if (!imagepath[index].toString().equals("no picture")) {
float ratio = (float) ((float) localimage.getHeight() / (float) imagebitmap[index]
.getHeight());
Bitmap temp = new Bitmap(
(int) (imagebitmap[index].getWidth() * ratio),
(int) (imagebitmap[index].getHeight() * ratio));
imagebitmap[index].scaleInto(temp, Bitmap.FILTER_BILINEAR,
Bitmap.SCALE_TO_FIT);
imagebitmap[index] = temp;
graphics.drawBitmap(
Display.getWidth()
- localimage.getWidth()
- 5
+ ((localimage.getWidth() - imagebitmap[index]
.getWidth()) / 2),
y
+ (listField.getRowHeight(index) - imagebitmap[index]
.getHeight()) / 2,
imagebitmap[index].getWidth(),
imagebitmap[index].getHeight(), imagebitmap[index], 0,
0);
graphics.setColor(Color.BLACK);
text = Config_GlobalFunction
.wrap(title[index], Display.getWidth()
- imagebitmap[index].getWidth() - 15);
for (int i = 0; i < text.size(); i++) {
int liney = y + (i * Font.getDefault().getHeight());
graphics.drawText(
(String) text.elementAt(i),
5,
liney + 3,
DrawStyle.TOP | DrawStyle.LEFT | DrawStyle.ELLIPSIS,
Display.getWidth() - imagebitmap[index].getWidth()
- 10);
}
} else {
graphics.setColor(Color.BLACK);
text = Config_GlobalFunction.wrap(title[index],
Display.getWidth() - 10);
for (int i = 0; i < text.size(); i++) {
int liney = y + (i * Font.getDefault().getHeight());
graphics.drawText(
(String) text.elementAt(i),
5,
liney + 3,
DrawStyle.TOP | DrawStyle.LEFT | DrawStyle.ELLIPSIS,
Display.getWidth() - 10);
}
}
if (text.size() == 2) {
graphics.setColor(Color.GRAY);
graphics.drawText(date[index], 5, y
+ Font.getDefault().getHeight() + 3);
if (islatest) {
graphics.setColor(Color.RED);
graphics.drawText(category[index], Font.getDefault()
.getAdvance(date[index]) + 15, y
+ Font.getDefault().getHeight() + 3);
}
} else if (text.size() == 3) {
graphics.setColor(Color.GRAY);
graphics.drawText(date[index], 5, y
+ Font.getDefault().getHeight() * 2 + 3);
if (islatest) {
graphics.setColor(Color.RED);
graphics.drawText(category[index], Font.getDefault()
.getAdvance(date[index]) + 15, y
+ Font.getDefault().getHeight() * 2 + 3);
}
}
if (!imagepath[index].toString().equals("no picture"))
setRowHeight(index, imagebitmap[index].getHeight() + 10);
else {
if (text.size() == 2)
setRowHeight(index, getRowHeight() + 9);
else if (text.size() == 3) {
setRowHeight(index, getRowHeight() * 15 / 10 + 9);
}
}
graphics.setColor(Color.WHITE);
graphics.drawRect(0, y, width, listField.getRowHeight(index));
ischeck[index] = true;
}
}
I want imagebitmap[i] = Util_ImageLoader.loadImage(imagepath[i]); to run after the data is displayed, so the UI doesn't get stuck there. I tried putting it inside drawListRow; that works, but it is very slow, because the initial display runs it for rows 0-8 and then, when I scroll the list, it runs again, so the images are downloaded over and over.
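One possible direction, sketched with the fields already in the class (ischeck[], imagebitmap[], localimage): let drawListRow trigger the download only the first time it sees a row. startDownload is a hypothetical helper, not part of the code above; one possible shape for it is sketched after the Util_LazyLoader class below.
// Sketch only: request each image at most once, the first time its row is painted.
if (!imagepath[index].toString().equals("no picture") && !ischeck[index]) {
    ischeck[index] = true;               // mark as requested so scrolling never re-queues it
    imagebitmap[index] = localimage;     // show the placeholder until the download finishes
    startDownload(index);                // hypothetical helper that runs Util_LazyLoader on a worker thread
}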
Update
public class Util_LazyLoader implements Runnable {
String url = null;
BitmapDowloadListener listener = null;
public Util_LazyLoader(String url, BitmapDowloadListener listener) {
this.url = url;
this.listener = listener;
}
public void run() {
Bitmap bmpImage = getImageFromWeb(url);
listener.ImageDownloadCompleted(bmpImage);
}
private Bitmap getImageFromWeb(String url) {
HttpConnection connection = null;
InputStream inputStream = null;
EncodedImage bitmap;
byte[] dataArray = null;
try {
connection = (HttpConnection) (new ConnectionFactory())
.getConnection(url + Database_Webservice.ht_params)
.getConnection();
int responseCode = connection.getResponseCode();
if (responseCode == HttpConnection.HTTP_OK) {
inputStream = connection.openDataInputStream();
dataArray = IOUtilities.streamToBytes(inputStream);
}
} catch (Exception ex) {
} finally {
try {
inputStream.close();
connection.close();
} catch (Exception e) {
}
}
if (dataArray != null) {
bitmap = EncodedImage.createEncodedImage(dataArray, 0,
dataArray.length);
return bitmap.getBitmap();
} else {
return null;
}
}
}
I created a new class but I don't know how to use it.
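For what it's worth, a Runnable like this is normally handed to a Thread rather than run directly (calling loader.run(), as in the first snippet, executes the download on the calling thread). Here is a minimal sketch of the hypothetical startDownload helper mentioned above; the listener name follows the first snippet (Util_BitmapDowloadListener), so adjust it to whatever Util_LazyLoader actually expects:
// Hypothetical helper: start the loader on a worker thread and repaint the row when the bitmap arrives.
private void startDownload(final int index) {
    Util_LazyLoader loader = new Util_LazyLoader(imagepath[index],
            new Util_BitmapDowloadListener() {
                public void ImageDownloadCompleted(final Bitmap bmp) {
                    // Get back onto the event thread before touching UI state.
                    Main.getUiApplication().invokeLater(new Runnable() {
                        public void run() {
                            if (bmp != null) {
                                imagebitmap[index] = bmp;   // swap the placeholder for the real image
                                invalidate();               // repaint the list
                            }
                        }
                    });
                }
            });
    new Thread(loader).start();   // never call loader.run() directly on the event thread
}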
You need to use the lazy-loading concept here.
For example:
http://supportforums.blackberry.com/t5/Java-Development/How-to-load-images-quickly-like-android/m-p/1487995#M187253
http://supportforums.blackberry.com/t5/Java-Development/Lazy-loading-issue-in-blackberry/m-p/1835127
You need to download the images on a separate thread (not on the UI thread). When a list row is rendered, it looks for a bitmap image. So when you create your list view, provide a default "loading" bitmap and start a thread to download the real image.
Create a method on that thread which puts the data from the URL into a Vector; this could live in your connection class that extends Thread. Like this:
getimagemethod(image[i]);
Then declare the method that takes the image URL string, like this:
private void getimagemethod(String image2)
{
    this.imageforlist = image2;
    // imageforlist should be declared as a class-level (global) String
    newBitmap1 = Util_ImageLoader.getImageFromUrl(imageforlist);
    // newBitmap1 is also a class-level (global) Bitmap
}
After this, add the bitmap (newBitmap1) to the Vector, like this:
imagevct.addElement(newBitmap1);
Here imagevct is a Vector that is also class-level (global). To create it, use:
private Vector imagevct = new Vector();
Now you are ready to draw the bitmap in your list. Do it like this:
public void drawListRow(ListField list, Graphics g, int index, int y, int w) {
Bitmap imagee = (Bitmap) imagevct.elementAt(index);
g.drawBitmap(HPADDING, 15 + y, 60, 60, imagee , 0, 0);
}
Here HPADDING is:
private static final int HPADDING = Display.getWidth() <= 320 ? 6 : 8;
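Putting the pieces above together, here is a minimal consolidated sketch, assuming it lives inside your ListField subclass; Util_ImageLoader.getImageFromUrl is the call used above (it may be called loadImage in your project), and loadImages, defaultBitmap and imagevct are illustrative names, not a drop-in class:
// Consolidated sketch: a placeholder per row, one worker thread filling a class-level
// Vector, and drawListRow simply reading whatever is in the Vector right now.
private Vector imagevct = new Vector();                                       // one Bitmap per row
private Bitmap defaultBitmap = Bitmap.getBitmapResource("image_base.png");    // "loading" placeholder
private void loadImages(final String[] urls) {
    for (int i = 0; i < urls.length; i++) {
        imagevct.addElement(defaultBitmap);                // every row starts with the placeholder
    }
    new Thread(new Runnable() {
        public void run() {
            for (int i = 0; i < urls.length; i++) {
                Bitmap bmp = Util_ImageLoader.getImageFromUrl(urls[i]);   // blocking download, off the UI thread
                if (bmp != null) {
                    imagevct.setElementAt(bmp, i);
                    UiApplication.getUiApplication().invokeLater(new Runnable() {
                        public void run() { invalidate(); }               // repaint rows as bitmaps arrive
                    });
                }
            }
        }
    }).start();
}
// drawListRow then just reads the current element, exactly as in the snippet above:
// Bitmap imagee = (Bitmap) imagevct.elementAt(index);
// g.drawBitmap(HPADDING, 15 + y, 60, 60, imagee, 0, 0);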
This is just a step-by-step tutorial sample.
If you have any further questions, you can post them here.
Could someone help me figure out how to draw a route on a RichMapField?
I am able to draw one on a MapField.
I want to use RichMapField because I can use the MapDataModel to add more than one marker dynamically.
Updated Code:
This is my attempt at writing code to display a route from A to B on a RichMapField; all I am getting is a dot on the map. Could someone please help me with this:
class MapPathScreen extends MainScreen {
MapControl map;
Road mRoad = new Road();
RichMapField mapField = MapFactory.getInstance().generateRichMapField();
public MapPathScreen() {
double fromLat = 47.67, fromLon = 9.38, toLat =47.12, toLon = 9.47;
/* double fromLat = 49.85, fromLon = 24.016667;
double toLat = 50.45, toLon = 30.523333;
*/
String url = RoadProvider.getUrl(fromLat, fromLon, toLat, toLon);
InputStream is = getConnection(url);
mRoad = RoadProvider.getRoute(is);
map = new MapControl(mapField);
add(new LabelField(mRoad.mName));
add(new LabelField(mRoad.mDescription));
add(map);
}
protected void onUiEngineAttached(boolean attached) {
super.onUiEngineAttached(attached);
if (attached) {
map.drawPath(mRoad);
}
}
private InputStream getConnection(String url) {
HttpConnection urlConnection = null;
InputStream is = null;
try {
urlConnection = (HttpConnection) Connector.open(url);
urlConnection.setRequestMethod("GET");
is = urlConnection.openInputStream();
} catch (IOException e) {
e.printStackTrace();
}
return is;
}
protected boolean keyDown(int keycode, int time)
{
MapAction action=mapField.getAction();
StringBuffer sb = new StringBuffer();
// Retrieve the characters mapped to the keycode for the current keyboard layout
Keypad.getKeyChars(keycode, sb);
// Zoom in
if(sb.toString().indexOf('i') != -1)
{
action.zoomIn();
return true;
}
// Zoom out
else if(sb.toString().indexOf('o') != -1)
{
action.zoomOut();
return true;
}
return super.keyDown(keycode, time);
}
}
class MapControl extends net.rim.device.api.lbs.maps.ui.MapField {
Bitmap bmp = null;
MapAction action;
MapField map = new MapField();
RichMapField mapRich;
Road road;
public MapControl(RichMapField mapRich)
{
this.mapRich = mapRich;
}
public void drawPath(Road road) {
if (road.mRoute.length > 0) {
Coordinates[] mPoints = new Coordinates[] {};
for (int i = 0; i < road.mRoute.length; i++) {
Arrays.add(mPoints, new Coordinates(road.mRoute[i][1],
road.mRoute[i][0], 0));
}
double moveToLat = mPoints[0].getLatitude()
+ (mPoints[mPoints.length - 1].getLatitude() - mPoints[0].getLatitude()) / 2;
double moveToLong = mPoints[0].getLongitude()
+ (mPoints[mPoints.length - 1].getLongitude() - mPoints[0].getLongitude()) / 2;
Coordinates moveTo = new Coordinates(moveToLat, moveToLong, 0);
action = this.getAction();
action.setZoom(15);
action.setCentreAndZoom(new MapPoint(moveToLat,moveToLong), 15);
bmp = new Bitmap(500, 500);
bmp.createAlpha(Bitmap.ALPHA_BITDEPTH_8BPP);
Graphics g = Graphics.create(bmp);
int x1 = -1, y1 = -1, x2 = -1, y2 = -1;
XYPoint point = new XYPoint();
Coordinates c = new Coordinates(mPoints[0].getLatitude(),mPoints[0].getLongitude(),0);
map.convertWorldToField(c, point);
x1=point.x;
y1 = point.y;
g.fillEllipse(x1, y1, x1, y1 + 1, x1 + 1, y1, 0, 360);
for (int i = 1; i < mPoints.length; i++) {
XYPoint point1 = new XYPoint();
Coordinates c1 = new Coordinates(mPoints[i].getLatitude(),mPoints[i].getLongitude(),0);
map.convertWorldToField(c1, point1);
x2 = point1.x;
y2 = point1.y;
g.setColor(Color.GREEN);
//g.fillEllipse(x1, y1, x1, y1 + 1, x1 + 1, y1, 0, 360);
g.drawLine(x1, y1, x2, y2);
x1 = x2;
y1 = y2;
}
}
}
protected void paint(Graphics g) {
super.paint(g);
if (bmp != null) {
g.setGlobalAlpha(100);
g.drawBitmap(0, 0, bmp.getWidth(), bmp.getHeight(), bmp, 0, 0);
}
}