I need a way to segment each arrow on its own. I tried OpenCV findContours, but it either breaks an arrow apart or attaches it to multiple shapes and other arrows, since the arrows share boundaries with the shapes. I tried OpenCV connected components, but in a graph like this almost all of the arrows end up connected to each other. I'm also having trouble because the shape boundaries are almost the same color as the arrows, and in these kinds of images a single arrow can contain several colors. Any opinion on this problem?
This is a sample diagram. I also have to deal with harder diagrams like this one.
OK, let's work with the new picture.
1. Binarize the arrows (and the shapes):
cv::Mat imgCl = cv::imread("62uoU.jpg", cv::IMREAD_COLOR);
cv::Mat img;
cv::cvtColor(imgCl, img, cv::COLOR_BGR2GRAY);
cv::Mat mask1;
cv::threshold(img, mask1, 30, 255, cv::THRESH_BINARY_INV);
cv::Mat mask2;
cv::threshold(img, mask2, 120, 255, cv::THRESH_BINARY_INV);
cv::Mat diff;
cv::absdiff(mask1, mask2, diff);
cv::imshow("diff1", diff);
Result 1:
2. Remove the rectangular shapes:
cv::Rect objRect(0, 0, diff.cols, diff.rows);
cv::Size minSize(objRect.width / 100, objRect.height / 100);
cv::Mat bin = cv::Mat(diff, objRect).clone();
for (;;)
{
cv::Rect cutRect;
if (!PosRefinement(bin, cutRect, 0.9f, minSize))
{
break;
}
cv::rectangle(bin, cutRect, cv::Scalar(0, 0, 0), cv::FILLED);
cv::rectangle(diff, cutRect, cv::Scalar(0, 0, 0), cv::FILLED);
objRect.x += cutRect.x;
objRect.y += cutRect.y;
objRect.width = cutRect.width;
objRect.height = cutRect.height;
}
cv::imshow("diff", diff);
Result 2:
3. Find lines:
std::vector<cv::Vec4i> linesP;
cv::HoughLinesP(diff, linesP, 1, CV_PI / 180, 20, 10, 5);
for (size_t i = 0; i < linesP.size(); i++)
{
cv::Vec4i l = linesP[i];
cv::line(imgCl, cv::Point(l[0], l[1]), cv::Point(l[2], l[3]), cv::Scalar(0, 0, 255), 3, cv::LINE_AA);
}
cv::imshow("img", imgCl);
Result 3:
The black arrows were found. This solution can be improved: find and delete the text areas in the image (Tesseract or cv::text::ERFilter), and add a little morphology so that the arrow tips are also drawn by the Hough lines.
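A minimal sketch of the morphology idea, assuming the diff image from the code above; the 3x3 elliptical kernel, the single dilation pass, and the variable names are my assumptions to tune, and the text-removal step is only marked with a comment:
cv::Mat thickened;
cv::Mat tipKernel = cv::getStructuringElement(cv::MORPH_ELLIPSE, cv::Size(3, 3));
// Thicken the thin strokes so the short arrow-tip segments survive HoughLinesP.
cv::dilate(diff, thickened, tipKernel, cv::Point(-1, -1), 1);
// Text removal (Tesseract or cv::text::ERFilter) would be applied here before the Hough transform; omitted in this sketch.
std::vector<cv::Vec4i> tipLines;
cv::HoughLinesP(thickened, tipLines, 1, CV_PI / 180, 20, 10, 5);
for (size_t i = 0; i < tipLines.size(); i++)
{
    cv::Vec4i l = tipLines[i];
    cv::line(imgCl, cv::Point(l[0], l[1]), cv::Point(l[2], l[3]), cv::Scalar(0, 255, 0), 2, cv::LINE_AA);
}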
P.S. Utility function:
bool PosRefinement(
cv::Mat bin,
cv::Rect& cutRect,
double kThreshold,
cv::Size minSize
)
{
const double areaThreshold = 100;
const int radius = 5;
const int maxIters = 100;
std::vector<std::vector<cv::Point>> contours;
std::vector<cv::Vec4i> hierarchy;
cv::findContours(bin, contours, hierarchy, cv::RETR_EXTERNAL, cv::CHAIN_APPROX_SIMPLE, cv::Point());
size_t bestCont = contours.size();
double maxArea = 0;
for (size_t i = 0; i < contours.size(); i++)
{
double area = cv::contourArea(contours[i]);
if (area > maxArea)
{
maxArea = area;
bestCont = i;
}
}
if (maxArea < areaThreshold)
{
return false;
}
cv::Moments m = cv::moments(contours[bestCont]);
cv::Point mc(cvRound(m.m10 / m.m00), cvRound(m.m01 / m.m00));
cv::Rect currRect(mc.x - radius / 2, mc.y - radius / 2, radius, radius);
auto Clamp = [](int v, int hi) -> bool
{
if (v < 0)
{
v = 0;
return true;
}
else if (hi && v > hi - 1)
{
v = hi - 1;
return true;
}
return false;
};
auto RectClamp = [&](cv::Rect& r, int w, int h) -> bool
{
return Clamp(r.x, w) || Clamp(r.x + r.width, w) || Clamp(r.y, h) || Clamp(r.y + r.height, h);
};
int stepL = radius / 2;
int stepR = radius / 2;
int stepT = radius / 2;
int stepB = radius / 2;
double k = 0;
struct State
{
double k = 0;
int stepL = 0;
int stepR = 0;
int stepT = 0;
int stepB = 0;
cv::Rect currRect;
State() = default;
State(double k_, int stepL_, int stepR_, int stepT_, int stepB_, cv::Rect currRect_)
:
k(k_),
stepL(stepL_),
stepR(stepR_),
stepT(stepT_),
stepB(stepB_),
currRect(currRect_)
{
}
bool operator==(const State& st) const
{
return (st.k == k) && (st.stepL == stepL) && (st.stepR == stepR) && (st.stepT == stepT) && (st.stepB == stepB) && (st.currRect == currRect);
}
};
const size_t statesCount = 2;
State prevStates[statesCount];
size_t stateInd = 0;
for (int it = 0; it < maxIters; ++it)
{
cv::Rect rleft(currRect.x - stepL, currRect.y, currRect.width + stepL, currRect.height);
cv::Rect rright(currRect.x, currRect.y, currRect.width + stepR, currRect.height);
cv::Rect rtop(currRect.x, currRect.y - stepT, currRect.width, currRect.height + stepT);
cv::Rect rbottom(currRect.x, currRect.y, currRect.width, currRect.height + stepB);
double kleft = 0;
double kright = 0;
double ktop = 0;
double kbottom = 0;
if (!RectClamp(rleft, bin.cols, bin.rows))
{
cv::Rect rstep(currRect.x - stepL, currRect.y, stepL, currRect.height);
if (cv::sum(bin(rstep))[0] / (255.0 * rstep.area()) > kThreshold / 2)
{
kleft = cv::sum(bin(rleft))[0] / (255.0 * rleft.area());
}
}
if (!RectClamp(rright, bin.cols, bin.rows))
{
cv::Rect rstep(currRect.x + currRect.width, currRect.y, stepR, currRect.height);
if (cv::sum(bin(rstep))[0] / (255.0 * rstep.area()) > kThreshold / 2)
{
kright = cv::sum(bin(rright))[0] / (255.0 * rright.area());
}
}
if (!RectClamp(rtop, bin.cols, bin.rows))
{
cv::Rect rstep(currRect.x, currRect.y - stepT, currRect.width, stepT);
if (cv::sum(bin(rstep))[0] / (255.0 * rstep.area()) > kThreshold / 2)
{
ktop = cv::sum(bin(rtop))[0] / (255.0 * rtop.area());
}
}
if (!RectClamp(rbottom, bin.cols, bin.rows))
{
cv::Rect rstep(currRect.x, currRect.y + currRect.height, currRect.width, stepB);
if (cv::sum(bin(rstep))[0] / (255.0 * rstep.area()) > kThreshold / 2)
{
kbottom = cv::sum(bin(rbottom))[0] / (255.0 * rbottom.area());
}
}
bool wasEnlargeX = false;
if (kleft > kThreshold)
{
currRect.x -= stepL;
currRect.width += stepL;
wasEnlargeX = true;
if (kleft > k)
{
++stepL;
}
}
else
{
if (stepL > 1)
{
--stepL;
}
currRect.x += 1;
currRect.width -= 1;
}
if (kright > kThreshold)
{
currRect.width += stepR;
wasEnlargeX = true;
if (kright > k)
{
++stepR;
}
}
else
{
if (stepR > 1)
{
--stepR;
}
currRect.width -= 1;
}
bool wasEnlargeY = false;
if (ktop > kThreshold)
{
currRect.y -= stepT;
currRect.height += stepT;
wasEnlargeY = true;
if (ktop > k)
{
++stepT;
}
}
else
{
if (stepT > 1)
{
--stepT;
}
currRect.y += 1;
currRect.height -= 1;
}
if (kbottom > kThreshold)
{
currRect.height += stepB;
wasEnlargeY = true;
if (kbottom > k)
{
++stepB;
}
}
else
{
if (stepB > 1)
{
--stepB;
}
currRect.height -= 1;
}
k = cv::sum(bin(currRect))[0] / (255.0 * currRect.area());
State currState(k, stepL, stepR, stepT, stepB, currRect);
bool repState = false;
for (size_t i = 0; i < statesCount; ++i)
{
if (prevStates[i] == currState)
{
repState = true;
break;
}
}
if (repState)
{
break;
}
else
{
prevStates[stateInd] = currState;
stateInd = (stateInd + 1 < statesCount) ? (stateInd + 1) : 0;
}
if (k < kThreshold && (stepL + stepR + stepT + stepB == 4) && !wasEnlargeX && !wasEnlargeY)
{
break;
}
}
cutRect.x = std::max(0, currRect.x - 1);
cutRect.width = currRect.width + 2;
cutRect.y = std::max(0, currRect.y - 1);
cutRect.height = currRect.height + 2;
return (cutRect.width >= minSize.width) && (cutRect.height >= minSize.height);
}
For your example it might be simpler: the picture (PNG) has 4 channels, and the 4th channel is a transparency mask. You can work with the transparency channel alone and filter the arrows by their contour properties (area and perimeter):
cv::Mat img = cv::imread("voXFs.png", cv::IMREAD_UNCHANGED);
std::cout << "imsize = " << img.size() << ", chans = " << img.channels() << std::endl;
cv::imshow("img", img);
std::vector<cv::Mat> chans;
cv::split(img, chans);
cv::imshow("transp", chans.back());
cv::Mat mask;
cv::threshold(chans.back(), mask, 50, 255, cv::THRESH_BINARY | cv::THRESH_OTSU);
std::vector<std::vector<cv::Point> > contours;
cv::findContours(mask, contours, cv::RETR_EXTERNAL, cv::CHAIN_APPROX_SIMPLE);
cv::Mat draw;
cv::cvtColor(mask, draw, cv::COLOR_GRAY2BGR);
for (size_t i = 0; i < contours.size(); ++i)
{
double area = cv::contourArea(contours[i]);
double len = cv::arcLength(contours[i], false);
double k = len / area;
if (area > 10 && len > 60 && k > 2)
{
std::cout << "area = " << area << ", len = " << len << ", k = " << k << std::endl;
cv::drawContours(draw, contours, i, cv::Scalar(255, 0, 0), 1);
}
}
cv::imshow("mask", mask);
cv::imshow("draw", draw);
cv::waitKey(0);
But for a more robust result:
Find and delete the text areas in the image (Tesseract or cv::text::ERFilter).
Erode the mask, find all the shapes by contours, draw them filled and dilate them, then bitwise-AND the result with the mask (a sketch of this step follows below).
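A minimal sketch of that last suggestion, assuming the mask from the code above; the kernel size and the exact combination of operations are my assumptions, not the author's exact pipeline:
cv::Mat shapeKernel = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(3, 3));
cv::Mat eroded;
cv::erode(mask, eroded, shapeKernel);              // thin arrow strokes vanish, filled shapes survive
std::vector<std::vector<cv::Point>> shapeContours;
cv::findContours(eroded, shapeContours, cv::RETR_EXTERNAL, cv::CHAIN_APPROX_SIMPLE);
cv::Mat shapes = cv::Mat::zeros(mask.size(), CV_8UC1);
cv::drawContours(shapes, shapeContours, -1, cv::Scalar(255), cv::FILLED);
cv::dilate(shapes, shapes, shapeKernel);           // grow the shapes back to their original extent
cv::Mat shapeMask;
cv::bitwise_and(mask, shapes, shapeMask);          // mask restricted to the shapes; subtracting it from mask leaves the arrows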
The end!
Can anyone help me with this problem: how do I flip an image without using the built-in flip function, i.e. flip(src, dst, 1 or 0), in C++ using OpenCV? I am new to this library, so please help.
OpenCV's flip function uses internal flipHoriz or flipVert functions.
static void
flipHoriz( const uchar* src, size_t sstep, uchar* dst, size_t dstep, Size size, size_t esz )
{
int i, j, limit = (int)(((size.width + 1)/2)*esz);
AutoBuffer<int> _tab(size.width*esz);
int* tab = _tab;
for( i = 0; i < size.width; i++ )
for( size_t k = 0; k < esz; k++ )
tab[i*esz + k] = (int)((size.width - i - 1)*esz + k);
for( ; size.height--; src += sstep, dst += dstep )
{
for( i = 0; i < limit; i++ )
{
j = tab[i];
uchar t0 = src[i], t1 = src[j];
dst[i] = t1; dst[j] = t0;
}
}
}
static void
flipVert( const uchar* src0, size_t sstep, uchar* dst0, size_t dstep, Size size, size_t esz )
{
const uchar* src1 = src0 + (size.height - 1)*sstep;
uchar* dst1 = dst0 + (size.height - 1)*dstep;
size.width *= (int)esz;
for( int y = 0; y < (size.height + 1)/2; y++, src0 += sstep, src1 -= sstep,
dst0 += dstep, dst1 -= dstep )
{
int i = 0;
if( ((size_t)src0|(size_t)dst0|(size_t)src1|(size_t)dst1) % sizeof(int) == 0 )
{
for( ; i <= size.width - 16; i += 16 )
{
int t0 = ((int*)(src0 + i))[0];
int t1 = ((int*)(src1 + i))[0];
((int*)(dst0 + i))[0] = t1;
((int*)(dst1 + i))[0] = t0;
t0 = ((int*)(src0 + i))[1];
t1 = ((int*)(src1 + i))[1];
((int*)(dst0 + i))[1] = t1;
((int*)(dst1 + i))[1] = t0;
t0 = ((int*)(src0 + i))[2];
t1 = ((int*)(src1 + i))[2];
((int*)(dst0 + i))[2] = t1;
((int*)(dst1 + i))[2] = t0;
t0 = ((int*)(src0 + i))[3];
t1 = ((int*)(src1 + i))[3];
((int*)(dst0 + i))[3] = t1;
((int*)(dst1 + i))[3] = t0;
}
for( ; i <= size.width - 4; i += 4 )
{
int t0 = ((int*)(src0 + i))[0];
int t1 = ((int*)(src1 + i))[0];
((int*)(dst0 + i))[0] = t1;
((int*)(dst1 + i))[0] = t0;
}
}
for( ; i < size.width; i++ )
{
uchar t0 = src0[i];
uchar t1 = src1[i];
dst0[i] = t1;
dst1[i] = t0;
}
}
}
// you can use it with a small modification as below
void myflip( InputArray _src, OutputArray _dst, int flip_mode )
{
CV_Assert( _src.dims() <= 2 );
Size size = _src.size();
if (flip_mode < 0)
{
if (size.width == 1)
flip_mode = 0;
if (size.height == 1)
flip_mode = 1;
}
if ((size.width == 1 && flip_mode > 0) ||
(size.height == 1 && flip_mode == 0) ||
(size.height == 1 && size.width == 1 && flip_mode < 0))
{
return _src.copyTo(_dst);
}
Mat src = _src.getMat();
int type = src.type();
_dst.create( size, type );
Mat dst = _dst.getMat();
size_t esz = CV_ELEM_SIZE(type);
if( flip_mode <= 0 )
flipVert( src.ptr(), src.step, dst.ptr(), dst.step, src.size(), esz );
else
flipHoriz( src.ptr(), src.step, dst.ptr(), dst.step, src.size(), esz );
if( flip_mode < 0 )
flipHoriz( dst.ptr(), dst.step, dst.ptr(), dst.step, dst.size(), esz );
}
Assuming you have a good reason not to use OpenCV flip function, you can write your custom one.
For this example, I'll use CV_8UC3 images. I'll point out at the end how to expand this to different formats.
Let's first see how to flip an image left to right (along the x axis), which corresponds to cv::flip(src, dst, 1). Given a src image, each dst pixel keeps the same y coordinate and takes its x coordinate from src.cols - 1 - x. In practice:
void flip_lr(const Mat3b& src, Mat3b& dst)
{
Mat3b _dst(src.rows, src.cols);
for (int r = 0; r < _dst.rows; ++r) {
for (int c = 0; c < _dst.cols; ++c) {
_dst(r, c) = src(r, src.cols - 1 - c);
}
}
dst = _dst;
}
Then, to flip along the y axis (corresponding to cv::flip(src, dst, 0)), dst keeps the same x coordinate and takes its y coordinate from src.rows - 1 - y. However, you can reuse the function above: simply transpose the matrix, apply the flip along the x axis, and transpose back. In practice:
dst = src.t();
flip_lr(dst, dst);
dst = dst.t();
Then, to flip along both axes, corresponding to cv::flip(src, dst, -1), you simply combine the two flips:
flip_lr(src, dst);
dst = dst.t();
flip_lr(dst, dst);
dst = dst.t();
You can wrap this functionality in a custom flip function that takes the same parameters as cv::flip:
void custom_flip(const Mat3b& src, Mat3b& dst, int code)
{
if (code > 0)
{ // Flip x axis
flip_lr(src, dst);
}
else if (code == 0)
{
// Flip y axis
dst = src.t();
flip_lr(dst, dst);
dst = dst.t();
}
else // code < 0
{
// Flip x and y axis
flip_lr(src, dst);
dst = dst.t();
flip_lr(dst, dst);
dst = dst.t();
}
}
Note that you can adapt this to different formats simply by modifying the flip_lr function and taking care to call the appropriate version inside custom_flip, which would then accept Mat instead of Mat3b.
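One possible way to do that generalization, added here as a sketch rather than as part of the original answer: copy elemSize() bytes per pixel, so the same loop works for any Mat type (the name flip_lr_any is mine):
#include <cstring>            // std::memcpy
#include <opencv2/core.hpp>   // cv::Mat

void flip_lr_any(const cv::Mat& src, cv::Mat& dst)
{
    cv::Mat tmp(src.rows, src.cols, src.type());
    const size_t esz = src.elemSize();   // bytes per pixel, whatever the element type
    for (int r = 0; r < src.rows; ++r) {
        const uchar* srow = src.ptr<uchar>(r);
        uchar* drow = tmp.ptr<uchar>(r);
        for (int c = 0; c < src.cols; ++c) {
            // tmp(r, c) = src(r, cols - 1 - c), copied byte-wise
            std::memcpy(drow + c * esz, srow + (src.cols - 1 - c) * esz, esz);
        }
    }
    dst = tmp;
}
custom_flip would then take Mat arguments and call flip_lr_any in place of flip_lr.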
Full code for reference:
void flip_lr(const Mat3b& src, Mat3b& dst)
{
Mat3b _dst(src.rows, src.cols);
for (int r = 0; r < _dst.rows; ++r) {
for (int c = 0; c < _dst.cols; ++c) {
_dst(r, c) = src(r, src.cols - 1 - c);
}
}
dst = _dst;
}
void custom_flip(const Mat3b& src, Mat3b& dst, int code)
{
if (code > 0)
{ // Flip x axis
flip_lr(src, dst);
}
else if (code == 0)
{
// Flip y axis
dst = src.t();
flip_lr(dst, dst);
dst = dst.t();
}
else // code < 0
{
// Flip x and y axis
flip_lr(src, dst);
dst = dst.t();
flip_lr(dst, dst);
dst = dst.t();
}
}
int main(void)
{
Mat3b img = imread("path_to_image");
Mat3b flipped;
flip(img, flipped, -1);
Mat3b custom;
custom_flip(img, custom, -1);
imshow("OpenCV flip", flipped);
imshow("Custom flip", custom);
waitKey();
return 0;
}
I am looking for ways to create a fisheye lens effect. I looked at the documentation for OpenCV, and it looks like it contains camera calibration functions for radial distortions such as fisheye. Is it possible to simulate fisheye distortion with OpenCV?
If it is possible with OpenCV, which one will generate better results compared to OpenGL? Thanks.
I created this app using OpenCV. Is this the effect you are referring to?
I basically coded the formula shown in Wikipedia's "Distortion (optics)" article. I can show the code if needed.
Update:
OK, so below is the actual code, written in C++ using OpenCV (not documented, so feel free to ask for explanations):
The program receives the following parameters as input: |input image| |output image| |K, which controls the amount of distortion (typically try values around 0.001)| |x coordinate of the center of distortion| |y coordinate of the center of distortion|
The crux of the program is the double for loop, which iterates pixel by pixel over the result image and looks for the matching pixel in the input image using the formula for radial distortion (this is how image warping is generally done, perhaps counter-intuitively, by back-projection from the output to the input). There are some subtleties that have to do with the scale of the output image (in this program the result is the same size as the input), which I won't get into unless you want more details. Enjoy.
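For reference, the mapping evaluated by getRadialX/getRadialY below (after the xscale/xshift rescaling) is the single-coefficient radial model; for every output pixel (x, y) the program samples the input, with bilinear interpolation, at (the names xSrc/ySrc are mine):
float r2 = (x - centerX) * (x - centerX) + (y - centerY) * (y - centerY);
float xSrc = x + (x - centerX) * K * r2;
float ySrc = y + (y - centerY) * K * r2;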
#include <cv.h>
#include <highgui.h>
#include <math.h>
#include <unistd.h>
#include <getopt.h>
#include <iostream>
void sampleImage(const IplImage* arr, float idx0, float idx1, CvScalar& res)
{
if(idx0<0 || idx1<0 || idx0>(cvGetSize(arr).height-1) || idx1>(cvGetSize(arr).width-1)){
res.val[0]=0;
res.val[1]=0;
res.val[2]=0;
res.val[3]=0;
return;
}
float idx0_fl=floor(idx0);
float idx0_cl=ceil(idx0);
float idx1_fl=floor(idx1);
float idx1_cl=ceil(idx1);
CvScalar s1=cvGet2D(arr,(int)idx0_fl,(int)idx1_fl);
CvScalar s2=cvGet2D(arr,(int)idx0_fl,(int)idx1_cl);
CvScalar s3=cvGet2D(arr,(int)idx0_cl,(int)idx1_cl);
CvScalar s4=cvGet2D(arr,(int)idx0_cl,(int)idx1_fl);
float x = idx0 - idx0_fl;
float y = idx1 - idx1_fl;
res.val[0]= s1.val[0]*(1-x)*(1-y) + s2.val[0]*(1-x)*y + s3.val[0]*x*y + s4.val[0]*x*(1-y);
res.val[1]= s1.val[1]*(1-x)*(1-y) + s2.val[1]*(1-x)*y + s3.val[1]*x*y + s4.val[1]*x*(1-y);
res.val[2]= s1.val[2]*(1-x)*(1-y) + s2.val[2]*(1-x)*y + s3.val[2]*x*y + s4.val[2]*x*(1-y);
res.val[3]= s1.val[3]*(1-x)*(1-y) + s2.val[3]*(1-x)*y + s3.val[3]*x*y + s4.val[3]*x*(1-y);
}
float xscale;
float yscale;
float xshift;
float yshift;
float getRadialX(float x,float y,float cx,float cy,float k){
x = (x*xscale+xshift);
y = (y*yscale+yshift);
float res = x+((x-cx)*k*((x-cx)*(x-cx)+(y-cy)*(y-cy)));
return res;
}
float getRadialY(float x,float y,float cx,float cy,float k){
x = (x*xscale+xshift);
y = (y*yscale+yshift);
float res = y+((y-cy)*k*((x-cx)*(x-cx)+(y-cy)*(y-cy)));
return res;
}
float thresh = 1;
float calc_shift(float x1,float x2,float cx,float k){
float x3 = x1+(x2-x1)*0.5;
float res1 = x1+((x1-cx)*k*((x1-cx)*(x1-cx)));
float res3 = x3+((x3-cx)*k*((x3-cx)*(x3-cx)));
// std::cerr<<"x1: "<<x1<<" - "<<res1<<" x3: "<<x3<<" - "<<res3<<std::endl;
if(res1>-thresh and res1 < thresh)
return x1;
if(res3<0){
return calc_shift(x3,x2,cx,k);
}
else{
return calc_shift(x1,x3,cx,k);
}
}
int main(int argc, char** argv)
{
IplImage* src = cvLoadImage( argv[1], 1 );
IplImage* dst = cvCreateImage(cvGetSize(src),src->depth,src->nChannels);
IplImage* dst2 = cvCreateImage(cvGetSize(src),src->depth,src->nChannels);
float K=atof(argv[3]);
float centerX=atoi(argv[4]);
float centerY=atoi(argv[5]);
int width = cvGetSize(src).width;
int height = cvGetSize(src).height;
xshift = calc_shift(0,centerX-1,centerX,K);
float newcenterX = width-centerX;
float xshift_2 = calc_shift(0,newcenterX-1,newcenterX,K);
yshift = calc_shift(0,centerY-1,centerY,K);
float newcenterY = height-centerY;
float yshift_2 = calc_shift(0,newcenterY-1,newcenterY,K);
// scale = (centerX-xshift)/centerX;
xscale = (width-xshift-xshift_2)/width;
yscale = (height-yshift-yshift_2)/height;
std::cerr<<xshift<<" "<<yshift<<" "<<xscale<<" "<<yscale<<std::endl;
std::cerr<<cvGetSize(src).height<<std::endl;
std::cerr<<cvGetSize(src).width<<std::endl;
for(int j=0;j<cvGetSize(dst).height;j++){
for(int i=0;i<cvGetSize(dst).width;i++){
CvScalar s;
float x = getRadialX((float)i,(float)j,centerX,centerY,K);
float y = getRadialY((float)i,(float)j,centerX,centerY,K);
sampleImage(src,y,x,s);
cvSet2D(dst,j,i,s);
}
}
#if 0
cvNamedWindow( "Source1", 1 );
cvShowImage( "Source1", dst);
cvWaitKey(0);
#endif
cvSaveImage(argv[2],dst,0);
#if 0
for(int j=0;j<cvGetSize(src).height;j++){
for(int i=0;i<cvGetSize(src).width;i++){
CvScalar s;
sampleImage(src,j+0.25,i+0.25,s);
cvSet2D(dst,j,i,s);
}
}
cvNamedWindow( "Source1", 1 );
cvShowImage( "Source1", src);
cvWaitKey(0);
#endif
}
Thanks to the two answers above for this code. I've modified the transcribed code in Java to use Bitmaps instead of BufferedImage, which enables the code to run on Android (which doesn't support AWT). I've also made the effect manipulate only the pixels inside a circle rather than the whole Bitmap, which gives a fisheye "lens" effect. Hope this helps any Android developers.
import android.graphics.Bitmap;
import android.util.Log;
class Filters{
float xscale;
float yscale;
float xshift;
float yshift;
int [] s;
private String TAG = "Filters";
public Filters(){
Log.e(TAG, "***********inside constructor");
}
public Bitmap barrel (Bitmap input, float k){
Log.e(TAG, "***********inside barrel method ");
float centerX=input.getWidth()/2; //center of distortion
float centerY=input.getHeight()/2;
int width = input.getWidth(); //image bounds
int height = input.getHeight();
Bitmap dst = Bitmap.createBitmap(width, height,input.getConfig() ); //output pic
Log.e(TAG, "***********dst bitmap created ");
xshift = calc_shift(0,centerX-1,centerX,k);
float newcenterX = width-centerX;
float xshift_2 = calc_shift(0,newcenterX-1,newcenterX,k);
yshift = calc_shift(0,centerY-1,centerY,k);
float newcenterY = height-centerY;
float yshift_2 = calc_shift(0,newcenterY-1,newcenterY,k);
xscale = (width-xshift-xshift_2)/width;
yscale = (height-yshift-yshift_2)/height;
Log.e(TAG, "***********about to loop through bm");
/*for(int j=0;j<dst.getHeight();j++){
for(int i=0;i<dst.getWidth();i++){
float x = getRadialX((float)i,(float)j,centerX,centerY,k);
float y = getRadialY((float)i,(float)j,centerX,centerY,k);
sampleImage(input,x,y);
int color = ((s[1]&0x0ff)<<16)|((s[2]&0x0ff)<<8)|(s[3]&0x0ff);
// System.out.print(i+" "+j+" \\");
dst.setPixel(i, j, color);
}
}*/
int origPixel; // the pixel in orig image
for(int j=0;j<dst.getHeight();j++){
for(int i=0;i<dst.getWidth();i++){
origPixel= input.getPixel(i,j);
float x = getRadialX((float)i,(float)j,centerX,centerY,k);
float y = getRadialY((float)i,(float)j,centerX,centerY,k);
sampleImage(input,x,y);
int color = ((s[1]&0x0ff)<<16)|((s[2]&0x0ff)<<8)|(s[3]&0x0ff);
// System.out.print(i+" "+j+" \\");
// check whether a pixel is within the circle bounds of 150
if( Math.sqrt( Math.pow(i - centerX, 2) + ( Math.pow(j - centerY, 2) ) ) <= 150 ){
dst.setPixel(i, j, color);
}else{
dst.setPixel(i,j,origPixel);
}
}
}
return dst;
}
void sampleImage(Bitmap arr, float idx0, float idx1)
{
s = new int [4];
if(idx0<0 || idx1<0 || idx0>(arr.getHeight()-1) || idx1>(arr.getWidth()-1)){
s[0]=0;
s[1]=0;
s[2]=0;
s[3]=0;
return;
}
float idx0_fl=(float) Math.floor(idx0);
float idx0_cl=(float) Math.ceil(idx0);
float idx1_fl=(float) Math.floor(idx1);
float idx1_cl=(float) Math.ceil(idx1);
int [] s1 = getARGB(arr,(int)idx0_fl,(int)idx1_fl);
int [] s2 = getARGB(arr,(int)idx0_fl,(int)idx1_cl);
int [] s3 = getARGB(arr,(int)idx0_cl,(int)idx1_cl);
int [] s4 = getARGB(arr,(int)idx0_cl,(int)idx1_fl);
float x = idx0 - idx0_fl;
float y = idx1 - idx1_fl;
s[0]= (int) (s1[0]*(1-x)*(1-y) + s2[0]*(1-x)*y + s3[0]*x*y + s4[0]*x*(1-y));
s[1]= (int) (s1[1]*(1-x)*(1-y) + s2[1]*(1-x)*y + s3[1]*x*y + s4[1]*x*(1-y));
s[2]= (int) (s1[2]*(1-x)*(1-y) + s2[2]*(1-x)*y + s3[2]*x*y + s4[2]*x*(1-y));
s[3]= (int) (s1[3]*(1-x)*(1-y) + s2[3]*(1-x)*y + s3[3]*x*y + s4[3]*x*(1-y));
}
int [] getARGB(Bitmap buf,int x, int y){
int rgb = buf.getPixel(y, x); // Returns by default ARGB.
int [] scalar = new int[4];
scalar[0] = (rgb >>> 24) & 0xFF;
scalar[1] = (rgb >>> 16) & 0xFF;
scalar[2] = (rgb >>> 8) & 0xFF;
scalar[3] = (rgb >>> 0) & 0xFF;
return scalar;
}
float getRadialX(float x,float y,float cx,float cy,float k){
x = (x*xscale+xshift);
y = (y*yscale+yshift);
float res = x+((x-cx)*k*((x-cx)*(x-cx)+(y-cy)*(y-cy)));
return res;
}
float getRadialY(float x,float y,float cx,float cy,float k){
x = (x*xscale+xshift);
y = (y*yscale+yshift);
float res = y+((y-cy)*k*((x-cx)*(x-cx)+(y-cy)*(y-cy)));
return res;
}
float thresh = 1;
float calc_shift(float x1,float x2,float cx,float k){
float x3 = (float)(x1+(x2-x1)*0.5);
float res1 = x1+((x1-cx)*k*((x1-cx)*(x1-cx)));
float res3 = x3+((x3-cx)*k*((x3-cx)*(x3-cx)));
if(res1>-thresh && res1 < thresh)
return x1;
if(res3<0){
return calc_shift(x3,x2,cx,k);
}
else{
return calc_shift(x1,x3,cx,k);
}
}
}
And the multi-threaded version, which splits the bitmap into horizontal sections and processes each section in a thread pool:
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.FutureTask;
import android.graphics.Bitmap;
import android.graphics.drawable.BitmapDrawable;
import android.os.Debug;
import android.util.Log;
public class MultiRuntimeProcessorFilter {
private static final String TAG = "mrpf";
private int x = 0;
private Bitmap input = null;
private int radius;
public void createBitmapSections(int nOp, int[] sections){
int processors = nOp;
int jMax = input.getHeight();
int aSectionSize = (int) Math.ceil(jMax/processors);
Log.e(TAG, "++++++++++ sections size = "+aSectionSize);
int k = 0;
for(int h=0; h<processors+1; h++){
sections[h] = k;
k+= aSectionSize;
}
}// end of createBitmapSections()
@SuppressWarnings("unchecked")
public Bitmap barrel (Bitmap input, float k, int r){
this.radius = r;
this.input = input;
int []arr = new int[input.getWidth()*input.getHeight()];
Log.e(TAG, "bitmap height = "+input.getHeight());
int nrOfProcessors = Runtime.getRuntime().availableProcessors();
Log.e(TAG, "no of processors = "+nrOfProcessors);
int[] sections = new int[nrOfProcessors+1];
createBitmapSections(nrOfProcessors,sections);
ExecutorService threadPool = Executors.newFixedThreadPool(nrOfProcessors);
for(int g=0; g<sections.length;g++){
Log.e(TAG, "++++++++++ sections= "+sections[g]);
}
// ExecutorService threadPool = Executors.newFixedThreadPool(nrOfProcessors);
Object[] task = new Object[nrOfProcessors];
for(int z = 0; z < nrOfProcessors; z++){
task[z] = (FutureTask<PartialResult>) threadPool.submit(new PartialProcessing(sections[z], sections[z+1] - 1, input, k));
Log.e(TAG, "++++++++++ task"+z+"= "+task[z].toString());
}
PartialResult[] results = new PartialResult[nrOfProcessors];
try{
for(int t = 0; t < nrOfProcessors; t++){
results[t] = ((FutureTask<PartialResult>) task[t]).get();
results[t].fill(arr);
}
}catch(Exception e){
e.printStackTrace();
}
Bitmap dst2 = Bitmap.createBitmap(arr,input.getWidth(),input.getHeight(),input.getConfig());
return dst2;
}//end of barrel()
public class PartialResult {
int startP;
int endP;
int[] storedValues;
public PartialResult(int startp, int endp, Bitmap input){
this.startP = startp;
this.endP = endp;
this.storedValues = new int[input.getWidth()*input.getHeight()];
}
public void addValue(int p, int result) {
storedValues[p] = result;
}
public void fill(int[] arr) {
for (int p = startP; p < endP; p++){
for(int b=0;b<radius;b++,x++)
arr[x] = storedValues[x];
}
Log.e(TAG, "++++++++++ x ="+x);
}
}//end of partialResult
public class PartialProcessing implements Callable<PartialResult> {
int startJ;
int endJ;
private int[] scalar;
private float xscale;
private float yscale;
private float xshift;
private float yshift;
private float thresh = 1;
private int [] s1;
private int [] s2;
private int [] s3;
private int [] s4;
private int [] s;
private Bitmap input;
private float k;
public PartialProcessing(int startj, int endj, Bitmap input, float k) {
this.startJ = startj;
this.endJ = endj;
this.input = input;
this.k = k;
s = new int[4];
scalar = new int[4];
s1 = new int[4];
s2 = new int[4];
s3 = new int[4];
s4 = new int[4];
}
int [] getARGB(Bitmap buf,int x, int y){
int rgb = buf.getPixel(y, x); // Returns by default ARGB.
// int [] scalar = new int[4];
// scalar[0] = (rgb >>> 24) & 0xFF;
scalar[1] = (rgb >>> 16) & 0xFF;
scalar[2] = (rgb >>> 8) & 0xFF;
scalar[3] = (rgb >>> 0) & 0xFF;
return scalar;
}
float getRadialX(float x,float y,float cx,float cy,float k){
x = (x*xscale+xshift);
y = (y*yscale+yshift);
float res = x+((x-cx)*k*((x-cx)*(x-cx)+(y-cy)*(y-cy)));
return res;
}
float getRadialY(float x,float y,float cx,float cy,float k){
x = (x*xscale+xshift);
y = (y*yscale+yshift);
float res = y+((y-cy)*k*((x-cx)*(x-cx)+(y-cy)*(y-cy)));
return res;
}
float calc_shift(float x1,float x2,float cx,float k){
float x3 = (float)(x1+(x2-x1)*0.5);
float res1 = x1+((x1-cx)*k*((x1-cx)*(x1-cx)));
float res3 = x3+((x3-cx)*k*((x3-cx)*(x3-cx)));
if(res1>-thresh && res1 < thresh)
return x1;
if(res3<0){
return calc_shift(x3,x2,cx,k);
}
else{
return calc_shift(x1,x3,cx,k);
}
}
void sampleImage(Bitmap arr, float idx0, float idx1)
{
// s = new int [4];
if(idx0<0 || idx1<0 || idx0>(arr.getHeight()-1) || idx1>(arr.getWidth()-1)){
s[0]=0;
s[1]=0;
s[2]=0;
s[3]=0;
return;
}
float idx0_fl=(float) Math.floor(idx0);
float idx0_cl=(float) Math.ceil(idx0);
float idx1_fl=(float) Math.floor(idx1);
float idx1_cl=(float) Math.ceil(idx1);
s1 = getARGB(arr,(int)idx0_fl,(int)idx1_fl);
s2 = getARGB(arr,(int)idx0_fl,(int)idx1_cl);
s3 = getARGB(arr,(int)idx0_cl,(int)idx1_cl);
s4 = getARGB(arr,(int)idx0_cl,(int)idx1_fl);
float x = idx0 - idx0_fl;
float y = idx1 - idx1_fl;
// s[0]= (int) (s1[0]*(1-x)*(1-y) + s2[0]*(1-x)*y + s3[0]*x*y + s4[0]*x*(1-y));
s[1]= (int) (s1[1]*(1-x)*(1-y) + s2[1]*(1-x)*y + s3[1]*x*y + s4[1]*x*(1-y));
s[2]= (int) (s1[2]*(1-x)*(1-y) + s2[2]*(1-x)*y + s3[2]*x*y + s4[2]*x*(1-y));
s[3]= (int) (s1[3]*(1-x)*(1-y) + s2[3]*(1-x)*y + s3[3]*x*y + s4[3]*x*(1-y));
}
@Override public PartialResult call() {
PartialResult partialResult = new PartialResult(startJ, endJ,input);
float centerX=input.getWidth()/2; //center of distortion
float centerY=input.getHeight()/2;
int width = input.getWidth(); //image bounds
int height = input.getHeight();
xshift = calc_shift(0,centerX-1,centerX,k);
float newcenterX = width-centerX;
float xshift_2 = calc_shift(0,newcenterX-1,newcenterX,k);
yshift = calc_shift(0,centerY-1,centerY,k);
float newcenterY = height-centerY;
float yshift_2 = calc_shift(0,newcenterY-1,newcenterY,k);
xscale = (width-xshift-xshift_2)/width;
yscale = (height-yshift-yshift_2)/height;
int p = startJ*radius;
int origPixel = 0;
int color = 0;
int i;
for (int j = startJ; j < endJ; j++){
for ( i = 0; i < width; i++, p++){
origPixel = input.getPixel(i,j);
float x = getRadialX((float)j,(float)i,centerX,centerY,k);
float y = getRadialY((float)j,(float)i,centerX,centerY,k);
sampleImage(input,x,y);
color = ((s[1]&0x0ff)<<16)|((s[2]&0x0ff)<<8)|(s[3]&0x0ff);
//Log.e(TAG, "radius = "+radius);
if(((i-centerX)*(i-centerX) + (j-centerY)*(j-centerY)) <= radius*(radius/4)){
partialResult.addValue(p, color);
}else{
partialResult.addValue(p, origPixel);
}
}//end of inner for
}//end of outer for
return partialResult;
}//end of call
}// end of partialprocessing
}//end of MultiProcesorFilter
@And_Dev, as promised.
Below is the view that gets the user's touch coordinates and then calls the filter on a selected area; the selected area is the touched coordinate (the center of a circle) plus a radius. The code does this twice, as it's for a breast augmentation app :) Just comment out the HorizontalSlider code, as you won't need it.
import java.io.BufferedInputStream;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PorterDuff.Mode;
import android.graphics.PorterDuffXfermode;
import android.os.Environment;
import android.util.AttributeSet;
import android.util.Log;
import android.view.MotionEvent;
import android.view.View;
import com.tecmark.HorizontalSlider.OnProgressChangeListener;
public class TouchView extends View{
private File tempFile;
private byte[] imageArray;
private Bitmap bgr;
private Bitmap crop;
private Bitmap crop2;
private Bitmap overLay;
private Bitmap overLay2;
private float centreX;
private float centreY;
private float centreA = 200;
private float centreB = 200;
private Boolean xyFound = false;
private int Progress = 1;
private static final String TAG = "*********TouchView";
private Filters f = null;
private boolean bothCirclesInPlace = false;
private MultiProcessorFilter mpf;
private MultiProcessorFilter mpf2;
private MultiRuntimeProcessorFilter mrpf;
private MultiRuntimeProcessorFilter mrpf2;
public TouchView(Context context) {
super(context);
}
public TouchView(Context context, AttributeSet attr) {
super(context,attr);
Log.e(TAG, "++++++++++ inside touchview constructor");
tempFile = new File(Environment.getExternalStorageDirectory().
getAbsolutePath() + "/"+"image.jpg");
imageArray = new byte[(int)tempFile.length()];
try{
InputStream is = new FileInputStream(tempFile);
BufferedInputStream bis = new BufferedInputStream(is);
DataInputStream dis = new DataInputStream(bis);
int i = 0;
while (dis.available() > 0) {
imageArray[i] = dis.readByte();
i++;
}
dis.close();
} catch (Exception e) {
e.printStackTrace();
}
Bitmap bm = BitmapFactory.decodeByteArray(imageArray, 0, imageArray.length);
bgr = bm.copy(bm.getConfig(), true);
overLay = null;
overLay2 = null;
bm.recycle();
}// end of touchView constructor
public void findCirclePixels(){
// f = new Filters();
// mpf = new MultiProcessorFilter();
// mpf2 = new MultiProcessorFilter();
mrpf = new MultiRuntimeProcessorFilter();
mrpf2 = new MultiRuntimeProcessorFilter();
crop = Bitmap.createBitmap(bgr,Math.max((int)centreX-75,0),Math.max((int)centreY-75,0),150,150);
crop2 = Bitmap.createBitmap(bgr,Math.max((int)centreA-75,0),Math.max((int)centreB-75,0),150,150);
new Thread(new Runnable() {
public void run() {
float prog = (float)Progress/150001;
// final Bitmap bgr3 = f.barrel(crop,prog);
// final Bitmap bgr4 = f.barrel(crop2,prog);
// final Bitmap bgr3 = mpf.barrel(crop,prog);
// final Bitmap bgr4 = mpf2.barrel(crop2,prog);
final Bitmap bgr3 = mrpf.barrel(crop,prog);
final Bitmap bgr4 = mrpf2.barrel(crop2,prog);
TouchView.this.post(new Runnable() {
public void run() {
TouchView.this.overLay = bgr3;
TouchView.this.overLay2 = bgr4;
TouchView.this.invalidate();
}
});
}
}).start();
}// end of changePixel()
@Override
public boolean onTouchEvent(MotionEvent ev) {
switch (ev.getAction()) {
case MotionEvent.ACTION_DOWN: {
if(xyFound == false){
centreX = (int) ev.getX();
centreY = (int) ev.getY();
xyFound = true;
}else{
centreA = (int) ev.getX();
centreB = (int) ev.getY();
bothCirclesInPlace = true;
}
break;
}
/* case MotionEvent.ACTION_MOVE: {
if(xyFound == false){
centreX = (int) ev.getX();
centreY = (int) ev.getY();
xyFound = true;
}else{
centreA = (int) ev.getX();
centreB = (int) ev.getY();
bothCirclesInPlace = true;
}
findCirclePixels();
// TouchView.this.invalidate();
break;
}*/
case MotionEvent.ACTION_UP:
break;
}
return true;
}//end of onTouchEvent
public void initSlider(final HorizontalSlider slider)
{
slider.setOnProgressChangeListener(changeListener);
}
private OnProgressChangeListener changeListener = new OnProgressChangeListener() {
@Override
public void onProgressChanged(View v, int progress) {
setProgress(progress);
}
};
@Override
public void onDraw(Canvas canvas){
super.onDraw(canvas);
Log.e(TAG, "******about to draw bgr ");
canvas.drawBitmap(bgr, 0, 0, null);
if(bothCirclesInPlace == true){
if(overLay != null){
Log.e(TAG, "******about to draw overlay1 ");
canvas.drawBitmap(overLay, centreX-75, centreY-75, null);
}
if(overLay2 != null){
Log.e(TAG, "******about to draw overlay2 ");
canvas.drawBitmap(overLay2, centreA-75, centreB-75, null);
}
}
}//end of onDraw
protected void setProgress(int progress2) {
Log.e(TAG, "***********in SETPROGRESS");
this.Progress = progress2;
findCirclePixels();
}
}
The calling activity:
import android.app.Activity;
import android.os.AsyncTask;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.view.View.OnClickListener;
import android.widget.Button;
public class Jjilapp extends Activity {
private static final String TAG = "*********jjil";
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.touchview);
final TouchView touchView = (TouchView)findViewById(R.id.touchview);
final HorizontalSlider slider = (HorizontalSlider)findViewById(R.id.slider);
touchView.initSlider(slider);
}//end of oncreate
}
If you need any help, mate, just ask. Hope this helps.
Thanks for that code; it helped me a lot. I transcribed it to Java. Maybe someone has a similar function for simulating tangential distortion?
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import javax.imageio.ImageIO;
import com.jhlabs.image.InterpolateFilter;
class Filters{
float xscale;
float yscale;
float xshift;
float yshift;
int [] s;
public Filters(){
}
public BufferedImage barrel (BufferedImage input, float k){
float centerX=input.getWidth()/2; //center of distortion
float centerY=input.getHeight()/2;
int width = input.getWidth(); //image bounds
int height = input.getHeight();
BufferedImage dst = new BufferedImage(width, height,BufferedImage.TYPE_INT_RGB); //output pic
xshift = calc_shift(0,centerX-1,centerX,k);
float newcenterX = width-centerX;
float xshift_2 = calc_shift(0,newcenterX-1,newcenterX,k);
yshift = calc_shift(0,centerY-1,centerY,k);
float newcenterY = height-centerY;
float yshift_2 = calc_shift(0,newcenterY-1,newcenterY,k);
xscale = (width-xshift-xshift_2)/width;
yscale = (height-yshift-yshift_2)/height;
for(int j=0;j<dst.getHeight();j++){
for(int i=0;i<dst.getWidth();i++){
float x = getRadialX((float)i,(float)j,centerX,centerY,k);
float y = getRadialY((float)i,(float)j,centerX,centerY,k);
sampleImage(input,x,y);
int color = ((s[1]&0x0ff)<<16)|((s[2]&0x0ff)<<8)|(s[3]&0x0ff);
// System.out.print(i+" "+j+" \\");
dst.setRGB(i, j, color);
}
}
return dst;
}
void sampleImage(BufferedImage arr, float idx0, float idx1)
{
s = new int [4];
if(idx0<0 || idx1<0 || idx0>(arr.getHeight()-1) || idx1>(arr.getWidth()-1)){
s[0]=0;
s[1]=0;
s[2]=0;
s[3]=0;
return;
}
float idx0_fl=(float) Math.floor(idx0);
float idx0_cl=(float) Math.ceil(idx0);
float idx1_fl=(float) Math.floor(idx1);
float idx1_cl=(float) Math.ceil(idx1);
int [] s1 = getARGB(arr,(int)idx0_fl,(int)idx1_fl);
int [] s2 = getARGB(arr,(int)idx0_fl,(int)idx1_cl);
int [] s3 = getARGB(arr,(int)idx0_cl,(int)idx1_cl);
int [] s4 = getARGB(arr,(int)idx0_cl,(int)idx1_fl);
float x = idx0 - idx0_fl;
float y = idx1 - idx1_fl;
s[0]= (int) (s1[0]*(1-x)*(1-y) + s2[0]*(1-x)*y + s3[0]*x*y + s4[0]*x*(1-y));
s[1]= (int) (s1[1]*(1-x)*(1-y) + s2[1]*(1-x)*y + s3[1]*x*y + s4[1]*x*(1-y));
s[2]= (int) (s1[2]*(1-x)*(1-y) + s2[2]*(1-x)*y + s3[2]*x*y + s4[2]*x*(1-y));
s[3]= (int) (s1[3]*(1-x)*(1-y) + s2[3]*(1-x)*y + s3[3]*x*y + s4[3]*x*(1-y));
}
int [] getARGB(BufferedImage buf,int x, int y){
int rgb = buf.getRGB(x, y); // Returns by default ARGB.
int [] scalar = new int[4];
scalar[0] = (rgb >>> 24) & 0xFF;
scalar[1] = (rgb >>> 16) & 0xFF;
scalar[2] = (rgb >>> 8) & 0xFF;
scalar[3] = (rgb >>> 0) & 0xFF;
return scalar;
}
float getRadialX(float x,float y,float cx,float cy,float k){
x = (x*xscale+xshift);
y = (y*yscale+yshift);
float res = x+((x-cx)*k*((x-cx)*(x-cx)+(y-cy)*(y-cy)));
return res;
}
float getRadialY(float x,float y,float cx,float cy,float k){
x = (x*xscale+xshift);
y = (y*yscale+yshift);
float res = y+((y-cy)*k*((x-cx)*(x-cx)+(y-cy)*(y-cy)));
return res;
}
float thresh = 1;
float calc_shift(float x1,float x2,float cx,float k){
float x3 = (float)(x1+(x2-x1)*0.5);
float res1 = x1+((x1-cx)*k*((x1-cx)*(x1-cx)));
float res3 = x3+((x3-cx)*k*((x3-cx)*(x3-cx)));
if(res1>-thresh && res1 < thresh)
return x1;
if(res3<0){
return calc_shift(x3,x2,cx,k);
}
else{
return calc_shift(x1,x3,cx,k);
}
}
}
I debugged the Java files and they work fine on my phones (Android 4.0 and higher). The project consists of 3 Java files and 1 XML file. You have to place the checkerboardback.jpg file under the drawable directory. As someone said, the alpha value was missing, so I set it to 0xff. In addition, the upper bounds of some loops were wrong.
//1. MultiRuntimeProcessorFilter.java
public class MultiRuntimeProcessorFilter {
private static final String TAG = "mrpf";
private int x = 0;
private Bitmap input = null;
private int radius;
private int mHeight;
public void createBitmapSections(int nOp, int[] sections){
int processors = nOp;
int jMax = input.getHeight();
int aSectionSize = (int) Math.ceil(jMax/processors);
Log.e("yoSIZECHK", "++++++++++ sections size = "+aSectionSize);
int k = 0;
for(int h=0; h<processors+1; h++){
sections[h] = k;
k+= aSectionSize;
if(h==processors){
sections[h] = mHeight;//Last must cover ceiling
}
Log.v("yoSEC","sections = "+h+" "+sections[h]);
}
}// end of createBitmapSections()
//@SuppressWarnings("unchecked")
public Bitmap barrel (Bitmap input, float k, int r){
this.radius = r;
this.input = input;
int []mArray = new int[input.getWidth()*input.getHeight()];
mHeight = input.getHeight();
Log.e(TAG, "bitmap height x width = "+mHeight+" "+input.getWidth());
//Log.v("yoRESULT", "height width = "+ input.getWidth()+" "+input.getHeight());
int nrOfProcessors = Runtime.getRuntime().availableProcessors();
Log.e(TAG, "no of processors = "+nrOfProcessors);
int[] sections = new int[nrOfProcessors+1];
createBitmapSections(nrOfProcessors,sections);
ExecutorService threadPool = Executors.newFixedThreadPool(nrOfProcessors);
for(int g=0; g<sections.length;g++){
Log.e(TAG, "++++++++++ sections= "+sections[g]);
}
// ExecutorService threadPool = Executors.newFixedThreadPool(nrOfProcessors);
Object[] task = new Object[nrOfProcessors];
for(int z = 0; z < nrOfProcessors; z++){
task[z] = (FutureTask<PartialResult>) threadPool.submit(new PartialProcessing(sections[z], sections[z+1] - 1, input, k, z));
Log.e(TAG, "++++++++++ task"+z+"= "+task[z].toString());
}
PartialResult[] results = new PartialResult[nrOfProcessors];
try{
for(int t = 0; t < nrOfProcessors; t++){
results[t] = ((FutureTask<PartialResult>) task[t]).get();
results[t].fill(mArray);
}
}catch(Exception e){
e.printStackTrace();
}
Log.v("yoRESULT", "height width = "+ input.getHeight()+" "+input.getWidth());
Bitmap dst2 = Bitmap.createBitmap(mArray,input.getWidth(),input.getHeight(),input.getConfig());
return dst2;
}//end of barrel()
public class PartialResult {
int startP;
int endP;
int[] storedValues;
public PartialResult(int startp, int endp, Bitmap input){
this.startP = startp;
this.endP = endp;
this.storedValues = new int[input.getWidth()*input.getHeight()];
}
public void addValue(int p, int result) {
storedValues[p] = result;
}
public void fill(int[] mArray) {
Log.v("yo09", startP + " " + endP + " " + input.getWidth());
//yoko for (int p = startP; p < endP; p++){
for (int p = startP; p < endP+1; p++){
//for(int b=0;b<radius;b++,x++)
for(int b=0;b<input.getWidth();b++,x++) {
mArray[x] = storedValues[x];
if (b == 0) Log.v("yoyoyo", p+" + " + storedValues[x]);
}
}
Log.e("yoFill", " ++++++++++ radius x = "+radius+" "+x);
}
}//end of partialResult
public class PartialProcessing implements Callable<PartialResult> {
int startJ;
int endJ;
int mID;
private int[] scalar;
private float xscale;
private float yscale;
private float xshift;
private float yshift;
private float thresh = 1;
private int [] s1;
private int [] s2;
private int [] s3;
private int [] s4;
private int [] s;
private Bitmap input;
private float k;
public PartialProcessing(int startj, int endj, Bitmap input, float k, int mID) {
this.startJ = startj;
this.endJ = endj;
this.input = input;
this.k = k;
this.mID = mID;
s = new int[4];
scalar = new int[4];
s1 = new int[4];
s2 = new int[4];
s3 = new int[4];
s4 = new int[4];
}
int [] getARGB(Bitmap buf,int x, int y){
int rgb = buf.getPixel(y, x); // Returns by default ARGB.
// int [] scalar = new int[4];
// scalar[0] = (rgb >>> 24) & 0xFF;
scalar[1] = (rgb >>> 16) & 0xFF;
scalar[2] = (rgb >>> 8) & 0xFF;
scalar[3] = (rgb >>> 0) & 0xFF;
return scalar;
}
float getRadialX(float x,float y,float cx,float cy,float k){
x = (x*xscale+xshift);
y = (y*yscale+yshift);
float res = x+((x-cx)*k*((x-cx)*(x-cx)+(y-cy)*(y-cy)));
return res;
}
float getRadialY(float x,float y,float cx,float cy,float k){
x = (x*xscale+xshift);
y = (y*yscale+yshift);
float res = y+((y-cy)*k*((x-cx)*(x-cx)+(y-cy)*(y-cy)));
return res;
}
float calc_shift(float x1,float x2,float cx,float k){
float x3 = (float)(x1+(x2-x1)*0.5);
float res1 = x1+((x1-cx)*k*((x1-cx)*(x1-cx)));
float res3 = x3+((x3-cx)*k*((x3-cx)*(x3-cx)));
if(res1>-thresh && res1 < thresh)
return x1;
if(res3<0){
return calc_shift(x3,x2,cx,k);
}
else{
return calc_shift(x1,x3,cx,k);
}
}
//void sampleImage(Bitmap mArray, float idx0, float idx1)
int [] sampleImage(Bitmap mArray2, float idx0, float idx1)
{
// s = new int [4];
if(idx0<0 || idx1<0 || idx0>(mArray2.getHeight()-1) || idx1>(mArray2.getWidth()-1)){
s[0]=0;
s[1]=0;
s[2]=0;
s[3]=0;
return s;// yoko
}
float idx0_fl=(float) Math.floor(idx0);
float idx0_cl=(float) Math.ceil(idx0);
float idx1_fl=(float) Math.floor(idx1);
float idx1_cl=(float) Math.ceil(idx1);
s1 = getARGB(mArray2,(int)idx0_fl,(int)idx1_fl);
s2 = getARGB(mArray2,(int)idx0_fl,(int)idx1_cl);
s3 = getARGB(mArray2,(int)idx0_cl,(int)idx1_cl);
s4 = getARGB(mArray2,(int)idx0_cl,(int)idx1_fl);
float x = idx0 - idx0_fl;
float y = idx1 - idx1_fl;
// s[0]= (int) (s1[0]*(1-x)*(1-y) + s2[0]*(1-x)*y + s3[0]*x*y + s4[0]*x*(1-y));
s[1]= (int) (s1[1]*(1-x)*(1-y) + s2[1]*(1-x)*y + s3[1]*x*y + s4[1]*x*(1-y));
s[2]= (int) (s1[2]*(1-x)*(1-y) + s2[2]*(1-x)*y + s3[2]*x*y + s4[2]*x*(1-y));
s[3]= (int) (s1[3]*(1-x)*(1-y) + s2[3]*(1-x)*y + s3[3]*x*y + s4[3]*x*(1-y));
return s;
}
@Override
public PartialResult call() {
PartialResult partialResult = new PartialResult(startJ, endJ,input);
float centerX=input.getWidth()/2; //center of distortion
float centerY=input.getHeight()/2;
int width = input.getWidth(); //image bounds
int height = input.getHeight();
xshift = calc_shift(0,centerX-1,centerX,k);
float newcenterX = width-centerX;
float xshift_2 = calc_shift(0,newcenterX-1,newcenterX,k);
yshift = calc_shift(0,centerY-1,centerY,k);
float newcenterY = height-centerY;
float yshift_2 = calc_shift(0,newcenterY-1,newcenterY,k);
xscale = (width-xshift-xshift_2)/width;
yscale = (height-yshift-yshift_2)/height;
// yoko int p = startJ*radius;
int p = startJ*width;//yoko
int origPixel = 0;
int color = 0;
int i;
Log.v("yokoIJ","PartialResult startJ endJ "+startJ+" "+endJ);
//yoko for (int j = startJ; j < endJ; j++){
for (int j = startJ; j < endJ+1; j++){
for ( i = 0; i < width; i++, p++){
s = new int [4];//yoko added
origPixel = input.getPixel(i,j);
float x = getRadialX((float)j,(float)i,centerX,centerY,k);
float y = getRadialY((float)j,(float)i,centerX,centerY,k);
//sampleImage(input,x,y); //yoko
s= sampleImage(input,x,y);
color = (0xff<<24)|((s[1]&0x0ff)<<16)|((s[2]&0x0ff)<<8)|(s[3]&0x0ff);
//Log.e(TAG, "radius = "+radius);
//Not understand why it is not radius but radius/2
//yoko if(((i-centerX)*(i-centerX) + (j-centerY)*(j-centerY)) <= radius*(radius/4)){
if(((i-centerX)*(i-centerX) + (j-centerY)*(j-centerY)) <= radius*radius){
//yo if(j%10 == 1 && i%10 == 1)
//yo Log.v("yoJI", mID+" "+j + " " + i );
partialResult.addValue(p, color);
}else{
partialResult.addValue(p, origPixel);
}
}//end of inner for
}//end of outer for
return partialResult;
}//end of call
}// end of partialprocessing
}//end of MultiProcesorFilter
// 2. Filters.java:
class Filters{
float xscale;
float yscale;
float xshift;
float yshift;
int [] s;
private static String TAG = "Filters";
public Filters(){
Log.e(TAG, "***********inside constructor");
}
public Bitmap barrel (Bitmap input, float k, boolean check, int Range){
Log.e(TAG, "***********inside barrel method : hasAlpha = ");
float centerX=input.getWidth()/2; //center of distortion
float centerY=input.getHeight()/2;
int width = input.getWidth(); //image bounds
int height = input.getHeight();
//yoko Log.v("yoQQ", width+" "+height+" "+centerX+" "+centerY);
if(check)return input;
Bitmap dst = Bitmap.createBitmap(width, height,input.getConfig() ); //output pic
Log.e(TAG, "***********dst bitmap created ");
xshift = calc_shift(0,centerX-1,centerX,k);
float newcenterX = width-centerX;
float xshift_2 = calc_shift(0,newcenterX-1,newcenterX,k);
yshift = calc_shift(0,centerY-1,centerY,k);
float newcenterY = height-centerY;
float yshift_2 = calc_shift(0,newcenterY-1,newcenterY,k);
xscale = (width-xshift-xshift_2)/width;
yscale = (height-yshift-yshift_2)/height;
Log.e(TAG, "***********about to loop through bm");
Log.v("yoQQ2", xscale + " " + yscale);
//if(check==1)return input;//yoko
/*for(int j=0;j<dst.getHeight();j++){
for(int i=0;i<dst.getWidth();i++){
float x = getRadialX((float)i,(float)j,centerX,centerY,k);
float y = getRadialY((float)i,(float)j,centerX,centerY,k);
sampleImage(input,x,y);
int color = ((s[1]&0x0ff)<<16)|((s[2]&0x0ff)<<8)|(s[3]&0x0ff);
// System.out.print(i+" "+j+" \\");
dst.setPixel(i, j, color);
}
}*/
int origPixel; // the pixel in orig image
int i=0,j=0;
for(j=0;j<dst.getHeight();j++){
for(i=0;i<dst.getWidth();i++){
s = new int [4];//yoko added
origPixel= input.getPixel(i,j);
float x = getRadialX((float)i,(float)j,centerX,centerY,k);
float y = getRadialY((float)i,(float)j,centerX,centerY,k);
//yoko sampleImage(input,x,y);
s = sampleImage(input,x,y);
//yoko int color = ((s[1]&0x0ff)<<16)|((s[2]&0x0ff)<<8)|(s[3]&0x0ff);
int color = (0xff<<24)|((s[1]&0xff)<<16)|((s[2]&0xff)<<8)|(s[3]&0xff);
//Log.v("yoQQ3", j + " " + i + " : "+dst.getHeight()+" "+dst.getWidth());
// check whether a pixel is within the circle bounds of 150
if( Math.sqrt( Math.pow(i - centerX, 2) + ( Math.pow(j - centerY, 2) ) ) <= Range ){
dst.setPixel(i, j, color);
//if(j%10 == 1 && i%10 == 1)
// Log.v("yoJI", j + " " + i );
}else{
dst.setPixel(i,j,origPixel);
}
}
}
Log.v("yoDONE", "======== Loop End ======== "+j+" "+i+" : " + dst.getHeight()+" "+dst.getWidth());
return dst;
}//barrel
// void sampleImage(Bitmap arr, float idx0, float idx1) // yoko
int[] sampleImage(Bitmap arr, float idx0, float idx1)
{
s = new int [4];
if(idx0<0 || idx1<0 || idx0>(arr.getHeight()-1) || idx1>(arr.getWidth()-1)){
s[0]=0;
s[1]=0;
s[2]=0;
s[3]=0;
return s;
}
float idx0_fl=(float) Math.floor(idx0);
float idx0_cl=(float) Math.ceil(idx0);
float idx1_fl=(float) Math.floor(idx1);
float idx1_cl=(float) Math.ceil(idx1);
int [] s1 = getARGB(arr,(int)idx0_fl,(int)idx1_fl);
int [] s2 = getARGB(arr,(int)idx0_fl,(int)idx1_cl);
int [] s3 = getARGB(arr,(int)idx0_cl,(int)idx1_cl);
int [] s4 = getARGB(arr,(int)idx0_cl,(int)idx1_fl);
float x = idx0 - idx0_fl;
float y = idx1 - idx1_fl;
s[0]= (int) (s1[0]*(1-x)*(1-y) + s2[0]*(1-x)*y + s3[0]*x*y + s4[0]*x*(1-y));
s[1]= (int) (s1[1]*(1-x)*(1-y) + s2[1]*(1-x)*y + s3[1]*x*y + s4[1]*x*(1-y));
s[2]= (int) (s1[2]*(1-x)*(1-y) + s2[2]*(1-x)*y + s3[2]*x*y + s4[2]*x*(1-y));
s[3]= (int) (s1[3]*(1-x)*(1-y) + s2[3]*(1-x)*y + s3[3]*x*y + s4[3]*x*(1-y));
return s;///yoko added to make return the result value
}//sampleImage
int [] getARGB(Bitmap buf,int x, int y){
int rgb = buf.getPixel(y, x); // Returns by default ARGB.
int [] scalar = new int[4];
scalar[0] = (rgb >>> 24) & 0xFF;
scalar[1] = (rgb >>> 16) & 0xFF;
scalar[2] = (rgb >>> 8) & 0xFF;
scalar[3] = (rgb >>> 0) & 0xFF;
return scalar;
}//getARGB
float getRadialX(float x,float y,float cx,float cy,float k){
x = (x*xscale+xshift);
y = (y*yscale+yshift);
float res = x+((x-cx)*k*((x-cx)*(x-cx)+(y-cy)*(y-cy)));
return res;
}//getRadial1X
float getRadialY(float x,float y,float cx,float cy,float k){
x = (x*xscale+xshift);
y = (y*yscale+yshift);
float res = y+((y-cy)*k*((x-cx)*(x-cx)+(y-cy)*(y-cy)));
return res;
}//getRadialY
float thresh = 1;
float calc_shift(float x1,float x2,float cx,float k){
float x3 = (float)(x1+(x2-x1)*0.5);
float res1 = x1+((x1-cx)*k*((x1-cx)*(x1-cx)));
float res3 = x3+((x3-cx)*k*((x3-cx)*(x3-cx)));
if(res1>-thresh && res1 < thresh)
return x1;
if(res3<0){
return calc_shift(x3,x2,cx,k);
}
else{
return calc_shift(x1,x3,cx,k);
}
}//calc_shift
}
And
//3 MainActivity.java, toplevel class.
public class MainActivity extends Activity {
ImageView iv1=null;
ImageView iv2=null;
Button bT, bB, b0;
Bitmap bitmap1, bitmap2, bitmapSP;
Boolean view1 = true;
private static final String TAG = "*********jjil";
public static int mH,mW,RADIUS;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
setContentView(R.layout.activity_main);
Resources res = this.getResources();
//bitmap1 = BitmapFactory.decodeResource(res, R.drawable.checkerboard);
bitmap1 = BitmapFactory.decodeResource(res, R.drawable.checkerboardback);
mH=bitmap1.getHeight();
mW=bitmap1.getWidth();
RADIUS = mH/3;
bT = (Button)findViewById(R.id.buttontoggle);
bT.setOnClickListener(onClickToggleView);
bB = (Button)findViewById(R.id.buttonbarrel);
bB.setOnClickListener(onClickToggleView);
b0 = (Button)findViewById(R.id.button0);
b0.setOnClickListener(onClickToggleView);
iv1=(ImageView)findViewById(R.id.touchview1);
iv1.setImageBitmap(bitmap1);
iv1.setVisibility(View.VISIBLE);
}//end of oncreate
public View.OnClickListener onClickToggleView = new View.OnClickListener() {
public void onClick(View v) {
if (v == bT) {
/// fromhere
new AsyncTask<Void, Void, String>() {
com.example.owner.opengl2.Filters mFilers = new com.example.owner.opengl2.Filters();
TextView tx = (TextView)findViewById(R.id.mStatus);
Bitmap bitmapSP;long start,end;
protected void onPreExecute() {
start = System.nanoTime();
iv1.setImageBitmap(bitmap1);
tx.setText("- Running -");
}
protected String doInBackground(Void... params) {
bitmapSP = mFilers.barrel(bitmap1,(float)0.00005,false,RADIUS);
return "message";
}
protected void onPostExecute(String msg) {
end = System.nanoTime();
long elapsedTime = end - start;
long seconds = elapsedTime / 1000000;
iv1.setImageBitmap(bitmapSP);
tx.setText("- READY : ElapsedTime(ms) = "+seconds);
// Post Code
// Use `msg` in code
}
}.execute();
///upto here
} else if (v == bB){
/// fromhere
new AsyncTask<Void, Void, String>() {
com.example.owner.opengl2.MultiRuntimeProcessorFilter mFilers = new com.example.owner.opengl2.MultiRuntimeProcessorFilter();
TextView tx = (TextView)findViewById(R.id.mStatus);
Bitmap bitmapSP;long start,end;
protected void onPreExecute() {
start = System.nanoTime();
iv1.setImageBitmap(bitmap1);
tx.setText("- Running -");
}
protected String doInBackground(Void... params) {
bitmapSP = mFilers.barrel(bitmap1,(float)0.00005,RADIUS);
return "message";
}
protected void onPostExecute(String msg) {
end = System.nanoTime();
long elapsedTime = end - start;
//double seconds = (double)elapsedTime / 1000000000.0;
long seconds = elapsedTime / 1000000;
iv1.setImageBitmap(bitmapSP);
tx.setText("- READY : ElapsedTime(ms) = "+seconds);
// Post Code
// Use `msg` in code
}
}.execute();
} else if (v == b0){
new AsyncTask<Void, Void, String>() {
protected String doInBackground(Void... Unused) {
return "OK";
}
protected void onPostExecute(String message) {
Log.v("YO", "---------------------------------");
Log.v("YO", "----------ORIGINAL SHAPE-------- "+message);
Log.v("YO", "---------------------------------");
iv1.setImageBitmap(bitmap1);
TextView tx = (TextView)findViewById(R.id.mStatus);
tx.setText("- READY : w h RADIUS = "+mW+" "+mH+" "+RADIUS);
}
}.execute();
}
///upto here
}
};
}
Here is the XML file:
//4 activity_main.xml
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:tools="http://schemas.android.com/tools"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:paddingLeft="@dimen/activity_horizontal_margin"
    android:paddingRight="@dimen/activity_horizontal_margin"
    android:paddingTop="@dimen/activity_vertical_margin"
    android:paddingBottom="@dimen/activity_vertical_margin"
    android:orientation="vertical"
    tools:context=".MainActivity">
    <LinearLayout
        android:id="@+id/buttons"
        android:layout_centerHorizontal="true"
        android:layout_alignParentTop="true"
        android:orientation="horizontal"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content">
        <Button
            android:id="@+id/buttontoggle"
            android:text="Barrel 1P"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content" />
        <Button
            android:id="@+id/buttonbarrel"
            android:text="Barrel NP"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content" />
        <Button
            android:id="@+id/button0"
            android:text="ORIGINAL"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content" />
    </LinearLayout>
    <TextView
        android:id="@+id/mStatus"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:text=" - Ready - "
        android:textAppearance="?android:attr/textAppearanceSmall"
        android:layout_below="@+id/buttons"
        android:layout_centerHorizontal="true" />
    <ImageView
        android:id="@+id/touchview1"
        android:layout_below="@+id/mStatus"
        android:layout_width="match_parent"
        android:layout_height="match_parent"
        android:layout_centerHorizontal="true" />
    <!--ImageView
        android:id="@+id/touchview2"
        android:layout_below="@+id/touchview1"
        android:layout_alignParentBottom="true"
        android:layout_width="match_parent"
        android:layout_height="match_parent"
        android:layout_centerHorizontal="true" /-->
</RelativeLayout>
Do you want to use this distortion on synthetic images, or do you want to apply it to a video camera feed or something like that?
In OpenCV you should be able to do camera calibration (using the built-in functions, Zhang's algorithm).
In OpenGL, see this.
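If the goal is to simulate the distortion directly in OpenCV rather than to calibrate a real camera, here is a minimal sketch (my own, not from the answers above) that builds the same radial mapping and lets cv::remap do the bilinear sampling; the value of K and the image-center assumption would need tuning:
#include <opencv2/opencv.hpp>

// Build per-pixel lookup maps with the single-coefficient radial model and let
// cv::remap do the back-projection sampling, like the manual loops above.
cv::Mat simulateRadialDistortion(const cv::Mat& src, float K)
{
    const float cx = src.cols / 2.0f, cy = src.rows / 2.0f;   // assumed distortion center
    cv::Mat mapX(src.size(), CV_32FC1), mapY(src.size(), CV_32FC1);
    for (int y = 0; y < src.rows; ++y) {
        for (int x = 0; x < src.cols; ++x) {
            float dx = x - cx, dy = y - cy;
            float r2 = dx * dx + dy * dy;
            mapX.at<float>(y, x) = x + dx * K * r2;   // where to sample in src
            mapY.at<float>(y, x) = y + dy * K * r2;
        }
    }
    cv::Mat dst;
    cv::remap(src, dst, mapX, mapY, cv::INTER_LINEAR, cv::BORDER_CONSTANT);
    return dst;
}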
Regards