Add WarpPerspective in FastCV extension #3922

Open: wants to merge 2 commits into base: 4.x
Changes from 1 commit
16 changes: 15 additions & 1 deletion modules/fastcv/include/opencv2/fastcv/warp.hpp
@@ -1,5 +1,5 @@
/*
* Copyright (c) 2024 Qualcomm Innovation Center, Inc. All rights reserved.
* Copyright (c) 2024-2025 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: Apache-2.0
*/

@@ -17,6 +17,20 @@ namespace fastcv {
//! @addtogroup fastcv
//! @{

/**
* @brief Transform an image using perspective transformation, same as cv::warpPerspective but not bit-exact.
* @param _src Input 8-bit image. Size of buffer is srcStride*srcHeight bytes.
* @param _dst Output 8-bit image. Size of buffer is dstStride*dstHeight bytes.
Contributor comment: Please remove the information about sizes. The destination buffer is allocated by the function itself automatically.

* @param _M0 3x3 perspective transformation matrix.
* @param dsize Size of the output image.
* @param interpolation Interpolation method. Only cv::INTER_NEAREST, cv::INTER_LINEAR and cv::INTER_AREA are supported.
* @param borderType Pixel extrapolation method. Only cv::BORDER_CONSTANT, cv::BORDER_REPLICATE and cv::BORDER_TRANSPARENT
* are supported.
* @param borderValue Value used in case of a constant border.
*/
void warpPerspective(InputArray _src, OutputArray _dst, InputArray _M0, Size dsize, int interpolation, int borderType,
const Scalar& borderValue);

/**
* @brief Perspective warp two images using the same transformation. Bi-linear interpolation is used where applicable.
* For example, to warp a grayscale image and an alpha image at the same time, or warp two color channels.
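For orientation, here is a minimal usage sketch of the new cv::fastcv::warpPerspective API declared above. It is not part of this PR: the input file name, destination size, and corner mapping are illustrative assumptions, and the transform is built and then inverted the same way the perf test below does it.

// Usage sketch only (not part of this PR); file name, destination size and
// corner mapping are assumptions for illustration.
#include <opencv2/imgcodecs.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/fastcv/warp.hpp>

int main()
{
    cv::Mat src = cv::imread("input.png", cv::IMREAD_GRAYSCALE);
    if (src.empty())
        return 1;

    cv::Size dsize(640, 480);

    // Map the four source corners onto the destination corners.
    cv::Point2f s[4] = { {0.f, 0.f}, {src.cols - 1.f, 0.f},
                         {src.cols - 1.f, src.rows - 1.f}, {0.f, src.rows - 1.f} };
    cv::Point2f d[4] = { {0.f, 0.f}, {dsize.width - 1.f, 0.f},
                         {dsize.width - 1.f, dsize.height - 1.f}, {0.f, dsize.height - 1.f} };

    cv::Mat M;
    cv::getPerspectiveTransform(s, d).convertTo(M, CV_32F);
    cv::invert(M, M);  // the perf test in this PR inverts the matrix before the call

    cv::Mat dst;
    cv::fastcv::warpPerspective(src, dst, M, dsize, cv::INTER_LINEAR,
                                cv::BORDER_CONSTANT, cv::Scalar::all(0));
    return 0;
}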
79 changes: 57 additions & 22 deletions modules/fastcv/perf/perf_warp.cpp
@@ -1,37 +1,25 @@
/*
* Copyright (c) 2024 Qualcomm Innovation Center, Inc. All rights reserved.
* Copyright (c) 2024-2025 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: Apache-2.0
*/

#include "perf_precomp.hpp"

namespace opencv_test {

typedef perf::TestBaseWithParam<Size> WarpPerspective2PlanePerfTest;

PERF_TEST_P(WarpPerspective2PlanePerfTest, run,
::testing::Values(perf::szVGA, perf::sz720p, perf::sz1080p))
static void getInvertMatrix(Mat& src, Mat& dst, Mat& M)
{
cv::Size dstSize = GetParam();
cv::Mat img = imread(cvtest::findDataFile("cv/shared/baboon.png"));
Mat src(img.rows, img.cols, CV_8UC1);
cvtColor(img,src,cv::COLOR_BGR2GRAY);
cv::Mat dst1, dst2, mat;
mat.create(3,3,CV_32FC1);
dst1.create(dstSize,CV_8UC1);
dst2.create(dstSize,CV_8UC1);

RNG& rng = cv::theRNG();
Point2f s[4], d[4];

s[0] = Point2f(0,0);
d[0] = Point2f(0,0);
s[1] = Point2f(src.cols-1.f,0);
d[1] = Point2f(dst1.cols-1.f,0);
d[1] = Point2f(dst.cols-1.f,0);
s[2] = Point2f(src.cols-1.f,src.rows-1.f);
d[2] = Point2f(dst1.cols-1.f,dst1.rows-1.f);
d[2] = Point2f(dst.cols-1.f,dst.rows-1.f);
s[3] = Point2f(0,src.rows-1.f);
d[3] = Point2f(0,dst1.rows-1.f);
d[3] = Point2f(0,dst.rows-1.f);

float buffer[16];
Mat tmp( 1, 16, CV_32FC1, buffer );
@@ -41,18 +29,65 @@ PERF_TEST_P(WarpPerspective2PlanePerfTest, run,
{
s[i].x += buffer[i*4]*src.cols/2;
s[i].y += buffer[i*4+1]*src.rows/2;
d[i].x += buffer[i*4+2]*dst1.cols/2;
d[i].y += buffer[i*4+3]*dst1.rows/2;
d[i].x += buffer[i*4+2]*dst.cols/2;
d[i].y += buffer[i*4+3]*dst.rows/2;
}

cv::getPerspectiveTransform( s, d ).convertTo( mat, mat.depth() );
cv::getPerspectiveTransform( s, d ).convertTo( M, M.depth() );

// Invert the perspective matrix
invert(mat,mat);
invert(M,M);
}

typedef perf::TestBaseWithParam<Size> WarpPerspective2PlanePerfTest;

PERF_TEST_P(WarpPerspective2PlanePerfTest, run,
::testing::Values(perf::szVGA, perf::sz720p, perf::sz1080p))
{
cv::Size dstSize = GetParam();
cv::Mat img = imread(cvtest::findDataFile("cv/shared/baboon.png"));
Contributor comment: Add EXPECT_FALSE(img.empty()); after the line to check that test input is not empty. (A sketch of this check appears after this file's diff.)

Mat src(img.rows, img.cols, CV_8UC1);
cvtColor(img,src,cv::COLOR_BGR2GRAY);
cv::Mat dst1, dst2, matrix;
matrix.create(3,3,CV_32FC1);
dst1.create(dstSize,CV_8UC1);
dst2.create(dstSize,CV_8UC1);

getInvertMatrix(src, dst1, matrix);

while (next())
{
startTimer();
cv::fastcv::warpPerspective2Plane(src, src, dst1, dst2, matrix, dstSize);
stopTimer();
}

SANITY_CHECK_NOTHING();
}

typedef perf::TestBaseWithParam<tuple<Size, int, int>> WarpPerspectivePerfTest;

PERF_TEST_P(WarpPerspectivePerfTest, run,
::testing::Combine( ::testing::Values(perf::szVGA, perf::sz720p, perf::sz1080p),
::testing::Values(INTER_NEAREST, INTER_LINEAR, INTER_AREA),
::testing::Values(BORDER_CONSTANT, BORDER_REPLICATE, BORDER_TRANSPARENT)))
{
cv::Size dstSize = get<0>(GetParam());
int interpolation = get<1>(GetParam());
int borderType = get<2>(GetParam());
cv::Scalar borderValue = Scalar::all(100);

cv::Mat src = imread(cvtest::findDataFile("cv/shared/baboon.png"), cv::IMREAD_GRAYSCALE);
Contributor comment: The same assert (check that the loaded test image is not empty).

cv::Mat dst, matrix, ref;
matrix.create(3, 3, CV_32FC1);
dst.create(dstSize, src.type());

getInvertMatrix(src, dst, matrix);

while (next())
{
startTimer();
cv::fastcv::warpPerspective2Plane(src, src, dst1, dst2, mat, dstSize);
cv::fastcv::warpPerspective(src, dst, matrix, dstSize, interpolation, borderType, borderValue);
stopTimer();
}

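A quick sketch of the guard suggested in the review comments above, applied to the two image-loading lines in these perf tests. It illustrates the suggestion and is not code from the PR as posted.

// Reviewer-suggested guard (illustration only): fail early if the test data
// file could not be loaded.
cv::Mat img = imread(cvtest::findDataFile("cv/shared/baboon.png"));
EXPECT_FALSE(img.empty());

// Same check for the grayscale load in WarpPerspectivePerfTest:
cv::Mat src = imread(cvtest::findDataFile("cv/shared/baboon.png"), cv::IMREAD_GRAYSCALE);
EXPECT_FALSE(src.empty());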
175 changes: 139 additions & 36 deletions modules/fastcv/src/warp.cpp
@@ -1,5 +1,5 @@
/*
* Copyright (c) 2024 Qualcomm Innovation Center, Inc. All rights reserved.
* Copyright (c) 2024-2025 Qualcomm Innovation Center, Inc. All rights reserved.
* SPDX-License-Identifier: Apache-2.0
*/

@@ -12,49 +12,52 @@ class FcvWarpPerspectiveLoop_Invoker : public cv::ParallelLoopBody
{
public:

FcvWarpPerspectiveLoop_Invoker(InputArray _src1, InputArray _src2, OutputArray _dst1, OutputArray _dst2, InputArray _M0,
Size _dsize) : cv::ParallelLoopBody()
{
src1 = _src1.getMat();
src2 = _src2.getMat();
dsize = _dsize;

_dst1.create(dsize, src1.type());
_dst2.create(dsize, src2.type());
dst1 = _dst1.getMat();
dst2 = _dst2.getMat();

M = _M0.getMat();
}
FcvWarpPerspectiveLoop_Invoker(const Mat& _src1, const Mat& _src2, Mat& _dst1, Mat& _dst2,
const float * _M, fcvInterpolationType _interpolation = FASTCV_INTERPOLATION_TYPE_NEAREST_NEIGHBOR,
fcvBorderType _borderType = fcvBorderType::FASTCV_BORDER_UNDEFINED, const int _borderValue = 0)
: ParallelLoopBody(), src1(_src1), src2(_src2), dst1(_dst1), dst2(_dst2), M(_M), interpolation(_interpolation),
borderType(_borderType), borderValue(_borderValue)
{}

virtual void operator()(const cv::Range& range) const CV_OVERRIDE
{
uchar* dst1_ptr = dst1.data + range.start*dst1.step;
uchar* dst2_ptr = dst2.data + range.start*dst2.step;
uchar* dst1_ptr = dst1.data + range.start * dst1.step;
int rangeHeight = range.end - range.start;

float rangeMatrix[9];
rangeMatrix[0] = M.at<float>(0,0);
rangeMatrix[1] = M.at<float>(0,1);
rangeMatrix[2] = M.at<float>(0,2)+range.start*M.at<float>(0,1);
rangeMatrix[3] = M.at<float>(1,0);
rangeMatrix[4] = M.at<float>(1,1);
rangeMatrix[5] = M.at<float>(1,2)+range.start*M.at<float>(1,1);
rangeMatrix[6] = M.at<float>(2,0);
rangeMatrix[7] = M.at<float>(2,1);
rangeMatrix[8] = M.at<float>(2,2)+range.start*M.at<float>(2,1);

fcv2PlaneWarpPerspectiveu8(src1.data, src2.data, src1.cols, src1.rows, src1.step, src2.step, dst1_ptr, dst2_ptr,
dsize.width, rangeHeight, dst1.step, dst2.step, rangeMatrix);
rangeMatrix[0] = M[0];
rangeMatrix[1] = M[1];
rangeMatrix[2] = M[2]+range.start*M[1];
rangeMatrix[3] = M[3];
rangeMatrix[4] = M[4];
rangeMatrix[5] = M[5]+range.start*M[4];
rangeMatrix[6] = M[6];
rangeMatrix[7] = M[7];
rangeMatrix[8] = M[8]+range.start*M[7];

if ((src2.empty()) || (dst2.empty()))
{
fcvWarpPerspectiveu8_v5(src1.data, src1.cols, src1.rows, src1.step, src1.channels(), dst1_ptr, dst1.cols, rangeHeight,
dst1.step, rangeMatrix, interpolation, borderType, borderValue);
}
else
{
uchar* dst2_ptr = dst2.data + range.start * dst2.step;
fcv2PlaneWarpPerspectiveu8(src1.data, src2.data, src1.cols, src1.rows, src1.step, src2.step, dst1_ptr, dst2_ptr,
dst1.cols, rangeHeight, dst1.step, dst2.step, rangeMatrix);
}
}

private:
Mat src1;
Mat src2;
Mat dst1;
Mat dst2;
Mat M;
Size dsize;

const Mat& src1;
const Mat& src2;
Mat& dst1;
Mat& dst2;
const float* M;
fcvInterpolationType interpolation;
fcvBorderType borderType;
int borderValue;

FcvWarpPerspectiveLoop_Invoker(const FcvWarpPerspectiveLoop_Invoker &); // = delete;
const FcvWarpPerspectiveLoop_Invoker& operator= (const FcvWarpPerspectiveLoop_Invoker &); // = delete;
@@ -68,8 +71,108 @@ void warpPerspective2Plane(InputArray _src1, InputArray _src2, OutputArray _dst1
CV_Assert(!_src2.empty() && _src2.type() == CV_8UC1);
CV_Assert(!_M0.empty());

Mat src1 = _src1.getMat();
Mat src2 = _src2.getMat();

_dst1.create(dsize, src1.type());
_dst2.create(dsize, src2.type());
Mat dst1 = _dst1.getMat();
Mat dst2 = _dst2.getMat();

Mat M0 = _M0.getMat();
CV_Assert((M0.type() == CV_32F || M0.type() == CV_64F) && M0.rows == 3 && M0.cols == 3);
float matrix[9];
Mat M(3, 3, CV_32F, matrix);
M0.convertTo(M, M.type());

int nThreads = getNumThreads();
int nStripes = nThreads > 1 ? 2*nThreads : 1;

cv::parallel_for_(cv::Range(0, dsize.height),
FcvWarpPerspectiveLoop_Invoker(src1, src2, dst1, dst2, matrix), nStripes);
}

void warpPerspective(InputArray _src, OutputArray _dst, InputArray _M0, Size dsize, int interpolation, int borderType,
const Scalar& borderValue)
{
Mat src = _src.getMat();

_dst.create(dsize, src.type());
Mat dst = _dst.getMat();

Mat M0 = _M0.getMat();
CV_Assert((M0.type() == CV_32F || M0.type() == CV_64F) && M0.rows == 3 && M0.cols == 3);
float matrix[9];
Mat M(3, 3, CV_32F, matrix);
M0.convertTo(M, M.type());

// Do not support inplace case
CV_Assert(src.data != dst.data);
// Only support CV_8U
CV_Assert(src.depth() == CV_8U);

INITIALIZATION_CHECK;

fcvBorderType fcvBorder;
uint8_t fcvBorderValue = 0;
fcvInterpolationType fcvInterpolation;

switch (borderType)
{
case BORDER_CONSTANT:
{
// Border value should be same
CV_Assert((borderValue[0] == borderValue[1]) &&
(borderValue[0] == borderValue[2]) &&
(borderValue[0] == borderValue[3]));

fcvBorder = fcvBorderType::FASTCV_BORDER_CONSTANT;
fcvBorderValue = static_cast<uint8_t>(borderValue[0]);
break;
}
case BORDER_REPLICATE:
{
fcvBorder = fcvBorderType::FASTCV_BORDER_REPLICATE;
break;
}
case BORDER_TRANSPARENT:
{
fcvBorder = fcvBorderType::FASTCV_BORDER_UNDEFINED;
break;
}
default:
CV_Error(cv::Error::StsBadArg, cv::format("Border type:%d is not supported", borderType));
}

switch(interpolation)
{
case INTER_NEAREST:
{
fcvInterpolation = FASTCV_INTERPOLATION_TYPE_NEAREST_NEIGHBOR;
break;
}
case INTER_LINEAR:
{
fcvInterpolation = FASTCV_INTERPOLATION_TYPE_BILINEAR;
break;
}
case INTER_AREA:
{
fcvInterpolation = FASTCV_INTERPOLATION_TYPE_AREA;
break;
}
default:
CV_Error(cv::Error::StsBadArg, cv::format("Interpolation type:%d is not supported", interpolation));
}

int nThreads = cv::getNumThreads();
int nStripes = nThreads > 1 ? 2*nThreads : 1;

// placeholder
Mat tmp;

cv::parallel_for_(cv::Range(0, dsize.height),
FcvWarpPerspectiveLoop_Invoker(_src1, _src2, _dst1, _dst2, _M0, dsize), 1);
FcvWarpPerspectiveLoop_Invoker(src, tmp, dst, tmp, matrix, fcvInterpolation, fcvBorder, fcvBorderValue), nStripes);
}

} // fastcv::
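One implementation detail worth calling out: FcvWarpPerspectiveLoop_Invoker hands each parallel stripe a copy of the transform whose translation terms are shifted by the stripe's starting row, because the FastCV call indexes the stripe's destination rows from zero. Below is a small restatement of that adjustment, assuming (as the perf test's use of invert() suggests) that the matrix passed to FastCV maps destination coordinates to source coordinates; the helper name is illustrative, not part of the PR.

// Illustrative restatement of the per-stripe matrix shift used in the invoker.
// Local destination row y corresponds to global row y + start, so
//   src = M * (x, y + start, 1)^T
// folds the "+ start" into the column that multiplies y (indices 1, 4, 7):
static void shiftRowsForStripe(const float M[9], int start, float Mshift[9])
{
    for (int i = 0; i < 9; i++)
        Mshift[i] = M[i];
    Mshift[2] += start * M[1];   // x translation term
    Mshift[5] += start * M[4];   // y translation term
    Mshift[8] += start * M[7];   // homogeneous scale term
}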