"OpenCV, overlaypatches , Dimensions of coords and onesTransposed matrices do not match!"

I’m encountering an OpenCV error :
“Affine Transformation Matrix:
[0.9982519601601529, -0.0001474894468255113, 1661.997353717446;
-0.000592161787642926, 0.9998475676606888, 447.0124025198956;
-1.008188156421432e-06, -9.998633813641276e-08, 1]
Before vconcat:
coords size: [69920 x 1]
coords type: 5
ones size: [34960 x 1]
ones type: 5
onesTransposed size: [1 x 34960]
onesTransposed type: 5
Error: Dimensions of coords and onesTransposed matrices do not match!”

in the following code

 "#include <iostream>
#include <vector>
#include <opencv2/opencv.hpp>
#include <opencv2/core.hpp>
#include <opencv2/highgui.hpp>
#include <opencv2/imgproc.hpp>
#include "opencv2/imgcodecs.hpp"
// Function to load the corrupted image
cv::Mat loadImage(const std::string& imagePath) {
    // Load the image using OpenCV
    cv::Mat image = cv::imread(imagePath);

    // Check if the image was loaded successfully
    if (image.empty()) {
        std::cout << "Failed to load the image!" << std::endl;
        // Return an empty matrix or handle the error as per your requirement
    }

    // Return the loaded image
    return image;
}

// Function to load the patches
std::vector<cv::Mat> loadPatches(const std::string& patchesPath) {
    // Create a vector to store the patches
    std::vector<cv::Mat> patches;

    std::vector<std::string> patchFiles;
    cv::glob(patchesPath, patchFiles);

    // Iterate over the patch files
    for (const auto& patchFile : patchFiles) {
        // Load each patch using OpenCV
        cv::Mat patch = cv::imread(patchFile);

        // Check if the patch was loaded successfully
        if (patch.empty()) {
            std::cout << "Failed to load patch: " << patchFile << std::endl;
            // Handle the error or continue with the remaining patches
            continue;
        }

        // Add the patch to the vector
        patches.push_back(patch);
    }

    // Display the loaded patches
    for (int i = 0; i < patches.size(); i++) {
        cv::imshow("Patch " + std::to_string(i), patches[i]);
    }

    cv::waitKey(0);

    // Return the loaded patches
    return patches;

}


// Function to extract SIFT features from an image
cv::Mat extractSiftFeatures(const cv::Mat& image) {
    // Create a SIFT object
    cv::Ptr<cv::SIFT> sift = cv::SIFT::create();

    // Detect keypoints and extract descriptors
    std::vector<cv::KeyPoint> keypoints;
    cv::Mat descriptors;
    sift->detectAndCompute(image, cv::noArray(), keypoints, descriptors);

    // Return the extracted SIFT features
    return descriptors;
}

// Function to compute matches between image and patch features
std::vector<cv::DMatch> computeMatches(const cv::Mat& imageDescriptors, const cv::Mat& patchDescriptors) {
    // Create a BFMatcher object with L2 distance for SIFT
    cv::Ptr<cv::BFMatcher> matcher = cv::BFMatcher::create(cv::NORM_L2);

    // Perform matching between the descriptors
    std::vector<std::vector<cv::DMatch>> knnMatches;
    matcher->knnMatch(imageDescriptors, patchDescriptors, knnMatches, 2);

    // Apply ratio test to refine the matches
    const float ratioThreshold = 0.8f; // User-defined threshold for ratio test
    std::vector<cv::DMatch> refinedMatches;
    for (const auto& match : knnMatches) {
        if (match.size() < 2)
            continue;

        float distanceRatio = match[0].distance / match[1].distance;
        if (distanceRatio < ratioThreshold) {
            refinedMatches.push_back(match[0]);
        }
    }

    // Return the refined matches
    return refinedMatches;
}

// Function to find the affine transformation using RANSAC
cv::Mat findAffineTransformation(const std::vector<cv::KeyPoint>& imageKeypoints,
    const std::vector<cv::KeyPoint>& patchKeypoints,
    const std::vector<cv::DMatch>& matches) {
    // Convert keypoints to Point2f format
    std::vector<cv::Point2f> imagePoints, patchPoints;
    for (const auto& match : matches) {
        imagePoints.push_back(imageKeypoints[match.queryIdx].pt);
        patchPoints.push_back(patchKeypoints[match.trainIdx].pt);
    }

    // Find the affine transformation using RANSAC
    cv::Mat mask;
    cv::Mat affineMatrix = cv::findHomography(patchPoints, imagePoints, cv::RANSAC, 3.0, mask);

    // Print the affine transformation matrix
    std::cout << "Affine Transformation Matrix:" << std::endl;
    std::cout << affineMatrix << std::endl;
    // Return the affine transformation matrix
    return affineMatrix;
}

void overlayPatches(cv::Mat& image, const std::vector<cv::Mat>& patches, const cv::Mat& affineMatrix) {
    cv::Mat patchMask;
    for (const auto& patch : patches) {
        int patchRows = patch.rows;
        int patchCols = patch.cols;

        // Convert patch to single-channel image
        cv::Mat patchGray;
        cv::cvtColor(patch, patchGray, cv::COLOR_BGR2GRAY);

        // Create patch mask
        cv::Mat patchMask = (patchGray != 0);

        cv::Mat meshX, meshY;
        for (int i = 0; i < patchRows; ++i) {
            cv::Mat rowX(1, patchCols, CV_32F);
            cv::Mat rowY(1, patchCols, CV_32F);
            for (int j = 0; j < patchCols; ++j) {
                rowX.at<float>(j) = static_cast<float>(j);
                rowY.at<float>(j) = static_cast<float>(i);
            }
            meshX.push_back(rowX);
            meshY.push_back(rowY);
        }

        cv::Mat coords;
        cv::hconcat(meshX.reshape(1, 1), meshY.reshape(1, 1), coords);

        cv::Mat ones = cv::Mat::ones(1, patchRows * patchCols, CV_32F);

        std::cout << "Before vconcat:" << std::endl;
        std::cout << "coords size: " << coords.size() << std::endl;
        std::cout << "coords type: " << coords.type() << std::endl;

        std::cout << "ones size: " << ones.size() << std::endl;
        std::cout << "ones type: " << ones.type() << std::endl;

        cv::Mat onesTransposed = ones.t();  // Transpose the ones matrix

        std::cout << "onesTransposed size: " << onesTransposed.size() << std::endl;
        std::cout << "onesTransposed type: " << onesTransposed.type() << std::endl;

        if (coords.cols != onesTransposed.cols) {
            std::cerr << "Error: Dimensions of coords and onesTransposed matrices do not match!" << std::endl;
            return;
        }

        if (coords.type() != onesTransposed.type()) {
            std::cerr << "Error: Types of coords and onesTransposed matrices do not match!" << std::endl;
            return;
        }

        try {
            cv::vconcat(coords.reshape(1, 1), onesTransposed.reshape(1, 1), coords);

            std::cout << "After vconcat:" << std::endl;
            std::cout << "coords size: " << coords.size() << std::endl;
            std::cout << "coords type: " << coords.type() << std::endl;
        }
        catch (const cv::Exception& e) {
            std::cerr << "OpenCV Error: " << e.what() << std::endl;
            return;
        }



        cv::Mat warpedCoords = affineMatrix * coords;
        warpedCoords.row(0) /= warpedCoords.row(2);
        warpedCoords.row(1) /= warpedCoords.row(2);

        cv::Mat warpedX = warpedCoords.row(0).reshape(1, patchRows);
        cv::Mat warpedY = warpedCoords.row(1).reshape(1, patchRows);

        cv::Mat warpedXInt, warpedYInt;
        warpedX.convertTo(warpedXInt, CV_32S);
        warpedY.convertTo(warpedYInt, CV_32S);

        for (int i = 0; i < patchRows; ++i) {
            for (int j = 0; j < patchCols; ++j) {
                int x = warpedXInt.at<int>(i, j);
                int y = warpedYInt.at<int>(i, j);
                if (x >= 0 && x < image.cols && y >= 0 && y < image.rows) {
                    image.at<cv::Vec3b>(y, x) = patch.at<cv::Vec3b>(i, j);
                }
            }
        }
    }
}

int main() {
    // Step 1: Load the corrupted image and patches
    cv::Mat corruptedImage = loadImage("C:/Users/Nomad/source/repos/Project6/image_to_complete.jpg");
    std::vector<cv::Mat> patches = loadPatches("C:/Users/Nomad/source/repos/Project6/Patch");


    // Step 2: Extract SIFT features from the image
    cv::Ptr<cv::SIFT> sift = cv::SIFT::create();
    cv::Mat imageDescriptors;
    std::vector<cv::KeyPoint> imageKeypoints;
    sift->detectAndCompute(corruptedImage, cv::noArray(), imageKeypoints, imageDescriptors);

    // Step 3: Extract SIFT features from the patches
    std::vector<cv::Mat> patchDescriptors;
    std::vector<cv::KeyPoint> patchKeypoints;
    for (const auto& patch : patches) {
        cv::Mat patchDescriptor;
        std::vector<cv::KeyPoint> keypoints;
        sift->detectAndCompute(patch, cv::noArray(), keypoints, patchDescriptor);
        patchKeypoints.insert(patchKeypoints.end(), keypoints.begin(), keypoints.end());
        patchDescriptors.push_back(patchDescriptor);
    }

    // Step 4: Compute matches between image and patch features
    std::vector<cv::DMatch> matches = computeMatches(imageDescriptors, patchDescriptors[0]);


    // Step 5: Find the affine transformation using RANSAC
    
    cv::Mat affineMatrix = findAffineTransformation(imageKeypoints, patchKeypoints, matches);

    // Step 6: Overlay patches on the image using the found homographies
    overlayPatches(corruptedImage, patches, affineMatrix);

    // Display the result or save the image as per your requirement
        // Step 7: Save the output image
    cv::imwrite("output.jpg", corruptedImage);

    return 0;
}

I would greatly appreciate any guidance or suggestions to resolve this issue. Thank you in advance for your help!

full, verbatim error msg, please
also, the whole i had followed: section is useless, without code or context (what is it for ?) so please update post