Can't get a confidence value from

Hi,

I’ve been trying various ways to extract the confidence value from the detections.
I either get this: Exception in thread “main” java.lang.NullPointerException: Cannot load from double array because the return value of “org.opencv.core.Mat.get(int, int)” is null

If I use this:

                double confidence;
                confidence = detections.get(i, 2)[0];

or if I change my code below get a null value!

                double[] detection;
                detection = detections.get(i, 2);
                //System.out.println("c : "+confidence);
                
                if (detection != null) {
                    confidence = detection[0];
                    System.out.println("i : " + i + " Confidence : " + confidence);
                }

If I dump detections I get:

dump : [1.6463306;
 0.39511931;
 0.59267896;
 1.778037;
 0.52682579;
 0.46097255;
 1.8438902;
etc...

I need someone to point out where I’m going wrong, as I can’t figure it out!

https://stackoverflow.com/questions/75581064/java-opencv-dnn-geti-20

what is detection, exactly ? what is it from ?

sadly, your dump is truncated, so we can’t see, please update !

(well if it’s 1d, get(i,2) is clearly wrong, no ?)

A Face from Net faceNet = Dnn.readNetFromONNX(“face_detection_yunet_2022mar-act_int8-wt_int8-quantized.onnx”);

package AgeGenderCV;

import java.util.ArrayList;
import java.util.List;

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.dnn.Dnn;
import org.opencv.dnn.Net;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;
import org.opencv.videoio.VideoCapture;

public class AgeGenderCV {

    private static final String[] GENDER_LIST = {"Male", "Female"};
    private static final String[] AGE_LIST = {"(0-2)", "(4-6)", "(8-12)", "(15-20)", "(25-32)", "(38-43)", "(48-53)", "(60-100)"};
    private static final double[] MODEL_MEAN_VALUES = {78.4263377603, 87.7689143744, 114.895847746};
    private static final double CONFIDENCE_THRESHOLD = 0.4;

    public void ProcessImage() {
        // Load networks
        Net faceNet = Dnn.readNetFromONNX("face_detection_yunet_2022mar-act_int8-wt_int8-quantized.onnx");
        Net ageNet = Dnn.readNetFromCaffe("age_deploy.prototxt", "age_net.caffemodel");
        Net genderNet = Dnn.readNetFromCaffe("gender_deploy.prototxt", "gender_net.caffemodel");

        // Open a video file or an image file or a camera stream
        VideoCapture cap;
        //if (args.length > 0) {
        //    cap = new VideoCapture(args[0]);
        //} else {
        cap = new VideoCapture(0);
        //}

        // Read frame
        Mat frame = new Mat();
        cap.read(frame);

        while (true) {
            // Get face bounding boxes
            Mat frameFace = frame.clone();

            List<Rect> bboxes = new ArrayList<>();
            Size size = new Size(300, 300);
            Scalar scalar = new Scalar(104, 117, 123);

            Mat blob = Dnn.blobFromImage(frameFace, 1.0, size, scalar, true, false);
            faceNet.setInput(blob);

            Mat detections = faceNet.forward();
            System.out.println("dtot : "+detections.total());
            System.out.println("dump : "+detections.dump());
            double confidence = 0;
            
            for (int i = 0; i < detections.rows(); i++) {
                //double confidence;
                //confidence = detections.get(i, 2)[0];
                
                double[] detection;
                detection = detections.get(i, 2);
                System.out.println("c : "+confidence);
                
                if (detection != null) {
                    confidence = detection[0];
                    System.out.println("i : " + i + " Confidence : " + confidence);
                }
                
                
                
                if (confidence > CONFIDENCE_THRESHOLD){
                    //frame.width //frame.height //frame.cols //frame.rows
                    int x1 = (int) (detections.get(i, 3)[0] * frame.cols());
                    int y1 = (int) (detections.get(i, 4)[0] * frame.rows());
                    int x2 = (int) (detections.get(i, 5)[0] * frame.cols());
                    int y2 = (int) (detections.get(i, 6)[0] * frame.rows());
                    bboxes.add(new Rect(x1, y1, x2 - x1, y2 - y1));
                    Imgproc.rectangle(frameFace, new org.opencv.core.Point(x1, y1), new org.opencv.core.Point(x2, y2), new org.opencv.core.Scalar(0, 255, 0), (int) Math.round(frame.rows() / 150), 8, 0);
                }
            }

            if (bboxes.isEmpty()) {
                System.out.println("No face Detected, Checking next frame");
                continue;
            }

            for (Rect bbox : bboxes) {
                Mat face = new Mat(frame, bbox);
                Size sizeb = new Size(227, 227);
                Scalar scalarb = new Scalar(MODEL_MEAN_VALUES);
                blob = Dnn.blobFromImage(face, 1.0, sizeb, scalarb, false);
                genderNet.setInput(blob);
                Mat genderPreds = genderNet.forward();
                String gender = GENDER_LIST[(int) genderPreds.get(0, 0)[0]];
                //System.out.println("Gender Output : " + genderPreds);
                System.out.println("Gender : " + gender + ", conf = " + genderPreds.get(0, 0)[0]);

                ageNet.setInput(blob);
                Mat agePreds = ageNet.forward();
                agePreds = agePreds.reshape(1, 1); // reshape to 2D matrix with one row
                //Point maxLoc = new Point(); // !!! not assiged !!!
                //Core.minMaxLoc(agePreds);
                //int ageIdx = (int) maxLoc.x; // !!! uninitialized !!!
                Core.MinMaxLocResult mm = Core.minMaxLoc(agePreds);
                int ageIdx = (int) mm.maxLoc.x;

                //Imgcodecs.imwrite("age-gender-out-"+args[0]+".jpg", frameFace);
                //Imgcodecs.imwrite("age-gender-out-.jpg", frameFace);
                System.out.println("Age : " + AGE_LIST[ageIdx] + ", conf = " + agePreds.get(0, ageIdx)[0]);
            }
            cap.release();
        }
    }

    public static void main(String args[]) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        System.loadLibrary("opencv_java470");

        AgeGenderCV example = new AgeGenderCV();
        example.ProcessImage();
    }
}

unfortunately, your code looks, like you snipped it from the ssd sample

however, yunet output is entirely different from that !

any chance, you can use the FaceDetectorYN class here ?
(you for sure dont want to parse this on your own !

also note: Mat faces will contain a row(for each face) with 2 box points & 5 facial landmarks:

I fixed that section of my code with a previous example on the old forum:

I will make another post in a few days if i can’t figure why I’m going wrong with the next step.

https://answers.opencv.org/question/224079/java-dnn-help/

            float[] data = new float[7];
            
            for (int i = 0; i < detections.rows(); i++) {
                detections.get(i, 0, data);
                double confidence = data[2];
               
                if (confidence > CONFIDENCE_THRESHOLD){
                    //frame.width //frame.height //frame.cols //frame.rows
                    int x1 = (int)(data[3] * frame.cols());
                    int y1 = (int)(data[4] * frame.rows());
                    int x2 = (int)(data[5] * frame.cols());
                    int y2 = (int)(data[6] * frame.rows());
                    bboxes.add(new Rect(x1, y1, x2 - x1, y2 - y1));
                    Imgproc.rectangle(frameFace, new org.opencv.core.Point(x1, y1), new org.opencv.core.Point(x2, y2), new org.opencv.core.Scalar(0, 255, 0), (int) Math.round(frame.rows() / 150), 8, 0);                   
                }