Commit 4bdc9373 authored by Ján Popeláš's avatar Ján Popeláš 🤠 Committed by Radek Ošlejšek
Browse files

Resolve "Fix found issues on detection-based superimposition"

parent 4b5b2c64
Loading
Loading
Loading
Loading
+9 −1
Original line number Diff line number Diff line
@@ -4,6 +4,7 @@ import cz.fidentis.analyst.Logger;
import cz.fidentis.analyst.canvas.Canvas;
import cz.fidentis.analyst.canvas.CanvasState;
import cz.fidentis.analyst.data.face.HumanFace;
import cz.fidentis.analyst.data.face.HumanFaceState;
import cz.fidentis.analyst.data.landmarks.Landmark;
import cz.fidentis.analyst.data.landmarks.LandmarksFactory;
import cz.fidentis.analyst.data.landmarks.MeshVicinity;
@@ -54,7 +55,8 @@ public class FaceDetectionServices {
    public static List<Landmark> detectAndAddLandmarks(Canvas canvas, HumanFace primaryFace, HumanFace secondaryFace) {
        OpenCVYuNetFaceDetection faceDetector = getFaceDetector();
        CanvasState formerState = canvas.getState();
        presetCanvasForOptimalImaging(canvas);
        HumanFaceState primaryFaceState = primaryFace.getState();
        HumanFaceState secondaryFaceState = secondaryFace.getState();

        List<Landmark> primaryFaceLandmarks = detectLandmarksInPrimaryFace(canvas, faceDetector);
        List<Landmark> secondaryFaceLandmarks = detectLandmarksInSecondaryFace(canvas, faceDetector);
@@ -70,6 +72,8 @@ public class FaceDetectionServices {
        }

        canvas.setState(formerState);
        primaryFace.setSymmetryPlane(primaryFaceState.symmetryPlane());
        secondaryFace.setSymmetryPlane(secondaryFaceState.symmetryPlane());

        primaryFaceLandmarks.addAll(secondaryFaceLandmarks);
        return primaryFaceLandmarks;
@@ -120,6 +124,7 @@ public class FaceDetectionServices {
    private static List<Landmark> detectLandmarksInPrimaryFace(Canvas canvas, OpenCVYuNetFaceDetection faceDetector) {
        DrawableFace primaryFace = canvas.getScene().getDrawableFace(canvas.getScene().getPrimaryFaceSlot());
        DrawableFace secondaryFace = canvas.getScene().getDrawableFace(canvas.getScene().getSecondaryFaceSlot());
        presetCanvasForOptimalImaging(canvas);
        primaryFace.show(true);
        secondaryFace.show(false);

@@ -149,6 +154,7 @@ public class FaceDetectionServices {
    private static List<Landmark> detectLandmarksInSecondaryFace(Canvas canvas, OpenCVYuNetFaceDetection faceDetector) {
        DrawableFace primaryFace = canvas.getScene().getDrawableFace(canvas.getScene().getPrimaryFaceSlot());
        DrawableFace secondaryFace = canvas.getScene().getDrawableFace(canvas.getScene().getSecondaryFaceSlot());
        presetCanvasForOptimalImaging(canvas);
        primaryFace.show(false);
        secondaryFace.show(true);

@@ -209,6 +215,8 @@ public class FaceDetectionServices {
        DrawableFace primaryFace = canvas.getScene().getDrawableFace(canvas.getScene().getPrimaryFaceSlot());
        DrawableFace secondaryFace = canvas.getScene().getDrawableFace(canvas.getScene().getSecondaryFaceSlot());

        canvas.setDarkBackground(false);

        primaryFace.show(true);
        secondaryFace.show(true);

+1 −1
Original line number Diff line number Diff line
@@ -43,7 +43,7 @@ public class OpenCVYuNetFaceDetection {
    private static final String MODEL_NAME = "face_detection_yunet_2023mar.onnx";
    private static final Size MODEL_RECOGNITION_SIZE = new Size(320, 320);
    private static final String MODEL_CONFIG = "";
    private static final float MODEL_SCORE_THRESHOLD = 0.80f;
    private static final float MODEL_SCORE_THRESHOLD = 0.6f;
    private static final float MODEL_NMS_THRESHOLD = 0.5f;

    private static final SignificantPointType[] SIGNIFICANT_POINTS = new SignificantPointType[]{
+14 −12
Original line number Diff line number Diff line
@@ -335,9 +335,6 @@ public class RegistrationAction extends ControlPanelAction<RegistrationPanel> im
    private void alignDetection() {
        Logger out = Logger.measureTime();

        HumanFaceState primaryFaceState = getPrimaryFace().getState();
        HumanFaceState secondaryFaceState = getSecondaryFace().getState();

        // Detect faces and add landmarks
        List<Landmark> detectedLandmarks = FaceDetectionServices.detectAndAddLandmarks(
                getCanvas(),
@@ -377,7 +374,7 @@ public class RegistrationAction extends ControlPanelAction<RegistrationPanel> im
            }
        }

        out.printDuration("Procrustes for models with "
        out.printDuration("Procrustes (Detection) for models with "
                + getPrimaryFace().getMeshModel().getNumVertices()
                + "/"
                + getSecondaryFace().getMeshModel().getNumVertices()
@@ -388,28 +385,33 @@ public class RegistrationAction extends ControlPanelAction<RegistrationPanel> im
        out = Logger.measureTime();

        // Align using ICP
        HumanFace body = getPrimaryFace();
        HumanFace face = getSecondaryFace();

        if (getPrimaryFace().getBoundingBox().diagonalLength() < getSecondaryFace().getBoundingBox().diagonalLength()) {
            body = getSecondaryFace();
            face = getPrimaryFace();
        }

        PointSamplingConfig samplingStrategy = getSamplingStrategy();
        FaceRegistrationServices.alignMeshes(
                getSecondaryFace(), // is transformed
                face, // is transformed
                new IcpConfig(
                        getPrimaryFace().getMeshModel(),
                        body.getMeshModel(),
                        getControlPanel().getMaxIcpIterParam(),
                        getControlPanel().getScaleParam(),
                        getControlPanel().getMinIcpErrorParam(),
                        samplingStrategy,
                        getControlPanel().getIcpAutoCropParam() ? 0 : -1));

        out.printDuration("ICP for models with "
                + getPrimaryFace().getMeshModel().getNumVertices()
        out.printDuration("ICP (Detection) for models with "
                + body.getMeshModel().getNumVertices()
                + "/"
                + getSecondaryFace().getMeshModel().getNumVertices()
                + face.getMeshModel().getNumVertices()
                + " vertices. Sub-sampling of the secondary face: "
                + samplingStrategy
        );

        getPrimaryFace().setState(primaryFaceState);
        getSecondaryFace().setState(secondaryFaceState);

    }

    private PointSamplingConfig getSamplingStrategy() {
+14 −1
Original line number Diff line number Diff line
@@ -96,13 +96,26 @@ public class Canvas extends JPanel implements HumanFaceListener {
        return scene;
    }

    /**
     * Creates a snapshot of the current state of the canvas.
     * Primarily stores the camera, background color, transparency and rendering mode of the faces.
     * The returned object is a copy, so later changes to the canvas do not affect it.
     *
     * @return the state of the canvas
     */
    public CanvasState getState() {
        return new CanvasState(this);
    }

    /**
     * Sets the state of the canvas.
     * Primarily adjusts the camera, background color, transparency and rendering mode of the faces.
     *
     * @param state the state to set
     */
    public void setState(CanvasState state) {
        camera = state.camera();
        scene = state.scene();
        setDarkBackground(state.backgroundColor() == SceneRenderer.DARK_BACKGROUND);

        scene.getDrawableFace(scene.getPrimaryFaceSlot()).setTransparency(state.primaryFaceTransparency());
        scene.getDrawableFace(scene.getSecondaryFaceSlot()).setTransparency(state.secondaryFaceTransparency());
        scene.getDrawableFace(scene.getPrimaryFaceSlot()).setRenderMode(state.primaryFaceRenderingMode());
+16 −5
Original line number Diff line number Diff line
@@ -2,28 +2,39 @@ package cz.fidentis.analyst.canvas;

import cz.fidentis.analyst.rendering.Camera;
import cz.fidentis.analyst.rendering.RenderingMode;
import cz.fidentis.analyst.rendering.Scene;

import java.awt.*;

/**
 * A copy of the internal state of the {@link Canvas} instance.
 *
 * @param camera camera
 * @param scene  scene
 * @param camera the camera
 * @param backgroundColor background color of GLCanvas
 * @param primaryFaceTransparency transparency of the primary face
 * @param secondaryFaceTransparency transparency of the secondary face
 * @param primaryFaceRenderingMode rendering mode of the primary face (smooth, texture, etc.)
 * @param secondaryFaceRenderingMode rendering mode of the secondary face (smooth, texture, etc.)
 *
 * @author Jan Popelas
 */
public record CanvasState(
        Camera camera,
        Scene scene,
        Color backgroundColor,
        float primaryFaceTransparency,
        float secondaryFaceTransparency,
        RenderingMode primaryFaceRenderingMode,
        RenderingMode secondaryFaceRenderingMode
) {

    /**
     * Constructor from Canvas
     *
     * @param canvas the Canvas object for which we want to construct the state
     */
    public CanvasState(Canvas canvas) {
        this(
                canvas.getCamera().copy(),
                canvas.getScene().copy(),
                canvas.getGLCanvas().getBackground(),
                canvas.getScene().getDrawableFace(canvas.getScene().getPrimaryFaceSlot()).getTransparency(),
                canvas.getScene().getDrawableFace(canvas.getScene().getSecondaryFaceSlot()).getTransparency(),
                canvas.getScene().getDrawableFace(canvas.getScene().getPrimaryFaceSlot()).getRenderMode(),
Loading