packages/tools/src/utilities/planar/filterAnnotationsWithinPlane.ts

Statements: 11.11% (2/18)
Branches: 0% (0/6)
Functions: 0% (0/2)
Lines: 11.11% (2/18)


import { vec3 } from 'gl-matrix';
import { CONSTANTS, metaData } from '@cornerstonejs/core';
import type { Types } from '@cornerstonejs/core';
import type { Annotations, Annotation } from '../../types';
 
const { EPSILON } = CONSTANTS;
 
// Two view plane normals are considered parallel when the absolute value of
// their dot product exceeds this threshold (i.e. is within EPSILON of 1).
const PARALLEL_THRESHOLD = 1 - EPSILON;
 
/**
 * Given some `Annotations` and a camera, filter the `Annotations`, keeping
 * only those whose view plane normal is parallel (or anti-parallel) to the
 * camera's view plane normal, i.e. those that lie in the same plane
 * orientation as the camera.
 *
 * @param annotations - The `Annotations` to filter
 * @param camera - The camera whose `viewPlaneNormal` defines the plane
 * @returns The filtered `Annotations`.
 */
export function filterAnnotationsWithinSamePlane(
  annotations: Annotations,
  camera: Types.ICamera
): Annotations {
  const { viewPlaneNormal } = camera;
 
  // We compare parallel normals rather than the full orientation
  // (viewPlaneNormal and viewUp) because flipping is performed through the
  // camera API: the same image and slice, once flipped, reports a different
  // orientation even though it renders the same slice. Parallel normals are
  // therefore used to filter the annotations here, and the within-slice
  // logic applied afterwards fine-tunes the result.
  const annotationsWithParallelNormals = annotations.filter(
    (td: Annotation) => {
      let annotationViewPlaneNormal = td.metadata.viewPlaneNormal;
 
      if (!annotationViewPlaneNormal) {
        // This code is run to set the annotation view plane normal
        // for historical data which was saved without the normal.
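        // The missing normal is reconstructed below as the cross product of
        // the image's row and column direction cosines (the first and last
        // three values of imageOrientationPatient from the DICOM Image Plane
        // Module).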
        const { referencedImageId } = td.metadata;
        const { imageOrientationPatient } = metaData.get(
          'imagePlaneModule',
          referencedImageId
        );
        const rowCosineVec = vec3.fromValues(
          imageOrientationPatient[0],
          imageOrientationPatient[1],
          imageOrientationPatient[2]
        );
 
        const colCosineVec = vec3.fromValues(
          imageOrientationPatient[3],
          imageOrientationPatient[4],
          imageOrientationPatient[5]
        );
 
        annotationViewPlaneNormal = vec3.create() as Types.Point3;
 
        vec3.cross(annotationViewPlaneNormal, rowCosineVec, colCosineVec);
        td.metadata.viewPlaneNormal = annotationViewPlaneNormal;
      }
      // At this point annotationViewPlaneNormal is always defined, so the
      // parallelism check alone decides whether the annotation is kept.
      const isParallel =
        Math.abs(vec3.dot(viewPlaneNormal, annotationViewPlaneNormal)) >
        PARALLEL_THRESHOLD;

      return isParallel;
    }
  );
 
  // This is an empty array when no annotations lie in the same plane as the
  // camera.
  return annotationsWithParallelNormals;
}
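
// Usage sketch (illustrative only): `viewport` is assumed to be a Cornerstone
// viewport exposing getCamera(), and `annotations` an Annotations array the
// caller has already retrieved (e.g. from the annotation state). Only the
// annotations whose view plane normal is parallel to the camera's normal are
// returned.
//
//   const camera = viewport.getCamera();
//   const inPlaneAnnotations = filterAnnotationsWithinSamePlane(
//     annotations,
//     camera
//   );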