Files
dronevideoplayer/client/src/utils/geoProjection.ts
minsung 2aae3d1c0d feat: StationOverlay 렌더링 최적화 및 스무딩 적용 close #1
- 텍스트(측점/POI) 전 프레임 사전 계산 Map (requestIdleCallback 백그라운드)
- 드론 데이터 이동 평균 스무딩 (smoothFrame ±N프레임)
- 30fps→60fps 프레임 간 선형 보간 (performance.now() 기반)
- EMA(지수이동평균) 표시 위치 스무딩 (α=0.01 기본값)
- 글씨 2배 크기, bold, strokeText 테두리, 배경 박스 제거
- 카메라 파라미터 패널에 smooth/EMA α 슬라이더 추가

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-01 15:11:39 +09:00

295 lines
10 KiB
TypeScript
Raw Blame History

This file contains ambiguous Unicode characters
This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.
/**
* 클라이언트 사이드 3D 좌표 변환 투영
*
* Python advanced_tuner_v2.py 와 동일한 알고리즘:
* R_b2w = Rz(-yaw) * Rx(pitch) * Ry(roll)
* R_align = [[1,0,0],[0,0,-1],[0,1,0]]
* R_w2c = R_align @ R_b2w.T
*
* 좌표계: EPSG:5186 TM [East(m), North(m), Up(m)]
* Python swap_xy=ON 과 동일: easting=X, northing=Y
* sensorH 기본값 20.25mm = 36 × (9/16), 16:9 동영상 기준
*/
import proj4 from 'proj4';
/** Minimal per-frame drone telemetry used for projection (per-frame SRT values). */
export interface DroneFrameBasic {
  frame: number;    // frame index
  lat: number;      // latitude in degrees (WGS84 — fed to the EPSG:4326→5186 transform)
  lon: number;      // longitude in degrees (WGS84)
  altitude: number; // elevation in metres (used directly as the Up coordinate)
  yaw: number;      // yaw in degrees (converted with toRad downstream)
  pitch: number;    // pitch in degrees
  roll: number;     // roll in degrees
  focalLen: number; // per-frame focal length — NOTE(review): unused by this module (params.focalLen is used instead); presumably mm, confirm at the parse site
}
/**
 * Camera parameters — defaults mirror Python advanced_tuner_v2.py.
 *
 * yaw / pitch / roll are offsets ADDED to the per-frame SRT values (default 0).
 * Python: pitch = radians(meta['pitch'] + spn_pitch.value()) — spn_pitch defaults to 0.
 * focalLen / sensorW / sensorH follow the Python spn_focal(24) / spn_sensor(36) defaults.
 * offX/offY/offZ: drone position correction — equivalent to Python off_x/y/z.
 */
export interface CameraParams {
  yawOffset: number; // yaw offset (degrees, added to the per-frame SRT yaw)
  pitch: number;     // pitch offset (degrees, added to the per-frame SRT pitch, default 0)
  roll: number;      // roll offset (degrees, added to the per-frame SRT roll, default 0)
  focalLen: number;  // focal length (35mm-equivalent mm, default 24)
  cx0: number;       // principal point X offset (normalized)
  cy0: number;       // principal point Y offset (normalized)
  offX: number;      // drone position East correction (m, default 0)
  offY: number;      // drone position North correction (m, default 0)
  offZ: number;      // drone position Up correction (m, default 0)
  sensorW: number;   // sensor width (mm, default 36)
  sensorH: number;   // sensor height (mm, default 20.25 = 36×9/16, 16:9 video)
}
/** Defaults matching Python advanced_tuner_v2.py. */
export const DEFAULT_CAMERA_PARAMS: CameraParams = {
  yawOffset: 0,
  pitch: 0,       // offset; Python spn_pitch default is 0
  roll: 0,        // offset; Python spn_roll default is 0
  focalLen: 24,   // Python spn_focal default is 24
  cx0: 0,
  cy0: 0,
  offX: 0,
  offY: 0,
  offZ: 0,
  sensorW: 36,    // Python spn_sensor default is 36
  sensorH: 20.25, // 36 × 9/16 — sensor height for 16:9 video
};
/**
 * Always returns a fresh copy of DEFAULT_CAMERA_PARAMS, ignoring the frame
 * (Python-style: per-frame SRT values are applied automatically downstream).
 */
export function paramsFromFrame(_frame: DroneFrameBasic): CameraParams {
  // Shallow copy so callers can mutate their params without touching the defaults.
  return Object.assign({}, DEFAULT_CAMERA_PARAMS);
}
// EPSG:5186 Korean TM definition (same parameters as the Python pyproj setup)
proj4.defs('EPSG:5186',
  '+proj=tmerc +lat_0=38 +lon_0=127 +k=1 +x_0=200000 +y_0=600000 +ellps=GRS80 +units=m +no_defs'
);
// Converter built once at module load and reused by every call below.
const _toTM = proj4('EPSG:4326', 'EPSG:5186');
/** Lat/lon (WGS84) → EPSG:5186 TM [easting(m), northing(m)]. */
function latLonToTM(lat: number, lon: number): [number, number] {
  // proj4 forward() expects [lon, lat] order and returns [easting, northing]
  const [e, n] = _toTM.forward([lon, lat]);
  return [e, n];
}
/** Converts an angle in degrees to radians. */
function toRad(degrees: number): number {
  return degrees * Math.PI / 180;
}
/**
 * Lat/lon + elevation → world [East, North, Up] in metres.
 * Matches Python swap_xy=ON: EPSG:5186 TM easting/northing plus the altitude
 * relative to the reference elevation (refLat/refLon are intentionally unused).
 */
function geoToEnu(
  lat: number, lon: number, alt: number,
  _refLat: number, _refLon: number, refAlt: number,
): [number, number, number] {
  const [easting, northing] = latLonToTM(lat, lon);
  const up = alt - refAlt;
  return [easting, northing, up];
}
/** Result of projecting a world point into the image. */
export interface ProjectResult {
  px: number;     // 0–1, 0 = left edge (clamped)
  py: number;     // 0–1, 0 = top edge (clamped)
  pxRaw: number;  // unclamped original value
  pyRaw: number;  // unclamped original value
  dist: number;   // horizontal distance (m)
  h: number;      // horizontal angle (degrees)
  v: number;      // vertical angle (degrees)
  inFov: boolean; // true when the raw pixel lies inside [0,1] × [0,1]
}
/** 3-component numeric tuple — used for ENU vectors and rotation-matrix rows. */
type Vec3 = [number, number, number];
/** Camera-space coordinates (no Zc sign check — near clipping is the caller's job). */
export interface CameraCoords {
  Xc: number; // maps to the horizontal image axis via Xc/Zc
  Yc: number; // maps to the vertical image axis via Yc/Zc
  Zc: number; // depth along the optical axis; must be > 0 to project
}
/** Camera coordinates → normalized pixel position (call only after ensuring Zc > 0). */
export function pixelFromCamera(
  cc: CameraCoords,
  params: CameraParams,
): { pxRaw: number; pyRaw: number } {
  const focal = params.focalLen;
  const sensorW = params.sensorW ?? 36;
  const sensorH = params.sensorH ?? 20.25;
  // Perspective divide, then scale by focal length over sensor dimension.
  const xOverZ = cc.Xc / cc.Zc;
  const yOverZ = cc.Yc / cc.Zc;
  return {
    pxRaw: (0.5 + params.cx0) + xOverZ * (focal / sensorW),
    pyRaw: (0.5 + params.cy0) + yOverZ * (focal / sensorH),
  };
}
// ── 공통 내부 계산 ────────────────────────────────────────────────────────────
/**
 * Relative ENU vector from the (offset-corrected) drone position to the
 * target, plus the horizontal ground distance in metres.
 */
function buildRelEnu(
  camera: DroneFrameBasic,
  targetLat: number, targetLon: number, targetAlt: number,
  params: CameraParams,
  ref?: { lat: number; lon: number; alt: number },
): { relEnu: Vec3; dist: number } {
  const origin = ref ?? { lat: camera.lat, lon: camera.lon, alt: camera.altitude };
  const target = geoToEnu(targetLat, targetLon, targetAlt, origin.lat, origin.lon, origin.alt);
  const drone = geoToEnu(camera.lat, camera.lon, camera.altitude, origin.lat, origin.lon, origin.alt);
  // Apply the drone position correction (Python off_x/y/z).
  const corrected: Vec3 = [
    drone[0] + (params.offX ?? 0),
    drone[1] + (params.offY ?? 0),
    drone[2] + (params.offZ ?? 0),
  ];
  const relEnu: Vec3 = [
    target[0] - corrected[0],
    target[1] - corrected[1],
    target[2] - corrected[2],
  ];
  // Horizontal (ground-plane) distance only — the Up component is excluded.
  const dist = Math.sqrt(relEnu[0] ** 2 + relEnu[1] ** 2);
  return { relEnu, dist };
}
/**
 * Body→world rotation matrix R_b2w = Rz(-yaw) · Rx(pitch) · Ry(roll),
 * with each angle = per-frame SRT value + the corresponding param offset.
 *
 * Component matrices (see the Python reference):
 *   Rz(-yaw)  = [[cy, sy, 0], [-sy, cy, 0], [0, 0, 1]]
 *   Rx(pitch) = [[1, 0, 0], [0, cp, -sp], [0, sp, cp]]
 *   Ry(roll)  = [[cr, 0, sr], [0, 1, 0], [-sr, 0, cr]]
 * The returned matrix is their product expanded element by element.
 */
function buildRotation(camera: DroneFrameBasic, params: CameraParams): [Vec3, Vec3, Vec3] {
  const yaw = toRad(camera.yaw + params.yawOffset);
  const pitch = toRad(camera.pitch + params.pitch); // SRT per-frame + offset
  const roll = toRad(camera.roll + params.roll);    // SRT per-frame + offset
  const cy = Math.cos(yaw), sy = Math.sin(yaw);
  const cp = Math.cos(pitch), sp = Math.sin(pitch);
  const cr = Math.cos(roll), sr = Math.sin(roll);
  return [
    [ cy*cr + sy*sp*sr, sy*cp, cy*sr - sy*sp*cr],
    [-sy*cr + cy*sp*sr, cy*cp, -sy*sr - cy*sp*cr],
    [ -cp*sr, sp, cp*cr ],
  ];
}
/**
 * Applies R_w2c = R_align · R_b2w.T (R_align = [[1,0,0],[0,0,-1],[0,1,0]])
 * to a world-space vector. Each camera axis is therefore a dot product of
 * `rel` with one COLUMN of R_b2w: col 0 → Xc, -col 2 → Yc, col 1 → Zc.
 */
function applyRw2c(b2w: [Vec3, Vec3, Vec3], rel: Vec3): CameraCoords {
  const dotCol = (r0: number, r1: number, r2: number) =>
    r0 * rel[0] + r1 * rel[1] + r2 * rel[2];
  return {
    Xc: dotCol(b2w[0][0], b2w[1][0], b2w[2][0]),
    Yc: -dotCol(b2w[0][2], b2w[1][2], b2w[2][2]),
    Zc: dotCol(b2w[0][1], b2w[1][1], b2w[2][1]),
  };
}
/**
 * Returns raw camera-space coordinates with no Zc sign check.
 * Used for Python-style near clipping of the track centerline.
 */
export function toCameraCoords(
  camera: DroneFrameBasic,
  targetLat: number, targetLon: number, targetAlt: number,
  params: CameraParams,
  ref?: { lat: number; lon: number; alt: number },
): CameraCoords {
  const rotation = buildRotation(camera, params);
  const { relEnu } = buildRelEnu(camera, targetLat, targetLon, targetAlt, params, ref);
  return applyRw2c(rotation, relEnu);
}
/**
* Python advanced_tuner_v2.py 와 동일한 투영 공식
*
* 회전 행렬:
* R_b2w = Rz(-yaw) × Rx(pitch) × Ry(roll)
* R_align = [[1,0,0],[0,0,-1],[0,1,0]] (body→camera 축 변환)
* R_w2c = R_align × R_b2w.T
*
* 투영:
* pts_cam = R_w2c × rel_enu
* u_norm = 0.5 + (Xc/Zc) × (f/sensorW)
* v_norm = 0.5 + (Yc/Zc) × (f/sensorH)
*
* 드론 위치 오프셋 (off_x/y/z): 카메라 위치를 ENU 공간에서 보정
*/
export function projectPoint(
camera: DroneFrameBasic,
targetLat: number,
targetLon: number,
targetAlt: number,
params: CameraParams,
ref?: { lat: number; lon: number; alt: number },
): ProjectResult | null {
const refPt = ref ?? { lat: camera.lat, lon: camera.lon, alt: camera.altitude };
// 1. 월드 ENU (m)
const stEnu = geoToEnu(targetLat, targetLon, targetAlt, refPt.lat, refPt.lon, refPt.alt);
const drEnu = geoToEnu(camera.lat, camera.lon, camera.altitude, refPt.lat, refPt.lon, refPt.alt);
// 드론 위치 보정 적용 (Python: drone_pos = [dx+off_x, dy+off_y, alt+off_z])
const drEnuAdj: Vec3 = [
drEnu[0] + (params.offX ?? 0),
drEnu[1] + (params.offY ?? 0),
drEnu[2] + (params.offZ ?? 0),
];
const relEnu: Vec3 = [
stEnu[0] - drEnuAdj[0],
stEnu[1] - drEnuAdj[1],
stEnu[2] - drEnuAdj[2],
];
const dist = Math.sqrt(relEnu[0] ** 2 + relEnu[1] ** 2);
// 2. 회전 행렬 (Python 방식: Rz(-yaw)*Rx(pitch)*Ry(roll), 모두 라디안)
// Python: yaw=radians(meta['yaw']+off_yaw), pitch=radians(meta['pitch']+off_pitch), ...
const yaw = toRad(camera.yaw + params.yawOffset);
const pitch = toRad(camera.pitch + params.pitch); // SRT per-frame + offset
const roll = toRad(camera.roll + params.roll); // SRT per-frame + offset
const cy = Math.cos(yaw), sy = Math.sin(yaw);
const cp = Math.cos(pitch), sp = Math.sin(pitch);
const cr = Math.cos(roll), sr = Math.sin(roll);
// Rz(-yaw): rotation around Z by -yaw
// [[cy, sy, 0], [-sy, cy, 0], [0, 0, 1]]
// Rx(pitch): rotation around X by pitch
// [[1, 0, 0], [0, cp, -sp], [0, sp, cp]]
// Ry(roll): rotation around Y by roll
// [[cr, 0, sr], [0, 1, 0], [-sr, 0, cr]]
//
// R_b2w = Rz(-yaw) * Rx(pitch) * Ry(roll)
// Computed element by element:
const b2w: [Vec3, Vec3, Vec3] = [
[
cy*cr + sy*sp*sr, sy*cp, cy*sr - sy*sp*cr,
],
[
-sy*cr + cy*sp*sr, cy*cp, -sy*sr - cy*sp*cr,
],
[
-cp*sr, sp, cp*cr,
],
];
// R_w2c = R_align @ R_b2w.T (R_align = [[1,0,0],[0,0,-1],[0,1,0]])
//
// R_w2c rows are derived from columns of R_b2w:
// R_w2c row 0 = col 0 of R_b2w (R_align row 0 = [1,0,0])
// R_w2c row 1 = -(col 2 of R_b2w) (R_align row 1 = [0,0,-1])
// R_w2c row 2 = col 1 of R_b2w (R_align row 2 = [0,1,0])
//
// p_cam = R_w2c @ relEnu → access b2w columns (swap first index across rows)
const Xc = b2w[0][0]*relEnu[0] + b2w[1][0]*relEnu[1] + b2w[2][0]*relEnu[2]; // col 0
const Yc = -(b2w[0][2]*relEnu[0] + b2w[1][2]*relEnu[1] + b2w[2][2]*relEnu[2]); // -col 2
const Zc = b2w[0][1]*relEnu[0] + b2w[1][1]*relEnu[1] + b2w[2][1]*relEnu[2]; // col 1
if (Zc <= 0) return null;
// 3. 핀홀 투영 (Python: u=f_px*(Xc/Zc)+w/2, v=f_px*(Yc/Zc)+h/2)
const f = params.focalLen;
const sW = params.sensorW ?? 36;
const sH = params.sensorH ?? 20.25;
const pxRaw = (0.5 + params.cx0) + (Xc / Zc) * (f / sW);
const pyRaw = (0.5 + params.cy0) + (Yc / Zc) * (f / sH);
return {
px: Math.max(0, Math.min(1, pxRaw)),
py: Math.max(0, Math.min(1, pyRaw)),
pxRaw,
pyRaw,
dist,
h: Math.atan2(Xc, Zc) * (180 / Math.PI),
v: Math.atan2(-Yc, Zc) * (180 / Math.PI),
inFov: pxRaw >= 0 && pxRaw <= 1 && pyRaw >= 0 && pyRaw <= 1,
};
}