from __future__ import annotations

import csv
import base64
from datetime import date, datetime, time, timedelta, timezone
import hashlib
import hmac
from io import BytesIO, StringIO
import json
import math
from decimal import Decimal, ROUND_HALF_UP
from pathlib import Path
import re
import secrets
import shutil
import struct
import unicodedata
import uuid

import ezdxf
from ezdxf import recover
from fastapi import FastAPI, File, Form, Header, HTTPException, Request, UploadFile
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import FileResponse, HTMLResponse
from fastapi.staticfiles import StaticFiles
from openpyxl import load_workbook
from pydantic import BaseModel, Field

from .config import BASE_DIR, LEGACY_DIR, MOCK_LOGIN_ENABLED, UPLOAD_DIR
from .db import get_conn, init_db


app = FastAPI(title="MH Dashboard Organization API")

app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

LEGACY_STATIC_DIR = LEGACY_DIR / "static"
INCOMING_FILES_DIR = BASE_DIR / "incoming-files"
FIXED_OFFICE_SOURCE_KEY = "technical-development-center"
FIXED_OFFICE_CONFIGS = {
    "technical-development-center": {
        "name": "기술개발센터",
        "html_path": INCOMING_FILES_DIR / "seat" / "center_chair_people_map.html",
        "payload_path": INCOMING_FILES_DIR / "seat" / "center_chair_people_payload.js",
    },
    "hanmac-building-6f": {
        "name": "한맥빌딩 6층",
        "html_path": INCOMING_FILES_DIR / "seat" / "center_chair_people_map_6f.html",
        "payload_path": INCOMING_FILES_DIR / "seat" / "center_chair_people_payload_6f.js",
    },
    "hanmac-building-7f": {
        "name": "한맥빌딩 7층",
        "html_path": INCOMING_FILES_DIR / "seat" / "center_chair_people_map_7f.html",
        "payload_path": INCOMING_FILES_DIR / "seat" / "center_chair_people_payload_7f.js",
    },
}
_fixed_office_cache: dict[str, dict[str, object]] = {}
AUTH_DEFAULT_PASSWORD = "1111"
AUTH_PASSWORD_ITERATIONS = 390000
AUTH_SESSION_HOURS = 12
PAYMENT_HEADER_ORDER = [
    "상신회사", "청구일", "발행일", "발행월", "계정코드", "관리계정코드", "각사 계정명", "프로젝트코드",
    "사업명", "사업명(표출PJT)", "사업명(인트라넷기준)", "사업분야", "세부분야", "기획/개발/영업",
    "대분류", "중분류", "소분류", "부서명", "팀명", "거래처", "적요", "차변공급가", "대변공급가",
    "지출", "수입", "특이사항", "구분", "프로젝트성격", "", "", "", "", "", "", "", "", ""
]
MH_HEADER_ORDER = [
    "No", "근무일자", "주말/지각", "팀 분류", "팀", "사원번호", "이름", "직책", "user_state", "시차시간",
    "사업 종류", "메인업무 프로젝트 코드", "메인업무 프로젝트명", "메인업무 서브 코드", "메인업무 근무시간", "검토",
    "사업 종류", "추가업무1 프로젝트 코드", "추가업무1 프로젝트명", "추가업무1 서브 코드", "추가업무1 근무시간",
    "사업 종류", "추가업무2 프로젝트 코드", "추가업무2 프로젝트명", "추가업무2 서브 코드", "추가업무2 근무시간",
    "사업 종류", "추가업무3 프로젝트 코드", "추가업무3 프로젝트명", "추가업무3 서브 코드", "추가업무3 근무시간",
    "사업 종류", "추가업무4 프로젝트 코드", "추가업무4 프로젝트명", "추가업무4 서브 코드", "추가업무4 근무시간",
    "사업 종류", "추가업무5 프로젝트 코드", "추가업무5 프로젝트명", "추가업무5 서브 코드", "추가업무5 근무시간",
    "사업 종류", "연장근무 프로젝트 코드", "연장근무 프로젝트명", "연장근무 서브코드", "연장근무 시간(실제)", "연장근무 시간(가공)"
]


class MemberPayload(BaseModel):
    id: int | None = None
    name: str = Field(min_length=1)
    employee_id: str = ""
    company: str = ""
    rank: str = ""
    role: str = ""
    department: str = ""
    grp: str = ""
    division: str = ""
    team: str = ""
    cell: str = ""
    work_status: str = ""
    work_time: str = ""
    phone: str = ""
    email: str = ""
    seat_label: str = ""
    photo_url: str = ""
    sort_order: int | None = None


class MemberBulkPayload(BaseModel):
    items: list[MemberPayload]


class SeatMapPayload(BaseModel):
    name: str = Field(min_length=1)
    image_url: str = ""
    source_type: str = "image"
    source_url: str = ""
    preview_svg: str = ""
    view_box_min_x: float | None = None
    view_box_min_y: float | None = None
    view_box_width: float | None = None
    view_box_height: float | None = None
    image_width: int | None = None
    image_height: int | None = None
    grid_rows: int = Field(default=1, ge=1, le=200)
    grid_cols: int = Field(default=1, ge=1, le=200)
    cell_gap: int = Field(default=0, ge=0, le=24)
    is_active: bool = True


class SeatPlacementPayload(BaseModel):
    member_id: int
    seat_slot_id: int | None = None
    row_index: int = Field(default=0, ge=0)
    col_index: int = Field(default=0, ge=0)
    seat_label: str = ""


class SeatLayoutPayload(BaseModel):
    placements: list[SeatPlacementPayload]


LEGACY_HEADER_MAP = {
    "이름": "name",
    "name": "name",
    "tag": "employee_id",
    "employee_id": "employee_id",
    "소속회사": "company",
    "co": "company",
    "company": "company",
    "직급": "rank",
    "rank": "rank",
    "직책": "role",
    "pos": "role",
    "role": "role",
    "부서": "department",
    "part": "department",
    "department": "department",
    "그룹": "grp",
    "gr": "grp",
    "grp": "grp",
    "디비전": "division",
    "div": "division",
    "division": "division",
    "팀": "team",
    "team": "team",
    "teal": "team",
    "셀": "cell",
    "cell": "cell",
    "근무상태": "work_status",
    "work_status": "work_status",
    "근무시간": "work_time",
    "work_time": "work_time",
    "전화번호": "phone",
    "ph": "phone",
    "phone": "phone",
    "이메일": "email",
    "mail": "email",
    "email": "email",
    "자리위치": "seat_label",
    "seat_label": "seat_label",
    "사진": "photo_url",
    "photo_url": "photo_url",
}


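# Behavior sketch for the helper below (examples follow directly from its branches):
#   normalize_phone("1012345678")  -> "010-1234-5678"  (missing leading 0 restored)
#   normalize_phone("01012345678") -> "010-1234-5678"  (11-digit mobile, 3-4-4 split)
#   normalize_phone("0311234567")  -> "031-123-4567"   (10-digit number, 3-3-4 split)
# Anything else falls through and is returned as the stripped raw text.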
def normalize_phone(value: object) -> str:
    raw = str(value or "").strip()
    digits = "".join(ch for ch in raw if ch.isdigit())
    if not digits:
        return ""
    if len(digits) == 10 and not digits.startswith("0"):
        digits = f"0{digits}"
    if len(digits) == 11 and digits.startswith("0"):
        return f"{digits[:3]}-{digits[3:7]}-{digits[7:]}"
    if len(digits) == 10 and digits.startswith("0"):
        return f"{digits[:3]}-{digits[3:6]}-{digits[6:]}"
    return raw


def serialize_member_payload(item: MemberPayload, sort_order: int) -> tuple[object, ...]:
    return (
        item.name.strip(),
        item.employee_id.strip(),
        item.company.strip(),
        item.rank.strip(),
        item.role.strip(),
        item.department.strip(),
        item.grp.strip(),
        item.division.strip(),
        item.team.strip(),
        item.cell.strip(),
        item.work_status.strip(),
        item.work_time.strip(),
        normalize_phone(item.phone),
        item.email.strip(),
        item.seat_label.strip(),
        item.photo_url.strip(),
        sort_order,
    )


def _encode_auth_bytes(value: bytes) -> str:
    return base64.urlsafe_b64encode(value).decode("ascii").rstrip("=")


def _decode_auth_bytes(value: str) -> bytes:
    padded = value + "=" * (-len(value) % 4)
    return base64.urlsafe_b64decode(padded.encode("ascii"))


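# The stored credential format produced below is
#   pbkdf2_sha256$<iterations>$<urlsafe-b64 salt>$<urlsafe-b64 digest>
# with base64 padding stripped. The salt is random per call, so two hashes of the
# same password differ; verify_password() re-derives the digest from the embedded
# salt and iteration count and compares with hmac.compare_digest.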
def hash_password(password: str, *, salt: bytes | None = None) -> str:
    actual_salt = salt or secrets.token_bytes(16)
    digest = hashlib.pbkdf2_hmac(
        "sha256",
        password.encode("utf-8"),
        actual_salt,
        AUTH_PASSWORD_ITERATIONS,
    )
    return f"pbkdf2_sha256${AUTH_PASSWORD_ITERATIONS}${_encode_auth_bytes(actual_salt)}${_encode_auth_bytes(digest)}"


def verify_password(password: str, stored_hash: str) -> bool:
    try:
        algorithm, iterations_raw, salt_raw, digest_raw = stored_hash.split("$", 3)
        if algorithm != "pbkdf2_sha256":
            return False
        iterations = int(iterations_raw)
        salt = _decode_auth_bytes(salt_raw)
        expected = _decode_auth_bytes(digest_raw)
    except Exception:
        return False

    actual = hashlib.pbkdf2_hmac(
        "sha256",
        password.encode("utf-8"),
        salt,
        iterations,
    )
    return hmac.compare_digest(actual, expected)


def serialize_auth_user(user: dict[str, object]) -> dict[str, object]:
    return {
        "id": int(user["id"]),
        "username": str(user.get("username") or ""),
        "display_name": str(user.get("display_name") or ""),
        "role": str(user.get("role") or "admin"),
        "member_id": int(user["member_id"]) if user.get("member_id") is not None else None,
        "rank": str(user.get("rank") or ""),
    }


def build_auth_session_payload(user: dict[str, object], session_id: uuid.UUID, expires_at: datetime) -> dict[str, object]:
    expires_at_text = expires_at.astimezone(timezone.utc).isoformat().replace("+00:00", "Z")
    return {
        "token": str(session_id),
        "user": serialize_auth_user(user),
        "session_expires_at": expires_at_text,
    }


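# Examples for the parser below:
#   extract_bearer_token("Bearer abc123") -> "abc123"
#   extract_bearer_token("bearer abc123") -> "abc123"   (scheme check is case-insensitive)
#   extract_bearer_token("Basic abc123")  -> None
#   extract_bearer_token(None)            -> None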
def extract_bearer_token(authorization: str | None) -> str | None:
    if not authorization:
        return None
    scheme, _, token = authorization.partition(" ")
    if scheme.lower() != "bearer" or not token.strip():
        return None
    return token.strip()


def ensure_default_admin_user(cur) -> None:
    cur.execute(
        """
        INSERT INTO auth.users (
            username, password_hash, display_name, role, member_id, is_active, created_from, password_changed_at
        )
        VALUES (%s, %s, %s, %s, NULL, TRUE, 'seed_admin', NOW())
        ON CONFLICT (username) DO UPDATE
        SET password_hash = EXCLUDED.password_hash,
            display_name = EXCLUDED.display_name,
            role = EXCLUDED.role,
            is_active = TRUE,
            updated_at = NOW()
        """,
        ("1", hash_password("1"), "System Admin", "admin"),
    )


def sync_auth_users_from_members(cur) -> None:
    cur.execute(
        """
        SELECT id, employee_id, name
        FROM members
        WHERE COALESCE(TRIM(employee_id), '') <> ''
        ORDER BY id ASC
        """
    )
    members = cur.fetchall()

    cur.execute(
        """
        SELECT id, username, password_hash, display_name, role, member_id, is_active, created_from
        FROM auth.users
        """
    )
    existing_users = cur.fetchall()
    existing_by_member_id: dict[int, dict[str, object]] = {}
    existing_by_username: dict[str, dict[str, object]] = {}
    for user in existing_users:
        if user.get("member_id") is not None:
            existing_by_member_id[int(user["member_id"])] = user
        username = str(user.get("username") or "").strip().lower()
        if username:
            existing_by_username[username] = user

    matched_user_ids: set[int] = set()
    seen_usernames: set[str] = set()
    default_hash = hash_password(AUTH_DEFAULT_PASSWORD)

    for member in members:
        member_id = int(member["id"])
        username = str(member.get("employee_id") or "").strip().lower()
        display_name = str(member.get("name") or "").strip() or username
        if username in seen_usernames:
            raise HTTPException(status_code=400, detail=f"중복 사번이 있어 로그인 계정을 생성할 수 없습니다: {username}")
        seen_usernames.add(username)
        existing = existing_by_member_id.get(member_id) or existing_by_username.get(username)

        if existing is None:
            cur.execute(
                """
                INSERT INTO auth.users (
                    username, password_hash, display_name, role, member_id, is_active, created_from, password_changed_at
                )
                VALUES (%s, %s, %s, %s, %s, TRUE, 'member_import', NOW())
                RETURNING id
                """,
                (username, default_hash, display_name, "admin", member_id),
            )
            matched_user_ids.add(int(cur.fetchone()["id"]))
            continue

        matched_user_ids.add(int(existing["id"]))
        password_hash = str(existing.get("password_hash") or "").strip() or default_hash
        cur.execute(
            """
            UPDATE auth.users
            SET username = %s,
                password_hash = %s,
                display_name = %s,
                member_id = %s,
                is_active = TRUE,
                updated_at = NOW()
            WHERE id = %s
            """,
            (
                username,
                password_hash,
                display_name,
                member_id,
                int(existing["id"]),
            ),
        )

    if matched_user_ids:
        cur.execute(
            """
            UPDATE auth.users
            SET is_active = FALSE,
                member_id = NULL,
                updated_at = NOW()
            WHERE created_from = 'member_import'
              AND id <> ALL(%s)
              AND member_id IS NOT NULL
            """,
            (sorted(matched_user_ids),),
        )
    else:
        cur.execute(
            """
            UPDATE auth.users
            SET is_active = FALSE,
                member_id = NULL,
                updated_at = NOW()
            WHERE created_from = 'member_import'
              AND member_id IS NOT NULL
            """
        )

    ensure_default_admin_user(cur)


def fetch_members() -> list[dict[str, object]]:
    with get_conn() as conn:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT id, name, employee_id, company, rank, role, department, grp, division, team, cell,
                       work_status, work_time, phone, email, seat_label, photo_url,
                       sort_order, created_at, updated_at
                FROM members
                ORDER BY sort_order ASC, id ASC
                """
            )
            return cur.fetchall()


def serialize_seat_map_payload(payload: SeatMapPayload) -> tuple[object, ...]:
    return (
        payload.name.strip(),
        payload.source_type.strip() or "image",
        payload.source_url.strip(),
        payload.preview_svg,
        payload.view_box_min_x,
        payload.view_box_min_y,
        payload.view_box_width,
        payload.view_box_height,
        payload.image_url.strip(),
        payload.image_width,
        payload.image_height,
        payload.grid_rows,
        payload.grid_cols,
        payload.cell_gap,
        payload.is_active,
    )


def fetch_seat_map(seat_map_id: int) -> dict[str, object] | None:
    with get_conn() as conn:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT id, name, source_type, source_url, preview_svg,
                       view_box_min_x, view_box_min_y, view_box_width, view_box_height,
                       image_url, image_width, image_height, grid_rows, grid_cols,
                       cell_gap, is_active, created_at, updated_at
                FROM seat_maps
                WHERE id = %s
                """,
                (seat_map_id,),
            )
            return cur.fetchone()


def fetch_active_seat_map() -> dict[str, object] | None:
    with get_conn() as conn:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT id, name, source_type, source_url, preview_svg,
                       view_box_min_x, view_box_min_y, view_box_width, view_box_height,
                       image_url, image_width, image_height, grid_rows, grid_cols,
                       cell_gap, is_active, created_at, updated_at
                FROM seat_maps
                WHERE is_active = TRUE
                ORDER BY updated_at DESC, id DESC
                LIMIT 1
                """
            )
            return cur.fetchone()


def ensure_fixed_office_seat_map(office_key: str = FIXED_OFFICE_SOURCE_KEY, activate: bool = True) -> dict[str, object]:
    config = FIXED_OFFICE_CONFIGS.get(office_key)
    if not config:
        raise HTTPException(status_code=404, detail="Fixed office configuration not found.")
    template = parse_fixed_office_template(office_key)
    slots = template["slots"]
    with get_conn() as conn:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT id
                FROM seat_maps
                WHERE source_type = 'fixed_html'
                  AND source_url = %s
                LIMIT 1
                """,
                (office_key,),
            )
            row = cur.fetchone()
            if activate:
                cur.execute("UPDATE seat_maps SET is_active = FALSE, updated_at = NOW() WHERE is_active = TRUE")
            if row is None:
                cur.execute(
                    """
                    INSERT INTO seat_maps (
                        name, image_url, source_type, source_url, preview_svg,
                        view_box_min_x, view_box_min_y, view_box_width, view_box_height,
                        image_width, image_height, grid_rows, grid_cols, cell_gap, is_active
                    )
                    VALUES (%s, '', 'fixed_html', %s, '', NULL, NULL, NULL, NULL, NULL, NULL, 1, 1, 0, %s)
                    RETURNING id
                    """,
                    (str(config["name"]), office_key, activate),
                )
                seat_map_id = int(cur.fetchone()["id"])
            else:
                seat_map_id = int(row["id"])
                cur.execute(
                    """
                    UPDATE seat_maps
                    SET name = %s,
                        source_type = 'fixed_html',
                        source_url = %s,
                        image_url = '',
                        preview_svg = '',
                        grid_rows = 1,
                        grid_cols = 1,
                        cell_gap = 0,
                        is_active = %s,
                        updated_at = NOW()
                    WHERE id = %s
                    """,
                    (str(config["name"]), office_key, activate, seat_map_id),
                )

            cur.execute("SELECT id, slot_key FROM seat_slots WHERE seat_map_id = %s", (seat_map_id,))
            existing_slots = {str(item["slot_key"]): int(item["id"]) for item in cur.fetchall()}
            incoming_keys = {str(slot["slot_key"]) for slot in slots}

            for slot in slots:
                slot_key = str(slot["slot_key"])
                if slot_key in existing_slots:
                    cur.execute(
                        """
                        UPDATE seat_slots
                        SET label = %s, x = %s, y = %s, rotation = %s, layer_name = %s, updated_at = NOW()
                        WHERE seat_map_id = %s AND slot_key = %s
                        """,
                        (
                            slot["label"],
                            slot["x"],
                            slot["y"],
                            slot["rotation"],
                            slot["layer_name"],
                            seat_map_id,
                            slot_key,
                        ),
                    )
                else:
                    cur.execute(
                        """
                        INSERT INTO seat_slots (seat_map_id, slot_key, label, x, y, rotation, layer_name)
                        VALUES (%s, %s, %s, %s, %s, %s, %s)
                        """,
                        (
                            seat_map_id,
                            slot_key,
                            slot["label"],
                            slot["x"],
                            slot["y"],
                            slot["rotation"],
                            slot["layer_name"],
                        ),
                    )

            if existing_slots:
                stale_keys = [key for key in existing_slots if key not in incoming_keys]
                if stale_keys:
                    cur.execute(
                        "DELETE FROM seat_slots WHERE seat_map_id = %s AND slot_key = ANY(%s)",
                        (seat_map_id, stale_keys),
                    )
        conn.commit()
    seat_map = fetch_seat_map(seat_map_id)
    if seat_map is None:
        raise HTTPException(status_code=500, detail="Fixed office seat map initialization failed.")
    return seat_map


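# Spreadsheet-style row letters (bijective base-26) plus a 1-based, zero-padded
# column number, e.g.:
#   compute_seat_label(0, 0)  -> "A-01"
#   compute_seat_label(25, 9) -> "Z-10"
#   compute_seat_label(26, 0) -> "AA-01"   (rolls over from Z to AA, not BA)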
def compute_seat_label(row_index: int, col_index: int) -> str:
    quotient = row_index
    row_label = ""
    while True:
        quotient, remainder = divmod(quotient, 26)
        row_label = chr(65 + remainder) + row_label
        if quotient == 0:
            break
        quotient -= 1
    return f"{row_label}-{col_index + 1:02d}"


def compute_slot_label(index: int) -> str:
    return f"CHAIR-{index + 1:03d}"


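# The fixed-office payload encodes segment coordinates as a base64 blob of
# little-endian int32s. A round-trip sketch of the decoder below:
#   raw = base64.b64encode(struct.pack("<4i", 10, 20, 30, 40)).decode("ascii")
#   decode_segment_values(raw)  -> [10, 20, 30, 40]
# Callers regroup the flat list four values per segment and divide by 10
# (see parse_fixed_office_template).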
def decode_segment_values(raw_base64: str) -> list[int]:
    decoded = base64.b64decode(raw_base64.encode("ascii"))
    if not decoded:
        return []
    return [item[0] for item in struct.iter_unpack("<i", decoded)]


def parse_fixed_office_template(office_key: str = FIXED_OFFICE_SOURCE_KEY) -> dict[str, object]:
    cached = _fixed_office_cache.get(office_key)
    if cached is not None:
        return cached

    config = FIXED_OFFICE_CONFIGS.get(office_key)
    if not config:
        raise HTTPException(status_code=404, detail="Fixed office configuration not found.")

    html_path = Path(str(config["html_path"]))
    payload_path = Path(str(config["payload_path"]))
    if not html_path.exists():
        raise HTTPException(status_code=500, detail=f"Fixed office viewer template not found: {office_key}")
    if not payload_path.exists():
        raise HTTPException(status_code=500, detail=f"Fixed office payload not found: {office_key}")

    html = html_path.read_text(encoding="utf-8")
    payload_js = payload_path.read_text(encoding="utf-8")
    payload_match = re.search(r"window\.CHAIR_MAP_DATA\s*=\s*(\{.*\});?\s*$", payload_js, flags=re.S)
    if not payload_match:
        raise HTTPException(status_code=500, detail=f"Fixed office viewer data not found: {office_key}")

    html = re.sub(
        r'<script\s+src="\./[^"]+payload[^"]*\.js"></script>',
        f"<script>{payload_js}</script>",
        html,
        count=1,
    )

    data = json.loads(payload_match.group(1))
    chair_values = decode_segment_values(str(data["chairSegsB64"]))
    slots: list[dict[str, object]] = []
    for index, chair in enumerate(data["chairs"]):
        slot_key, name, _kind, start, count = chair
        min_x = math.inf
        min_y = math.inf
        max_x = -math.inf
        max_y = -math.inf
        start_index = int(start)
        end_index = start_index + int(count)
        for item_index in range(start_index, end_index):
            offset = item_index * 4
            x1 = chair_values[offset] / 10
            y1 = chair_values[offset + 1] / 10
            x2 = chair_values[offset + 2] / 10
            y2 = chair_values[offset + 3] / 10
            min_x = min(min_x, x1, x2)
            min_y = min(min_y, y1, y2)
            max_x = max(max_x, x1, x2)
            max_y = max(max_y, y1, y2)
        slots.append(
            {
                "slot_key": str(slot_key),
                "label": str(slot_key),
                "x": round((min_x + max_x) / 2, 3),
                "y": round((min_y + max_y) / 2, 3),
                "rotation": 0.0,
                "layer_name": str(name),
            }
        )
    parsed = {
        "html": html,
        "data": data,
        "slots": slots,
    }
    _fixed_office_cache[office_key] = parsed
    return parsed


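# Layer-name matching examples for the predicate below:
#   is_chair_layer("CHAIR")     -> True   (exact alias)
#   is_chair_layer("_Chair")    -> True
#   is_chair_layer("OFF-CHAIR") -> True   (compacted name ends with "chair")
#   is_chair_layer("CHAIRS")    -> False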
def is_chair_layer(layer_name: str) -> bool:
    raw = layer_name.strip().lower()
    compact = raw.replace("-", "").replace("_", "").replace(" ", "")
    return raw in {"chair", "_chair", "-chair"} or compact.endswith("chair")


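# File signatures the sniffer below recognizes in the first 128 bytes:
#   "AutoCAD Binary DXF"            -> ("binary_dxf", hex preview)
#   "0\nSECTION" or "0\r\nSECTION"  -> ("ascii_dxf", hex preview)
#   leading b"AC10" (DWG version magic, e.g. AC1032) -> ("dwg_or_dwg_like", hex preview)
# The hex preview of the first 32 bytes is echoed back in upload error messages.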
def inspect_dxf_header(file_path: Path) -> tuple[str, str]:
    with file_path.open("rb") as source:
        header = source.read(128)
    header_text = header.decode("latin-1", errors="ignore").replace("\x00", "")
    preview = header[:32].hex(" ")

    if header_text.startswith("AutoCAD Binary DXF"):
        return ("binary_dxf", preview)
    if header_text.startswith("0\nSECTION") or header_text.startswith("0\r\nSECTION"):
        return ("ascii_dxf", preview)
    if header.startswith(b"AC10"):
        return ("dwg_or_dwg_like", preview)
    return ("unknown", preview)


def iter_render_entities(entity: ezdxf.entities.DXFGraphic, inherited_layer: str | None = None, depth: int = 0) -> list[ezdxf.entities.DXFGraphic]:
    if depth > 6:
        return []
    entity_type = entity.dxftype()
    current_layer = inherited_layer or entity.dxf.layer
    if entity_type == "INSERT":
        expanded: list[ezdxf.entities.DXFGraphic] = []
        try:
            for child in entity.virtual_entities():
                child_layer = child.dxf.layer
                if child_layer == "0":
                    child.dxf.layer = current_layer
                expanded.extend(iter_render_entities(child, inherited_layer=current_layer, depth=depth + 1))
        except Exception:
            return []
        return expanded
    if inherited_layer and entity.dxf.layer == "0":
        entity.dxf.layer = inherited_layer
    return [entity]


def get_entity_points(entity: ezdxf.entities.DXFGraphic) -> list[tuple[float, float]]:
    entity_type = entity.dxftype()
    if entity_type == "LINE":
        return [
            (float(entity.dxf.start.x), float(entity.dxf.start.y)),
            (float(entity.dxf.end.x), float(entity.dxf.end.y)),
        ]
    if entity_type == "LWPOLYLINE":
        return [(float(point[0]), float(point[1])) for point in entity.get_points("xy")]
    if entity_type == "POLYLINE":
        return [(float(vertex.dxf.location.x), float(vertex.dxf.location.y)) for vertex in entity.vertices]
    if entity_type == "CIRCLE":
        center = entity.dxf.center
        radius = float(entity.dxf.radius)
        return [
            (float(center.x - radius), float(center.y - radius)),
            (float(center.x + radius), float(center.y + radius)),
        ]
    if entity_type == "ARC":
        center = entity.dxf.center
        radius = float(entity.dxf.radius)
        return [
            (float(center.x - radius), float(center.y - radius)),
            (float(center.x + radius), float(center.y + radius)),
        ]
    if entity_type == "POINT":
        location = entity.dxf.location
        return [(float(location.x), float(location.y))]
    if entity_type == "SPLINE":
        try:
            return [(float(point[0]), float(point[1])) for point in entity.flattening(2)]
        except Exception:
            return []
    if entity_type == "ELLIPSE":
        try:
            return [(float(point[0]), float(point[1])) for point in entity.flattening(2)]
        except Exception:
            center = entity.dxf.center
            major_axis = entity.dxf.major_axis
            ratio = float(entity.dxf.ratio)
            radius_x = math.hypot(float(major_axis.x), float(major_axis.y))
            radius_y = radius_x * ratio
            return [
                (float(center.x - radius_x), float(center.y - radius_y)),
                (float(center.x + radius_x), float(center.y + radius_y)),
            ]
    if entity_type == "INSERT":
        insert = entity.dxf.insert
        return [(float(insert.x), float(insert.y))]
    return []


def get_entity_center(entity: ezdxf.entities.DXFGraphic) -> tuple[float, float] | None:
    points = get_entity_points(entity)
    if not points:
        return None
    min_x = min(point[0] for point in points)
    max_x = max(point[0] for point in points)
    min_y = min(point[1] for point in points)
    max_y = max(point[1] for point in points)
    return ((min_x + max_x) / 2.0, (min_y + max_y) / 2.0)


def get_entity_bounds(entity: ezdxf.entities.DXFGraphic) -> tuple[float, float, float, float] | None:
    points = get_entity_points(entity)
    if not points:
        return None
    min_x = min(point[0] for point in points)
    max_x = max(point[0] for point in points)
    min_y = min(point[1] for point in points)
    max_y = max(point[1] for point in points)
    return (min_x, min_y, max_x, max_y)


def compute_bounds_from_points(points: list[tuple[float, float]]) -> tuple[float, float, float, float]:
    min_x = min(point[0] for point in points)
    max_x = max(point[0] for point in points)
    min_y = min(point[1] for point in points)
    max_y = max(point[1] for point in points)
    return (min_x, min_y, max(max_x - min_x, 1.0), max(max_y - min_y, 1.0))


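# Nearest-rank percentile over the sorted values (no interpolation), e.g.:
#   percentile([3.0, 1.0, 2.0], 0.5) -> 2.0
#   percentile([1.0, 2.0], 0.0)      -> 1.0
#   percentile([], 0.98)             -> 0.0   (empty input short-circuits)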
def percentile(values: list[float], ratio: float) -> float:
    if not values:
        return 0.0
    ordered = sorted(values)
    index = max(0, min(len(ordered) - 1, round((len(ordered) - 1) * ratio)))
    return float(ordered[index])


def compute_focus_bounds(slot_points: list[tuple[float, float]]) -> tuple[float, float, float, float]:
    x_values = [point[0] for point in slot_points]
    y_values = [point[1] for point in slot_points]
    min_x = percentile(x_values, 0.02)
    max_x = percentile(x_values, 0.98)
    min_y = percentile(y_values, 0.02)
    max_y = percentile(y_values, 0.98)
    width = max(max_x - min_x, 1.0)
    height = max(max_y - min_y, 1.0)
    pad_x = max(width * 0.08, 500.0)
    pad_y = max(height * 0.08, 500.0)
    return (min_x - pad_x, min_y - pad_y, max_x + pad_x, max_y + pad_y)


def get_entity_max_span(entity: ezdxf.entities.DXFGraphic) -> float:
    bounds = get_entity_bounds(entity)
    if bounds is None:
        return 0.0
    min_x, min_y, max_x, max_y = bounds
    return max(max_x - min_x, max_y - min_y)


def compute_outline_bounds(entities: list[ezdxf.entities.DXFGraphic]) -> tuple[float, float, float, float] | None:
    outline_layers = {"0", "0-COL", "WID", "XH", "CO-DOOR", "CO-DO-FR", "문", "회의실"}
    outline_points: list[tuple[float, float]] = []
    for entity in entities:
        if is_chair_layer(entity.dxf.layer):
            continue
        if entity.dxf.layer not in outline_layers:
            continue
        if get_entity_max_span(entity) < 3000:
            continue
        outline_points.extend(get_entity_points(entity))
    if not outline_points:
        return None
    min_x, min_y, width, height = compute_bounds_from_points(outline_points)
    pad_x = max(width * 0.025, 300.0)
    pad_y = max(height * 0.025, 300.0)
    return (min_x - pad_x, min_y - pad_y, min_x + width + pad_x, min_y + height + pad_y)


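# Standard axis-aligned bounding-box overlap test; boxes that merely touch an
# edge still count as intersecting:
#   bounds_intersect((0, 0, 10, 10), (10, 10, 20, 20)) -> True
#   bounds_intersect((0, 0, 10, 10), (11, 0, 20, 10))  -> False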
def bounds_intersect(bounds: tuple[float, float, float, float], focus_bounds: tuple[float, float, float, float]) -> bool:
    min_x, min_y, max_x, max_y = bounds
    focus_min_x, focus_min_y, focus_max_x, focus_max_y = focus_bounds
    return not (
        max_x < focus_min_x
        or min_x > focus_max_x
        or max_y < focus_min_y
        or min_y > focus_max_y
    )


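# Note the y-negation in every SVG emitter below: DXF model space is y-up while
# SVG is y-down, so coordinates are written as -y and the final viewBox in
# build_dxf_preview_svg starts at -max_y to match.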
def line_svg(points: list[tuple[float, float]], css_class: str = "seatmap-dxf-entity") -> str:
    if len(points) < 2:
        return ""
    coordinates = " ".join(f"{x:.2f},{-y:.2f}" for x, y in points)
    return (
        f'<polyline class="{css_class}" points="{coordinates}" fill="none" '
        'stroke-linejoin="round" stroke-linecap="round" />'
    )


def circle_svg(
    center_x: float,
    center_y: float,
    radius: float,
    stroke: str = "#475569",
    fill: str = "none",
    css_class: str = "seatmap-dxf-entity",
) -> str:
    return (
        f'<circle class="{css_class}" cx="{center_x:.2f}" cy="{-center_y:.2f}" r="{radius:.2f}" '
        f'stroke="{stroke}" fill="{fill}" />'
    )


def build_dxf_preview_svg(
    entities: list[ezdxf.entities.DXFGraphic],
    bounds: tuple[float, float, float, float],
) -> str:
    min_x, min_y, width, height = bounds
    max_y = min_y + height
    svg_parts: list[str] = []

    for entity in entities:
        layer_name = entity.dxf.layer
        is_chair = is_chair_layer(layer_name)
        css_class = "seatmap-dxf-chair-entity" if is_chair else "seatmap-dxf-entity"
        entity_type = entity.dxftype()
        if entity_type in {"LINE", "LWPOLYLINE", "POLYLINE", "SPLINE", "ELLIPSE"}:
            svg = line_svg(get_entity_points(entity), css_class=css_class)
            if svg:
                svg_parts.append(svg)
        elif entity_type == "CIRCLE":
            center = entity.dxf.center
            svg_parts.append(
                circle_svg(
                    float(center.x),
                    float(center.y),
                    float(entity.dxf.radius),
                    fill="none",
                    css_class=css_class,
                )
            )
        elif entity_type == "ARC":
            center = entity.dxf.center
            radius = float(entity.dxf.radius)
            start_angle = math.radians(float(entity.dxf.start_angle))
            end_angle = math.radians(float(entity.dxf.end_angle))
            start_x = float(center.x) + radius * math.cos(start_angle)
            start_y = float(center.y) + radius * math.sin(start_angle)
            end_x = float(center.x) + radius * math.cos(end_angle)
            end_y = float(center.y) + radius * math.sin(end_angle)
            large_arc = 1 if abs(float(entity.dxf.end_angle) - float(entity.dxf.start_angle)) > 180 else 0
            svg_parts.append(
                f'<path class="{css_class}" d="M {start_x:.2f} {-start_y:.2f} '
                f'A {radius:.2f} {radius:.2f} 0 {large_arc} 0 {end_x:.2f} {-end_y:.2f}" fill="none" />'
            )

    view_box = f"{min_x:.2f} {-max_y:.2f} {max(width, 1.0):.2f} {max(height, 1.0):.2f}"
    return (
        f'<svg class="seatmap-preview-svg" viewBox="{view_box}" xmlns="http://www.w3.org/2000/svg" preserveAspectRatio="xMidYMid meet">'
        '<rect width="100%" height="100%" fill="#ffffff" />'
        + "".join(svg_parts)
        + "</svg>"
    )


def load_dxf_document(file_path: Path) -> ezdxf.document.Drawing:
    try:
        return ezdxf.readfile(file_path)
    except OSError:
        try:
            document, _ = recover.readfile(file_path)
            return document
        except Exception as exc:
            kind, preview = inspect_dxf_header(file_path)
            if kind == "binary_dxf":
                raise HTTPException(
                    status_code=400,
                    detail=f"Binary DXF로 보이지만 해석에 실패했습니다. 가능하면 ASCII DXF로 다시 저장해 업로드하세요. 헤더={preview}",
                ) from exc
            if kind == "dwg_or_dwg_like":
                raise HTTPException(
                    status_code=400,
                    detail=f"업로드한 파일은 DWG 계열 헤더(AC10xx)로 보입니다. DWG가 아니라 ASCII DXF로 다시 저장해 업로드하세요. 헤더={preview}",
                ) from exc
            if kind == "ascii_dxf":
                raise HTTPException(
                    status_code=400,
                    detail=f"ASCII DXF로 보이지만 구조를 해석하지 못했습니다. 도면을 다른 DXF 버전으로 다시 저장해보세요. 헤더={preview}",
                ) from exc
            raise HTTPException(
                status_code=400,
                detail=f"업로드한 파일 형식을 판별하지 못했습니다. 확장자만 dxf인 파일일 수 있습니다. 헤더={preview}",
            ) from exc


def entity_to_segments(entity: ezdxf.entities.DXFGraphic, arc_steps: int = 24) -> list[tuple[float, float, float, float]]:
    entity_type = entity.dxftype()
    points = get_entity_points(entity)
    if entity_type in {"LINE", "LWPOLYLINE", "POLYLINE", "SPLINE", "ELLIPSE"} and len(points) >= 2:
        return [
            (float(left[0]), float(left[1]), float(right[0]), float(right[1]))
            for left, right in zip(points[:-1], points[1:])
        ]

    if entity_type == "CIRCLE":
        center = entity.dxf.center
        radius = float(entity.dxf.radius)
        samples = []
        for index in range(arc_steps + 1):
            angle = (math.tau * index) / arc_steps
            samples.append(
                (
                    float(center.x) + radius * math.cos(angle),
                    float(center.y) + radius * math.sin(angle),
                )
            )
        return [
            (float(left[0]), float(left[1]), float(right[0]), float(right[1]))
            for left, right in zip(samples[:-1], samples[1:])
        ]

    if entity_type == "ARC":
        center = entity.dxf.center
        radius = float(entity.dxf.radius)
        start_angle = math.radians(float(entity.dxf.start_angle))
        end_angle = math.radians(float(entity.dxf.end_angle))
        if end_angle <= start_angle:
            end_angle += math.tau
        samples = []
        for index in range(arc_steps + 1):
            ratio = index / arc_steps
            angle = start_angle + (end_angle - start_angle) * ratio
            samples.append(
                (
                    float(center.x) + radius * math.cos(angle),
                    float(center.y) + radius * math.sin(angle),
                )
            )
        return [
            (float(left[0]), float(left[1]), float(right[0]), float(right[1]))
            for left, right in zip(samples[:-1], samples[1:])
        ]

    return []


def build_dxf_artifacts(file_path: Path) -> tuple[dict[str, object], list[dict[str, object]], dict[str, object]]:
    document = load_dxf_document(file_path)
    modelspace = document.modelspace()
    base_entities = [entity for entity in modelspace if entity.dxftype() in {"LINE", "LWPOLYLINE", "POLYLINE", "CIRCLE", "ARC", "INSERT", "SPLINE", "ELLIPSE"}]
    all_entities: list[ezdxf.entities.DXFGraphic] = []
    for entity in base_entities:
        all_entities.extend(iter_render_entities(entity))
    chair_entities: list[ezdxf.entities.DXFGraphic] = []
    chair_points: list[tuple[float, float]] = []
    for entity in all_entities:
        if is_chair_layer(entity.dxf.layer):
            chair_entities.append(entity)
            chair_points.extend(get_entity_points(entity))

    if not chair_entities:
        raise HTTPException(status_code=400, detail="DXF 파일에서 chair 계열 레이어를 찾지 못했습니다.")

    if not chair_points:
        raise HTTPException(status_code=400, detail="DXF 좌표를 해석하지 못했습니다.")

    slots: list[dict[str, object]] = []
    for index, entity in enumerate(sorted(chair_entities, key=lambda item: (-(get_entity_center(item) or (0.0, 0.0))[1], (get_entity_center(item) or (0.0, 0.0))[0]))):
        center = get_entity_center(entity)
        if center is None:
            continue
        slots.append(
            {
                "slot_key": entity.dxf.handle,
                "label": compute_slot_label(index),
                "x": round(float(center[0]), 3),
                "y": round(float(center[1]), 3),
                "rotation": float(getattr(entity.dxf, "rotation", 0.0) or 0.0),
                "layer_name": entity.dxf.layer,
            }
        )

    if not slots:
        raise HTTPException(status_code=400, detail="chair 레이어에서 좌석 위치를 추출하지 못했습니다.")

    slot_points = [(float(slot["x"]), float(slot["y"])) for slot in slots]
    focus_bounds = compute_outline_bounds(all_entities) or compute_focus_bounds(slot_points)
    visible_entities: list[ezdxf.entities.DXFGraphic] = []
    visible_points: list[tuple[float, float]] = []
    for entity in all_entities:
        entity_bounds = get_entity_bounds(entity)
        if entity_bounds is None:
            continue
        if bounds_intersect(entity_bounds, focus_bounds):
            visible_entities.append(entity)
            visible_points.extend(get_entity_points(entity))

    if not visible_entities or not visible_points:
        visible_entities = all_entities
        visible_points = chair_points

    focus_min_x, focus_min_y, focus_max_x, focus_max_y = focus_bounds
    min_x = focus_min_x
    min_y = focus_min_y
    width = max(focus_max_x - focus_min_x, 1.0)
    height = max(focus_max_y - focus_min_y, 1.0)

    preview_svg = build_dxf_preview_svg(visible_entities, (min_x, min_y, width, height))
    metadata = {
        "source_type": "dxf",
        "view_box_min_x": round(min_x, 3),
        "view_box_min_y": round(min_y, 3),
        "view_box_width": round(width, 3),
        "view_box_height": round(height, 3),
        "preview_svg": preview_svg,
        "grid_rows": 1,
        "grid_cols": 1,
        "image_width": None,
        "image_height": None,
        "cell_gap": 0,
    }

    slot_map = {str(slot["slot_key"]): slot for slot in slots}
    chair_segments: list[list[float]] = []
    chair_items: list[dict[str, object]] = []
    background_segments: list[list[float]] = []

    for entity in visible_entities:
        segments = entity_to_segments(entity)
        if not segments:
            continue

        if is_chair_layer(entity.dxf.layer):
            slot = slot_map.get(str(entity.dxf.handle))
            if not slot:
                continue
            start_index = len(chair_segments)
            min_seg_x = math.inf
            min_seg_y = math.inf
            max_seg_x = -math.inf
            max_seg_y = -math.inf
            for x1, y1, x2, y2 in segments:
                chair_segments.append([round(x1, 3), round(y1, 3), round(x2, 3), round(y2, 3)])
                min_seg_x = min(min_seg_x, x1, x2)
                min_seg_y = min(min_seg_y, y1, y2)
                max_seg_x = max(max_seg_x, x1, x2)
                max_seg_y = max(max_seg_y, y1, y2)
            chair_items.append(
                {
                    "key": str(slot["slot_key"]),
                    "label": slot["label"],
                    "kind": "chair",
                    "start": start_index,
                    "count": len(segments),
                    "min_x": round(min_seg_x, 3),
                    "min_y": round(min_seg_y, 3),
                    "max_x": round(max_seg_x, 3),
                    "max_y": round(max_seg_y, 3),
                }
            )
            continue

        for x1, y1, x2, y2 in segments:
            background_segments.append([round(x1, 3), round(y1, 3), round(x2, 3), round(y2, 3)])

    viewer_data = {
        "meta": {
            "background_segment_count": len(background_segments),
            "chair_count": len(chair_items),
            "chair_segment_count": len(chair_segments),
            "world": {
                "min_x": round(min_x, 3),
                "min_y": round(min_y, 3),
                "max_x": round(min_x + width, 3),
                "max_y": round(min_y + height, 3),
                "width": round(width, 3),
                "height": round(height, 3),
            },
        },
        "background_segments": background_segments,
        "chair_segments": chair_segments,
        "chairs": chair_items,
    }
    return metadata, slots, viewer_data


def parse_dxf_layout(file_path: Path) -> tuple[dict[str, object], list[dict[str, object]]]:
    metadata, slots, _viewer_data = build_dxf_artifacts(file_path)
    return metadata, slots


def fetch_seat_layout(seat_map_id: int) -> dict[str, object]:
    seat_map = fetch_seat_map(seat_map_id)
    if seat_map is None:
        raise HTTPException(status_code=404, detail="Seat map not found.")

    with get_conn() as conn:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT m.id, m.name, m.company, m.rank, m.role, m.department, m.grp, m.division,
                       m.team, m.cell, m.work_status, m.work_time, m.phone, m.email,
                       m.seat_label AS member_seat_label, m.photo_url, m.sort_order
                FROM members m
                ORDER BY m.sort_order ASC, m.id ASC
                """
            )
            members = cur.fetchall()
            cur.execute(
                """
                SELECT id, slot_key, label, x, y, rotation, layer_name
                FROM seat_slots
                WHERE seat_map_id = %s
                ORDER BY label ASC, id ASC
                """,
                (seat_map_id,),
            )
            slots = cur.fetchall()
            cur.execute(
                """
                SELECT sp.member_id, sp.row_index, sp.col_index, sp.seat_label,
                       sp.seat_slot_id,
                       m.name, m.company, m.rank, m.role, m.department, m.grp, m.division,
                       m.team, m.cell, m.work_status, m.work_time, m.phone, m.email,
                       m.photo_url, m.sort_order
                FROM seat_positions sp
                JOIN members m ON m.id = sp.member_id
                WHERE sp.seat_map_id = %s
                ORDER BY sp.row_index ASC, sp.col_index ASC, m.sort_order ASC, m.id ASC
                """,
                (seat_map_id,),
            )
            placements = cur.fetchall()
    viewer_data: dict[str, object] | None = None
    office_key = str(seat_map.get("source_url") or FIXED_OFFICE_SOURCE_KEY)
    fixed_office = FIXED_OFFICE_CONFIGS.get(office_key)
    if seat_map["source_type"] == "fixed_html" and fixed_office:
        template = parse_fixed_office_template(office_key)
        viewer_data = {
            "meta": {
                "chair_count": len(template["slots"]),
                "office": str(fixed_office["name"]),
            }
        }
    elif seat_map["source_type"] == "dxf" and seat_map.get("source_url"):
        filename = Path(str(seat_map["source_url"])).name
        source_path = UPLOAD_DIR / filename
        if source_path.exists():
            try:
                _metadata, _slots, viewer_data = build_dxf_artifacts(source_path)
            except Exception:
                viewer_data = None
    return {
        "seat_map": seat_map,
        "members": members,
        "slots": slots,
        "placements": placements,
        "viewer_data": viewer_data,
    }


def build_center_chair_viewer_html(layout: dict[str, object]) -> str:
    slot_key_by_id = {
        int(slot["id"]): str(slot["slot_key"])
        for slot in layout.get("slots", [])
        if slot.get("id") is not None and slot.get("slot_key") is not None
    }
    members_by_id = {
        int(member["id"]): member
        for member in layout.get("members", [])
        if member.get("id") is not None
    }
    placed_keys: list[str] = []
    assignment_items: list[dict[str, object]] = []
    for placement in layout.get("placements", []):
        slot_id = placement.get("seat_slot_id")
        if slot_id is None:
            continue
        slot_key = slot_key_by_id.get(int(slot_id))
        if slot_key:
            placed_keys.append(slot_key)
            member = members_by_id.get(int(placement.get("member_id") or 0))
            if member:
                assignment_items.append(
                    {
                        "key": slot_key,
                        "member_id": int(member["id"]),
                        "name": str(member.get("name") or "-"),
                        "rank": str(member.get("rank") or "-"),
                    }
                )

    seat_map = layout.get("seat_map") or {}
    placed_literal = json.dumps(sorted(set(placed_keys)), ensure_ascii=False, separators=(",", ":"))
    assignments_literal = json.dumps(assignment_items, ensure_ascii=False, separators=(",", ":"))
    if seat_map.get("source_type") == "fixed_html":
        office_key = str(seat_map.get("source_url") or FIXED_OFFICE_SOURCE_KEY)
        html = parse_fixed_office_template(office_key)["html"]
    else:
        viewer_data = layout.get("viewer_data")
        if not isinstance(viewer_data, dict):
            raise HTTPException(status_code=404, detail="DXF viewer data not found.")
        template_path = Path(__file__).with_name("center_chair_viewer_template.html")
        if not template_path.exists():
            raise HTTPException(status_code=500, detail="Viewer template not found.")
        html = template_path.read_text(encoding="utf-8")
        data_literal = json.dumps(viewer_data, ensure_ascii=False, separators=(",", ":"))
        html = re.sub(
            r"const DATA = .*?;\n\s*function decodeSegments",
            f"const DATA = {data_literal};\n function decodeSegments",
            html,
            count=1,
            flags=re.S,
        )
    html = html.replace(
        'const STORAGE_KEY = "ptc-chair-selection";\n const placed = new Set(JSON.parse(localStorage.getItem(STORAGE_KEY) || "[]"));',
        f"const STORAGE_KEY = null;\n const placed = new Set({placed_literal});",
        1,
    )
    html = html.replace(
        """ ctx.strokeStyle = selected
        ? "rgba(220, 38, 38, 0.98)"
        : active
          ? "rgba(15, 118, 110, 0.98)"
          : chair.kind === "group"
            ? "rgba(16, 134, 149, 0.74)"
            : "rgba(21, 149, 142, 0.8)";
      ctx.lineWidth = (selected ? 2.6 : active ? 2.1 : baseWidth) / camera.scale;""",
        """ ctx.strokeStyle = selected
        ? "rgba(220, 38, 38, 0.98)"
        : "rgba(15, 118, 110, 0.88)";
      ctx.lineWidth = (selected ? 2.6 : active ? 2.0 : 1.6) / camera.scale;""",
        1,
    )
    html = html.replace(
        "function persistPlaced() {\n localStorage.setItem(STORAGE_KEY, JSON.stringify([...placed]));\n }",
        "function persistPlaced() {\n return;\n }",
        1,
    )
    html = html.replace(
        """ window.addEventListener("pointerup", (event) => {
      if (dragging && dragStart) {
        const move = Math.hypot(event.clientX - dragStart.x, event.clientY - dragStart.y);
        if (move < 4) {
          const rect = canvas.getBoundingClientRect();
          const picked = pickChair(event.clientX - rect.left, event.clientY - rect.top);
          if (picked) {
            if (placed.has(picked.key)) placed.delete(picked.key);
            else placed.add(picked.key);
            persistPlaced();
          }
        }
      }
      dragging = false;
      dragStart = null;
      canvas.classList.remove("dragging");
      requestDraw();
    });""",
        """ window.addEventListener("pointerup", () => {
      dragging = false;
      dragStart = null;
      canvas.classList.remove("dragging");
      requestDraw();
    });""",
        1,
    )
    html = html.replace(
        """ document.getElementById("clear-btn").addEventListener("click", () => {
      placed.clear();
      persistPlaced();
      requestDraw();
    });""",
        """ document.getElementById("clear-btn").addEventListener("click", () => {
      requestDraw();
    });""",
        1,
    )
    bridge_script = """
<style>
  #fit-btn { display: none !important; }
  #clear-btn { display: none !important; }
  .seat-popup {
    position: absolute;
    min-width: 190px;
    padding: 12px 14px;
    border-radius: 16px;
    background: rgba(17,24,39,0.96);
    color: white;
    box-shadow: 0 18px 36px rgba(15,23,42,0.22);
    z-index: 4;
  }
  .seat-popup[hidden] { display: none; }
  .seat-popup strong { display: block; margin-bottom: 6px; font-size: 14px; }
  .seat-popup div { font-size: 12px; line-height: 1.45; color: rgba(255,255,255,0.82); }
  .seat-popup button {
    margin-top: 10px;
    width: 100%;
    border: none;
    border-radius: 12px;
    padding: 8px 10px;
    font: inherit;
    font-weight: 700;
    cursor: pointer;
    color: white;
    background: rgba(220, 38, 38, 0.98);
  }
</style>
<script>
  const seatAssignments = new Map();
  let selectedChairKey = null;
  let viewerMode = "default";
  const popup = document.createElement("div");
  popup.className = "seat-popup";
  popup.hidden = true;
  document.querySelector(".viewer").appendChild(popup);

  function getAssignment(key) {
    return seatAssignments.get(String(key)) || null;
  }

  function hideSeatPopup() {
    popup.hidden = true;
    popup.innerHTML = "";
  }

  function setViewerMode(mode) {
    viewerMode = mode === "compact" || mode === "readonly" ? mode : "default";
    const head = document.querySelector(".viewer-head");
    const actions = document.querySelector(".viewer-actions");
    if (head) head.style.display = viewerMode === "compact" ? "none" : "";
    if (actions) actions.style.display = viewerMode !== "default" ? "none" : "";
    if (viewerMode !== "default") {
      hideSeatPopup();
      selectedChairKey = null;
    }
  }

  function showSeatPopup(chairKey, x, y) {
    const assignment = getAssignment(chairKey);
    if (!assignment) {
      hideSeatPopup();
      return;
    }
    popup.innerHTML = `
      <strong>${assignment.name}</strong>
      <div>직급: ${assignment.rank || "-"}</div>
      <div>상태: 배치완료</div>
      ${viewerMode === "default" ? `<button type="button" data-seatmap-delete="${chairKey}">자리 비우기</button>` : ""}
    `;
    popup.style.left = `${x + 18}px`;
    popup.style.top = `${y + 18}px`;
    popup.hidden = false;
  }

  function focusChair(chairKey, padding = 2200) {
    const chair = chairGeometry.find((item) => String(item.key) === String(chairKey));
    if (!chair) return;
    const rect = canvas.getBoundingClientRect();
    const pad = 24;
    const minX = chair.minX - padding;
    const maxX = chair.maxX + padding;
    const minY = chair.minY - padding;
    const maxY = chair.maxY + padding;
    const width = Math.max(1, maxX - minX);
    const height = Math.max(1, maxY - minY);
    camera.scale = Math.max(0.002, Math.min(2, Math.min((rect.width - pad * 2) / width, (rect.height - pad * 2) / height)));
    camera.offsetX = pad - minX * camera.scale + (rect.width - pad * 2 - width * camera.scale) / 2;
    camera.offsetY = pad - (world.maxY - maxY + world.minY) * camera.scale + (rect.height - pad * 2 - height * camera.scale) / 2;
    requestDraw();
  }

  function setAssignments(items) {
    seatAssignments.clear();
    placed.clear();
    (items || []).forEach((item) => {
      const key = String(item.key || "");
      if (!key) return;
      const assignment = {
        key,
        name: item.name || "-",
        rank: item.rank || "-",
        memberId: Number(item.member_id || 0),
      };
      seatAssignments.set(key, assignment);
      placed.add(key);
    });
    if (selectedChairKey && !seatAssignments.has(selectedChairKey)) {
      selectedChairKey = null;
      hideSeatPopup();
    }
    if (typeof requestDraw === "function") requestDraw();
  }

  renderTooltip = function renderTooltipOverride() {
    if (!hovered) {
      tooltip.classList.remove("visible");
      hoverChip.textContent = "chair hover: none";
      return;
    }
    const assignment = getAssignment(hovered.key);
    hoverChip.textContent = assignment
      ? `chair hover: ${assignment.name}`
      : "chair hover: 공석";
    tooltip.innerHTML = assignment
      ? `
        <strong>${assignment.name}</strong>
        <div>직급: ${assignment.rank || "-"}</div>
        <div>상태: 배치완료</div>
      `
      : `
        <strong>공석</strong>
        <div>좌석: ${hovered.key}</div>
        <div>상태: 미배치</div>
      `;
    tooltip.style.left = `${pointer.x + 14}px`;
    tooltip.style.top = `${pointer.y + 14}px`;
    tooltip.classList.add("visible");
  };

  const originalDraw = draw;
  draw = function drawWithAssignments() {
    originalDraw();
    if (!seatAssignments.size) return;
    const rect = canvas.getBoundingClientRect();
    ctx.setTransform(pixelRatio, 0, 0, pixelRatio, 0, 0);
    ctx.textBaseline = "middle";
    for (const chair of chairGeometry) {
      const assignment = getAssignment(chair.key);
      if (!assignment) continue;
      const center = worldToScreen((chair.minX + chair.maxX) / 2, (chair.minY + chair.maxY) / 2);
      if (center.x < -120 || center.x > rect.width + 120 || center.y < -50 || center.y > rect.height + 50) continue;
      const primary = `${assignment.name}`;
      const secondary = `${assignment.rank || "-"}`;
      ctx.font = "700 12px Pretendard, sans-serif";
      const primaryWidth = ctx.measureText(primary).width;
      ctx.font = "600 10px Pretendard, sans-serif";
      const secondaryWidth = ctx.measureText(secondary).width;
      const boxWidth = Math.max(primaryWidth, secondaryWidth) + 20;
      const boxHeight = 34;
      const boxX = center.x - boxWidth / 2;
      const boxY = center.y - 46;
      ctx.fillStyle = "rgba(255,255,255,0.96)";
      ctx.strokeStyle = "rgba(220,38,38,0.18)";
      ctx.lineWidth = 1;
      ctx.beginPath();
      ctx.roundRect(boxX, boxY, boxWidth, boxHeight, 10);
      ctx.fill();
      ctx.stroke();
      ctx.fillStyle = "#111827";
      ctx.font = "700 12px Pretendard, sans-serif";
      ctx.fillText(primary, boxX + 10, boxY + 12);
      ctx.fillStyle = "#6b7280";
      ctx.font = "600 10px Pretendard, sans-serif";
      ctx.fillText(secondary, boxX + 10, boxY + 25);
    }
  };

  window.__mhSeatmap = {
    getCanvas() { return document.getElementById("canvas"); },
    pickChairAt(x, y) { return typeof pickChair === "function" ? pickChair(x, y) : null; },
    setPlaced(keys) {
      placed.clear();
      (keys || []).forEach((key) => placed.add(String(key)));
      if (typeof requestDraw === "function") requestDraw();
    },
    setAssignments,
    focusChair,
    setViewerMode,
  };

  setAssignments(__INITIAL_ASSIGNMENTS__);

  canvas.addEventListener("click", (event) => {
    if (viewerMode === "compact") return;
    const rect = canvas.getBoundingClientRect();
    const picked = window.__mhSeatmap.pickChairAt(
      event.clientX - rect.left,
      event.clientY - rect.top,
    );
    if (!picked) {
      selectedChairKey = null;
      hideSeatPopup();
      if (typeof requestDraw === "function") requestDraw();
      return;
    }
    selectedChairKey = seatAssignments.has(String(picked.key)) ? String(picked.key) : null;
    if (selectedChairKey) showSeatPopup(selectedChairKey, event.clientX - rect.left, event.clientY - rect.top);
    else hideSeatPopup();
    if (typeof requestDraw === "function") requestDraw();
  });

  popup.addEventListener("click", (event) => {
    const button = event.target.closest("[data-seatmap-delete]");
    if (!button) return;
    const slotKey = String(button.dataset.seatmapDelete || "");
    if (!slotKey) return;
    selectedChairKey = null;
    hideSeatPopup();
    window.parent.postMessage({ type: "seatmap-clear-slot", key: slotKey }, window.location.origin);
  });

  canvas.addEventListener("contextmenu", (event) => {
    event.preventDefault();
  });

  window.addEventListener("message", (event) => {
    const data = event.data;
    if (!data || typeof data !== "object") return;
    if (data.type === "seatmap-set-placed") {
      window.__mhSeatmap.setPlaced(Array.isArray(data.keys) ? data.keys : []);
    }
    if (data.type === "seatmap-set-assignments") {
      window.__mhSeatmap.setAssignments(Array.isArray(data.items) ? data.items : []);
    }
    if (data.type === "seatmap-focus-chair") {
      window.__mhSeatmap.focusChair(String(data.key || ""), Number(data.padding || 2200));
    }
    if (data.type === "seatmap-set-mode") {
      window.__mhSeatmap.setViewerMode(String(data.mode || "default"));
    }
  });
</script>
"""
    bridge_script = bridge_script.replace("__INITIAL_ASSIGNMENTS__", assignments_literal, 1)
    html = html.replace("</body>", f"{bridge_script}\n</body>", 1)
    return html


def save_seat_layout(seat_map_id: int, payload: SeatLayoutPayload) -> list[dict[str, object]]:
    seat_map = fetch_seat_map(seat_map_id)
    if seat_map is None:
        raise HTTPException(status_code=404, detail="Seat map not found.")

    member_ids: list[int] = []
    occupied_cells: set[tuple[int, int]] = set()
    occupied_slots: set[int] = set()
    requires_slot = seat_map["source_type"] in {"dxf", "fixed_html"}
    for item in payload.placements:
        if requires_slot:
            if item.seat_slot_id is None:
                raise HTTPException(status_code=400, detail="고정 도면 자리배치도는 seat_slot_id가 필요합니다.")
            if item.seat_slot_id in occupied_slots:
                raise HTTPException(status_code=400, detail="같은 좌석에 둘 이상의 구성원을 배치할 수 없습니다.")
            occupied_slots.add(item.seat_slot_id)
        else:
            if item.row_index >= int(seat_map["grid_rows"]) or item.col_index >= int(seat_map["grid_cols"]):
                raise HTTPException(status_code=400, detail="좌표가 자리배치도 범위를 벗어났습니다.")
            cell_key = (item.row_index, item.col_index)
            if cell_key in occupied_cells:
                raise HTTPException(status_code=400, detail="같은 칸에 둘 이상의 구성원을 배치할 수 없습니다.")
            occupied_cells.add(cell_key)
        member_ids.append(item.member_id)

    if len(member_ids) != len(set(member_ids)):
        raise HTTPException(status_code=400, detail="같은 구성원을 중복 배치할 수 없습니다.")

    with get_conn() as conn:
        with conn.cursor() as cur:
            if member_ids:
                cur.execute("SELECT id FROM members WHERE id = ANY(%s)", (member_ids,))
                existing_ids = {int(row["id"]) for row in cur.fetchall()}
                missing_ids = sorted(set(member_ids) - existing_ids)
                if missing_ids:
                    raise HTTPException(status_code=400, detail=f"존재하지 않는 구성원 ID가 포함되어 있습니다: {missing_ids}")

                if requires_slot:
                    slot_ids = sorted(occupied_slots)
                    cur.execute("SELECT id FROM seat_slots WHERE seat_map_id = %s AND id = ANY(%s)", (seat_map_id, slot_ids))
                    existing_slot_ids = {int(row["id"]) for row in cur.fetchall()}
                    missing_slot_ids = sorted(set(slot_ids) - existing_slot_ids)
                    if missing_slot_ids:
                        raise HTTPException(status_code=400, detail=f"존재하지 않는 좌석 슬롯 ID가 포함되어 있습니다: {missing_slot_ids}")
                    cur.execute("SELECT id, label FROM seat_slots WHERE seat_map_id = %s", (seat_map_id,))
                    slot_label_map = {int(row["id"]): row["label"] for row in cur.fetchall()}
                else:
                    slot_label_map = {}

                cur.execute("DELETE FROM seat_positions WHERE seat_map_id = %s AND NOT (member_id = ANY(%s))", (seat_map_id, member_ids))
                for item in payload.placements:
                    seat_label = item.seat_label.strip() or (
                        slot_label_map.get(int(item.seat_slot_id), f"SLOT-{item.seat_slot_id}")
                        if requires_slot and item.seat_slot_id is not None
                        else compute_seat_label(item.row_index, item.col_index)
                    )
                    cur.execute(
                        """
                        INSERT INTO seat_positions (member_id, seat_map_id, seat_slot_id, row_index, col_index, seat_label, updated_at)
                        VALUES (%s, %s, %s, %s, %s, %s, NOW())
                        ON CONFLICT (member_id) DO UPDATE
                        SET seat_map_id = EXCLUDED.seat_map_id,
                            seat_slot_id = EXCLUDED.seat_slot_id,
                            row_index = EXCLUDED.row_index,
                            col_index = EXCLUDED.col_index,
                            seat_label = EXCLUDED.seat_label,
                            updated_at = NOW()
                        """,
                        (
                            item.member_id,
                            seat_map_id,
                            item.seat_slot_id if requires_slot else None,
                            item.row_index,
                            item.col_index,
                            seat_label,
                        ),
                    )
            else:
                cur.execute("DELETE FROM seat_positions WHERE seat_map_id = %s", (seat_map_id,))

            # Keep the denormalized member seat label in sync so organization views can
            # immediately reflect the latest saved seat assignment after reload.
            cur.execute(
                """
                UPDATE members
                SET seat_label = '',
                    updated_at = NOW()
                WHERE id IN (
                    SELECT DISTINCT member_id
                    FROM seat_positions
                    WHERE seat_map_id = %s
                    UNION
                    SELECT id
                    FROM members
                    WHERE COALESCE(seat_label, '') <> ''
                )
                """,
                (seat_map_id,),
            )
            cur.execute(
                """
                UPDATE members AS m
                SET seat_label = sp.seat_label,
                    updated_at = NOW()
                FROM seat_positions AS sp
                WHERE sp.member_id = m.id
                """
            )
        conn.commit()

    return fetch_seat_layout(seat_map_id)["placements"]


def get_member_count() -> int:
|
|
with get_conn() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute("SELECT COUNT(*) AS count FROM members")
|
|
return int(cur.fetchone()["count"])
|
|
|
|
|
|
def merge_import_member(item: MemberPayload, existing: dict[str, object] | None) -> MemberPayload:
|
|
if existing is None:
|
|
return item
|
|
|
|
payload = item.model_copy(deep=True)
|
|
if not payload.photo_url.strip():
|
|
payload.photo_url = str(existing.get("photo_url") or "")
|
|
if not payload.seat_label.strip():
|
|
payload.seat_label = str(existing.get("seat_label") or "")
|
|
return payload
|
|
|
|
|
|
def pick_existing_member(
|
|
item: MemberPayload,
|
|
existing_by_employee_id: dict[str, list[dict[str, object]]],
|
|
existing_by_name: dict[str, list[dict[str, object]]],
|
|
matched_ids: set[int],
|
|
) -> dict[str, object] | None:
|
|
employee_id = item.employee_id.strip()
|
|
if employee_id:
|
|
for candidate in existing_by_employee_id.get(employee_id, []):
|
|
candidate_id = int(candidate["id"])
|
|
if candidate_id not in matched_ids:
|
|
return candidate
|
|
|
|
name = item.name.strip()
|
|
if name:
|
|
available = [
|
|
candidate
|
|
for candidate in existing_by_name.get(name, [])
|
|
if int(candidate["id"]) not in matched_ids
|
|
]
|
|
if len(available) == 1:
|
|
return available[0]
|
|
|
|
return None
|
|
|
|
|
|
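
# Illustrative sketch (documentation only, never called by the app): employee_id
# matches take precedence, and a bare name only matches when it is unambiguous.
# _example_pick_existing_member is a hypothetical helper added for illustration.
def _example_pick_existing_member() -> None:
    existing = {"id": 1, "name": "김철수", "employee_id": "E001"}
    item = MemberPayload(name="김철수", employee_id="E001")
    assert pick_existing_member(item, {"E001": [existing]}, {"김철수": [existing]}, set()) is existing
    # Two members sharing a name and no employee_id cannot be matched safely.
    twins = [{"id": 2, "name": "이영희"}, {"id": 3, "name": "이영희"}]
    item = MemberPayload(name="이영희")
    assert pick_existing_member(item, {}, {"이영희": twins}, set()) is None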
def replace_members(items: list[MemberPayload]) -> list[dict[str, object]]:
    with get_conn() as conn:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT id, name, employee_id, company, rank, role, department, grp, division, team, cell,
                       work_status, work_time, phone, email, seat_label, photo_url,
                       sort_order, created_at, updated_at
                FROM members
                ORDER BY id ASC
                """
            )
            existing_members = cur.fetchall()

            existing_by_employee_id: dict[str, list[dict[str, object]]] = {}
            existing_by_name: dict[str, list[dict[str, object]]] = {}
            for member in existing_members:
                employee_id = str(member.get("employee_id") or "").strip()
                name = str(member.get("name") or "").strip()
                if employee_id:
                    existing_by_employee_id.setdefault(employee_id, []).append(member)
                if name:
                    existing_by_name.setdefault(name, []).append(member)

            matched_ids: set[int] = set()
            for index, item in enumerate(items):
                existing = pick_existing_member(item, existing_by_employee_id, existing_by_name, matched_ids)
                merged_item = merge_import_member(item, existing)
                if existing is None:
                    cur.execute(
                        """
                        INSERT INTO members (
                            name, employee_id, company, rank, role, department, grp, division, team, cell,
                            work_status, work_time, phone, email, seat_label, photo_url, sort_order
                        )
                        VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
                        """,
                        serialize_member_payload(merged_item, index),
                    )
                    continue

                matched_ids.add(int(existing["id"]))
                cur.execute(
                    """
                    UPDATE members
                    SET name = %s,
                        employee_id = %s,
                        company = %s,
                        rank = %s,
                        role = %s,
                        department = %s,
                        grp = %s,
                        division = %s,
                        team = %s,
                        cell = %s,
                        work_status = %s,
                        work_time = %s,
                        phone = %s,
                        email = %s,
                        seat_label = %s,
                        photo_url = %s,
                        sort_order = %s,
                        updated_at = NOW()
                    WHERE id = %s
                    """,
                    (*serialize_member_payload(merged_item, index), int(existing["id"])),
                )
            stale_ids = [int(member["id"]) for member in existing_members if int(member["id"]) not in matched_ids]
            if stale_ids:
                cur.execute("DELETE FROM members WHERE id = ANY(%s)", (stale_ids,))
            sync_auth_users_from_members(cur)
            conn.commit()
    return fetch_members()


def rows_to_member_payloads(rows: list[list[object]]) -> list[MemberPayload]:
    def normalize_header(value: object) -> str:
        return str(value or "").strip().lower()

    header_idx = detect_member_header_index(rows)
    if header_idx < 0:
        raise HTTPException(status_code=400, detail="지원하지 않는 파일 형식입니다. 필수 헤더(이름/부서 또는 name/part)를 찾지 못했습니다.")

    headers = [normalize_header(value) for value in rows[header_idx]]
    payloads: list[MemberPayload] = []

    for row in rows[header_idx + 1 :]:
        if not any(str(value or "").strip() for value in row):
            continue
        record: dict[str, object] = {}
        for col_idx, header in enumerate(headers):
            mapped = LEGACY_HEADER_MAP.get(header)
            if not mapped:
                continue
            value = str(row[col_idx] if col_idx < len(row) and row[col_idx] is not None else "").strip()
            if mapped == "phone":
                value = normalize_phone(value)
            record[mapped] = value
        if not str(record.get("name", "")).strip():
            continue
        payloads.append(MemberPayload(**record))
    return payloads


def parse_import_rows(file: UploadFile, content: bytes) -> list[MemberPayload]:
    suffix = Path(file.filename or "").suffix.lower()
    if suffix == ".csv":
        text = content.decode("utf-8-sig")
        rows = list(csv.reader(StringIO(text)))
        return rows_to_member_payloads(rows)
    if suffix in {".xlsx", ".xlsm", ".xltx", ".xltm"}:
        workbook = load_workbook(BytesIO(content), data_only=True)
        sheet = workbook[workbook.sheetnames[0]]
        rows = [list(row) for row in sheet.iter_rows(values_only=True)]
        return rows_to_member_payloads(rows)
    raise HTTPException(status_code=400, detail="xlsx 또는 csv 파일만 업로드할 수 있습니다.")


def detect_member_header_index(rows: list[list[object]]) -> int:
    def normalize_header(value: object) -> str:
        return str(value or "").strip().lower()

    return next(
        (
            idx
            for idx, row in enumerate(rows)
            if {"이름", "부서"}.issubset({str(value).strip() for value in row})
            or {"name", "part"}.issubset({normalize_header(value) for value in row})
        ),
        -1,
    )
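
# Illustrative sketch (documentation only): the header row is located by the
# presence of the 이름/부서 pair (or name/part for English exports), so title or
# memo rows above the real header are skipped automatically.
# _example_detect_member_header_index is a hypothetical helper for illustration.
def _example_detect_member_header_index() -> None:
    rows: list[list[object]] = [
        ["조직도 내보내기", "", ""],
        ["이름", "부서", "직책"],
        ["김철수", "개발팀", "선임"],
    ]
    assert detect_member_header_index(rows) == 1
    assert detect_member_header_index([["메모만 있는 행"]]) == -1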
def clean_text(value: object) -> str:
    return str(value or "").strip()


def canonicalize_member_name(
    employee_id: str,
    name: str,
    by_employee_id: dict[str, dict[str, object]] | None = None,
    aliases_by_name: dict[str, dict[str, object]] | None = None,
) -> str:
    employee_id = clean_text(employee_id)
    name = clean_text(name)
    if aliases_by_name:
        alias = aliases_by_name.get(name)
        if alias:
            canonical = clean_text(alias.get("canonical_name"))
            if canonical:
                return canonical
    if employee_id and by_employee_id and employee_id in by_employee_id:
        canonical = clean_text(by_employee_id[employee_id].get("name"))
        if canonical:
            return canonical
    return name
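
# Illustrative sketch (documentation only): explicit aliases win over the
# employee-id lookup, which in turn wins over the raw imported name.
# _example_canonicalize_member_name is a hypothetical helper for illustration.
def _example_canonicalize_member_name() -> None:
    aliases = {"김철수(파견)": {"canonical_name": "김철수"}}
    by_employee_id = {"E001": {"name": "박영희"}}
    assert canonicalize_member_name("", "김철수(파견)", None, aliases) == "김철수"
    assert canonicalize_member_name("E001", "박영희(구명)", by_employee_id, None) == "박영희"
    assert canonicalize_member_name("", "미등록", by_employee_id, aliases) == "미등록"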
def merge_members_with_mh_source(
    organization_members: list[MemberPayload],
    mh_work_logs: list[dict[str, object]],
    overrides_by_employee_id: dict[str, dict[str, object]] | None = None,
    retired_member_names: set[str] | None = None,
    aliases_by_name: dict[str, dict[str, object]] | None = None,
) -> list[MemberPayload]:
    merged: list[MemberPayload] = [item.model_copy(deep=True) for item in organization_members]
    by_employee_id: dict[str, MemberPayload] = {}
    by_name: dict[str, list[MemberPayload]] = {}
    lookup_by_employee_id = {
        clean_text(item.employee_id): {"name": item.name}
        for item in merged
        if clean_text(item.employee_id)
    }
    for item in merged:
        employee_id = clean_text(item.employee_id)
        if employee_id:
            by_employee_id[employee_id] = item
        name = clean_text(item.name)
        if name:
            by_name.setdefault(name, []).append(item)

    for entry in mh_work_logs:
        employee_id = clean_text(entry.get("employee_id"))
        canonical_name = canonicalize_member_name(employee_id, str(entry.get("member_name") or ""), lookup_by_employee_id, aliases_by_name)
        if canonical_name in (retired_member_names or set()):
            continue
        # Match by employee_id first; fall back to the name only when it is unambiguous.
        target = by_employee_id.get(employee_id) if employee_id else None
        if target is None:
            candidates = by_name.get(canonical_name, [])
            if len(candidates) == 1:
                target = candidates[0]

        rank = clean_text(entry.get("title"))
        department = clean_text(entry.get("team_category"))
        team = clean_text(entry.get("team_name"))
        work_status = clean_text(entry.get("user_state"))

        # Workers that only appear in the MH sheet are appended as new members.
        if target is None:
            target = MemberPayload(
                name=canonical_name,
                employee_id=employee_id,
                rank=rank,
                role=rank,
                department=department,
                team=team,
                work_status=work_status,
            )
            merged.append(target)
            if employee_id:
                by_employee_id[employee_id] = target
                lookup_by_employee_id[employee_id] = {"name": canonical_name}
            by_name.setdefault(canonical_name, []).append(target)
            continue

        # Organization data wins; MH values only fill fields that are still blank.
        if employee_id and not clean_text(target.employee_id):
            target.employee_id = employee_id
            by_employee_id[employee_id] = target
            lookup_by_employee_id[employee_id] = {"name": target.name}
        if canonical_name and clean_text(target.name) != canonical_name and employee_id and clean_text(target.employee_id) == employee_id:
            target.name = canonical_name
        if rank and not clean_text(target.rank):
            target.rank = rank
        if rank and not clean_text(target.role):
            target.role = rank
        if department and not clean_text(target.department):
            target.department = department
        if team and not clean_text(target.team):
            target.team = team
        if work_status and not clean_text(target.work_status):
            target.work_status = work_status

        # Manual per-employee overrides always win over both sources.
        override = (overrides_by_employee_id or {}).get(employee_id)
        if override:
            for field_name in (
                "name",
                "company",
                "rank",
                "role",
                "department",
                "grp",
                "division",
                "team",
                "cell",
                "work_status",
                "work_time",
                "phone",
                "email",
                "seat_label",
                "photo_url",
            ):
                if field_name in override and override[field_name] is not None:
                    setattr(target, field_name, str(override[field_name]))

    if overrides_by_employee_id:
        for item in merged:
            employee_id = clean_text(item.employee_id)
            override = overrides_by_employee_id.get(employee_id)
            if not override:
                continue
            for field_name in (
                "name",
                "company",
                "rank",
                "role",
                "department",
                "grp",
                "division",
                "team",
                "cell",
                "work_status",
                "work_time",
                "phone",
                "email",
                "seat_label",
                "photo_url",
            ):
                if field_name in override and override[field_name] is not None:
                    setattr(item, field_name, str(override[field_name]))

    return merged


def parse_numeric(value: object) -> float:
    if value is None:
        return 0.0
    if isinstance(value, (int, float)):
        return float(value)
    text = clean_text(value).replace(",", "")
    if not text:
        return 0.0
    try:
        return float(text)
    except ValueError:
        return 0.0
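
# Illustrative sketch (documentation only): thousands separators are stripped
# and anything unparseable degrades to 0.0 instead of raising.
# _example_parse_numeric is a hypothetical helper added for illustration.
def _example_parse_numeric() -> None:
    assert parse_numeric("1,234.5") == 1234.5
    assert parse_numeric(8) == 8.0
    assert parse_numeric("") == 0.0
    assert parse_numeric("없음") == 0.0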
def normalize_key_for_source(value: object) -> str:
    return str(value or "").strip().replace(" ", "").lower()


def normalize_project_key_for_analysis(value: object) -> str:
    text = unicodedata.normalize("NFKC", str(value or "")).lower()
    text = re.sub(r"[\u200b-\u200d\ufeff]", "", text)
    return re.sub(r"[^0-9a-z가-힣]", "", text)
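
# Illustrative sketch (documentation only): NFKC folding plus the character
# whitelist makes spacing/width/punctuation variants of a project name collide
# onto the same analysis key.
# _example_normalize_project_key is a hypothetical helper for illustration.
def _example_normalize_project_key() -> None:
    assert normalize_project_key_for_analysis(" AI 플랫폼-2024 ") == "ai플랫폼2024"
    assert normalize_project_key_for_analysis("AI플랫폼 2024") == "ai플랫폼2024"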
def parse_date_value(value: object) -> str | None:
    if value is None:
        return None
    if isinstance(value, datetime):
        return value.date().isoformat()
    text = clean_text(value)
    if not text:
        return None
    normalized = text.replace(". ", "-").replace(".", "-").replace("/", "-")
    for fmt in ("%Y-%m-%d", "%Y-%m-%d %H:%M:%S"):
        try:
            return datetime.strptime(normalized, fmt).date().isoformat()
        except ValueError:
            continue
    return None
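
# Illustrative sketch (documentation only): both Excel datetime cells and the
# common dotted/slashed text spellings collapse to ISO dates; anything else
# yields None. _example_parse_date_value is a hypothetical helper.
def _example_parse_date_value() -> None:
    assert parse_date_value(datetime(2024, 3, 5, 9, 30)) == "2024-03-05"
    assert parse_date_value("2024.03.05") == "2024-03-05"
    assert parse_date_value("2024/03/05") == "2024-03-05"
    assert parse_date_value("근무일 미정") is None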
def build_parsecsv_like_row(headers: list[str], row_json: dict[str, object]) -> dict[str, object]:
    values = [clean_text(row_json.get(header, "")) for header in headers]
    parsed: dict[str, object] = {"__values": values}
    for index, header in enumerate(headers):
        if header:
            parsed[header] = values[index]
        normalized = normalize_key_for_source(header)
        if normalized:
            parsed[f"__n_{normalized}"] = values[index]
    return parsed


def build_parsecsv_like_row_from_values(headers: list[str], row_values: list[object]) -> dict[str, object]:
    values = [clean_text(row_values[index] if index < len(row_values) else "") for index in range(len(headers))]
    parsed: dict[str, object] = {"__values": values}
    for index, header in enumerate(headers):
        if header:
            parsed[header] = values[index]
        normalized = normalize_key_for_source(header)
        if normalized:
            parsed[f"__n_{normalized}"] = values[index]
    return parsed
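
# Illustrative sketch (documentation only): every header is exposed three ways
# (positional __values, the verbatim header, and a spacing-insensitive __n_ key)
# so later lookups survive header variants such as "근무 시간" vs "근무시간".
# _example_parsecsv_like_row is a hypothetical helper added for illustration.
def _example_parsecsv_like_row() -> None:
    parsed = build_parsecsv_like_row_from_values(["이름", "근무 시간"], ["김철수", 8])
    assert parsed["__values"] == ["김철수", "8"]
    assert parsed["이름"] == "김철수"
    assert parsed["__n_근무시간"] == "8"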
def build_sheet_row(headers: list[str], row_json: dict[str, object]) -> list[str]:
    return [clean_text(row_json.get(header, "")) for header in headers]


def normalize_excel_cell(value: object) -> object:
    if isinstance(value, datetime):
        return value.isoformat(sep=" ")
    if isinstance(value, (date, time)):
        return value.isoformat()
    return value


def parse_organization_source(path: Path) -> tuple[list[dict[str, object]], list[MemberPayload]]:
    workbook = load_workbook(path, data_only=True)
    sheet = workbook[workbook.sheetnames[0]]
    rows = [list(row) for row in sheet.iter_rows(values_only=True)]
    payloads = rows_to_member_payloads(rows)
    header_idx = detect_member_header_index(rows)
    headers = [clean_text(value).lower() for value in rows[header_idx]]
    raw_rows: list[dict[str, object]] = []
    for index, row in enumerate(rows[header_idx + 1 :], start=header_idx + 2):
        values = [clean_text(value) for value in row]
        if not any(values):
            continue
        raw_rows.append(
            {
                "row_index": index,
                "row_json": {
                    header: values[col_idx] if col_idx < len(values) else ""
                    for col_idx, header in enumerate(headers)
                    if header
                },
            }
        )
    return raw_rows, payloads


def parse_mh_source(
    path: Path,
) -> tuple[list[dict[str, object]], list[dict[str, object]], list[dict[str, object]], list[dict[str, object]], list[dict[str, object]]]:
    workbook = load_workbook(path, data_only=True)
    sheet = workbook["Sheet1"]
    rows = [list(row) for row in sheet.iter_rows(values_only=True)]
    headers = [clean_text(value) for value in rows[0]]
    header_index = {header: idx for idx, header in enumerate(headers) if header}
    raw_rows: list[dict[str, object]] = []
    raw_pm_rows: list[dict[str, object]] = []
    work_logs: list[dict[str, object]] = []
    segments: list[dict[str, object]] = []
    slot_specs = [
        ("메인업무", "메인업무 프로젝트 코드", "메인업무 프로젝트명", "메인업무 서브 코드", "메인업무 근무시간", None),
        ("추가업무1", "추가업무1 프로젝트 코드", "추가업무1 프로젝트명", "추가업무1 서브 코드", "추가업무1 근무시간", None),
        ("추가업무2", "추가업무2 프로젝트 코드", "추가업무2 프로젝트명", "추가업무2 서브 코드", "추가업무2 근무시간", None),
        ("추가업무3", "추가업무3 프로젝트 코드", "추가업무3 프로젝트명", "추가업무3 서브 코드", "추가업무3 근무시간", None),
        ("추가업무4", "추가업무4 프로젝트 코드", "추가업무4 프로젝트명", "추가업무4 서브 코드", "추가업무4 근무시간", None),
        ("추가업무5", "추가업무5 프로젝트 코드", "추가업무5 프로젝트명", "추가업무5 서브 코드", "추가업무5 근무시간", None),
        ("연장근무", "연장근무 프로젝트 코드", "연장근무 프로젝트명", "연장근무 서브코드", "연장근무 시간(실제)", "연장근무 시간(가공)"),
    ]

    raw_rows.append(
        {
            "row_index": 1,
            "row_json": {f"col_{col_idx}": normalize_excel_cell(value) for col_idx, value in enumerate(rows[0])},
            "row_values": [normalize_excel_cell(value) for value in rows[0]],
        }
    )

    for row_index, row in enumerate(rows[1:], start=2):
        values = [clean_text(value) for value in row]
        if not any(values):
            continue
        record = {
            headers[col_idx]: values[col_idx] if col_idx < len(values) else ""
            for col_idx in range(len(headers))
            if headers[col_idx]
        }
        raw_rows.append(
            {
                "row_index": row_index,
                "row_json": record,
                "row_values": [normalize_excel_cell(value) for value in row],
            }
        )
        work_logs.append(
            {
                "row_index": row_index,
                "work_date": parse_date_value(row[header_index["근무일자"]]) if "근무일자" in header_index else None,
                "employee_id": values[header_index["사원번호"]] if "사원번호" in header_index else "",
                "member_name": values[header_index["이름"]] if "이름" in header_index else "",
                "title": values[header_index["직책"]] if "직책" in header_index else "",
                "team_category": values[header_index["팀 분류"]] if "팀 분류" in header_index else "",
                "team_name": values[header_index["팀"]] if "팀" in header_index else "",
                "user_state": values[header_index["user_state"]] if "user_state" in header_index else "",
                "shift_hours": parse_numeric(row[header_index["시차시간"]]) if "시차시간" in header_index else 0.0,
                "weekend_late_flag": values[header_index["주말/지각"]] if "주말/지각" in header_index else "",
                "review_status": values[header_index["검토"]] if "검토" in header_index else "",
            }
        )
        for slot_name, code_header, name_header, activity_header, hours_header, adjusted_header in slot_specs:
            code_idx = header_index.get(code_header)
            name_idx = header_index.get(name_header)
            activity_idx = header_index.get(activity_header)
            hours_idx = header_index.get(hours_header)
            if code_idx is None or name_idx is None or activity_idx is None or hours_idx is None:
                continue
            business_type_idx = code_idx - 1
            project_code = values[code_idx]
            project_name = values[name_idx]
            hours = parse_numeric(row[hours_idx])
            if not project_code and not project_name and hours <= 0:
                continue
            segments.append(
                {
                    "row_index": row_index,
                    "employee_id": values[header_index["사원번호"]] if "사원번호" in header_index else "",
                    "slot_name": slot_name,
                    "business_type": values[business_type_idx] if business_type_idx < len(values) else "",
                    "project_code": project_code,
                    "project_name": project_name,
                    "activity_code": values[activity_idx],
                    "hours": hours,
                    "overtime_hours_raw": hours if slot_name == "연장근무" else 0.0,
                    "overtime_hours_adjusted": parse_numeric(row[header_index[adjusted_header]]) if adjusted_header and adjusted_header in header_index else 0.0,
                    "is_overtime": slot_name == "연장근무",
                }
            )

    pm_assignments: list[dict[str, object]] = []
    if "Sheet2" in workbook.sheetnames:
        pm_sheet = workbook["Sheet2"]
        for row_index, row in enumerate(pm_sheet.iter_rows(values_only=True), start=1):
            raw_pm_rows.append(
                {
                    "row_index": row_index,
                    "row_values": [normalize_excel_cell(value) for value in row],
                }
            )
            project_code = clean_text(row[0] if len(row) > 0 else "")
            pm_name = clean_text(row[1] if len(row) > 1 else "")
            if not project_code or not pm_name:
                continue
            pm_assignments.append({"row_index": row_index, "project_code": project_code, "pm_name": pm_name})

    return raw_rows, raw_pm_rows, work_logs, segments, pm_assignments


def parse_payment_source(path: Path) -> tuple[list[dict[str, object]], list[dict[str, object]]]:
    encodings = ["cp949", "utf-8-sig", "utf-8"]
    last_error: Exception | None = None
    for encoding in encodings:
        try:
            with path.open("r", encoding=encoding, newline="") as handle:
                reader = csv.DictReader(handle)
                raw_rows: list[dict[str, object]] = []
                vouchers: list[dict[str, object]] = []
                for row_index, row in enumerate(reader, start=2):
                    normalized = {clean_text(key): clean_text(value) for key, value in row.items() if key is not None}
                    if not any(normalized.values()):
                        continue
                    raw_rows.append({"row_index": row_index, "row_json": normalized})
                    vouchers.append(
                        {
                            "row_index": row_index,
                            "accounting_company": normalized.get("상신회사", ""),
                            "claim_date": parse_date_value(normalized.get("청구일")),
                            "issue_date": parse_date_value(normalized.get("발행일")),
                            "issue_month": normalized.get("발행월", ""),
                            "account_code": normalized.get("계정코드", ""),
                            "management_account_code": normalized.get("관리계정코드", ""),
                            "account_name": normalized.get("각사 계정명", ""),
                            "project_code": normalized.get("프로젝트코드", ""),
                            "project_name": normalized.get("사업명", ""),
                            "display_project_name": normalized.get("사업명(표출PJT)", ""),
                            "intranet_project_name": normalized.get("사업명(인트라넷기준)", ""),
                            "business_area": normalized.get("사업분야", ""),
                            "business_subarea": normalized.get("세부분야", ""),
                            "planning_dev_sales": normalized.get("기획/개발/영업", ""),
                            "main_category": normalized.get("대분류", ""),
                            "middle_category": normalized.get("중분류", ""),
                            "sub_category": normalized.get("소분류", ""),
                            "department_name": normalized.get("부서명", ""),
                            "team_name": normalized.get("팀명", ""),
                            "customer_name": normalized.get("거래처", ""),
                            "summary_text": normalized.get("적요", ""),
                            "debit_supply_amount": parse_numeric(normalized.get("차변공급가")),
                            "credit_supply_amount": parse_numeric(normalized.get("대변공급가")),
                            "expense_amount": parse_numeric(normalized.get("지출")),
                            "income_amount": parse_numeric(normalized.get("수입")),
                            "voucher_type": normalized.get("구분", ""),
                            "project_nature": normalized.get("프로젝트성격", ""),
                        }
                    )
                return raw_rows, vouchers
        except UnicodeDecodeError as exc:
            last_error = exc
            continue
    raise HTTPException(status_code=400, detail=f"payment.csv 디코딩에 실패했습니다: {last_error}")


def parse_project_category_mapping_source(path: Path) -> list[dict[str, str]]:
    encodings = ["cp949", "utf-8-sig", "utf-8"]
    last_error: Exception | None = None
    for encoding in encodings:
        try:
            with path.open("r", encoding=encoding, newline="") as handle:
                reader = csv.DictReader(handle)
                mappings: list[dict[str, str]] = []
                for row in reader:
                    project_name = clean_text(row.get("프로젝트명"))
                    if not project_name:
                        continue
                    mappings.append(
                        {
                            "source_key": "ptj_csv",
                            "project_name": project_name,
                            "normalized_project_key": normalize_project_key_for_analysis(project_name),
                            "mapped_d1": clean_text(row.get("매출/비매출")),
                            "mapped_d2": clean_text(row.get("분야")),
                            "mapped_d3": clean_text(row.get("세부분야")),
                        }
                    )
                return mappings
        except UnicodeDecodeError as exc:
            last_error = exc
            continue
    raise HTTPException(status_code=400, detail=f"ptj.csv 디코딩에 실패했습니다: {last_error}")


def fetch_member_lookup() -> tuple[dict[str, dict[str, object]], dict[str, list[dict[str, object]]]]:
    members = fetch_members()
    by_employee_id = {
        clean_text(member.get("employee_id")): member
        for member in members
        if clean_text(member.get("employee_id"))
    }
    by_name: dict[str, list[dict[str, object]]] = {}
    for member in members:
        name = clean_text(member.get("name"))
        if name:
            by_name.setdefault(name, []).append(member)
    return by_employee_id, by_name


def fetch_member_overrides() -> dict[str, dict[str, object]]:
    with get_conn() as conn:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT employee_id, name, company, rank, role, department, grp, division, team, cell,
                       work_status, work_time, phone, email, seat_label, photo_url
                FROM member_overrides
                """
            )
            return {
                clean_text(row["employee_id"]): dict(row)
                for row in cur.fetchall()
                if clean_text(row["employee_id"])
            }


def fetch_retired_member_names() -> set[str]:
    with get_conn() as conn:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT name
                FROM member_retirements
                """
            )
            return {
                clean_text(row["name"])
                for row in cur.fetchall()
                if clean_text(row["name"])
            }


def fetch_member_aliases() -> dict[str, dict[str, object]]:
    with get_conn() as conn:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT alias_name, canonical_name, employee_id, note
                FROM member_aliases
                """
            )
            return {
                clean_text(row["alias_name"]): dict(row)
                for row in cur.fetchall()
                if clean_text(row["alias_name"])
            }


def find_member_id(employee_id: str, name: str, by_employee_id: dict[str, dict[str, object]], by_name: dict[str, list[dict[str, object]]]) -> int | None:
    employee_id = clean_text(employee_id)
    if employee_id and employee_id in by_employee_id:
        return int(by_employee_id[employee_id]["id"])
    candidates = by_name.get(clean_text(name), [])
    if len(candidates) == 1:
        return int(candidates[0]["id"])
    return None


def dedupe_member_payloads(items: list[MemberPayload]) -> tuple[list[MemberPayload], int]:
    deduped: list[MemberPayload] = []
    seen_keys: set[tuple[str, str]] = set()
    duplicate_count = 0
    for item in items:
        key = (clean_text(item.employee_id), clean_text(item.name))
        if key in seen_keys:
            duplicate_count += 1
            continue
        seen_keys.add(key)
        deduped.append(item)
    return deduped, duplicate_count
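
# Illustrative sketch (documentation only): rows are deduplicated on the
# (employee_id, name) pair, keeping the first occurrence of each pair.
# _example_dedupe_member_payloads is a hypothetical helper for illustration.
def _example_dedupe_member_payloads() -> None:
    items = [
        MemberPayload(name="김철수", employee_id="E001"),
        MemberPayload(name="김철수", employee_id="E001"),
        MemberPayload(name="김철수", employee_id="E002"),
    ]
    deduped, dropped = dedupe_member_payloads(items)
    assert len(deduped) == 2 and dropped == 1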
def upsert_project(
    cur,
    project_cache: dict[str, int],
    project_code: str,
    project_name: str,
    *,
    display_name: str = "",
    intranet_name: str = "",
    business_area: str = "",
    business_subarea: str = "",
    project_nature: str = "",
    main_category: str = "",
    middle_category: str = "",
    sub_category: str = "",
) -> int | None:
    project_code = clean_text(project_code)
    if not project_code:
        return None
    # The per-import cache avoids re-upserting a project every time its code
    # reappears in the MH segments or vouchers.
    if project_code in project_cache:
        return project_cache[project_code]
    cur.execute(
        """
        INSERT INTO integration_projects (
            project_code, project_name, display_name, intranet_name, business_area, business_subarea,
            project_nature, main_category, middle_category, sub_category, updated_at
        )
        VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, NOW())
        ON CONFLICT (project_code) DO UPDATE
        SET project_name = EXCLUDED.project_name,
            display_name = EXCLUDED.display_name,
            intranet_name = EXCLUDED.intranet_name,
            business_area = EXCLUDED.business_area,
            business_subarea = EXCLUDED.business_subarea,
            project_nature = EXCLUDED.project_nature,
            main_category = EXCLUDED.main_category,
            middle_category = EXCLUDED.middle_category,
            sub_category = EXCLUDED.sub_category,
            updated_at = NOW()
        RETURNING id
        """,
        (
            project_code,
            clean_text(project_name) or project_code,
            clean_text(display_name),
            clean_text(intranet_name),
            clean_text(business_area),
            clean_text(business_subarea),
            clean_text(project_nature),
            clean_text(main_category),
            clean_text(middle_category),
            clean_text(sub_category),
        ),
    )
    project_id = int(cur.fetchone()["id"])
    for alias_name, alias_type in (
        (clean_text(project_name), "project_name"),
        (clean_text(display_name), "display_name"),
        (clean_text(intranet_name), "intranet_name"),
    ):
        if not alias_name:
            continue
        cur.execute(
            """
            INSERT INTO integration_project_aliases (project_id, alias_name, alias_type)
            VALUES (%s, %s, %s)
            ON CONFLICT (project_id, alias_name, alias_type) DO NOTHING
            """,
            (project_id, alias_name, alias_type),
        )
    project_cache[project_code] = project_id
    return project_id
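
# Illustrative sketch (documentation only): a stub cursor shows that the
# per-import cache short-circuits repeat upserts for the same project code.
# _StubCursor and _example_upsert_project_cache are hypothetical test helpers,
# not part of the application.
class _StubCursor:
    def __init__(self) -> None:
        self.executed = 0

    def execute(self, sql: str, params: object = None) -> None:
        self.executed += 1

    def fetchone(self) -> dict[str, object]:
        return {"id": 1}


def _example_upsert_project_cache() -> None:
    cur, cache = _StubCursor(), {}
    assert upsert_project(cur, cache, "PJT-001", "플랫폼 구축") == 1
    first = cur.executed  # one project INSERT plus one alias row
    assert upsert_project(cur, cache, "PJT-001", "플랫폼 구축") == 1
    assert cur.executed == first  # cache hit: no extra SQL issued
    assert upsert_project(cur, cache, "", "코드 없는 행") is None  # blank codes are skipped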
def import_integration_sources() -> dict[str, object]:
    organization_path = INCOMING_FILES_DIR / "organization.xlsx"
    mh_path = INCOMING_FILES_DIR / "MH.xlsx"
    payment_path = INCOMING_FILES_DIR / "payment.csv"
    project_mapping_path = INCOMING_FILES_DIR / "ptj.csv"
    for required_path in (organization_path, mh_path, payment_path):
        if not required_path.exists():
            raise HTTPException(status_code=404, detail=f"필수 통합 파일이 없습니다: {required_path.name}")

    mh_raw_rows, mh_raw_pm_rows, mh_work_logs, mh_segments, mh_pm_assignments = parse_mh_source(mh_path)
    organization_raw_rows, organization_members = parse_organization_source(organization_path)
    organization_members, duplicate_member_count = dedupe_member_payloads(organization_members)
    member_overrides = fetch_member_overrides()
    retired_member_names = fetch_retired_member_names()
    member_aliases = fetch_member_aliases()
    organization_members = merge_members_with_mh_source(
        organization_members,
        mh_work_logs,
        member_overrides,
        retired_member_names,
        member_aliases,
    )
    # Re-deduplicate after the MH merge; this second count is what the summary reports.
    organization_members, duplicate_member_count = dedupe_member_payloads(organization_members)
    payment_raw_rows, payment_vouchers = parse_payment_source(payment_path)
    project_category_mappings = parse_project_category_mapping_source(project_mapping_path) if project_mapping_path.exists() else []

    replace_members(organization_members)
    members_by_employee_id, members_by_name = fetch_member_lookup()

    with get_conn() as conn:
        with conn.cursor() as cur:
            # Full reload: every derived integration table is rebuilt from scratch.
            cur.execute(
                """
                TRUNCATE TABLE
                    integration_raw_mh_pm_rows,
                    integration_project_category_mappings,
                    integration_project_pm_assignments,
                    integration_project_aliases,
                    integration_work_log_segments,
                    integration_work_logs,
                    integration_vouchers,
                    integration_projects,
                    integration_raw_organization_rows,
                    integration_raw_mh_rows,
                    integration_raw_payment_rows,
                    integration_import_batches
                RESTART IDENTITY
                """
            )

            batch_ids: dict[str, int] = {}
            for source_key, source_name, source_path, row_count, meta_json in (
                ("organization", "조직 정보", str(organization_path), len(organization_raw_rows), {"members": len(organization_members)}),
                ("mh", "근무시간 데이터", str(mh_path), len(mh_raw_rows), {"work_logs": len(mh_work_logs), "segments": len(mh_segments), "pm_rows": len(mh_raw_pm_rows)}),
                ("payment", "전표 데이터", str(payment_path), len(payment_raw_rows), {"vouchers": len(payment_vouchers)}),
            ):
                cur.execute(
                    """
                    INSERT INTO integration_import_batches (source_key, source_name, source_path, row_count, meta_json)
                    VALUES (%s, %s, %s, %s, %s::jsonb)
                    RETURNING id
                    """,
                    (source_key, source_name, source_path, row_count, json.dumps(meta_json, ensure_ascii=False)),
                )
                batch_ids[source_key] = int(cur.fetchone()["id"])

            for item in organization_raw_rows:
                cur.execute(
                    "INSERT INTO integration_raw_organization_rows (batch_id, row_index, row_json) VALUES (%s, %s, %s::jsonb)",
                    (batch_ids["organization"], item["row_index"], json.dumps(item["row_json"], ensure_ascii=False)),
                )
            for item in mh_raw_rows:
                cur.execute(
                    """
                    INSERT INTO integration_raw_mh_rows (batch_id, row_index, row_json, row_values_json)
                    VALUES (%s, %s, %s::jsonb, %s::jsonb)
                    """,
                    (
                        batch_ids["mh"],
                        item["row_index"],
                        json.dumps(item["row_json"], ensure_ascii=False),
                        json.dumps(item["row_values"], ensure_ascii=False),
                    ),
                )
            for item in mh_raw_pm_rows:
                cur.execute(
                    """
                    INSERT INTO integration_raw_mh_pm_rows (batch_id, row_index, row_values_json)
                    VALUES (%s, %s, %s::jsonb)
                    """,
                    (batch_ids["mh"], item["row_index"], json.dumps(item["row_values"], ensure_ascii=False)),
                )
            for item in payment_raw_rows:
                cur.execute(
                    "INSERT INTO integration_raw_payment_rows (batch_id, row_index, row_json) VALUES (%s, %s, %s::jsonb)",
                    (batch_ids["payment"], item["row_index"], json.dumps(item["row_json"], ensure_ascii=False)),
                )
            for item in project_category_mappings:
                cur.execute(
                    """
                    INSERT INTO integration_project_category_mappings (
                        source_key, project_name, normalized_project_key, mapped_d1, mapped_d2, mapped_d3
                    )
                    VALUES (%s, %s, %s, %s, %s, %s)
                    ON CONFLICT (source_key, normalized_project_key) DO UPDATE
                    SET project_name = EXCLUDED.project_name,
                        mapped_d1 = EXCLUDED.mapped_d1,
                        mapped_d2 = EXCLUDED.mapped_d2,
                        mapped_d3 = EXCLUDED.mapped_d3,
                        updated_at = NOW()
                    """,
                    (
                        item["source_key"],
                        item["project_name"],
                        item["normalized_project_key"],
                        item["mapped_d1"],
                        item["mapped_d2"],
                        item["mapped_d3"],
                    ),
                )

            project_cache: dict[str, int] = {}
            for segment in mh_segments:
                upsert_project(cur, project_cache, str(segment["project_code"]), str(segment["project_name"]))
            for voucher in payment_vouchers:
                upsert_project(
                    cur,
                    project_cache,
                    str(voucher["project_code"]),
                    str(voucher["project_name"]),
                    display_name=str(voucher["display_project_name"]),
                    intranet_name=str(voucher["intranet_project_name"]),
                    business_area=str(voucher["business_area"]),
                    business_subarea=str(voucher["business_subarea"]),
                    project_nature=str(voucher["project_nature"]),
                    main_category=str(voucher["main_category"]),
                    middle_category=str(voucher["middle_category"]),
                    sub_category=str(voucher["sub_category"]),
                )

            for item in mh_pm_assignments:
                project_id = project_cache.get(str(item["project_code"]))
                if not project_id:
                    project_id = upsert_project(cur, project_cache, str(item["project_code"]), str(item["project_code"]))
                pm_name = canonicalize_member_name("", str(item["pm_name"]), members_by_employee_id, member_aliases)
                member_id = find_member_id("", pm_name, members_by_employee_id, members_by_name)
                cur.execute(
                    """
                    INSERT INTO integration_project_pm_assignments (project_id, member_id, pm_name, source_label)
                    VALUES (%s, %s, %s, 'mh_sheet2')
                    ON CONFLICT (project_id, source_label) DO UPDATE
                    SET member_id = EXCLUDED.member_id,
                        pm_name = EXCLUDED.pm_name
                    """,
                    (project_id, member_id, pm_name),
                )

            work_log_id_by_row_index: dict[int, int] = {}
            for item in mh_work_logs:
                canonical_name = canonicalize_member_name(str(item["employee_id"]), str(item["member_name"]), members_by_employee_id, member_aliases)
                member_id = find_member_id(str(item["employee_id"]), canonical_name, members_by_employee_id, members_by_name)
                cur.execute(
                    """
                    INSERT INTO integration_work_logs (
                        work_date, employee_id, member_id, member_name, title, team_category, team_name, user_state,
                        shift_hours, weekend_late_flag, review_status, source_row_index, raw_batch_id
                    )
                    VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
                    RETURNING id
                    """,
                    (
                        item["work_date"],
                        str(item["employee_id"]),
                        member_id,
                        canonical_name,
                        str(item["title"]),
                        str(item["team_category"]),
                        str(item["team_name"]),
                        str(item["user_state"]),
                        item["shift_hours"],
                        str(item["weekend_late_flag"]),
                        str(item["review_status"]),
                        item["row_index"],
                        batch_ids["mh"],
                    ),
                )
                work_log_id_by_row_index[int(item["row_index"])] = int(cur.fetchone()["id"])

            for item in mh_segments:
                work_log_id = work_log_id_by_row_index.get(int(item["row_index"]))
                if not work_log_id:
                    continue
                project_id = project_cache.get(str(item["project_code"]))
                cur.execute(
                    """
                    INSERT INTO integration_work_log_segments (
                        work_log_id, slot_name, project_id, project_code, project_name, business_type,
                        activity_code, hours, overtime_hours_raw, overtime_hours_adjusted, is_overtime
                    )
                    VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
                    """,
                    (
                        work_log_id,
                        str(item["slot_name"]),
                        project_id,
                        str(item["project_code"]),
                        str(item["project_name"]),
                        str(item["business_type"]),
                        str(item["activity_code"]),
                        item["hours"],
                        item["overtime_hours_raw"],
                        item["overtime_hours_adjusted"],
                        bool(item["is_overtime"]),
                    ),
                )

            for item in payment_vouchers:
                project_id = project_cache.get(str(item["project_code"]))
                cur.execute(
                    """
                    INSERT INTO integration_vouchers (
                        accounting_company, claim_date, issue_date, issue_month, account_code, management_account_code, account_name,
                        project_id, project_code, project_name, display_project_name, intranet_project_name, business_area,
                        business_subarea, planning_dev_sales, main_category, middle_category, sub_category, department_name,
                        team_name, customer_name, summary_text, debit_supply_amount, credit_supply_amount, expense_amount,
                        income_amount, voucher_type, project_nature, raw_batch_id, source_row_index
                    )
                    VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
                    """,
                    (
                        str(item["accounting_company"]),
                        item["claim_date"],
                        item["issue_date"],
                        str(item["issue_month"]),
                        str(item["account_code"]),
                        str(item["management_account_code"]),
                        str(item["account_name"]),
                        project_id,
                        str(item["project_code"]),
                        str(item["project_name"]),
                        str(item["display_project_name"]),
                        str(item["intranet_project_name"]),
                        str(item["business_area"]),
                        str(item["business_subarea"]),
                        str(item["planning_dev_sales"]),
                        str(item["main_category"]),
                        str(item["middle_category"]),
                        str(item["sub_category"]),
                        str(item["department_name"]),
                        str(item["team_name"]),
                        str(item["customer_name"]),
                        str(item["summary_text"]),
                        item["debit_supply_amount"],
                        item["credit_supply_amount"],
                        item["expense_amount"],
                        item["income_amount"],
                        str(item["voucher_type"]),
                        str(item["project_nature"]),
                        batch_ids["payment"],
                        item["row_index"],
                    ),
                )
            conn.commit()

    return {
        "batches": {
            "organization": len(organization_raw_rows),
            "mh": len(mh_raw_rows),
            "payment": len(payment_raw_rows),
        },
        "members_synced": len(organization_members),
        "deduped_members": duplicate_member_count,
        "projects": len(project_cache),
        "work_logs": len(mh_work_logs),
        "work_log_segments": len(mh_segments),
        "project_pm_assignments": len(mh_pm_assignments),
        "vouchers": len(payment_vouchers),
        "project_category_mappings": len(project_category_mappings),
    }


def fetch_integration_summary() -> dict[str, object]:
    with get_conn() as conn:
        with conn.cursor() as cur:
            counts: dict[str, int] = {}
            for label, table_name in (
                ("members", "members"),
                ("projects", "integration_projects"),
                ("work_logs", "integration_work_logs"),
                ("work_log_segments", "integration_work_log_segments"),
                ("vouchers", "integration_vouchers"),
                ("organization_rows", "integration_raw_organization_rows"),
                ("mh_rows", "integration_raw_mh_rows"),
                ("payment_rows", "integration_raw_payment_rows"),
            ):
                cur.execute(f"SELECT COUNT(*) AS count FROM {table_name}")
                counts[label] = int(cur.fetchone()["count"])
            cur.execute(
                """
                SELECT source_key, source_name, source_path, row_count, imported_at, meta_json
                FROM integration_import_batches
                ORDER BY id ASC
                """
            )
            batches = cur.fetchall()
            cur.execute(
                """
                SELECT
                    MIN(work_date) AS min_work_date,
                    MAX(work_date) AS max_work_date
                FROM integration_work_logs
                """
            )
            work_range = cur.fetchone()
            cur.execute(
                """
                SELECT
                    MIN(COALESCE(issue_date, claim_date)) AS min_voucher_date,
                    MAX(COALESCE(issue_date, claim_date)) AS max_voucher_date
                FROM integration_vouchers
                """
            )
            voucher_range = cur.fetchone()
            return {
                "counts": counts,
                "batches": batches,
                "date_ranges": {
                    "work": work_range,
                    "voucher": voucher_range,
                },
            }


def normalize_date_filter(value: str | None) -> str | None:
    text = clean_text(value)
    if not text:
        return None
    normalized = text.replace("/", "-").replace(".", "-")
    try:
        return datetime.strptime(normalized, "%Y-%m-%d").date().isoformat()
    except ValueError as exc:
        raise HTTPException(status_code=400, detail=f"잘못된 날짜 형식입니다: {value}") from exc
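
# Illustrative sketch (documentation only): filter inputs accept ., / or -
# separators, and anything else is rejected with a 400 rather than silently
# widening the query. _example_normalize_date_filter is a hypothetical helper.
def _example_normalize_date_filter() -> None:
    assert normalize_date_filter("2024.03.05") == "2024-03-05"
    assert normalize_date_filter("") is None
    try:
        normalize_date_filter("2024년 3월")
    except HTTPException as exc:
        assert exc.status_code == 400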
def fetch_project_metrics(limit: int = 500, start_date: str | None = None, end_date: str | None = None) -> list[dict[str, object]]:
    start_date = normalize_date_filter(start_date)
    end_date = normalize_date_filter(end_date)
    with get_conn() as conn:
        with conn.cursor() as cur:
            cur.execute(
                """
                WITH project_base AS (
                    SELECT
                        CASE
                            WHEN COALESCE(project_code, '') <> '' THEN project_code
                            ELSE regexp_replace(
                                lower(COALESCE(NULLIF(project_name, ''), NULLIF(display_name, ''), NULLIF(intranet_name, ''), '')),
                                '[^0-9a-z가-힣]+',
                                '',
                                'g'
                            )
                        END AS project_key,
                        project_code,
                        project_name,
                        display_name,
                        business_area,
                        business_subarea
                    FROM integration_projects
                ),
                work_by_project AS (
                    SELECT
                        CASE
                            WHEN COALESCE(project_code, '') <> '' THEN project_code
                            ELSE regexp_replace(
                                lower(COALESCE(NULLIF(project_name, ''), '')),
                                '[^0-9a-z가-힣]+',
                                '',
                                'g'
                            )
                        END AS project_key,
                        COALESCE(project_code, '') AS project_code,
                        COALESCE(NULLIF(project_name, ''), COALESCE(project_code, '')) AS project_name,
                        SUM(hours) AS total_hours,
                        SUM(overtime_hours_adjusted) AS overtime_hours,
                        COUNT(DISTINCT work_log_id) AS work_log_count
                    FROM integration_work_log_segments
                    JOIN integration_work_logs ON integration_work_logs.id = integration_work_log_segments.work_log_id
                    WHERE (%s::date IS NULL OR integration_work_logs.work_date >= %s::date)
                      AND (%s::date IS NULL OR integration_work_logs.work_date <= %s::date)
                    GROUP BY 1, 2, 3
                ),
                voucher_by_project AS (
                    SELECT
                        CASE
                            WHEN COALESCE(project_code, '') <> '' THEN project_code
                            ELSE regexp_replace(
                                lower(COALESCE(NULLIF(project_name, ''), '')),
                                '[^0-9a-z가-힣]+',
                                '',
                                'g'
                            )
                        END AS project_key,
                        COALESCE(project_code, '') AS project_code,
                        COALESCE(NULLIF(project_name, ''), COALESCE(project_code, '')) AS project_name,
                        SUM(income_amount) AS total_income,
                        SUM(expense_amount) AS total_expense,
                        COUNT(*) AS voucher_count
                    FROM integration_vouchers
                    WHERE (%s::date IS NULL OR COALESCE(issue_date, claim_date) >= %s::date)
                      AND (%s::date IS NULL OR COALESCE(issue_date, claim_date) <= %s::date)
                      AND COALESCE(voucher_type, '') <> '제외'
                    GROUP BY 1, 2, 3
                )
                SELECT
                    COALESCE(p.project_code, w.project_code, v.project_code) AS project_code,
                    COALESCE(NULLIF(p.project_name, ''), NULLIF(v.project_name, ''), NULLIF(w.project_name, ''), COALESCE(p.project_code, w.project_code, v.project_code)) AS project_name,
                    COALESCE(NULLIF(p.display_name, ''), NULLIF(v.project_name, ''), NULLIF(w.project_name, ''), '') AS display_name,
                    COALESCE(p.business_area, '') AS business_area,
                    COALESCE(p.business_subarea, '') AS business_subarea,
                    COALESCE(v.total_income, 0) AS total_income,
                    COALESCE(v.total_expense, 0) AS total_expense,
                    COALESCE(v.total_income, 0) - COALESCE(v.total_expense, 0) AS profit,
                    COALESCE(w.total_hours, 0) AS total_hours,
                    COALESCE(w.overtime_hours, 0) AS overtime_hours,
                    COALESCE(v.voucher_count, 0) AS voucher_count,
                    COALESCE(w.work_log_count, 0) AS work_log_count
                FROM project_base p
                FULL OUTER JOIN work_by_project w ON w.project_key = p.project_key
                FULL OUTER JOIN voucher_by_project v ON v.project_key = COALESCE(p.project_key, w.project_key)
                ORDER BY project_code ASC
                LIMIT %s
                """,
                (start_date, start_date, end_date, end_date, start_date, start_date, end_date, end_date, limit),
            )
            return cur.fetchall()


def fetch_member_metrics(limit: int = 500, start_date: str | None = None, end_date: str | None = None) -> list[dict[str, object]]:
    start_date = normalize_date_filter(start_date)
    end_date = normalize_date_filter(end_date)
    with get_conn() as conn:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT
                    COALESCE(w.employee_id, m.employee_id, '') AS employee_id,
                    COALESCE(NULLIF(w.member_name, ''), m.name, '') AS member_name,
                    COALESCE(m.rank, '') AS rank,
                    COALESCE(NULLIF(w.team_name, ''), m.team, '') AS team_name,
                    COALESCE(NULLIF(w.team_category, ''), m.department, '') AS team_category,
                    COUNT(DISTINCT w.id) AS work_day_count,
                    COALESCE(SUM(s.hours), 0) AS total_hours,
                    COALESCE(SUM(s.overtime_hours_adjusted), 0) AS overtime_hours,
                    COUNT(DISTINCT NULLIF(s.project_code, '')) AS project_count
                FROM integration_work_logs w
                LEFT JOIN integration_work_log_segments s ON s.work_log_id = w.id
                LEFT JOIN members m ON m.id = w.member_id
                WHERE (%s::date IS NULL OR w.work_date >= %s::date)
                  AND (%s::date IS NULL OR w.work_date <= %s::date)
                GROUP BY 1, 2, 3, 4, 5
                ORDER BY member_name ASC
                LIMIT %s
                """,
                (start_date, start_date, end_date, end_date, limit),
            )
            return cur.fetchall()


def fetch_team_metrics(start_date: str | None = None, end_date: str | None = None) -> list[dict[str, object]]:
    start_date = normalize_date_filter(start_date)
    end_date = normalize_date_filter(end_date)
    with get_conn() as conn:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT
                    COALESCE(NULLIF(w.team_name, ''), m.team, '미지정 팀') AS team_name,
                    COALESCE(NULLIF(w.team_category, ''), m.department, '미지정 분류') AS team_category,
                    COUNT(DISTINCT w.member_id) FILTER (WHERE w.member_id IS NOT NULL) AS member_count,
                    COUNT(DISTINCT w.id) AS work_day_count,
                    COALESCE(SUM(s.hours), 0) AS total_hours,
                    COALESCE(SUM(s.overtime_hours_adjusted), 0) AS overtime_hours,
                    COUNT(DISTINCT NULLIF(s.project_code, '')) AS project_count
                FROM integration_work_logs w
                LEFT JOIN integration_work_log_segments s ON s.work_log_id = w.id
                LEFT JOIN members m ON m.id = w.member_id
                WHERE (%s::date IS NULL OR w.work_date >= %s::date)
                  AND (%s::date IS NULL OR w.work_date <= %s::date)
                GROUP BY 1, 2
                ORDER BY total_hours DESC, team_name ASC
                """,
                (start_date, start_date, end_date, end_date),
            )
            return cur.fetchall()


def fetch_project_breakdowns(start_date: str | None = None, end_date: str | None = None) -> dict[str, list[dict[str, object]]]:
    start_date = normalize_date_filter(start_date)
    end_date = normalize_date_filter(end_date)
    with get_conn() as conn:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT
                    COALESCE(project_code, '') AS project_code,
                    COALESCE(NULLIF(project_name, ''), COALESCE(project_code, '')) AS project_name,
                    COALESCE(NULLIF(activity_code, ''), '미지정') AS activity_name,
                    SUM(hours) AS total_hours,
                    COUNT(DISTINCT work_log_id) AS work_log_count
                FROM integration_work_log_segments
                JOIN integration_work_logs ON integration_work_logs.id = integration_work_log_segments.work_log_id
                WHERE (%s::date IS NULL OR integration_work_logs.work_date >= %s::date)
                  AND (%s::date IS NULL OR integration_work_logs.work_date <= %s::date)
                GROUP BY 1, 2, 3
                ORDER BY total_hours DESC, project_code ASC, activity_name ASC
                """,
                (start_date, start_date, end_date, end_date),
            )
            activity_rows = cur.fetchall()

            cur.execute(
                """
                SELECT
                    COALESCE(project_code, '') AS project_code,
                    COALESCE(NULLIF(project_name, ''), COALESCE(project_code, '')) AS project_name,
                    COALESCE(voucher_type, '미분류') AS expense_type,
                    SUM(expense_amount) AS total_expense
                FROM integration_vouchers
                WHERE (%s::date IS NULL OR COALESCE(issue_date, claim_date) >= %s::date)
                  AND (%s::date IS NULL OR COALESCE(issue_date, claim_date) <= %s::date)
                  AND COALESCE(voucher_type, '') <> '제외'
                GROUP BY 1, 2, 3
                ORDER BY total_expense DESC, project_code ASC, expense_type ASC
                """,
                (start_date, start_date, end_date, end_date),
            )
            expense_rows = cur.fetchall()
            return {"activities": activity_rows, "expenses": expense_rows}


def payment_analysis_get_value(row: dict[str, object], candidates: list[str], fallback_idx: int = -1) -> object:
    for candidate in candidates:
        if candidate in row and row[candidate] not in (None, ""):
            return row[candidate]
        normalized = normalize_key_for_source(candidate)
        if normalized:
            key = f"__n_{normalized}"
            if key in row and row[key] not in (None, ""):
                return row[key]
    values = row.get("__values") or []
    if isinstance(values, list) and 0 <= fallback_idx < len(values):
        return values[fallback_idx]
    return ""


def payment_analysis_parse_number(value: object) -> float:
    text = clean_text(value)
    if not text:
        return 0.0
    filtered = re.sub(r"[^0-9.\-]", "", text)
    if not filtered:
        return 0.0
    try:
        return float(filtered)
    except ValueError:
        return 0.0


def round_half_up_to_int(value: float | Decimal) -> int:
    return int(Decimal(str(value)).quantize(Decimal("1"), rounding=ROUND_HALF_UP))


def round_half_up_to_2(value: float | Decimal) -> float:
    return float(Decimal(str(value)).quantize(Decimal("0.01"), rounding=ROUND_HALF_UP))


def calculate_labor_cost(hours: float | Decimal, rate: int | float | Decimal, multiplier: float | Decimal) -> int:
    amount = Decimal(str(hours)) * Decimal(str(rate)) * Decimal(str(multiplier))
    return round_half_up_to_int(amount)
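
# Illustrative sketch (documentation only): labor cost uses decimal half-up
# rounding, which differs from Python's built-in banker's rounding at .5
# boundaries. _example_labor_rounding is a hypothetical helper for illustration.
def _example_labor_rounding() -> None:
    assert round(2.5) == 2  # built-in banker's rounding
    assert round_half_up_to_int(2.5) == 3  # accounting-style half-up
    assert round_half_up_to_2(0.125) == 0.13
    # 1.5h of weekend overtime at the 선임 rate: 1.5 * 35300 * 1.5 = 79425
    assert calculate_labor_cost(1.5, 35300, 1.5) == 79425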
def build_payment_work_rows_from_raw_mh(
    raw_rows: list[list[object]],
    category_by_project_key: dict[str, dict[str, str]],
    fallback_category_by_project_key: dict[str, dict[str, str]],
) -> list[dict[str, object]]:
    project_fields = [
        {"name": "메인업무 프로젝트명", "hour": "메인업무 근무시간", "sub": "메인업무 서브 코드", "overtime": False, "name_idx": 10, "hour_idx": 12},
        {"name": "추가업무1 프로젝트명", "hour": "추가업무1 근무시간", "sub": "추가업무1 서브 코드", "overtime": False, "name_idx": 16, "hour_idx": 18},
        {"name": "추가업무2 프로젝트명", "hour": "추가업무2 근무시간", "sub": "추가업무2 서브 코드", "overtime": False, "name_idx": 21, "hour_idx": 23},
        {"name": "추가업무3 프로젝트명", "hour": "추가업무3 근무시간", "sub": "추가업무3 서브 코드", "overtime": False, "name_idx": 26, "hour_idx": 28},
        {"name": "추가업무4 프로젝트명", "hour": "추가업무4 근무시간", "sub": "추가업무4 서브 코드", "overtime": False, "name_idx": 31, "hour_idx": 33},
        {"name": "추가업무5 프로젝트명", "hour": "추가업무5 근무시간", "sub": "추가업무5 서브 코드", "overtime": False, "name_idx": 36, "hour_idx": 38},
        {"name": "연장근무 프로젝트명", "hour": "연장근무 시간(가공)", "sub": "연장근무 서브 코드", "overtime": True, "name_idx": 41, "hour_idx": 44},
    ]
    work_rows: list[dict[str, object]] = []
    for row_values in raw_rows:
        row = build_parsecsv_like_row_from_values(MH_HEADER_ORDER, row_values)
        overtime_hours_from_row = payment_analysis_parse_number(
            payment_analysis_get_value(
                row,
                ["연장근무 시간(가공)", "연장근무시간(가공)", "연장근무 시간", "연장근무시간", "추가근무"],
                44,
            )
        )
        segments: list[dict[str, object]] = []
        for field in project_fields:
            project_name = clean_text(payment_analysis_get_value(row, [field["name"]], field["name_idx"]))
            hours = payment_analysis_parse_number(payment_analysis_get_value(row, [field["hour"]], field["hour_idx"]))
            activity = clean_text(
                payment_analysis_get_value(
                    row,
                    [field["sub"], field["sub"].replace(" ", ""), "서브 코드"],
                )
            )
            if not project_name or hours <= 0:
                continue
            segments.append(
                {
                    "project_name": project_name,
                    "activity": activity,
                    "hours": hours,
                    "overtime": field["overtime"],
                }
            )
        # If overtime hours were reported without their own project slot, charge
        # them against the main project so they still show up in the analysis.
        if overtime_hours_from_row > 0 and not any(segment["overtime"] for segment in segments):
            fallback_project = clean_text(
                payment_analysis_get_value(
                    row,
                    ["메인업무 프로젝트명", "프로젝트명", "사업명(표출PJT)", "D4"],
                    10,
                )
            )
            fallback_activity = clean_text(
                payment_analysis_get_value(
                    row,
                    ["연장근무 서브 코드", "메인업무 서브 코드", "서브 코드"],
                )
            )
            if fallback_project:
                segments.append(
                    {
                        "project_name": fallback_project,
                        "activity": fallback_activity,
                        "hours": overtime_hours_from_row,
                        "overtime": True,
                    }
                )

        position = clean_text(payment_analysis_get_value(row, ["직책", "직급"]))
        user_state = clean_text(payment_analysis_get_value(row, ["user_state", "User State", "user state", "userstate", "User_State"]))
        weekend_flag = clean_text(payment_analysis_get_value(row, ["주말/지각"]))
        is_weekend = "주말" in user_state or "주말" in weekend_flag
        member_name = clean_text(payment_analysis_get_value(row, ["이름"]))
        work_date = clean_text(payment_analysis_get_value(row, ["근무일자", "날짜", "일자"]))
        imported_labor = payment_analysis_parse_number(payment_analysis_get_value(row, ["산정금액", "인건비"]))
        if "이사" in position or "수석" in position:
            rate = 46600
        elif "책임" in position:
            rate = 40500
        elif "선임" in position:
            rate = 35300
        else:
            rate = 28900

        # When the sheet already carries a labor amount, split it across the
        # segments by weighted hours (1.5x weight for weekend/overtime work)
        # using a largest-remainder scheme so the integer parts sum back to the
        # imported total.
        if imported_labor > 0 and segments:
            weighted = []
            total_weight = 0.0
            for idx, segment in enumerate(segments):
                multiplier = 1.5 if (is_weekend or segment["overtime"]) else 1.0
                weight = segment["hours"] * multiplier
                weighted.append((idx, weight))
                total_weight += weight
            allocations = [0] * len(segments)
            if total_weight > 0:
                raw_allocations = []
                for idx, weight in weighted:
                    raw_value = (imported_labor * weight) / total_weight
                    base = math.floor(raw_value)
                    raw_allocations.append({"idx": idx, "base": base, "frac": raw_value - base})
                remain = int(round(imported_labor - sum(part["base"] for part in raw_allocations)))
                if remain > 0:
                    raw_allocations.sort(key=lambda item: (-item["frac"], item["idx"]))
                    for item in raw_allocations[:remain]:
                        item["base"] += 1
                elif remain < 0:
                    raw_allocations.sort(key=lambda item: (item["frac"], item["idx"]))
                    for item in raw_allocations[: abs(remain)]:
                        item["base"] -= 1
                raw_allocations.sort(key=lambda item: item["idx"])
                allocations = [int(item["base"]) for item in raw_allocations]
        else:
            allocations = []

        d1_from_row = clean_text(payment_analysis_get_value(row, ["D1", "매출/비매출"]))
        d2_from_row = clean_text(payment_analysis_get_value(row, ["D2", "사업분야", "분야"]))
        d3_from_row = clean_text(payment_analysis_get_value(row, ["D3", "세부분야"]))
        for idx, segment in enumerate(segments):
            project_key = normalize_project_key_for_analysis(segment["project_name"])
            matched = category_by_project_key.get(project_key) or fallback_category_by_project_key.get(project_key) or {}
            hours = float(segment["hours"])
            if allocations:
                labor = int(allocations[idx] or 0)
            else:
                multiplier = 1.5 if (is_weekend or segment["overtime"]) else 1.0
                labor = calculate_labor_cost(hours, rate, multiplier)
            parsed_row = {
                "__values": [
                    work_date,
                    member_name,
                    position,
                    user_state,
                    segment["project_name"],
                    segment["activity"],
                    f"{hours:g}",
                    str(labor),
                    clean_text(matched.get("D1") or d1_from_row),
                    clean_text(matched.get("D2") or d2_from_row),
                    clean_text(matched.get("D3") or d3_from_row),
                ],
                "근무일자": work_date,
                "__n_근무일자": work_date,
                "날짜": work_date,
                "__n_날짜": work_date,
                "이름": member_name,
                "__n_이름": member_name,
                "직책": position,
                "__n_직책": position,
                "직급": position,
                "__n_직급": position,
                "user_state": user_state,
                "__n_user_state": user_state,
                "프로젝트명 매칭": clean_text(matched.get("프로젝트명 매칭") or segment["project_name"]),
                "__n_프로젝트명매칭": clean_text(matched.get("프로젝트명 매칭") or segment["project_name"]),
                "프로젝트명": segment["project_name"],
                "__n_프로젝트명": segment["project_name"],
                "서브 코드": segment["activity"],
                "__n_서브코드": segment["activity"],
                "시간": f"{hours:g}",
                "__n_시간": f"{hours:g}",
                "근무시간": f"{hours:g}",
                "__n_근무시간": f"{hours:g}",
                "산정금액": str(labor),
                "__n_산정금액": str(labor),
                "인건비": str(labor),
                "__n_인건비": str(labor),
                "D1": clean_text(matched.get("D1") or d1_from_row),
                "__n_d1": clean_text(matched.get("D1") or d1_from_row),
                "매출/비매출": clean_text(matched.get("D1") or d1_from_row),
                "__n_매출비매출": clean_text(matched.get("D1") or d1_from_row),
                "D2": clean_text(matched.get("D2") or d2_from_row),
                "__n_d2": clean_text(matched.get("D2") or d2_from_row),
                "사업분야": clean_text(matched.get("D2") or d2_from_row),
                "__n_사업분야": clean_text(matched.get("D2") or d2_from_row),
                "분야": clean_text(matched.get("D2") or d2_from_row),
                "__n_분야": clean_text(matched.get("D2") or d2_from_row),
                "D3": clean_text(matched.get("D3") or d3_from_row),
                "__n_d3": clean_text(matched.get("D3") or d3_from_row),
                "세부분야": clean_text(matched.get("D3") or d3_from_row),
                "__n_세부분야": clean_text(matched.get("D3") or d3_from_row),
                "projectName": segment["project_name"],
                "workDate": work_date,
                "workerName": member_name,
                "position": position,
                "activity": segment["activity"],
                "hours": hours,
                "labor": labor,
                "d1": clean_text(matched.get("D1") or d1_from_row),
                "d2": clean_text(matched.get("D2") or d2_from_row),
                "d3": clean_text(matched.get("D3") or d3_from_row),
            }
            work_rows.append(parsed_row)
    return work_rows
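
# Illustrative standalone sketch of the largest-remainder split used above:
# floor every weighted share, then hand the leftover units to the largest
# fractional parts so the integer parts always sum back to the total.
# _example_largest_remainder_split is a hypothetical helper for illustration.
def _example_largest_remainder_split() -> None:
    total, weights = 100, [1.0, 1.0, 1.0]
    shares = [math.floor(total * w / sum(weights)) for w in weights]
    fracs = sorted(range(len(weights)), key=lambda i: -(total * weights[i] / sum(weights) - shares[i]))
    for i in fracs[: total - sum(shares)]:
        shares[i] += 1
    assert shares == [34, 33, 33] and sum(shares) == total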


def fetch_payment_source_rows() -> dict[str, object]:
    summary = fetch_integration_summary()
    with get_conn() as conn:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT row_json
                FROM integration_raw_payment_rows
                ORDER BY row_index ASC
                """
            )
            expense_rows = [build_parsecsv_like_row(PAYMENT_HEADER_ORDER, dict(row["row_json"])) for row in cur.fetchall()]
            # Primary category lookup, built from the payment rows themselves.
            category_by_project_key: dict[str, dict[str, str]] = {}
            for row in expense_rows:
                project_name = clean_text(row.get("사업명(인트라넷기준)") or row.get("사업명(인트라넷 기준)") or "")
                project_key = normalize_project_key_for_analysis(project_name)
                if not project_key:
                    continue
                category_by_project_key[project_key] = {
                    "D1": clean_text(row.get("대분류") or row.get("D1") or row.get("매출/비매출") or ""),
                    "D2": clean_text(row.get("중분류") or row.get("D2") or ""),
                    "D3": clean_text(row.get("소분류") or row.get("D3") or ""),
                    "프로젝트명 매칭": project_name,
                }
            # Fallback lookup: curated project-to-category mappings.
            cur.execute(
                """
                SELECT normalized_project_key, project_name, mapped_d1, mapped_d2, mapped_d3
                FROM integration_project_category_mappings
                ORDER BY project_name ASC
                """
            )
            fallback_category_by_project_key = {
                clean_text(row["normalized_project_key"]): {
                    "D1": clean_text(row["mapped_d1"]),
                    "D2": clean_text(row["mapped_d2"]),
                    "D3": clean_text(row["mapped_d3"]),
                    "프로젝트명 매칭": clean_text(row["project_name"]),
                }
                for row in cur.fetchall()
            }
            cur.execute(
                """
                SELECT
                    w.work_date,
                    w.member_name,
                    w.title,
                    w.user_state,
                    w.weekend_late_flag,
                    s.project_name,
                    s.activity_code,
                    s.hours,
                    s.overtime_hours_adjusted,
                    s.is_overtime
                FROM integration_work_log_segments s
                JOIN integration_work_logs w ON w.id = s.work_log_id
                ORDER BY w.work_date ASC, w.member_name ASC, s.project_name ASC, s.id ASC
                """
            )
            work_rows = []
            for row in cur.fetchall():
                project_name = clean_text(row["project_name"])
                project_key = normalize_project_key_for_analysis(project_name)
                matched = category_by_project_key.get(project_key) or fallback_category_by_project_key.get(project_key) or {}
                position = clean_text(row["title"])
                raw_hours = float(row["hours"] or 0)
                adjusted_overtime_hours = float(row["overtime_hours_adjusted"] or 0)
                # Overtime segments bill their adjusted hours; everything else
                # bills the raw logged hours.
                hours = adjusted_overtime_hours if bool(row["is_overtime"]) else raw_hours
                hours = round_half_up_to_2(hours)
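                # Hourly rates by job title; these are the hard-coded values
                # this module ships with, not a company-wide constant. As a
                # rough worked example under these numbers, a 책임 logging a
                # 2-hour weekend segment is costed as
                # calculate_labor_cost(2, 40500, 1.5), i.e. about 2 * 40500 * 1.5
                # before whatever rounding calculate_labor_cost applies.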
                rate = 28900
                if "이사" in position or "수석" in position:
                    rate = 46600
                elif "책임" in position:
                    rate = 40500
                elif "선임" in position:
                    rate = 35300
                labor = calculate_labor_cost(
                    hours,
                    rate,
                    1.5 if bool(row["is_overtime"]) or "주말" in clean_text(row["weekend_late_flag"]) else 1,
                )
                work_date = clean_text(row["work_date"])
                member_name = clean_text(row["member_name"])
                user_state = clean_text(row["user_state"])
                activity = clean_text(row["activity_code"])
                matched_project = clean_text(matched.get("프로젝트명 매칭") or project_name)
                d1 = clean_text(matched.get("D1", ""))
                d2 = clean_text(matched.get("D2", ""))
                d3 = clean_text(matched.get("D3", ""))
                hours_text = f"{hours:g}"
                labor_text = str(labor)
                parsed_row = {
                    "__values": [
                        work_date,
                        member_name,
                        position,
                        user_state,
                        project_name,
                        activity,
                        hours_text,
                        labor_text,
                        d1,
                        d2,
                        d3,
                    ],
                    "근무일자": work_date,
                    "__n_근무일자": work_date,
                    "날짜": work_date,
                    "__n_날짜": work_date,
                    "이름": member_name,
                    "__n_이름": member_name,
                    "직책": position,
                    "__n_직책": position,
                    "직급": position,
                    "__n_직급": position,
                    "user_state": user_state,
                    "__n_user_state": user_state,
                    "프로젝트명 매칭": matched_project,
                    "__n_프로젝트명매칭": matched_project,
                    "프로젝트명": project_name,
                    "__n_프로젝트명": project_name,
                    "서브 코드": activity,
                    "__n_서브코드": activity,
                    "시간": hours_text,
                    "__n_시간": hours_text,
                    "근무시간": hours_text,
                    "__n_근무시간": hours_text,
                    "산정금액": labor_text,
                    "__n_산정금액": labor_text,
                    "인건비": labor_text,
                    "__n_인건비": labor_text,
                    "D1": d1,
                    "__n_d1": d1,
                    "D2": d2,
                    "__n_d2": d2,
                    "D3": d3,
                    "__n_d3": d3,
                }
                work_rows.append(parsed_row)
    return {
        "expense_rows": expense_rows,
        "work_rows": work_rows,
        "date_ranges": summary["date_ranges"],
    }
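
# fetch_payment_source_rows() pairs the raw expense rows (parse-CSV shaped via
# PAYMENT_HEADER_ORDER) with per-segment work rows derived from the work-log
# tables; "date_ranges" is passed through from fetch_integration_summary()
# unchanged.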


def fetch_mh_source_rows() -> dict[str, object]:
    summary = fetch_integration_summary()
    with get_conn() as conn:
        with conn.cursor() as cur:
            cur.execute(
                """
                SELECT row_values_json
                FROM integration_raw_mh_rows
                ORDER BY row_index ASC
                """
            )
            sheet_rows = [list(row["row_values_json"]) for row in cur.fetchall()]
            cur.execute(
                """
                SELECT row_values_json
                FROM integration_raw_mh_pm_rows
                ORDER BY row_index ASC
                """
            )
            pm_rows = [list(row["row_values_json"]) for row in cur.fetchall()]
    return {
        "teamData": sheet_rows,
        "pmSheet": pm_rows,
        "date_ranges": summary["date_ranges"],
    }
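
# Both sheets come back as plain lists of lists in spreadsheet row order, so
# the front-end can render them the way it rendered the original MH workbook
# (the column layout is presumably the one MH_HEADER_ORDER describes, since
# that is the header row this module ships for MH data).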


@app.on_event("startup")
def startup() -> None:
    UPLOAD_DIR.mkdir(parents=True, exist_ok=True)
    LEGACY_STATIC_DIR.mkdir(parents=True, exist_ok=True)
    INCOMING_FILES_DIR.mkdir(parents=True, exist_ok=True)
    init_db()
    # Keep auth.users in step with the members table on every boot.
    with get_conn() as conn:
        with conn.cursor() as cur:
            sync_auth_users_from_members(cur)
        conn.commit()


app.mount("/legacy/static", StaticFiles(directory=LEGACY_STATIC_DIR, check_dir=False), name="legacy-static")
@app.get("/api/health")
|
|
def health() -> dict[str, object]:
|
|
checks = {
|
|
"upload_dir": UPLOAD_DIR.exists(),
|
|
}
|
|
|
|
try:
|
|
member_count = get_member_count()
|
|
checks["database"] = True
|
|
except Exception:
|
|
member_count = None
|
|
checks["database"] = False
|
|
|
|
status = "ok" if all(checks.values()) else "degraded"
|
|
return {
|
|
"status": status,
|
|
"checks": checks,
|
|
"member_count": member_count,
|
|
"timestamp": datetime.utcnow().isoformat() + "Z",
|
|
}
|
|
|
|
|
|
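
# Usage sketch (hypothetical host; example values shown):
#   curl http://localhost:8000/api/health
#   {"status": "ok", "checks": {"upload_dir": true, "database": true},
#    "member_count": 42, "timestamp": "2024-01-01T09:00:00.000000Z"}
# "status" drops to "degraded" whenever any individual check fails.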
@app.post("/api/auth/login")
|
|
def auth_login(
|
|
request: Request,
|
|
username: str = Form(...),
|
|
password: str = Form(...),
|
|
) -> dict[str, object]:
|
|
normalized_username = username.strip().lower()
|
|
if not normalized_username or not password.strip():
|
|
raise HTTPException(status_code=400, detail="사번과 비밀번호를 입력해주세요.")
|
|
|
|
ip_address = request.client.host if request.client else None
|
|
user_agent = request.headers.get("user-agent", "")
|
|
|
|
with get_conn() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(
|
|
"""
|
|
SELECT u.id, u.username, u.password_hash, u.display_name, u.role, u.member_id, u.is_active,
|
|
m.rank
|
|
FROM auth.users u
|
|
LEFT JOIN members m ON m.id = u.member_id
|
|
WHERE LOWER(u.username) = %s
|
|
""",
|
|
(normalized_username,),
|
|
)
|
|
user = cur.fetchone()
|
|
|
|
if user is None:
|
|
cur.execute(
|
|
"""
|
|
INSERT INTO auth.login_audit_logs (username, success, failure_reason, ip_address, user_agent)
|
|
VALUES (%s, FALSE, %s, %s, %s)
|
|
""",
|
|
(normalized_username, "unknown_user", ip_address, user_agent),
|
|
)
|
|
conn.commit()
|
|
raise HTTPException(status_code=401, detail="사번 또는 비밀번호가 올바르지 않습니다.")
|
|
|
|
if not bool(user.get("is_active")):
|
|
cur.execute(
|
|
"""
|
|
INSERT INTO auth.login_audit_logs (username, user_id, success, failure_reason, ip_address, user_agent)
|
|
VALUES (%s, %s, FALSE, %s, %s, %s)
|
|
""",
|
|
(normalized_username, int(user["id"]), "inactive_user", ip_address, user_agent),
|
|
)
|
|
conn.commit()
|
|
raise HTTPException(status_code=403, detail="비활성화된 계정입니다.")
|
|
|
|
if not verify_password(password, str(user.get("password_hash") or "")):
|
|
cur.execute(
|
|
"""
|
|
INSERT INTO auth.login_audit_logs (username, user_id, success, failure_reason, ip_address, user_agent)
|
|
VALUES (%s, %s, FALSE, %s, %s, %s)
|
|
""",
|
|
(normalized_username, int(user["id"]), "invalid_password", ip_address, user_agent),
|
|
)
|
|
conn.commit()
|
|
raise HTTPException(status_code=401, detail="사번 또는 비밀번호가 올바르지 않습니다.")
|
|
|
|
expires_at = datetime.now(timezone.utc) + timedelta(hours=AUTH_SESSION_HOURS)
|
|
session_id = uuid.uuid4()
|
|
cur.execute(
|
|
"""
|
|
INSERT INTO auth.sessions (id, user_id, expires_at, ip_address, user_agent)
|
|
VALUES (%s, %s, %s, %s, %s)
|
|
""",
|
|
(session_id, int(user["id"]), expires_at, ip_address, user_agent),
|
|
)
|
|
cur.execute(
|
|
"UPDATE auth.users SET last_login_at = NOW(), updated_at = NOW() WHERE id = %s",
|
|
(int(user["id"]),),
|
|
)
|
|
cur.execute(
|
|
"""
|
|
INSERT INTO auth.login_audit_logs (username, user_id, success, failure_reason, ip_address, user_agent)
|
|
VALUES (%s, %s, TRUE, NULL, %s, %s)
|
|
""",
|
|
(normalized_username, int(user["id"]), ip_address, user_agent),
|
|
)
|
|
conn.commit()
|
|
|
|
return build_auth_session_payload(user, session_id, expires_at)
|
|
|
|
|
|
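
# Usage sketch (hypothetical host and credentials):
#   curl -X POST http://localhost:8000/api/auth/login \
#        -F "username=E1234" -F "password=1111"
# On success the new session id doubles as the bearer token expected by
# /api/auth/me and /api/auth/logout below.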
@app.post("/api/auth/logout")
|
|
def auth_logout(authorization: str | None = Header(default=None)) -> dict[str, bool]:
|
|
token = extract_bearer_token(authorization)
|
|
if not token:
|
|
return {"ok": True}
|
|
|
|
with get_conn() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(
|
|
"""
|
|
UPDATE auth.sessions
|
|
SET revoked_at = NOW()
|
|
WHERE id = %s
|
|
AND revoked_at IS NULL
|
|
""",
|
|
(token,),
|
|
)
|
|
conn.commit()
|
|
return {"ok": True}
|
|
|
|
|
|
@app.get("/api/auth/me")
|
|
def auth_me(authorization: str | None = Header(default=None)) -> dict[str, object]:
|
|
token = extract_bearer_token(authorization)
|
|
if not token:
|
|
raise HTTPException(status_code=401, detail="인증 정보가 없습니다.")
|
|
|
|
with get_conn() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(
|
|
"""
|
|
SELECT s.id AS session_id, s.expires_at, s.revoked_at,
|
|
u.id, u.username, u.display_name, u.role, u.member_id, u.is_active,
|
|
m.rank
|
|
FROM auth.sessions s
|
|
JOIN auth.users u ON u.id = s.user_id
|
|
LEFT JOIN members m ON m.id = u.member_id
|
|
WHERE s.id = %s
|
|
""",
|
|
(token,),
|
|
)
|
|
row = cur.fetchone()
|
|
|
|
if row is None or row.get("revoked_at") is not None:
|
|
raise HTTPException(status_code=401, detail="세션이 유효하지 않습니다.")
|
|
|
|
expires_at = row["expires_at"]
|
|
now_utc = datetime.now(timezone.utc)
|
|
if expires_at is None or expires_at <= now_utc:
|
|
raise HTTPException(status_code=401, detail="세션이 만료되었습니다.")
|
|
|
|
if not bool(row.get("is_active")):
|
|
raise HTTPException(status_code=403, detail="비활성화된 계정입니다.")
|
|
|
|
return build_auth_session_payload(row, uuid.UUID(str(row["session_id"])), expires_at)
|
|
|
|
|
|
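
# Usage sketch (hypothetical token value):
#   curl http://localhost:8000/api/auth/me \
#        -H "Authorization: Bearer 7f3c9a1e-...-b2d4"
# Unknown, revoked, and expired sessions all answer 401; a valid session on a
# deactivated account answers 403.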
@app.post("/api/mock-login")
|
|
def mock_login(username: str = Form(...), password: str = Form(...)) -> dict[str, object]:
|
|
if not MOCK_LOGIN_ENABLED:
|
|
raise HTTPException(status_code=403, detail="Mock login is disabled.")
|
|
if not username.strip() or not password.strip():
|
|
raise HTTPException(status_code=400, detail="Username and password are required.")
|
|
return {
|
|
"user": {
|
|
"username": username.strip(),
|
|
"display_name": username.strip(),
|
|
"role": "admin",
|
|
},
|
|
"session_expires_at": datetime.utcnow().isoformat() + "Z",
|
|
}
|
|
|
|
|
|
@app.get("/api/members")
|
|
def list_members() -> dict[str, list[dict[str, object]]]:
|
|
return {"items": fetch_members()}
|
|
|
|
|
|
@app.post("/api/members")
|
|
def create_member(payload: MemberPayload) -> dict[str, object]:
|
|
with get_conn() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute("SELECT COALESCE(MAX(sort_order), -1) + 1 AS next_order FROM members")
|
|
next_order = int(cur.fetchone()["next_order"])
|
|
cur.execute(
|
|
"""
|
|
INSERT INTO members (
|
|
name, employee_id, company, rank, role, department, grp, division, team, cell,
|
|
work_status, work_time, phone, email, seat_label, photo_url, sort_order
|
|
)
|
|
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
|
|
RETURNING id, name, employee_id, company, rank, role, department, grp, division, team, cell,
|
|
work_status, work_time, phone, email, seat_label, photo_url,
|
|
sort_order, created_at, updated_at
|
|
""",
|
|
serialize_member_payload(payload, payload.sort_order if payload.sort_order is not None else next_order),
|
|
)
|
|
member = cur.fetchone()
|
|
sync_auth_users_from_members(cur)
|
|
conn.commit()
|
|
return {"item": member}
|
|
|
|
|
|
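
# Usage sketch (hypothetical values; only "name" is strictly required by
# MemberPayload):
#   curl -X POST http://localhost:8000/api/members \
#        -H "Content-Type: application/json" \
#        -d '{"name": "홍길동", "team": "플랫폼팀"}'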
@app.put("/api/members/bulk-sync")
|
|
def bulk_sync_members(payload: MemberBulkPayload) -> dict[str, list[dict[str, object]]]:
|
|
return {"items": replace_members(payload.items)}
|
|
|
|
|
|
@app.put("/api/members/{member_id}")
|
|
def update_member(member_id: int, payload: MemberPayload) -> dict[str, object]:
|
|
with get_conn() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute(
|
|
"""
|
|
UPDATE members
|
|
SET name = %s,
|
|
employee_id = %s,
|
|
company = %s,
|
|
rank = %s,
|
|
role = %s,
|
|
department = %s,
|
|
grp = %s,
|
|
division = %s,
|
|
team = %s,
|
|
cell = %s,
|
|
work_status = %s,
|
|
work_time = %s,
|
|
phone = %s,
|
|
email = %s,
|
|
seat_label = %s,
|
|
photo_url = %s,
|
|
sort_order = COALESCE(%s, sort_order),
|
|
updated_at = NOW()
|
|
WHERE id = %s
|
|
RETURNING id, name, employee_id, company, rank, role, department, grp, division, team, cell,
|
|
work_status, work_time, phone, email, seat_label, photo_url,
|
|
sort_order, created_at, updated_at
|
|
""",
|
|
(*serialize_member_payload(payload, payload.sort_order or 0)[:-1], payload.sort_order, member_id),
|
|
)
|
|
member = cur.fetchone()
|
|
if member is None:
|
|
raise HTTPException(status_code=404, detail="Member not found.")
|
|
sync_auth_users_from_members(cur)
|
|
conn.commit()
|
|
return {"item": member}
|
|
|
|
|
|


@app.delete("/api/members/{member_id}")
def delete_member(member_id: int) -> dict[str, bool]:
    with get_conn() as conn:
        with conn.cursor() as cur:
            cur.execute("DELETE FROM members WHERE id = %s", (member_id,))
            deleted = cur.rowcount > 0
            if deleted:
                sync_auth_users_from_members(cur)
                conn.commit()
    if not deleted:
        raise HTTPException(status_code=404, detail="Member not found.")
    return {"ok": True}
@app.post("/api/members/import")
|
|
async def import_members(file: UploadFile = File(...)) -> dict[str, list[dict[str, object]]]:
|
|
content = await file.read()
|
|
items = parse_import_rows(file, content)
|
|
return {"items": replace_members(items)}
|
|
|
|
|
|
@app.post("/api/integration/import")
|
|
def import_integration_data() -> dict[str, object]:
|
|
return import_integration_sources()
|
|
|
|
|
|
@app.get("/api/integration/summary")
|
|
def integration_summary() -> dict[str, object]:
|
|
return fetch_integration_summary()
|
|
|
|
|
|
@app.get("/api/integration/projects")
|
|
def integration_projects(limit: int = 500, start_date: str | None = None, end_date: str | None = None) -> dict[str, list[dict[str, object]]]:
|
|
safe_limit = max(1, min(limit, 5000))
|
|
return {"items": fetch_project_metrics(safe_limit, start_date=start_date, end_date=end_date)}
|
|
|
|
|
|
@app.get("/api/integration/members")
|
|
def integration_members(limit: int = 500, start_date: str | None = None, end_date: str | None = None) -> dict[str, list[dict[str, object]]]:
|
|
safe_limit = max(1, min(limit, 5000))
|
|
return {"items": fetch_member_metrics(safe_limit, start_date=start_date, end_date=end_date)}
|
|
|
|
|
|
@app.get("/api/integration/teams")
|
|
def integration_teams(start_date: str | None = None, end_date: str | None = None) -> dict[str, list[dict[str, object]]]:
|
|
return {"items": fetch_team_metrics(start_date=start_date, end_date=end_date)}
|
|
|
|
|
|
@app.get("/api/integration/project-breakdowns")
|
|
def integration_project_breakdowns(start_date: str | None = None, end_date: str | None = None) -> dict[str, list[dict[str, object]]]:
|
|
return fetch_project_breakdowns(start_date=start_date, end_date=end_date)
|
|
|
|
|
|
@app.get("/api/integration/payment-source")
|
|
def integration_payment_source() -> dict[str, object]:
|
|
return fetch_payment_source_rows()
|
|
|
|
|
|
@app.get("/api/integration/mh-source")
|
|
def integration_mh_source() -> dict[str, object]:
|
|
return fetch_mh_source_rows()
|
|
|
|
|
|
@app.get("/api/integration/mh-workbook")
|
|
def integration_mh_workbook() -> FileResponse:
|
|
target = INCOMING_FILES_DIR / "MH.xlsx"
|
|
if not target.exists():
|
|
raise HTTPException(status_code=404, detail="MH workbook not found.")
|
|
return FileResponse(
|
|
target,
|
|
media_type="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
|
|
filename="MH.xlsx",
|
|
)
|
|
|
|
|
|
@app.post("/api/uploads/profile-photo")
|
|
def upload_profile_photo(file: UploadFile = File(...), member_name: str = Form("")) -> dict[str, str]:
|
|
suffix = Path(file.filename or "").suffix.lower()
|
|
if suffix not in {".png", ".jpg", ".jpeg", ".webp", ".gif"}:
|
|
raise HTTPException(status_code=400, detail="Only image files are allowed.")
|
|
stem = member_name.strip().replace(" ", "-") or "member"
|
|
filename = f"{datetime.utcnow().strftime('%Y%m%d%H%M%S')}-{stem}-{uuid.uuid4().hex[:8]}{suffix}"
|
|
target = UPLOAD_DIR / filename
|
|
with target.open("wb") as out_file:
|
|
shutil.copyfileobj(file.file, out_file)
|
|
return {"url": f"/uploads/{filename}"}
|
|
|
|
|
|
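
# Usage sketch (hypothetical file):
#   curl -X POST http://localhost:8000/api/uploads/profile-photo \
#        -F "file=@photo.png" -F "member_name=홍길동"
# responds with {"url": "/uploads/<generated filename>"}, which the
# GET /uploads/{filename} route at the bottom of this module serves back.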
@app.post("/api/uploads/seat-map-image")
|
|
def upload_seat_map_image(file: UploadFile = File(...), seat_map_name: str = Form("")) -> dict[str, str]:
|
|
suffix = Path(file.filename or "").suffix.lower()
|
|
if suffix not in {".png", ".jpg", ".jpeg", ".webp", ".gif"}:
|
|
raise HTTPException(status_code=400, detail="Only image files are allowed.")
|
|
stem = seat_map_name.strip().replace(" ", "-") or "seat-map"
|
|
filename = f"seat-map-{datetime.utcnow().strftime('%Y%m%d%H%M%S')}-{stem}-{uuid.uuid4().hex[:8]}{suffix}"
|
|
target = UPLOAD_DIR / filename
|
|
with target.open("wb") as out_file:
|
|
shutil.copyfileobj(file.file, out_file)
|
|
return {"url": f"/uploads/{filename}"}
|
|
|
|
|
|
@app.post("/api/seat-maps/dxf")
|
|
async def create_dxf_seat_map(file: UploadFile = File(...), name: str = Form(...)) -> dict[str, object]:
|
|
suffix = Path(file.filename or "").suffix.lower()
|
|
if suffix != ".dxf":
|
|
raise HTTPException(status_code=400, detail="DXF 파일만 업로드할 수 있습니다.")
|
|
|
|
stem = name.strip().replace(" ", "-") or "seat-map"
|
|
filename = f"seat-map-{datetime.utcnow().strftime('%Y%m%d%H%M%S')}-{stem}-{uuid.uuid4().hex[:8]}{suffix}"
|
|
target = UPLOAD_DIR / filename
|
|
content = await file.read()
|
|
with target.open("wb") as out_file:
|
|
out_file.write(content)
|
|
|
|
try:
|
|
metadata, slots = parse_dxf_layout(target)
|
|
except Exception:
|
|
raise
|
|
|
|
payload = SeatMapPayload(
|
|
name=name.strip(),
|
|
source_type="dxf",
|
|
source_url=f"/uploads/{filename}",
|
|
image_url="",
|
|
preview_svg=metadata["preview_svg"],
|
|
view_box_min_x=metadata["view_box_min_x"],
|
|
view_box_min_y=metadata["view_box_min_y"],
|
|
view_box_width=metadata["view_box_width"],
|
|
view_box_height=metadata["view_box_height"],
|
|
image_width=None,
|
|
image_height=None,
|
|
grid_rows=1,
|
|
grid_cols=1,
|
|
cell_gap=0,
|
|
is_active=True,
|
|
)
|
|
|
|
with get_conn() as conn:
|
|
with conn.cursor() as cur:
|
|
cur.execute("UPDATE seat_maps SET is_active = FALSE, updated_at = NOW() WHERE is_active = TRUE")
|
|
cur.execute(
|
|
"""
|
|
INSERT INTO seat_maps (
|
|
name, source_type, source_url, preview_svg,
|
|
view_box_min_x, view_box_min_y, view_box_width, view_box_height,
|
|
image_url, image_width, image_height, grid_rows, grid_cols, cell_gap, is_active
|
|
)
|
|
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
|
|
RETURNING id, name, source_type, source_url, preview_svg,
|
|
view_box_min_x, view_box_min_y, view_box_width, view_box_height,
|
|
image_url, image_width, image_height, grid_rows, grid_cols,
|
|
cell_gap, is_active, created_at, updated_at
|
|
""",
|
|
serialize_seat_map_payload(payload),
|
|
)
|
|
seat_map = cur.fetchone()
|
|
for slot in slots:
|
|
cur.execute(
|
|
"""
|
|
INSERT INTO seat_slots (seat_map_id, slot_key, label, x, y, rotation, layer_name)
|
|
VALUES (%s, %s, %s, %s, %s, %s, %s)
|
|
""",
|
|
(
|
|
seat_map["id"],
|
|
slot["slot_key"],
|
|
slot["label"],
|
|
slot["x"],
|
|
slot["y"],
|
|
slot["rotation"],
|
|
slot["layer_name"],
|
|
),
|
|
)
|
|
conn.commit()
|
|
|
|
return fetch_seat_layout(int(seat_map["id"]))
|
|
|
|
|
|
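
# Usage sketch (hypothetical file and name):
#   curl -X POST http://localhost:8000/api/seat-maps/dxf \
#        -F "file=@floorplan.dxf" -F "name=본사 3층"
# The response is fetch_seat_layout() for the newly created map, including the
# seat slots extracted from the DXF layers.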
@app.get("/api/seat-maps/active")
|
|
def get_active_seat_map(office_key: str | None = None) -> dict[str, dict[str, object]]:
|
|
requested_key = (office_key or "").strip() or FIXED_OFFICE_SOURCE_KEY
|
|
seat_map = ensure_fixed_office_seat_map(requested_key, activate=requested_key == FIXED_OFFICE_SOURCE_KEY)
|
|
if seat_map is None:
|
|
raise HTTPException(status_code=404, detail="Active seat map not found.")
|
|
return {"item": seat_map}
|
|
|
|
|
|
@app.post("/api/seat-maps")
|
|
def create_seat_map(payload: SeatMapPayload) -> dict[str, dict[str, object]]:
|
|
with get_conn() as conn:
|
|
with conn.cursor() as cur:
|
|
if payload.is_active:
|
|
cur.execute("UPDATE seat_maps SET is_active = FALSE, updated_at = NOW() WHERE is_active = TRUE")
|
|
cur.execute(
|
|
"""
|
|
INSERT INTO seat_maps (
|
|
name, source_type, source_url, preview_svg,
|
|
view_box_min_x, view_box_min_y, view_box_width, view_box_height,
|
|
image_url, image_width, image_height, grid_rows, grid_cols, cell_gap, is_active
|
|
)
|
|
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
|
|
RETURNING id, name, source_type, source_url, preview_svg,
|
|
view_box_min_x, view_box_min_y, view_box_width, view_box_height,
|
|
image_url, image_width, image_height, grid_rows, grid_cols,
|
|
cell_gap, is_active, created_at, updated_at
|
|
""",
|
|
serialize_seat_map_payload(payload),
|
|
)
|
|
seat_map = cur.fetchone()
|
|
conn.commit()
|
|
return {"item": seat_map}
|
|
|
|
|
|
@app.put("/api/seat-maps/{seat_map_id}")
|
|
def update_seat_map(seat_map_id: int, payload: SeatMapPayload) -> dict[str, dict[str, object]]:
|
|
with get_conn() as conn:
|
|
with conn.cursor() as cur:
|
|
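            # Grid-shrink guard for non-DXF maps: reject the update if any
            # already-placed seat would fall outside the new bounds. Indices
            # are zero-based, so with grid_rows = 3 a seat at row_index 3 is
            # out of range.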
            if payload.source_type != "dxf":
                cur.execute(
                    """
                    SELECT COUNT(*) AS count
                    FROM seat_positions
                    WHERE seat_map_id = %s
                      AND (row_index >= %s OR col_index >= %s)
                    """,
                    (seat_map_id, payload.grid_rows, payload.grid_cols),
                )
                out_of_bounds_count = int(cur.fetchone()["count"])
                if out_of_bounds_count > 0:
                    raise HTTPException(status_code=400, detail="현재 배치된 좌석이 새 그리드 범위를 벗어납니다. 먼저 좌석 배치를 정리하세요.")
            if payload.is_active:
                cur.execute("UPDATE seat_maps SET is_active = FALSE, updated_at = NOW() WHERE is_active = TRUE AND id <> %s", (seat_map_id,))
            cur.execute(
                """
                UPDATE seat_maps
                SET name = %s,
                    source_type = %s,
                    source_url = %s,
                    preview_svg = %s,
                    view_box_min_x = %s,
                    view_box_min_y = %s,
                    view_box_width = %s,
                    view_box_height = %s,
                    image_url = %s,
                    image_width = %s,
                    image_height = %s,
                    grid_rows = %s,
                    grid_cols = %s,
                    cell_gap = %s,
                    is_active = %s,
                    updated_at = NOW()
                WHERE id = %s
                RETURNING id, name, source_type, source_url, preview_svg,
                          view_box_min_x, view_box_min_y, view_box_width, view_box_height,
                          image_url, image_width, image_height, grid_rows, grid_cols,
                          cell_gap, is_active, created_at, updated_at
                """,
                (*serialize_seat_map_payload(payload), seat_map_id),
            )
            seat_map = cur.fetchone()
            if seat_map is None:
                raise HTTPException(status_code=404, detail="Seat map not found.")
        conn.commit()
    return {"item": seat_map}
@app.get("/api/seat-maps/{seat_map_id}/layout")
|
|
def get_seat_layout(seat_map_id: int) -> dict[str, object]:
|
|
return fetch_seat_layout(seat_map_id)
|
|
|
|
|
|
@app.get("/api/seat-maps/{seat_map_id}/viewer")
|
|
def get_seat_map_viewer(seat_map_id: int) -> HTMLResponse:
|
|
layout = fetch_seat_layout(seat_map_id)
|
|
seat_map = layout.get("seat_map") or {}
|
|
if seat_map.get("source_type") not in {"dxf", "fixed_html"}:
|
|
raise HTTPException(status_code=400, detail="Viewer is only available for supported seat maps.")
|
|
return HTMLResponse(build_center_chair_viewer_html(layout))
|
|
|
|
|
|
@app.put("/api/seat-maps/{seat_map_id}/layout")
|
|
def update_seat_layout(seat_map_id: int, payload: SeatLayoutPayload) -> dict[str, list[dict[str, object]]]:
|
|
return {"items": save_seat_layout(seat_map_id, payload)}
|
|
|
|
|
|
@app.get("/legacy/organization")
|
|
def legacy_organization() -> FileResponse:
|
|
target = LEGACY_DIR / "DashBoard-organization.html"
|
|
if not target.exists():
|
|
raise HTTPException(status_code=404, detail="Legacy dashboard file not found.")
|
|
return FileResponse(target)
|
|
|
|
|
|
@app.get("/legacy/organization-backup")
|
|
def legacy_organization_backup() -> FileResponse:
|
|
target = LEGACY_DIR / "DashBoard-organization-backup.html"
|
|
if not target.exists():
|
|
raise HTTPException(status_code=404, detail="Legacy dashboard backup not found.")
|
|
return FileResponse(target)
|
|
|
|
|
|
@app.get("/integrations/payment")
|
|
def integration_payment() -> FileResponse:
|
|
target = INCOMING_FILES_DIR / "payment.html"
|
|
if not target.exists():
|
|
raise HTTPException(status_code=404, detail="Payment integration file not found.")
|
|
return FileResponse(target)
|
|
|
|
|
|
@app.get("/integrations/mh")
|
|
def integration_mh() -> FileResponse:
|
|
target = INCOMING_FILES_DIR / "mh.html"
|
|
if not target.exists():
|
|
raise HTTPException(status_code=404, detail="MH integration file not found.")
|
|
return FileResponse(target)
|
|
|
|
|
|
@app.get("/uploads/{filename}")
|
|
def get_upload(filename: str) -> FileResponse:
|
|
target = UPLOAD_DIR / filename
|
|
if not target.exists():
|
|
raise HTTPException(status_code=404, detail="Upload not found.")
|
|
return FileResponse(target)
|