Improve ledger filters and dev sync checks

hyunho
2026-04-02 11:17:01 +09:00
parent f8ea345882
commit c0564ee326
4 changed files with 455 additions and 105 deletions


@@ -12,6 +12,7 @@ echo "[smoke] running 8081 endpoint checks"
 docker exec mh-dashboard-organization-dev-backend-1 python - <<'PY'
 import sys
 import urllib.request
+import json
 checks = [
@@ -43,6 +44,21 @@ for name, url, needle in checks:
     except Exception as exc:
         failed.append(f"{name}: {exc}")
+
+try:
+    with urllib.request.urlopen("http://127.0.0.1:8000/api/integration/summary", timeout=8) as response:
+        payload = json.loads(response.read().decode())
+        counts = payload.get("counts") or {}
+        work_logs = int(counts.get("work_logs") or 0)
+        vouchers = int(counts.get("vouchers") or 0)
+        if work_logs <= 0:
+            failed.append(f"analysis-summary: work_logs is {work_logs}")
+        if vouchers <= 0:
+            failed.append(f"analysis-summary: vouchers is {vouchers}")
+        if work_logs > 0 and vouchers > 0:
+            print(f"[ok] analysis-summary -> work_logs={work_logs}, vouchers={vouchers}")
+except Exception as exc:
+    failed.append(f"analysis-summary: {exc}")

 if failed:
     print("[smoke] failures detected:")
     for item in failed:

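The probe added above upgrades the smoke run from "endpoints respond" to "the synced data is actually there": it fails when either work_logs or vouchers reports zero. For ad-hoc use outside the smoke script, the same check can be run standalone; a minimal sketch, assuming the same container name and the counts payload shape shown in the hunk above:

docker exec mh-dashboard-organization-dev-backend-1 python - <<'PY'
# Standalone summary probe; exits non-zero when either count is empty,
# so it can gate a CI step or a post-sync hook.
import json
import sys
import urllib.request

with urllib.request.urlopen("http://127.0.0.1:8000/api/integration/summary", timeout=8) as response:
    counts = json.loads(response.read().decode()).get("counts") or {}

work_logs = int(counts.get("work_logs") or 0)
vouchers = int(counts.get("vouchers") or 0)
print(f"analysis-summary -> work_logs={work_logs}, vouchers={vouchers}")
sys.exit(0 if work_logs > 0 and vouchers > 0 else 1)
PY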

@@ -9,6 +9,28 @@ DEV_PROJECT_NAME="${DEV_PROJECT_NAME:-mh-dashboard-organization-dev}"
 DEV_COMPOSE_FILE="${DEV_COMPOSE_FILE:-${DEV_DIR}/docker-compose.8081.yml}"
 SCOPE="${1:-minimal}"
+ANALYSIS_TABLES=(
+  integration_import_batches
+  integration_raw_organization_rows
+  integration_raw_mh_rows
+  integration_raw_mh_pm_rows
+  integration_raw_payment_rows
+  integration_project_aliases
+  integration_project_category_mappings
+  integration_project_pm_assignments
+  integration_projects
+  integration_work_logs
+  integration_work_log_segments
+  integration_vouchers
+)
+MINIMAL_PRESERVE_TABLES=(
+  integration_project_pm_assignments
+  integration_work_logs
+  integration_work_log_segments
+  integration_vouchers
+)

 if [[ ! -f "${PROD_DIR}/docker-compose.yml" ]]; then
   echo "Production workspace not found: ${PROD_DIR}" >&2
   exit 1
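Hoisting the twelve integration tables into ANALYSIS_TABLES removes the two copies the analysis and full scopes previously carried (see the next hunk), so the list can only drift in one place. The "${ARRAY[@]}" expansion splices each element in as its own word, which is what lets one shared list be reused verbatim or extended inline; a tiny sketch with hypothetical names:

# Hypothetical names, only to show the splice semantics relied on below.
BASE=(alpha beta)
ALL=("${BASE[@]}" gamma)     # ALL is now: alpha beta gamma
printf '%s\n' "${ALL[@]}"    # prints each element on its own line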
@@ -38,35 +60,11 @@ case "${SCOPE}" in
     )
     ;;
   analysis)
-    TABLES=(
-      integration_import_batches
-      integration_raw_organization_rows
-      integration_raw_mh_rows
-      integration_raw_mh_pm_rows
-      integration_raw_payment_rows
-      integration_project_aliases
-      integration_project_category_mappings
-      integration_project_pm_assignments
-      integration_projects
-      integration_work_logs
-      integration_work_log_segments
-      integration_vouchers
-    )
+    TABLES=("${ANALYSIS_TABLES[@]}")
     ;;
   full)
     TABLES=(
-      integration_import_batches
-      integration_raw_organization_rows
-      integration_raw_mh_rows
-      integration_raw_mh_pm_rows
-      integration_raw_payment_rows
-      integration_project_aliases
-      integration_project_category_mappings
-      integration_project_pm_assignments
-      integration_projects
-      integration_work_logs
-      integration_work_log_segments
-      integration_vouchers
+      "${ANALYSIS_TABLES[@]}"
       member_aliases
       member_overrides
       member_retirements
@@ -81,6 +79,16 @@ case "${SCOPE}" in
     ;;
 esac
+
+PRESERVE_TABLES=()
+if [[ "${SCOPE}" == "minimal" ]]; then
+  PRESERVE_TABLES=("${MINIMAL_PRESERVE_TABLES[@]}")
+fi
+
+DUMP_TABLES=("${TABLES[@]}")
+if [[ ${#PRESERVE_TABLES[@]} -gt 0 ]]; then
+  DUMP_TABLES+=("${PRESERVE_TABLES[@]}")
+fi

 PROD_COMPOSE=(docker compose --project-directory "${PROD_DIR}")
 DEV_COMPOSE=(docker compose -p "${DEV_PROJECT_NAME}" --env-file "${DEV_DIR}/.env" -f "${DEV_COMPOSE_FILE}")
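The effect: minimal runs now dump the four preserved integration tables in addition to the minimal TABLES list, while analysis and full leave PRESERVE_TABLES empty, so their DUMP_TABLES stays identical to TABLES. A hypothetical debug line (not in the script) makes the composed set easy to eyeball before the dump step:

# Illustrative only: show the effective dump set for the chosen scope.
echo "[debug] scope=${SCOPE}, dumping ${#DUMP_TABLES[@]} tables:"
printf '  %s\n' "${DUMP_TABLES[@]}"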
@@ -129,7 +137,7 @@ echo "[4/8] Building truncate script for ${SCOPE} scope"
 echo "[5/8] Dumping ${SCOPE} data from 8080 source DB"
 TABLE_ARGS=()
-for table in "${TABLES[@]}"; do
+for table in "${DUMP_TABLES[@]}"; do
   TABLE_ARGS+=(-t "public.${table}")
 done
 run_compose "${PROD_DIR}" "${PROD_COMPOSE[@]}" exec -T db \
@@ -193,7 +201,7 @@ echo "[7.8/8] Resetting serial sequences"
 echo "SELECT setval(pg_get_serial_sequence('public.member_retirements', 'id'), COALESCE((SELECT MAX(id) FROM public.member_retirements), 1), true);"
 echo "SELECT setval(pg_get_serial_sequence('public.seat_maps', 'id'), COALESCE((SELECT MAX(id) FROM public.seat_maps), 1), true);"
 echo "SELECT setval(pg_get_serial_sequence('public.seat_slots', 'id'), COALESCE((SELECT MAX(id) FROM public.seat_slots), 1), true);"
-if [[ "${SCOPE}" == "analysis" || "${SCOPE}" == "full" ]]; then
+if [[ "${SCOPE}" == "analysis" || "${SCOPE}" == "full" || "${#PRESERVE_TABLES[@]}" -gt 0 ]]; then
   echo "SELECT setval(pg_get_serial_sequence('public.integration_import_batches', 'id'), COALESCE((SELECT MAX(id) FROM public.integration_import_batches), 1), true);"
   echo "SELECT setval(pg_get_serial_sequence('public.integration_raw_organization_rows', 'id'), COALESCE((SELECT MAX(id) FROM public.integration_raw_organization_rows), 1), true);"
   echo "SELECT setval(pg_get_serial_sequence('public.integration_raw_mh_rows', 'id'), COALESCE((SELECT MAX(id) FROM public.integration_raw_mh_rows), 1), true);"
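The widened condition makes the sequence resets also run for a minimal-scope sync, which now carries integration rows via PRESERVE_TABLES. The pattern itself is standard PostgreSQL: pg_get_serial_sequence resolves the sequence backing a serial/identity column, and setval(..., true) marks the given value as consumed, so the next INSERT allocates MAX(id) + 1. A minimal sketch against a hypothetical table, in the same heredoc style the script uses:

cat <<'SQL'
-- Hypothetical table name; same pattern as the echoed statements above.
SELECT setval(
  pg_get_serial_sequence('public.example_rows', 'id'),      -- backing sequence
  COALESCE((SELECT MAX(id) FROM public.example_rows), 1),   -- highest loaded id
  true                                                      -- mark value as already used
);
SQL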
@@ -236,7 +244,7 @@ UNION ALL
 SELECT 'auth_users', COUNT(*)::text FROM auth.users
 ORDER BY table_name;
 SQL
-if [[ "${SCOPE}" == "analysis" || "${SCOPE}" == "full" ]]; then
+if [[ "${SCOPE}" == "analysis" || "${SCOPE}" == "full" || "${#PRESERVE_TABLES[@]}" -gt 0 ]]; then
   cat <<'SQL'
 SELECT 'integration_work_logs', COUNT(*)::text FROM public.integration_work_logs
 UNION ALL