Organize records

This commit is contained in:
admin hmac
2025-11-04 16:45:03 +09:00
commit f1a3ff71cb
23 changed files with 1200 additions and 0 deletions

2
.gitignore vendored Normal file

@@ -0,0 +1,2 @@
memgraph-data
memgraph-data/*

24
Dockerfile Normal file

@@ -0,0 +1,24 @@
# Use the official Memgraph image as a base
FROM memgraph/memgraph-mage:latest
# Define arguments for user and group IDs, with defaults
ARG UID=1001
ARG GID=1001
# Switch to the root user to perform administrative tasks
USER root
# Install gosu for privilege dropping
RUN sed -i 's/archive.ubuntu.com/mirror.kakao.com/g' /etc/apt/sources.list.d/ubuntu.sources
RUN apt-get update && apt-get install -y gosu && rm -rf /var/lib/apt/lists/*
# Create a new group and user with the specified UID and GID
RUN groupadd -g ${GID} user && \
useradd -u ${UID} -g ${GID} -m -s /bin/bash user
# Copy the entrypoint script and make it executable
COPY scripts/entrypoint.sh /usr/local/bin/
RUN chmod +x /usr/local/bin/entrypoint.sh
# Set the entrypoint script to be executed when the container starts
ENTRYPOINT ["entrypoint.sh"]

66
all_paths_analysis.cypher Normal file

@@ -0,0 +1,66 @@
// All Scenarios Critical Path Comparison Query
// Finds the Critical Path (longest duration path) for each scenario
// and returns a comparison table.
// Optional: Parameter for a list of scenario IDs to compare
// Example: :scenario_ids => ['linear-process', 'parallel-process', 'what-if-parallel']
// 1. Get all Scenario IDs (or filter by provided list)
MATCH (s:Scenario)
WHERE ($scenario_ids IS NULL OR s.id IN $scenario_ids)
WITH COLLECT(s.id) AS scenario_ids
// 2. For each scenario, find its Critical Path duration and cost
UNWIND scenario_ids AS current_scenario_id
// Find start nodes for the current scenario
MATCH (start_node:Job)
OPTIONAL MATCH (start_node)<-[r_in:PRECEDES {scenario: current_scenario_id}]-()
WITH current_scenario_id, start_node, r_in WHERE r_in IS NULL
// Find end nodes for the current scenario
MATCH (end_node:Job)
OPTIONAL MATCH (end_node)-[r_out:PRECEDES {scenario: current_scenario_id}]->()
WITH current_scenario_id, start_node, end_node, r_out WHERE r_out IS NULL
// Match all valid paths for the current scenario
MATCH path = (start_node)-[rels:PRECEDES*]->(end_node)
WHERE ALL(r IN rels WHERE r.scenario = current_scenario_id)
// For each job in the path, calculate its effective duration and cost
WITH current_scenario_id, path, nodes(path) AS jobs_on_path
UNWIND jobs_on_path AS job
OPTIONAL MATCH (:Scenario {id: current_scenario_id})-[m:MODIFIES]->(job)
WITH
current_scenario_id,
path,
COLLECT({
name: job.name,
bd: job.base_duration,
ed: COALESCE(m.new_duration, job.base_duration),
ec: COALESCE(m.new_cost, job.base_cost)
}) AS job_data
// Calculate totals for each path
WITH
current_scenario_id,
path,
job_data,
REDUCE(s = 0, x IN job_data | s + x.ed) AS total_duration,
REDUCE(s = 0, x IN job_data | s + x.ec) AS total_cost
// Find the Critical Path (longest duration) for each scenario:
// order all paths, then keep the first (longest) one per scenario
WITH current_scenario_id, job_data, total_duration, total_cost
ORDER BY total_duration DESC
WITH current_scenario_id,
COLLECT({job_data: job_data, total_duration: total_duration, total_cost: total_cost}) AS ranked_paths
WITH current_scenario_id, ranked_paths[0] AS cp
// Return the critical path details for each scenario
RETURN
current_scenario_id AS scenario_id,
[j IN cp.job_data | j.name] AS critical_path_jobs,
[j IN cp.job_data | j.bd] AS critical_path_base_durations,
[j IN cp.job_data | j.ed] AS critical_path_effective_durations,
cp.total_duration AS critical_path_total_duration,
cp.total_cost AS critical_path_total_cost
// 3. Order the final comparison table by critical path duration
ORDER BY critical_path_total_duration DESC;

28
conf/memgraph.conf Normal file

@@ -0,0 +1,28 @@
# This is a Memgraph configuration file.
# For a complete list of options, please visit:
# https://memgraph.com/docs/database-management/configuration
# Bolt protocol port
--bolt-port=7687
# Also log to stderr
--also-log-to-stderr=true
# The directory where Memgraph stores its data
--data-directory=/var/lib/memgraph
# The level of detail of the log messages
--log-level=INFO
# The address to which the Bolt server will bind to
--bolt-address=0.0.0.0
# The address to which the replication server will bind to
--replication-address=0.0.0.0
# Enable authentication
--auth-enabled=false
--storage-properties-on-edges=true


@@ -0,0 +1,25 @@
// Query to Create a New Scenario and its Delta (MODIFIES relationship)
// This allows defining a new 'what-if' scenario by specifying changes to a job's properties.
// Parameters:
// :new_scenario_id => 'what-if-2'
// :description => 'Job 5 기간 단축 시나리오'
// :target_job_id => 5
// :new_duration_value => 3
// :new_cost_value => 50
// 1. Create the new Scenario node if it doesn't already exist
MERGE (s:Scenario {id: $new_scenario_id})
ON CREATE SET s.description = $description
// 2. Find the target Job to modify
MATCH (j:Job {id: $target_job_id})
// 3. Create or update the MODIFIES relationship between the Scenario and the Job
// This relationship holds the delta (the new property values for this scenario)
MERGE (s)-[m:MODIFIES]->(j)
ON CREATE SET m.new_duration = $new_duration_value, m.new_cost = $new_cost_value
ON MATCH SET m.new_duration = $new_duration_value, m.new_cost = $new_cost_value
// 4. Return the created/updated Scenario and the modified Job for confirmation
RETURN s, m, j;


@@ -0,0 +1,29 @@
// Critical Path Analysis Query (Optimized)
// Finds the longest path in terms of total duration of jobs.
// 1. Find all potential start nodes first
MATCH (start_node:Job)
WHERE NOT EXISTS((:Job)-[:PRECEDES]->(start_node))
// 2. Find all potential end nodes first
MATCH (end_node:Job)
WHERE NOT EXISTS((end_node)-[:PRECEDES]->(:Job))
// 3. Now, match paths ONLY between the pre-filtered start and end nodes
MATCH path = (start_node)-[:PRECEDES*]->(end_node)
// 4. Calculate the total duration for each path
WITH
path,
REDUCE(totalDuration = 0, job IN nodes(path) | totalDuration + job.duration) AS total_duration
// 5. Return the path and its total duration for visualization
RETURN
path,
total_duration
// 6. Order by the total duration in descending order to find the longest path
ORDER BY total_duration DESC
// 7. Limit to the top 1 result, which is the Critical Path
LIMIT 1;

20
data/job_types.csv Normal file

@@ -0,0 +1,20 @@
id,name,standard_duration,standard_cost
JT01,터널 입구 굴착,10,150
JT02,숏크리트 타설,5,80
JT03,강지보 설치,7,120
JT04,방수 및 배수시설 설치,8,100
JT05,철근 조립,6,90
JT06,내부 라이닝 콘크리트 타설,12,200
JT07,조명 및 환기시설 설치,9,110
JT08,안전시설물 설치,4,60
JT09,포장 및 차선 도색,7,95
JT10,TBM 준비,15,500
JT11,TBM 굴진,30,1200
JT12,세그먼트 조립,25,800
JT13,그라우팅,10,150
JT14,TBM 해체 및 반출,12,300
JT15,전기/통신 케이블 설치,8,130
JT16,CCTV 및 VMS 설치,5,70
JT17,소방시설 설치,6,85
JT18,최종 점검,3,50
JT19,개통 준비,2,30

21
data/jobs.csv Normal file

@@ -0,0 +1,21 @@
id,type_id,name,duration,cost,job_no
1,JT01,터널 입구 굴착,10,150,JOB-001
2,JT02,1차 숏크리트 타설,5,80,JOB-002
3,JT03,강지보 설치,7,120,JOB-003
4,JT02,2차 숏크리트 타설,5,80,JOB-004
5,JT04,방수 및 배수시설 설치,8,100,JOB-005
6,JT05,철근 조립,6,90,JOB-006
7,JT06,내부 라이닝 콘크리트 타설,12,200,JOB-007
8,JT07,조명 및 환기시설 설치,9,110,JOB-008
9,JT08,안전시설물 설치,4,60,JOB-009
10,JT09,포장 및 차선 도색,7,95,JOB-010
11,JT10,TBM 준비,15,500,JOB-011
12,JT11,TBM 굴진,30,1200,JOB-012
13,JT12,세그먼트 조립,25,800,JOB-013
14,JT13,그라우팅,10,150,JOB-014
15,JT14,TBM 해체 및 반출,12,300,JOB-015
16,JT15,전기/통신 케이블 설치,8,130,JOB-016
17,JT16,CCTV 및 VMS 설치,5,70,JOB-017
18,JT17,소방시설 설치,6,85,JOB-018
19,JT18,최종 점검,3,50,JOB-019
20,JT19,개통 준비,2,30,JOB-020

15
data/mgmt_status.csv Normal file

@@ -0,0 +1,15 @@
id,type,status
CS_APP,Construction,APPROVED
CS_CON,Construction,CONFIRMED
CS_PLN,Construction,PLANNING
PS_EXE,Project,EXECUTING
PS_PLN,Project,PLANNING
PM_PAID,Payment,PAID
PM_APL,Payment,APPLIED
PM_NON,Payment,NONE
QS_APP,Quality,APPROVED
QS_CON,Quality,CONFIRMED
QS_PLN,Quality,PLANNING
SS_APP,Safety,APPROVED
SS_CON,Safety,CONFIRMED
SS_PLN,Safety,PLANNING

17
data/object_types.csv Normal file

@@ -0,0 +1,17 @@
id,name,category
OT01,굴착기,장비
OT02,숏크리트 펌프,장비
OT03,강지보재,자재
OT04,방수시트,자재
OT05,콘크리트 믹서,장비
OT06,철근,자재
OT07,조명등,자재
OT08,환풍기,장비
OT09,TBM,장비
OT10,세그먼트,자재
OT11,그라우트 믹서,장비
OT12,케이블,자재
OT13,CCTV,장비
OT14,소화기,자재
OT15,차선도색기,장비
OT16,숏크리트,자재

18
data/objects.csv Normal file

@@ -0,0 +1,18 @@
id,type_id,name,obj_no
OBJ01,OT01,굴착기-A01,EQ-001
OBJ02,OT02,숏크리트 펌프-A,EQ-002
OBJ03,OT03,강지보재-L100,MTR-001
OBJ04,OT04,방수시트-S20,MTR-002
OBJ05,OT05,콘크리트 믹서-T1,EQ-003
OBJ06,OT06,철근-D16,MTR-003
OBJ07,OT07,조명등-LED-1,MTR-004
OBJ08,OT08,환풍기-F1,EQ-004
OBJ09,OT09,TBM-Shield-1,EQ-005
OBJ10,OT10,세그먼트-A타입,MTR-005
OBJ11,OT11,그라우트 믹서-G1,EQ-006
OBJ12,OT12,전원 케이블-HV-1,MTR-006
OBJ13,OT13,CCTV-001,EQ-007
OBJ14,OT14,소화기-P1,MTR-007
OBJ15,OT15,차선도색기-Y1,EQ-008
OBJ16,OT16,숏크리트-Batch1,MTR-008
OBJ17,OT16,숏크리트-Batch2,MTR-009

50
data/relations.csv Normal file

@@ -0,0 +1,50 @@
from_id,to_id,type,quantity,unit
# Job IS_A JobType
1,JT01,IS_A,,
2,JT02,IS_A,,
3,JT03,IS_A,,
4,JT02,IS_A,,
5,JT04,IS_A,,
# Object IS_A ObjectType
OBJ01,OT01,IS_A,,
OBJ02,OT02,IS_A,,
OBJ16,OT16,IS_A,,
OBJ17,OT16,IS_A,,
# Job PRECEDES Job
1,2,PRECEDES,,
2,3,PRECEDES,,
3,4,PRECEDES,,
4,5,PRECEDES,,
# Job REQUIRES Object (with quantity)
1,OBJ01,REQUIRES,1,unit
2,OBJ02,REQUIRES,1,unit
2,OBJ16,REQUIRES,50,ton
3,OBJ03,REQUIRES,100,meter
4,OBJ02,REQUIRES,1,unit
4,OBJ17,REQUIRES,40,ton
# Job HAS_STATUS MgmtStatus
1,CS_APP,HAS_STATUS,,
1,PS_EXE,HAS_STATUS,,
1,PM_PAID,HAS_STATUS,,
1,QS_APP,HAS_STATUS,,
1,SS_APP,HAS_STATUS,,
2,CS_APP,HAS_STATUS,,
2,PS_EXE,HAS_STATUS,,
2,PM_PAID,HAS_STATUS,,
2,QS_APP,HAS_STATUS,,
2,SS_APP,HAS_STATUS,,
3,CS_CON,HAS_STATUS,,
3,PS_EXE,HAS_STATUS,,
3,PM_APL,HAS_STATUS,,
3,QS_CON,HAS_STATUS,,
3,SS_CON,HAS_STATUS,,
4,CS_PLN,HAS_STATUS,,
4,PS_PLN,HAS_STATUS,,
4,PM_NON,HAS_STATUS,,
4,QS_PLN,HAS_STATUS,,
4,SS_PLN,HAS_STATUS,,
5,CS_PLN,HAS_STATUS,,
5,PS_PLN,HAS_STATUS,,
5,PM_NON,HAS_STATUS,,
5,QS_PLN,HAS_STATUS,,
5,SS_PLN,HAS_STATUS,,

34
docker-compose.yml Normal file

@@ -0,0 +1,34 @@
services:
memgraph:
build:
context: .
dockerfile: Dockerfile
args:
UID: 1001
GID: 1001
container_name: memgraph-mage
pull_policy: always
environment:
- storage-properties-on-edges=true
ulimits:
stack:
soft: 33554432
hard: 33554432
ports:
- "7687:7687"
- "7444:7444"
volumes:
- ./memgraph-data:/var/lib/memgraph
- ./conf/memgraph.conf:/etc/memgraph/memgraph.conf
lab:
image: memgraph/lab:latest
container_name: memgraph-lab
pull_policy: always
ports:
- "3000:3000"
depends_on:
- memgraph
environment:
- QUICK_CONNECT_MG_HOST=memgraph
- QUICK_CONNECT_MG_PORT=7687

131
docs/PRD.md Normal file

@@ -0,0 +1,131 @@
## Memgraph-Based Tunnel Construction Management Decision Framework PoC: PRD (Product Requirements Document)
### 1. Overview
This document defines the requirements for a Proof of Concept (PoC) of a decision-support framework that models the complex relationships between Jobs and Objects in tunnel construction with the graph database Memgraph and analyzes the project's Critical Path to support efficient schedule management.
**1.1. Problem Statement**
A tunnel construction project involves a large number of jobs and objects entangled in complex precedence and dependency relationships, so traditional RDB or spreadsheet-based management makes it hard to grasp the overall process flow and its bottlenecks intuitively. This limits the ability to predict how a delay in a specific job affects the whole project and to respond proactively, and it makes resource allocation difficult to optimize.
**1.2. Proposed Solution: Hybrid Data Architecture**
We adopt a hybrid architecture that separates rarely changing master data (JobType, ObjectType, etc.) from dynamically changing process-scenario data. In this PoC the concept is simulated entirely inside Memgraph.
* **Base Data:** `Job` nodes carry the original values, such as `base_duration` and `base_cost`, as properties. The default precedence relationships (`PRECEDES`) between all `Job`s are defined as the 'base' scenario.
* **Scenario & Delta:**
* A separate **`Scenario` node** defines an analysis scenario such as "assume a TBM delay".
* A **`MODIFIES` relationship** stores only the **delta**: how a given `Scenario` changes a property of a given `Job` (e.g. `duration`).
* This makes it possible to manage and analyze many "what-if" scenarios efficiently with minimal data, without duplicating the whole graph.
The final goal of the PoC is to verify a dynamic decision-support process in which, when a scenario is analyzed, these deltas are **applied (overridden) on top of the base data in real time** to compute the Critical Path and identify the most optimized schedule.
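The override itself boils down to one `COALESCE` per property: use the scenario's delta when a `MODIFIES` edge exists, otherwise fall back to the base value. A minimal sketch of that idea, using an illustrative `$scenario_id` parameter in the same style as the analysis queries in this commit:
```cypher
// For every Job, resolve the duration that applies under the given scenario:
// the MODIFIES delta when present, otherwise the unchanged base value.
MATCH (j:Job)
OPTIONAL MATCH (:Scenario {id: $scenario_id})-[m:MODIFIES]->(j)
RETURN
  j.name AS job,
  j.base_duration AS base_duration,
  COALESCE(m.new_duration, j.base_duration) AS effective_duration
ORDER BY effective_duration DESC;
```
No base data is copied or mutated; removing the `Scenario` node and its `MODIFIES` edges removes the what-if entirely.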
### 2. Goals & Success Criteria
**2.1. Goals**
* **Goal 1:** Design and build a hybrid graph data model that incorporates the 'base data + delta' concept.
* **Goal 2:** Given a scenario (a set of deltas), develop a query that dynamically applies the deltas to the base data and computes that scenario's Critical Path correctly.
* **Goal 3:** Compare the analysis results of several scenarios and simulate the decision process by which a project manager selects the optimal schedule (the one with the shortest Critical Path).
**2.2. Success Criteria**
* **Quantitative:**
* Complete a graph schema that covers at least 90% of the defined Job and Object properties.
* Load a sample dataset of at least 20 Job nodes and 10 Object nodes into Memgraph.
* Return the longest path (Critical Path), based on the job duration property, within 10 seconds from a single Cypher query.
* **Qualitative:**
* The graph visualized in Memgraph Lab represents the construction-process relationships intuitively.
* The derived Critical Path is logically sound, and it can be explained how a project manager would base decisions on it.
### 3. Scope
**3.1. In-Scope**
* **Data modeling:**
* Define `Job` and `Object` nodes (based on the given properties)
* Define relationship edges between nodes (`PRECEDES`, `DEPENDS_ON`, etc.)
* **Data lifecycle:**
* Define and generate a sample dataset (CSV or JSON)
* Insert (Create) and read (Read) data with Cypher queries
* **Core feature development:**
* Write the Cypher query for Critical Path analysis
* **Validation:**
* Visualize and verify the data with Memgraph Lab
* Analyze query results and document a resource-reallocation simulation scenario
**3.2. Out-of-Scope**
* **Application/UI development:** No separate web/app UI is built; all work is done in Memgraph Lab or the CLI.
* **Real-time data integration:** Real-time integration with external systems is not considered.
* **LLM integration:** Natural-language processing and LLM integration are outside the scope of this PoC and are treated as a follow-up task after successful completion.
* **Full detailed-property modeling:** Considering the PoC's complexity, only the key properties of two or three representative `Object` types are modeled rather than the full set of type-specific detailed properties.
### 4. Feature Specifications
**FS-1: Graph Data Model Design**
* **Nodes:**
* `Job`: a node holding job information.
* **Key properties:** `job_no`, `name`, `id`, `start_date`, `end_date`, `duration` (the key property for Critical Path computation, calculated as `end_date` - `start_date`), `status` (planned, confirmed, approved, etc.)
* `Object`: a node holding construction-object information.
* **Key properties:** `obj_no`, `name`, `id`, `type`, `bim_model_id`, `version`
* **Edges:**
* `PRECEDES`: precedence between Jobs. (`Job` A -> `Job` B)
* `HAS_OBJECT`: a Job is associated with a particular Object. (`Job` A -> `Object` B)
* `PARENT_OF`: hierarchy between Objects. (`Object` A -> `Object` B) A minimal Cypher sketch of this model follows below.
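A minimal sketch of the FS-1 model, with illustrative property values drawn from the sample dataset in this commit:
```cypher
// Two Jobs and one Object wired up with the FS-1 relationship types.
CREATE (a:Job {id: 1, job_no: 'JOB-001', name: '터널 입구 굴착', duration: 10, status: '승인'})
CREATE (b:Job {id: 2, job_no: 'JOB-002', name: '1차 숏크리트 타설', duration: 5, status: '계획'})
CREATE (o:Object {id: 'OBJ01', obj_no: 'EQ-001', name: '굴착기-A01', type: '장비'})
CREATE (a)-[:PRECEDES]->(b)     // Job A must finish before Job B
CREATE (a)-[:HAS_OBJECT]->(o);  // Job A is associated with Object o
```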
**FS-2: Data Insertion and Retrieval**
* Write a script that bulk-loads the predefined sample data (CSV) into Memgraph using the `LOAD CSV` Cypher clause, as sketched below.
* Write basic node and relationship queries to confirm that the data was inserted correctly.
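A minimal sketch of the load step, following the `LOAD CSV` form used by `import.cypher` in this commit (the `/data/jobs.csv` path assumes the CSV directory is mounted into the container):
```cypher
// Bulk-load Job nodes from the mounted CSV file (Memgraph LOAD CSV syntax).
LOAD CSV FROM '/data/jobs.csv' WITH HEADER AS row
CREATE (:Job {
  id: toInteger(row.id),
  name: row.name,
  duration: toInteger(row.duration),
  cost: toInteger(row.cost),
  job_no: row.job_no
});
// Basic read-back check that the rows were inserted.
MATCH (j:Job) RETURN count(j) AS jobs_loaded;
```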
**FS-3: Critical Path Analysis**
* Write a Cypher query that finds the path with the largest sum of the `duration` property over its `Job` nodes.
* The query must traverse paths of `Job` nodes connected sequentially along `PRECEDES` edges.
* **Example query skeleton:**
```cypher
MATCH path = (start_node:Job)-[:PRECEDES*]->(end_node:Job)
WHERE NOT ()-[:PRECEDES]->(start_node) AND NOT (end_node)-[:PRECEDES]->()
WITH nodes(path) AS path_nodes
UNWIND path_nodes as node
WITH path_nodes, sum(node.duration) as total_duration
RETURN path_nodes, total_duration
ORDER BY total_duration DESC
LIMIT 1;
```
**FS-4: Scenario-Based What-if Analysis**
* Separate the Jobs on the Critical Path found in FS-3 from the Jobs that are not on it (Jobs with slack).
* Form the hypothesis that "if resources from job B, which has slack, are reallocated to job A on the Critical Path, A's `duration` shrinks and the overall project duration can be reduced", and document a scenario that explains it (a conceptual proof, with no actual data changes).
**FS-5: Scenario Management (Snapshots)**
* Write a Cypher query that creates a new scenario (e.g. 'scenario-B') by copying every `PRECEDES` relationship of an existing scenario (e.g. 'scenario-A'), as sketched below.
* Within the newly created scenario it must be possible to simulate schedule changes by modifying a specific `Job`'s properties (e.g. `duration`) or editing its `PRECEDES` relationships.
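A sketch of the FS-5 snapshot step, assuming scenario membership is recorded as a `scenario` property on `PRECEDES` edges (the convention used by the scenario-based queries in this commit); the scenario IDs and the modified job are placeholders:
```cypher
// 1. Register the new scenario node.
MERGE (:Scenario {id: 'scenario-B', description: 'Copy of scenario-A'});
// 2. Duplicate every PRECEDES edge of scenario-A under the new scenario ID.
MATCH (a:Job)-[r:PRECEDES {scenario: 'scenario-A'}]->(b:Job)
CREATE (a)-[:PRECEDES {scenario: 'scenario-B'}]->(b);
// 3. Record a change inside the copy as a delta, leaving scenario-A untouched.
MATCH (s:Scenario {id: 'scenario-B'}), (j:Job {id: 12})
MERGE (s)-[m:MODIFIES]->(j)
SET m.new_duration = 40;
```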
### 5. Validation Scenario
1. **Setup:** Start a Memgraph instance using `docker-compose.yml`.
2. **Data loading:** Run the data-insertion script to load the sample data into Memgraph.
3. **Data check:** Connect to Memgraph Lab and visually confirm that the nodes and edges were created as intended (a count query is sketched after this list).
4. **Critical Path run:** Execute the Critical Path analysis query and check the longest path and its total duration.
5. **Result analysis:**
* Confirm that the Jobs returned by the query form the project's bottleneck section.
* Among the jobs not on that path, identify ones that use similar resources or have long idle times.
* Based on the identified jobs, describe a resource-reallocation scenario and present the expected benefit (schedule reduction), demonstrating the usefulness of the PoC.
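For step 3, the visual check in Memgraph Lab can be backed by a quick count query; a minimal sketch:
```cypher
// Count nodes per label and relationships per type as a load sanity check.
MATCH (n) RETURN labels(n) AS label, count(*) AS nodes;
MATCH ()-[r]->() RETURN type(r) AS relationship, count(*) AS edges;
```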
### 6. Tech Stack
* **Database:** Memgraph
* **Orchestration:** Docker, Docker Compose
* **Query & Visualization:** Cypher, Memgraph Lab
* **Data Format:** CSV or JSON
### 7. Next Steps
Once this PoC completes successfully, the framework can be advanced through the following phases.
* **Phase 2 (Application Layer):** Develop an API server and a simple dashboard UI for reading and editing data
* **Phase 3 (Advanced Analysis):** Add advanced analysis such as what-if simulation (impact analysis when a specific job is delayed) and bottleneck prediction
* **Phase 4 (LLM Integration):** Integrate an LLM so that analysis results can be obtained from natural-language questions (e.g. "Which job is the most critical right now?") or analysis reports can be generated automatically

57
docs/todo.md Normal file

@@ -0,0 +1,57 @@
# PoC Development To-Do List
This document lists, in order, the tasks to be carried out for the Memgraph-based tunnel construction management PoC.
### Step 1: Environment Setup and Data Modeling (Hybrid Architecture)
- [x] Add volume settings to `docker-compose.yml` for data persistence
- [ ] **Data model redesign:**
- [ ] Add `base_duration` and `base_cost` properties to the `Job` node
- [ ] Define the `Scenario` node (`id`, `description`)
- [ ] Define the `MODIFIES` relationship (`Scenario` -> `Job`, carrying the properties to change, e.g. `new_duration`)
- [ ] Unify `PRECEDES` as the default process flow instead of carrying a per-relationship `scenario` property
- [ ] Restart the Memgraph service to secure a stable environment
### Step 2: Sample Data Preparation
- [ ] Update the `import_embedded_clean.cypher` script
- [ ] When creating `Job` nodes, change `duration` and `cost` to `base_duration` and `base_cost`
- [ ] Create a `Scenario` node for the default analysis scenario (e.g. `id: 'what-if-1', description: 'TBM 지연 가정'`)
- [ ] Create a `MODIFIES` relationship so that the 'what-if-1' scenario changes a specific `Job`'s `duration`
### Step 3: Data Import and Verification
- [ ] Run the updated `import_embedded_clean.cypher` script
- [ ] Verify the data: check the delta data with `MATCH (s:Scenario)-[r:MODIFIES]->(j:Job) RETURN s, r, j`
### Step 4: Dynamic Analysis Development (Query Writing)
- [ ] **(Query 1) Write the dynamic Critical Path analysis query (`dynamic_cp_analysis.cypher`)**
- [ ] Accept the scenario ID as a parameter (`$scenario_id`)
- [ ] **Core logic:**
1. Fetch all `Job` nodes
2. Find the `MODIFIES` relationships attached to the `Scenario` matching `$scenario_id`
3. A `Job` with a `MODIFIES` relationship uses the changed property (e.g. `new_duration`) as its `effective_duration`
4. A `Job` without a `MODIFIES` relationship uses its `base_duration` as its `effective_duration`
5. Run the Critical Path analysis on the sum of the computed `effective_duration` values
- [ ] **(Query 2) Write the new-scenario (delta) creation query (`create_scenario_delta.cypher`)**
- [ ] Accept the new scenario ID, description, target Job ID, and new property values as parameters
- [ ] Create the `Scenario` node and set up the `MODIFIES` relationship
### Step 5: PoC Scenario Validation
- [ ] **(Check 1) Baseline analysis**
- Run `dynamic_cp_analysis.cypher` with a scenario ID that does not exist (e.g. 'base') -> confirm that every Job uses `base_duration` in the resulting Critical Path
- [ ] **(Check 2) Delta-scenario analysis**
- Run `dynamic_cp_analysis.cypher` with `scenario_id: 'what-if-1'`
- Confirm that the modified `duration` is applied and the Critical Path is computed differently
- [ ] **(Check 3) Dynamic creation and analysis of a new scenario**
- Use `create_scenario_delta.cypher` to create a 'what-if-2' scenario and a new delta on the fly
- Analyze 'what-if-2' with `dynamic_cp_analysis.cypher` and confirm the result matches expectations
### Step 6: Data Integrity and Validation (Research)
- [ ] Research validation logic to prevent cyclic (Cycle) relationships during data import
- [ ] Option 1: Validate on data entry/update in the application layer
- [ ] Option 2: Investigate whether Memgraph constraints or triggers can be used
- [ ] Option 3: Write and run a Cypher script that periodically detects cycles (see the sketch below)
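A minimal sketch of the periodic cycle check for option 3: any returned row means the `PRECEDES` graph contains a cycle and is no longer a valid schedule DAG.
```cypher
// A variable-length PRECEDES path that leads from a Job back to itself is a cycle.
MATCH (j:Job)-[:PRECEDES*1..]->(j)
RETURN DISTINCT j.id AS job_in_cycle, j.name AS name;
```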


@@ -0,0 +1,38 @@
// Dynamic Critical Path Analysis Query for Graph Visualization (Pure Cypher, Final & Performant)
// Collects all paths into a single list to ensure full graph rendering in Memgraph Lab.
// Parameter for the scenario ID
// Example: :scenario_id => 'parallel-process'
// 1. Find start nodes for the scenario
MATCH (start_node:Job)
OPTIONAL MATCH (start_node)<-[r_in:PRECEDES {scenario: $scenario_id}]-()
WITH start_node, r_in WHERE r_in IS NULL
MATCH (end_node:Job)
OPTIONAL MATCH (end_node)-[r_out:PRECEDES {scenario: $scenario_id}]->()
WITH start_node, end_node, r_out WHERE r_out IS NULL
// 2. Match all valid paths for the scenario
MATCH path = (start_node)-[rels:PRECEDES*]->(end_node)
WHERE ALL(r IN rels WHERE r.scenario = $scenario_id)
// 3. For each job in the path, calculate its effective duration
WITH path, nodes(path) AS jobs_on_path
UNWIND jobs_on_path AS job
OPTIONAL MATCH (:Scenario {id: $scenario_id})-[m:MODIFIES]->(job)
WITH
path,
COLLECT({
effective_duration: COALESCE(m.new_duration, job.base_duration)
}) AS jobs_with_deltas
// 4. Calculate total duration for each path
WITH
path,
REDUCE(totalDuration = 0, data IN jobs_with_deltas | totalDuration + data.effective_duration) AS total_duration
// 5. Collect all paths, ordered by total_duration, into a single list
WITH COLLECT({path: path, total_duration: total_duration}) AS paths_with_duration
UNWIND paths_with_duration AS p_wd
// Sort via WITH (ORDER BY can only follow a WITH or RETURN clause), then collect in that order
WITH p_wd ORDER BY p_wd.total_duration DESC
RETURN COLLECT(p_wd.path) AS all_paths;

92
import.cypher Normal file

@@ -0,0 +1,92 @@
// 1. (Optional) Clean up the database
MATCH (n) DETACH DELETE n;
// 2. Create indexes for faster matching
CREATE INDEX ON :JobType(id);
CREATE INDEX ON :ObjectType(id);
CREATE INDEX ON :Job(id);
CREATE INDEX ON :Object(id);
CREATE INDEX ON :MgmtStatus(id);
// 3. Create Nodes from CSV files
// Job Types
LOAD CSV FROM '/data/job_types.csv' WITH HEADER AS row
CREATE (n:JobType {
id: row.id,
name: row.name,
standard_duration: toInteger(row.standard_duration),
standard_cost: toInteger(row.standard_cost)
});
// Object Types
LOAD CSV FROM '/data/object_types.csv' WITH HEADER AS row
CREATE (n:ObjectType {
id: row.id,
name: row.name,
category: row.category
});
// Job Instances
LOAD CSV FROM '/data/jobs.csv' WITH HEADER AS row
CREATE (n:Job {
id: toInteger(row.id),
name: row.name,
duration: toInteger(row.duration),
cost: toInteger(row.cost),
job_no: row.job_no
});
// Object Instances
LOAD CSV FROM '/data/objects.csv' WITH HEADER AS row
CREATE (n:Object {
id: row.id,
name: row.name,
obj_no: row.obj_no
});
// Management Status Nodes
LOAD CSV FROM '/data/mgmt_status.csv' WITH HEADER AS row
CREATE (n:MgmtStatus {
id: row.id,
type: row.type,
status: row.status
});
// 4. Create Relationships from relations.csv
// Job IS_A JobType
LOAD CSV FROM '/data/relations.csv' WITH HEADER AS row
WITH row WHERE row.type = 'IS_A' AND toIntegerOrNull(row.from_id) IS NOT NULL
MATCH (from:Job {id: toInteger(row.from_id)})
MATCH (to:JobType {id: row.to_id})
CREATE (from)-[:IS_A]->(to);
// Object IS_A ObjectType
LOAD CSV FROM '/data/relations.csv' WITH HEADER AS row
WITH row WHERE row.type = 'IS_A' AND toIntegerOrNull(row.from_id) IS NULL
MATCH (from:Object {id: row.from_id})
MATCH (to:ObjectType {id: row.to_id})
CREATE (from)-[:IS_A]->(to);
// Job PRECEDES Job
LOAD CSV FROM '/data/relations.csv' WITH HEADER AS row
WITH row WHERE row.type = 'PRECEDES'
MATCH (from:Job {id: toInteger(row.from_id)})
MATCH (to:Job {id: toInteger(row.to_id)})
CREATE (from)-[:PRECEDES]->(to);
// Job REQUIRES Object
LOAD CSV FROM '/data/relations.csv' WITH HEADER AS row
WITH row WHERE row.type = 'REQUIRES'
MATCH (from:Job {id: toInteger(row.from_id)})
MATCH (to:Object {id: row.to_id})
CREATE (from)-[:REQUIRES {quantity: toInteger(row.quantity), unit: row.unit}]->(to);
// Job HAS_STATUS MgmtStatus
LOAD CSV FROM '/data/relations.csv' WITH HEADER AS row
WITH row WHERE row.type = 'HAS_STATUS'
MATCH (from:Job {id: toInteger(row.from_id)})
MATCH (to:MgmtStatus {id: row.to_id})
CREATE (from)-[:HAS_STATUS]->(to);

184
import_embedded.cypher Normal file

@@ -0,0 +1,184 @@
// 1. Clean up the database
MATCH (n) DETACH DELETE n;
// 2. Create indexes for faster matching
CREATE INDEX ON :JobType(id);
CREATE INDEX ON :ObjectType(id);
CREATE INDEX ON :Job(id);
CREATE INDEX ON :Object(id);
CREATE INDEX ON :MgmtStatus(id);
// 3. Create Nodes with embedded data
// Job Types
UNWIND [
{id: 'JT01', name: '터널 입구 굴착', duration: 10, cost: 150},
{id: 'JT02', name: '숏크리트 타설', duration: 5, cost: 80},
{id: 'JT03', name: '강지보 설치', duration: 7, cost: 120},
{id: 'JT04', name: '방수 및 배수시설 설치', duration: 8, cost: 100},
{id: 'JT05', name: '철근 조립', duration: 6, cost: 90},
{id: 'JT06', name: '내부 라이닝 콘크리트 타설', duration: 12, cost: 200},
{id: 'JT07', name: '조명 및 환기시설 설치', duration: 9, cost: 110},
{id: 'JT08', name: '안전시설물 설치', duration: 4, cost: 60},
{id: 'JT09', name: '포장 및 차선 도색', duration: 7, cost: 95},
{id: 'JT10', name: 'TBM 준비', duration: 15, cost: 500},
{id: 'JT11', name: 'TBM 굴진', duration: 30, cost: 1200},
{id: 'JT12', name: '세그먼트 조립', duration: 25, cost: 800},
{id: 'JT13', name: '그라우팅', duration: 10, cost: 150},
{id: 'JT14', name: 'TBM 해체 및 반출', duration: 12, cost: 300},
{id: 'JT15', name: '전기/통신 케이블 설치', duration: 8, cost: 130},
{id: 'JT16', name: 'CCTV 및 VMS 설치', duration: 5, cost: 70},
{id: 'JT17', name: '소방시설 설치', duration: 6, cost: 85},
{id: 'JT18', name: '최종 점검', duration: 3, cost: 50},
{id: 'JT19', name: '개통 준비', duration: 2, cost: 30}
] AS row
CREATE (n:JobType {id: row.id, name: row.name, standard_duration: row.duration, standard_cost: row.cost});
// Object Types
UNWIND [
{id: 'OT01', name: '굴착기', category: '장비'},
{id: 'OT02', name: '숏크리트 펌프', category: '장비'},
{id: 'OT03', name: '강지보재', category: '자재'},
{id: 'OT04', name: '방수시트', category: '자재'},
{id: 'OT05', name: '콘크리트 믹서', category: '장비'},
{id: 'OT06', name: '철근', category: '자재'},
{id: 'OT07', name: '조명등', category: '자재'},
{id: 'OT08', name: '환풍기', category: '장비'},
{id: 'OT09', name: 'TBM', category: '장비'},
{id: 'OT10', name: '세그먼트', category: '자재'},
{id: 'OT11', name: '그라우트 믹서', category: '장비'},
{id: 'OT12', name: '케이블', category: '자재'},
{id: 'OT13', name: 'CCTV', category: '장비'},
{id: 'OT14', name: '소화기', category: '자재'},
{id: 'OT15', name: '차선도색기', category: '장비'},
{id: 'OT16', name: '숏크리트', category: '자재'}
] AS row
CREATE (n:ObjectType {id: row.id, name: row.name, category: row.category});
// Job Instances
UNWIND [
{id: 1, name: '터널 입구 굴착', duration: 10, cost: 150, job_no: 'JOB-001'},
{id: 2, name: '1차 숏크리트 타설', duration: 5, cost: 80, job_no: 'JOB-002'},
{id: 3, name: '강지보 설치', duration: 7, cost: 120, job_no: 'JOB-003'},
{id: 4, name: '2차 숏크리트 타설', duration: 5, cost: 80, job_no: 'JOB-004'},
{id: 5, name: '방수 및 배수시설 설치', duration: 8, cost: 100, job_no: 'JOB-005'},
{id: 6, name: '철근 조립', duration: 6, cost: 90, job_no: 'JOB-006'},
{id: 7, name: '내부 라이닝 콘크리트 타설', duration: 12, cost: 200, job_no: 'JOB-007'},
{id: 8, name: '조명 및 환기시설 설치', duration: 9, cost: 110, job_no: 'JOB-008'},
{id: 9, name: '안전시설물 설치', duration: 4, cost: 60, job_no: 'JOB-009'},
{id: 10, name: '포장 및 차선 도색', duration: 7, cost: 95, job_no: 'JOB-010'},
{id: 11, name: 'TBM 준비', duration: 15, cost: 500, job_no: 'JOB-011'},
{id: 12, name: 'TBM 굴진', duration: 30, cost: 1200, job_no: 'JOB-012'},
{id: 13, name: '세그먼트 조립', duration: 25, cost: 800, job_no: 'JOB-013'},
{id: 14, name: '그라우팅', duration: 10, cost: 150, job_no: 'JOB-014'},
{id: 15, name: 'TBM 해체 및 반출', duration: 12, cost: 300, job_no: 'JOB-015'},
{id: 16, name: '전기/통신 케이블 설치', duration: 8, cost: 130, job_no: 'JOB-016'},
{id: 17, name: 'CCTV 및 VMS 설치', duration: 5, cost: 70, job_no: 'JOB-017'},
{id: 18, name: '소방시설 설치', duration: 6, cost: 85, job_no: 'JOB-018'},
{id: 19, name: '최종 점검', duration: 3, cost: 50, job_no: 'JOB-019'},
{id: 20, name: '개통 준비', duration: 2, cost: 30, job_no: 'JOB-020'}
] AS row
CREATE (n:Job {id: row.id, name: row.name, duration: row.duration, cost: row.cost, job_no: row.job_no});
// Object Instances
UNWIND [
{id: 'OBJ01', name: '굴착기-A01', obj_no: 'EQ-001'},
{id: 'OBJ02', name: '숏크리트 펌프-A', obj_no: 'EQ-002'},
{id: 'OBJ03', name: '강지보재-L100', obj_no: 'MTR-001'},
{id: 'OBJ04', name: '방수시트-S20', obj_no: 'MTR-002'},
{id: 'OBJ05', name: '콘크리트 믹서-T1', obj_no: 'EQ-003'},
{id: 'OBJ06', name: '철근-D16', obj_no: 'MTR-003'},
{id: 'OBJ07', name: '조명등-LED-1', obj_no: 'MTR-004'},
{id: 'OBJ08', name: '환풍기-F1', obj_no: 'EQ-004'},
{id: 'OBJ09', name: 'TBM-Shield-1', obj_no: 'EQ-005'},
{id: 'OBJ10', name: '세그먼트-A타입', obj_no: 'MTR-005'},
{id: 'OBJ11', name: '그라우트 믹서-G1', obj_no: 'EQ-006'},
{id: 'OBJ12', name: '전원 케이블-HV-1', obj_no: 'MTR-006'},
{id: 'OBJ13', name: 'CCTV-001', obj_no: 'EQ-007'},
{id: 'OBJ14', name: '소화기-P1', obj_no: 'MTR-007'},
{id: 'OBJ15', name: '차선도색기-Y1', obj_no: 'EQ-008'},
{id: 'OBJ16', name: '숏크리트-Batch1', obj_no: 'MTR-008'},
{id: 'OBJ17', name: '숏크리트-Batch2', obj_no: 'MTR-009'}
] AS row
CREATE (n:Object {id: row.id, name: row.name, obj_no: row.obj_no});
// Management Status Nodes
UNWIND [
{id: 'CS_APP', type: 'Construction', status: 'APPROVED'},
{id: 'CS_CON', type: 'Construction', status: 'CONFIRMED'},
{id: 'CS_PLN', type: 'Construction', status: 'PLANNING'},
{id: 'PS_EXE', type: 'Project', status: 'EXECUTING'},
{id: 'PS_PLN', type: 'Project', status: 'PLANNING'},
{id: 'PM_PAID', type: 'Payment', status: 'PAID'},
{id: 'PM_APL', type: 'Payment', status: 'APPLIED'},
{id: 'PM_NON', type: 'Payment', status: 'NONE'},
{id: 'QS_APP', type: 'Quality', status: 'APPROVED'},
{id: 'QS_CON', type: 'Quality', status: 'CONFIRMED'},
{id: 'QS_PLN', type: 'Quality', status: 'PLANNING'},
{id: 'SS_APP', type: 'Safety', status: 'APPROVED'},
{id: 'SS_CON', type: 'Safety', status: 'CONFIRMED'},
{id: 'SS_PLN', type: 'Safety', status: 'PLANNING'}
] AS row
CREATE (n:MgmtStatus {id: row.id, type: row.type, status: row.status});
// 4. Create Relationships
// Job IS_A JobType
UNWIND [
{from: 1, to: 'JT01'}, {from: 2, to: 'JT02'}, {from: 3, to: 'JT03'}, {from: 4, to: 'JT02'}, {from: 5, to: 'JT04'},
{from: 6, to: 'JT05'}, {from: 7, to: 'JT06'}, {from: 8, to: 'JT07'}, {from: 9, to: 'JT08'}, {from: 10, to: 'JT09'},
{from: 11, to: 'JT10'}, {from: 12, to: 'JT11'}, {from: 13, to: 'JT12'}, {from: 14, to: 'JT13'}, {from: 15, to: 'JT14'},
{from: 16, to: 'JT15'}, {from: 17, to: 'JT16'}, {from: 18, to: 'JT17'}, {from: 19, to: 'JT18'}, {from: 20, to: 'JT19'}
] AS rel
MATCH (from:Job {id: rel.from})
MATCH (to:JobType {id: rel.to})
CREATE (from)-[:IS_A]->(to);
// Object IS_A ObjectType
UNWIND [
{from: 'OBJ01', to: 'OT01'}, {from: 'OBJ02', to: 'OT02'}, {from: 'OBJ03', to: 'OT03'}, {from: 'OBJ04', to: 'OT04'},
{from: 'OBJ05', to: 'OT05'}, {from: 'OBJ06', to: 'OT06'}, {from: 'OBJ07', to: 'OT07'}, {from: 'OBJ08', to: 'OT08'},
{from: 'OBJ09', to: 'OT09'}, {from: 'OBJ10', to: 'OT10'}, {from: 'OBJ11', to: 'OT11'}, {from: 'OBJ12', to: 'OT12'},
{from: 'OBJ13', to: 'OT13'}, {from: 'OBJ14', to: 'OT14'}, {from: 'OBJ15', to: 'OT15'}, {from: 'OBJ16', to: 'OT16'},
{from: 'OBJ17', to: 'OT16'}
] AS rel
MATCH (from:Object {id: rel.from})
MATCH (to:ObjectType {id: rel.to})
CREATE (from)-[:IS_A]->(to);
// Job PRECEDES Job
UNWIND [
{from: 1, to: 2}, {from: 2, to: 3}, {from: 3, to: 4}, {from: 4, to: 5}, {from: 5, to: 6}, {from: 6, to: 7},
{from: 7, to: 8}, {from: 8, to: 9}, {from: 9, to: 10}, {from: 11, to: 12}, {from: 12, to: 13}, {from: 13, to: 14},
{from: 14, to: 12}, {from: 14, to: 15}, {from: 15, to: 16}, {from: 16, to: 17}, {from: 17, to: 18}, {from: 18, to: 19},
{from: 19, to: 20}
] AS rel
MATCH (from:Job {id: rel.from})
MATCH (to:Job {id: rel.to})
CREATE (from)-[:PRECEDES]->(to);
// Job REQUIRES Object
UNWIND [
{from: 1, to: 'OBJ01', quantity: 1, unit: 'unit'},
{from: 2, to: 'OBJ02', quantity: 1, unit: 'unit'},
{from: 2, to: 'OBJ16', quantity: 50, unit: 'ton'},
{from: 3, to: 'OBJ03', quantity: 100, unit: 'meter'},
{from: 4, to: 'OBJ02', quantity: 1, unit: 'unit'},
{from: 4, to: 'OBJ17', quantity: 40, unit: 'ton'}
] AS rel
MATCH (from:Job {id: rel.from})
MATCH (to:Object {id: rel.to})
CREATE (from)-[:REQUIRES {quantity: rel.quantity, unit: rel.unit}]->(to);
// Job HAS_STATUS MgmtStatus
UNWIND [
{from: 1, to: 'CS_APP'}, {from: 1, to: 'PS_EXE'}, {from: 1, to: 'PM_PAID'}, {from: 1, to: 'QS_APP'}, {from: 1, to: 'SS_APP'},
{from: 2, to: 'CS_APP'}, {from: 2, to: 'PS_EXE'}, {from: 2, to: 'PM_PAID'}, {from: 2, to: 'QS_APP'}, {from: 2, to: 'SS_APP'},
{from: 3, to: 'CS_CON'}, {from: 3, to: 'PS_EXE'}, {from: 3, to: 'PM_APL'}, {from: 3, to: 'QS_CON'}, {from: 3, to: 'SS_CON'},
{from: 4, to: 'CS_PLN'}, {from: 4, to: 'PS_PLN'}, {from: 4, to: 'PM_NON'}, {from: 4, to: 'QS_PLN'}, {from: 4, to: 'SS_PLN'},
{from: 5, to: 'CS_PLN'}, {from: 5, to: 'PS_PLN'}, {from: 5, to: 'PM_NON'}, {from: 5, to: 'QS_PLN'}, {from: 5, to: 'SS_PLN'}
] AS rel
MATCH (from:Job {id: rel.from})
MATCH (to:MgmtStatus {id: rel.to})
CREATE (from)-[:HAS_STATUS]->(to);


@@ -0,0 +1,183 @@
MATCH (n) DETACH DELETE n;
CREATE INDEX ON :JobType(id);
CREATE INDEX ON :ObjectType(id);
CREATE INDEX ON :Job(id);
CREATE INDEX ON :Object(id);
CREATE INDEX ON :MgmtStatus(id);
CREATE INDEX ON :Scenario(id);
UNWIND [
{id: 'JT01', name: '터널 입구 굴착', duration: 10, cost: 150},
{id: 'JT02', name: '숏크리트 타설', duration: 5, cost: 80},
{id: 'JT03', name: '강지보 설치', duration: 7, cost: 120},
{id: 'JT04', name: '방수 및 배수시설 설치', duration: 8, cost: 100},
{id: 'JT05', name: '철근 조립', duration: 6, cost: 90},
{id: 'JT06', name: '내부 라이닝 콘크리트 타설', duration: 12, cost: 200},
{id: 'JT07', name: '조명 및 환기시설 설치', duration: 9, cost: 110},
{id: 'JT08', name: '안전시설물 설치', duration: 4, cost: 60},
{id: 'JT09', name: '포장 및 차선 도색', duration: 7, cost: 95},
{id: 'JT10', name: 'TBM 준비', duration: 15, cost: 500},
{id: 'JT11', name: 'TBM 굴진', duration: 30, cost: 1200},
{id: 'JT12', name: '세그먼트 조립', duration: 25, cost: 800},
{id: 'JT13', name: '그라우팅', duration: 10, cost: 150},
{id: 'JT14', name: 'TBM 해체 및 반출', duration: 12, cost: 300},
{id: 'JT15', name: '전기/통신 케이블 설치', duration: 8, cost: 130},
{id: 'JT16', name: 'CCTV 및 VMS 설치', duration: 5, cost: 70},
{id: 'JT17', name: '소방시설 설치', duration: 6, cost: 85},
{id: 'JT18', name: '최종 점검', duration: 3, cost: 50},
{id: 'JT19', name: '개통 준비', duration: 2, cost: 30}
] AS row
CREATE (n:JobType {id: row.id, name: row.name, standard_duration: row.duration, standard_cost: row.cost});
UNWIND [
{id: 'OT01', name: '굴착기', category: '장비'},
{id: 'OT02', name: '숏크리트 펌프', category: '장비'},
{id: 'OT03', name: '강지보재', category: '자재'},
{id: 'OT04', name: '방수시트', category: '자재'},
{id: 'OT05', name: '콘크리트 믹서', category: '장비'},
{id: 'OT06', name: '철근', category: '자재'},
{id: 'OT07', name: '조명등', category: '자재'},
{id: 'OT08', name: '환풍기', category: '장비'},
{id: 'OT09', name: 'TBM', category: '장비'},
{id: 'OT10', name: '세그먼트', category: '자재'},
{id: 'OT11', name: '그라우트 믹서', category: '장비'},
{id: 'OT12', name: '케이블', category: '자재'},
{id: 'OT13', name: 'CCTV', category: '장비'},
{id: 'OT14', name: '소화기', category: '자재'},
{id: 'OT15', name: '차선도색기', category: '장비'},
{id: 'OT16', name: '숏크리트', category: '자재'}
] AS row
CREATE (n:ObjectType {id: row.id, name: row.name, category: row.category});
UNWIND [
{id: 1, name: '터널 입구 굴착', base_duration: 10, base_cost: 150, job_no: 'JOB-001'},
{id: 2, name: '1차 숏크리트 타설', base_duration: 5, base_cost: 80, job_no: 'JOB-002'},
{id: 3, name: '강지보 설치', base_duration: 7, base_cost: 120, job_no: 'JOB-003'},
{id: 4, name: '2차 숏크리트 타설', base_duration: 5, base_cost: 80, job_no: 'JOB-004'},
{id: 5, name: '방수 및 배수시설 설치', base_duration: 8, base_cost: 100, job_no: 'JOB-005'},
{id: 6, name: '철근 조립', base_duration: 6, base_cost: 90, job_no: 'JOB-006'},
{id: 7, name: '내부 라이닝 콘크리트 타설', base_duration: 12, base_cost: 200, job_no: 'JOB-007'},
{id: 8, name: '조명 및 환기시설 설치', base_duration: 9, base_cost: 110, job_no: 'JOB-008'},
{id: 9, name: '안전시설물 설치', base_duration: 4, base_cost: 60, job_no: 'JOB-009'},
{id: 10, name: '포장 및 차선 도색', base_duration: 7, base_cost: 95, job_no: 'JOB-010'},
{id: 11, name: 'TBM 준비', base_duration: 15, base_cost: 500, job_no: 'JOB-011'},
{id: 12, name: 'TBM 굴진', base_duration: 30, base_cost: 1200, job_no: 'JOB-012'},
{id: 13, name: '세그먼트 조립', base_duration: 25, base_cost: 800, job_no: 'JOB-013'},
{id: 14, name: '그라우팅', base_duration: 10, base_cost: 150, job_no: 'JOB-014'},
{id: 15, name: 'TBM 해체 및 반출', base_duration: 12, base_cost: 300, job_no: 'JOB-015'},
{id: 16, name: '전기/통신 케이블 설치', base_duration: 8, base_cost: 130, job_no: 'JOB-016'},
{id: 17, name: 'CCTV 및 VMS 설치', base_duration: 5, base_cost: 70, job_no: 'JOB-017'},
{id: 18, name: '소방시설 설치', base_duration: 6, base_cost: 85, job_no: 'JOB-018'},
{id: 19, name: '최종 점검', base_duration: 3, base_cost: 50, job_no: 'JOB-019'},
{id: 20, name: '개통 준비', base_duration: 2, base_cost: 30, job_no: 'JOB-020'}
] AS row
CREATE (n:Job {id: row.id, name: row.name, base_duration: row.base_duration, base_cost: row.base_cost, job_no: row.job_no});
UNWIND [
{id: 'OBJ01', name: '굴착기-A01', obj_no: 'EQ-001'},
{id: 'OBJ02', name: '숏크리트 펌프-A', obj_no: 'EQ-002'},
{id: 'OBJ03', name: '강지보재-L100', obj_no: 'MTR-001'},
{id: 'OBJ04', name: '방수시트-S20', obj_no: 'MTR-002'},
{id: 'OBJ05', name: '콘크리트 믹서-T1', obj_no: 'EQ-003'},
{id: 'OBJ06', name: '철근-D16', obj_no: 'MTR-003'},
{id: 'OBJ07', name: '조명등-LED-1', obj_no: 'MTR-004'},
{id: 'OBJ08', name: '환풍기-F1', obj_no: 'EQ-004'},
{id: 'OBJ09', name: 'TBM-Shield-1', obj_no: 'EQ-005'},
{id: 'OBJ10', name: '세그먼트-A타입', obj_no: 'MTR-005'},
{id: 'OBJ11', name: '그라우트 믹서-G1', obj_no: 'EQ-006'},
{id: 'OBJ12', name: '전원 케이블-HV-1', obj_no: 'MTR-006'},
{id: 'OBJ13', name: 'CCTV-001', obj_no: 'EQ-007'},
{id: 'OBJ14', name: '소화기-P1', obj_no: 'MTR-007'},
{id: 'OBJ15', name: '차선도색기-Y1', obj_no: 'EQ-008'},
{id: 'OBJ16', name: '숏크리트-Batch1', obj_no: 'MTR-008'},
{id: 'OBJ17', name: '숏크리트-Batch2', obj_no: 'MTR-009'}
] AS row
CREATE (n:Object {id: row.id, name: row.name, obj_no: row.obj_no});
UNWIND [
{id: 'CS_APP', type: 'Construction', status: 'APPROVED'},
{id: 'CS_CON', type: 'Construction', status: 'CONFIRMED'},
{id: 'CS_PLN', type: 'Construction', status: 'PLANNING'},
{id: 'PS_EXE', type: 'Project', status: 'EXECUTING'},
{id: 'PS_PLN', type: 'Project', status: 'PLANNING'},
{id: 'PM_PAID', type: 'Payment', status: 'PAID'},
{id: 'PM_APL', type: 'Payment', status: 'APPLIED'},
{id: 'PM_NON', type: 'Payment', status: 'NONE'},
{id: 'QS_APP', type: 'Quality', status: 'APPROVED'},
{id: 'QS_CON', type: 'Quality', status: 'CONFIRMED'},
{id: 'QS_PLN', type: 'Quality', status: 'PLANNING'},
{id: 'SS_APP', type: 'Safety', status: 'APPROVED'},
{id: 'SS_CON', type: 'Safety', status: 'CONFIRMED'},
{id: 'SS_PLN', type: 'Safety', status: 'PLANNING'}
] AS row
CREATE (n:MgmtStatus {id: row.id, type: row.type, status: row.status});
// Scenario Nodes
UNWIND [
{id: 'base', description: '기본 공정 시나리오'},
{id: 'what-if-1', description: 'TBM 굴진 지연 가정 시나리오'}
] AS row
CREATE (n:Scenario {id: row.id, description: row.description});
// MODIFIES Relationships (for what-if-1 scenario)
UNWIND [
{scenario_id: 'what-if-1', job_id: 12, new_duration: 50, new_cost: 1500}
] AS delta
MATCH (s:Scenario {id: delta.scenario_id})
MATCH (j:Job {id: delta.job_id})
CREATE (s)-[:MODIFIES {new_duration: delta.new_duration, new_cost: delta.new_cost}]->(j);
UNWIND [
{from: 1, to: 'JT01'}, {from: 2, to: 'JT02'}, {from: 3, to: 'JT03'}, {from: 4, to: 'JT02'}, {from: 5, to: 'JT04'},
{from: 6, to: 'JT05'}, {from: 7, to: 'JT06'}, {from: 8, to: 'JT07'}, {from: 9, to: 'JT08'}, {from: 10, to: 'JT09'},
{from: 11, to: 'JT10'}, {from: 12, to: 'JT11'}, {from: 13, to: 'JT12'}, {from: 14, to: 'JT13'}, {from: 15, to: 'JT14'},
{from: 16, to: 'JT15'}, {from: 17, to: 'JT16'}, {from: 18, to: 'JT17'}, {from: 19, to: 'JT18'}, {from: 20, to: 'JT19'}
] AS rel
MATCH (from:Job {id: rel.from})
MATCH (to:JobType {id: rel.to})
CREATE (from)-[:IS_A]->(to);
UNWIND [
{from: 'OBJ01', to: 'OT01'}, {from: 'OBJ02', to: 'OT02'}, {from: 'OBJ03', to: 'OT03'}, {from: 'OBJ04', to: 'OT04'},
{from: 'OBJ05', to: 'OT05'}, {from: 'OBJ06', to: 'OT06'}, {from: 'OBJ07', to: 'OT07'}, {from: 'OBJ08', to: 'OT08'},
{from: 'OBJ09', to: 'OT09'}, {from: 'OBJ10', to: 'OT10'}, {from: 'OBJ11', to: 'OT11'}, {from: 'OBJ12', to: 'OT12'},
{from: 'OBJ13', to: 'OT13'}, {from: 'OBJ14', to: 'OT14'}, {from: 'OBJ15', to: 'OT15'}, {from: 'OBJ16', to: 'OT16'},
{from: 'OBJ17', to: 'OT16'}
] AS rel
MATCH (from:Object {id: rel.from})
MATCH (to:ObjectType {id: rel.to})
CREATE (from)-[:IS_A]->(to);
UNWIND [
{from: 1, to: 2}, {from: 2, to: 3}, {from: 3, to: 4}, {from: 4, to: 5}, {from: 5, to: 6}, {from: 6, to: 7},
{from: 7, to: 8}, {from: 8, to: 9}, {from: 9, to: 10}, {from: 11, to: 12}, {from: 12, to: 13}, {from: 13, to: 14},
{from: 14, to: 15}, {from: 15, to: 16}, {from: 16, to: 17}, {from: 17, to: 18}, {from: 18, to: 19},
{from: 19, to: 20}
] AS rel
MATCH (from:Job {id: rel.from})
MATCH (to:Job {id: rel.to})
CREATE (from)-[:PRECEDES]->(to);
UNWIND [
{from: 1, to: 'OBJ01', quantity: 1, unit: 'unit'},
{from: 2, to: 'OBJ02', quantity: 1, unit: 'unit'},
{from: 2, to: 'OBJ16', quantity: 50, unit: 'ton'},
{from: 3, to: 'OBJ03', quantity: 100, unit: 'meter'},
{from: 4, to: 'OBJ02', quantity: 1, unit: 'unit'},
{from: 4, to: 'OBJ17', quantity: 40, unit: 'ton'}
] AS rel
MATCH (from:Job {id: rel.from})
MATCH (to:Object {id: rel.to})
CREATE (from)-[:REQUIRES {quantity: rel.quantity, unit: rel.unit}]->(to);
UNWIND [
{from: 1, to: 'CS_APP'}, {from: 1, to: 'PS_EXE'}, {from: 1, to: 'PM_PAID'}, {from: 1, to: 'QS_APP'}, {from: 1, to: 'SS_APP'},
{from: 2, to: 'CS_APP'}, {from: 2, to: 'PS_EXE'}, {from: 2, to: 'PM_PAID'}, {from: 2, to: 'QS_APP'}, {from: 2, to: 'SS_APP'},
{from: 3, to: 'CS_CON'}, {from: 3, to: 'PS_EXE'}, {from: 3, to: 'PM_APL'}, {from: 3, to: 'QS_CON'}, {from: 3, to: 'SS_CON'},
{from: 4, to: 'CS_PLN'}, {from: 4, to: 'PS_PLN'}, {from: 4, to: 'PM_NON'}, {from: 4, to: 'QS_PLN'}, {from: 4, to: 'SS_PLN'},
{from: 5, to: 'CS_PLN'}, {from: 5, to: 'PS_PLN'}, {from: 5, to: 'PM_NON'}, {from: 5, to: 'QS_PLN'}, {from: 5, to: 'SS_PLN'}
] AS rel
MATCH (from:Job {id: rel.from})
MATCH (to:MgmtStatus {id: rel.to})
CREATE (from)-[:HAS_STATUS]->(to);


@@ -0,0 +1,88 @@
// Comprehensive Data Generation Script
// This script deletes all existing data, creates JobTypes from the provided list,
// generates multiple Job instances for each type, and wires them into different scenarios.
// 1. Clean up the database
MATCH (n) DETACH DELETE n;
// 2. Create Indexes
CREATE INDEX ON :JobType(id);
CREATE INDEX ON :Job(id);
CREATE INDEX ON :Scenario(id);
// 3. Create JobType Nodes from the provided examples
UNWIND [
{id: 'JOB1', name: '전단면굴착1#제어발파_굴진장3m초과발파'},
{id: 'JOB2', name: '막장면1#시공계획서작성'},
{id: 'JOB3', name: '막장면1#시공측량'},
{id: 'JOB4', name: '막장면1#측점검측'},
{id: 'JOB5', name: '막장면1#암판정시행'},
{id: 'JOB6', name: '막장면1#천공준비'},
{id: 'JOB7', name: '막장면1#막장면천공'},
{id: 'JOB8', name: '막장면1#막장면천공시행'},
{id: 'JOB9', name: '전단면굴착1#발파준비'},
{id: 'JOB10', name: '전단면굴착1#발파작업'},
{id: 'JOB11', name: '전단면굴착1#환기시행'},
{id: 'JOB12', name: '전단면굴착1#측량및마킹'},
{id: 'JOB13', name: '버력1#버력처리'},
{id: 'JOB14', name: '버력1#부석정리'},
{id: 'JOB15', name: '버력1#버력처리준비'},
{id: 'JOB16', name: '버력1#버력처리시행'},
{id: 'JOB17', name: '버력1#부석제거뒷정리'},
{id: 'JOB18', name: '버력1#운반차입환'},
{id: 'JOB19', name: '강섬유보강숏크리트1#본선부설치'},
{id: 'JOB20', name: '강섬유보강숏크리트1#뜬돌제거'},
{id: 'JOB21', name: '강섬유보강숏크리트1#측량및여굴량확인'},
{id: 'JOB22', name: '강섬유보강숏크리트1#Sealing시행'},
{id: 'JOB23', name: '강섬유보강숏크리트1_1#본선부설치'},
{id: 'JOB24', name: '강섬유보강숏크리트1_1#타설준비'},
{id: 'JOB25', name: '강섬유보강숏크리트1_1#바닥정리및면정리'},
{id: 'JOB26', name: '강섬유보강숏크리트1_1#뿜어붙이기'},
{id: 'JOB27', name: '강섬유보강숏크리트1_1#잔재제거'},
{id: 'JOB28', name: '강섬유보강숏크리트1_1#장비점검'},
{id: 'JOB29', name: '전단면굴착2#제어발파_굴진장3m초과발파'}
] AS jt
CREATE (:JobType {id: jt.id, name: jt.name, base_duration: 10 + toInteger(substring(jt.id, 3)), base_cost: 100 + (10 * toInteger(substring(jt.id, 3)))});
// 4. Create PRECEDES relationships between JobTypes (The "Absolute Rules")
UNWIND [
{from: 'JOB2', to: 'JOB3'}, {from: 'JOB3', to: 'JOB4'}, {from: 'JOB4', to: 'JOB5'},
{from: 'JOB5', to: 'JOB6'}, {from: 'JOB6', to: 'JOB7'}, {from: 'JOB7', to: 'JOB8'},
{from: 'JOB8', to: 'JOB9'}, {from: 'JOB9', to: 'JOB10'}, {from: 'JOB10', to: 'JOB11'},
{from: 'JOB11', to: 'JOB12'}, {from: 'JOB12', to: 'JOB13'}, {from: 'JOB12', to: 'JOB14'}, // Fork
{from: 'JOB14', to: 'JOB15'}, {from: 'JOB15', to: 'JOB16'}, {from: 'JOB16', to: 'JOB17'},
{from: 'JOB17', to: 'JOB18'}, {from: 'JOB18', to: 'JOB19'}, {from: 'JOB18', to: 'JOB20'}, // Fork
{from: 'JOB20', to: 'JOB21'}, {from: 'JOB21', to: 'JOB22'}, {from: 'JOB22', to: 'JOB23'},
{from: 'JOB22', to: 'JOB24'}, // Fork
{from: 'JOB24', to: 'JOB25'}, {from: 'JOB25', to: 'JOB26'}, {from: 'JOB26', to: 'JOB27'},
{from: 'JOB27', to: 'JOB28'}, {from: 'JOB28', to: 'JOB29'}
] AS rel
MATCH (from:JobType {id: rel.from}), (to:JobType {id: rel.to})
CREATE (from)-[:PRECEDES]->(to);
// 5. Generate Job Instances (e.g., 2 instances per JobType)
MATCH (jt:JobType)
UNWIND range(1, 2) AS i
CREATE (:Job {
id: jt.id + '-' + i,
name: jt.name + ' (Instance ' + i + ')',
base_duration: jt.base_duration,
base_cost: jt.base_cost
})-[:IS_A]->(jt);
// 6. Create Scenarios
// Scenario A: Simple Linear Process (Always use instance 1)
CREATE (:Scenario {id: 'linear-process', description: '단순 순차 공정 시나리오'});
MATCH (from_jt:JobType)-[:PRECEDES]->(to_jt:JobType)
MATCH (from_j:Job {id: from_jt.id + '-1'}), (to_j:Job {id: to_jt.id + '-1'})
CREATE (from_j)-[:PRECEDES {scenario: 'linear-process'}]->(to_j);
// Scenario B: Parallel Process (Use instance 2 and model forks)
CREATE (:Scenario {id: 'parallel-process', description: '병렬 공정 시나리오'});
MATCH (from_jt:JobType)-[:PRECEDES]->(to_jt:JobType)
MATCH (from_j:Job {id: from_jt.id + '-2'}), (to_j:Job {id: to_jt.id + '-2'})
CREATE (from_j)-[:PRECEDES {scenario: 'parallel-process'}]->(to_j);
// Add a specific modification for a scenario to test delta logic
// The delta points at the existing Job instance 'JOB16-2'
MATCH (j:Job {id: 'JOB16-2'})
CREATE (:Scenario {id: 'what-if-parallel', description: '병렬 공정 중 JOB16 지연 가정'})-[:MODIFIES {new_duration: 99}]->(j);


@@ -0,0 +1,57 @@
// Path Analysis Query for Table View (Pure Cypher, Final & Performant)
// Calculates and returns path data in a clear, tabular format for a specific scenario.
// Parameter for the scenario ID
// Example: :scenario_id => 'parallel-process'
// 1. Find start nodes for the scenario using OPTIONAL MATCH
MATCH (start_node:Job)
OPTIONAL MATCH (start_node)<-[r_in:PRECEDES {scenario: $scenario_id}]-()
WITH start_node, r_in
WHERE r_in IS NULL
// 2. Find end nodes for the scenario using OPTIONAL MATCH
MATCH (end_node:Job)
OPTIONAL MATCH (end_node)-[r_out:PRECEDES {scenario: $scenario_id}]->()
WITH start_node, end_node, r_out
WHERE r_out IS NULL
// 3. Match paths and filter relationships efficiently using WHERE ALL
MATCH path = (start_node)-[rels:PRECEDES*]->(end_node)
WHERE ALL(r IN rels WHERE r.scenario = $scenario_id)
// 4. For each job in the path, calculate its effective duration and cost
WITH path, nodes(path) AS jobs_on_path
UNWIND jobs_on_path AS job
OPTIONAL MATCH (:Scenario {id: $scenario_id})-[m:MODIFIES]->(job)
WITH
path,
job,
COALESCE(m.new_duration, job.base_duration) AS effective_duration,
COALESCE(m.new_cost, job.base_cost) AS effective_cost
// 5. Group by path and aggregate the pre-calculated effective values
WITH
path,
COLLECT({
name: job.name,
bd: job.base_duration,
ed: effective_duration,
ec: effective_cost
}) AS job_data
WITH
path,
job_data,
REDUCE(s = 0, x IN job_data | s + x.ed) AS total_duration,
REDUCE(s = 0, x IN job_data | s + x.ec) AS total_cost
// 6. Return the calculated data for a clean table view
RETURN
[j IN job_data | j.name] AS path_jobs,
[j IN job_data | j.bd] AS base_durations,
[j IN job_data | j.ed] AS effective_durations,
total_duration,
total_cost
// 7. Order by duration
ORDER BY total_duration DESC;

10
scripts/entrypoint.sh Normal file

@@ -0,0 +1,10 @@
#!/bin/bash
set -e
echo "Entrypoint script running as $(whoami)"
echo "Fixing permissions for /var/lib/memgraph..."
chown -R user:user /var/lib/memgraph
echo "Starting Memgraph process as user 'user'..."
exec gosu user /usr/lib/memgraph/memgraph


@@ -0,0 +1,11 @@
// Event Simulation Query: Job Delay
// Simulates an event where a specific job's duration increases.
// Match the specific Job instance (e.g., 'TBM 준비' with id 11)
MATCH (j:Job {id: 11})
// Use SET to update its duration property
SET j.duration = 50
// Return the updated job to confirm the change
RETURN j.name, j.duration;