Merge pull request #23 from ktds-dg0501/feature/distribution

merge feature/distribution into develop branch
This commit is contained in:
이선민 2025-10-29 10:14:56 +09:00 committed by GitHub
commit 23265b5849
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
50 changed files with 2815 additions and 1206 deletions

View File

@ -0,0 +1,31 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="AiServiceApplication" type="SpringBootApplicationConfigurationType" factoryName="Spring Boot" nameIsGenerated="true">
<option name="ACTIVE_PROFILES" />
<module name="kt-event-marketing.ai-service.main" />
<option name="SPRING_BOOT_MAIN_CLASS" value="com.kt.ai.AiApplication" />
<extension name="coverage">
<pattern>
<option name="PATTERN" value="com.kt.ai.*" />
<option name="ENABLED" value="true" />
</pattern>
</extension>
<envs>
<env name="SERVER_PORT" value="8081" />
<env name="DB_HOST" value="4.230.112.141" />
<env name="DB_PORT" value="5432" />
<env name="DB_NAME" value="aidb" />
<env name="DB_USERNAME" value="eventuser" />
<env name="DB_PASSWORD" value="Hi5Jessica!" />
<env name="REDIS_HOST" value="20.214.210.71" />
<env name="REDIS_PORT" value="6379" />
<env name="REDIS_PASSWORD" value="Hi5Jessica!" />
<env name="KAFKA_BOOTSTRAP_SERVERS" value="4.230.50.63:9092" />
<env name="KAFKA_CONSUMER_GROUP" value="ai" />
<env name="JPA_DDL_AUTO" value="update" />
<env name="JPA_SHOW_SQL" value="false" />
</envs>
<method v="2">
<option name="Make" enabled="true" />
</method>
</configuration>
</component>

View File

@ -0,0 +1,31 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="AnalyticsServiceApplication" type="SpringBootApplicationConfigurationType" factoryName="Spring Boot" nameIsGenerated="true">
<option name="ACTIVE_PROFILES" />
<module name="kt-event-marketing.analytics-service.main" />
<option name="SPRING_BOOT_MAIN_CLASS" value="com.kt.analytics.AnalyticsApplication" />
<extension name="coverage">
<pattern>
<option name="PATTERN" value="com.kt.analytics.*" />
<option name="ENABLED" value="true" />
</pattern>
</extension>
<envs>
<env name="SERVER_PORT" value="8087" />
<env name="DB_HOST" value="4.230.49.9" />
<env name="DB_PORT" value="5432" />
<env name="DB_NAME" value="analyticdb" />
<env name="DB_USERNAME" value="eventuser" />
<env name="DB_PASSWORD" value="Hi5Jessica!" />
<env name="REDIS_HOST" value="20.214.210.71" />
<env name="REDIS_PORT" value="6379" />
<env name="REDIS_PASSWORD" value="Hi5Jessica!" />
<env name="KAFKA_BOOTSTRAP_SERVERS" value="4.230.50.63:9092" />
<env name="KAFKA_CONSUMER_GROUP" value="analytic" />
<env name="JPA_DDL_AUTO" value="update" />
<env name="JPA_SHOW_SQL" value="false" />
</envs>
<method v="2">
<option name="Make" enabled="true" />
</method>
</configuration>
</component>

View File

@ -0,0 +1,29 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="ContentServiceApplication" type="SpringBootApplicationConfigurationType" factoryName="Spring Boot" nameIsGenerated="true">
<option name="ACTIVE_PROFILES" />
<module name="kt-event-marketing.content-service.main" />
<option name="SPRING_BOOT_MAIN_CLASS" value="com.kt.content.ContentApplication" />
<extension name="coverage">
<pattern>
<option name="PATTERN" value="com.kt.content.*" />
<option name="ENABLED" value="true" />
</pattern>
</extension>
<envs>
<env name="SERVER_PORT" value="8084" />
<env name="DB_HOST" value="4.217.131.139" />
<env name="DB_PORT" value="5432" />
<env name="DB_NAME" value="contentdb" />
<env name="DB_USERNAME" value="eventuser" />
<env name="DB_PASSWORD" value="Hi5Jessica!" />
<env name="REDIS_HOST" value="20.214.210.71" />
<env name="REDIS_PORT" value="6379" />
<env name="REDIS_PASSWORD" value="Hi5Jessica!" />
<env name="JPA_DDL_AUTO" value="update" />
<env name="JPA_SHOW_SQL" value="false" />
</envs>
<method v="2">
<option name="Make" enabled="true" />
</method>
</configuration>
</component>

View File

@ -0,0 +1,31 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="DistributionServiceApplication" type="SpringBootApplicationConfigurationType" factoryName="Spring Boot" nameIsGenerated="true">
<option name="ACTIVE_PROFILES" />
<module name="kt-event-marketing.distribution-service.main" />
<option name="SPRING_BOOT_MAIN_CLASS" value="com.kt.distribution.DistributionApplication" />
<extension name="coverage">
<pattern>
<option name="PATTERN" value="com.kt.distribution.*" />
<option name="ENABLED" value="true" />
</pattern>
</extension>
<envs>
<env name="SERVER_PORT" value="8085" />
<env name="DB_HOST" value="4.217.133.59" />
<env name="DB_PORT" value="5432" />
<env name="DB_NAME" value="distributiondb" />
<env name="DB_USERNAME" value="eventuser" />
<env name="DB_PASSWORD" value="Hi5Jessica!" />
<env name="REDIS_HOST" value="20.214.210.71" />
<env name="REDIS_PORT" value="6379" />
<env name="REDIS_PASSWORD" value="Hi5Jessica!" />
<env name="KAFKA_BOOTSTRAP_SERVERS" value="4.230.50.63:9092" />
<env name="KAFKA_CONSUMER_GROUP" value="distribution-service" />
<env name="JPA_DDL_AUTO" value="update" />
<env name="JPA_SHOW_SQL" value="false" />
</envs>
<method v="2">
<option name="Make" enabled="true" />
</method>
</configuration>
</component>

View File

@ -0,0 +1,31 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="EventServiceApplication" type="SpringBootApplicationConfigurationType" factoryName="Spring Boot" nameIsGenerated="true">
<option name="ACTIVE_PROFILES" />
<module name="kt-event-marketing.event-service.main" />
<option name="SPRING_BOOT_MAIN_CLASS" value="com.kt.event.EventApplication" />
<extension name="coverage">
<pattern>
<option name="PATTERN" value="com.kt.event.*" />
<option name="ENABLED" value="true" />
</pattern>
</extension>
<envs>
<env name="SERVER_PORT" value="8082" />
<env name="DB_HOST" value="20.249.177.232" />
<env name="DB_PORT" value="5432" />
<env name="DB_NAME" value="eventdb" />
<env name="DB_USERNAME" value="eventuser" />
<env name="DB_PASSWORD" value="Hi5Jessica!" />
<env name="REDIS_HOST" value="20.214.210.71" />
<env name="REDIS_PORT" value="6379" />
<env name="REDIS_PASSWORD" value="Hi5Jessica!" />
<env name="KAFKA_BOOTSTRAP_SERVERS" value="4.230.50.63:9092" />
<env name="DISTRIBUTION_SERVICE_URL" value="http://localhost:8085" />
<env name="JPA_DDL_AUTO" value="update" />
<env name="JPA_SHOW_SQL" value="false" />
</envs>
<method v="2">
<option name="Make" enabled="true" />
</method>
</configuration>
</component>

View File

@ -0,0 +1,29 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="UserServiceApplication" type="SpringBootApplicationConfigurationType" factoryName="Spring Boot" nameIsGenerated="true">
<option name="ACTIVE_PROFILES" />
<module name="kt-event-marketing.user-service.main" />
<option name="SPRING_BOOT_MAIN_CLASS" value="com.kt.user.UserApplication" />
<extension name="coverage">
<pattern>
<option name="PATTERN" value="com.kt.user.*" />
<option name="ENABLED" value="true" />
</pattern>
</extension>
<envs>
<env name="SERVER_PORT" value="8083" />
<env name="DB_HOST" value="20.249.125.115" />
<env name="DB_PORT" value="5432" />
<env name="DB_NAME" value="userdb" />
<env name="DB_USERNAME" value="eventuser" />
<env name="DB_PASSWORD" value="Hi5Jessica!" />
<env name="REDIS_HOST" value="20.214.210.71" />
<env name="REDIS_PORT" value="6379" />
<env name="REDIS_PASSWORD" value="Hi5Jessica!" />
<env name="JPA_DDL_AUTO" value="update" />
<env name="JPA_SHOW_SQL" value="false" />
</envs>
<method v="2">
<option name="Make" enabled="true" />
</method>
</configuration>
</component>

View File

@ -11,10 +11,10 @@ info:
- Retry 패턴 및 Fallback 처리
## 배포 채널
- **우리동네TV**: 영상 콘텐츠 업로드
- **링고비즈**: 연결음 업데이트
- **지니TV**: 광고 등록
- **SNS**: Instagram, Naver Blog, Kakao Channel
- **우리동네TV** (URIDONGNETV): 영상 콘텐츠 업로드
- **링고비즈** (RINGOBIZ): 연결음 업데이트
- **지니TV** (GINITV): 광고 등록
- **SNS**: Instagram (INSTAGRAM), Naver Blog (NAVER), Kakao Channel (KAKAO)
## Resilience 패턴
- Circuit Breaker: 채널별 독립적 장애 격리
@ -79,23 +79,21 @@ paths:
summary: 다중 채널 배포 예시
value:
eventId: "evt-12345"
title: "신규 고객 환영 이벤트"
description: "신규 고객님을 위한 특별 할인 이벤트"
imageUrl: "https://cdn.example.com/images/event-main.jpg"
channels:
- type: "WOORIDONGNE_TV"
config:
radius: "1km"
timeSlots:
- "weekday_evening"
- "weekend_lunch"
- type: "INSTAGRAM"
config:
scheduledTime: "2025-11-01T10:00:00Z"
- type: "NAVER_BLOG"
config:
scheduledTime: "2025-11-01T10:30:00Z"
contentUrls:
instagram: "https://cdn.example.com/images/event-instagram.jpg"
naverBlog: "https://cdn.example.com/images/event-naver.jpg"
kakaoChannel: "https://cdn.example.com/images/event-kakao.jpg"
- "URIDONGNETV"
- "INSTAGRAM"
- "NAVER"
channelSettings:
URIDONGNETV:
radius: "1km"
timeSlot: "evening"
INSTAGRAM:
scheduledTime: "2025-11-01T10:00:00"
NAVER:
scheduledTime: "2025-11-01T10:30:00"
responses:
'200':
description: 배포 완료
@ -107,25 +105,29 @@ paths:
allSuccess:
summary: 모든 채널 배포 성공
value:
distributionId: "dist-12345"
eventId: "evt-12345"
status: "COMPLETED"
completedAt: "2025-11-01T09:00:00Z"
results:
- channel: "WOORIDONGNE_TV"
status: "SUCCESS"
success: true
channelResults:
- channel: "URIDONGNETV"
success: true
distributionId: "wtv-uuid-12345"
estimatedViews: 1000
message: "배포 완료"
estimatedReach: 1000
executionTimeMs: 234
- channel: "INSTAGRAM"
status: "SUCCESS"
postUrl: "https://instagram.com/p/generated-post-id"
postId: "ig-post-12345"
message: "게시 완료"
- channel: "NAVER_BLOG"
status: "SUCCESS"
postUrl: "https://blog.naver.com/store123/generated-post"
message: "게시 완료"
success: true
distributionId: "ig-uuid-12345"
estimatedReach: 500
executionTimeMs: 456
- channel: "NAVER"
success: true
distributionId: "naver-uuid-12345"
estimatedReach: 300
executionTimeMs: 123
successCount: 3
failureCount: 0
completedAt: "2025-11-01T09:00:00"
totalExecutionTimeMs: 1234
message: "배포가 성공적으로 완료되었습니다"
'400':
description: 잘못된 요청
content:
@ -217,67 +219,77 @@ paths:
value:
eventId: "evt-12345"
overallStatus: "COMPLETED"
completedAt: "2025-11-01T09:00:00Z"
startedAt: "2025-11-01T08:58:00"
completedAt: "2025-11-01T09:00:00"
channels:
- channel: "WOORIDONGNE_TV"
- channel: "URIDONGNETV"
status: "COMPLETED"
distributionId: "wtv-uuid-12345"
estimatedViews: 1500
completedAt: "2025-11-01T09:00:00Z"
- channel: "RINGO_BIZ"
completedAt: "2025-11-01T09:00:00"
- channel: "RINGOBIZ"
status: "COMPLETED"
updateTimestamp: "2025-11-01T09:00:00Z"
- channel: "GENIE_TV"
updateTimestamp: "2025-11-01T09:00:00"
completedAt: "2025-11-01T09:00:00"
- channel: "GINITV"
status: "COMPLETED"
adId: "gtv-uuid-12345"
impressionSchedule:
- "2025-11-01 18:00-20:00"
- "2025-11-02 12:00-14:00"
completedAt: "2025-11-01T09:00:00"
- channel: "INSTAGRAM"
status: "COMPLETED"
postUrl: "https://instagram.com/p/generated-post-id"
postId: "ig-post-12345"
- channel: "NAVER_BLOG"
completedAt: "2025-11-01T09:00:00"
- channel: "NAVER"
status: "COMPLETED"
postUrl: "https://blog.naver.com/store123/generated-post"
- channel: "KAKAO_CHANNEL"
completedAt: "2025-11-01T09:00:00"
- channel: "KAKAO"
status: "COMPLETED"
messageId: "kakao-msg-12345"
completedAt: "2025-11-01T09:00:00"
inProgress:
summary: 배포 진행중 상태
value:
eventId: "evt-12345"
overallStatus: "IN_PROGRESS"
startedAt: "2025-11-01T08:58:00Z"
startedAt: "2025-11-01T08:58:00"
channels:
- channel: "WOORIDONGNE_TV"
- channel: "URIDONGNETV"
status: "COMPLETED"
distributionId: "wtv-uuid-12345"
estimatedViews: 1500
completedAt: "2025-11-01T08:59:00"
- channel: "INSTAGRAM"
status: "IN_PROGRESS"
progress: 50
- channel: "NAVER_BLOG"
- channel: "NAVER"
status: "PENDING"
partialFailure:
summary: 일부 채널 실패 상태
value:
eventId: "evt-12345"
overallStatus: "PARTIAL_FAILURE"
completedAt: "2025-11-01T09:00:00Z"
startedAt: "2025-11-01T08:58:00"
completedAt: "2025-11-01T09:00:00"
channels:
- channel: "WOORIDONGNE_TV"
- channel: "URIDONGNETV"
status: "COMPLETED"
distributionId: "wtv-uuid-12345"
estimatedViews: 1500
completedAt: "2025-11-01T08:59:00"
- channel: "INSTAGRAM"
status: "FAILED"
errorMessage: "Instagram API 타임아웃"
retries: 3
lastRetryAt: "2025-11-01T08:59:30Z"
- channel: "NAVER_BLOG"
lastRetryAt: "2025-11-01T08:59:30"
- channel: "NAVER"
status: "COMPLETED"
postUrl: "https://blog.naver.com/store123/generated-post"
completedAt: "2025-11-01T09:00:00"
'404':
description: 배포 이력을 찾을 수 없음
content:
@ -305,196 +317,133 @@ components:
required:
- eventId
- channels
- contentUrls
properties:
eventId:
type: string
description: 이벤트 ID
example: "evt-12345"
title:
type: string
description: 이벤트 제목
example: "신규 고객 환영 이벤트"
description:
type: string
description: 이벤트 설명
example: "신규 고객님을 위한 특별 할인 이벤트"
imageUrl:
type: string
description: 이미지 URL (CDN)
example: "https://cdn.example.com/images/event-main.jpg"
channels:
type: array
description: 배포할 채널 목록
minItems: 1
items:
$ref: '#/components/schemas/ChannelConfig'
contentUrls:
type: string
enum:
- URIDONGNETV
- RINGOBIZ
- GINITV
- INSTAGRAM
- NAVER
- KAKAO
example: ["URIDONGNETV", "INSTAGRAM", "NAVER"]
channelSettings:
type: object
description: 플랫폼별 콘텐츠 URL
properties:
wooridongneTV:
type: string
description: 우리동네TV 영상 URL (15초)
example: "https://cdn.example.com/videos/event-15s.mp4"
ringoBiz:
type: string
description: 링고비즈 연결음 파일 URL
example: "https://cdn.example.com/audio/ringtone.mp3"
genieTV:
type: string
description: 지니TV 광고 영상 URL
example: "https://cdn.example.com/videos/event-ad.mp4"
instagram:
type: string
description: Instagram 이미지 URL (1080x1080)
example: "https://cdn.example.com/images/event-instagram.jpg"
naverBlog:
type: string
description: Naver Blog 이미지 URL (800x600)
example: "https://cdn.example.com/images/event-naver.jpg"
kakaoChannel:
type: string
description: Kakao Channel 이미지 URL (800x800)
example: "https://cdn.example.com/images/event-kakao.jpg"
ChannelConfig:
type: object
required:
- type
properties:
type:
type: string
description: 채널 타입
enum:
- WOORIDONGNE_TV
- RINGO_BIZ
- GENIE_TV
- INSTAGRAM
- NAVER_BLOG
- KAKAO_CHANNEL
example: "INSTAGRAM"
config:
type: object
description: 채널별 설정 (채널에 따라 다름)
additionalProperties: true
description: 채널별 추가 설정 (Optional)
additionalProperties:
type: object
additionalProperties: true
example:
scheduledTime: "2025-11-01T10:00:00Z"
caption: "이벤트 안내"
hashtags:
- "이벤트"
- "할인"
URIDONGNETV:
radius: "1km"
timeSlot: "evening"
INSTAGRAM:
scheduledTime: "2025-11-01T10:00:00"
DistributionResponse:
type: object
required:
- distributionId
- eventId
- status
- results
- success
- channelResults
- successCount
- failureCount
properties:
distributionId:
type: string
description: 배포 ID
example: "dist-12345"
eventId:
type: string
description: 이벤트 ID
example: "evt-12345"
status:
type: string
description: 전체 배포 상태
enum:
- PENDING
- IN_PROGRESS
- COMPLETED
- PARTIAL_FAILURE
- FAILED
example: "COMPLETED"
startedAt:
type: string
format: date-time
description: 배포 시작 시각
example: "2025-11-01T08:59:00Z"
success:
type: boolean
description: 배포 성공 여부 (모든 채널 또는 일부 채널 성공)
example: true
channelResults:
type: array
description: 채널별 배포 결과
items:
$ref: '#/components/schemas/ChannelDistributionResult'
successCount:
type: integer
description: 성공한 채널 수
example: 3
failureCount:
type: integer
description: 실패한 채널 수
example: 0
completedAt:
type: string
format: date-time
description: 배포 완료 시각
example: "2025-11-01T09:00:00Z"
results:
type: array
description: 채널별 배포 결과
items:
$ref: '#/components/schemas/ChannelResult'
example: "2025-11-01T09:00:00"
totalExecutionTimeMs:
type: integer
format: int64
description: 전체 배포 소요 시간 (ms)
example: 1234
message:
type: string
description: 메시지
example: "배포가 성공적으로 완료되었습니다"
ChannelResult:
ChannelDistributionResult:
type: object
required:
- channel
- status
- success
properties:
channel:
type: string
description: 채널 타입
enum:
- WOORIDONGNE_TV
- RINGO_BIZ
- GENIE_TV
- URIDONGNETV
- RINGOBIZ
- GINITV
- INSTAGRAM
- NAVER_BLOG
- KAKAO_CHANNEL
- NAVER
- KAKAO
example: "INSTAGRAM"
status:
type: string
description: 채널별 배포 상태
enum:
- PENDING
- IN_PROGRESS
- SUCCESS
- FAILED
example: "SUCCESS"
success:
type: boolean
description: 배포 성공 여부
example: true
distributionId:
type: string
description: 채널별 배포 ID (우리동네TV, 지니TV)
example: "wtv-uuid-12345"
estimatedViews:
description: 배포 ID (성공 시)
example: "dist-uuid-12345"
estimatedReach:
type: integer
description: 예상 노출 수 (우리동네TV, 지니TV)
description: 예상 노출 수 (성공 시)
example: 1500
updateTimestamp:
type: string
format: date-time
description: 업데이트 완료 시각 (링고비즈)
example: "2025-11-01T09:00:00Z"
adId:
type: string
description: 광고 ID (지니TV)
example: "gtv-uuid-12345"
impressionSchedule:
type: array
description: 노출 스케줄 (지니TV)
items:
type: string
example:
- "2025-11-01 18:00-20:00"
- "2025-11-02 12:00-14:00"
postUrl:
type: string
description: 게시물 URL (Instagram, Naver Blog)
example: "https://instagram.com/p/generated-post-id"
postId:
type: string
description: 게시물 ID (Instagram)
example: "ig-post-12345"
messageId:
type: string
description: 메시지 ID (Kakao Channel)
example: "kakao-msg-12345"
message:
type: string
description: 결과 메시지
example: "배포 완료"
errorMessage:
type: string
description: 오류 메시지 (실패 시)
description: 에러 메시지 (실패 시)
example: "Instagram API 타임아웃"
retries:
executionTimeMs:
type: integer
description: 재시도 횟수
example: 0
lastRetryAt:
type: string
format: date-time
description: 마지막 재시도 시각
example: "2025-11-01T08:59:30Z"
format: int64
description: 배포 소요 시간 (ms)
example: 234
DistributionStatusResponse:
type: object
@ -544,12 +493,12 @@ components:
type: string
description: 채널 타입
enum:
- WOORIDONGNE_TV
- RINGO_BIZ
- GENIE_TV
- URIDONGNETV
- RINGOBIZ
- GINITV
- INSTAGRAM
- NAVER_BLOG
- KAKAO_CHANNEL
- NAVER
- KAKAO
example: "INSTAGRAM"
status:
type: string
@ -569,7 +518,7 @@ components:
distributionId:
type: string
description: 채널별 배포 ID
example: "wtv-uuid-12345"
example: "dist-uuid-12345"
estimatedViews:
type: integer
description: 예상 노출 수
@ -578,35 +527,35 @@ components:
type: string
format: date-time
description: 업데이트 완료 시각
example: "2025-11-01T09:00:00Z"
example: "2025-11-01T09:00:00"
adId:
type: string
description: 광고 ID
description: 광고 ID (지니TV)
example: "gtv-uuid-12345"
impressionSchedule:
type: array
description: 노출 스케줄
description: 노출 스케줄 (지니TV)
items:
type: string
example:
- "2025-11-01 18:00-20:00"
postUrl:
type: string
description: 게시물 URL
description: 게시물 URL (Instagram, Naver Blog)
example: "https://instagram.com/p/generated-post-id"
postId:
type: string
description: 게시물 ID
description: 게시물 ID (Instagram)
example: "ig-post-12345"
messageId:
type: string
description: 메시지 ID
description: 메시지 ID (Kakao Channel)
example: "kakao-msg-12345"
completedAt:
type: string
format: date-time
description: 완료 시각
example: "2025-11-01T09:00:00Z"
example: "2025-11-01T09:00:00"
errorMessage:
type: string
description: 오류 메시지
@ -619,7 +568,7 @@ components:
type: string
format: date-time
description: 마지막 재시도 시각
example: "2025-11-01T08:59:30Z"
example: "2025-11-01T08:59:30"
ErrorResponse:
type: object
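
The revised request shape above (enum channel list plus per-channel settings) can be exercised with the DTOs added in this PR. The snippet below is a minimal client-side sketch, assuming the local port 8085 from the run configurations and the DistributionRequest/DistributionResponse classes that appear further down in this diff; it is illustrative only and not part of the change set.

import com.kt.distribution.dto.ChannelType;
import com.kt.distribution.dto.DistributionRequest;
import com.kt.distribution.dto.DistributionResponse;
import org.springframework.web.client.RestTemplate;
import java.util.List;
import java.util.Map;

public class DistributionClientSketch {
    public static void main(String[] args) {
        // Build the request exactly as the "다중 채널 배포 예시" example: enum channel list
        // plus per-channel settings keyed by the enum name.
        DistributionRequest request = DistributionRequest.builder()
                .eventId("evt-12345")
                .title("신규 고객 환영 이벤트")
                .imageUrl("https://cdn.example.com/images/event-main.jpg")
                .channels(List.of(ChannelType.URIDONGNETV, ChannelType.INSTAGRAM, ChannelType.NAVER))
                .channelSettings(Map.of(
                        "URIDONGNETV", Map.<String, Object>of("radius", "1km", "timeSlot", "evening"),
                        "INSTAGRAM", Map.<String, Object>of("scheduledTime", "2025-11-01T10:00:00")))
                .build();

        // POST to the endpoint exposed by DistributionController (local run configuration uses port 8085).
        DistributionResponse response = new RestTemplate().postForObject(
                "http://localhost:8085/distribution/distribute", request, DistributionResponse.class);

        System.out.printf("success=%s, successCount=%d, failureCount=%d%n",
                response.isSuccess(), response.getSuccessCount(), response.getFailureCount());
    }
}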

View File

@ -0,0 +1,51 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="distribution-service" type="GradleRunConfiguration" factoryName="Gradle">
<ExternalSystemSettings>
<option name="env">
<map>
<entry key="GINITV_API_URL" value="http://localhost:9003/api/ginitv" />
<entry key="INSTAGRAM_API_URL" value="http://localhost:9004/api/instagram" />
<entry key="KAFKA_BOOTSTRAP_SERVERS" value="20.249.182.13:9095,4.217.131.59:9095" />
<entry key="KAFKA_CONSUMER_GROUP" value="distribution-service" />
<entry key="KAFKA_ENABLED" value="true" />
<entry key="KAKAO_API_URL" value="http://localhost:9006/api/kakao" />
<entry key="LOG_FILE" value="logs/distribution-service.log" />
<entry key="NAVER_API_URL" value="http://localhost:9005/api/naver" />
<entry key="RINGOBIZ_API_URL" value="http://localhost:9002/api/ringobiz" />
<entry key="SERVER_PORT" value="8085" />
<entry key="URIDONGNETV_API_URL" value="http://localhost:9001/api/uridongnetv" />
</map>
</option>
<option name="executionName" />
<option name="externalProjectPath" value="$PROJECT_DIR$" />
<option name="externalSystemIdString" value="GRADLE" />
<option name="scriptParameters" value="" />
<option name="taskDescriptions">
<list />
</option>
<option name="taskNames">
<list>
<option value="distribution-service:bootRun" />
</list>
</option>
<option name="vmOptions" />
</ExternalSystemSettings>
<ExternalSystemDebugServerProcess>true</ExternalSystemDebugServerProcess>
<ExternalSystemReattachDebugProcess>true</ExternalSystemReattachDebugProcess>
<EXTENSION ID="com.intellij.execution.ExternalSystemRunConfigurationJavaExtension">
<extension name="net.ashald.envfile">
<option name="IS_ENABLED" value="false" />
<option name="IS_SUBST" value="false" />
<option name="IS_PATH_MACRO_SUPPORTED" value="false" />
<option name="IS_IGNORE_MISSING_FILES" value="false" />
<option name="IS_ENABLE_EXPERIMENTAL_INTEGRATIONS" value="false" />
<ENTRIES>
<ENTRY IS_ENABLED="true" PARSER="runconfig" IS_EXECUTABLE="false" />
</ENTRIES>
</extension>
</EXTENSION>
<DebugAllEnabled>false</DebugAllEnabled>
<RunAsTest>false</RunAsTest>
<method v="2" />
</configuration>
</component>

View File

@ -0,0 +1,23 @@
package com.kt.distribution;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.openfeign.EnableFeignClients;
import org.springframework.kafka.annotation.EnableKafka;
/**
* Distribution Service Application
* 다중 채널 배포 관리 서비스
*
* @author System Architect
* @since 2025-10-23
*/
@SpringBootApplication
@EnableKafka
@EnableFeignClients
public class DistributionApplication {
public static void main(String[] args) {
SpringApplication.run(DistributionApplication.class, args);
}
}

View File

@ -0,0 +1,86 @@
package com.kt.distribution.adapter;
import com.kt.distribution.dto.ChannelDistributionResult;
import com.kt.distribution.dto.DistributionRequest;
import io.github.resilience4j.bulkhead.annotation.Bulkhead;
import io.github.resilience4j.circuitbreaker.annotation.CircuitBreaker;
import io.github.resilience4j.retry.annotation.Retry;
import lombok.extern.slf4j.Slf4j;
/**
* Abstract Channel Adapter
* 공통 로직 및 Resilience4j 적용
*
* @author System Architect
* @since 2025-10-23
*/
@Slf4j
public abstract class AbstractChannelAdapter implements ChannelAdapter {
/**
* 채널로 배포 실행 (Resilience4j 적용)
*
* @param request DistributionRequest
* @return ChannelDistributionResult
*/
@Override
@CircuitBreaker(name = "channelApi", fallbackMethod = "fallback")
@Retry(name = "channelApi")
@Bulkhead(name = "channelApi")
public ChannelDistributionResult distribute(DistributionRequest request) {
long startTime = System.currentTimeMillis();
try {
log.info("Starting distribution to channel: {}, eventId: {}",
getChannelType(), request.getEventId());
// 실제 외부 API 호출 (구현체에서 구현)
ChannelDistributionResult result = executeDistribution(request);
result.setExecutionTimeMs(System.currentTimeMillis() - startTime);
log.info("Distribution completed successfully: channel={}, eventId={}, executionTime={}ms",
getChannelType(), request.getEventId(), result.getExecutionTimeMs());
return result;
} catch (Exception e) {
long executionTime = System.currentTimeMillis() - startTime;
log.error("Distribution failed: channel={}, eventId={}, error={}",
getChannelType(), request.getEventId(), e.getMessage(), e);
return ChannelDistributionResult.builder()
.channel(getChannelType())
.success(false)
.errorMessage(e.getMessage())
.executionTimeMs(executionTime)
.build();
}
}
/**
* 실제 외부 API 호출 로직 (구현체에서 구현)
*
* @param request DistributionRequest
* @return ChannelDistributionResult
*/
protected abstract ChannelDistributionResult executeDistribution(DistributionRequest request);
/**
* Fallback 메서드 (Circuit Breaker Open 시)
*
* @param request DistributionRequest
* @param throwable Throwable
* @return ChannelDistributionResult
*/
protected ChannelDistributionResult fallback(DistributionRequest request, Throwable throwable) {
log.warn("Fallback triggered for channel: {}, eventId: {}, reason: {}",
getChannelType(), request.getEventId(), throwable.getMessage());
return ChannelDistributionResult.builder()
.channel(getChannelType())
.success(false)
.errorMessage("Circuit Breaker Open: " + throwable.getMessage())
.executionTimeMs(0)
.build();
}
}
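
The "channelApi" instance name referenced by the annotations above is expected to be configured outside this diff (typically resilience4j properties in application.yml, which this PR does not include). The snippet below is a rough programmatic sketch of what such settings could look like, using the 3-attempt exponential backoff (1s, 2s, 4s) mentioned in DistributionController's description; every value here is an assumption, not something taken from this PR.

import io.github.resilience4j.circuitbreaker.CircuitBreaker;
import io.github.resilience4j.circuitbreaker.CircuitBreakerConfig;
import io.github.resilience4j.circuitbreaker.CircuitBreakerRegistry;
import io.github.resilience4j.core.IntervalFunction;
import io.github.resilience4j.retry.Retry;
import io.github.resilience4j.retry.RetryConfig;
import io.github.resilience4j.retry.RetryRegistry;
import java.time.Duration;

public class ChannelApiResilienceSketch {
    public static void main(String[] args) {
        // Circuit breaker: isolate a failing channel instead of letting it slow down the whole batch.
        CircuitBreakerConfig cbConfig = CircuitBreakerConfig.custom()
                .failureRateThreshold(50)                         // open at a 50% failure rate
                .slidingWindowSize(10)                            // measured over the last 10 calls
                .waitDurationInOpenState(Duration.ofSeconds(10))  // probe again after 10s
                .build();
        CircuitBreaker channelApi = CircuitBreakerRegistry.of(cbConfig).circuitBreaker("channelApi");

        // Retry: 3 attempts with exponential backoff (1s, 2s, 4s).
        RetryConfig retryConfig = RetryConfig.custom()
                .maxAttempts(3)
                .intervalFunction(IntervalFunction.ofExponentialBackoff(1000L, 2.0))
                .build();
        Retry channelApiRetry = RetryRegistry.of(retryConfig).retry("channelApi");

        System.out.println(channelApi.getName() + " / " + channelApiRetry.getName());
    }
}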

View File

@ -0,0 +1,30 @@
package com.kt.distribution.adapter;
import com.kt.distribution.dto.ChannelDistributionResult;
import com.kt.distribution.dto.ChannelType;
import com.kt.distribution.dto.DistributionRequest;
/**
* Channel Adapter Interface
* 채널별 배포 API를 호출하는 인터페이스
*
* @author System Architect
* @since 2025-10-23
*/
public interface ChannelAdapter {
/**
* 지원하는 채널 타입
*
* @return ChannelType
*/
ChannelType getChannelType();
/**
* 채널로 배포 실행
*
* @param request DistributionRequest
* @return ChannelDistributionResult
*/
ChannelDistributionResult distribute(DistributionRequest request);
}

View File

@ -0,0 +1,45 @@
package com.kt.distribution.adapter;
import com.kt.distribution.dto.ChannelDistributionResult;
import com.kt.distribution.dto.ChannelType;
import com.kt.distribution.dto.DistributionRequest;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import java.util.UUID;
/**
* 지니TV Adapter
* 지니TV 광고 등록 API 호출
*
* @author System Architect
* @since 2025-10-23
*/
@Slf4j
@Component
public class GiniTvAdapter extends AbstractChannelAdapter {
@Value("${channel.apis.ginitv.url}")
private String apiUrl;
@Override
public ChannelType getChannelType() {
return ChannelType.GINITV;
}
@Override
protected ChannelDistributionResult executeDistribution(DistributionRequest request) {
log.debug("Calling GiniTV API: url={}, eventId={}", apiUrl, request.getEventId());
// TODO: 실제 API 호출 (현재는 Mock)
String distributionId = "GTIV-" + UUID.randomUUID().toString();
return ChannelDistributionResult.builder()
.channel(ChannelType.GINITV)
.success(true)
.distributionId(distributionId)
.estimatedReach(10000) // TV 광고 노출 수
.build();
}
}

View File

@ -0,0 +1,45 @@
package com.kt.distribution.adapter;
import com.kt.distribution.dto.ChannelDistributionResult;
import com.kt.distribution.dto.ChannelType;
import com.kt.distribution.dto.DistributionRequest;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import java.util.UUID;
/**
* Instagram Adapter
* Instagram 포스팅 API 호출
*
* @author System Architect
* @since 2025-10-23
*/
@Slf4j
@Component
public class InstagramAdapter extends AbstractChannelAdapter {
@Value("${channel.apis.instagram.url}")
private String apiUrl;
@Override
public ChannelType getChannelType() {
return ChannelType.INSTAGRAM;
}
@Override
protected ChannelDistributionResult executeDistribution(DistributionRequest request) {
log.debug("Calling Instagram API: url={}, eventId={}", apiUrl, request.getEventId());
// TODO: 실제 API 호출 (현재는 Mock)
String distributionId = "INSTA-" + UUID.randomUUID().toString();
return ChannelDistributionResult.builder()
.channel(ChannelType.INSTAGRAM)
.success(true)
.distributionId(distributionId)
.estimatedReach(3000) // 팔로워 기반 예상 노출 수
.build();
}
}

View File

@ -0,0 +1,45 @@
package com.kt.distribution.adapter;
import com.kt.distribution.dto.ChannelDistributionResult;
import com.kt.distribution.dto.ChannelType;
import com.kt.distribution.dto.DistributionRequest;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import java.util.UUID;
/**
* Kakao Channel Adapter
* Kakao Channel 포스팅 API 호출
*
* @author System Architect
* @since 2025-10-23
*/
@Slf4j
@Component
public class KakaoAdapter extends AbstractChannelAdapter {
@Value("${channel.apis.kakao.url}")
private String apiUrl;
@Override
public ChannelType getChannelType() {
return ChannelType.KAKAO;
}
@Override
protected ChannelDistributionResult executeDistribution(DistributionRequest request) {
log.debug("Calling Kakao API: url={}, eventId={}", apiUrl, request.getEventId());
// TODO: 실제 API 호출 (현재는 Mock)
String distributionId = "KAKAO-" + UUID.randomUUID().toString();
return ChannelDistributionResult.builder()
.channel(ChannelType.KAKAO)
.success(true)
.distributionId(distributionId)
.estimatedReach(4000) // 채널 친구 기반
.build();
}
}

View File

@ -0,0 +1,45 @@
package com.kt.distribution.adapter;
import com.kt.distribution.dto.ChannelDistributionResult;
import com.kt.distribution.dto.ChannelType;
import com.kt.distribution.dto.DistributionRequest;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import java.util.UUID;
/**
* Naver Blog Adapter
* Naver Blog 포스팅 API 호출
*
* @author System Architect
* @since 2025-10-23
*/
@Slf4j
@Component
public class NaverAdapter extends AbstractChannelAdapter {
@Value("${channel.apis.naver.url}")
private String apiUrl;
@Override
public ChannelType getChannelType() {
return ChannelType.NAVER;
}
@Override
protected ChannelDistributionResult executeDistribution(DistributionRequest request) {
log.debug("Calling Naver API: url={}, eventId={}", apiUrl, request.getEventId());
// TODO: 실제 API 호출 (현재는 Mock)
String distributionId = "NAVER-" + UUID.randomUUID().toString();
return ChannelDistributionResult.builder()
.channel(ChannelType.NAVER)
.success(true)
.distributionId(distributionId)
.estimatedReach(2000) // 블로그 방문자 기반
.build();
}
}

View File

@ -0,0 +1,45 @@
package com.kt.distribution.adapter;
import com.kt.distribution.dto.ChannelDistributionResult;
import com.kt.distribution.dto.ChannelType;
import com.kt.distribution.dto.DistributionRequest;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import java.util.UUID;
/**
* 링고비즈 Adapter
* 링고비즈 연결음 업데이트 API 호출
*
* @author System Architect
* @since 2025-10-23
*/
@Slf4j
@Component
public class RingoBizAdapter extends AbstractChannelAdapter {
@Value("${channel.apis.ringobiz.url}")
private String apiUrl;
@Override
public ChannelType getChannelType() {
return ChannelType.RINGOBIZ;
}
@Override
protected ChannelDistributionResult executeDistribution(DistributionRequest request) {
log.debug("Calling RingoBiz API: url={}, eventId={}", apiUrl, request.getEventId());
// TODO: 실제 API 호출 (현재는 Mock)
String distributionId = "RBIZ-" + UUID.randomUUID().toString();
return ChannelDistributionResult.builder()
.channel(ChannelType.RINGOBIZ)
.success(true)
.distributionId(distributionId)
.estimatedReach(1000) // 연결음 사용자 수
.build();
}
}

View File

@ -0,0 +1,72 @@
package com.kt.distribution.adapter;
import com.kt.distribution.dto.ChannelDistributionResult;
import com.kt.distribution.dto.ChannelType;
import com.kt.distribution.dto.DistributionRequest;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import org.springframework.web.client.RestTemplate;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
/**
* 우리동네TV Adapter
* 우리동네TV API 호출
*
* @author System Architect
* @since 2025-10-23
*/
@Slf4j
@Component
public class UriDongNeTvAdapter extends AbstractChannelAdapter {
@Value("${channel.apis.uridongnetv.url}")
private String apiUrl;
private final RestTemplate restTemplate = new RestTemplate();
@Override
public ChannelType getChannelType() {
return ChannelType.URIDONGNETV;
}
@Override
protected ChannelDistributionResult executeDistribution(DistributionRequest request) {
log.debug("Calling UriDongNeTV API: url={}, eventId={}", apiUrl, request.getEventId());
// 외부 API 호출 준비
Map<String, Object> payload = new HashMap<>();
payload.put("eventId", request.getEventId());
payload.put("title", request.getTitle());
payload.put("videoUrl", request.getImageUrl()); // 이미지를 영상으로 변환한 URL
payload.put("radius", getChannelSetting(request, "radius", "500m"));
payload.put("timeSlot", getChannelSetting(request, "timeSlot", "evening"));
// TODO: 실제 API 호출 (현재는 Mock)
// ResponseEntity<Map> response = restTemplate.postForEntity(apiUrl + "/distribute", payload, Map.class);
// Mock 응답
String distributionId = "UDTV-" + UUID.randomUUID().toString();
int estimatedReach = 5000;
return ChannelDistributionResult.builder()
.channel(ChannelType.URIDONGNETV)
.success(true)
.distributionId(distributionId)
.estimatedReach(estimatedReach)
.build();
}
private String getChannelSetting(DistributionRequest request, String key, String defaultValue) {
if (request.getChannelSettings() != null) {
Map<String, Object> settings = request.getChannelSettings().get(ChannelType.URIDONGNETV.name());
if (settings != null && settings.containsKey(key)) {
return settings.get(key).toString();
}
}
return defaultValue;
}
}
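
When the mock is replaced, executeDistribution would issue the commented-out RestTemplate call and map the response. The sketch below assumes the external 우리동네TV API responds with distributionId and estimatedReach fields; that contract is not defined anywhere in this PR. It additionally needs org.springframework.http.ResponseEntity on the import list.

// Hypothetical replacement body for executeDistribution(), reusing the payload built above.
@Override
protected ChannelDistributionResult executeDistribution(DistributionRequest request) {
    Map<String, Object> payload = new HashMap<>();
    payload.put("eventId", request.getEventId());
    payload.put("title", request.getTitle());
    payload.put("videoUrl", request.getImageUrl());
    payload.put("radius", getChannelSetting(request, "radius", "500m"));
    payload.put("timeSlot", getChannelSetting(request, "timeSlot", "evening"));

    ResponseEntity<Map> response = restTemplate.postForEntity(apiUrl + "/distribute", payload, Map.class);
    Map<String, Object> body = response.getBody();
    if (!response.getStatusCode().is2xxSuccessful() || body == null) {
        // Thrown exceptions are converted to a failure result by AbstractChannelAdapter
        // and counted by the Circuit Breaker.
        throw new IllegalStateException("UriDongNeTV API returned " + response.getStatusCode());
    }

    return ChannelDistributionResult.builder()
            .channel(ChannelType.URIDONGNETV)
            .success(true)
            .distributionId((String) body.get("distributionId"))   // assumed response field
            .estimatedReach((Integer) body.get("estimatedReach"))  // assumed response field
            .build();
}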

View File

@ -0,0 +1,59 @@
package com.kt.distribution.config;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
import org.springframework.kafka.support.serializer.JsonSerializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.Map;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
/**
* Kafka Configuration
* Kafka Producer 설정
*
* @author System Architect
* @since 2025-10-23
*/
@Configuration
@ConditionalOnProperty(name = "spring.kafka.enabled", havingValue = "true", matchIfMissing = false)
public class KafkaConfig {
private static final Logger log = LoggerFactory.getLogger(KafkaConfig.class);
@Value("${spring.kafka.bootstrap-servers:localhost:9092}")
private String bootstrapServers;
@Bean
@Primary
public ProducerFactory<String, Object> producerFactory() {
log.info("Initializing Kafka ProducerFactory with bootstrap servers: {}", bootstrapServers);
Map<String, Object> configProps = new HashMap<>();
configProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
configProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
configProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);
configProps.put(JsonSerializer.ADD_TYPE_INFO_HEADERS, false);
log.debug("Kafka Producer Config: {}", configProps);
return new DefaultKafkaProducerFactory<>(configProps);
}
@Bean
@Primary
public KafkaTemplate<String, Object> kafkaTemplate() {
log.info("Creating KafkaTemplate with custom ProducerFactory");
return new KafkaTemplate<>(producerFactory());
}
}

View File

@ -0,0 +1,52 @@
package com.kt.distribution.config;
import io.swagger.v3.oas.models.OpenAPI;
import io.swagger.v3.oas.models.info.Contact;
import io.swagger.v3.oas.models.info.Info;
import io.swagger.v3.oas.models.servers.Server;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import java.util.List;
/**
* OpenAPI (Swagger) Configuration
* Swagger UI 설정 및 API 문서화
*
* @author System Architect
* @since 2025-10-24
*/
@Configuration
public class OpenApiConfig {
@Bean
public OpenAPI openAPI() {
return new OpenAPI()
.info(new Info()
.title("Distribution Service API")
.description("""
KT AI 기반 소상공인 이벤트 자동 생성 서비스의 다중 채널 배포 관리 API
## 주요 기능
- 다중 채널 동시 배포 (우리동네TV, 링고비즈, 지니TV, SNS)
- 배포 상태 실시간 모니터링
- Circuit Breaker 기반 장애 격리
- Retry 패턴 및 Fallback 처리
""")
.version("1.0.0")
.contact(new Contact()
.name("Digital Garage Team")
.email("support@kt-event-marketing.com")))
.servers(List.of(
new Server()
.url("http://localhost:8085")
.description("Local Development Server"),
new Server()
.url("https://dev-api.kt-event-marketing.com/distribution/v1")
.description("Development Server"),
new Server()
.url("https://api.kt-event-marketing.com/distribution/v1")
.description("Production Server")
));
}
}

View File

@ -0,0 +1,32 @@
package com.kt.distribution.config;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.config.annotation.CorsRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
/**
* Web Configuration
* CORS 설정 및 기타 관련 설정
*
* @author System Architect
* @since 2025-10-24
*/
@Configuration
public class WebConfig implements WebMvcConfigurer {
/**
* CORS 설정
* - 모든 origin 허용 (개발 환경)
* - 모든 HTTP 메서드 허용
* - Credentials 허용
*/
@Override
public void addCorsMappings(CorsRegistry registry) {
registry.addMapping("/**")
.allowedOriginPatterns("*")
.allowedMethods("GET", "POST", "PUT", "DELETE", "PATCH", "OPTIONS")
.allowedHeaders("*")
.allowCredentials(true)
.maxAge(3600);
}
}

View File

@ -0,0 +1,124 @@
package com.kt.distribution.controller;
import com.kt.distribution.dto.DistributionRequest;
import com.kt.distribution.dto.DistributionResponse;
import com.kt.distribution.dto.DistributionStatusResponse;
import com.kt.distribution.service.DistributionService;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.media.Content;
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import io.swagger.v3.oas.annotations.responses.ApiResponses;
import io.swagger.v3.oas.annotations.tags.Tag;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
/**
* Distribution Controller
* POST /distribution/distribute - 다중 채널 배포 실행
* GET /distribution/{eventId}/status - 배포 상태 조회
*
* @author System Architect
* @since 2025-10-23
*/
@Slf4j
@RestController
@RequestMapping("/distribution")
@RequiredArgsConstructor
@Tag(name = "Distribution", description = "다중 채널 배포 관리 API")
public class DistributionController {
private final DistributionService distributionService;
/**
* 다중 채널 배포 실행
* UFR-DIST-010: 다중채널배포
*
* @param request DistributionRequest
* @return DistributionResponse
*/
@Operation(
summary = "다중 채널 배포 요청",
description = """
이벤트 콘텐츠를 선택된 채널들에 동시 배포합니다.
## 처리 흐름
1. 배포 요청 검증 (이벤트 ID, 채널 목록, 콘텐츠 데이터)
2. 채널별 병렬 배포 실행 (1분 이내 완료 목표)
3. Circuit Breaker로 장애 채널 격리
4. 실패 시 Retry (지수 백오프: 1s, 2s, 4s)
5. Fallback: 실패 채널 스킵 및 알림
"""
)
@ApiResponses(value = {
@ApiResponse(
responseCode = "200",
description = "배포 완료",
content = @Content(schema = @Schema(implementation = DistributionResponse.class))
),
@ApiResponse(responseCode = "400", description = "잘못된 요청"),
@ApiResponse(responseCode = "404", description = "이벤트를 찾을 수 없음"),
@ApiResponse(responseCode = "500", description = "서버 내부 오류")
})
@PostMapping("/distribute")
public ResponseEntity<DistributionResponse> distribute(@RequestBody DistributionRequest request) {
log.info("Received distribution request: eventId={}, channels={}",
request.getEventId(), request.getChannels());
DistributionResponse response = distributionService.distribute(request);
log.info("Distribution request processed: eventId={}, success={}, successCount={}, failureCount={}",
response.getEventId(), response.isSuccess(),
response.getSuccessCount(), response.getFailureCount());
return ResponseEntity.ok(response);
}
/**
* 배포 상태 조회
* UFR-DIST-020: 배포상태조회
*
* @param eventId 이벤트 ID
* @return DistributionStatusResponse
*/
@Operation(
summary = "배포 상태 조회",
description = """
특정 이벤트의 배포 상태를 실시간으로 조회합니다.
## 조회 정보
- 전체 배포 상태 (진행중, 완료, 부분성공, 실패)
- 채널별 배포 상태 및 결과
- 실패한 채널 상세 정보 (오류 유형, 재시도 횟수)
"""
)
@ApiResponses(value = {
@ApiResponse(
responseCode = "200",
description = "배포 상태 조회 성공",
content = @Content(schema = @Schema(implementation = DistributionStatusResponse.class))
),
@ApiResponse(responseCode = "404", description = "배포 이력을 찾을 수 없음"),
@ApiResponse(responseCode = "500", description = "서버 내부 오류")
})
@GetMapping("/{eventId}/status")
public ResponseEntity<DistributionStatusResponse> getDistributionStatus(
@Parameter(description = "이벤트 ID", required = true, example = "evt-12345")
@PathVariable String eventId) {
log.info("Received distribution status request: eventId={}", eventId);
DistributionStatusResponse response = distributionService.getDistributionStatus(eventId);
log.info("Distribution status retrieved: eventId={}, overallStatus={}",
eventId, response.getOverallStatus());
if ("NOT_FOUND".equals(response.getOverallStatus())) {
return ResponseEntity.notFound().build();
}
return ResponseEntity.ok(response);
}
}
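
A controller slice-test sketch, assuming spring-boot-starter-test (MockMvc, Mockito) on the test classpath and placed alongside DistributionController; no such test is included in this PR.

import com.kt.distribution.dto.DistributionResponse;
import com.kt.distribution.service.DistributionService;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.WebMvcTest;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.http.MediaType;
import org.springframework.test.web.servlet.MockMvc;

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.BDDMockito.given;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

@WebMvcTest(DistributionController.class)
class DistributionControllerTest {

    @Autowired
    private MockMvc mockMvc;

    @MockBean
    private DistributionService distributionService;

    @Test
    void distribute_returnsAggregatedResult() throws Exception {
        // Stub the service layer; only the web layer is under test here.
        given(distributionService.distribute(any())).willReturn(
                DistributionResponse.builder()
                        .eventId("evt-12345")
                        .success(true)
                        .successCount(1)
                        .failureCount(0)
                        .build());

        mockMvc.perform(post("/distribution/distribute")
                        .contentType(MediaType.APPLICATION_JSON)
                        .content("{\"eventId\":\"evt-12345\",\"channels\":[\"INSTAGRAM\"]}"))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.success").value(true));
    }
}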

View File

@ -0,0 +1,49 @@
package com.kt.distribution.dto;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
* 채널별 배포 결과
*
* @author System Architect
* @since 2025-10-23
*/
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class ChannelDistributionResult {
/**
* 채널 타입
*/
private ChannelType channel;
/**
* 배포 성공 여부
*/
private boolean success;
/**
* 배포 ID (성공 시)
*/
private String distributionId;
/**
* 예상 노출 수 (성공 시)
*/
private Integer estimatedReach;
/**
* 에러 메시지 (실패 시)
*/
private String errorMessage;
/**
* 배포 소요 시간 (ms)
*/
private long executionTimeMs;
}

View File

@ -0,0 +1,100 @@
package com.kt.distribution.dto;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.time.LocalDateTime;
import java.util.List;
/**
* 채널별 배포 상태 DTO
*
* 채널의 배포 진행 상태 및 결과 정보를 담습니다.
*/
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class ChannelStatus {
/**
* 채널 타입
*/
private ChannelType channel;
/**
* 채널별 배포 상태
* - PENDING: 대기
* - IN_PROGRESS: 진행
* - COMPLETED: 완료
* - FAILED: 실패
*/
private String status;
/**
* 진행률 (0-100, IN_PROGRESS 상태일 때 사용)
*/
private Integer progress;
/**
* 채널별 배포 ID (우리동네TV, 지니TV 등)
*/
private String distributionId;
/**
* 예상 노출 수 (우리동네TV, 지니TV)
*/
private Integer estimatedViews;
/**
* 업데이트 완료 시각 (링고비즈)
*/
private LocalDateTime updateTimestamp;
/**
* 이벤트 ID
*/
private String eventId;
/**
* 노출 스케줄 (지니TV)
*/
private List<String> impressionSchedule;
/**
* 게시물 URL (Instagram, Naver Blog)
*/
private String postUrl;
/**
* 게시물 ID (Instagram)
*/
private String postId;
/**
* 메시지 ID (Kakao Channel)
*/
private String messageId;
/**
* 완료 시각
*/
private LocalDateTime completedAt;
/**
* 오류 메시지 (실패 시)
*/
private String errorMessage;
/**
* 재시도 횟수
*/
private Integer retries;
/**
* 마지막 재시도 시각
*/
private LocalDateTime lastRetryAt;
}

View File

@ -0,0 +1,32 @@
package com.kt.distribution.dto;
/**
* 배포 채널 타입
*
* @author System Architect
* @since 2025-10-23
*/
public enum ChannelType {
URIDONGNETV("우리동네TV", "TV"),
RINGOBIZ("링고비즈", "CALL"),
GINITV("지니TV", "TV"),
INSTAGRAM("Instagram", "SNS"),
NAVER("Naver Blog", "SNS"),
KAKAO("Kakao Channel", "SNS");
private final String displayName;
private final String category;
ChannelType(String displayName, String category) {
this.displayName = displayName;
this.category = category;
}
public String getDisplayName() {
return displayName;
}
public String getCategory() {
return category;
}
}
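
A small usage sketch of the category field, e.g. grouping SNS channels for batch handling; illustrative only and not part of this PR.

import com.kt.distribution.dto.ChannelType;

public class ChannelTypeSketch {
    public static void main(String[] args) {
        // Group channels by category, e.g. {TV=[URIDONGNETV, GINITV], CALL=[RINGOBIZ], SNS=[INSTAGRAM, NAVER, KAKAO]}
        var byCategory = java.util.Arrays.stream(ChannelType.values())
                .collect(java.util.stream.Collectors.groupingBy(ChannelType::getCategory));
        System.out.println(byCategory);
    }
}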

View File

@ -0,0 +1,54 @@
package com.kt.distribution.dto;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.List;
import java.util.Map;
/**
* 배포 요청 DTO
* POST /api/distribution/distribute
*
* @author System Architect
* @since 2025-10-23
*/
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class DistributionRequest {
/**
* 이벤트 ID
*/
private String eventId;
/**
* 이벤트 제목
*/
private String title;
/**
* 이벤트 설명
*/
private String description;
/**
* 이미지 URL (CDN)
*/
private String imageUrl;
/**
* 배포할 채널 목록
*/
private List<ChannelType> channels;
/**
* 채널별 추가 설정 (Optional)
* 예: { "URIDONGNETV": { "radius": "1km", "timeSlot": "evening" } }
*/
private Map<String, Map<String, Object>> channelSettings;
}

View File

@ -0,0 +1,63 @@
package com.kt.distribution.dto;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.time.LocalDateTime;
import java.util.List;
/**
* 배포 응답 DTO
* POST /api/distribution/distribute
*
* @author System Architect
* @since 2025-10-23
*/
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class DistributionResponse {
/**
* 이벤트 ID
*/
private String eventId;
/**
* 배포 성공 여부 (모든 채널 또는 일부 채널 성공)
*/
private boolean success;
/**
* 채널별 배포 결과
*/
private List<ChannelDistributionResult> channelResults;
/**
* 성공한 채널 수
*/
private int successCount;
/**
* 실패한 채널 수
*/
private int failureCount;
/**
* 배포 완료 시각
*/
private LocalDateTime completedAt;
/**
* 전체 배포 소요 시간 (ms)
*/
private long totalExecutionTimeMs;
/**
* 메시지
*/
private String message;
}

View File

@ -0,0 +1,52 @@
package com.kt.distribution.dto;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.time.LocalDateTime;
import java.util.List;
/**
* 배포 상태 조회 응답 DTO
*
* 특정 이벤트의 전체 배포 상태 및 채널별 상세 상태 정보를 담습니다.
*/
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class DistributionStatusResponse {
/**
* 이벤트 ID
*/
private String eventId;
/**
* 전체 배포 상태
* - PENDING: 대기
* - IN_PROGRESS: 진행
* - COMPLETED: 완료
* - PARTIAL_FAILURE: 부분 성공
* - FAILED: 실패
* - NOT_FOUND: 배포 이력 없음
*/
private String overallStatus;
/**
* 배포 시작 시각
*/
private LocalDateTime startedAt;
/**
* 배포 완료 시각
*/
private LocalDateTime completedAt;
/**
* 채널별 배포 상태 목록
*/
private List<ChannelStatus> channels;
}

View File

@ -0,0 +1,168 @@
package com.kt.distribution.entity;
import com.kt.distribution.dto.ChannelType;
import jakarta.persistence.*;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.time.LocalDateTime;
/**
* 채널별 배포 상태 엔티티
*
* 채널의 배포 진행 상태 및 결과 정보를 저장합니다.
*
* @author Backend Developer
* @since 2025-10-24
*/
@Entity
@Table(name = "channel_status", indexes = {
@Index(name = "idx_distribution_channel", columnList = "distribution_status_id, channel")
})
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class ChannelStatusEntity {
/**
* 채널 상태 ID (Primary Key)
*/
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
private Long id;
/**
* 배포 상태 (Foreign Key)
*/
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "distribution_status_id", nullable = false)
private DistributionStatus distributionStatus;
/**
* 채널 타입
*/
@Enumerated(EnumType.STRING)
@Column(name = "channel", nullable = false, length = 20)
private ChannelType channel;
/**
* 채널별 배포 상태
* - PENDING: 대기
* - IN_PROGRESS: 진행
* - COMPLETED: 완료
* - FAILED: 실패
*/
@Column(name = "status", nullable = false, length = 20)
private String status;
/**
* 진행률 (0-100, IN_PROGRESS 상태일 때 사용)
*/
@Column(name = "progress")
private Integer progress;
/**
* 채널별 배포 ID (우리동네TV, 지니TV 등)
*/
@Column(name = "distribution_id", length = 100)
private String distributionId;
/**
* 예상 노출 수 (우리동네TV, 지니TV)
*/
@Column(name = "estimated_views")
private Integer estimatedViews;
/**
* 업데이트 완료 시각 (링고비즈)
*/
@Column(name = "update_timestamp")
private LocalDateTime updateTimestamp;
/**
* 이벤트 ID
*/
@Column(name = "event_id", length = 100)
private String eventId;
/**
* 노출 스케줄 (지니TV) - JSON 형태로 저장
*/
@Column(name = "impression_schedule", columnDefinition = "TEXT")
private String impressionSchedule;
/**
* 게시물 URL (Instagram, Naver Blog)
*/
@Column(name = "post_url", columnDefinition = "TEXT")
private String postUrl;
/**
* 게시물 ID (Instagram)
*/
@Column(name = "post_id", length = 100)
private String postId;
/**
* 메시지 ID (Kakao Channel)
*/
@Column(name = "message_id", length = 100)
private String messageId;
/**
* 완료 시각
*/
@Column(name = "completed_at")
private LocalDateTime completedAt;
/**
* 오류 메시지 (실패 시)
*/
@Column(name = "error_message", columnDefinition = "TEXT")
private String errorMessage;
/**
* 재시도 횟수
*/
@Column(name = "retries")
@Builder.Default
private Integer retries = 0;
/**
* 마지막 재시도 시각
*/
@Column(name = "last_retry_at")
private LocalDateTime lastRetryAt;
/**
* 생성 시각
*/
@Column(name = "created_at", nullable = false, updatable = false)
private LocalDateTime createdAt;
/**
* 수정 시각
*/
@Column(name = "updated_at")
private LocalDateTime updatedAt;
/**
* 생성 시 자동으로 생성 시각 설정
*/
@PrePersist
protected void onCreate() {
createdAt = LocalDateTime.now();
updatedAt = LocalDateTime.now();
}
/**
* 수정 시 자동으로 수정 시각 설정
*/
@PreUpdate
protected void onUpdate() {
updatedAt = LocalDateTime.now();
}
}

View File

@ -0,0 +1,118 @@
package com.kt.distribution.entity;
import jakarta.persistence.*;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.List;
/**
* 배포 상태 엔티티
*
* 이벤트의 전체 배포 상태 정보를 저장합니다.
*
* @author Backend Developer
* @since 2025-10-24
*/
@Entity
@Table(name = "distribution_status", indexes = {
@Index(name = "idx_event_id", columnList = "event_id", unique = true)
})
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class DistributionStatus {
/**
* 배포 상태 ID (Primary Key)
*/
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
private Long id;
/**
* 이벤트 ID (Unique)
*/
@Column(name = "event_id", nullable = false, unique = true, length = 100)
private String eventId;
/**
* 전체 배포 상태
* - PENDING: 대기
* - IN_PROGRESS: 진행
* - COMPLETED: 완료
* - PARTIAL_FAILURE: 부분 성공
* - FAILED: 실패
*/
@Column(name = "overall_status", nullable = false, length = 20)
private String overallStatus;
/**
* 배포 시작 시각
*/
@Column(name = "started_at")
private LocalDateTime startedAt;
/**
* 배포 완료 시각
*/
@Column(name = "completed_at")
private LocalDateTime completedAt;
/**
* 채널별 배포 상태 목록 (1:N 관계)
*/
@OneToMany(mappedBy = "distributionStatus", cascade = CascadeType.ALL, orphanRemoval = true, fetch = FetchType.LAZY)
@Builder.Default
private List<ChannelStatusEntity> channels = new ArrayList<>();
/**
* 생성 시각
*/
@Column(name = "created_at", nullable = false, updatable = false)
private LocalDateTime createdAt;
/**
* 수정 시각
*/
@Column(name = "updated_at")
private LocalDateTime updatedAt;
/**
* 생성 시 자동으로 생성 시각 설정
*/
@PrePersist
protected void onCreate() {
createdAt = LocalDateTime.now();
updatedAt = LocalDateTime.now();
}
/**
* 수정 시 자동으로 수정 시각 설정
*/
@PreUpdate
protected void onUpdate() {
updatedAt = LocalDateTime.now();
}
/**
* 채널 상태 추가 헬퍼 메서드
*/
public void addChannelStatus(ChannelStatusEntity channelStatus) {
channels.add(channelStatus);
channelStatus.setDistributionStatus(this);
}
/**
* 채널 상태 제거 헬퍼 메서드
*/
public void removeChannelStatus(ChannelStatusEntity channelStatus) {
channels.remove(channelStatus);
channelStatus.setDistributionStatus(null);
}
}
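
A short sketch of how this aggregate is meant to be assembled: addChannelStatus keeps both sides of the bidirectional mapping in sync, and CascadeType.ALL persists the child rows in one save. The repository variable refers to the DistributionStatusJpaRepository added later in this PR, and the surrounding method is assumed to be @Transactional; the values are illustrative only.

// Illustrative only: persisting one distribution with a single pending channel.
DistributionStatus status = DistributionStatus.builder()
        .eventId("evt-12345")
        .overallStatus("IN_PROGRESS")
        .startedAt(LocalDateTime.now())
        .build();                                  // channels defaults to an empty list (@Builder.Default)

ChannelStatusEntity instagram = ChannelStatusEntity.builder()
        .channel(ChannelType.INSTAGRAM)
        .status("PENDING")
        .eventId("evt-12345")
        .build();

status.addChannelStatus(instagram);                // also sets instagram.distributionStatus = status
distributionStatusJpaRepository.save(status);      // CascadeType.ALL persists the channel_status row too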

View File

@ -0,0 +1,40 @@
package com.kt.distribution.event;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
* 배포된 채널 정보
* Kafka 이벤트에 포함되는 채널별 상세 정보
*
* @author System Architect
* @since 2025-10-29
*/
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class DistributedChannelInfo {
/**
* 채널명 (예: "우리동네TV", "지니TV", "링고비즈")
*/
private String channel;
/**
* 채널 타입 (예: "TV", "CALL", "SNS")
*/
private String channelType;
/**
* 배포 상태 (SUCCESS, FAILED)
*/
private String status;
/**
* 예상 조회수
*/
private Integer expectedViews;
}

View File

@ -0,0 +1,40 @@
package com.kt.distribution.event;
import com.fasterxml.jackson.annotation.JsonFormat;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.time.LocalDateTime;
import java.util.List;
/**
* Distribution Completed Event
* 배포 완료 시 Kafka로 발행하는 이벤트
*
* @author System Architect
* @since 2025-10-23
*/
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class DistributionCompletedEvent {
/**
* 이벤트 ID
*/
private String eventId;
/**
* 배포 완료된 채널 상세 정보 목록
*/
private List<DistributedChannelInfo> distributedChannels;
/**
* 배포 완료 시각
*/
@JsonFormat(pattern = "yyyy-MM-dd'T'HH:mm:ss.SSSSSSS")
private LocalDateTime completedAt;
}
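
A publishing sketch for this event using the KafkaTemplate bean from KafkaConfig. The topic name "distribution-completed" is an assumption; no topic name appears in this diff.

// Illustrative only, e.g. inside DistributionService after all channel adapters have returned.
DistributionCompletedEvent event = DistributionCompletedEvent.builder()
        .eventId("evt-12345")
        .distributedChannels(List.of(
                DistributedChannelInfo.builder()
                        .channel("우리동네TV")
                        .channelType("TV")
                        .status("SUCCESS")
                        .expectedViews(5000)
                        .build()))
        .completedAt(LocalDateTime.now())
        .build();

// Key by eventId so all events for the same event land in the same partition (hypothetical topic name).
kafkaTemplate.send("distribution-completed", event.getEventId(), event);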

View File

@ -0,0 +1,173 @@
package com.kt.distribution.mapper;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.kt.distribution.dto.ChannelStatus;
import com.kt.distribution.dto.DistributionStatusResponse;
import com.kt.distribution.entity.ChannelStatusEntity;
import com.kt.distribution.entity.DistributionStatus;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
/**
* 배포 상태 Mapper
*
* Entity와 DTO 간의 변환을 담당합니다.
*
* @author Backend Developer
* @since 2025-10-24
*/
@Slf4j
@Component
@RequiredArgsConstructor
public class DistributionStatusMapper {
private final ObjectMapper objectMapper;
/**
* DistributionStatusResponse DTO를 DistributionStatus Entity로 변환
*
* @param dto DistributionStatusResponse DTO
* @return DistributionStatus Entity
*/
public DistributionStatus toEntity(DistributionStatusResponse dto) {
if (dto == null) {
return null;
}
DistributionStatus entity = DistributionStatus.builder()
.eventId(dto.getEventId())
.overallStatus(dto.getOverallStatus())
.startedAt(dto.getStartedAt())
.completedAt(dto.getCompletedAt())
.build();
// 채널 상태 변환 및 추가
if (dto.getChannels() != null) {
List<ChannelStatusEntity> channelEntities = dto.getChannels().stream()
.map(channelDto -> toChannelEntity(channelDto, entity))
.collect(Collectors.toList());
channelEntities.forEach(entity::addChannelStatus);
}
return entity;
}
/**
* DistributionStatus Entity를 DistributionStatusResponse DTO로 변환
*
* @param entity DistributionStatus Entity
* @return DistributionStatusResponse DTO
*/
public DistributionStatusResponse toDto(DistributionStatus entity) {
if (entity == null) {
return null;
}
List<ChannelStatus> channelDtos = entity.getChannels() != null
? entity.getChannels().stream()
.map(this::toChannelDto)
.collect(Collectors.toList())
: Collections.emptyList();
return DistributionStatusResponse.builder()
.eventId(entity.getEventId())
.overallStatus(entity.getOverallStatus())
.startedAt(entity.getStartedAt())
.completedAt(entity.getCompletedAt())
.channels(channelDtos)
.build();
}
/**
* ChannelStatus DTO를 ChannelStatusEntity로 변환
*
* @param dto ChannelStatus DTO
* @param distributionStatus 부모 DistributionStatus Entity
* @return ChannelStatusEntity
*/
private ChannelStatusEntity toChannelEntity(ChannelStatus dto, DistributionStatus distributionStatus) {
if (dto == null) {
return null;
}
// impressionSchedule를 JSON 문자열로 변환
String impressionScheduleJson = null;
if (dto.getImpressionSchedule() != null && !dto.getImpressionSchedule().isEmpty()) {
try {
impressionScheduleJson = objectMapper.writeValueAsString(dto.getImpressionSchedule());
} catch (JsonProcessingException e) {
log.error("Failed to serialize impressionSchedule", e);
}
}
return ChannelStatusEntity.builder()
.distributionStatus(distributionStatus)
.channel(dto.getChannel())
.status(dto.getStatus())
.progress(dto.getProgress())
.distributionId(dto.getDistributionId())
.estimatedViews(dto.getEstimatedViews())
.updateTimestamp(dto.getUpdateTimestamp())
.eventId(dto.getEventId())
.impressionSchedule(impressionScheduleJson)
.postUrl(dto.getPostUrl())
.postId(dto.getPostId())
.messageId(dto.getMessageId())
.completedAt(dto.getCompletedAt())
.errorMessage(dto.getErrorMessage())
.retries(dto.getRetries())
.lastRetryAt(dto.getLastRetryAt())
.build();
}
/**
* ChannelStatusEntity를 ChannelStatus DTO로 변환
*
* @param entity ChannelStatusEntity
* @return ChannelStatus DTO
*/
private ChannelStatus toChannelDto(ChannelStatusEntity entity) {
if (entity == null) {
return null;
}
// JSON 문자열을 List<String>으로 변환
List<String> impressionScheduleList = null;
if (entity.getImpressionSchedule() != null && !entity.getImpressionSchedule().isEmpty()) {
try {
impressionScheduleList = objectMapper.readValue(
entity.getImpressionSchedule(),
new TypeReference<List<String>>() {}
);
} catch (JsonProcessingException e) {
log.error("Failed to deserialize impressionSchedule", e);
}
}
return ChannelStatus.builder()
.channel(entity.getChannel())
.status(entity.getStatus())
.progress(entity.getProgress())
.distributionId(entity.getDistributionId())
.estimatedViews(entity.getEstimatedViews())
.updateTimestamp(entity.getUpdateTimestamp())
.eventId(entity.getEventId())
.impressionSchedule(impressionScheduleList)
.postUrl(entity.getPostUrl())
.postId(entity.getPostId())
.messageId(entity.getMessageId())
.completedAt(entity.getCompletedAt())
.errorMessage(entity.getErrorMessage())
.retries(entity.getRetries())
.lastRetryAt(entity.getLastRetryAt())
.build();
}
}

View File

@ -0,0 +1,53 @@
package com.kt.distribution.repository;
import com.kt.distribution.entity.DistributionStatus;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import org.springframework.stereotype.Repository;
import java.util.Optional;
/**
* Distribution status JPA repository.
*
* Persists and looks up distribution status records in the database.
*
* @author Backend Developer
* @since 2025-10-24
*/
@Repository
public interface DistributionStatusJpaRepository extends JpaRepository<DistributionStatus, Long> {
/**
* Finds the distribution status for an event ID.
*
* @param eventId event ID
* @return distribution status, or Optional.empty() if none exists
*/
Optional<DistributionStatus> findByEventId(String eventId);
/**
* Finds the distribution status for an event ID, fetch-joining its channel statuses.
*
* @param eventId event ID
* @return distribution status including channel statuses, or Optional.empty() if none exists
*/
@Query("SELECT d FROM DistributionStatus d LEFT JOIN FETCH d.channels WHERE d.eventId = :eventId")
Optional<DistributionStatus> findByEventIdWithChannels(@Param("eventId") String eventId);
/**
* Checks whether a distribution status exists for an event ID.
*
* @param eventId event ID
* @return true if a record exists
*/
boolean existsByEventId(String eventId);
/**
* Deletes the distribution status for an event ID.
*
* @param eventId event ID
*/
void deleteByEventId(String eventId);
}

View File

@ -0,0 +1,97 @@
package com.kt.distribution.repository;
import com.kt.distribution.dto.DistributionStatusResponse;
import com.kt.distribution.entity.DistributionStatus;
import com.kt.distribution.mapper.DistributionStatusMapper;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Repository;
import org.springframework.transaction.annotation.Transactional;
import java.util.Optional;
/**
* Distribution status store.
*
* Persists distribution status in a PostgreSQL database.
*
* @author Backend Developer
* @since 2025-10-24
*/
@Slf4j
@Repository
@RequiredArgsConstructor
public class DistributionStatusRepository {
private final DistributionStatusJpaRepository jpaRepository;
private final DistributionStatusMapper mapper;
/**
* Saves a distribution status.
*
* @param eventId event ID
* @param status distribution status DTO
*/
@Transactional
public void save(String eventId, DistributionStatusResponse status) {
log.debug("Saving distribution status: eventId={}, overallStatus={}", eventId, status.getOverallStatus());
// Update the existing record if present, otherwise create a new one
Optional<DistributionStatus> existingStatus = jpaRepository.findByEventIdWithChannels(eventId);
if (existingStatus.isPresent()) {
// Update the existing record
DistributionStatus entity = existingStatus.get();
entity.setOverallStatus(status.getOverallStatus());
entity.setStartedAt(status.getStartedAt());
entity.setCompletedAt(status.getCompletedAt());
// Remove all existing channel statuses, then add the new ones
entity.getChannels().clear();
DistributionStatus newEntity = mapper.toEntity(status);
if (newEntity.getChannels() != null) {
newEntity.getChannels().forEach(entity::addChannelStatus);
}
jpaRepository.save(entity);
} else {
// Create a new record
DistributionStatus entity = mapper.toEntity(status);
jpaRepository.save(entity);
}
}
/**
* Finds a distribution status.
*
* @param eventId event ID
* @return distribution status DTO, or Optional.empty() if none exists
*/
@Transactional(readOnly = true)
public Optional<DistributionStatusResponse> findByEventId(String eventId) {
log.debug("Finding distribution status: eventId={}", eventId);
return jpaRepository.findByEventIdWithChannels(eventId)
.map(mapper::toDto);
}
/**
* Deletes a distribution status.
*
* @param eventId event ID
*/
@Transactional
public void delete(String eventId) {
log.debug("Deleting distribution status: eventId={}", eventId);
jpaRepository.deleteByEventId(eventId);
}
/**
* Deletes all distribution statuses (for tests).
*/
@Transactional
public void deleteAll() {
log.debug("Deleting all distribution statuses");
jpaRepository.deleteAll();
}
}
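Note that save() behaves as an upsert: a second save for the same eventId updates the row and replaces its channel statuses instead of inserting a duplicate. A minimal sketch, assuming the repository is injected as statusRepository:
// Illustration only: two saves for the same eventId leave a single, updated record.
statusRepository.save("evt-test-001", DistributionStatusResponse.builder()
        .eventId("evt-test-001").overallStatus("IN_PROGRESS").channels(List.of()).build());
statusRepository.save("evt-test-001", DistributionStatusResponse.builder()
        .eventId("evt-test-001").overallStatus("COMPLETED").channels(List.of()).build());
// findByEventId now reports COMPLETED for evt-test-001
String overall = statusRepository.findByEventId("evt-test-001")
        .map(DistributionStatusResponse::getOverallStatus)
        .orElse("NOT_FOUND");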

View File

@ -0,0 +1,264 @@
package com.kt.distribution.service;
import com.kt.distribution.adapter.ChannelAdapter;
import com.kt.distribution.dto.*;
import com.kt.distribution.event.DistributionCompletedEvent;
import com.kt.distribution.repository.DistributionStatusRepository;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.time.LocalDateTime;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.stream.Collectors;
/**
* Distribution Service
* Distributes events to multiple channels in parallel and publishes Kafka events.
*
* @author System Architect
* @since 2025-10-23
*/
@Slf4j
@Service
public class DistributionService {
private final List<ChannelAdapter> channelAdapters;
private final Optional<KafkaEventPublisher> kafkaEventPublisher;
private final DistributionStatusRepository statusRepository;
@Autowired
public DistributionService(List<ChannelAdapter> channelAdapters,
Optional<KafkaEventPublisher> kafkaEventPublisher,
DistributionStatusRepository statusRepository) {
this.channelAdapters = channelAdapters;
this.kafkaEventPublisher = kafkaEventPublisher;
this.statusRepository = statusRepository;
}
// ExecutorService for parallel execution (fixed pool of 10 threads, one task per channel)
private final ExecutorService executorService = Executors.newFixedThreadPool(10);
/**
* Distributes an event to multiple channels in parallel.
*
* @param request DistributionRequest
* @return DistributionResponse
*/
public DistributionResponse distribute(DistributionRequest request) {
LocalDateTime startedAt = LocalDateTime.now();
long startTime = System.currentTimeMillis();
log.info("Starting multi-channel distribution: eventId={}, channels={}",
request.getEventId(), request.getChannels());
// Persist the initial status (IN_PROGRESS)
saveInProgressStatus(request.getEventId(), request.getChannels(), startedAt);
// Map the channel adapters by channel type
Map<String, ChannelAdapter> adapterMap = channelAdapters.stream()
.collect(Collectors.toMap(
adapter -> adapter.getChannelType().name(),
adapter -> adapter
));
// Launch the distribution to each channel in parallel
List<CompletableFuture<ChannelDistributionResult>> futures = request.getChannels().stream()
.map(channelType -> {
ChannelAdapter adapter = adapterMap.get(channelType.name());
if (adapter == null) {
log.warn("No adapter found for channel: {}", channelType);
return CompletableFuture.completedFuture(
ChannelDistributionResult.builder()
.channel(channelType)
.success(false)
.errorMessage("Adapter not found")
.build()
);
}
// Execute asynchronously on the shared executor
return CompletableFuture.supplyAsync(
() -> adapter.distribute(request),
executorService
);
})
.collect(Collectors.toList());
// Wait for every channel distribution to finish
CompletableFuture<Void> allOf = CompletableFuture.allOf(
futures.toArray(new CompletableFuture[0])
);
allOf.join(); // blocking wait (the 1-minute target is aspirational; join() itself applies no timeout)
// Collect the per-channel results
List<ChannelDistributionResult> results = futures.stream()
.map(CompletableFuture::join)
.collect(Collectors.toList());
long totalExecutionTime = System.currentTimeMillis() - startTime;
LocalDateTime completedAt = LocalDateTime.now();
// Count successes and failures
long successCount = results.stream().filter(ChannelDistributionResult::isSuccess).count();
long failureCount = results.size() - successCount;
log.info("Multi-channel distribution completed: eventId={}, successCount={}, failureCount={}, totalTime={}ms",
request.getEventId(), successCount, failureCount, totalExecutionTime);
// Persist the final status (COMPLETED / PARTIAL_FAILURE / FAILED)
saveCompletedStatus(request.getEventId(), results, startedAt, completedAt, successCount, failureCount);
// Publish the Kafka event
publishDistributionCompletedEvent(request.getEventId(), results);
// Build the response
return DistributionResponse.builder()
.eventId(request.getEventId())
.success(successCount > 0) // treated as successful when at least one channel succeeded
.channelResults(results)
.successCount((int) successCount)
.failureCount((int) failureCount)
.completedAt(completedAt)
.totalExecutionTimeMs(totalExecutionTime)
.message(String.format("Distribution completed: %d succeeded, %d failed",
successCount, failureCount))
.build();
}
/**
* Looks up the distribution status for an event.
*
* @param eventId event ID
* @return distribution status
*/
public DistributionStatusResponse getDistributionStatus(String eventId) {
return statusRepository.findByEventId(eventId)
.orElse(DistributionStatusResponse.builder()
.eventId(eventId)
.overallStatus("NOT_FOUND")
.channels(List.of())
.build());
}
/**
* Saves the initial distribution status (IN_PROGRESS).
*
* @param eventId event ID
* @param channels target distribution channels
* @param startedAt start time
*/
private void saveInProgressStatus(String eventId, List<ChannelType> channels, LocalDateTime startedAt) {
List<ChannelStatus> channelStatuses = channels.stream()
.map(channelType -> ChannelStatus.builder()
.channel(channelType)
.status("PENDING")
.eventId(eventId)
.build())
.collect(Collectors.toList());
DistributionStatusResponse status = DistributionStatusResponse.builder()
.eventId(eventId)
.overallStatus("IN_PROGRESS")
.startedAt(startedAt)
.channels(channelStatuses)
.build();
statusRepository.save(eventId, status);
}
/**
* Saves the final distribution status.
*
* @param eventId event ID
* @param results per-channel distribution results
* @param startedAt start time
* @param completedAt completion time
* @param successCount number of successful channels
* @param failureCount number of failed channels
*/
private void saveCompletedStatus(String eventId, List<ChannelDistributionResult> results,
LocalDateTime startedAt, LocalDateTime completedAt,
long successCount, long failureCount) {
// Determine the overall status
String overallStatus;
if (successCount == 0) {
overallStatus = "FAILED";
} else if (failureCount == 0) {
overallStatus = "COMPLETED";
} else {
overallStatus = "PARTIAL_FAILURE";
}
// Convert each ChannelDistributionResult to a ChannelStatus
List<ChannelStatus> channelStatuses = results.stream()
.map(result -> convertToChannelStatus(result, eventId, completedAt))
.collect(Collectors.toList());
DistributionStatusResponse status = DistributionStatusResponse.builder()
.eventId(eventId)
.overallStatus(overallStatus)
.startedAt(startedAt)
.completedAt(completedAt)
.channels(channelStatuses)
.build();
statusRepository.save(eventId, status);
}
/**
* Converts a ChannelDistributionResult to a ChannelStatus.
*
* @param result distribution result
* @param eventId event ID
* @param completedAt completion time
* @return channel status
*/
private ChannelStatus convertToChannelStatus(ChannelDistributionResult result, String eventId, LocalDateTime completedAt) {
return ChannelStatus.builder()
.channel(result.getChannel())
.status(result.isSuccess() ? "COMPLETED" : "FAILED")
.distributionId(result.getDistributionId())
.estimatedViews(result.getEstimatedReach())
.eventId(eventId)
.completedAt(completedAt)
.errorMessage(result.getErrorMessage())
.build();
}
/**
* Publishes the DistributionCompleted event.
*
* @param eventId event ID
* @param results per-channel distribution results
*/
private void publishDistributionCompletedEvent(String eventId, List<ChannelDistributionResult> results) {
if (kafkaEventPublisher.isEmpty()) {
log.warn("KafkaEventPublisher not available - skipping event publishing");
return;
}
List<com.kt.distribution.event.DistributedChannelInfo> distributedChannels = results.stream()
.map(result -> com.kt.distribution.event.DistributedChannelInfo.builder()
.channel(result.getChannel().getDisplayName())
.channelType(result.getChannel().getCategory())
.status(result.isSuccess() ? "SUCCESS" : "FAILED")
.expectedViews(result.getEstimatedReach())
.build())
.collect(Collectors.toList());
DistributionCompletedEvent event = DistributionCompletedEvent.builder()
.eventId(eventId)
.distributedChannels(distributedChannels)
.completedAt(LocalDateTime.now())
.build();
kafkaEventPublisher.get().publishDistributionCompleted(event);
}
}
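A hedged call sketch (not part of this PR): only eventId and channels are taken from the code above; the other request fields shown in the test payload (title, imageUrl, channelSettings) are omitted, and the Lombok-style builder on DistributionRequest is assumed from the rest of this change.
// Hypothetical invocation (distributionService is assumed to be injected).
DistributionRequest request = DistributionRequest.builder()
        .eventId("evt-test-001")
        .channels(List.of(ChannelType.URIDONGNETV, ChannelType.INSTAGRAM))
        .build();
DistributionResponse response = distributionService.distribute(request);
// getSuccessCount() + getFailureCount() == 2; success is true as soon as one channel adapter succeeded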

View File

@ -0,0 +1,62 @@
package com.kt.distribution.service;
import com.kt.distribution.event.DistributionCompletedEvent;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;
import org.springframework.stereotype.Service;
import java.util.concurrent.CompletableFuture;
/**
* Kafka Event Publisher
* Publishes DistributionCompleted events to Kafka.
*
* @author System Architect
* @since 2025-10-23
*/
@Slf4j
@Service
@ConditionalOnProperty(name = "spring.kafka.enabled", havingValue = "true", matchIfMissing = true)
@RequiredArgsConstructor
public class KafkaEventPublisher {
private final KafkaTemplate<String, Object> kafkaTemplate;
// Note: Spring cannot inject @Value into a static field; bind it as an instance field with a default instead.
@Value("${kafka.topics.distribution-completed:distribution-completed}")
private String distributionCompletedTopic;
/**
* Publishes a distribution-completed event.
*
* @param event DistributionCompletedEvent
*/
public void publishDistributionCompleted(DistributionCompletedEvent event) {
try {
log.info("Publishing DistributionCompletedEvent: eventId={}, channels={}",
event.getEventId(), event.getDistributedChannels().size());
CompletableFuture<SendResult<String, Object>> future =
kafkaTemplate.send(distributionCompletedTopic, event.getEventId(), event);
future.whenComplete((result, ex) -> {
if (ex == null) {
log.info("DistributionCompletedEvent published successfully: topic={}, partition={}, offset={}",
distributionCompletedTopic,
result.getRecordMetadata().partition(),
result.getRecordMetadata().offset());
} else {
log.error("Failed to publish DistributionCompletedEvent: eventId={}, error={}",
event.getEventId(), ex.getMessage(), ex);
}
});
} catch (Exception e) {
log.error("Error publishing DistributionCompletedEvent: eventId={}, error={}",
event.getEventId(), e.getMessage(), e);
}
}
}
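Binding the topic name through the constructor is a common alternative to the field-level @Value above; it avoids the static-field pitfall entirely and makes the publisher easier to unit-test. A sketch only, not part of this change (drop @RequiredArgsConstructor if an explicit constructor is used):
// Alternative: constructor binding of the topic name.
public KafkaEventPublisher(KafkaTemplate<String, Object> kafkaTemplate,
        @Value("${kafka.topics.distribution-completed:distribution-completed}") String distributionCompletedTopic) {
    this.kafkaTemplate = kafkaTemplate;
    this.distributionCompletedTopic = distributionCompletedTopic;
}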

View File

@ -0,0 +1,149 @@
server:
port: 8085
spring:
application:
name: distribution-service
# Database Configuration
datasource:
driver-class-name: org.postgresql.Driver
url: jdbc:postgresql://${DB_HOST:4.217.133.59}:${DB_PORT:5432}/${DB_NAME:distributiondb}
username: ${DB_USERNAME:eventuser}
password: ${DB_PASSWORD:Hi5Jessica!}
hikari:
maximum-pool-size: 10
minimum-idle: 5
connection-timeout: 30000
idle-timeout: 600000
max-lifetime: 1800000
jpa:
hibernate:
ddl-auto: ${JPA_DDL_AUTO:update}
properties:
hibernate:
dialect: org.hibernate.dialect.PostgreSQLDialect
format_sql: true
show_sql: ${JPA_SHOW_SQL:false}
# Redis Configuration
data:
redis:
host: ${REDIS_HOST:20.214.210.71}
port: ${REDIS_PORT:6379}
password: ${REDIS_PASSWORD:Hi5Jessica!}
timeout: 3000ms
lettuce:
pool:
max-active: 8
max-idle: 8
min-idle: 2
# Disable security and kafka auto-configuration
autoconfigure:
exclude:
- org.springframework.boot.autoconfigure.security.servlet.SecurityAutoConfiguration
- org.springframework.boot.actuate.autoconfigure.security.servlet.ManagementWebSecurityAutoConfiguration
- org.springframework.boot.autoconfigure.kafka.KafkaAutoConfiguration
kafka:
enabled: ${KAFKA_ENABLED:true}
bootstrap-servers: ${KAFKA_BOOTSTRAP_SERVERS:20.249.182.13:9095,4.217.131.59:9095}
producer:
key-serializer: org.apache.kafka.common.serialization.StringSerializer
value-serializer: org.springframework.kafka.support.serializer.JsonSerializer
properties:
spring.json.type.mapping: distributionCompleted:com.kt.distribution.event.DistributionCompletedEvent
consumer:
group-id: ${KAFKA_CONSUMER_GROUP:distribution-service}
key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
value-deserializer: org.springframework.kafka.support.serializer.JsonDeserializer
properties:
spring.json.trusted.packages: '*'
# Kafka Topics
kafka:
topics:
distribution-completed: distribution-completed
# Resilience4j Configuration
resilience4j:
circuitbreaker:
instances:
channelApi:
failure-rate-threshold: 50
slow-call-rate-threshold: 50
slow-call-duration-threshold: 5000ms
wait-duration-in-open-state: 30s
permitted-number-of-calls-in-half-open-state: 3
sliding-window-type: COUNT_BASED
sliding-window-size: 10
minimum-number-of-calls: 5
retry:
instances:
channelApi:
max-attempts: 3
wait-duration: 1s
exponential-backoff-multiplier: 2
retry-exceptions:
- java.net.SocketTimeoutException
- java.net.ConnectException
- org.springframework.web.client.ResourceAccessException
bulkhead:
instances:
channelApi:
max-concurrent-calls: 10
max-wait-duration: 0ms
# External Channel APIs (Mock URLs)
channel:
apis:
uridongnetv:
url: ${URIDONGNETV_API_URL:http://localhost:9001/api/uridongnetv}
timeout: 10000
ringobiz:
url: ${RINGOBIZ_API_URL:http://localhost:9002/api/ringobiz}
timeout: 10000
ginitv:
url: ${GINITV_API_URL:http://localhost:9003/api/ginitv}
timeout: 10000
instagram:
url: ${INSTAGRAM_API_URL:http://localhost:9004/api/instagram}
timeout: 10000
naver:
url: ${NAVER_API_URL:http://localhost:9005/api/naver}
timeout: 10000
kakao:
url: ${KAKAO_API_URL:http://localhost:9006/api/kakao}
timeout: 10000
# Springdoc OpenAPI (Swagger)
springdoc:
api-docs:
path: /v3/api-docs
enabled: true
swagger-ui:
path: /swagger-ui.html
enabled: true
operations-sorter: alpha
tags-sorter: alpha
display-request-duration: true
show-actuator: true
# Logging
logging:
file:
name: ${LOG_FILE:logs/distribution-service.log}
logback:
rollingpolicy:
max-file-size: 10MB
max-history: 7
total-size-cap: 100MB
level:
com.kt.distribution: DEBUG
org.springframework.kafka: INFO
io.github.resilience4j: DEBUG
org.springframework.web: DEBUG
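The channel.apis block above is the kind of structure that can be bound to a typed properties class; a hedged sketch only (the adapters in this PR may instead read these values via @Value, and the class name here is illustrative):
// Hypothetical typed binding for channel.apis.* (sketch only).
@Getter
@Setter
@Component
@ConfigurationProperties(prefix = "channel")
public class ChannelApiProperties {
    // keyed by uridongnetv, ringobiz, ginitv, instagram, naver, kakao
    private Map<String, Api> apis = new HashMap<>();

    @Getter
    @Setter
    public static class Api {
        private String url;  // e.g. http://localhost:9004/api/instagram
        private int timeout; // milliseconds
    }
}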

View File

@ -0,0 +1,172 @@
[
{
"eventId": "evt-test-001",
"title": "봄맞이 삼겹살 50% 할인 이벤트",
"description": "3월 한정 특별 이벤트! 삼겹살 1인분 무료 증정",
"imageUrl": "https://cdn.example.com/event-image-001.jpg",
"channels": ["URIDONGNETV", "INSTAGRAM", "KAKAO", "NAVER"],
"channelSettings": {
"URIDONGNETV": {
"radius": "1km",
"timeSlot": "evening"
}
}
},
{
"eventId": "evt-test-002",
"title": "신규 고객 환영! 치킨 3,000원 할인",
"description": "처음 방문하시는 고객님께 특별 할인 쿠폰 제공. 기간 내 사용 가능",
"imageUrl": "https://cdn.example.com/event-image-002.jpg",
"channels": ["INSTAGRAM", "KAKAO", "NAVER"],
"channelSettings": {
"INSTAGRAM": {
"hashtags": ["치킨", "할인", "신규고객"]
}
}
},
{
"eventId": "evt-test-003",
"title": "주말 특가! 피자 1+1 이벤트",
"description": "토요일, 일요일 한정! 모든 피자 1+1 행사. 배달 주문 가능",
"imageUrl": "https://cdn.example.com/event-image-003.jpg",
"channels": ["URIDONGNETV", "KAKAO"],
"channelSettings": {
"URIDONGNETV": {
"radius": "2km",
"timeSlot": "lunch"
},
"KAKAO": {
"targetAge": "20-40",
"sendTime": "11:00"
}
}
},
{
"eventId": "evt-test-004",
"title": "여름 시즌 냉면 페스티벌",
"description": "시원한 냉면과 함께하는 여름! 전 메뉴 20% 할인",
"imageUrl": "https://cdn.example.com/event-image-004.jpg",
"channels": ["URIDONGNETV", "INSTAGRAM", "NAVER"],
"channelSettings": {
"URIDONGNETV": {
"radius": "500m",
"timeSlot": "afternoon"
}
}
},
{
"eventId": "evt-test-005",
"title": "리뷰 작성 시 음료 무료!",
"description": "네이버 리뷰 작성하고 아메리카노 1잔 무료로 받아가세요",
"imageUrl": "https://cdn.example.com/event-image-005.jpg",
"channels": ["NAVER", "INSTAGRAM"],
"channelSettings": {
"NAVER": {
"reviewRequired": true
}
}
},
{
"eventId": "evt-test-006",
"title": "생일 축하! 케이크 30% 할인",
"description": "생일 당일 방문 시 신분증 제시하면 케이크 30% 할인. 예약 필수",
"imageUrl": "https://cdn.example.com/event-image-006.jpg",
"channels": ["KAKAO", "INSTAGRAM"],
"channelSettings": {
"KAKAO": {
"reservationRequired": true,
"targetAge": "all"
}
}
},
{
"eventId": "evt-test-007",
"title": "점심 시간 특가! 런치 세트 8,000원",
"description": "평일 11:30~14:00 런치 세트 메뉴 8,000원. 커피 포함",
"imageUrl": "https://cdn.example.com/event-image-007.jpg",
"channels": ["URIDONGNETV", "NAVER"],
"channelSettings": {
"URIDONGNETV": {
"radius": "1.5km",
"timeSlot": "lunch"
}
}
},
{
"eventId": "evt-test-008",
"title": "가족 나들이 패키지 20% 할인",
"description": "4인 가족 세트 메뉴 20% 할인! 키즈 메뉴 포함",
"imageUrl": "https://cdn.example.com/event-image-008.jpg",
"channels": ["KAKAO", "INSTAGRAM", "NAVER"],
"channelSettings": {
"KAKAO": {
"targetAge": "30-50",
"sendTime": "10:00"
}
}
},
{
"eventId": "evt-test-009",
"title": "야간 할인! 저녁 9시 이후 전 메뉴 15% OFF",
"description": "저녁 9시 이후 방문 시 모든 메뉴 15% 할인. 포장 가능",
"imageUrl": "https://cdn.example.com/event-image-009.jpg",
"channels": ["URIDONGNETV", "INSTAGRAM"],
"channelSettings": {
"URIDONGNETV": {
"radius": "1km",
"timeSlot": "evening"
}
}
},
{
"eventId": "evt-test-010",
"title": "SNS 팔로우 이벤트! 디저트 무료",
"description": "인스타그램 팔로우 후 인증하면 디저트 1개 무료 제공",
"imageUrl": "https://cdn.example.com/event-image-010.jpg",
"channels": ["INSTAGRAM", "KAKAO"],
"channelSettings": {
"INSTAGRAM": {
"followRequired": true,
"hashtags": ["팔로우이벤트", "디저트무료", "맛집"]
}
}
},
{
"eventId": "evt-test-011",
"title": "지니TV 특별 프로모션! 한우 세트 40% 할인",
"description": "지니TV 시청자 한정! 최상급 한우 세트 메뉴 40% 할인. 예약 시 와인 1병 무료 제공",
"imageUrl": "https://cdn.example.com/event-image-011.jpg",
"channels": ["GINITV", "KAKAO", "NAVER"],
"channelSettings": {
"GINITV": {
"targetRegion": "서울/경기",
"timeSlot": "primetime",
"duration": "15s",
"targetAge": "30-60"
},
"KAKAO": {
"targetAge": "30-60",
"sendTime": "18:00"
}
}
},
{
"eventId": "evt-test-012",
"title": "저녁 시간대 지니TV 광고! 족발·보쌈 특가",
"description": "TV 광고 보고 주문하신 분께 사이드 메뉴 무료 증정",
"imageUrl": "https://cdn.example.com/event-image-012.jpg",
"channels": ["GINITV", "URIDONGNETV"],
"channelSettings": {
"GINITV": {
"targetRegion": "전국",
"timeSlot": "evening",
"duration": "10s",
"targetAge": "20-50"
},
"URIDONGNETV": {
"radius": "3km",
"timeSlot": "evening"
}
}
}
]
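These fixtures can be loaded in a test with the same Jackson types already used in the mapper; a sketch, assuming a hypothetical classpath location /test-data.json (the real file path is not shown in this hunk):
// Hypothetical fixture loading (sketch only); the snippet throws IOException.
ObjectMapper objectMapper = new ObjectMapper();
try (InputStream in = getClass().getResourceAsStream("/test-data.json")) {
    List<Map<String, Object>> events =
            objectMapper.readValue(in, new TypeReference<List<Map<String, Object>>>() {});
    // events.get(0).get("eventId") -> "evt-test-001"
}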

View File

@ -0,0 +1,34 @@
#!/bin/bash
# Distribution Service API test script
echo "=== Distribution Service API Test ==="
echo ""
# 1. Health Check (to be added later)
# echo "1. Health Check..."
# curl -X GET http://localhost:8085/actuator/health
# echo ""
# 2. Multi-channel distribution test
echo "1. Testing Multi-Channel Distribution..."
echo ""
curl -X POST http://localhost:8085/api/distribution/distribute \
-H "Content-Type: application/json" \
-d '{
"eventId": "evt-test-001",
"title": "봄맞이 삼겹살 50% 할인 이벤트",
"description": "3월 한정 특별 이벤트! 삼겹살 1인분 무료 증정",
"imageUrl": "https://cdn.example.com/event-image.jpg",
"channels": ["URIDONGNETV", "INSTAGRAM", "KAKAO", "NAVER"],
"channelSettings": {
"URIDONGNETV": {
"radius": "1km",
"timeSlot": "evening"
}
}
}' | jq '.'
echo ""
echo "=== Test Completed ==="

View File

@ -1,63 +0,0 @@
# Kafka message inspection script (Windows PowerShell)
#
# Usage: .\check-kafka-messages.ps1
$KAFKA_SERVER = "4.230.50.63:9092"
Write-Host "========================================" -ForegroundColor Cyan
Write-Host "📊 Kafka topic message check" -ForegroundColor Cyan
Write-Host "========================================" -ForegroundColor Cyan
Write-Host ""
# Verify the Kafka installation
$kafkaPath = Read-Host "Enter the Kafka installation path (e.g. C:\kafka)"
if (-not (Test-Path "$kafkaPath\bin\windows\kafka-console-consumer.bat")) {
Write-Host "❌ Kafka is not installed at that path." -ForegroundColor Red
exit 1
}
Write-Host "✅ Kafka path confirmed: $kafkaPath" -ForegroundColor Green
Write-Host ""
# Topic selection
Write-Host "Select the topic to inspect:" -ForegroundColor Yellow
Write-Host " 1. event.created (event created)"
Write-Host " 2. participant.registered (participant registered)"
Write-Host " 3. distribution.completed (distribution completed)"
Write-Host " 4. All of the above"
Write-Host ""
$choice = Read-Host "Choice (1-4)"
$topics = @()
switch ($choice) {
"1" { $topics = @("event.created") }
"2" { $topics = @("participant.registered") }
"3" { $topics = @("distribution.completed") }
"4" { $topics = @("event.created", "participant.registered", "distribution.completed") }
default {
Write-Host "❌ 잘못된 선택입니다." -ForegroundColor Red
exit 1
}
}
# Inspect the messages of each topic
foreach ($topic in $topics) {
Write-Host ""
Write-Host "========================================" -ForegroundColor Cyan
Write-Host "📩 Topic: $topic" -ForegroundColor Cyan
Write-Host "========================================" -ForegroundColor Cyan
# Read up to 5 messages per topic
& "$kafkaPath\bin\windows\kafka-console-consumer.bat" `
--bootstrap-server $KAFKA_SERVER `
--topic $topic `
--from-beginning `
--max-messages 5 `
--timeout-ms 5000 2>&1 | Out-String | Write-Host
Write-Host ""
}
Write-Host "✅ 확인 완료!" -ForegroundColor Green

View File

@ -1,96 +0,0 @@
# Mermaid Syntax Checker using Docker Container
# Similar to PlantUML checker - keeps container running for better performance
param(
[Parameter(Mandatory=$true, Position=0)]
[string]$FilePath
)
# Check if file exists
if (-not (Test-Path $FilePath)) {
Write-Host "Error: File not found: $FilePath" -ForegroundColor Red
exit 1
}
# Get absolute path
$absolutePath = (Resolve-Path $FilePath).Path
$fileName = Split-Path $absolutePath -Leaf
Write-Host "`nChecking Mermaid syntax for: $fileName" -ForegroundColor Cyan
Write-Host ("=" * 60) -ForegroundColor Gray
# Check if mermaid container is running
$containerRunning = docker ps --filter "name=mermaid-cli" --format "{{.Names}}" 2>$null
if (-not $containerRunning) {
Write-Host "Error: Mermaid CLI container is not running." -ForegroundColor Red
Write-Host "Please follow the setup instructions in the Mermaid guide to start the container." -ForegroundColor Yellow
Write-Host "`nQuick setup commands:" -ForegroundColor Cyan
Write-Host ""
Write-Host "# 1. Start container with root privileges (port 48080)" -ForegroundColor Green
Write-Host "docker run -d --rm --name mermaid-cli -u root -p 48080:8080 --entrypoint sh minlag/mermaid-cli:latest -c `"while true;do sleep 3600; done`"" -ForegroundColor White
Write-Host ""
Write-Host "# 2. Install Chromium and dependencies" -ForegroundColor Green
Write-Host "docker exec mermaid-cli sh -c `"apk add --no-cache chromium chromium-chromedriver nss freetype harfbuzz ca-certificates ttf-freefont`"" -ForegroundColor White
Write-Host ""
Write-Host "# 3. Create Puppeteer configuration" -ForegroundColor Green
Write-Host "docker exec mermaid-cli sh -c `"echo '{```"executablePath```": ```"/usr/bin/chromium-browser```", ```"args```": [```"--no-sandbox```", ```"--disable-setuid-sandbox```", ```"--disable-dev-shm-usage```"]}' > /tmp/puppeteer-config.json`"" -ForegroundColor White
Write-Host ""
exit 1
}
# Set Puppeteer configuration file path
$puppeteerConfigFile = "/tmp/puppeteer-config.json"
# Generate unique temp filename
$timestamp = Get-Date -Format "yyyyMMddHHmmss"
$processId = $PID
$tempFile = "/tmp/mermaid_${timestamp}_${processId}.mmd"
$outputFile = "/tmp/mermaid_${timestamp}_${processId}.svg"
try {
# Copy file to container
Write-Host "Copying file to container..." -ForegroundColor Gray
docker cp "$absolutePath" "mermaid-cli:$tempFile" 2>&1 | Out-Null
if ($LASTEXITCODE -ne 0) {
Write-Host "Error: Failed to copy file to container" -ForegroundColor Red
exit 1
}
# Run syntax check with Puppeteer configuration
Write-Host "Running syntax check..." -ForegroundColor Gray
$output = docker exec mermaid-cli sh -c "cd /home/mermaidcli && node_modules/.bin/mmdc -i '$tempFile' -o '$outputFile' -p '$puppeteerConfigFile' -q" 2>&1
$exitCode = $LASTEXITCODE
if ($exitCode -eq 0) {
Write-Host "`nSuccess: Mermaid syntax is valid!" -ForegroundColor Green
} else {
Write-Host "`nError: Mermaid syntax validation failed!" -ForegroundColor Red
Write-Host "`nError details:" -ForegroundColor Red
# Parse and display error messages
$errorLines = $output -split "`n"
foreach ($line in $errorLines) {
if ($line -match "Error:|Parse error|Expecting|Syntax error") {
Write-Host " $line" -ForegroundColor Red
} elseif ($line -match "line \d+|at line") {
Write-Host " $line" -ForegroundColor Yellow
} elseif ($line.Trim() -ne "") {
Write-Host " $line" -ForegroundColor DarkRed
}
}
exit 1
}
} finally {
# Clean up temp files
Write-Host "`nCleaning up..." -ForegroundColor Gray
docker exec mermaid-cli rm -f "$tempFile" "$outputFile" 2>&1 | Out-Null
}
Write-Host "`nValidation complete!" -ForegroundColor Cyan
# Note: Container is kept running for subsequent checks
# To stop: docker stop mermaid-cli && docker rm mermaid-cli

View File

@ -1,107 +0,0 @@
#!/bin/bash
# Mermaid Syntax Checker using Docker Container
# Similar to PlantUML checker - keeps container running for better performance
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[0;33m'
CYAN='\033[0;36m'
GRAY='\033[0;90m'
NC='\033[0m' # No Color
# Check if file path is provided
if [ -z "$1" ]; then
echo -e "${RED}Error: No file path provided${NC}"
echo "Usage: $0 <mermaid-file>"
exit 1
fi
FILE_PATH="$1"
# Check if file exists
if [ ! -f "$FILE_PATH" ]; then
echo -e "${RED}Error: File not found: $FILE_PATH${NC}"
exit 1
fi
# Get absolute path
ABSOLUTE_PATH=$(realpath "$FILE_PATH")
FILE_NAME=$(basename "$ABSOLUTE_PATH")
echo -e "\n${CYAN}Checking Mermaid syntax for: $FILE_NAME${NC}"
echo -e "${GRAY}$(printf '=%.0s' {1..60})${NC}"
# Check if mermaid container is running
CONTAINER_RUNNING=$(docker ps --filter "name=mermaid-cli" --format "{{.Names}}" 2>/dev/null)
if [ -z "$CONTAINER_RUNNING" ]; then
echo -e "${RED}Error: Mermaid CLI container is not running.${NC}"
echo -e "${YELLOW}Please follow the setup instructions in the Mermaid guide to start the container.${NC}"
echo -e "\n${CYAN}Quick setup commands:${NC}"
echo ""
echo -e "${GREEN}# 1. Start container with root privileges (port 48080)${NC}"
echo -e "${NC}docker run -d --rm --name mermaid-cli -u root -p 48080:8080 --entrypoint sh minlag/mermaid-cli:latest -c \"while true;do sleep 3600; done\"${NC}"
echo ""
echo -e "${GREEN}# 2. Install Chromium and dependencies${NC}"
echo -e "${NC}docker exec mermaid-cli sh -c \"apk add --no-cache chromium chromium-chromedriver nss freetype harfbuzz ca-certificates ttf-freefont\"${NC}"
echo ""
echo -e "${GREEN}# 3. Create Puppeteer configuration${NC}"
echo -e "${NC}docker exec mermaid-cli sh -c \"echo '{\\\"executablePath\\\": \\\"/usr/bin/chromium-browser\\\", \\\"args\\\": [\\\"--no-sandbox\\\", \\\"--disable-setuid-sandbox\\\", \\\"--disable-dev-shm-usage\\\"]}' > /tmp/puppeteer-config.json\"${NC}"
echo ""
exit 1
fi
# Set Puppeteer configuration file path
PUPPETEER_CONFIG_FILE="/tmp/puppeteer-config.json"
# Generate unique temp filename
TIMESTAMP=$(date +"%Y%m%d%H%M%S")
PID=$$
TEMP_FILE="/tmp/mermaid_${TIMESTAMP}_${PID}.mmd"
OUTPUT_FILE="/tmp/mermaid_${TIMESTAMP}_${PID}.svg"
# Copy file to container
echo -e "${GRAY}Copying file to container...${NC}"
docker cp "$ABSOLUTE_PATH" "mermaid-cli:$TEMP_FILE" >/dev/null 2>&1
if [ $? -ne 0 ]; then
echo -e "${RED}Error: Failed to copy file to container${NC}"
exit 1
fi
# Run syntax check with Puppeteer configuration
echo -e "${GRAY}Running syntax check...${NC}"
OUTPUT=$(docker exec mermaid-cli sh -c "cd /home/mermaidcli && node_modules/.bin/mmdc -i '$TEMP_FILE' -o '$OUTPUT_FILE' -p '$PUPPETEER_CONFIG_FILE' -q" 2>&1)
EXIT_CODE=$?
if [ $EXIT_CODE -eq 0 ]; then
echo -e "\n${GREEN}Success: Mermaid syntax is valid!${NC}"
else
echo -e "\n${RED}Error: Mermaid syntax validation failed!${NC}"
echo -e "\n${RED}Error details:${NC}"
# Parse and display error messages
while IFS= read -r line; do
if [[ $line == *"Error:"* ]] || [[ $line == *"Parse error"* ]] || [[ $line == *"Expecting"* ]] || [[ $line == *"Syntax error"* ]]; then
echo -e " ${RED}$line${NC}"
elif [[ $line == *"line"* ]] && [[ $line =~ [0-9]+ ]]; then
echo -e " ${YELLOW}$line${NC}"
elif [[ ! -z "$line" ]]; then
echo -e " ${RED}$line${NC}"
fi
done <<< "$OUTPUT"
# Clean up and exit with error
docker exec mermaid-cli rm -f "$TEMP_FILE" "$OUTPUT_FILE" >/dev/null 2>&1
exit 1
fi
# Clean up temp files
echo -e "\n${GRAY}Cleaning up...${NC}"
docker exec mermaid-cli rm -f "$TEMP_FILE" "$OUTPUT_FILE" >/dev/null 2>&1
echo -e "\n${CYAN}Validation complete!${NC}"
# Note: Container is kept running for subsequent checks
# To stop: docker stop mermaid-cli && docker rm mermaid-cli

View File

@ -1,66 +0,0 @@
param(
[Parameter(Mandatory=$false)]
[string]$FilePath = "C:\home\workspace\tripgen\design\backend\system\azure-physical-architecture.txt"
)
Write-Host "=== PlantUML Syntax Checker ===" -ForegroundColor Cyan
Write-Host "Target file: $FilePath" -ForegroundColor Yellow
# Check if file exists
if (-not (Test-Path $FilePath)) {
Write-Host "❌ File not found: $FilePath" -ForegroundColor Red
exit 1
}
# Execute directly in PowerShell
$timestamp = Get-Date -Format 'yyyyMMddHHmmss'
$tempFile = "/tmp/puml_$timestamp.puml"
# Copy file
Write-Host "`n1. Copying file..." -ForegroundColor Gray
Write-Host " Temporary file: $tempFile"
docker cp $FilePath "plantuml:$tempFile"
if ($LASTEXITCODE -ne 0) {
Write-Host "❌ File copy failed" -ForegroundColor Red
exit 1
}
Write-Host " ✅ Copy completed" -ForegroundColor Green
# Find JAR file path
Write-Host "`n2. Looking for PlantUML JAR file..." -ForegroundColor Gray
$JAR_PATH = docker exec plantuml sh -c "find / -name 'plantuml*.jar' 2>/dev/null | head -1"
Write-Host " JAR path: $JAR_PATH"
Write-Host " ✅ JAR file confirmed" -ForegroundColor Green
# Syntax check
Write-Host "`n3. Running syntax check..." -ForegroundColor Gray
$syntaxOutput = docker exec plantuml sh -c "java -jar $JAR_PATH -checkonly $tempFile 2>&1"
if ($LASTEXITCODE -eq 0) {
Write-Host "`n✅ Syntax check passed!" -ForegroundColor Green
Write-Host " No syntax errors found in the diagram." -ForegroundColor Green
} else {
Write-Host "`n❌ Syntax errors detected!" -ForegroundColor Red
Write-Host "Error details:" -ForegroundColor Red
Write-Host $syntaxOutput -ForegroundColor Yellow
# Detailed error check
Write-Host "`nAnalyzing detailed errors..." -ForegroundColor Yellow
$detailError = docker exec plantuml sh -c "java -jar $JAR_PATH -failfast -v $tempFile 2>&1"
$errorLines = $detailError | Select-String "Error line"
if ($errorLines) {
Write-Host "`n📍 Error locations:" -ForegroundColor Magenta
$errorLines | ForEach-Object {
Write-Host " $($_.Line)" -ForegroundColor Red
}
}
}
# Clean up temporary file
Write-Host "`n4. Cleaning up temporary files..." -ForegroundColor Gray
docker exec plantuml sh -c "rm -f $tempFile" 2>$null
Write-Host " ✅ Cleanup completed" -ForegroundColor Green
Write-Host "`n=== Check completed ===" -ForegroundColor Cyan

View File

@ -1,50 +0,0 @@
#!/bin/bash
# PlantUML file syntax checker script
# Usage: ./check_plantuml.sh <file_to_check>
# Check parameters
if [ $# -eq 0 ]; then
echo "Usage: $0 <file_to_check>"
echo "Example: $0 diagram.puml"
exit 1
fi
# File to check parameter
CHECK_FILE="$1"
# Check if file exists
if [ ! -f "$CHECK_FILE" ]; then
echo "Error: File '$CHECK_FILE' does not exist."
exit 1
fi
# 1. Generate unique filename (prevent conflicts)
TEMP_FILE="/tmp/puml_$(date +%s)_$$.puml"
# 2. Copy file
echo "Copying file to Docker container..."
docker cp "$CHECK_FILE" plantuml:"$TEMP_FILE"
# 3. Find JAR file location
echo "Finding PlantUML JAR file location..."
JAR_PATH=$(docker exec plantuml find / -name "plantuml*.jar" 2>/dev/null | head -1)
if [ -z "$JAR_PATH" ]; then
echo "Error: PlantUML JAR file not found."
exit 1
fi
# 4. Syntax check
echo "Running PlantUML syntax check..."
docker exec plantuml java -jar "$JAR_PATH" -checkonly "$TEMP_FILE"
# 5. Detailed error check (if needed)
echo "Checking detailed error information..."
docker exec plantuml sh -c "cd /tmp && java -jar $JAR_PATH -failfast -v $TEMP_FILE 2>&1 | grep -E 'Error line'"
# 6. Clean up temporary file
echo "Cleaning up temporary files..."
docker exec -u root plantuml rm -f "$TEMP_FILE"
echo "Check completed."

View File

@ -1,65 +0,0 @@
#!/bin/bash
# Extract mermaid charts from markdown file
INPUT_FILE="define/시장조사-차트.md"
OUTPUT_DIR="define/charts"
echo "Extracting Mermaid charts from: $INPUT_FILE"
echo "Output directory: $OUTPUT_DIR"
echo ""
# Create output directory
mkdir -p "$OUTPUT_DIR"
# Counter
chart_num=0
# Read file and extract charts
in_mermaid=false
current_chart=""
current_title=""
while IFS= read -r line || [ -n "$line" ]; do
# Check for section header (## number. title)
if [[ $line =~ ^##[[:space:]]([0-9]+)\.[[:space:]](.+)$ ]]; then
num="${BASH_REMATCH[1]}"
title="${BASH_REMATCH[2]}"
current_title=$(printf "chart%02d_%s" "$num" "${title// /_}")
current_title="${current_title//\//_}"
fi
# Check for mermaid start
if [[ $line == '```mermaid' ]]; then
in_mermaid=true
current_chart=""
continue
fi
# Check for mermaid end
if [[ $line == '```' ]] && $in_mermaid; then
# Save chart
if [ -n "$current_title" ]; then
filename="${current_title}.mmd"
echo "$current_chart" > "$OUTPUT_DIR/$filename"
echo " ✓ Saved: $filename"
((chart_num++))
fi
in_mermaid=false
current_chart=""
continue
fi
# Collect chart lines
if $in_mermaid; then
if [ -n "$current_chart" ]; then
current_chart+=$'\n'
fi
current_chart+="$line"
fi
done < "$INPUT_FILE"
echo ""
echo "✅ Successfully extracted $chart_num charts!"
echo ""
echo "Chart files saved in: $OUTPUT_DIR"

View File

@ -1,47 +0,0 @@
# Mermaid Chart Extractor
# Extracts Mermaid charts from markdown and saves them as individual .mmd files
$markdownFile = "define/시장조사-차트.md"
$outputDir = "define/charts"
Write-Host "Extracting Mermaid charts from: $markdownFile"
Write-Host "Output directory: $outputDir`n"
# Create output directory
if (-not (Test-Path $outputDir)) {
New-Item -ItemType Directory -Path $outputDir | Out-Null
}
# Read markdown file
$content = Get-Content $markdownFile -Raw -Encoding UTF8
# Extract all mermaid blocks with their section headers
$pattern = '## (\d+)\. (.+?)\n\n```mermaid\n(.*?)```'
$matches = [regex]::Matches($content, $pattern, [System.Text.RegularExpressions.RegexOptions]::Singleline)
Write-Host "Found $($matches.Count) Mermaid charts`n"
# Save each chart
$count = 0
foreach ($match in $matches) {
$num = $match.Groups[1].Value
$title = $match.Groups[2].Value
$chartCode = $match.Groups[3].Value
# Clean filename
$filename = "chart$($num.PadLeft(2,'0'))_$($title.Replace(' ', '_').Replace('/', '_')).mmd"
$filepath = Join-Path $outputDir $filename
# Write mermaid code
$chartCode.Trim() | Out-File -FilePath $filepath -Encoding UTF8 -NoNewline
Write-Host " ✓ Saved: $filename"
$count++
}
Write-Host "`n✅ Successfully extracted $count charts!"
Write-Host "`nChart files saved in: $outputDir"
Write-Host "`nNext steps:"
Write-Host "1. Use Mermaid Live Editor: https://mermaid.live/"
Write-Host "2. Copy-paste each .mmd file content"
Write-Host "3. Export as PNG or SVG"

View File

@ -1,61 +0,0 @@
#!/usr/bin/env python3
"""
Mermaid Chart Extractor
Extracts Mermaid charts from markdown and saves them as individual .mmd files
"""
import re
import os
from pathlib import Path
def extract_mermaid_charts(markdown_file, output_dir):
"""Extract all mermaid code blocks from markdown file"""
# Read markdown file
with open(markdown_file, 'r', encoding='utf-8') as f:
content = f.read()
# Find all mermaid code blocks
pattern = r'```mermaid\n(.*?)```'
matches = re.findall(pattern, content, re.DOTALL)
# Create output directory
os.makedirs(output_dir, exist_ok=True)
# Extract chart titles from markdown headers
title_pattern = r'## (\d+)\. (.+?)\n\n```mermaid'
titles = re.findall(title_pattern, content, re.DOTALL)
print(f"Found {len(matches)} Mermaid charts")
# Save each chart as separate .mmd file
for i, (chart_code, (num, title)) in enumerate(zip(matches, titles), 1):
# Clean filename (num is captured as a string by re.findall, so convert it before zero-padding)
filename = f"chart{int(num):02d}_{title.replace(' ', '_').replace('/', '_')}.mmd"
filepath = os.path.join(output_dir, filename)
# Write mermaid code
with open(filepath, 'w', encoding='utf-8') as f:
f.write(chart_code.strip())
print(f" ✓ Saved: {filename}")
return len(matches)
if __name__ == "__main__":
# Configuration
markdown_file = "define/시장조사-차트.md"
output_dir = "define/charts"
print(f"Extracting Mermaid charts from: {markdown_file}")
print(f"Output directory: {output_dir}\n")
count = extract_mermaid_charts(markdown_file, output_dir)
print(f"\n✅ Successfully extracted {count} charts!")
print(f"\nNext steps:")
print(f"1. Use Mermaid Live Editor: https://mermaid.live/")
print(f"2. Copy-paste each .mmd file content")
print(f"3. Export as PNG or SVG")

View File

@ -1,101 +0,0 @@
@echo off
REM ============================================
REM Kafka/Redis integration test script
REM ============================================
echo ============================================
echo Starting Kafka/Redis integration test
echo ============================================
echo.
REM Check the current directory
cd /d "%~dp0\.."
echo Current directory: %CD%
echo.
REM Create the log directory if it does not exist
if not exist "logs" mkdir logs
echo Log directory: %CD%\logs
echo.
REM Test timestamp
set TEST_TIMESTAMP=%date:~0,4%%date:~5,2%%date:~8,2%_%time:~0,2%%time:~3,2%%time:~6,2%
set TEST_TIMESTAMP=%TEST_TIMESTAMP: =0%
set TEST_LOG=logs\kafka-redis-test_%TEST_TIMESTAMP%.log
echo ============================================
echo Step 1: Send Kafka manual test messages
echo ============================================
echo.
echo Sending Kafka messages...
gradlew ai-service:runKafkaManualTest > %TEST_LOG% 2>&1
if %ERRORLEVEL% EQU 0 (
echo ✓ Kafka messages sent
) else (
echo ✗ Failed to send Kafka messages ^(Error Code: %ERRORLEVEL%^)
echo Check the log file: %TEST_LOG%
)
echo.
echo ============================================
echo Step 2: Wait for the AI service consumer to process the messages
echo ============================================
echo.
echo Waiting 60 seconds for the AI service to process the Kafka messages...
timeout /t 60 /nobreak > nul
echo ✓ Wait finished
echo.
echo ============================================
echo Step 3: Check job status ^(Redis^)
echo ============================================
echo.
echo Querying job status...
curl -s "http://localhost:8083/api/v1/ai-service/internal/jobs/manual-job-001/status" >> %TEST_LOG% 2>&1
if %ERRORLEVEL% EQU 0 (
echo ✓ Job status query succeeded
curl -s "http://localhost:8083/api/v1/ai-service/internal/jobs/manual-job-001/status"
) else (
echo ✗ Job status query failed
)
echo.
echo ============================================
echo Step 4: Check AI recommendation results ^(Redis^)
echo ============================================
echo.
echo Querying AI recommendation results...
curl -s "http://localhost:8083/api/v1/ai-service/internal/recommendations/manual-event-001" >> %TEST_LOG% 2>&1
if %ERRORLEVEL% EQU 0 (
echo ✓ AI recommendation query succeeded
curl -s "http://localhost:8083/api/v1/ai-service/internal/recommendations/manual-event-001"
) else (
echo ✗ AI recommendation query failed
)
echo.
echo ============================================
echo Step 5: Check the status of every test message
echo ============================================
echo.
echo [Job 001] status:
curl -s "http://localhost:8083/api/v1/ai-service/internal/jobs/manual-job-001/status" | findstr "status"
echo.
echo [Job 002] status:
curl -s "http://localhost:8083/api/v1/ai-service/internal/jobs/manual-job-002/status" | findstr "status"
echo.
echo [Job 003] status:
curl -s "http://localhost:8083/api/v1/ai-service/internal/jobs/manual-job-003/status" | findstr "status"
echo.
echo ============================================
echo Test completed
echo ============================================
echo.
echo Detailed log file: %TEST_LOG%
echo.
echo Manual verification commands:
echo - Job status: curl http://localhost:8083/api/v1/ai-service/internal/jobs/{jobId}/status
echo - AI recommendation: curl http://localhost:8083/api/v1/ai-service/internal/recommendations/{eventId}
echo.
pause

View File

@ -1,37 +0,0 @@
@echo off
REM Kafka manual test runner script (Windows)
cd /d %~dp0\..
echo ================================================
echo Kafka Manual Test - AI Service
echo ================================================
echo.
echo This script sends test messages to Kafka.
echo ai-service must be running for the messages to be processed.
echo.
echo Kafka Brokers: 20.249.182.13:9095, 4.217.131.59:9095
echo Topic: ai-event-generation-job
echo.
echo ================================================
echo.
REM Run the test class
.\gradlew ai-service:test --tests "com.kt.ai.test.manual.KafkaManualTest" --info
echo.
echo ================================================
echo Test completed!
echo.
echo Check the results:
echo 1. Query the job status:
echo curl http://localhost:8083/api/v1/ai-service/internal/jobs/manual-job-001/status
echo.
echo 2. Query the AI recommendation results:
echo curl http://localhost:8083/api/v1/ai-service/internal/recommendations/manual-event-001
echo.
echo 3. Check the Redis keys:
echo curl http://localhost:8083/api/v1/ai-service/internal/recommendations/debug/redis-keys
echo ================================================
pause

View File

@ -1,303 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Tripgen Service Runner Script
Reads execution profiles from {service-name}/.run/{service-name}.run.xml and runs services accordingly.
Usage:
python run-config.py <service-name>
Examples:
python run-config.py user-service
python run-config.py location-service
python run-config.py trip-service
python run-config.py ai-service
"""
import os
import sys
import subprocess
import xml.etree.ElementTree as ET
from pathlib import Path
import argparse
def get_project_root():
"""Find project root directory"""
current_dir = Path(__file__).parent.absolute()
while current_dir.parent != current_dir:
if (current_dir / 'gradlew').exists() or (current_dir / 'gradlew.bat').exists():
return current_dir
current_dir = current_dir.parent
# If gradlew not found, assume parent directory of develop as project root
return Path(__file__).parent.parent.absolute()
def parse_run_configurations(project_root, service_name=None):
"""Parse run configuration files from .run directories"""
configurations = {}
if service_name:
# Parse specific service configuration
run_config_path = project_root / service_name / '.run' / f'{service_name}.run.xml'
if run_config_path.exists():
config = parse_single_run_config(run_config_path, service_name)
if config:
configurations[service_name] = config
else:
print(f"[ERROR] Cannot find run configuration: {run_config_path}")
else:
# Find all service directories
service_dirs = ['user-service', 'location-service', 'trip-service', 'ai-service']
for service in service_dirs:
run_config_path = project_root / service / '.run' / f'{service}.run.xml'
if run_config_path.exists():
config = parse_single_run_config(run_config_path, service)
if config:
configurations[service] = config
return configurations
def parse_single_run_config(config_path, service_name):
"""Parse a single run configuration file"""
try:
tree = ET.parse(config_path)
root = tree.getroot()
# Find configuration element
config = root.find('.//configuration[@type="GradleRunConfiguration"]')
if config is None:
print(f"[WARNING] No Gradle configuration found in {config_path}")
return None
# Extract environment variables
env_vars = {}
env_option = config.find('.//option[@name="env"]')
if env_option is not None:
env_map = env_option.find('map')
if env_map is not None:
for entry in env_map.findall('entry'):
key = entry.get('key')
value = entry.get('value')
if key and value:
env_vars[key] = value
# Extract task names
task_names = []
task_names_option = config.find('.//option[@name="taskNames"]')
if task_names_option is not None:
task_list = task_names_option.find('list')
if task_list is not None:
for option in task_list.findall('option'):
value = option.get('value')
if value:
task_names.append(value)
if env_vars or task_names:
return {
'env_vars': env_vars,
'task_names': task_names,
'config_path': str(config_path)
}
return None
except ET.ParseError as e:
print(f"[ERROR] XML parsing error in {config_path}: {e}")
return None
except Exception as e:
print(f"[ERROR] Error reading {config_path}: {e}")
return None
def get_gradle_command(project_root):
"""Return appropriate Gradle command for OS"""
if os.name == 'nt': # Windows
gradle_bat = project_root / 'gradlew.bat'
if gradle_bat.exists():
return str(gradle_bat)
return 'gradle.bat'
else: # Unix-like (Linux, macOS)
gradle_sh = project_root / 'gradlew'
if gradle_sh.exists():
return str(gradle_sh)
return 'gradle'
def run_service(service_name, config, project_root):
"""Run service"""
print(f"[START] Starting {service_name} service...")
# Set environment variables
env = os.environ.copy()
for key, value in config['env_vars'].items():
env[key] = value
print(f" [ENV] {key}={value}")
# Prepare Gradle command
gradle_cmd = get_gradle_command(project_root)
# Execute tasks
for task_name in config['task_names']:
print(f"\n[RUN] Executing: {task_name}")
cmd = [gradle_cmd, task_name]
try:
# Execute from project root directory
process = subprocess.Popen(
cmd,
cwd=project_root,
env=env,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
universal_newlines=True,
bufsize=1,
encoding='utf-8',
errors='replace'
)
print(f"[CMD] Command: {' '.join(cmd)}")
print(f"[DIR] Working directory: {project_root}")
print("=" * 50)
# Real-time output
for line in process.stdout:
print(line.rstrip())
# Wait for process completion
process.wait()
if process.returncode == 0:
print(f"\n[SUCCESS] {task_name} execution completed")
else:
print(f"\n[FAILED] {task_name} execution failed (exit code: {process.returncode})")
return False
except KeyboardInterrupt:
print(f"\n[STOP] Interrupted by user")
process.terminate()
return False
except Exception as e:
print(f"\n[ERROR] Execution error: {e}")
return False
return True
def list_available_services(configurations):
"""List available services"""
print("[LIST] Available services:")
print("=" * 40)
for service_name, config in configurations.items():
if config['task_names']:
print(f" [SERVICE] {service_name}")
if 'config_path' in config:
print(f" +-- Config: {config['config_path']}")
for task in config['task_names']:
print(f" +-- Task: {task}")
print(f" +-- {len(config['env_vars'])} environment variables")
print()
def main():
"""Main function"""
parser = argparse.ArgumentParser(
description='Tripgen Service Runner Script',
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog="""
Examples:
python run-config.py user-service
python run-config.py location-service
python run-config.py trip-service
python run-config.py ai-service
python run-config.py --list
"""
)
parser.add_argument(
'service_name',
nargs='?',
help='Service name to run'
)
parser.add_argument(
'--list', '-l',
action='store_true',
help='List available services'
)
args = parser.parse_args()
# Find project root
project_root = get_project_root()
print(f"[INFO] Project root: {project_root}")
# Parse run configurations
print("[INFO] Reading run configuration files...")
configurations = parse_run_configurations(project_root)
if not configurations:
print("[ERROR] No execution configurations found")
return 1
print(f"[INFO] Found {len(configurations)} execution configurations")
# List services request
if args.list:
list_available_services(configurations)
return 0
# If service name not provided
if not args.service_name:
print("\n[ERROR] Please provide service name")
list_available_services(configurations)
print("Usage: python run-config.py <service-name>")
return 1
# Find service
service_name = args.service_name
# Try to parse specific service configuration if not found
if service_name not in configurations:
print(f"[INFO] Trying to find configuration for '{service_name}'...")
configurations = parse_run_configurations(project_root, service_name)
if service_name not in configurations:
print(f"[ERROR] Cannot find '{service_name}' service")
list_available_services(configurations)
return 1
config = configurations[service_name]
if not config['task_names']:
print(f"[ERROR] No executable tasks found for '{service_name}' service")
return 1
# Execute service
print(f"\n[TARGET] Starting '{service_name}' service execution")
print("=" * 50)
success = run_service(service_name, config, project_root)
if success:
print(f"\n[COMPLETE] '{service_name}' service started successfully!")
return 0
else:
print(f"\n[FAILED] Failed to start '{service_name}' service")
return 1
if __name__ == '__main__':
try:
exit_code = main()
sys.exit(exit_code)
except KeyboardInterrupt:
print("\n[STOP] Interrupted by user")
sys.exit(1)
except Exception as e:
print(f"\n[ERROR] Unexpected error occurred: {e}")
sys.exit(1)