diff --git a/.github/workflows/dev-deploy.yml b/.github/workflows/dev-deploy.yml
index 2ae2a3e7..a1e0d2b7 100644
--- a/.github/workflows/dev-deploy.yml
+++ b/.github/workflows/dev-deploy.yml
@@ -2,7 +2,7 @@ name: Dev Deployment
 
 on:
   push:
-    branches: [ develop, feat/enhance-recommendation ]
+    branches: [ develop, feat/migrate-on-premise ]
 
 jobs:
   build:
@@ -30,7 +30,7 @@ jobs:
           password: ${{ secrets.DOCKER_PASSWORD }}
 
       - name: Build Docker image
-        run: docker build -t devsolfe/server:latest .
+        run: docker buildx build --platform linux/arm64 -t devsolfe/server:latest .
 
       - name: Push Docker image to Docker Hub
        run: docker push devsolfe/server:latest
@@ -47,7 +47,7 @@ jobs:
        with:
          host: ${{ secrets.SERVER_HOST }}
          username: ${{ secrets.SSH_USERNAME }}
-         key: ${{ secrets.SSH_KEY }}
+         password: ${{ secrets.SSH_KEY }}
          port: ${{ secrets.PORT }}
          script: |
            docker pull devsolfe/server:latest
@@ -63,15 +63,20 @@ jobs:
 
            docker run -d \
              --name server \
+             --restart unless-stopped \
+             --network npm_nginx-proxy \
              -e SPRING_PROFILES_ACTIVE=dev \
              -e 'SENTRY_AUTH_TOKEN=${{ secrets.SENTRY_AUTH_TOKEN }}' \
              -e SENTRY_DSN=${{ vars.SENTRY_DSN }} \
              -e 'MONGODB_URI_DEV=${{ secrets.MONGODB_URI_DEV }}' \
              -e MONGODB_DATABASE_DEV=${{ secrets.MONGODB_DATABASE_DEV }} \
              -e S3_REGION=${{ secrets.S3_REGION }} \
-             -e S3_STATIC_NAME=${{ secrets.S3_STATIC_NAME }} \
              -e S3_AI_INPUT_NAME=${{ vars.S3_AI_INPUT_NAME }} \
              -e S3_AI_OUTPUT_NAME=${{ vars.S3_AI_OUTPUT_NAME }} \
+             -e CF_R2_ENDPOINT=${{ vars.CF_R2_ENDPOINT }} \
+             -e CF_R2_ACCESS_KEY=${{ secrets.CF_R2_ACCESS_KEY }} \
+             -e CF_R2_SECRET_KEY=${{ secrets.CF_R2_SECRET_KEY }} \
+             -e CF_R2_STATIC_BUCKET=${{ vars.CF_R2_STATIC_BUCKET }} \
              -e IMPORT_API_KEY=${{ secrets.IMPORT_API_KEY }} \
              -e AWS_ACCESS_KEY=${{ secrets.AWS_ACCESS_KEY }} \
              -e AWS_SECRET_KEY=${{ secrets.AWS_SECRET_KEY }} \
@@ -81,7 +86,7 @@ jobs:
              -e FIREBASE_CONFIG_BASE64=${{ secrets.FIREBASE_CONFIG_BASE64 }} \
              -e SWAGGER_USERNAME=${{ secrets.SWAGGER_USERNAME }} \
              -e SWAGGER_PASSWORD=${{ secrets.SWAGGER_PASSWORD }} \
-             -p 80:8080 \
+             -p 8080:8080 \
              devsolfe/server
 
            docker image prune -f
\ No newline at end of file
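
Reviewer note, not part of the patch: the dev container now receives its R2 connection details only through the `-e CF_R2_*` flags above, so a mistyped secret or endpoint would only surface when the first static-file request reaches R2. A small startup smoke check along the following lines would make the container fail immediately instead. This is a sketch under assumptions: the class name and the `@Profile("dev")` placement are invented, and it relies on the `s3StaticClient` and `staticBucketName` beans declared in `S3Config.java` later in this diff.

```java
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.ApplicationArguments;
import org.springframework.boot.ApplicationRunner;
import org.springframework.context.annotation.Profile;
import org.springframework.stereotype.Component;
import software.amazon.awssdk.services.s3.S3Client;
import software.amazon.awssdk.services.s3.model.HeadBucketRequest;

// Hypothetical class, not in the PR: fails fast at startup if the CF_R2_* values are wrong.
@Component
@Profile("dev")
public class R2StartupCheck implements ApplicationRunner {

    private final S3Client s3StaticClient;
    private final String staticBucketName;

    public R2StartupCheck(@Qualifier("s3StaticClient") S3Client s3StaticClient,
                          @Qualifier("staticBucketName") String staticBucketName) {
        this.s3StaticClient = s3StaticClient;
        this.staticBucketName = staticBucketName;
    }

    @Override
    public void run(ApplicationArguments args) {
        // HeadBucket is cheap; it throws if the endpoint, key pair, or bucket name is invalid,
        // so `docker run` exits right away instead of serving a half-broken container.
        s3StaticClient.headBucket(HeadBucketRequest.builder().bucket(staticBucketName).build());
    }
}
```
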
diff --git a/.github/workflows/prod-deploy.yml b/.github/workflows/prod-deploy.yml
deleted file mode 100644
index cee3a959..00000000
--- a/.github/workflows/prod-deploy.yml
+++ /dev/null
@@ -1,75 +0,0 @@
-name: Prod Deployment
-
-on:
-  push:
-    branches: [ main ]
-
-jobs:
-  deploy:
-    name: Deploy
-    runs-on: ubuntu-latest
-    environment: production
-
-    env:
-      AWS_REGION: us-east-1
-      ECR_REGISTRY: ${{ vars.ECR_REGISTRY }}
-      ECR_REPOSITORY: ${{ vars.ECR_REPOSITORY }}
-      ECS_SERVICE: ${{ vars.ECS_SERVICE }}
-      ECS_CLUSTER: ${{ vars.ECS_CLUSTER }}
-      ECS_TASK_DEFINITION: ./task-definition.json
-      CONTAINER_NAME: api-spring-prod
-
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v3
-
-      - name: Configure AWS credentials
-        uses: aws-actions/configure-aws-credentials@v1
-        with:
-          aws-access-key-id: ${{ secrets.AWS_ECR_IAM_ACCESS_KEY }}
-          aws-secret-access-key: ${{ secrets.AWS_ECR_IAM_SECRET_KEY }}
-          aws-region: ${{ env.AWS_REGION }}
-
-      - name: Login to Amazon ECR
-        id: login-ecr
-        uses: aws-actions/amazon-ecr-login@v1
-        with:
-          mask-password: 'true'
-
-      - name: Set up JDK 17
-        uses: actions/setup-java@v3
-        with:
-          java-version: '17'
-          distribution: 'adopt'
-
-      - name: Build with Gradle
-        run: ./gradlew build -x test
-
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
-
-      - name: Build, tag, and push image to Amazon ECR
-        id: build-image
-        env:
-          ECR_REGISTRY: ${{ steps.login-ecr.outputs.registry }}
-          IMAGE_TAG: ${{ github.sha }}
-        working-directory: .
-        run: |
-          docker buildx build --platform linux/arm64 --push -t $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG .
-          echo "::set-output name=image::$ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG"
-
-      - name: Fill in the new image ID in the Amazon ECS task definition
-        id: task-def
-        uses: aws-actions/amazon-ecs-render-task-definition@v1
-        with:
-          task-definition: ${{ env.ECS_TASK_DEFINITION }}
-          container-name: ${{ env.CONTAINER_NAME }}
-          image: ${{ steps.build-image.outputs.image }}
-
-      - name: Deploy Amazon ECS task definition
-        uses: aws-actions/amazon-ecs-deploy-task-definition@v1
-        with:
-          task-definition: ${{ steps.task-def.outputs.task-definition }}
-          service: ${{ env.ECS_SERVICE }}
-          cluster: ${{ env.ECS_CLUSTER }}
-          wait-for-service-stability: true
\ No newline at end of file
diff --git a/src/main/java/com/linglevel/api/auth/filter/TestAuthFilter.java b/src/main/java/com/linglevel/api/auth/filter/TestAuthFilter.java
index ec8c5c61..d146342d 100644
--- a/src/main/java/com/linglevel/api/auth/filter/TestAuthFilter.java
+++ b/src/main/java/com/linglevel/api/auth/filter/TestAuthFilter.java
@@ -20,7 +20,7 @@ import java.util.Optional;
 
 @Component
-@Profile({"dev", "local"})
+@Profile({"local"})
 public class TestAuthFilter extends OncePerRequestFilter {
 
     private final UserRepository userRepository;
 
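Suggestion, not in the diff: narrowing `@Profile` to `local` means `TestAuthFilter` silently disappears from the dev context, which is the intent here but easy to regress later. A context test such as the sketch below would pin that behaviour down. The test class name is invented, and it assumes the application context can start under the `dev` profile in the test environment (i.e. the required environment variables are available to the test run).

```java
import static org.assertj.core.api.Assertions.assertThat;

import com.linglevel.api.auth.filter.TestAuthFilter;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.context.ApplicationContext;
import org.springframework.test.context.ActiveProfiles;

// Hypothetical test: verifies the test-only filter is no longer registered outside "local".
@SpringBootTest
@ActiveProfiles("dev")
class TestAuthFilterProfileTest {

    @Autowired
    private ApplicationContext context;

    @Test
    void testAuthFilterIsAbsentInDevProfile() {
        assertThat(context.getBeanNamesForType(TestAuthFilter.class)).isEmpty();
    }
}
```
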
diff --git a/src/main/java/com/linglevel/api/s3/config/S3Config.java b/src/main/java/com/linglevel/api/s3/config/S3Config.java
index 75c8b139..ad4e5a38 100644
--- a/src/main/java/com/linglevel/api/s3/config/S3Config.java
+++ b/src/main/java/com/linglevel/api/s3/config/S3Config.java
@@ -8,9 +8,12 @@ import software.amazon.awssdk.regions.Region;
 import software.amazon.awssdk.services.s3.S3Client;
 
+import java.net.URI;
+
 @Configuration
 public class S3Config {
 
+    // AWS S3 Configuration (for AI buckets)
     @Value("${aws.s3.region}")
     private String region;
 
@@ -20,23 +23,51 @@ public class S3Config {
     @Value("${aws.secret-key}")
     private String secretKey;
 
-    @Value("${aws.s3.static.bucket}")
-    private String staticBucketName;
-
     @Value("${aws.s3.ai.input.bucket}")
     private String aiInputBucketName;
 
     @Value("${aws.s3.ai.output.bucket}")
     private String aiOutputBucketName;
 
+    // Cloudflare R2 Configuration (for Static files)
+    @Value("${cf.r2.endpoint}")
+    private String r2Endpoint;
+
+    @Value("${cf.r2.access-key}")
+    private String r2AccessKey;
+
+    @Value("${cf.r2.secret-key}")
+    private String r2SecretKey;
+
+    @Value("${cf.r2.static.bucket}")
+    private String r2StaticBucketName;
+
+    /**
+     * AWS S3 client (for the AI input/output buckets).
+     */
     @Bean("s3AiClient")
     public S3Client s3AiClient() {
-        return createS3Client();
+        AwsBasicCredentials credentials = AwsBasicCredentials.create(accessKey, secretKey);
+
+        return S3Client.builder()
+                .region(Region.of(region))
+                .credentialsProvider(StaticCredentialsProvider.create(credentials))
+                .build();
     }
 
+    /**
+     * Cloudflare R2 client (for static files).
+     * R2 exposes an S3-compatible API, so only the endpoint needs to change.
+     */
     @Bean("s3StaticClient")
     public S3Client s3StaticClient() {
-        return createS3Client();
+        AwsBasicCredentials credentials = AwsBasicCredentials.create(r2AccessKey, r2SecretKey);
+
+        return S3Client.builder()
+                .endpointOverride(URI.create(r2Endpoint))
+                .region(Region.of("auto")) // R2 uses the "auto" region
+                .credentialsProvider(StaticCredentialsProvider.create(credentials))
+                .build();
     }
 
     @Bean("aiInputBucketName")
@@ -51,15 +82,6 @@ public String aiOutputBucketName() {
 
     @Bean("staticBucketName")
     public String staticBucketName() {
-        return staticBucketName;
-    }
-
-    private S3Client createS3Client() {
-        AwsBasicCredentials credentials = AwsBasicCredentials.create(accessKey, secretKey);
-
-        return S3Client.builder()
-                .region(Region.of(region))
-                .credentialsProvider(StaticCredentialsProvider.create(credentials))
-                .build();
+        return r2StaticBucketName;
     }
 }
\ No newline at end of file
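For context (not part of the patch): because R2 is reached through the standard AWS SDK `S3Client`, call sites keep the same API and only the injected bean's endpoint changes. A minimal consumer might look like the sketch below; the service name and method are hypothetical, while the `s3StaticClient` and `staticBucketName` qualifiers come from the `S3Config` above.

```java
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Service;
import software.amazon.awssdk.core.sync.RequestBody;
import software.amazon.awssdk.services.s3.S3Client;
import software.amazon.awssdk.services.s3.model.PutObjectRequest;

// Hypothetical consumer of the R2-backed client; not part of this diff.
@Service
public class StaticFileUploader {

    private final S3Client s3StaticClient;
    private final String staticBucketName;

    public StaticFileUploader(@Qualifier("s3StaticClient") S3Client s3StaticClient,
                              @Qualifier("staticBucketName") String staticBucketName) {
        this.s3StaticClient = s3StaticClient;
        this.staticBucketName = staticBucketName;
    }

    public void upload(String key, byte[] content, String contentType) {
        // Same AWS SDK call as before; only the client's endpoint now points at R2.
        PutObjectRequest request = PutObjectRequest.builder()
                .bucket(staticBucketName)
                .key(key)
                .contentType(contentType)
                .build();

        s3StaticClient.putObject(request, RequestBody.fromBytes(content));
    }
}
```
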
diff --git a/src/main/resources/application-dev.properties b/src/main/resources/application-dev.properties
index e0d42c60..5ac61eb9 100644
--- a/src/main/resources/application-dev.properties
+++ b/src/main/resources/application-dev.properties
@@ -6,12 +6,11 @@ spring.data.mongodb.database=${MONGODB_DATABASE_DEV}
 # Redis Configuration
 spring.data.redis.host=${REDIS_ENDPOINT_DEV}
 spring.data.redis.port=6379
-spring.data.redis.ssl.enabled=true
 
 # Logging for Dev Environment
-logging.level.com.linglevel.api=DEBUG
+logging.level.com.linglevel.api=INFO
 logging.level.org.springframework.data.mongodb=WARN
-logging.level.root=DEBUG
+logging.level.root=INFO
 
 # Swagger UI Configuration
 springdoc.swagger-ui.enabled=true
@@ -21,6 +20,3 @@ swagger.servers=https://dev.linglevel.com
 # Swagger Login Configuration
 api.docs.user.username=${SWAGGER_USERNAME}
 api.docs.user.password=${SWAGGER_PASSWORD}
-
-# S3 URL
-aws.s3.static.url=https://s3.${S3_REGION}.amazonaws.com/${S3_STATIC_NAME}
\ No newline at end of file
diff --git a/src/main/resources/application-local.properties b/src/main/resources/application-local.properties
index ea63eee1..c104d73b 100644
--- a/src/main/resources/application-local.properties
+++ b/src/main/resources/application-local.properties
@@ -23,6 +23,3 @@ swagger.servers=http://localhost:8080
 # Swagger Login Configuration
 api.docs.user.username=${SWAGGER_USERNAME}
 api.docs.user.password=${SWAGGER_PASSWORD}
-
-# S3 URL
-aws.s3.static.url=https://s3.${S3_REGION}.amazonaws.com/${S3_STATIC_NAME}
\ No newline at end of file
diff --git a/src/main/resources/application-prod.properties b/src/main/resources/application-prod.properties
index 77f6ec19..de5469c7 100644
--- a/src/main/resources/application-prod.properties
+++ b/src/main/resources/application-prod.properties
@@ -6,7 +6,6 @@ spring.data.mongodb.database=${MONGODB_DATABASE_PROD}
 # Redis Configuration
 spring.data.redis.host=${REDIS_ENDPOINT_PROD}
 spring.data.redis.port=6379
-spring.data.redis.ssl.enabled=true
 
 # Logging for Production Environment
 logging.level.com.linglevel.api=INFO
@@ -17,6 +16,3 @@ logging.level.root=INFO
 springdoc.swagger-ui.enabled=false
 springdoc.api-docs.enabled=false
 swagger.servers=https://api.linglevel.com
-
-# S3 URL
-aws.s3.static.url=https://static.linglevel.com
\ No newline at end of file
diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties
index 9b3f0c07..b3f13192 100644
--- a/src/main/resources/application.properties
+++ b/src/main/resources/application.properties
@@ -51,11 +51,19 @@ firebase.config=${FIREBASE_CONFIG_BASE64}
 # Redis
 spring.data.redis.ssl.enabled=false
 
-# S3
+# AWS S3 (AI Input/Output buckets)
 aws.s3.region=${S3_REGION}
 aws.s3.ai.input.bucket=${S3_AI_INPUT_NAME}
 aws.s3.ai.output.bucket=${S3_AI_OUTPUT_NAME}
-aws.s3.static.bucket=${S3_STATIC_NAME}
+
+# Cloudflare R2 (Static files)
+cf.r2.endpoint=${CF_R2_ENDPOINT}
+cf.r2.access-key=${CF_R2_ACCESS_KEY}
+cf.r2.secret-key=${CF_R2_SECRET_KEY}
+cf.r2.static.bucket=${CF_R2_STATIC_BUCKET}
+
+# R2 Public URL (Common for all environments)
+aws.s3.static.url=https://static.linglevel.com
 
 # Sentry
 sentry.dsn=${SENTRY_DSN}
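
One more illustration, not in the patch: `aws.s3.static.url` is now a single custom-domain base URL (`https://static.linglevel.com`) shared by every environment instead of a per-environment S3 URL, so public links to static files can be built the same way everywhere. A hypothetical helper:

```java
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

// Hypothetical helper, not in this diff: builds public links from the shared static base URL.
@Component
public class StaticUrlResolver {

    private final String staticBaseUrl;

    public StaticUrlResolver(@Value("${aws.s3.static.url}") String staticBaseUrl) {
        this.staticBaseUrl = staticBaseUrl;
    }

    public String resolve(String objectKey) {
        // e.g. resolve("images/logo.png") -> https://static.linglevel.com/images/logo.png
        String key = objectKey.startsWith("/") ? objectKey.substring(1) : objectKey;
        return staticBaseUrl + "/" + key;
    }
}
```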