Add Docker infrastructure for local services
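Brings up Redis, MongoDB, and MinIO via docker-compose, adds worker and
scheduler containers that wrap the existing Lambda handlers in simple
polling loops, and teaches the deploy script a --local flag that starts
the stack with `docker compose up -d` instead of deploying to AWS.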

2025-06-02 14:04:18 +02:00
parent 6f5366e040
commit b40bd4b44b
6 changed files with 147 additions and 1 deletion

80
docker-compose.yml Normal file

@@ -0,0 +1,80 @@
version: '3.8'

services:
  redis:
    image: redis:7
    ports:
      - "6379:6379"
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 10s
      timeout: 5s
      retries: 5

  mongodb:
    image: mongo:7
    ports:
      - "27017:27017"
    volumes:
      - mongo-data:/data/db
    healthcheck:
      # mongo:7 ships mongosh; the legacy `mongo` shell was removed in 6.0
      test: ["CMD", "mongosh", "--eval", "db.adminCommand('ping')"]
      interval: 10s
      timeout: 5s
      retries: 5

  minio:
    image: minio/minio
    command: server /data --console-address ":9001"
    environment:
      # MINIO_ACCESS_KEY/MINIO_SECRET_KEY are deprecated on the server side;
      # current images expect MINIO_ROOT_USER/MINIO_ROOT_PASSWORD
      MINIO_ROOT_USER: minioadmin
      MINIO_ROOT_PASSWORD: minioadmin
    ports:
      - "9000:9000"
      - "9001:9001"
    volumes:
      - minio-data:/data
    healthcheck:
      # newer minio images no longer bundle curl; `mc ready local` is the
      # documented in-container healthcheck
      test: ["CMD", "mc", "ready", "local"]
      interval: 30s
      timeout: 20s
      retries: 5

  worker:
    build:
      context: .
      dockerfile: docker/worker/Dockerfile
    depends_on:
      redis:
        condition: service_healthy
      mongodb:
        condition: service_healthy
      minio:
        condition: service_healthy
    environment:
      REDIS_URL: redis://redis:6379
      REDIS_QUEUE_NAME: rss-feed-queue
      MONGODB_URL: mongodb://mongodb:27017
      MONGODB_DB_NAME: ingestrss
      MONGODB_COLLECTION_NAME: rss_feeds
      MINIO_ENDPOINT: http://minio:9000
      MINIO_ACCESS_KEY: minioadmin
      MINIO_SECRET_KEY: minioadmin
      MINIO_BUCKET: ingestrss
      STORAGE_STRATEGY: s3
      LOG_LEVEL: INFO

  scheduler:
    build:
      context: .
      dockerfile: docker/scheduler/Dockerfile
    depends_on:
      redis:
        condition: service_healthy
      mongodb:
        condition: service_healthy
    environment:
      REDIS_URL: redis://redis:6379
      REDIS_QUEUE_NAME: rss-feed-queue
      MONGODB_URL: mongodb://mongodb:27017
      MONGODB_DB_NAME: ingestrss
      MONGODB_COLLECTION_NAME: rss_feeds
      LOG_LEVEL: INFO

volumes:
  mongo-data:
  minio-data:
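
Once the stack is healthy, the published ports make the services reachable from the host. A minimal connectivity check, as a sketch only (it assumes redis-py, pymongo, and boto3 are installed; nothing below is part of this commit):

    import os
    import boto3
    import redis
    from pymongo import MongoClient

    # Same variables the compose file injects into the containers, with
    # host-side defaults pointing at the published ports.
    r = redis.Redis.from_url(os.getenv("REDIS_URL", "redis://localhost:6379"))
    mongo = MongoClient(os.getenv("MONGODB_URL", "mongodb://localhost:27017"))
    s3 = boto3.client(
        "s3",
        endpoint_url=os.getenv("MINIO_ENDPOINT", "http://localhost:9000"),  # MinIO speaks the S3 API
        aws_access_key_id=os.getenv("MINIO_ACCESS_KEY", "minioadmin"),
        aws_secret_access_key=os.getenv("MINIO_SECRET_KEY", "minioadmin"),
    )

    print(r.ping())                      # True once redis is up
    print(mongo.admin.command("ping"))   # {'ok': 1.0}
    print(s3.list_buckets()["Buckets"])  # [] on a fresh MinIO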

6
docker/scheduler/Dockerfile Normal file

@@ -0,0 +1,6 @@
FROM python:3.12-slim
WORKDIR /app
COPY requirements.txt ./
RUN pip install --no-cache-dir -r requirements.txt
COPY . .
CMD ["python", "local_services/scheduler.py"]

6
docker/worker/Dockerfile Normal file

@@ -0,0 +1,6 @@
FROM python:3.12-slim
WORKDIR /app
COPY requirements.txt ./
RUN pip install --no-cache-dir -r requirements.txt
COPY . .
CMD ["python", "local_services/worker.py"]


@@ -1,6 +1,7 @@
 import os
 import sys
 import json
+import subprocess
 import boto3
 from dotenv import load_dotenv
 import logging
@@ -28,6 +29,10 @@ from src.infra.lambdas.lambda_utils.update_lambda_env_vars import update_env_var
 from src.feed_management.upload_rss_feeds import upload_rss_feeds
 
 def main():
+    if "--local" in sys.argv:
+        subprocess.run(["docker", "compose", "up", "-d"], check=False)
+        return
+
     # Deploy infrastructure
     deploy_infrastructure()
     logging.info("Finished Deploying Infrastructure")
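
With check=False, a failed `docker compose up` is silently ignored and the script still exits successfully. If the script should instead abort when Docker is unavailable, a stricter variant (an assumption, not part of this commit) would be:

    # check=True raises subprocess.CalledProcessError on a non-zero exit code
    subprocess.run(["docker", "compose", "up", "-d"], check=True)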

27
local_services/scheduler.py Normal file

@@ -0,0 +1,27 @@
import os
import time
import logging
import importlib

# `lambda` is a reserved word in Python, so the package path cannot be
# imported with a plain import statement; resolve it dynamically instead.
handler = importlib.import_module(
    'src.infra.lambdas.RSSQueueFiller.lambda.lambda_function'
).handler

logging.basicConfig(level=os.getenv("LOG_LEVEL", "INFO"))
logger = logging.getLogger(__name__)

INTERVAL_MINUTES = int(os.getenv("SCHEDULER_INTERVAL_MINUTES", "240"))


def main():
    logger.info("Starting scheduler loop")
    while True:
        try:
            handler({}, None)
        except Exception as exc:
            logger.exception("Scheduler job failed: %s", exc)
        time.sleep(INTERVAL_MINUTES * 60)


if __name__ == "__main__":
    main()

22
local_services/worker.py Normal file

@@ -0,0 +1,22 @@
import os
import time
import logging

# Reuse the RSS feed processor Lambda's entry point as the worker body.
from src.infra.lambdas.RSSFeedProcessorLambda.src.lambda_function import lambda_handler

logging.basicConfig(level=os.getenv("LOG_LEVEL", "INFO"))
logger = logging.getLogger(__name__)

SLEEP_SECONDS = int(os.getenv("WORKER_SLEEP_SECONDS", "5"))


def main():
    logger.info("Starting worker loop")
    while True:
        try:
            lambda_handler({}, None)
        except Exception as exc:
            logger.exception("Worker iteration failed: %s", exc)
        time.sleep(SLEEP_SECONDS)


if __name__ == "__main__":
    main()
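
Both loops reuse the unmodified Lambda entry points, so local behaviour tracks AWS: the scheduler enqueues feeds every SCHEDULER_INTERVAL_MINUTES (240 by default, i.e. every four hours) and the worker polls every WORKER_SLEEP_SECONDS (5 by default); both can be overridden through the container environment.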