diff --git a/README.md b/README.md
index abbdbcf..9ee5407 100644
--- a/README.md
+++ b/README.md
@@ -29,5 +29,5 @@ pip freeze > requirements.txt
 The S3 Recent File Cleaner is best run as a one-off Docker image. The image is already built, so it can be run with the following command.
 
 ```bash
-docker run --rm -e S3_ACCESS_KEY=your_access_key -e S3_SECRET_KEY=your_secret_key -e S3_ENDPOINT=your_endpoint liamsgit.dev/liampietralla/s3-recent-file-cleaner:latest
+docker run --rm -e S3_ACCESS_KEY=your_access_key -e S3_SECRET_KEY=your_secret_key -e S3_ENDPOINT=your_endpoint -e EXCLUDE_BUCKETS=your_excluded_buckets,another-bucket liamsgit.dev/liampietralla/s3-recent-file-cleaner:latest
 ```
\ No newline at end of file
diff --git a/main.py b/main.py
index ff77585..1287544 100644
--- a/main.py
+++ b/main.py
@@ -6,6 +6,7 @@ DEFAULT = ''
 S3_ENDPOINT = os.getenv('S3_ENDPOINT', DEFAULT)
 S3_ACCESS_KEY = os.getenv('S3_ACCESS_KEY', DEFAULT)
 S3_SECRET_KEY = os.getenv('S3_SECRET_KEY', DEFAULT)
+EXCLUDE_BUCKETS = os.getenv('EXCLUDE_BUCKETS', DEFAULT).split(',')
 DEFAULT_FILES_TO_KEEP = 5
 
 def validate_env_vars():
@@ -34,6 +35,10 @@ def clean_files():
     for bucket in buckets:
         try:
             bucket_name = bucket['Name']
+            # Skip excluded buckets
+            if bucket_name in EXCLUDE_BUCKETS:
+                print(f'Skipping excluded bucket: {bucket_name}')
+                continue
             print(f'Bucket: {bucket_name}')
 
             # Get all files in the bucket
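
A note on the parsing behaviour this change introduces: when `EXCLUDE_BUCKETS` is unset, `''.split(',')` returns `['']`, which matches no real bucket name, so the cleaner behaves exactly as before. One caveat is whitespace: `'a, b'.split(',')` keeps the leading space in `' b'`, so entries in the comma-separated value must not be padded. Below is a minimal, self-contained sketch of the exclusion logic; the hard-coded `buckets` list is illustrative only, standing in for what the real code presumably gets from the S3 API.

```python
import os

DEFAULT = ''

# Same parsing as main.py: a comma-separated env var becomes a list of names.
# If EXCLUDE_BUCKETS is unset, ''.split(',') yields [''], which matches no
# real bucket name, so nothing is skipped.
EXCLUDE_BUCKETS = os.getenv('EXCLUDE_BUCKETS', DEFAULT).split(',')

# Illustrative stand-in for the S3 response; not part of the actual codebase.
buckets = [{'Name': 'backups'}, {'Name': 'another-bucket'}, {'Name': 'logs'}]

for bucket in buckets:
    bucket_name = bucket['Name']
    # Skip excluded buckets, mirroring the check added in clean_files()
    if bucket_name in EXCLUDE_BUCKETS:
        print(f'Skipping excluded bucket: {bucket_name}')
        continue
    print(f'Bucket: {bucket_name}')
```

Running this with `EXCLUDE_BUCKETS=another-bucket,logs` prints the skip message for those two names and processes `backups`. If padded entries ever need to be tolerated, stripping each name (e.g. `[name.strip() for name in ...]`) would be a small hardening on top of the current split; this is a suggestion, not something the diff implements.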