added exclude buckets option
All checks were successful
Publish / Publish Docker Container (push) Successful in 2m27s
@@ -29,5 +29,5 @@ pip freeze > requirements.txt
The S3 Recent File Cleaner is best run as a once-off Docker image. The image is already built, so it can be run with the following commands.
```bash
docker run --rm -e S3_ACCESS_KEY=your_access_key -e S3_SECRET_KEY=your_secret_key -e S3_ENDPOINT=your_endpoint liamsgit.dev/liampietralla/s3-recent-file-cleaner:latest
docker run --rm -e S3_ACCESS_KEY=your_access_key -e S3_SECRET_KEY=your_secret_key -e S3_ENDPOINT=your_endpoint -e EXCLUDE_BUCKETS=your_excluded_buckets,another-bucket liamsgit.dev/liampietralla/s3-recent-file-cleaner:latest
```
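The EXCLUDE_BUCKETS value is a plain comma-separated list of bucket names, which main.py splits with `str.split(',')` (see the diff below). The sketch that follows only illustrates how different values come through that parsing; the bucket names are placeholders, not real buckets in the project.

```python
import os

DEFAULT = ''

# Mirrors the parsing added in main.py: the raw value is split on commas only.
os.environ['EXCLUDE_BUCKETS'] = 'backups,logs'  # placeholder bucket names
excluded = os.getenv('EXCLUDE_BUCKETS', DEFAULT).split(',')
print(excluded)  # ['backups', 'logs']

# Two edge cases of a plain split:
print('backups, logs'.split(','))  # ['backups', ' logs'] -- spaces are kept, so avoid them
print(''.split(','))               # [''] -- with the variable unset, no real bucket matches, so nothing is skipped
```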
main.py (5 additions)
@@ -6,6 +6,7 @@ DEFAULT = ''
S3_ENDPOINT = os.getenv('S3_ENDPOINT', DEFAULT)
S3_ACCESS_KEY = os.getenv('S3_ACCESS_KEY', DEFAULT)
S3_SECRET_KEY = os.getenv('S3_SECRET_KEY', DEFAULT)
EXCLUDE_BUCKETS = os.getenv('EXCLUDE_BUCKETS', DEFAULT).split(',')
DEFAULT_FILES_TO_KEEP = 5

def validate_env_vars():
@@ -34,6 +35,10 @@ def clean_files():
    for bucket in buckets:
        try:
            bucket_name = bucket['Name']
            # Skip excluded buckets
            if bucket_name in EXCLUDE_BUCKETS:
                print(f'Skipping excluded bucket: {bucket_name}')
                continue
            print(f'Bucket: {bucket_name}')

            # Get all files in the bucket
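For context, a minimal sketch of how the new check sits inside the cleaning loop. It assumes the project talks to S3 via boto3 (the `bucket['Name']` access matches its `list_buckets()` response); the client setup, the `clean_files_sketch` name, and the `except` handler are illustrative only, not the repository's actual code.

```python
import os

import boto3  # assumption: S3 access via boto3

EXCLUDE_BUCKETS = os.getenv('EXCLUDE_BUCKETS', '').split(',')

def clean_files_sketch():
    # Hypothetical client setup using the same env vars as the docker run example above.
    s3 = boto3.client(
        's3',
        endpoint_url=os.getenv('S3_ENDPOINT', ''),
        aws_access_key_id=os.getenv('S3_ACCESS_KEY', ''),
        aws_secret_access_key=os.getenv('S3_SECRET_KEY', ''),
    )
    buckets = s3.list_buckets()['Buckets']

    for bucket in buckets:
        try:
            bucket_name = bucket['Name']
            # The new check: excluded buckets are reported and skipped before any listing happens.
            if bucket_name in EXCLUDE_BUCKETS:
                print(f'Skipping excluded bucket: {bucket_name}')
                continue
            print(f'Bucket: {bucket_name}')
            # ... list the bucket's objects and remove the recent ones, as the real clean_files() does
        except Exception as exc:
            print(f'Failed to process bucket: {exc}')
```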