如果需要限制上传到 AWS S3 的文件大小,可以在上传前检查文件大小,超出限制时抛出错误;对于大文件,也可以改用分块上传(multipart upload)。以下是两种实现方法:
import os

import boto3
from botocore.config import Config
# AWS credentials and region used to build the S3 client below.
# NOTE(review): never hard-code real credentials in source; prefer
# environment variables, shared config files, or an IAM role.
AWS_ACCESS_KEY_ID = 'your_access_key_here'
AWS_SECRET_ACCESS_KEY = 'your_secret_access_key_here'
AWS_DEFAULT_REGION = 'your_default_region_here'
MAX_SIZE_MB = 50  # reject uploads larger than this many megabytes

# S3 client with SigV4 signing, a bounded connection pool, and
# up to 10 retry attempts for transient failures.
s3 = boto3.client(
    's3',
    aws_access_key_id=AWS_ACCESS_KEY_ID,
    aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
    region_name=AWS_DEFAULT_REGION,
    config=Config(signature_version='s3v4',
                  max_pool_connections=10,
                  retries=dict(max_attempts=10))
)
def upload_file_to_s3(file_path, bucket_name, file_key):
    """Upload a local file to S3, rejecting files larger than MAX_SIZE_MB.

    Args:
        file_path: Path of the local file to upload.
        bucket_name: Destination S3 bucket name.
        file_key: Object key under which the file is stored.

    Raises:
        ValueError: If the file exceeds MAX_SIZE_MB megabytes.
        OSError: If *file_path* does not exist or cannot be read.
    """
    # Check the size before touching the network so oversized files
    # fail fast. Requires `import os` at module level (the original
    # script only imported os further down, after this was called).
    size_mb = os.path.getsize(file_path) / (1024 * 1024)
    if size_mb > MAX_SIZE_MB:
        raise ValueError(f"文件过大,不能超过 {MAX_SIZE_MB} MB")
    # Stream the file object; upload_fileobj reads in chunks rather
    # than loading the whole file into memory.
    with open(file_path, 'rb') as file_data:
        s3.upload_fileobj(file_data, bucket_name, file_key)
# Example invocation when run as a script; replace the file path,
# bucket, and key with real values before use.
if __name__ == '__main__':
    upload_file_to_s3('myfile.jpg', 'mybucket', 'myfolder/myfile.jpg')
import boto3
import os
from boto3.s3.transfer import TransferConfig
# AWS credentials and region for the multipart-upload example.
# NOTE(review): never hard-code real credentials in source; prefer
# environment variables, shared config files, or an IAM role.
AWS_ACCESS_KEY_ID = 'your_access_key_here'
AWS_SECRET_ACCESS_KEY = 'your_secret_access_key_here'
AWS_DEFAULT_REGION = 'your_default_region_here'
MAX_SIZE_MB = 50  # files larger than this (MB) are sent via multipart upload

# Plain S3 client; transfer behavior is tuned per-call via TransferConfig.
s3 = boto3.client(
    's3',
    aws_access_key_id=AWS_ACCESS_KEY_ID,
    aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
    region_name=AWS_DEFAULT_REGION
)
# 通过分块上传(multipart upload)处理大文件:超过阈值(MAX_SIZE_MB)的文件会被自动分成 5MB 的块并发上传
def multipart_upload(bucket_name, file_key, file_path):
    """Upload *file_path* to *bucket_name* under key *file_key*.

    Files larger than MAX_SIZE_MB megabytes are transferred with S3
    multipart upload in 5 MB chunks, using up to 10 worker threads;
    smaller files are sent as a single PUT.
    """
    transfer_settings = TransferConfig(
        use_threads=True,
        max_concurrency=10,
        multipart_chunksize=5 * 1024 * 1024,
        multipart_threshold=MAX_SIZE_MB * 1024 * 1024,
    )
    s3.upload_file(file_path, bucket_name, file_key, Config=transfer_settings)
# Example invocation: note the argument order is (bucket, key, local path).
if __name__ == '__main__':
    multipart_upload('mybucket', 'myfolder/myfile.jpg', 'myfile.jpg')