fix: OPTIC-1785: Validate S3 bucket name on storage connection form #7183

Open · wants to merge 1 commit into develop · Changes from all commits
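The diff below reuses botocore's validate_bucket_name handler to validate the bucket field on the storage connection form. As a minimal sketch of how that helper behaves on its own (illustrative bucket names; assumes only that botocore is installed):

from botocore.exceptions import ParamValidationError
from botocore.handlers import validate_bucket_name

for name in ('pytest-data', 'not a valid/bucket'):
    try:
        # botocore expects the request-parameter dict shape it normally sees
        validate_bucket_name({'Bucket': name})
        print(name, '-> ok')
    except ParamValidationError as exc:
        # exc.kwargs['report'] holds the human-readable reason; the serializer
        # change below re-raises it as a DRF ValidationError so the form can show it
        print(name, '->', exc.kwargs['report'])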
39 changes: 23 additions & 16 deletions label_studio/io_storages/s3/serializers.py
@@ -3,29 +3,33 @@
 import os
 
 from botocore.exceptions import ClientError, ParamValidationError
+from botocore.handlers import validate_bucket_name
 from io_storages.s3.models import S3ExportStorage, S3ImportStorage
 from io_storages.serializers import ExportStorageSerializer, ImportStorageSerializer
 from rest_framework import serializers
 from rest_framework.exceptions import ValidationError
 
 
-class S3ImportStorageSerializer(ImportStorageSerializer):
-    type = serializers.ReadOnlyField(default=os.path.basename(os.path.dirname(__file__)))
-    presign = serializers.BooleanField(required=False, default=True)
+class S3StorageSerializerMixin:
     secure_fields = ['aws_access_key_id', 'aws_secret_access_key']
 
-    class Meta:
-        model = S3ImportStorage
-        fields = '__all__'
-
     def to_representation(self, instance):
         result = super().to_representation(instance)
-        for attr in S3ImportStorageSerializer.secure_fields:
+        for attr in self.secure_fields:
            result.pop(attr)
         return result
 
+    def validate_bucket(self, value):
+        if not value:
+            return value
+        try:
+            validate_bucket_name({'Bucket': value})
+        except ParamValidationError as exc:
+            raise ValidationError(exc.kwargs['report']) from exc
+        return value
+
     def validate(self, data):
-        data = super(S3ImportStorageSerializer, self).validate(data)
+        data = super().validate(data)
         if not data.get('bucket', None):
             return data
 

[Codecov / codecov/patch: added lines 24 and 27-28 of the new file (the empty-bucket early return and the exception branch in validate_bucket) were not covered by tests.]
@@ -36,7 +40,7 @@
         else:
             if 'id' in self.initial_data:
                 storage_object = self.Meta.model.objects.get(id=self.initial_data['id'])
-                for attr in S3ImportStorageSerializer.secure_fields:
+                for attr in self.secure_fields:
                     data[attr] = data.get(attr) or getattr(storage_object, attr)
         storage = self.Meta.model(**data)
         try:
@@ -63,14 +67,17 @@
         return data
 
 
-class S3ExportStorageSerializer(ExportStorageSerializer):
+class S3ImportStorageSerializer(S3StorageSerializerMixin, ImportStorageSerializer):
     type = serializers.ReadOnlyField(default=os.path.basename(os.path.dirname(__file__)))
+    presign = serializers.BooleanField(required=False, default=True)
 
-    def to_representation(self, instance):
-        result = super().to_representation(instance)
-        result.pop('aws_access_key_id')
-        result.pop('aws_secret_access_key')
-        return result
+    class Meta:
+        model = S3ImportStorage
+        fields = '__all__'
+
+
+class S3ExportStorageSerializer(S3StorageSerializerMixin, ExportStorageSerializer):
+    type = serializers.ReadOnlyField(default=os.path.basename(os.path.dirname(__file__)))
+
     class Meta:
         model = S3ExportStorage
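Since validate_bucket is a DRF field-level hook, it runs during is_valid() before the object-level validate() that attempts the S3 connection, so a malformed bucket name is rejected without contacting AWS. A rough sketch of how that could look from a test, assuming a configured Label Studio/Django environment and an entirely hypothetical payload:

from io_storages.s3.serializers import S3ImportStorageSerializer

serializer = S3ImportStorageSerializer(data={
    'project': 1,                          # hypothetical project id
    'bucket': 'not a valid/bucket',        # rejected by validate_bucket()
    'aws_access_key_id': 'AKIA...',        # placeholder credentials
    'aws_secret_access_key': 'secret',
})
assert not serializer.is_valid()
print(serializer.errors['bucket'])         # botocore's validation report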