Revert "build: remove boto old version (#31282)" (#31733)

This reverts commit 14e6040141.
This commit is contained in:
Usama Sadiq
2023-02-09 18:36:59 +05:00
committed by GitHub
parent 14e6040141
commit 997f194669
13 changed files with 25 additions and 25 deletions

View File

@@ -5,11 +5,11 @@ Storage backend for course import and export.
from django.conf import settings
from django.core.files.storage import get_storage_class
-from storages.backends.s3boto3 import S3Boto3Storage
+from storages.backends.s3boto import S3BotoStorage
from storages.utils import setting
-class ImportExportS3Storage(S3Boto3Storage): # pylint: disable=abstract-method
+class ImportExportS3Storage(S3BotoStorage): # pylint: disable=abstract-method
"""
S3 backend for course import and export OLX files.
"""

View File

@@ -25,6 +25,7 @@ from django.test.utils import override_settings
from milestones.tests.utils import MilestonesTestCaseMixin
from opaque_keys.edx.locator import LibraryLocator
from path import Path as path
+from storages.backends.s3boto import S3BotoStorage
from storages.backends.s3boto3 import S3Boto3Storage
from user_tasks.models import UserTaskStatus
@@ -957,7 +958,7 @@ class ExportTestCase(CourseTestCase):
"""
Verify that the export status handler generates the correct export path
for storage providers other than ``FileSystemStorage`` and
-``S3Boto3Storage``
+``S3BotoStorage``
"""
mock_latest_task_status.return_value = Mock(state=UserTaskStatus.SUCCEEDED)
mock_get_user_task_artifact.return_value = self._mock_artifact(
@@ -967,7 +968,7 @@ class ExportTestCase(CourseTestCase):
result = json.loads(resp.content.decode('utf-8'))
self.assertEqual(result['ExportOutput'], '/path/to/testfile.tar.gz')
-@ddt.data(S3Boto3Storage)
+@ddt.data(S3BotoStorage, S3Boto3Storage)
@patch('cms.djangoapps.contentstore.views.import_export._latest_task_status')
@patch('user_tasks.models.UserTaskArtifact.objects.get')
def test_export_status_handler_s3(
@@ -978,7 +979,7 @@ class ExportTestCase(CourseTestCase):
):
"""
Verify that the export status handler generates the correct export path
-for the ``S3Boto3Storage`` storage provider
+for the ``S3BotoStorage`` storage provider
"""
mock_latest_task_status.return_value = Mock(state=UserTaskStatus.SUCCEEDED)
mock_get_user_task_artifact.return_value = self._mock_artifact(

View File

@@ -5,10 +5,10 @@ Storage backend for course metadata export.
from django.conf import settings
from django.core.files.storage import get_storage_class
-from storages.backends.s3boto3 import S3Boto3Storage
+from storages.backends.s3boto import S3BotoStorage
-class CourseMetadataExportS3Storage(S3Boto3Storage): # pylint: disable=abstract-method
+class CourseMetadataExportS3Storage(S3BotoStorage): # pylint: disable=abstract-method
"""
S3 backend for course metadata export
"""

View File

@@ -719,7 +719,7 @@ derived_collection_entry('TEMPLATES', 1, 'DIRS')
DEFAULT_TEMPLATE_ENGINE = TEMPLATES[0]
#################################### AWS #######################################
-# S3Boto3Storage insists on a timeout for uploaded assets. We should make it
+# S3BotoStorage insists on a timeout for uploaded assets. We should make it
# permanent instead, but rather than trying to figure out exactly where that
# setting is, I'm just bumping the expiration time to something absurd (100
# years). This is only used if DEFAULT_FILE_STORAGE is overriden to use S3
@@ -2436,7 +2436,7 @@ VIDEO_IMAGE_SETTINGS = dict(
VIDEO_IMAGE_MAX_BYTES=2 * 1024 * 1024, # 2 MB
VIDEO_IMAGE_MIN_BYTES=2 * 1024, # 2 KB
# Backend storage
-# STORAGE_CLASS='storages.backends.s3boto3.S3Boto3Storage',
+# STORAGE_CLASS='storages.backends.s3boto.S3BotoStorage',
# STORAGE_KWARGS=dict(bucket='video-image-bucket'),
STORAGE_KWARGS=dict(
location=MEDIA_ROOT,
@@ -2451,7 +2451,7 @@ VIDEO_IMAGE_MAX_AGE = 31536000
VIDEO_TRANSCRIPTS_SETTINGS = dict(
VIDEO_TRANSCRIPTS_MAX_BYTES=3 * 1024 * 1024, # 3 MB
# Backend storage
-# STORAGE_CLASS='storages.backends.s3boto3.S3Boto3Storage',
+# STORAGE_CLASS='storages.backends.s3boto.S3BotoStorage',
# STORAGE_KWARGS=dict(bucket='video-transcripts-bucket'),
STORAGE_KWARGS=dict(
location=MEDIA_ROOT,

View File

@@ -338,7 +338,7 @@ AWS_S3_CUSTOM_DOMAIN = AUTH_TOKENS.get('AWS_S3_CUSTOM_DOMAIN', 'edxuploads.s3.am
if AUTH_TOKENS.get('DEFAULT_FILE_STORAGE'):
DEFAULT_FILE_STORAGE = AUTH_TOKENS.get('DEFAULT_FILE_STORAGE')
elif AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY:
-DEFAULT_FILE_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage'
+DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
else:
DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage'

View File

@@ -117,14 +117,14 @@ def skip_signal(signal, **kwargs):
class MockS3BotoMixin:
"""
-TestCase mixin that mocks the S3Boto3Storage save method and s3 connection.
+TestCase mixin that mocks the S3BotoStorage save method and s3 connection.
"""
def setUp(self):
super().setUp()
self._mocked_connection = patch('boto.connect_s3', return_value=Mock())
self.mocked_connection = self._mocked_connection.start()
-self.patcher = patch('storages.backends.s3boto3.S3Boto3Storage.save')
+self.patcher = patch('storages.backends.s3boto.S3BotoStorage.save')
self.patcher.start()
def tearDown(self):

View File

@@ -2749,8 +2749,7 @@ class TestInstructorAPILevelsDataDump(SharedModuleStoreTestCase, LoginEnrollment
ex_status = 503
ex_reason = 'Slow Down'
url = reverse(endpoint, kwargs={'course_id': str(self.course.id)})
-with patch('storages.backends.s3boto3.S3Boto3Storage.listdir',
-side_effect=BotoServerError(ex_status, ex_reason)):
+with patch('storages.backends.s3boto.S3BotoStorage.listdir', side_effect=BotoServerError(ex_status, ex_reason)):
if endpoint in INSTRUCTOR_GET_ENDPOINTS:
response = self.client.get(url)
else:

View File

@@ -230,7 +230,7 @@ class ReportStore:
storage_type = config.get('STORAGE_TYPE', '').lower()
if storage_type == 's3':
return DjangoStorageReportStore(
-storage_class='storages.backends.s3boto3.S3Boto3Storage',
+storage_class='storages.backends.s3boto.S3BotoStorage',
storage_kwargs={
'bucket': config['BUCKET'],
'location': config['ROOT_PATH'],

View File

@@ -105,7 +105,7 @@ class DjangoStorageReportStoreS3TestCase(MockS3BotoMixin, ReportStoreTestMixin,
storage.
"""
test_settings = copy.deepcopy(settings.GRADES_DOWNLOAD)
-test_settings['STORAGE_CLASS'] = 'storages.backends.s3boto3.S3Boto3Storage'
+test_settings['STORAGE_CLASS'] = 'storages.backends.s3boto.S3BotoStorage'
test_settings['STORAGE_KWARGS'] = {
'bucket': settings.GRADES_DOWNLOAD['BUCKET'],
'location': settings.GRADES_DOWNLOAD['ROOT_PATH'],

View File

@@ -910,7 +910,7 @@ class SoftwareSecurePhotoVerification(PhotoVerification):
config = settings.VERIFY_STUDENT["SOFTWARE_SECURE"]
# Default to the S3 backend for backward compatibility
-storage_class = config.get("STORAGE_CLASS", "storages.backends.s3boto3.S3Boto3Storage")
+storage_class = config.get("STORAGE_CLASS", "storages.backends.s3boto.S3BotoStorage")
storage_kwargs = config.get("STORAGE_KWARGS", {})
# Map old settings to the parameters expected by the storage backend

View File

@@ -1853,7 +1853,7 @@ class TestReverifyView(TestVerificationBase):
"CERT_VERIFICATION_PATH": False,
},
"DAYS_GOOD_FOR": 10,
-"STORAGE_CLASS": 'storages.backends.s3boto3.S3Boto3Storage',
+"STORAGE_CLASS": 'storages.backends.s3boto.S3BotoStorage',
"STORAGE_KWARGS": {
'bucket': 'test-idv',
},
@@ -1917,7 +1917,7 @@ class TestPhotoURLView(TestVerificationBase):
"CERT_VERIFICATION_PATH": False,
},
"DAYS_GOOD_FOR": 10,
-"STORAGE_CLASS": 'storages.backends.s3boto3.S3Boto3Storage',
+"STORAGE_CLASS": 'storages.backends.s3boto.S3BotoStorage',
"STORAGE_KWARGS": {
'bucket': 'test-idv',
},

View File

@@ -1937,7 +1937,7 @@ TRANSLATORS_GUIDE = 'https://edx.readthedocs.org/projects/edx-developer-guide/en
'conventions/internationalization/i18n_translators_guide.html'
#################################### AWS #######################################
-# S3Boto3Storage insists on a timeout for uploaded assets. We should make it
+# S3BotoStorage insists on a timeout for uploaded assets. We should make it
# permanent instead, but rather than trying to figure out exactly where that
# setting is, I'm just bumping the expiration time to something absurd (100
# years). This is only used if DEFAULT_FILE_STORAGE is overriden to use S3
@@ -3758,7 +3758,7 @@ VIDEO_IMAGE_SETTINGS = dict(
VIDEO_IMAGE_MAX_BYTES=2 * 1024 * 1024, # 2 MB
VIDEO_IMAGE_MIN_BYTES=2 * 1024, # 2 KB
# Backend storage
-# STORAGE_CLASS='storages.backends.s3boto3.S3Boto3Storage',
+# STORAGE_CLASS='storages.backends.s3boto.S3BotoStorage',
# STORAGE_KWARGS=dict(bucket='video-image-bucket'),
STORAGE_KWARGS=dict(
location=MEDIA_ROOT,
@@ -3774,7 +3774,7 @@ VIDEO_IMAGE_MAX_AGE = 31536000
VIDEO_TRANSCRIPTS_SETTINGS = dict(
VIDEO_TRANSCRIPTS_MAX_BYTES=3 * 1024 * 1024, # 3 MB
# Backend storage
-# STORAGE_CLASS='storages.backends.s3boto3.S3Boto3Storage',
+# STORAGE_CLASS='storages.backends.s3boto.S3BotoStorage',
# STORAGE_KWARGS=dict(bucket='video-transcripts-bucket'),
STORAGE_KWARGS=dict(
location=MEDIA_ROOT,
@@ -5097,7 +5097,7 @@ BUNDLE_ASSET_URL_STORAGE_SECRET = None
# See `blockstore.apps.bundles.storage.LongLivedSignedUrlStorage` for details.
BUNDLE_ASSET_STORAGE_SETTINGS = dict(
# Backend storage
-# STORAGE_CLASS='storages.backends.s3boto3.S3Boto3Storage',
+# STORAGE_CLASS='storages.backends.s3boto.S3BotoStorage',
# STORAGE_KWARGS=dict(bucket='bundle-asset-bucket', location='/path-to-bundles/'),
STORAGE_CLASS='django.core.files.storage.FileSystemStorage',
STORAGE_KWARGS=dict(

View File

@@ -463,7 +463,7 @@ AWS_S3_CUSTOM_DOMAIN = AUTH_TOKENS.get('AWS_S3_CUSTOM_DOMAIN', 'edxuploads.s3.am
if AUTH_TOKENS.get('DEFAULT_FILE_STORAGE'):
DEFAULT_FILE_STORAGE = AUTH_TOKENS.get('DEFAULT_FILE_STORAGE')
elif AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY:
-DEFAULT_FILE_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage'
+DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
else:
DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage'