Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ migrate:
# 4) Remove the management command from this `deploy-migrate` recipe
# 5) Repeat!
deploy-migrate:
echo "Nothing to do here!"
python contentcuration/manage.py rectify_incorrect_contentnode_source_fields

contentnodegc:
python contentcuration/manage.py garbage_collect
Expand Down
2 changes: 1 addition & 1 deletion contentcuration/contentcuration/production_settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@

MEDIA_ROOT = base_settings.STORAGE_ROOT

DEFAULT_FILE_STORAGE = 'contentcuration.utils.gcs_storage.GoogleCloudStorage'
DEFAULT_FILE_STORAGE = 'contentcuration.utils.gcs_storage.CompositeGCS'
SESSION_ENGINE = "django.contrib.sessions.backends.db"

# email settings
Expand Down
2 changes: 1 addition & 1 deletion contentcuration/contentcuration/sandbox_settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@

DEBUG = True

DEFAULT_FILE_STORAGE = "contentcuration.utils.gcs_storage.GoogleCloudStorage"
DEFAULT_FILE_STORAGE = "contentcuration.utils.gcs_storage.CompositeGCS"

LANGUAGES += (("ar", gettext("Arabic")),) # noqa

Expand Down
3 changes: 2 additions & 1 deletion contentcuration/contentcuration/settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -292,8 +292,9 @@ def gettext(s):
# ('en-PT', gettext('English - Pirate')),
)

PRODUCTION_SITE_ID = 1
SITE_BY_ID = {
'master': 1,
'master': PRODUCTION_SITE_ID,
'unstable': 3,
'hotfixes': 4,
}
Expand Down
114 changes: 98 additions & 16 deletions contentcuration/contentcuration/tests/test_gcs_storage.py
Original file line number Diff line number Diff line change
@@ -1,18 +1,15 @@
#!/usr/bin/env python
from future import standard_library
standard_library.install_aliases()
from io import BytesIO

import pytest
import mock
from django.core.files import File
from django.test import TestCase
from google.cloud.storage import Bucket
from google.cloud.storage import Client
from google.cloud.storage.blob import Blob
from mixer.main import mixer
from mock import create_autospec
from mock import patch

from contentcuration.utils.gcs_storage import GoogleCloudStorage as gcs
from contentcuration.utils.gcs_storage import CompositeGCS
from contentcuration.utils.gcs_storage import GoogleCloudStorage


class GoogleCloudStorageSaveTestCase(TestCase):
Expand All @@ -21,10 +18,10 @@ class GoogleCloudStorageSaveTestCase(TestCase):
"""

def setUp(self):
self.blob_class = create_autospec(Blob)
self.blob_class = mock.create_autospec(Blob)
self.blob_obj = self.blob_class("blob", "blob")
self.mock_client = create_autospec(Client)
self.storage = gcs(client=self.mock_client())
self.mock_client = mock.create_autospec(Client)
self.storage = GoogleCloudStorage(client=self.mock_client(), bucket_name="bucket")
self.content = BytesIO(b"content")

def test_calls_upload_from_file(self):
Expand Down Expand Up @@ -73,8 +70,8 @@ def test_uploads_cache_control_private_if_content_database(self):
self.storage.save(filename, self.content, blob_object=self.blob_obj)
assert "private" in self.blob_obj.cache_control

@patch("contentcuration.utils.gcs_storage.BytesIO")
@patch("contentcuration.utils.gcs_storage.GoogleCloudStorage._is_file_empty", return_value=False)
@mock.patch("contentcuration.utils.gcs_storage.BytesIO")
@mock.patch("contentcuration.utils.gcs_storage.GoogleCloudStorage._is_file_empty", return_value=False)
def test_gzip_if_content_database(self, bytesio_mock, file_empty_mock):
"""
Check that if we're uploading a gzipped content database and
Expand All @@ -99,17 +96,17 @@ class RandomFileSchema:
filename = str

def setUp(self):
self.blob_class = create_autospec(Blob)
self.blob_class = mock.create_autospec(Blob)
self.blob_obj = self.blob_class("blob", "blob")
self.mock_client = create_autospec(Client)
self.storage = gcs(client=self.mock_client())
self.mock_client = mock.create_autospec(Client)
self.storage = GoogleCloudStorage(client=self.mock_client(), bucket_name="bucket")
self.local_file = mixer.blend(self.RandomFileSchema)

def test_raises_error_if_mode_is_not_rb(self):
"""
open() should raise an assertion error if passed in a mode flag that's not "rb".
"""
with pytest.raises(AssertionError):
with self.assertRaises(AssertionError):
self.storage.open("randfile", mode="wb")

def test_calls_blob_download_to_file(self):
Expand All @@ -130,3 +127,88 @@ def test_returns_django_file(self):
assert isinstance(f, File)
# This checks that an actual temp file was written on disk for the file.
assert f.name


class CompositeGCSTestCase(TestCase):
    """
    Tests for the CompositeGCS composite storage backend.

    Wires up two mocked GCS clients — a default (credentialed) client, which
    is writeable, and an anonymous client, which is read-only — each with its
    own mocked bucket, and asserts that CompositeGCS delegates each storage
    operation to the appropriate backend.
    """

    def setUp(self):
        mock_client_cls = mock.MagicMock(spec_set=Client)
        bucket_cls = mock.MagicMock(spec_set=Bucket)
        self.blob_cls = mock.MagicMock(spec_set=Blob)

        # A client created with a project is the writeable backend; the
        # anonymous client (project=None) is read-only.
        self.mock_default_client = mock_client_cls(project="project")
        self.mock_anon_client = mock_client_cls(project=None)

        self.mock_default_bucket = bucket_cls(self.mock_default_client, "bucket")
        self.mock_default_client.get_bucket.return_value = self.mock_default_bucket
        self.mock_anon_bucket = bucket_cls(self.mock_anon_client, "bucket")
        self.mock_anon_client.get_bucket.return_value = self.mock_anon_bucket

        # Patch client construction so CompositeGCS builds its backends from
        # our mocks rather than reaching out to real GCS.
        with mock.patch("contentcuration.utils.gcs_storage._create_default_client", return_value=self.mock_default_client), \
                mock.patch("contentcuration.utils.gcs_storage.Client.create_anonymous_client", return_value=self.mock_anon_client):
            self.storage = CompositeGCS()

    def test_get_writeable_backend(self):
        # The credentialed client is the one allowed to write.
        backend = self.storage._get_writeable_backend()
        self.assertEqual(backend.client, self.mock_default_client)

    def test_get_writeable_backend__raises_error_if_none(self):
        # With no credentialed client available there is no writeable backend.
        self.mock_default_client.project = None
        with self.assertRaises(AssertionError):
            self.storage._get_writeable_backend()

    def test_get_readonly_backend(self):
        # The blob only exists in the anonymous bucket, so the anonymous
        # backend should be selected for reading.
        self.mock_anon_bucket.get_blob.return_value = self.blob_cls("blob", "blob")
        backend = self.storage._get_readable_backend("blob")
        self.assertEqual(backend.client, self.mock_anon_client)

    def test_get_readonly_backend__raises_error_if_not_found(self):
        # No backend has the blob: reading should fail loudly.
        self.mock_default_bucket.get_blob.return_value = None
        self.mock_anon_bucket.get_blob.return_value = None
        with self.assertRaises(FileNotFoundError):
            self.storage._get_readable_backend("blob")

    def test_open(self):
        self.mock_default_bucket.get_blob.return_value = self.blob_cls("blob", "blob")
        f = self.storage.open("blob")
        self.assertIsInstance(f, File)
        self.mock_default_bucket.get_blob.assert_called_with("blob")

    @mock.patch("contentcuration.utils.gcs_storage.Blob")
    def test_save(self, mock_blob):
        self.storage.save("blob", BytesIO(b"content"))
        blob = mock_blob.return_value
        blob.upload_from_file.assert_called()

    def test_delete(self):
        mock_blob = self.blob_cls("blob", "blob")
        self.mock_default_bucket.get_blob.return_value = mock_blob
        self.storage.delete("blob")
        mock_blob.delete.assert_called()

    def test_exists(self):
        self.mock_default_bucket.get_blob.return_value = self.blob_cls("blob", "blob")
        self.assertTrue(self.storage.exists("blob"))

    def test_exists__returns_false_if_not_found(self):
        self.mock_default_bucket.get_blob.return_value = None
        self.assertFalse(self.storage.exists("blob"))

    def test_size(self):
        mock_blob = self.blob_cls("blob", "blob")
        self.mock_default_bucket.get_blob.return_value = mock_blob
        mock_blob.size = 4
        self.assertEqual(self.storage.size("blob"), 4)

    def test_url(self):
        mock_blob = self.blob_cls("blob", "blob")
        self.mock_default_bucket.get_blob.return_value = mock_blob
        mock_blob.public_url = "https://storage.googleapis.com/bucket/blob"
        self.assertEqual(self.storage.url("blob"), "https://storage.googleapis.com/bucket/blob")

    def test_get_created_time(self):
        self.mock_default_bucket.get_blob.return_value = self.blob_cls("blob", "blob")
        self.assertEqual(self.storage.get_created_time("blob"), self.blob_cls.return_value.time_created)
Original file line number Diff line number Diff line change
@@ -0,0 +1,167 @@
# DELETE THIS FILE AFTER RUNNING THE MIGRATIONS
import datetime
import uuid

from django.core.management import call_command
from django.utils import timezone
from le_utils.constants import content_kinds

from contentcuration.models import Channel
from contentcuration.models import ContentNode
from contentcuration.models import License
from contentcuration.tests import testdata
from contentcuration.tests.base import StudioAPITestCase
from contentcuration.utils.publish import publish_channel


class TestRectifyMigrationCommand(StudioAPITestCase):
    """
    Tests for the ``rectify_incorrect_contentnode_source_fields`` management
    command, which restores license/source fields on copied content nodes
    from the node they were originally copied from.
    """

    def setUp(self):
        super(TestRectifyMigrationCommand, self).setUp()
        self.original_channel = testdata.channel()
        self.license_original = License.objects.get(license_name="Special Permissions")
        self.license_description_original = "License to chill"
        # The "original" node every other node in these tests descends from.
        self.original_contentnode = ContentNode.objects.create(
            id=uuid.uuid4().hex,
            title="Original Node",
            parent=self.original_channel.main_tree,
            license=self.license_original,
            license_description=self.license_description_original,
            original_channel_id=None,
            source_channel_id=None,
            author="old author",
        )
        self.user = testdata.user()
        self.original_channel.editors.add(self.user)
        self.client.force_authenticate(user=self.user)

    def create_base_channel_and_contentnode(self, source_contentnode, source_channel):
        """Create a public channel holding a node copied (directly or via an
        intermediate) from ``self.original_contentnode``."""
        base_channel = testdata.channel()
        base_channel.public = True
        base_channel.save()
        base_node = ContentNode.objects.create(
            id=uuid.uuid4().hex,
            title="base contentnode",
            parent=base_channel.main_tree,
            kind_id=content_kinds.VIDEO,
            original_channel_id=self.original_channel.id,
            original_source_node_id=self.original_contentnode.node_id,
            source_channel_id=source_channel.id,
            source_node_id=source_contentnode.node_id,
            author="source author",
            license=self.license_original,
            # license_description lost in the copy — what the command rectifies.
            license_description=None,
        )
        return base_node, base_channel

    def create_source_channel_and_contentnode(self):
        """Create an intermediate public channel whose node was copied from
        ``self.original_contentnode`` (three-node chain middle link)."""
        source_channel = testdata.channel()
        source_channel.public = True
        source_channel.save()
        source_node = ContentNode.objects.create(
            id=uuid.uuid4().hex,
            title="base contentnode",
            parent=source_channel.main_tree,
            kind_id=content_kinds.VIDEO,
            license=self.license_original,
            license_description="No chill",
            original_channel_id=self.original_channel.id,
            source_channel_id=self.original_channel.id,
            source_node_id=self.original_contentnode.node_id,
            original_source_node_id=self.original_contentnode.node_id,
            author="source author",
        )

        return source_node, source_channel

    def run_migrations(self):
        """Invoke the management command under test."""
        call_command('rectify_incorrect_contentnode_source_fields')

    def _publish_and_clear_changed(self, base_channel):
        """Publish the channel, then clear ``changed`` across its whole tree
        so the command's own updates are detectable afterwards."""
        publish_channel(self.user.id, Channel.objects.get(pk=base_channel.pk).id)

        # main_tree node still has changed=True even after the publish.
        for node in Channel.objects.get(pk=base_channel.pk).main_tree.get_family().filter(changed=True):
            node.changed = False
            # This should probably flip changed back to True, but surprisingly
            # it does not: the changed boolean on main_tree does not change no
            # matter what we do through ContentNode model methods like save().
            node.save()

    def _backdate(self, node, when):
        """Set ``modified`` via a queryset update so auto_now cannot
        overwrite the backdated timestamp."""
        ContentNode.objects.filter(pk=node.pk).update(modified=when)

    def test_two_node_case(self):
        base_node, base_channel = self.create_base_channel_and_contentnode(self.original_contentnode, self.original_channel)

        self._publish_and_clear_changed(base_channel)
        self._backdate(base_node, datetime.datetime(2023, 7, 5, tzinfo=timezone.utc))

        self.run_migrations()
        updated_base_node = ContentNode.objects.get(pk=base_node.pk)
        self.assertEqual(updated_base_node.license_description, self.original_contentnode.license_description)
        # The rectified tree must be marked changed so it gets republished.
        self.assertTrue(Channel.objects.get(pk=base_channel.id).main_tree.get_family().filter(changed=True).exists())

    def test_three_node_case_implicit(self):
        source_node, source_channel = self.create_source_channel_and_contentnode()
        base_node, base_channel = self.create_base_channel_and_contentnode(source_node, source_channel)
        source_node.aggregator = "Nami"
        source_node.save()
        # Implicit case: base node mirrors the source node's fields.
        base_node.author = source_node.author
        base_node.license = source_node.license
        base_node.aggregator = source_node.aggregator
        base_node.save()

        self._publish_and_clear_changed(base_channel)
        self._backdate(base_node, datetime.datetime(2023, 7, 5, tzinfo=timezone.utc))
        self._backdate(source_node, datetime.datetime(2023, 3, 5, tzinfo=timezone.utc))

        self.run_migrations()
        updated_base_node = ContentNode.objects.get(pk=base_node.pk)
        updated_source_node = ContentNode.objects.get(pk=source_node.pk)
        self.assertEqual(updated_base_node.license_description, self.original_contentnode.license_description)
        self.assertEqual(updated_source_node.license_description, self.original_contentnode.license_description)
        self.assertTrue(Channel.objects.get(pk=base_channel.id).main_tree.get_family().filter(changed=True).exists())

    def test_three_node_case_explicit(self):
        source_node, source_channel = self.create_source_channel_and_contentnode()
        base_node, base_channel = self.create_base_channel_and_contentnode(source_node, source_channel)
        # Explicit case: both nodes have had their descriptions edited.
        source_node.license_description = "luffy"
        base_node.license_description = "zoro"
        base_node.save()
        source_node.save()

        self._publish_and_clear_changed(base_channel)
        self._backdate(base_node, datetime.datetime(2023, 7, 5, tzinfo=timezone.utc))
        self._backdate(source_node, datetime.datetime(2023, 3, 5, tzinfo=timezone.utc))

        self.run_migrations()
        updated_base_node = ContentNode.objects.get(pk=base_node.pk)
        updated_source_node = ContentNode.objects.get(pk=source_node.pk)
        self.assertEqual(updated_base_node.license_description, self.original_contentnode.license_description)
        self.assertEqual(updated_source_node.license_description, self.original_contentnode.license_description)
        self.assertTrue(Channel.objects.get(pk=base_channel.id).main_tree.get_family().filter(changed=True).exists())
Loading