Skip to content

Commit

Permalink
tests now work well for finding relations on the media object used in…
Browse files Browse the repository at this point in the history
… the entire process...
  • Loading branch information
frimpongopoku committed Nov 13, 2023
1 parent 8b37a43 commit ac24e5e
Show file tree
Hide file tree
Showing 2 changed files with 54 additions and 15 deletions.
54 changes: 45 additions & 9 deletions src/api/tests/test_helper_functions_for_finding_duplicates.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,17 @@
from django.test import TestCase
from _main_.utils.utils import Console
from api.store.common import find_duplicate_items
from api.tests.common import createImage, makeAction, makeCommunity, makeEvent, makeHomePageSettings, makeMedia, makeTeam, makeUserUpload, makeVendor
from api.store.common import find_duplicate_items, find_relations_for_item
from api.tests.common import (
createImage,
makeAction,
makeCommunity,
makeEvent,
makeHomePageSettings,
makeMedia,
makeTeam,
makeUserUpload,
makeVendor,
)
from database.models import Media, Tag


Expand Down Expand Up @@ -33,15 +43,41 @@ def test_relationship_finder(self):
retrieved
"""


tag = Tag.objects.create(name = "Personal Tag")
media = makeMedia(tags = [tag])
tag = Tag.objects.create(name="Personal Tag")
media = makeMedia(tags=[tag])
com = makeCommunity(name="Top Community", logo=media)
action = makeAction(image=media)
event = makeEvent(image=media)
team = makeTeam (logo=media, community =com )
team = makeTeam(logo=media, community=com)
vendor = makeVendor(image=media)
homepage = makeHomePageSettings(title="Top Settings", images = [media])

homepage = makeHomePageSettings(title="Top Settings", images=[media])


item = find_relations_for_item(media)
actions = item.get("actions", [])
actions = [a.id for a in actions]
com_logos = item.get("community_logos")
com_logos = [com.id for com in com_logos]
homepages = item.get("homepage", [])
homepages = [h.id for h in homepages]
teams = item.get("teams", [])
teams = [t.id for t in teams]
events = item.get("events", [])
events = [e.id for e in events]
vendors = item.get("vendors", [])
vendors = [v.id for v in vendors]
print("Checking to see if all related items are available...")
self.assertEquals(len(actions), 1)
self.assertIn(action.id, actions)
self.assertEquals(len(com_logos), 1)
self.assertIn(com.id, com_logos)
self.assertEquals(len(homepages), 1)
self.assertIn(homepage.id, homepages)
self.assertEquals(len(teams), 1)
self.assertIn(team.id, teams)
self.assertEquals(len(events), 1)
self.assertIn(event.id, events)
self.assertEquals(len(vendors), 1)
self.assertIn(vendor.id, vendors)
print(
"Actions, homepages, events, vendors, community logos, all relations with media were retrieved successfully!"
)
15 changes: 9 additions & 6 deletions src/task_queue/database_tasks/media_library_cleanup.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,14 +17,14 @@
REMOVE_DUPLICATE_IMAGE_FLAG_KEY = "remove-duplicate-images-feature-flag"


def remove_duplicate_images(task):
def remove_duplicate_images(task=None):
"""
This checks all media on the platform and removes all duplicates.
Its based on the "Remove Duplicate Images" feature flag. For communities that are subscribed
to the flag, duplicates will be removed from their libraries.
"""
try:
generate_hashes()
# generate_hashes() # UNCHECK BEFORE PR (BPR)
flag = FeatureFlag.objects.filter(key=REMOVE_DUPLICATE_IMAGE_FLAG_KEY).first()
communities = flag.enabled_communities()
# task = Task.objects.filter(name="Media Library Cleanup Routine").first()
Expand All @@ -37,15 +37,18 @@ def remove_duplicate_images(task):
print("Duplicate Removal Error (Media Library Cleanup): " + str(e))
return "Failure"

def clean_and_notify(ids,community,notification_receiver):
def clean_and_notify(ids,community,notification_receiver,**kwargs):
do_removal = kwargs.get("remove",False)
send_notification = kwargs.get("notify", False)
grouped_dupes = find_duplicate_items(False, community_ids=ids)
num_of_dupes_in_all = get_duplicate_count(grouped_dupes)
csv_file = summarize_duplicates_into_csv(grouped_dupes)

for hash_value in grouped_dupes.keys():
remove_duplicates_and_attach_relations(hash_value)
if do_removal:
for hash_value in grouped_dupes.keys():
remove_duplicates_and_attach_relations(hash_value)

if notification_receiver:
if send_notification and notification_receiver:
send_summary_email_to_admin(notification_receiver, community, num_of_dupes_in_all, csv_file)


Expand Down

0 comments on commit ac24e5e

Please sign in to comment.