diff --git a/src/openedx_content/api.py b/src/openedx_content/api.py index d8c2f0c3..a0d7d0ab 100644 --- a/src/openedx_content/api.py +++ b/src/openedx_content/api.py @@ -9,7 +9,14 @@ """ # These wildcard imports are okay because these api modules declare __all__. -# pylint: disable=wildcard-import +# pylint: disable=wildcard-import,unused-import + +# Signals are kept in a separate namespace, for two reasons: +# (1) so they can easily be imported/used as `api.signals` (e.g. `from openedx_content import api`, use `api.signals.x`) +# (2) to avoid confusion between event data structures and other API symbols with similar names (e.g. +# `DraftChangeLogEventData` vs `DraftChangeLogRecord` is clearer if the former is `signals.DraftChangeLogEventData`) +from . import signals +# The rest of the public API (other than models): from .applets.backup_restore.api import * from .applets.collections.api import * from .applets.components.api import * diff --git a/src/openedx_content/applets/collections/api.py b/src/openedx_content/applets/collections/api.py index 6abe5e2b..40692ca0 100644 --- a/src/openedx_content/applets/collections/api.py +++ b/src/openedx_content/applets/collections/api.py @@ -1,15 +1,19 @@ """ Collections API (warning: UNSTABLE, in progress API) """ + from __future__ import annotations from datetime import datetime, timezone +from functools import partial from django.core.exceptions import ValidationError from django.db.models import QuerySet +from django.db.transaction import on_commit from ..publishing import api as publishing_api from ..publishing.models import PublishableEntity +from . 
import signals from .models import Collection, CollectionPublishableEntity, LearningPackage # The public API that will be re-exported by openedx_content.api @@ -32,6 +36,39 @@ ] +def _queue_change_event( + collection: Collection, + *, + created: bool = False, + metadata_modified: bool = False, + deleted: bool = False, + entities_added: list[PublishableEntity.ID] | None = None, + entities_removed: list[PublishableEntity.ID] | None = None, + user_id: int | None = None, +) -> None: + """Helper for emitting the event when a collection has changed.""" + + learning_package_id = collection.learning_package.id + learning_package_title = collection.learning_package.title + + # Send out an event immediately after this database transaction commits. + on_commit(partial( + signals.COLLECTION_CHANGED.send_event, + time=collection.modified, + learning_package=signals.LearningPackageEventData(id=learning_package_id, title=learning_package_title), + changed_by=signals.UserAttributionEventData(user_id=user_id), + change=signals.CollectionChangeData( + collection_id=collection.id, + collection_code=collection.collection_code, + created=created, + metadata_modified=metadata_modified, + deleted=deleted, + entities_added=entities_added or [], + entities_removed=entities_removed or [], + ), + )) + + def create_collection( learning_package_id: LearningPackage.ID, collection_code: str, @@ -54,6 +91,8 @@ def create_collection( ) collection.full_clean() collection.save() + if enabled: + _queue_change_event(collection, created=True, user_id=created_by) return collection @@ -87,6 +126,7 @@ def update_collection( collection.description = description collection.save() + _queue_change_event(collection, metadata_modified=True) return collection @@ -103,12 +143,20 @@ def delete_collection( Soft-deleted collections can be re-enabled using restore_collection. 
""" collection = get_collection(learning_package_id, collection_code) + entities_removed = list(collection.entities.order_by("id").values_list("id", flat=True)) + was_already_soft_deleted = not collection.enabled if hard_delete: + collection.modified = datetime.now(tz=timezone.utc) # For the event timestamp; won't get saved to the DB + if not was_already_soft_deleted: # Send the deleted event unless this was already soft deleted. + _queue_change_event(collection, deleted=True, entities_removed=entities_removed) + # Delete after enqueing the event: collection.delete() - else: + elif not was_already_soft_deleted: + # Soft delete: collection.enabled = False collection.save() + _queue_change_event(collection, deleted=True, entities_removed=entities_removed) return collection @@ -120,9 +168,11 @@ def restore_collection( Undo a "soft delete" by re-enabling a Collection. """ collection = get_collection(learning_package_id, collection_code) + entities_added = list(collection.entities.order_by("id").values_list("id", flat=True)) collection.enabled = True collection.save() + _queue_change_event(collection, created=True, entities_added=entities_added) return collection @@ -152,12 +202,12 @@ def add_to_collection( ) collection = get_collection(learning_package_id, collection_code) - collection.entities.add( - *entities_qset.all(), - through_defaults={"created_by_id": created_by}, - ) + existing_ids = set(collection.entities.values_list("id", flat=True)) + ids_to_add = entities_qset.values_list("id", flat=True) + collection.entities.add(*ids_to_add, through_defaults={"created_by_id": created_by}) collection.modified = datetime.now(tz=timezone.utc) collection.save() + _queue_change_event(collection, entities_added=sorted(list(set(ids_to_add) - existing_ids)), user_id=created_by) return collection @@ -178,9 +228,12 @@ def remove_from_collection( """ collection = get_collection(learning_package_id, collection_code) - collection.entities.remove(*entities_qset.all()) + ids_to_remove 
= list(entities_qset.values_list("id", flat=True)) + entities_removed = sorted(list(collection.entities.filter(id__in=ids_to_remove).values_list("id", flat=True))) + collection.entities.remove(*ids_to_remove) collection.modified = datetime.now(tz=timezone.utc) collection.save() + _queue_change_event(collection, entities_removed=entities_removed) return collection @@ -222,7 +275,7 @@ def get_collections(learning_package_id: LearningPackage.ID, enabled: bool | Non qs = Collection.objects.filter(learning_package_id=learning_package_id) if enabled is not None: qs = qs.filter(enabled=enabled) - return qs.select_related("learning_package").order_by('pk') + return qs.select_related("learning_package").order_by("pk") def set_collections( @@ -245,25 +298,34 @@ def set_collections( raise ValidationError( "Collection entities must be from the same learning package as the collection.", ) - current_relations = CollectionPublishableEntity.objects.filter( - entity=publishable_entity - ).select_related('collection') - # Clear other collections for given entity and add only new collections from collection_qset - removed_collections = set( - r.collection for r in current_relations.exclude(collection__in=collection_qset) + current_relations = CollectionPublishableEntity.objects.filter(entity=publishable_entity).select_related( + "collection" ) - new_collections = set(collection_qset.exclude( - id__in=current_relations.values_list('collection', flat=True) - )) + # Clear other collections for given entity and add only new collections from collection_qset + removed_collections = set(r.collection for r in current_relations.exclude(collection__in=collection_qset)) + new_collections = set(collection_qset.exclude(id__in=current_relations.values_list("collection", flat=True))) # Triggers a m2m_changed signal publishable_entity.collections.set( objs=collection_qset, through_defaults={"created_by_id": created_by}, ) - # Update modified date via update to avoid triggering post_save signal for 
all collections, which can be very slow. - affected_collection = removed_collections | new_collections - Collection.objects.filter( - id__in=[collection.id for collection in affected_collection] - ).update(modified=datetime.now(tz=timezone.utc)) + # Update modified date: + affected_collections = removed_collections | new_collections + Collection.objects.filter(id__in=[collection.id for collection in affected_collections]).update( + modified=datetime.now(tz=timezone.utc) + ) - return affected_collection + # Emit one event per affected collection. Re-fetch with select_related so _queue_change_event + # can read collection.learning_package without extra queries; the re-fetch also picks up the + # updated modified timestamp from the bulk update above. + removed_ids = {c.id for c in removed_collections} + for collection in Collection.objects.filter(id__in=[c.id for c in affected_collections]).select_related( + "learning_package" + ): + # TODO: test performance of this and potentially send these async if > 1 affected collection. 
+ if collection.id in removed_ids: + _queue_change_event(collection, entities_removed=[publishable_entity.id], user_id=created_by) + else: + _queue_change_event(collection, entities_added=[publishable_entity.id], user_id=created_by) + + return affected_collections diff --git a/src/openedx_content/applets/collections/signal_handlers.py b/src/openedx_content/applets/collections/signal_handlers.py new file mode 100644 index 00000000..951840d4 --- /dev/null +++ b/src/openedx_content/applets/collections/signal_handlers.py @@ -0,0 +1,46 @@ +"""Signal handlers for collections-related updates.""" + +from functools import partial + +from django.db import transaction +from django.dispatch import receiver + +from ..publishing.signals import ENTITIES_DRAFT_CHANGED, DraftChangeLogEventData, UserAttributionEventData +from .tasks import emit_collections_changed_for_entity_changes_task + + +@receiver(ENTITIES_DRAFT_CHANGED) +def on_entities_changed( + change_log: DraftChangeLogEventData, + changed_by: UserAttributionEventData, + **kwargs, +): + """ + When entity drafts are deleted or restored, notify affected collections. + + Dispatches a task to emit COLLECTION_CHANGED for any + collections that contain the changed entities. + """ + removed_entity_ids = [record.entity_id for record in change_log.changes if record.new_version_id is None] + # old_version_id=None covers both brand-new entities and restored soft-deletes; we can't distinguish + # them here without a DB query. The task is a no-op for new entities (not yet in any collection). + # TODO: if ChangeLogRecordData gains a 'restored' flag, filter to only restored entities here. + # (Newly-created entities cannot be part of collections yet, so we only care about entities that + # were previously in collections, then deleted and then restored.) 
+ added_entity_ids = [ + record.entity_id + for record in change_log.changes + if record.old_version_id is None and record.new_version_id is not None + ] + + if not removed_entity_ids and not added_entity_ids: + return + + transaction.on_commit( + partial( + emit_collections_changed_for_entity_changes_task.delay, + removed_entity_ids=removed_entity_ids, + added_entity_ids=added_entity_ids, + user_id=changed_by.user_id, + ) + ) diff --git a/src/openedx_content/applets/collections/signals.py b/src/openedx_content/applets/collections/signals.py new file mode 100644 index 00000000..70191023 --- /dev/null +++ b/src/openedx_content/applets/collections/signals.py @@ -0,0 +1,73 @@ +""" +Low-level events/signals emitted by openedx_content +""" + +from attrs import define, field +from openedx_events.tooling import OpenEdxPublicSignal # type: ignore[import-untyped] + +from ..publishing.models.publishable_entity import PublishableEntity +from ..publishing.signals import LearningPackageEventData, UserAttributionEventData + +# Public API available via openedx_content.api +__all__ = [ + # All event data structures should end with "...Data": + "CollectionChangeData", + # All events: + "COLLECTION_CHANGED", +] + + +@define +class CollectionChangeData: + """Summary of changes to a collection, for event purposes""" + + collection_id: int + collection_code: str + created: bool = False + """The collection is newly-created, or un-deleted. Some entities may be added simultaneously.""" + metadata_modified: bool = False + """The collection's title/description has changed. Does not indicate whether or not entities were added/removed.""" + deleted: bool = False + """ + The collection has been deleted. When this is true, the entities_removed list will have all entity IDs. + Does not distinguish between "soft" and "hard" deletion. 
+ """ + entities_added: list[PublishableEntity.ID] = field(factory=list) + entities_removed: list[PublishableEntity.ID] = field(factory=list) + + +COLLECTION_CHANGED = OpenEdxPublicSignal( + event_type="org.openedx.content.collections.collection_changed.v1", + data={ + "learning_package": LearningPackageEventData, + "changed_by": UserAttributionEventData, + "change": CollectionChangeData, + }, +) +""" +A ``Collection`` has been created, modified, or deleted, or its entities have +changed. + +This is a low-level batch event. It does not have any course or library context +information available. It does not distinguish between Containers, Components, +or other entity types. + +💾 This event is only emitted after any transaction has been committed. + +⏳ This **batch** event is emitted **synchronously**. Handlers that do anything +per-entity or that is possibly slow should dispatch an asynchronous task for +processing the event. +""" + +# Note: at present, the openedx_tagging code (in this repo) emits a +# CONTENT_OBJECT_ASSOCIATIONS_CHANGED event whenever an entity's tags change. +# But we do NOT emit the same event when an entity's collections change; rather +# we expect code in the platform to listen for COLLECTION_CHANGED and then +# re-emit '...ASSOCIATIONS_CHANGED' as needed. +# The reason we don't emit the '...ASSOCIATIONS_CHANGED' event here +# is simple: we know the entity IDs but not their opaque keys, and all of the +# code that listens for that event expects the entity's opaque keys. +# The tagging code can do it here because the `object_id` in the tagging models +# _is_ the opaque key ("lb:..."), but the collections code is too low-level to +# know about opaque keys of the entities. We don't even know which learning +# context (which content library) a given entity is in. 
diff --git a/src/openedx_content/applets/collections/tasks.py b/src/openedx_content/applets/collections/tasks.py new file mode 100644 index 00000000..6b5a43d9 --- /dev/null +++ b/src/openedx_content/applets/collections/tasks.py @@ -0,0 +1,83 @@ +"""Celery tasks for the collections applet.""" + +import logging +from collections import defaultdict + +from celery import shared_task # type: ignore[import] + +from ..publishing.models import PublishableEntity +from .models import Collection, CollectionPublishableEntity +from .signals import COLLECTION_CHANGED, CollectionChangeData, LearningPackageEventData, UserAttributionEventData + +logger = logging.getLogger(__name__) + + +@shared_task +def emit_collections_changed_for_entity_changes_task( + removed_entity_ids: list[int], + added_entity_ids: list[int], + user_id: int | None, +) -> int: + """ + Emit COLLECTION_CHANGED for each collection affected by entity draft + deletions or restorations. + + For each collection that contains any of the given entities, emits one event + with entities_removed (for deletions) and/or entities_added (for + restorations). A single event covers both if the same collection has + entities in both lists. + + Triggered by ENTITIES_DRAFT_CHANGED. New entities (old_version_id=None, + new_version_id is not None) that aren't in any collection result in a no-op. 
+ """ + all_entity_ids = list(set(removed_entity_ids) | set(added_entity_ids)) + if not all_entity_ids: + return 0 + + affected_cpes = ( + CollectionPublishableEntity.objects.filter(entity_id__in=all_entity_ids) + .select_related("collection__learning_package") + .order_by("collection_id", "entity_id") + ) + + collection_map: dict[int, Collection] = {} + removed_map: dict[int, list[PublishableEntity.ID]] = defaultdict(list) + added_map: dict[int, list[PublishableEntity.ID]] = defaultdict(list) + removed_set = set(removed_entity_ids) + added_set = set(added_entity_ids) + + for cpe in affected_cpes: + collection_map[cpe.collection_id] = cpe.collection + if cpe.entity_id in removed_set: + removed_map[cpe.collection_id].append(cpe.entity_id) + if cpe.entity_id in added_set: + added_map[cpe.collection_id].append(cpe.entity_id) + + emitted_events = 0 + for collection_id, collection in collection_map.items(): + # .. event_implemented_name: COLLECTION_CHANGED + # .. event_type: org.openedx.content.collections.collection_changed.v1 + COLLECTION_CHANGED.send_event( + time=collection.modified, + learning_package=LearningPackageEventData( + id=collection.learning_package.id, + title=collection.learning_package.title, + ), + changed_by=UserAttributionEventData(user_id=user_id), + change=CollectionChangeData( + collection_id=collection.id, + collection_code=collection.collection_code, + entities_removed=removed_map[collection_id], + entities_added=added_map[collection_id], + ), + ) + emitted_events += 1 + + if emitted_events: + logger.info( + "Entity draft changes (removed=%s, added=%s): emitted COLLECTION_CHANGED for %s collections.", + removed_entity_ids, + added_entity_ids, + emitted_events, + ) + return emitted_events diff --git a/src/openedx_content/applets/publishing/api.py b/src/openedx_content/applets/publishing/api.py index 8889c1f1..14a71e41 100644 --- a/src/openedx_content/applets/publishing/api.py +++ b/src/openedx_content/applets/publishing/api.py @@ -9,15 +9,17 @@ 
from contextlib import nullcontext from datetime import datetime, timezone +from functools import partial from typing import ContextManager, Optional, cast from django.contrib.auth import get_user_model from django.core.exceptions import ObjectDoesNotExist from django.db.models import F, OuterRef, Prefetch, Q, QuerySet, Subquery -from django.db.transaction import atomic +from django.db.transaction import atomic, on_commit from openedx_django_lib.fields import create_hash_digest +from . import signals from .contextmanagers import DraftChangeLogContext from .models import ( Draft, @@ -114,6 +116,14 @@ def create_learning_package( ) package.full_clean() package.save() + new_id = package.id + + def send_event(): + signals.LEARNING_PACKAGE_CREATED.send_event( + learning_package=signals.LearningPackageEventData(id=new_id, title=title), + ) + + on_commit(send_event) return package @@ -152,6 +162,21 @@ def update_learning_package( lp.updated = updated lp.save() + + # Emit LEARNING_PACKAGE_UPDATED once the transaction commits. Note: we only + # reach this point if at least one of key/title/description/updated was + # passed in (the early-return above handles the no-op case), so the update + # really did touch the row. + lp_id = lp.id + lp_title = lp.title + + def send_event(): + signals.LEARNING_PACKAGE_UPDATED.send_event( + learning_package=signals.LearningPackageEventData(id=lp_id, title=lp_title), + ) + + on_commit(send_event) + return lp @@ -195,7 +220,7 @@ def create_publishable_entity_version( created: datetime, created_by: int | None, *, - dependencies: list[int] | None = None, # PublishableEntity IDs + dependencies: list[PublishableEntity.ID] | None = None, ) -> PublishableEntityVersion: """ Create a PublishableEntityVersion. 
@@ -226,7 +251,7 @@ def create_publishable_entity_version( def set_version_dependencies( version_id: int, # PublishableEntityVersion.id, /, - dependencies: list[int] # List of PublishableEntity.id + dependencies: list[PublishableEntity.ID], ) -> None: """ Set the dependencies of a publishable entity version. @@ -509,6 +534,7 @@ def publish_from_drafts( published_draft_ids.add(draft.pk) _create_side_effects_for_change_log(publish_log) + _emit_event_for_change_log(publish_log, timestamp=published_at, user_id=published_by) return publish_log @@ -962,6 +988,8 @@ def set_draft_version( ) draft.save() _create_side_effects_for_change_log(change_log) + # Send out an event immediately after this database transaction commits, since this is an isolated change. + _emit_event_for_change_log(change_log, timestamp=set_at, user_id=set_by) def _add_to_existing_draft_change_log( @@ -1197,6 +1225,49 @@ def _create_side_effects_for_change_log(change_log: DraftChangeLog | PublishLog) update_dependencies_hash_digests_for_log(change_log) +def _emit_event_for_change_log( + change_log: PublishLog | DraftChangeLog, timestamp: datetime, user_id: int | None +) -> None: + """ + Construct and emit the _CHANGED / _PUBLISHED event when a set of entities is + changed or published. + + Works with either ``DraftChangeLog`` or ``PublishLog``. 
+ """ + + learning_package_id = change_log.learning_package.id + learning_package_title = change_log.learning_package.title + changes = [ + signals.ChangeLogRecordData( + entity_id=record.entity_id, + old_version=record.old_version.version_num if record.old_version else None, + old_version_id=record.old_version_id, + new_version=record.new_version.version_num if record.new_version else None, + new_version_id=record.new_version_id, + direct=record.direct if isinstance(record, PublishLogRecord) else None, + ) + for record in change_log.records.order_by("id").select_related("old_version", "new_version").all() + ] + + change_log_data: signals.DraftChangeLogEventData | signals.PublishLogEventData + if isinstance(change_log, DraftChangeLog): + signal = signals.ENTITIES_DRAFT_CHANGED + change_log_data = signals.DraftChangeLogEventData(draft_change_log_id=change_log.id, changes=changes) + else: + assert isinstance(change_log, PublishLog) + signal = signals.ENTITIES_PUBLISHED + change_log_data = signals.PublishLogEventData(publish_log_id=change_log.id, changes=changes) + + # Send out an event immediately after this database transaction commits. 
+ on_commit(partial( + signal.send_event, + time=timestamp, + learning_package=signals.LearningPackageEventData(id=learning_package_id, title=learning_package_title), + changed_by=signals.UserAttributionEventData(user_id=user_id), + change_log=change_log_data, + )) + + def update_dependencies_hash_digests_for_log( change_log: DraftChangeLog | PublishLog, backfill: bool = False, @@ -1649,11 +1720,14 @@ def bulk_draft_changes_for( with bulk_draft_changes_for(component.learning_package.id): update_one_component(component.learning_package.id, component) """ + if not changed_at: + changed_at = datetime.now(tz=timezone.utc) return DraftChangeLogContext( learning_package_id, changed_at=changed_at, changed_by=changed_by, exit_callbacks=[ _create_side_effects_for_change_log, + partial(_emit_event_for_change_log, timestamp=changed_at, user_id=changed_by), ] ) diff --git a/src/openedx_content/applets/publishing/signal_handlers.py b/src/openedx_content/applets/publishing/signal_handlers.py new file mode 100644 index 00000000..9fe088f2 --- /dev/null +++ b/src/openedx_content/applets/publishing/signal_handlers.py @@ -0,0 +1,41 @@ +""" +Django signal handlers for the publishing applet. +""" + +from functools import partial + +from django.db import transaction +from django.db.models.signals import post_delete +from django.dispatch import receiver + +from .models.learning_package import LearningPackage +from .signals import LEARNING_PACKAGE_DELETED, LearningPackageEventData + + +@receiver(post_delete, sender=LearningPackage) +def emit_learning_package_deleted(sender, instance, **kwargs): # pylint: disable=unused-argument + """ + Emit ``LEARNING_PACKAGE_DELETED`` after a ``LearningPackage`` is deleted. + + This fires for any deletion: single-object ``.delete()``, bulk + ``QuerySet.delete()`` (Django calls ``post_delete`` once per row), or + deletions performed via the Django admin. 
There is currently no official API + for deleting Learning Packages, but you can orphan them by deleting any + references to them such as ``ContentLibrary`` instances in openedx-platform. + + The event is deferred via ``transaction.on_commit`` so that it is only + emitted once the enclosing database transaction has been committed. If + the transaction is rolled back, the row still exists and no event fires. + + Note: by the time this handler runs, the ``LearningPackage`` row has + already been removed from the database (Django preserves ``instance.pk`` + on the in-memory object, but the DB row is gone). We capture ``id`` and + ``title`` at handler-invocation time so that the event payload remains + correct even though the underlying record is no longer retrievable. + """ + transaction.on_commit( + partial( + LEARNING_PACKAGE_DELETED.send_event, + learning_package=LearningPackageEventData(id=instance.id, title=instance.title), + ) + ) diff --git a/src/openedx_content/applets/publishing/signals.py b/src/openedx_content/applets/publishing/signals.py new file mode 100644 index 00000000..148c8c17 --- /dev/null +++ b/src/openedx_content/applets/publishing/signals.py @@ -0,0 +1,238 @@ +""" +Low-level events/signals emitted by openedx_content +""" + +from attrs import define +from openedx_events.tooling import OpenEdxPublicSignal # type: ignore[import-untyped] + +from .models.learning_package import LearningPackage +from .models.publishable_entity import PublishableEntity + +# Public API available via openedx_content.api +__all__ = [ + # All event data structures should end with "...Data": + "LearningPackageEventData", + "UserAttributionEventData", + "ChangeLogRecordData", + "DraftChangeLogEventData", + "PublishLogEventData", + # All events: + "LEARNING_PACKAGE_CREATED", + "LEARNING_PACKAGE_UPDATED", + "LEARNING_PACKAGE_DELETED", + "ENTITIES_DRAFT_CHANGED", + "ENTITIES_PUBLISHED", +] + + +@define +class LearningPackageEventData: + """Identifies which learning package an 
event is associated with.""" + + id: LearningPackage.ID + title: str # Since 'id' is not easily human-understandable, we include the title too + + +@define +class UserAttributionEventData: + """Identifies which user triggered the event.""" + + user_id: int | None + + +@define +class ChangeLogRecordData: + """A single change that was made to a PublishableEntity""" + + entity_id: PublishableEntity.ID + + old_version: int | None + """The old version number of this entity. None if newly-created or un-deleted.""" + old_version_id: int | None + """ + The old version of this entity (the PublishableEntityVersion ID). + This is None if the entity is newly created (or un-deleted). + """ + + new_version: int | None + """The new version number of this entity. None if the entity is now deleted.""" + new_version_id: int | None + """ + The new version of this entity (the PublishableEntityVersion ID). + This is None if the entity is now deleted. + """ + + direct: bool | None = None + """ + Did the user choose to directly publish this specific thing, or was it auto-published because it's a dependency? + (if applicable/known) + """ + + +@define +class DraftChangeLogEventData: + """Summary of a `DraftChangeLog` for event purposes""" + + draft_change_log_id: int + changes: list[ChangeLogRecordData] + + +@define +class PublishLogEventData: + """Summary of a `PublishLog` for event purposes""" + + publish_log_id: int + changes: list[ChangeLogRecordData] + + +LEARNING_PACKAGE_CREATED = OpenEdxPublicSignal( + event_type="org.openedx.content.publishing.lp_created.v1", + data={ + "learning_package": LearningPackageEventData, + }, +) +""" +A new ``LearningPackage`` has been created. + +This is emitted exactly once per ``LearningPackage``, after the row is inserted +in the database. This is a low-level event. 
It's most likely that the Learning +Package is still being prepared/populated, and any necessary relationships, +entities, metadata, or other data may not yet exist at the time this event is +emitted. + +💾 This event is only emitted after the enclosing database transaction has +been committed. If the transaction is rolled back, no event is emitted. + +⏳ This event is emitted synchronously. +""" + + +LEARNING_PACKAGE_UPDATED = OpenEdxPublicSignal( + event_type="org.openedx.content.publishing.lp_updated.v1", + data={ + "learning_package": LearningPackageEventData, + }, +) +""" +A ``LearningPackage``'s own metadata (key, title, and/or description) has been +changed. + +This is emitted only when the ``update_learning_package`` API is called, with at +least one field change that actually modifies the row. + +This event covers changes to the ``LearningPackage`` row itself (its ``key``, +``title``, and ``description``). Changes to the content inside the package +(entities, versions, drafts, publishes) are covered by +``ENTITIES_DRAFT_CHANGED`` and ``ENTITIES_PUBLISHED`` instead. + +The ``learning_package`` payload reflects the ``id`` and the post-update +``title`` of the package. + +💾 This event is only emitted after the enclosing database transaction has +been committed. If the transaction is rolled back, no event is emitted. + +⏳ This event is emitted synchronously. +""" + + +LEARNING_PACKAGE_DELETED = OpenEdxPublicSignal( + event_type="org.openedx.content.publishing.lp_deleted.v1", + data={ + "learning_package": LearningPackageEventData, + }, +) +""" +A ``LearningPackage`` has been deleted. + +This is emitted exactly once per ``LearningPackage``, after the row has been +removed from the database. It is emitted regardless of how the row was deleted +(via a direct ORM ``.delete()`` call, via the Django admin, or as part of a +``QuerySet.delete()``), because it is fired by a Django ``post_delete`` signal +on the ``LearningPackage`` model. 
+ +Note: at the time this event is emitted, the ``LearningPackage`` and all of +its related content (entities, versions, drafts, publishes, etc.) have already +been removed from the database. Handlers cannot look up the learning package +by ID — they only get the ``id`` and ``title`` that are captured in the +``LearningPackageEventData`` payload. + +🗑️ Unlike other ``publishing`` events, the effects of this deletion are +completely irreversible and the LearningPackage cannot be restored/un-deleted. + +💾 This event is only emitted after the enclosing database transaction has +been committed. If the transaction is rolled back, no event is emitted. + +⏳ This event is emitted synchronously. +""" + + +ENTITIES_DRAFT_CHANGED = OpenEdxPublicSignal( + event_type="org.openedx.content.publishing.entities_draft_changed.v1", + data={ + "learning_package": LearningPackageEventData, + "changed_by": UserAttributionEventData, + "change_log": DraftChangeLogEventData, + }, +) +""" +The draft version of one or more entities in a `LearningPackage` has changed. + +This is emitted when the first version of an entity is **created**, when a new +version of an entity is created (i.e. an entity is **modified**), when an entity +is **reverted** to an old version, when **a dependency is modified**, or when an +entity is **deleted**. (All referring to the draft version of the entity.) + +The ``old_version`` and ``new_version`` fields can be used to distinguish among +these cases (e.g. ``old_version`` is ``None`` for newly-created entities). + +This is a low-level batch event. It does not have any course or library context +information available. It does not distinguish between Containers, Components, +or other entity types. + +Collections and tags are not `PublishableEntity`-based, so do not participate in +this event. + +💾 This event is only emitted after the enclosing database transaction has +been committed. If the transaction is rolled back, no event is emitted. 
+ +⏳ This **batch** event is emitted **synchronously**. Handlers that do anything +per-entity or that is possibly slow should dispatch an asynchronous task for +processing the event. +""" + + +ENTITIES_PUBLISHED = OpenEdxPublicSignal( + event_type="org.openedx.content.publishing.entities_published.v1", + data={ + "learning_package": LearningPackageEventData, + "changed_by": UserAttributionEventData, + "change_log": PublishLogEventData, + }, +) +""" +The published version of one or more entities in a `LearningPackage` has +changed. + +This is emitted when **a newly-created entity is first published**, when +**changes to an existing entity** are published, when **changes to a +dependency** (or a dependency's dependencies...) are published, when a published +entity is **reverted** to a previous version, or when **a "delete" is +published**. + +The ``old_version`` and ``new_version`` fields can be used to distinguish among +these cases (e.g. ``old_version`` is ``None`` for newly-created entities). + +This is a low-level batch event. It does not have any course or library context +information available. It does not distinguish between Containers, Components, +or other entity types. + +Collections and tags are not `PublishableEntity`-based, so do not participate in +this event. + +💾 This event is only emitted after the enclosing database transaction has +been committed. If the transaction is rolled back, no event is emitted. + +⏳ This **batch** event is emitted **synchronously**. Handlers that do anything +per-entity or that is possibly slow should dispatch an asynchronous task for +processing the event. +""" diff --git a/src/openedx_content/apps.py b/src/openedx_content/apps.py index 563d5d03..9f2e74f2 100644 --- a/src/openedx_content/apps.py +++ b/src/openedx_content/apps.py @@ -45,8 +45,9 @@ def register_publishable_models(self): def ready(self): """ - Currently used to register publishable models. - - May later be used to register signal handlers as well. 
+ Currently used to register publishable models and signal handlers. """ self.register_publishable_models() + # Import signal handlers so Django registers all @receiver callbacks. + from .applets.collections import signal_handlers # pylint: disable=unused-import + from .applets.publishing import signal_handlers as _publishing_signal_handlers diff --git a/src/openedx_content/signals.py b/src/openedx_content/signals.py new file mode 100644 index 00000000..848ae550 --- /dev/null +++ b/src/openedx_content/signals.py @@ -0,0 +1,20 @@ +""" +Signals that are part of the public API of openedx_content. + +Import these as e.g. `from openedx_content.api import signals` or as +`from openedx_content import api as content_api` -> `content_api.signals._____` + +These signals may be moved into openedx_events at some point. +""" + +# This intermediate file is necessary so we can (1) filter the applet .signals +# module exports using `__all__` (we don't want to import models like +# `LearningPackage` that happen to be used in our `signals.py` files), and (2) +# so we can still namespace these under `api.signals.____` (see `api.py` for +# details on why). + +# These wildcard imports are okay because these api modules declare __all__ +# to define which symbols are public. +# pylint: disable=wildcard-import +from .applets.collections.signals import * +from .applets.publishing.signals import * diff --git a/tests/openedx_content/applets/collections/test_signals.py b/tests/openedx_content/applets/collections/test_signals.py new file mode 100644 index 00000000..7c5c5fab --- /dev/null +++ b/tests/openedx_content/applets/collections/test_signals.py @@ -0,0 +1,562 @@ +""" +Tests for the COLLECTION_CHANGED signal. 
+""" + +from datetime import datetime, timezone + +import pytest + +from openedx_content import api +from openedx_content.applets.collections.signals import COLLECTION_CHANGED, CollectionChangeData +from openedx_content.models_api import Collection, LearningPackage, PublishableEntity +from tests.utils import abort_transaction, capture_events + +pytestmark = pytest.mark.django_db(transaction=True) +now_time = datetime.now(tz=timezone.utc) + + +@pytest.fixture(name="lp1") +def _lp1() -> LearningPackage: + """A learning package for use across collection signal tests.""" + return api.create_learning_package(package_ref="lp1", title="Test LP 📦") + + +def _create_entity(learning_package_id: LearningPackage.ID, entity_ref: str) -> PublishableEntity: + """Helper: create a bare PublishableEntity in the given learning package.""" + return api.create_publishable_entity(learning_package_id, entity_ref=entity_ref, created=now_time, created_by=None) + + +# COLLECTION_CHANGED — create_collection + + +def test_create_collection(lp1: LearningPackage, admin_user) -> None: + """ + Test that COLLECTION_CHANGED is emitted with created=True + when a new collection is created. + """ + with capture_events(expected_count=1) as captured: + collection = api.create_collection( + lp1.id, + collection_code="col1", + title="Collection 1", + created_by=admin_user.id, + ) + + event = captured[0] + assert event.signal is COLLECTION_CHANGED + assert event.kwargs["learning_package"].id == lp1.id + assert event.kwargs["learning_package"].title == "Test LP 📦" + assert event.kwargs["changed_by"].user_id == admin_user.id + assert event.kwargs["change"] == CollectionChangeData( + collection_id=collection.id, + collection_code="col1", + created=True, + ) + collection.refresh_from_db() + # Note: unfortunately collection.modified is slightly different than collection.created + # (see https://code.djangoproject.com/ticket/16745). It would be nice if we made them exactly the same. 
+ assert event.kwargs["metadata"].time == collection.modified + + +def test_create_collection_disabled(lp1: LearningPackage) -> None: + """ + Test that no event is emitted when a collection is created with enabled=False. + + A disabled collection is invisible to consumers, so there is nothing to notify about. + """ + with capture_events(expected_count=0): + api.create_collection( + lp1.id, + collection_code="col1", + title="Collection 1", + created_by=None, + enabled=False, + ) + + # And if that disabled collection is deleted, no event is emitted. We don't want to emit a deleted event for a + # collection that never had a created event. + with capture_events(expected_count=0): + api.delete_collection(lp1.id, collection_code="col1", hard_delete=True) + + +def test_create_collection_disabled_then_enabled(lp1: LearningPackage) -> None: + """ + Test that no event is emitted when a collection is created already soft + deleted (with enabled=False), but IS emitted when we enable/un-delete it. + """ + with capture_events(expected_count=0): + collection = api.create_collection( + lp1.id, + collection_code="col1", + title="Collection 1", + created_by=None, + enabled=False, + ) + + # Enabling (un-deleting) that collection will result in a "created" event: + with capture_events(expected_count=1) as captured: + api.restore_collection(lp1.id, collection_code="col1") # FIXME: we can't specify a user here. + + event = captured[0] + assert event.signal is COLLECTION_CHANGED + assert event.kwargs["learning_package"].id == lp1.id + assert event.kwargs["changed_by"].user_id is None + assert event.kwargs["change"] == CollectionChangeData( + collection_id=collection.id, + collection_code="col1", + created=True, + ) + + +def test_create_collection_aborted(lp1: LearningPackage) -> None: + """ + Test that no event is emitted when a collection creation is rolled back. 
+ """ + with capture_events(expected_count=0): + with abort_transaction(): + api.create_collection( + lp1.id, + collection_code="col1", + title="Collection 1", + created_by=None, + ) + + +# COLLECTION_CHANGED — update_collection + + +def test_update_collection(lp1: LearningPackage) -> None: + """ + Test that COLLECTION_CHANGED is emitted with metadata_modified=True + when a collection's title or description is updated. + """ + collection = api.create_collection(lp1.id, "col1", title="Collection 1", created_by=None) + orig_modified = collection.modified + + with capture_events(expected_count=1) as captured: + api.update_collection(lp1.id, "col1", title="Updated Title") + + event = captured[0] + assert event.signal is COLLECTION_CHANGED + assert event.kwargs["learning_package"].id == lp1.id + assert event.kwargs["change"] == CollectionChangeData( + collection_id=collection.id, + collection_code="col1", + metadata_modified=True, + ) + collection.refresh_from_db() + assert collection.modified > orig_modified + assert event.kwargs["metadata"].time == collection.modified + + +def test_update_collection_no_op(lp1: LearningPackage) -> None: + """ + Test that COLLECTION_CHANGED is NOT emitted when + update_collection is called without any fields to update. + """ + api.create_collection(lp1.id, "col1", title="Collection 1", created_by=None) + + with capture_events(expected_count=0): + # No title or description provided — the API short-circuits with no DB write. + api.update_collection(lp1.id, "col1") + + +# COLLECTION_CHANGED — delete_collection + + +def test_delete_collection_soft(lp1: LearningPackage) -> None: + """ + Test that COLLECTION_CHANGED is emitted with deleted=True + when a collection is soft-deleted (enabled=False). 
+ """ + collection = api.create_collection(lp1.id, "col1", title="Collection 1", created_by=None) + entity1 = _create_entity(lp1.id, "entity1") + entity2 = _create_entity(lp1.id, "entity2") + api.add_to_collection( + lp1.id, + "col1", + PublishableEntity.objects.filter(id__in=[entity1.id, entity2.id]), + ) + + with capture_events(expected_count=1) as captured: + api.delete_collection(lp1.id, "col1") + + event = captured[0] + assert event.signal is COLLECTION_CHANGED + assert event.kwargs["learning_package"].id == lp1.id + assert event.kwargs["change"] == CollectionChangeData( + collection_id=collection.id, + collection_code="col1", + deleted=True, + entities_removed=sorted([entity1.id, entity2.id]), + ) + + +def test_delete_collection_hard(lp1: LearningPackage) -> None: + """ + Test that COLLECTION_CHANGED is emitted with deleted=True and + entities_removed populated when a collection is hard-deleted. + """ + collection = api.create_collection(lp1.id, "col1", title="Collection 1", created_by=None) + entity1 = _create_entity(lp1.id, "entity1") + entity2 = _create_entity(lp1.id, "entity2") + api.add_to_collection( + lp1.id, + "col1", + PublishableEntity.objects.filter(id__in=[entity1.id, entity2.id]), + ) + + collection_id = collection.id # Capture before deletion + + with capture_events(expected_count=1) as captured: + api.delete_collection(lp1.id, "col1", hard_delete=True) + + event = captured[0] + assert event.signal is COLLECTION_CHANGED + assert event.kwargs["learning_package"].id == lp1.id + assert event.kwargs["change"] == CollectionChangeData( + collection_id=collection_id, + collection_code="col1", + deleted=True, + entities_removed=sorted([entity1.id, entity2.id]), + ) + + +# COLLECTION_CHANGED — restore_collection + + +def test_restore_collection(lp1: LearningPackage) -> None: + """ + Test that COLLECTION_CHANGED is emitted with created=True + when a soft-deleted collection is restored. 
+ """ + collection = api.create_collection(lp1.id, "col1", title="Collection 1", created_by=None) + api.delete_collection(lp1.id, "col1") # soft-delete first + + with capture_events(expected_count=1) as captured: + api.restore_collection(lp1.id, "col1") + + event = captured[0] + assert event.signal is COLLECTION_CHANGED + assert event.kwargs["learning_package"].id == lp1.id + assert event.kwargs["change"] == CollectionChangeData( + collection_id=collection.id, + collection_code="col1", + created=True, + ) + + +# COLLECTION_CHANGED — add_to_collection + + +def test_add_to_collection(lp1: LearningPackage) -> None: + """ + Test that COLLECTION_CHANGED is emitted with the correct + entities_added list when entities are added to a collection. + """ + collection = api.create_collection(lp1.id, "col1", title="Collection 1", created_by=None) + entity1 = _create_entity(lp1.id, "entity1") + entity2 = _create_entity(lp1.id, "entity2") + + with capture_events(expected_count=1) as captured: + api.add_to_collection( + lp1.id, + "col1", + PublishableEntity.objects.filter(id__in=[entity1.id, entity2.id]), + ) + + event = captured[0] + assert event.signal is COLLECTION_CHANGED + assert event.kwargs["learning_package"].id == lp1.id + assert event.kwargs["change"] == CollectionChangeData( + collection_id=collection.id, + collection_code="col1", + entities_added=sorted([entity1.id, entity2.id]), + ) + + +def test_add_to_collection_aborted(lp1: LearningPackage) -> None: + """ + Test that no event is emitted when adding entities to a collection is rolled back. 
+ """ + api.create_collection(lp1.id, "col1", title="Collection 1", created_by=None) + entity1 = _create_entity(lp1.id, "entity1") + + with capture_events(expected_count=0): + with abort_transaction(): + api.add_to_collection( + lp1.id, + "col1", + PublishableEntity.objects.filter(id=entity1.id), + ) + + +# COLLECTION_CHANGED — remove_from_collection + + +def test_remove_from_collection(lp1: LearningPackage) -> None: + """ + Test that COLLECTION_CHANGED is emitted with the correct + entities_removed list when entities are removed from a collection. + """ + collection = api.create_collection(lp1.id, "col1", title="Collection 1", created_by=None) + entity1 = _create_entity(lp1.id, "entity1") + entity2 = _create_entity(lp1.id, "entity2") + api.add_to_collection( + lp1.id, + "col1", + PublishableEntity.objects.filter(id__in=[entity1.id, entity2.id]), + ) + + with capture_events(expected_count=1) as captured: + api.remove_from_collection( + lp1.id, + "col1", + PublishableEntity.objects.filter(id=entity1.id), + ) + + event = captured[0] + assert event.signal is COLLECTION_CHANGED + assert event.kwargs["learning_package"].id == lp1.id + assert event.kwargs["change"] == CollectionChangeData( + collection_id=collection.id, + collection_code="col1", + entities_removed=[entity1.id], + ) + + +# COLLECTION_CHANGED — set_collections + + +def test_set_collections(lp1: LearningPackage, admin_user) -> None: + """ + Test that COLLECTION_CHANGED is emitted once per affected + collection when set_collections reassigns an entity's collections. + + In this scenario entity starts in col1+col2, then is moved to col2+col3. + We expect two events: one for col1 (entity removed) and one for col3 (entity added). + col2 is unchanged so it should not emit an event. 
+    """
+    col1 = api.create_collection(lp1.id, "col1", title="Collection 1", created_by=None)
+    col2 = api.create_collection(lp1.id, "col2", title="Collection 2", created_by=None)
+    col3 = api.create_collection(lp1.id, "col3", title="Collection 3", created_by=None)
+    entity = _create_entity(lp1.id, "entity1")
+
+    # Put entity in col1 + col2 to start with
+    api.set_collections(entity, Collection.objects.filter(id__in=[col1.id, col2.id]))
+
+    # Reassign: entity goes into col2 + col3 (col1 removed, col3 added)
+    with capture_events(expected_count=2) as captured:
+        api.set_collections(entity, Collection.objects.filter(id__in=[col2.id, col3.id]), created_by=admin_user.id)
+
+    events_by_collection = {e.kwargs["change"].collection_id: e for e in captured}
+    assert set(events_by_collection.keys()) == {col1.id, col3.id}
+
+    # col1: entity was removed
+    col1_removed_event = events_by_collection[col1.id].kwargs
+    assert col1_removed_event["changed_by"].user_id == admin_user.id
+    assert col1_removed_event["change"] == CollectionChangeData(
+        collection_id=col1.id,
+        collection_code="col1",
+        entities_removed=[entity.id],
+    )
+
+    # col3: entity was added
+    col3_added_event = events_by_collection[col3.id].kwargs
+    assert col3_added_event["changed_by"].user_id == admin_user.id
+    assert col3_added_event["change"] == CollectionChangeData(
+        collection_id=col3.id,
+        collection_code="col3",
+        entities_added=[entity.id],
+    )
+    # The collections were modified simultaneously:
+    assert col1_removed_event["metadata"].time == col3_added_event["metadata"].time
+
+
+def test_set_collections_aborted(lp1: LearningPackage) -> None:
+    """
+    Test that no events are emitted when set_collections is rolled back.
+ """ + col1 = api.create_collection(lp1.id, "col1", title="Collection 1", created_by=None) + entity = _create_entity(lp1.id, "entity1") + + with capture_events(expected_count=0): + with abort_transaction(): + api.set_collections(entity, Collection.objects.filter(id=col1.id)) + + +# COLLECTION_CHANGED — on entity draft deletion + + +def _create_entity_with_version(learning_package_id: LearningPackage.ID, entity_ref: str) -> PublishableEntity: + """Helper: create a PublishableEntity with an initial draft version (so its draft can be deleted).""" + entity = api.create_publishable_entity( + learning_package_id, entity_ref=entity_ref, created=now_time, created_by=None + ) + api.create_publishable_entity_version(entity.id, version_num=1, title=entity_ref, created=now_time, created_by=None) + return entity + + +def test_entity_draft_deleted_in_collection(lp1: LearningPackage, admin_user) -> None: + """ + Test that COLLECTION_CHANGED is emitted with entities_removed + when an entity's draft is deleted and that entity is in a collection. + """ + collection = api.create_collection(lp1.id, "col1", title="Collection 1", created_by=None) + entity = _create_entity_with_version(lp1.id, "entity1") + api.add_to_collection(lp1.id, "col1", PublishableEntity.objects.filter(id=entity.id)) + + with capture_events(signals=[COLLECTION_CHANGED], expected_count=1) as captured: + api.soft_delete_draft(entity.id, deleted_by=admin_user.id) + + event = captured[0] + assert event.signal is COLLECTION_CHANGED + assert event.kwargs["learning_package"].id == lp1.id + assert event.kwargs["changed_by"].user_id == admin_user.id + assert event.kwargs["change"] == CollectionChangeData( + collection_id=collection.id, + collection_code="col1", + entities_removed=[entity.id], + ) + + +def test_entity_draft_deleted_multiple_collections(lp1: LearningPackage) -> None: + """ + Test that COLLECTION_CHANGED is emitted once per collection + when a deleted entity belongs to multiple collections. 
+ """ + col1 = api.create_collection(lp1.id, "col1", title="Collection 1", created_by=None) + col2 = api.create_collection(lp1.id, "col2", title="Collection 2", created_by=None) + entity = _create_entity_with_version(lp1.id, "entity1") + api.add_to_collection(lp1.id, "col1", PublishableEntity.objects.filter(id=entity.id)) + api.add_to_collection(lp1.id, "col2", PublishableEntity.objects.filter(id=entity.id)) + + with capture_events(signals=[COLLECTION_CHANGED], expected_count=2) as captured: + api.soft_delete_draft(entity.id) + + events_by_collection = {e.kwargs["change"].collection_id: e for e in captured} + assert set(events_by_collection.keys()) == {col1.id, col2.id} + assert events_by_collection[col1.id].kwargs["change"] == CollectionChangeData( + collection_id=col1.id, + collection_code="col1", + entities_removed=[entity.id], + ) + assert events_by_collection[col2.id].kwargs["change"] == CollectionChangeData( + collection_id=col2.id, + collection_code="col2", + entities_removed=[entity.id], + ) + + +def test_entity_draft_deleted_not_in_collection(lp1: LearningPackage) -> None: + """ + Test that no COLLECTION_CHANGED is emitted when the deleted + entity is not in any collection. + """ + entity = _create_entity_with_version(lp1.id, "entity1") + + with capture_events(signals=[COLLECTION_CHANGED], expected_count=0): + api.soft_delete_draft(entity.id) + + +def test_entity_draft_deleted_aborted(lp1: LearningPackage) -> None: + """ + Test that no COLLECTION_CHANGED is emitted when the + entity-delete transaction is rolled back. 
+ """ + api.create_collection(lp1.id, "col1", title="Collection 1", created_by=None) + entity = _create_entity_with_version(lp1.id, "entity1") + api.add_to_collection(lp1.id, "col1", PublishableEntity.objects.filter(id=entity.id)) + + with capture_events(signals=[COLLECTION_CHANGED], expected_count=0): + with abort_transaction(): + api.soft_delete_draft(entity.id) + + +# COLLECTION_CHANGED — on entity draft restore (deletion reverted) + + +def test_entity_draft_restored_in_collection(lp1: LearningPackage) -> None: + """ + Test that COLLECTION_CHANGED is emitted with entities_added + when a soft-deleted entity's draft is restored while it is in a collection. + """ + collection = api.create_collection(lp1.id, "col1", title="Collection 1", created_by=None) + entity = _create_entity_with_version(lp1.id, "entity1") + api.add_to_collection(lp1.id, "col1", PublishableEntity.objects.filter(id=entity.id)) + api.soft_delete_draft(entity.id) + + with capture_events(signals=[COLLECTION_CHANGED], expected_count=1) as captured: + api.create_publishable_entity_version( + entity.id, version_num=2, title="entity1 v2", created=now_time, created_by=None + ) + + event = captured[0] + assert event.signal is COLLECTION_CHANGED + assert event.kwargs["learning_package"].id == lp1.id + assert event.kwargs["change"] == CollectionChangeData( + collection_id=collection.id, + collection_code="col1", + entities_added=[entity.id], + ) + + +def test_entity_draft_restored_multiple_collections(lp1: LearningPackage) -> None: + """ + Test that COLLECTION_CHANGED is emitted once per collection + when a restored entity belongs to multiple collections. 
+ """ + col1 = api.create_collection(lp1.id, "col1", title="Collection 1", created_by=None) + col2 = api.create_collection(lp1.id, "col2", title="Collection 2", created_by=None) + entity = _create_entity_with_version(lp1.id, "entity1") + api.add_to_collection(lp1.id, "col1", PublishableEntity.objects.filter(id=entity.id)) + api.add_to_collection(lp1.id, "col2", PublishableEntity.objects.filter(id=entity.id)) + original_version = api.get_draft_version(entity) + assert original_version is not None + + api.soft_delete_draft(entity.id) + + with capture_events(signals=[COLLECTION_CHANGED], expected_count=2) as captured: + # Restore the deleted draft to its previous version: + api.set_draft_version(entity.id, original_version.id) + + events_by_collection = {e.kwargs["change"].collection_id: e for e in captured} + assert set(events_by_collection.keys()) == {col1.id, col2.id} + assert events_by_collection[col1.id].kwargs["change"] == CollectionChangeData( + collection_id=col1.id, + collection_code="col1", + entities_added=[entity.id], + ) + assert events_by_collection[col2.id].kwargs["change"] == CollectionChangeData( + collection_id=col2.id, + collection_code="col2", + entities_added=[entity.id], + ) + + +def test_entity_draft_restored_aborted(lp1: LearningPackage) -> None: + """ + Test that no COLLECTION_CHANGED is emitted when the + restore transaction is rolled back. 
+ """ + api.create_collection(lp1.id, "col1", title="Collection 1", created_by=None) + entity = _create_entity_with_version(lp1.id, "entity1") + api.add_to_collection(lp1.id, "col1", PublishableEntity.objects.filter(id=entity.id)) + api.soft_delete_draft(entity.id) + + with capture_events(signals=[COLLECTION_CHANGED], expected_count=0): + with abort_transaction(): + api.create_publishable_entity_version( + entity.id, version_num=2, title="entity1 v2", created=now_time, created_by=None + ) + + +def test_entity_created_no_collection_event(lp1: LearningPackage) -> None: + """ + Test that no COLLECTION_CHANGED is emitted when a brand-new + entity gets its first version — even though the change log also has old_version=None. + + A freshly created entity is never in any collections yet, so the task is a no-op. + """ + with capture_events(signals=[COLLECTION_CHANGED], expected_count=0): + _create_entity_with_version(lp1.id, "entity1") diff --git a/tests/openedx_content/applets/containers/test_api.py b/tests/openedx_content/applets/containers/test_api.py index 0e16b0d9..8c5acee8 100644 --- a/tests/openedx_content/applets/containers/test_api.py +++ b/tests/openedx_content/applets/containers/test_api.py @@ -373,10 +373,10 @@ def test_create_container_queries(lp: LearningPackage, child_entity1: TestEntity "container_cls": TestContainer, } # The exact numbers here aren't too important - this is just to alert us if anything significant changes. 
- with django_assert_num_queries(31): + with django_assert_num_queries(33): containers_api.create_container_and_version(lp.id, container_code="c1", **base_args) # And try with a a container that has children: - with django_assert_num_queries(32): + with django_assert_num_queries(34): containers_api.create_container_and_version(lp.id, container_code="c2", **base_args, entities=[child_entity1]) @@ -943,7 +943,7 @@ def test_contains_unpublished_changes_queries( assert containers_api.contains_unpublished_changes(grandparent.id) # Publish grandparent and all its descendants: - with django_assert_num_queries(136): # TODO: investigate as this seems high! + with django_assert_num_queries(138): # TODO: investigate as this seems high! publish_entity(grandparent) # Tests: @@ -1263,7 +1263,7 @@ def test_uninstalled_publish( """Simple test of publishing a container of uninstalled type, plus its child, and reviewing the publish log""" # Publish container_of_uninstalled_type (and child_entity1). Should not affect anything else, # but we should see "child_entity1" omitted from the subsequent publish. - with django_assert_num_queries(50): + with django_assert_num_queries(52): publish_log = publish_entity(container_of_uninstalled_type) # Nothing else should have been affected by the publish: assert list(publish_log.records.order_by("entity__pk").values_list("entity__entity_ref", flat=True)) == [ @@ -1301,7 +1301,7 @@ def test_deep_publish_log( ) # Publish container_of_uninstalled_type (and child_entity1). Should not affect anything else, # but we should see "child_entity1" omitted from the subsequent publish. - with django_assert_num_queries(50): + with django_assert_num_queries(52): publish_log = publish_entity(container_of_uninstalled_type) # Nothing else should have been affected by the publish: assert list(publish_log.records.order_by("entity__pk").values_list("entity__entity_ref", flat=True)) == [ @@ -1310,7 +1310,7 @@ def test_deep_publish_log( ] # Publish great_grandparent. 
Should publish the whole tree. - with django_assert_num_queries(127): + with django_assert_num_queries(129): publish_log = publish_entity(great_grandparent) assert list(publish_log.records.order_by("entity__pk").values_list("entity__entity_ref", flat=True)) == [ "child_entity2", diff --git a/tests/openedx_content/applets/publishing/test_models.py b/tests/openedx_content/applets/publishing/test_models.py index ccfbd9fe..edd3a0a4 100644 --- a/tests/openedx_content/applets/publishing/test_models.py +++ b/tests/openedx_content/applets/publishing/test_models.py @@ -27,5 +27,5 @@ class FooEntityVersion(PublishableEntityVersionMixin): # Test typing of PublishableEntity identifiers. pe = PublishableEntity() - assert_type(pe.pk, PublishableEntity.ID) - assert_type(pe.id, PublishableEntity.ID) # `id` should show as deprecated + assert_type(pe.pk, PublishableEntity.ID) # `pk` should show as deprecated + assert_type(pe.id, PublishableEntity.ID) diff --git a/tests/openedx_content/applets/publishing/test_signals.py b/tests/openedx_content/applets/publishing/test_signals.py new file mode 100644 index 00000000..405f933a --- /dev/null +++ b/tests/openedx_content/applets/publishing/test_signals.py @@ -0,0 +1,502 @@ +""" +Tests related to the Catalog signal handlers +""" + +from datetime import datetime, timezone +from typing import Any + +import pytest + +from openedx_content import api +from openedx_content.models_api import LearningPackage, PublishableEntity, PublishLog +from tests.utils import abort_transaction, capture_events + +pytestmark = pytest.mark.django_db(transaction=True) +now_time = datetime.now(tz=timezone.utc) + + +def publish_entity(obj: PublishableEntity) -> PublishLog: + """Helper method to publish a single entity.""" + lp_id = obj.learning_package_id + return api.publish_from_drafts(lp_id, draft_qset=api.get_all_drafts(lp_id).filter(entity=obj)) + + +def change_record(obj: PublishableEntity, old_version: int | None, new_version: int | None, direct: bool | None = 
None): + """Helper function to construct ChangeLogRecordData() using only version numbers instead of numbers+IDs""" + old_version_id = obj.versions.get(version_num=old_version).id if old_version is not None else None + new_version_id = obj.versions.get(version_num=new_version).id if new_version is not None else None + return api.signals.ChangeLogRecordData( + entity_id=obj.id, + old_version=old_version, + old_version_id=old_version_id, + new_version=new_version, + new_version_id=new_version_id, + direct=direct, + ) + + +# LEARNING_PACKAGE_CREATED + + +def test_learning_package_created() -> None: + """ + Test that LEARNING_PACKAGE_CREATED is emitted when a new ``LearningPackage`` + is created. + """ + with capture_events(signals=[api.signals.LEARNING_PACKAGE_CREATED], expected_count=1) as captured: + learning_package = api.create_learning_package(package_ref="lp1", title="Test LP 📦") + + event = captured[0] + assert event.signal is api.signals.LEARNING_PACKAGE_CREATED + assert event.kwargs["learning_package"].id == learning_package.id + assert event.kwargs["learning_package"].title == "Test LP 📦" + + +def test_learning_package_created_not_emitted_on_update() -> None: + """ + Test that updating an existing ``LearningPackage`` does NOT emit + LEARNING_PACKAGE_CREATED. The event is only for new rows. + """ + learning_package = api.create_learning_package(package_ref="lp1", title="Test LP 📦") + + with capture_events(signals=[api.signals.LEARNING_PACKAGE_CREATED], expected_count=0): + api.update_learning_package(learning_package.id, title="Updated Title") + + +def test_learning_package_created_aborted() -> None: + """ + Test that LEARNING_PACKAGE_CREATED is NOT emitted when the transaction + that created the ``LearningPackage`` is rolled back. 
+ """ + with capture_events(signals=[api.signals.LEARNING_PACKAGE_CREATED], expected_count=0): + with abort_transaction(): + api.create_learning_package(package_ref="lp1", title="Test LP 📦") + + +# LEARNING_PACKAGE_UPDATED + + +def test_learning_package_updated() -> None: + """ + Test that LEARNING_PACKAGE_UPDATED is emitted when + ``update_learning_package`` actually changes a field, and that the payload + reflects the post-update title. + """ + learning_package = api.create_learning_package(package_ref="lp1", title="Original Title") + + with capture_events(signals=[api.signals.LEARNING_PACKAGE_UPDATED], expected_count=1) as captured: + api.update_learning_package(learning_package.id, title="New Title 📦") + + event = captured[0] + assert event.signal is api.signals.LEARNING_PACKAGE_UPDATED + assert event.kwargs["learning_package"].id == learning_package.id + assert event.kwargs["learning_package"].title == "New Title 📦" + + +def test_learning_package_updated_noop() -> None: + """ + Test that LEARNING_PACKAGE_UPDATED is NOT emitted when + ``update_learning_package`` is called with no field changes (the early + return in the API means the row is never saved). + """ + learning_package = api.create_learning_package(package_ref="lp1", title="Test LP 📦") + + with capture_events(signals=[api.signals.LEARNING_PACKAGE_UPDATED], expected_count=0): + api.update_learning_package(learning_package.id) + + +def test_learning_package_updated_aborted() -> None: + """ + Test that LEARNING_PACKAGE_UPDATED is NOT emitted when the transaction + that would have updated the ``LearningPackage`` is rolled back. 
+ """ + learning_package = api.create_learning_package(package_ref="lp1", title="Original Title") + + with capture_events(signals=[api.signals.LEARNING_PACKAGE_UPDATED], expected_count=0): + with abort_transaction(): + api.update_learning_package(learning_package.id, title="Not going to stick") + + # Confirm the title was not actually changed: + learning_package.refresh_from_db() + assert learning_package.title == "Original Title" + + +# LEARNING_PACKAGE_DELETED + + +def test_learning_package_deleted() -> None: + """ + Test that LEARNING_PACKAGE_DELETED is emitted when a ``LearningPackage`` + is deleted. + """ + learning_package = api.create_learning_package(package_ref="lp1", title="Test LP 📦") + lp_id = learning_package.id + + with capture_events(signals=[api.signals.LEARNING_PACKAGE_DELETED], expected_count=1) as captured: + learning_package.delete() + + event = captured[0] + assert event.signal is api.signals.LEARNING_PACKAGE_DELETED + assert event.kwargs["learning_package"].id == lp_id + assert event.kwargs["learning_package"].title == "Test LP 📦" + + +def test_learning_package_deleted_via_queryset() -> None: + """ + Test that LEARNING_PACKAGE_DELETED fires once per row when multiple + ``LearningPackage`` instances are deleted via a ``QuerySet.delete()``. + """ + lp1 = api.create_learning_package(package_ref="lp1", title="LP 1") + lp2 = api.create_learning_package(package_ref="lp2", title="LP 2") + + with capture_events(signals=[api.signals.LEARNING_PACKAGE_DELETED], expected_count=2) as captured: + LearningPackage.objects.filter(id__in=[lp1.id, lp2.id]).delete() + + deleted_ids = {event.kwargs["learning_package"].id for event in captured} + assert deleted_ids == {lp1.id, lp2.id} + + +def test_learning_package_deleted_aborted() -> None: + """ + Test that LEARNING_PACKAGE_DELETED is NOT emitted when the transaction + that would have deleted the ``LearningPackage`` is rolled back. 
+ """ + learning_package = api.create_learning_package(package_ref="lp1", title="Test LP 📦") + lp_id = learning_package.id + + with capture_events(signals=[api.signals.LEARNING_PACKAGE_DELETED], expected_count=0): + with abort_transaction(): + learning_package.delete() + + # Confirm it's still in the database (the row survived the rollback). + # Note: we can't use ``learning_package.id`` here because Django sets + # ``instance.id = None`` after ``.delete()``, even if the transaction + # ultimately rolls back; that's why we captured it beforehand. + assert LearningPackage.objects.filter(id=lp_id).exists() + + +# ENTITIES_DRAFT_CHANGED + + +def test_single_entity_changed() -> None: + """ + Test that ENTITIES_DRAFT_CHANGED is emitted when we change a publishable entity. + """ + learning_package = api.create_learning_package(package_ref="lp1", title="Test LP 📦") + + # Note: creating an entity does not emit any events until we create a version of that entity. + with capture_events(expected_count=0): + entity = api.create_publishable_entity( + learning_package.id, entity_ref="entity1", created=now_time, created_by=None + ) + + NEW_VERSION_NUM = 3 # Just for fun let's use a version number other than 1 + + with capture_events(expected_count=1) as captured: + v1 = api.create_publishable_entity_version( + entity.id, version_num=NEW_VERSION_NUM, title="Entity 1 V3", created=now_time, created_by=None + ) + + entity.refresh_from_db() + assert api.get_draft_version(entity.id) == v1 + + # Because only one change (create_..._version) has affected this version, it's easy for us to get its DraftChangeLog + expected_draft_change_log_id = v1.draftchangelogrecord_set.get().draft_change_log_id + + event = captured[0] # capture_events(...) context manager already asserted there's only one event. 
+    assert event.signal is api.signals.ENTITIES_DRAFT_CHANGED
+    assert event.kwargs["learning_package"].id == learning_package.id
+    assert event.kwargs["learning_package"].title == "Test LP 📦"
+    assert event.kwargs["changed_by"].user_id is None
+    assert event.kwargs["change_log"].draft_change_log_id == expected_draft_change_log_id
+    assert event.kwargs["change_log"].changes == [
+        change_record(entity, old_version=None, new_version=NEW_VERSION_NUM),
+    ]
+    assert event.kwargs["metadata"].time == now_time
+
+
+def test_single_entity_changed_abort() -> None:
+    """
+    Test that no events are emitted when we roll back a transaction that would have
+    changed a publishable entity.
+    """
+    learning_package = api.create_learning_package(package_ref="lp1", title="Test LP 📦")
+
+    entity = api.create_publishable_entity(learning_package.id, entity_ref="entity1", created=now_time, created_by=None)
+
+    with capture_events(expected_count=0):
+        with abort_transaction():
+            api.create_publishable_entity_version(
+                entity.id, version_num=1, title="Entity 1 V1", created=now_time, created_by=None
+            )
+
+
+def test_multiple_entities_changed(admin_user) -> None:
+    """
+    Test that ENTITIES_DRAFT_CHANGED is emitted when we change several publishable entities in a single edit.
+ """ + learning_package = api.create_learning_package(package_ref="lp1", title="Test LP 📦") + created_args = {"created": now_time, "created_by": admin_user.id} + + # Entity 1 will have no initial version: + entity1 = api.create_publishable_entity(learning_package.id, entity_ref="entity1", **created_args) + # Entity 2 will have an initial version: + entity2 = api.create_publishable_entity(learning_package.id, entity_ref="entity2", **created_args) + api.create_publishable_entity_version(entity2.id, version_num=1, title="Entity 2 V1", **created_args) + # Entity 3 will have an initial version that later gets deleted: + entity3 = api.create_publishable_entity(learning_package.id, entity_ref="entity3", **created_args) + api.create_publishable_entity_version(entity3.id, version_num=1, title="Entity 3 V1", **created_args) + + with capture_events(expected_count=1) as captured: + with api.bulk_draft_changes_for( + learning_package.id, + changed_by=admin_user.id, + changed_at=now_time, + ) as draft_change_log: + # Note: the 'created_args' values below get ignored because of the bulk context. 
+        # Create two versions of entity1:
+        api.create_publishable_entity_version(entity1.id, version_num=1, title="Entity 1 V1", **created_args)
+        api.create_publishable_entity_version(entity1.id, version_num=2, title="Entity 1 V2", **created_args)
+        # Create a version 2 of entity 2:
+        api.create_publishable_entity_version(entity2.id, version_num=2, title="Entity 2 V2", **created_args)
+        # Delete entity 3:
+        api.set_draft_version(entity3.id, None, set_at=now_time, set_by=admin_user.id)
+
+    event = captured[0]
+    assert event.signal is api.signals.ENTITIES_DRAFT_CHANGED
+    assert event.kwargs["learning_package"].id == learning_package.id
+    assert event.kwargs["learning_package"].title == "Test LP 📦"
+    assert event.kwargs["changed_by"].user_id == admin_user.id
+    assert event.kwargs["change_log"].draft_change_log_id == draft_change_log.id
+    assert event.kwargs["change_log"].changes == [
+        # Entity 1 jumps from no version to version 2:
+        change_record(entity1, old_version=None, new_version=2),
+        # Entity 2 jumps v1 -> v2:
+        change_record(entity2, old_version=1, new_version=2),
+        # Entity 3 gets deleted:
+        change_record(entity3, old_version=1, new_version=None),
+    ]
+    assert event.kwargs["metadata"].time == now_time
+
+
+def test_multiple_entities_change_aborted() -> None:
+    """
+    Test that ENTITIES_DRAFT_CHANGED is NOT emitted when we roll back
+    a transaction that would have modified multiple entities in a bulk change.
+ """ + learning_package = api.create_learning_package(package_ref="lp1", title="Test LP 📦") + created_args: dict[str, Any] = {"created": now_time, "created_by": None} + + # Entity 1 will have no initial version: + entity1 = api.create_publishable_entity(learning_package.id, entity_ref="entity1", **created_args) + # Entity 2 will have an initial version: + entity2 = api.create_publishable_entity(learning_package.id, entity_ref="entity2", **created_args) + api.create_publishable_entity_version(entity2.id, version_num=1, title="Entity 2 V1", **created_args) + # Entity 3 will have an initial version that later gets deleted: + entity3 = api.create_publishable_entity(learning_package.id, entity_ref="entity3", **created_args) + api.create_publishable_entity_version(entity3.id, version_num=1, title="Entity 3 V1", **created_args) + + with capture_events(expected_count=0): + with abort_transaction(): + with api.bulk_draft_changes_for(learning_package.id, changed_by=None, changed_at=now_time): + # Note: the 'created_args' values below get ignored because of the bulk context. + # Create two versions of entity1: + api.create_publishable_entity_version(entity1.id, version_num=1, title="Entity 1 V1", **created_args) + api.create_publishable_entity_version(entity1.id, version_num=2, title="Entity 1 V2", **created_args) + # Create a version 2 of entity 2: + api.create_publishable_entity_version(entity2.id, version_num=2, title="Entity 2 V2", **created_args) + # Delete entity 3: + api.set_draft_version(entity3.id, None, set_at=now_time, set_by=None) + + +def test_changes_with_side_effects() -> None: + """ + Test that the ENTITIES_DRAFT_CHANGED event handles dependencies + and side effects. 
+ """ + learning_package = api.create_learning_package(package_ref="lp1", title="Test LP 📦") + created_args: dict[str, Any] = {"created": now_time, "created_by": None} + + # Create entities with dependencies + + def create_entity(name: str, dependencies: list[PublishableEntity.ID] | None = None) -> PublishableEntity: + e = api.create_publishable_entity(learning_package.id, entity_ref=name, **created_args) + api.create_publishable_entity_version( + e.id, version_num=1, title=f"{name} V1", dependencies=dependencies, **created_args + ) + return e + + child1 = create_entity("child1") + parent1 = create_entity("parent1", dependencies=[child1.id]) + + # now, modifying child1 will affect parent1: + with capture_events(expected_count=1) as captured: + api.create_publishable_entity_version(child1.id, version_num=2, title="child1 V2", **created_args) + + event = captured[0] + assert event.signal is api.signals.ENTITIES_DRAFT_CHANGED + assert event.kwargs["change_log"].changes == [ + change_record(child1, old_version=1, new_version=2), # directly modified + change_record(parent1, old_version=1, new_version=1), # side effect + ] + + +# ENTITIES_PUBLISHED + + +def test_publish_events(admin_user) -> None: + """ + Test that ENTITIES_PUBLISHED is emitted when we publish + changes to entities in a learning package. 
+ """ + learning_package = api.create_learning_package(package_ref="lp1", title="Test LP 📦") + created_args = {"created": now_time, "created_by": admin_user.id} + + # Entity 1 will have no initial version: + entity1 = api.create_publishable_entity(learning_package.id, entity_ref="entity1", **created_args) + # Entity 2 will have an initial version with some changes: + entity2 = api.create_publishable_entity(learning_package.id, entity_ref="entity2", **created_args) + api.create_publishable_entity_version(entity2.id, version_num=1, title="Entity 2 V1", **created_args) + api.create_publishable_entity_version(entity2.id, version_num=2, title="Entity 2 V2", **created_args) + # Entity 3 will have an initial version that later gets deleted: + entity3 = api.create_publishable_entity(learning_package.id, entity_ref="entity3", **created_args) + api.create_publishable_entity_version(entity3.id, version_num=1, title="Entity 3 V1", **created_args) + + # Publish these initial changes: + first_publish_time = datetime.now(tz=timezone.utc) + with capture_events(expected_count=1) as captured: + first_log = api.publish_all_drafts( + learning_package.id, published_at=first_publish_time, published_by=admin_user.id + ) + + event = captured[0] + assert event.signal is api.signals.ENTITIES_PUBLISHED + assert event.kwargs["learning_package"].id == learning_package.id + assert event.kwargs["learning_package"].title == "Test LP 📦" + assert event.kwargs["changed_by"].user_id is admin_user.id + assert event.kwargs["change_log"].publish_log_id == first_log.id + assert event.kwargs["change_log"].changes == [ + # Entity 1 is not yet published, since it has no draft version. 
+        # Entity 2 is newly published, and now at v2:
+        change_record(entity2, old_version=None, new_version=2, direct=True),
+        # Entity 3 is newly published, and now at v1:
+        change_record(entity3, old_version=None, new_version=1, direct=True),
+    ]
+    assert event.kwargs["metadata"].time == first_publish_time
+
+    # Now modify the entities again:
+    # Create a version of entity1:
+    api.create_publishable_entity_version(entity1.id, version_num=1, title="Entity 1 V1", **created_args)
+    # Create a version 3 of entity2:
+    api.create_publishable_entity_version(entity2.id, version_num=3, title="Entity 2 V3", **created_args)
+    # Delete entity 3:
+    api.set_draft_version(entity3.id, None, set_at=now_time, set_by=admin_user.id)
+
+    # Publish these new changes:
+    second_publish_time = datetime.now(tz=timezone.utc)
+    with capture_events(expected_count=1) as captured:
+        second_log = api.publish_all_drafts(
+            learning_package.id, published_at=second_publish_time, published_by=admin_user.id
+        )
+
+    event = captured[0]
+    assert event.signal is api.signals.ENTITIES_PUBLISHED
+    assert event.kwargs["learning_package"].id == learning_package.id
+    assert event.kwargs["learning_package"].title == "Test LP 📦"
+    assert event.kwargs["changed_by"].user_id == admin_user.id
+    assert event.kwargs["change_log"].publish_log_id == second_log.id
+    assert event.kwargs["change_log"].changes == [
+        # Entity 1 is newly published at v1:
+        change_record(entity1, old_version=None, new_version=1, direct=True),
+        # Entity 2 jumps v2 -> v3:
+        change_record(entity2, old_version=2, new_version=3, direct=True),
+        # Entity 3 gets deleted:
+        change_record(entity3, old_version=1, new_version=None, direct=True),
+    ]
+    assert event.kwargs["metadata"].time == second_publish_time
+
+
+def test_publish_events_aborted(admin_user) -> None:
+    """
+    Test that ENTITIES_PUBLISHED is NOT emitted when we roll
+    back a transaction that would have published some entities.
+ """ + learning_package = api.create_learning_package(package_ref="lp1", title="Test LP 📦") + created_args = {"created": now_time, "created_by": admin_user.id} + + # Create an entity with some initial version: + entity1 = api.create_publishable_entity(learning_package.id, entity_ref="entity1", **created_args) + api.create_publishable_entity_version(entity1.id, version_num=1, title="Entity 1 V1", **created_args) + + def do_publish(): + draft_qset = api.get_all_drafts(learning_package.id).filter(entity=entity1) + api.publish_from_drafts( + learning_package.id, draft_qset=draft_qset, published_at=now_time, published_by=admin_user.id + ) + + with capture_events(expected_count=0): + with abort_transaction(): + do_publish() + + with capture_events(expected_count=1): + do_publish() + + +def test_publish_with_dependencies() -> None: + """ + Test that the ENTITIES_PUBLISHED event handles dependencies + and side effects. + """ + learning_package = api.create_learning_package(package_ref="lp1", title="Test LP 📦") + created_args: dict[str, Any] = {"created": now_time, "created_by": None} + + # Create entities with dependencies + + def create_entity(name: str, dependencies: list[PublishableEntity.ID] | None = None) -> PublishableEntity: + e = api.create_publishable_entity(learning_package.id, entity_ref=name, **created_args) + api.create_publishable_entity_version( + e.id, version_num=1, title=f"{name} V1", dependencies=dependencies, **created_args + ) + return e + + # 👧👦 children + child1 = create_entity("child1") + child2 = create_entity("child2") + child3 = create_entity("child3") + # 🧓👩 parents + parent1 = create_entity("parent1", dependencies=[child1.id, child2.id]) + parent2 = create_entity("parent2", dependencies=[child2.id, child3.id]) + # 👴👵 grandparents + grandparent1 = create_entity("grandparent1", dependencies=[parent1.id]) + grandparent2 = create_entity("grandparent2", dependencies=[parent2.id]) + + # publish grandparent1 directly and all its dependencies 
indirectly: + with capture_events(expected_count=1) as captured: + publish_entity(grandparent1) + + event = captured[0] + assert event.signal is api.signals.ENTITIES_PUBLISHED + assert event.kwargs["change_log"].changes == [ + change_record(grandparent1, old_version=None, new_version=1, direct=True), + change_record(parent1, old_version=None, new_version=1, direct=False), + change_record(child1, old_version=None, new_version=1, direct=False), + change_record(child2, old_version=None, new_version=1, direct=False), + ] + + # publish the rest: + with capture_events(expected_count=1): + api.publish_all_drafts(learning_package.id) + + # ✨ Now modify 'child3', causing side effects for parent2 and grandparent2 + api.create_publishable_entity_version(child3.id, version_num=2, title="child3 V2", **created_args) + + with capture_events(expected_count=1) as captured: + publish_entity(child3) + + event = captured[0] + assert event.signal is api.signals.ENTITIES_PUBLISHED + assert event.kwargs["change_log"].changes == [ + change_record(child3, old_version=1, new_version=2, direct=True), + change_record(parent2, old_version=1, new_version=1, direct=False), + change_record(grandparent2, old_version=1, new_version=1, direct=False), + ] diff --git a/tests/openedx_content/applets/sections/test_api.py b/tests/openedx_content/applets/sections/test_api.py index fd4b255f..853f5ac5 100644 --- a/tests/openedx_content/applets/sections/test_api.py +++ b/tests/openedx_content/applets/sections/test_api.py @@ -153,9 +153,9 @@ def test_section_queries(self) -> None: """ Test the number of queries needed for each part of the sections API """ - with self.assertNumQueries(37): + with self.assertNumQueries(39): section = self.create_section_with_subsections([self.subsection_1, self.subsection_2_v1]) - with self.assertNumQueries(161): + with self.assertNumQueries(163): content_api.publish_from_drafts( self.learning_package.id, 
draft_qset=content_api.get_all_drafts(self.learning_package.id).filter(entity=section.id), diff --git a/tests/openedx_content/applets/subsections/test_api.py b/tests/openedx_content/applets/subsections/test_api.py index 5d2378b9..29bee46b 100644 --- a/tests/openedx_content/applets/subsections/test_api.py +++ b/tests/openedx_content/applets/subsections/test_api.py @@ -131,9 +131,9 @@ def test_subsection_queries(self) -> None: """ Test the number of queries needed for each part of the subsections API """ - with self.assertNumQueries(37): + with self.assertNumQueries(39): subsection = self.create_subsection_with_units([self.unit_1, self.unit_1_v1]) - with self.assertNumQueries(103): # TODO: this seems high? + with self.assertNumQueries(105): # TODO: this seems high? content_api.publish_from_drafts( self.learning_package.id, draft_qset=content_api.get_all_drafts(self.learning_package.id).filter(entity=subsection.id), diff --git a/tests/openedx_content/applets/units/test_api.py b/tests/openedx_content/applets/units/test_api.py index 349ac36b..49586bdd 100644 --- a/tests/openedx_content/applets/units/test_api.py +++ b/tests/openedx_content/applets/units/test_api.py @@ -132,9 +132,9 @@ def test_unit_queries(self) -> None: """ Test the number of queries needed for each part of the units API """ - with self.assertNumQueries(35): + with self.assertNumQueries(37): unit = self.create_unit_with_components([self.component_1, self.component_2_v1]) - with self.assertNumQueries(49): # TODO: this seems high? + with self.assertNumQueries(51): # TODO: this seems high? 
content_api.publish_from_drafts( self.learning_package.id, draft_qset=content_api.get_all_drafts(self.learning_package.id).filter(entity=unit.id), diff --git a/tests/openedx_content/conftest.py b/tests/openedx_content/conftest.py new file mode 100644 index 00000000..e7289f59 --- /dev/null +++ b/tests/openedx_content/conftest.py @@ -0,0 +1,15 @@ +"""Shared fixtures for openedx_content tests.""" + +import pytest +from celery import current_app # type: ignore[import] + + +@pytest.fixture(autouse=True) +def _celery_task_always_eager(): + """ + Run Celery tasks synchronously so per-entity CONTENT_OBJECT_ASSOCIATIONS_CHANGED + events fire inline during tests without needing a real broker. + """ + current_app.conf.task_always_eager = True + yield + current_app.conf.task_always_eager = False diff --git a/tests/utils.py b/tests/utils.py new file mode 100644 index 00000000..33780691 --- /dev/null +++ b/tests/utils.py @@ -0,0 +1,104 @@ +""" +Shared testing utilities for openedx-core tests. +""" + +from __future__ import annotations + +from contextlib import contextmanager +from dataclasses import dataclass +from typing import Generator + +from django.db import transaction +from openedx_events.tooling import OpenEdxPublicSignal # type: ignore[import-untyped] + + +@dataclass +class CapturedEvent: + """A single captured event emission.""" + + signal: OpenEdxPublicSignal + kwargs: dict + + +@contextmanager +def capture_events( + signals: list[OpenEdxPublicSignal] | None = None, + expected_count: int | None = None, +) -> Generator[list[CapturedEvent], None, None]: + """ + Context manager that captures Open edX events emitted during the block. + + Args: + signals: Optional list of ``OpenEdxPublicSignal`` instances to monitor. + Defaults to all registered signals (OpenEdxPublicSignal.all_events()). + expected_count: How many events are expected (optional). If specified, + will assert that the resulting list has this length. 
+ + Yields: + list[CapturedEvent]: A list that is populated as each event fires. + Each entry has a ``signal`` attribute and a ``kwargs`` + dict containing the event data (learning_package, + changed_by, etc.) plus ``metadata`` and + ``from_event_bus``. + + Example usage:: + + with capture_events(expected_count=1) as captured: + api.do_something(entity.id, ...) + + assert captured[0].signal is ENTITIES_DRAFT_CHANGED + assert captured[0].kwargs['learning_package'].id == learning_package.id + """ + if signals is None: + signals = list(OpenEdxPublicSignal.all_events()) + + captured: list[CapturedEvent] = [] + receivers: dict[OpenEdxPublicSignal, object] = {} + + for signal in signals: + + def make_receiver(sig: OpenEdxPublicSignal): + def receiver(sender, **kwargs): # pylint: disable=unused-argument + kwargs.pop("signal", None) + captured.append(CapturedEvent(signal=sig, kwargs=kwargs)) + + return receiver + + receiver = make_receiver(signal) + signal.connect(receiver) + receivers[signal] = receiver + + try: + yield captured + finally: + for signal, receiver in receivers.items(): + signal.disconnect(receiver) + + if expected_count is not None: + assert len(captured) == expected_count, ( + f"Expected {expected_count} event(s), got {len(captured)}: {[e.signal for e in captured]}" + ) + + +class DeliberateRollbackException(Exception): + """Exception used to deliberately cancel and roll back a DB transaction""" + + +@contextmanager +def abort_transaction() -> Generator[None, None, None]: + """ + Context manager that wraps the block in a transaction that gets rolled back. + + Example usage:: + + with abort_transaction(): + api.do_something(...) + + assert nothing was done + """ + try: + with transaction.atomic(): + yield + raise DeliberateRollbackException + except DeliberateRollbackException: + pass