From 0a54c4ef34d7c34689f77646800a8210071e7a6b Mon Sep 17 00:00:00 2001 From: Ed Rivas Date: Tue, 14 Dec 2021 16:12:52 +0000 Subject: [PATCH 1/8] Track Task-related activities --- metecho/api/admin.py | 10 ++ metecho/api/hook_serializers.py | 9 +- metecho/api/jobs.py | 10 +- metecho/api/migrations/0107_taskactivity.py | 93 +++++++++++++ metecho/api/models.py | 138 ++++++++++++++++++-- metecho/api/serializers.py | 104 ++++++--------- 6 files changed, 280 insertions(+), 84 deletions(-) create mode 100644 metecho/api/migrations/0107_taskactivity.py diff --git a/metecho/api/admin.py b/metecho/api/admin.py index 8ea921c16..e19ea6da2 100644 --- a/metecho/api/admin.py +++ b/metecho/api/admin.py @@ -17,6 +17,7 @@ ScratchOrg, SiteProfile, Task, + TaskActivity, TaskSlug, User, ) @@ -170,6 +171,15 @@ class TaskSlugAdmin(admin.ModelAdmin): search_fields = ("slug",) +@admin.register(TaskActivity) +class TaskActivityAdmin(admin.ModelAdmin): + list_display = ("type", "description", "task", "created_at") + list_filter = ("type",) + list_select_related = ("task",) + date_hierarchy = "created_at" + autocomplete_fields = ("task", "scratch_org") + + @admin.register(ScratchOrg) class ScratchOrgAdmin(admin.ModelAdmin): list_display = ( diff --git a/metecho/api/hook_serializers.py b/metecho/api/hook_serializers.py index 33c87e830..9aad0f3b0 100644 --- a/metecho/api/hook_serializers.py +++ b/metecho/api/hook_serializers.py @@ -37,6 +37,7 @@ class PrSerializer(serializers.Serializer): head = PrBranchSerializer() base = PrBranchSerializer() number = serializers.IntegerField() + title = serializers.CharField() # All other fields are ignored by default. @@ -100,13 +101,13 @@ def process_hook(self): # In all these, our originating user is None, because this # comes from the GitHub hook, and therefore all users on the # frontend should pay attention to it. 
- pr_number = self.validated_data["number"] + pr = self.validated_data["pull_request"] if self._is_opened(): - instance.finalize_pr_opened(pr_number, originating_user_id=None) + instance.finalize_pr_opened(pr, originating_user_id=None) elif self._is_merged(): - instance.finalize_status_completed(pr_number, originating_user_id=None) + instance.finalize_status_completed(pr, originating_user_id=None) else: - instance.finalize_pr_closed(pr_number, originating_user_id=None) + instance.finalize_pr_closed(pr, originating_user_id=None) class CommitSerializer(serializers.Serializer): diff --git a/metecho/api/jobs.py b/metecho/api/jobs.py index 93c13a7b5..b124580f5 100644 --- a/metecho/api/jobs.py +++ b/metecho/api/jobs.py @@ -30,7 +30,7 @@ normalize_commit, try_to_make_branch, ) -from .models import TaskReviewStatus +from .models import TaskActivityType, TaskReviewStatus from .push import report_scratch_org_error from .sf_org_changes import ( commit_changes_to_github, @@ -457,6 +457,14 @@ def commit_changes_from_org( scratch_org.task.add_metecho_git_sha(commit.sha) scratch_org.task.has_unmerged_commits = True scratch_org.task.finalize_task_update(originating_user_id=originating_user_id) + scratch_org.task.log_activity( + type=TaskActivityType.CHANGES_RETRIEVED, + user_id=originating_user_id, + link_url=f"{scratch_org.task.root_project.repo_url}/commit/{commit.sha}", + link_title=commit.sha[:7], + description=commit_message, + scratch_org=scratch_org, + ) scratch_org.refresh_from_db() scratch_org.last_modified_at = now() diff --git a/metecho/api/migrations/0107_taskactivity.py b/metecho/api/migrations/0107_taskactivity.py new file mode 100644 index 000000000..99930cdbb --- /dev/null +++ b/metecho/api/migrations/0107_taskactivity.py @@ -0,0 +1,93 @@ +# Generated by Django 4.0 on 2021-12-10 21:39 + +import django.db.models.deletion +import hashid_field.field +import sfdo_template_helpers.fields.string +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("api", "0106_project_tasks"), + ] + + operations = [ + migrations.CreateModel( + name="TaskActivity", + fields=[ + ( + "id", + hashid_field.field.HashidAutoField( + alphabet="abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890", # noqa + min_length=7, + prefix="", + primary_key=True, + serialize=False, + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True)), + ("edited_at", models.DateTimeField(auto_now=True)), + ( + "type", + sfdo_template_helpers.fields.string.StringField( + choices=[ + ("Merged", "Merged"), + ("Approved", "Approved"), + ("Closed", "Closed"), + ("Test Org created", "Test Org Created"), + ("Test Org deleted", "Test Org Deleted"), + ("Dev Org created", "Dev Org Created"), + ("Dev Org deleted", "Dev Org Deleted"), + ("Submitted for testing", "Submitted For Testing"), + ("Changes retrieved", "Changes Retrieved"), + ("Changes requested", "Changes Requested"), + ("Tester accepted", "Tester Accepted"), + ("Tester assigned", "Tester Assigned"), + ("Tester unassigned", "Tester Unassigned"), + ("Developer accepted", "Dev Accepted"), + ("Developer assigned", "Dev Assigned"), + ("Developer unassigned", "Dev Unassigned"), + ("Created", "Created"), + ] + ), + ), + ( + "link_title", + sfdo_template_helpers.fields.string.StringField(blank=True), + ), + ("link_url", models.URLField(blank=True)), + ( + "description", + sfdo_template_helpers.fields.string.StringField(blank=True), + ), + ( + "collaborator_id", + sfdo_template_helpers.fields.string.StringField(blank=True), + 
), + ( + "scratch_org", + models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="task_activities", + to="api.scratchorg", + ), + ), + ( + "task", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="activities", + to="api.task", + ), + ), + ], + options={ + "verbose_name": "task activity", + "verbose_name_plural": "task activities", + "ordering": ("-created_at",), + }, + ), + ] diff --git a/metecho/api/models.py b/metecho/api/models.py index 707482829..eece19340 100644 --- a/metecho/api/models.py +++ b/metecho/api/models.py @@ -81,6 +81,26 @@ class TaskReviewStatus(models.TextChoices): CHANGES_REQUESTED = "Changes requested" +class TaskActivityType(models.TextChoices): + MERGED = "Merged" + APPROVED = "Approved" + CLOSED = "Closed" + TEST_ORG_CREATED = "Test Org created" + TEST_ORG_DELETED = "Test Org deleted" + DEV_ORG_CREATED = "Dev Org created" + DEV_ORG_DELETED = "Dev Org deleted" + SUBMITTED_FOR_TESTING = "Submitted for testing" + CHANGES_RETRIEVED = "Changes retrieved" + CHANGES_REQUESTED = "Changes requested" + TESTER_ACCEPTED = "Tester accepted" + TESTER_ASSIGNED = "Tester assigned" + TESTER_UNASSIGNED = "Tester unassigned" + DEV_ACCEPTED = "Developer accepted" + DEV_ASSIGNED = "Developer assigned" + DEV_UNASSIGNED = "Developer unassigned" + CREATED = "Created" + + class SiteProfile(TranslatableModel): site = models.OneToOneField(Site, on_delete=models.CASCADE) @@ -387,6 +407,10 @@ def save(self, *args, **kwargs): super().save(*args, **kwargs) + @property + def repo_url(self) -> str: + return f"https://github.com/{self.repo_owner}/{self.repo_name}" + def finalize_get_social_image(self): self.save() self.notify_changed(originating_user_id=None) @@ -622,14 +646,14 @@ def notify_created(self, originating_user_id=None): group_name=group_name, ) - def finalize_pr_closed(self, pr_number, *, originating_user_id): - self.pr_number = pr_number + def finalize_pr_closed(self, pr, *, originating_user_id): + self.pr_number = pr["number"] self.pr_is_open = False self.save() self.notify_changed(originating_user_id=originating_user_id) - def finalize_pr_opened(self, pr_number, *, originating_user_id): - self.pr_number = pr_number + def finalize_pr_opened(self, pr, *, originating_user_id): + self.pr_number = pr["number"] self.pr_is_open = True self.pr_is_merged = False self.save() @@ -639,8 +663,8 @@ def finalize_epic_update(self, *, originating_user_id=None): self.save() self.notify_changed(originating_user_id=originating_user_id) - def finalize_status_completed(self, pr_number, *, originating_user_id): - self.pr_number = pr_number + def finalize_status_completed(self, pr, *, originating_user_id): + self.pr_number = pr["number"] self.pr_is_merged = True self.has_unmerged_commits = False self.pr_is_open = False @@ -745,6 +769,26 @@ def root_project(self) -> Project: return self.epic.project return self.project + @property + def branch_url(self) -> Optional[str]: + if self.branch_name: + return f"{self.root_project.repo_url}/tree/{self.branch_name}" + return None + + @property + def branch_diff_url(self) -> Optional[str]: + base_branch = self.get_base() + branch = self.branch_name + if base_branch and branch: + return f"{self.root_project.repo_url}/compare/{base_branch}...{branch}" + return None + + @property + def pr_url(self) -> Optional[str]: + if self.pr_number: + return f"{self.root_project.repo_url}/pull/{self.pr_number}" + return None + def save(self, *args, force_epic_save=False, **kwargs): is_new = self.pk 
is None ret = super().save(*args, **kwargs) @@ -823,6 +867,17 @@ def get_base(self): def get_head(self): return self.branch_name + def log_activity(self, *, user_id=None, **kwargs): + """ + Transform a local user_id into a GitHub ID before creating an activity related + to this task + """ + if user_id: + kwargs["collaborator_id"] = getattr( + User.objects.filter(id=user_id).first(), "github_id", "" + ) + return self.activities.create(**kwargs) + def try_to_notify_assigned_user(self): # This takes the tester (a.k.a. assigned_qa) and sends them an # email when a PR has been made. @@ -885,39 +940,58 @@ def finalize_task_update(self, *, originating_user_id): self.save() self.notify_changed(originating_user_id=originating_user_id) - def finalize_status_completed(self, pr_number, *, originating_user_id): + def finalize_status_completed(self, pr, *, originating_user_id): self.status = TaskStatus.COMPLETED self.has_unmerged_commits = False - self.pr_number = pr_number + self.pr_number = pr["number"] self.pr_is_open = False if self.epic: self.epic.has_unmerged_commits = True # This will save the epic, too: self.save(force_epic_save=True) + self.log_activity(type=TaskActivityType.MERGED, user_id=originating_user_id) self.notify_changed(originating_user_id=originating_user_id) - def finalize_pr_closed(self, pr_number, *, originating_user_id): + def finalize_pr_closed(self, pr, *, originating_user_id): self.status = TaskStatus.CANCELED - self.pr_number = pr_number + self.pr_number = pr["number"] self.pr_is_open = False self.review_valid = False self.save() + self.log_activity(type=TaskActivityType.CLOSED, user_id=originating_user_id) self.notify_changed(originating_user_id=originating_user_id) - def finalize_pr_opened(self, pr_number, *, originating_user_id): + def finalize_pr_opened(self, pr, *, originating_user_id): self.status = TaskStatus.IN_PROGRESS - self.pr_number = pr_number + self.pr_number = pr["number"] self.pr_is_open = True self.pr_is_merged = False self.save() + self.log_activity( + type=TaskActivityType.SUBMITTED_FOR_TESTING, + user_id=originating_user_id, + link_title=f"PR{pr['number']}", + link_url=self.pr_url, + description=pr["title"], + ) self.notify_changed(originating_user_id=originating_user_id) - def finalize_provision(self, *, originating_user_id): + def finalize_provision(self, *, scratch_org: "ScratchOrg", originating_user_id): if self.status == TaskStatus.PLANNED: self.status = TaskStatus.IN_PROGRESS self.save() self.notify_changed(originating_user_id=originating_user_id) + type_map = { + ScratchOrgType.DEV: TaskActivityType.DEV_ORG_CREATED, + ScratchOrgType.QA: TaskActivityType.TEST_ORG_CREATED, + } + self.log_activity( + type=type_map[scratch_org.org_type], + user_id=originating_user_id, + scratch_org=scratch_org, + ) + def finalize_commit_changes(self, *, originating_user_id): if self.status != TaskStatus.IN_PROGRESS: self.status = TaskStatus.IN_PROGRESS @@ -972,6 +1046,13 @@ def finalize_submit_review( self.review_sha = sha self.update_review_valid() self.save() + type_map = { + TaskReviewStatus.CHANGES_REQUESTED: TaskActivityType.CHANGES_REQUESTED, + TaskReviewStatus.APPROVED: TaskActivityType.APPROVED, + } + self.log_activity( + type=type_map[self.review_status], user_id=originating_user_id + ) self.notify_changed( type_="TASK_SUBMIT_REVIEW", originating_user_id=originating_user_id ) @@ -1174,7 +1255,9 @@ def finalize_provision(self, *, error=None, originating_user_id): type_="SCRATCH_ORG_PROVISION", originating_user_id=originating_user_id ) if self.task: - 
self.task.finalize_provision(originating_user_id=originating_user_id) + self.task.finalize_provision( + originating_user_id=originating_user_id, scratch_org=self + ) else: self.notify_scratch_org_error( error=error, @@ -1373,6 +1456,33 @@ def notify_org_provisioning(self, originating_user_id): ) +class TaskActivity(HashIdMixin, TimestampsMixin): + """Keep track of activities like re-assignments, org creations, commits, etc""" + + type = StringField(choices=TaskActivityType.choices) + link_title = StringField(blank=True) + link_url = models.URLField(blank=True) + description = StringField(blank=True) + + task = models.ForeignKey(Task, related_name="activities", on_delete=models.CASCADE) + collaborator_id = StringField(blank=True) + scratch_org = models.ForeignKey( + ScratchOrg, + related_name="task_activities", + on_delete=models.SET_NULL, + blank=True, + null=True, + ) + + class Meta: + ordering = ("-created_at",) + verbose_name = _("task activity") + verbose_name_plural = _("task activities") + + def __str__(self) -> str: + return self.get_type_display() + + @receiver(user_logged_in) def user_logged_in_handler(sender, *, user, **kwargs): user.queue_refresh_repositories() diff --git a/metecho/api/serializers.py b/metecho/api/serializers.py index 928c7e078..ab4b46a3a 100644 --- a/metecho/api/serializers.py +++ b/metecho/api/serializers.py @@ -19,6 +19,7 @@ ScratchOrgType, SiteProfile, Task, + TaskActivityType, TaskReviewStatus, ) from .sf_run_flow import is_org_good @@ -161,7 +162,7 @@ class ProjectSerializer(HashIdModelSerializer): slug = serializers.CharField() old_slugs = StringListField(read_only=True) description_rendered = MarkdownField(source="description", read_only=True) - repo_url = serializers.SerializerMethodField() + repo_url = serializers.URLField(read_only=True) repo_image_url = serializers.SerializerMethodField() has_push_permission = serializers.SerializerMethodField() github_users = GitHubUserSerializer(many=True, allow_empty=True, required=False) @@ -195,10 +196,6 @@ class Meta: "latest_sha": {"read_only": True}, } - @extend_schema_field(OpenApiTypes.URI) - def get_repo_url(self, obj) -> Optional[str]: - return f"https://github.com/{obj.repo_owner}/{obj.repo_name}" - @extend_schema_field(OpenApiTypes.URI) def get_repo_image_url(self, obj) -> Optional[str]: return obj.repo_image_url if obj.include_repo_image_url else "" @@ -427,10 +424,10 @@ class TaskSerializer(HashIdModelSerializer): required=False, allow_null=True, ) - root_project = serializers.SerializerMethodField() - branch_url = serializers.SerializerMethodField() - branch_diff_url = serializers.SerializerMethodField() - pr_url = serializers.SerializerMethodField() + root_project = serializers.CharField(source="root_project.id", read_only=True) + branch_url = serializers.URLField(read_only=True) + branch_diff_url = serializers.URLField(read_only=True) + pr_url = serializers.URLField(read_only=True) dev_org = serializers.PrimaryKeyRelatedField( queryset=ScratchOrg.objects.active(), @@ -476,12 +473,8 @@ class Meta: "has_unmerged_commits": {"read_only": True}, "currently_creating_branch": {"read_only": True}, "currently_creating_pr": {"read_only": True}, - "root_project": {"read_only": True}, - "branch_url": {"read_only": True}, "commits": {"read_only": True}, "origin_sha": {"read_only": True}, - "branch_diff_url": {"read_only": True}, - "pr_url": {"read_only": True}, "review_submitted_at": {"read_only": True}, "review_valid": {"read_only": True}, "review_status": {"read_only": True}, @@ -492,43 +485,6 @@ class Meta: 
"currently_submitting_review": {"read_only": True}, } - def get_root_project(self, obj) -> str: - return str(obj.root_project.pk) - - @extend_schema_field(OpenApiTypes.URI) - def get_branch_url(self, obj) -> Optional[str]: - project = obj.root_project - repo_owner = project.repo_owner - repo_name = project.repo_name - branch = obj.branch_name - if repo_owner and repo_name and branch: - return f"https://github.com/{repo_owner}/{repo_name}/tree/{branch}" - return None - - @extend_schema_field(OpenApiTypes.URI) - def get_branch_diff_url(self, obj) -> Optional[str]: - base_branch = obj.get_base() - project = obj.root_project - repo_owner = project.repo_owner - repo_name = project.repo_name - branch = obj.branch_name - if repo_owner and repo_name and base_branch and branch: - return ( - f"https://github.com/{repo_owner}/{repo_name}/compare/" - f"{base_branch}...{branch}" - ) - return None - - @extend_schema_field(OpenApiTypes.URI) - def get_pr_url(self, obj) -> Optional[str]: - project = obj.root_project - repo_owner = project.repo_owner - repo_name = project.repo_name - pr_number = obj.pr_number - if repo_owner and repo_name and pr_number: - return f"https://github.com/{repo_owner}/{repo_name}/pull/{pr_number}" - return None - def validate(self, data: dict) -> dict: project = data.get("project", getattr(self.instance, "project", None)) epic = data.get("epic", getattr(self.instance, "epic", None)) @@ -550,6 +506,7 @@ def create(self, validated_data): validated_data["assigned_dev"] = user.github_id task = super().create(validated_data) + task.log_activity(type=TaskActivityType.CREATED, collaborator_id=user.github_id) task.notify_created(originating_user_id=str(user.id)) if dev_org: @@ -612,13 +569,35 @@ def validate_assigned_qa(self, new_qa): def update(self, task, data): user = self.context["request"].user user_id = str(user.id) + activities = [] + if "assigned_dev" in data: self._handle_reassign("dev", task, data, user, originating_user_id=user_id) task.assigned_dev = data["assigned_dev"] + activities.append( + dict( + type=TaskActivityType.DEV_ASSIGNED + if data["assigned_dev"] is not None + else TaskActivityType.DEV_UNASSIGNED, + collaborator_id=data["assigned_dev"] or "", + ) + ) if "assigned_qa" in data: self._handle_reassign("qa", task, data, user, originating_user_id=user_id) task.assigned_qa = data["assigned_qa"] - task.finalize_task_update(originating_user_id=user_id) + activities.append( + dict( + type=TaskActivityType.TESTER_ASSIGNED + if data["assigned_qa"] is not None + else TaskActivityType.TESTER_UNASSIGNED, + collaborator_id=data["assigned_qa"] or "", + ) + ) + + task.save() + for kwargs in activities: + task.log_activity(**kwargs) + task.notify_changed(originating_user_id=user_id) return task def _handle_reassign( @@ -638,7 +617,7 @@ def _handle_reassign( epic.notify_changed(originating_user_id=None) if validated_data.get(f"should_alert_{type_}"): - self.try_send_assignment_emails(instance, type_, validated_data, user) + self.try_send_assignment_emails(instance, type_, new_assignee, user) reassigned_org = False # We want to consider soft-deleted orgs, too: @@ -647,9 +626,7 @@ def _handle_reassign( *instance.orgs.inactive().filter(org_type=org_type), ] for org in orgs: - new_user = self._valid_reassign( - type_, org, validated_data[f"assigned_{type_}"] - ) + new_user = self._valid_reassign(org, new_assignee) valid_commit = org.latest_commit == ( instance.commits[0] if instance.commits else instance.origin_sha ) @@ -674,16 +651,14 @@ def _handle_reassign( 
originating_user_id=originating_user_id, preserve_sf_org=True ) - def _valid_reassign(self, type_, org, new_assignee): - new_user = self.get_matching_assigned_user( - type_, {f"assigned_{type_}": new_assignee} - ) + def _valid_reassign(self, org, ghid): + new_user = self.get_matching_assigned_user(ghid) if new_user and org.owner_sf_username == new_user.sf_username: return new_user return None - def try_send_assignment_emails(self, instance, type_, validated_data, user): - assigned_user = self.get_matching_assigned_user(type_, validated_data) + def try_send_assignment_emails(self, instance, type_, ghid, user): + assigned_user = self.get_matching_assigned_user(ghid) if assigned_user: task = instance metecho_link = get_user_facing_url(path=task.get_absolute_url()) @@ -700,10 +675,9 @@ def try_send_assignment_emails(self, instance, type_, validated_data, user): ) assigned_user.notify(subject, body) - def get_matching_assigned_user(self, type_, validated_data): - id_ = validated_data.get(f"assigned_{type_}") - sa = SocialAccount.objects.filter(provider="github", uid=id_).first() - return getattr(sa, "user", None) # Optional[User] + def get_matching_assigned_user(self, ghid: int) -> Optional[User]: + sa = SocialAccount.objects.filter(provider="github", uid=ghid).first() + return getattr(sa, "user", None) class CreatePrSerializer(serializers.Serializer): From 689c1f416c0cb21023bee4ea7c6f2342fc874088 Mon Sep 17 00:00:00 2001 From: Ed Rivas Date: Tue, 14 Dec 2021 16:57:03 +0000 Subject: [PATCH 2/8] Fix existing tests --- metecho/api/models.py | 13 ++- metecho/api/tests/hook_serializers.py | 115 ++++++++------------------ metecho/api/tests/models.py | 16 +++- metecho/conftest.py | 9 ++ 4 files changed, 61 insertions(+), 92 deletions(-) diff --git a/metecho/api/models.py b/metecho/api/models.py index eece19340..ed89504b2 100644 --- a/metecho/api/models.py +++ b/metecho/api/models.py @@ -1046,13 +1046,12 @@ def finalize_submit_review( self.review_sha = sha self.update_review_valid() self.save() - type_map = { - TaskReviewStatus.CHANGES_REQUESTED: TaskActivityType.CHANGES_REQUESTED, - TaskReviewStatus.APPROVED: TaskActivityType.APPROVED, - } - self.log_activity( - type=type_map[self.review_status], user_id=originating_user_id - ) + if status: + type_map = { + TaskReviewStatus.CHANGES_REQUESTED: TaskActivityType.CHANGES_REQUESTED, + TaskReviewStatus.APPROVED: TaskActivityType.APPROVED, + } + self.log_activity(type=type_map[status], user_id=originating_user_id) self.notify_changed( type_="TASK_SUBMIT_REVIEW", originating_user_id=originating_user_id ) diff --git a/metecho/api/tests/hook_serializers.py b/metecho/api/tests/hook_serializers.py index 7fa35054b..179b513e4 100644 --- a/metecho/api/tests/hook_serializers.py +++ b/metecho/api/tests/hook_serializers.py @@ -48,16 +48,11 @@ def test_process_hook__tag(self, project_factory): @pytest.mark.django_db class TestPrHookSerializer: - def test_process_hook__no_matching_project(self): + def test_process_hook__no_matching_project(self, pull_request_payload_factory): data = { "action": "closed", "number": 123, - "pull_request": { - "merged": False, - "head": {"ref": "head-ref", "sha": "head-sha"}, - "base": {"ref": "base-ref", "sha": "base-sha"}, - "number": 123, - }, + "pull_request": pull_request_payload_factory(), "repository": {"id": 123}, } serializer = PrHookSerializer(data=data) @@ -65,17 +60,14 @@ def test_process_hook__no_matching_project(self): with pytest.raises(NotFound): serializer.process_hook() - def test_process_hook__no_matching_task(self, 
project_factory): + def test_process_hook__no_matching_task( + self, project_factory, pull_request_payload_factory + ): project_factory(repo_id=123) data = { "action": "closed", "number": 456, - "pull_request": { - "merged": True, - "head": {"ref": "head-ref", "sha": "head-sha"}, - "base": {"ref": "base-ref", "sha": "base-sha"}, - "number": 123, - }, + "pull_request": pull_request_payload_factory(), "repository": {"id": 123}, } serializer = PrHookSerializer(data=data) @@ -98,20 +90,13 @@ def test_process_hook__no_matching_task(self, project_factory): ), ) def test_process_hook__mark_matching_tasks_as_completed( - self, _task_factory, task_data + self, _task_factory, task_data, pull_request_payload_factory ): - task = _task_factory( - **task_data, pr_number=456, status=TaskStatus.IN_PROGRESS - ) + task = _task_factory(**task_data, pr_number=456, status=TaskStatus.IN_PROGRESS) data = { "action": "closed", "number": 456, - "pull_request": { - "merged": True, - "head": {"ref": "head-ref", "sha": "head-sha"}, - "base": {"ref": "base-ref", "sha": "base-sha"}, - "number": 123, - }, + "pull_request": pull_request_payload_factory(merged=True), "repository": {"id": 123}, } serializer = PrHookSerializer(data=data) @@ -121,7 +106,9 @@ def test_process_hook__mark_matching_tasks_as_completed( task.refresh_from_db() assert task.status == TaskStatus.COMPLETED - def test_process_hook__closed_not_merged(self, task_factory): + def test_process_hook__closed_not_merged( + self, task_factory, pull_request_payload_factory + ): task = task_factory( pr_number=456, epic__project__repo_id=123, @@ -130,12 +117,7 @@ def test_process_hook__closed_not_merged(self, task_factory): data = { "action": "closed", "number": 456, - "pull_request": { - "merged": False, - "head": {"ref": "head-ref", "sha": "head-sha"}, - "base": {"ref": "base-ref", "sha": "base-sha"}, - "number": 123, - }, + "pull_request": pull_request_payload_factory(), "repository": {"id": 123}, } serializer = PrHookSerializer(data=data) @@ -146,7 +128,7 @@ def test_process_hook__closed_not_merged(self, task_factory): assert task.status == TaskStatus.CANCELED assert not task.pr_is_open - def test_process_hook__reopened(self, task_factory): + def test_process_hook__reopened(self, task_factory, pull_request_payload_factory): task = task_factory( pr_number=456, epic__project__repo_id=123, @@ -155,12 +137,7 @@ def test_process_hook__reopened(self, task_factory): data = { "action": "reopened", "number": 456, - "pull_request": { - "merged": False, - "head": {"ref": "head-ref", "sha": "head-sha"}, - "base": {"ref": "base-ref", "sha": "base-sha"}, - "number": 123, - }, + "pull_request": pull_request_payload_factory(), "repository": {"id": 123}, } serializer = PrHookSerializer(data=data) @@ -171,18 +148,15 @@ def test_process_hook__reopened(self, task_factory): assert task.status == TaskStatus.IN_PROGRESS assert task.pr_is_open - def test_process_hook__close_matching_epics(self, project_factory, epic_factory): + def test_process_hook__close_matching_epics( + self, project_factory, epic_factory, pull_request_payload_factory + ): project = project_factory(repo_id=123) epic = epic_factory(pr_number=456, project=project, pr_is_open=True) data = { "action": "closed", "number": 456, - "pull_request": { - "merged": True, - "head": {"ref": "head-ref", "sha": "head-sha"}, - "base": {"ref": "base-ref", "sha": "base-sha"}, - "number": 123, - }, + "pull_request": pull_request_payload_factory(), "repository": {"id": 123}, } serializer = PrHookSerializer(data=data) @@ -192,7 +166,9 
@@ def test_process_hook__close_matching_epics(self, project_factory, epic_factory) epic.refresh_from_db() assert not epic.pr_is_open - def test_process_hook__epic_closed_not_merged(self, project_factory, epic_factory): + def test_process_hook__epic_closed_not_merged( + self, project_factory, epic_factory, pull_request_payload_factory + ): project = project_factory(repo_id=123) epic = epic_factory( pr_number=456, @@ -202,12 +178,7 @@ def test_process_hook__epic_closed_not_merged(self, project_factory, epic_factor data = { "action": "closed", "number": 456, - "pull_request": { - "merged": False, - "head": {"ref": "head-ref", "sha": "head-sha"}, - "base": {"ref": "base-ref", "sha": "base-sha"}, - "number": 123, - }, + "pull_request": pull_request_payload_factory(), "repository": {"id": 123}, } serializer = PrHookSerializer(data=data) @@ -217,7 +188,9 @@ def test_process_hook__epic_closed_not_merged(self, project_factory, epic_factor epic.refresh_from_db() assert not epic.pr_is_open - def test_process_hook__epic_reopened(self, project_factory, epic_factory): + def test_process_hook__epic_reopened( + self, project_factory, epic_factory, pull_request_payload_factory + ): project = project_factory(repo_id=123) epic = epic_factory( pr_number=456, @@ -227,12 +200,7 @@ def test_process_hook__epic_reopened(self, project_factory, epic_factory): data = { "action": "reopened", "number": 456, - "pull_request": { - "merged": False, - "head": {"ref": "head-ref", "sha": "head-sha"}, - "base": {"ref": "base-ref", "sha": "base-sha"}, - "number": 123, - }, + "pull_request": pull_request_payload_factory(), "repository": {"id": 123}, } serializer = PrHookSerializer(data=data) @@ -245,16 +213,11 @@ def test_process_hook__epic_reopened(self, project_factory, epic_factory): @pytest.mark.django_db class TestPrReviewHookSerializer: - def test_no_project(self): + def test_no_project(self, pull_request_payload_factory): data = { "sender": {"login": "login", "avatar_url": "https://example.com"}, "repository": {"id": 123}, - "pull_request": { - "merged": False, - "head": {"ref": "head-ref", "sha": "head-sha"}, - "base": {"ref": "base-ref", "sha": "base-sha"}, - "number": 123, - }, + "pull_request": pull_request_payload_factory(), } serializer = PrReviewHookSerializer(data=data) assert serializer.is_valid(), serializer.errors @@ -262,17 +225,12 @@ def test_no_project(self): with pytest.raises(NotFound): serializer.process_hook() - def test_no_task(self, project_factory): + def test_no_task(self, project_factory, pull_request_payload_factory): project_factory(repo_id=123) data = { "sender": {"login": "login", "avatar_url": "https://example.com"}, "repository": {"id": 123}, - "pull_request": { - "merged": False, - "head": {"ref": "head-ref", "sha": "head-sha"}, - "base": {"ref": "base-ref", "sha": "base-sha"}, - "number": 123, - }, + "pull_request": pull_request_payload_factory(), } serializer = PrReviewHookSerializer(data=data) assert serializer.is_valid(), serializer.errors @@ -295,17 +253,12 @@ def test_no_task(self, project_factory): ), ), ) - def test_good(self, _factory, task_data): - task = _factory(**task_data, pr_number=123) + def test_good(self, _factory, task_data, pull_request_payload_factory): + task = _factory(**task_data, pr_number=111) data = { "sender": {"login": "login", "avatar_url": "https://example.com"}, "repository": {"id": 123}, - "pull_request": { - "merged": False, - "head": {"ref": "head-ref", "sha": "head-sha"}, - "base": {"ref": "base-ref", "sha": "base-sha"}, - "number": 123, - }, + 
"pull_request": pull_request_payload_factory(number=111), } serializer = PrReviewHookSerializer(data=data) assert serializer.is_valid(), serializer.errors diff --git a/metecho/api/tests/models.py b/metecho/api/tests/models.py index e033369af..9f56ccf33 100644 --- a/metecho/api/tests/models.py +++ b/metecho/api/tests/models.py @@ -143,14 +143,18 @@ def test_get_repo_id(self, project_factory, epic_factory): assert epic.get_repo_id() == 123 - def test_finalize_status_completed(self, epic_factory): + def test_finalize_status_completed( + self, epic_factory, pull_request_payload_factory + ): with ExitStack() as stack: epic = epic_factory(has_unmerged_commits=True) async_to_sync = stack.enter_context( patch("metecho.api.model_mixins.async_to_sync") ) - epic.finalize_status_completed(123, originating_user_id=None) + epic.finalize_status_completed( + pull_request_payload_factory(number=123), originating_user_id=None + ) epic.refresh_from_db() assert epic.pr_number == 123 assert not epic.has_unmerged_commits @@ -241,14 +245,18 @@ def test_notify_changed(self, task_factory): assert async_to_sync.called - def test_finalize_status_completed(self, task_factory): + def test_finalize_status_completed( + self, task_factory, pull_request_payload_factory + ): with ExitStack() as stack: async_to_sync = stack.enter_context( patch("metecho.api.model_mixins.async_to_sync") ) task = task_factory() - task.finalize_status_completed(123, originating_user_id=None) + task.finalize_status_completed( + pull_request_payload_factory(number=123), originating_user_id=None + ) task.refresh_from_db() assert async_to_sync.called diff --git a/metecho/conftest.py b/metecho/conftest.py index 09a32f502..e72b6208f 100644 --- a/metecho/conftest.py +++ b/metecho/conftest.py @@ -125,6 +125,15 @@ class Meta: valid_target_directories = {"source": []} +@register +class PullRequestPayloadFactory(factory.base.DictFactory): + number = factory.Sequence(lambda n: n) + title = factory.Sequence("Pull Request {}".format) + merged = False + head = {"ref": "head-ref", "sha": "head-sha"} + base = {"ref": "base-ref", "sha": "base-sha"} + + @pytest.fixture def client(user_factory): user = user_factory() From 0ae441d030b1e83d0fa873188b8e202b12d9a389 Mon Sep 17 00:00:00 2001 From: Ed Rivas Date: Wed, 15 Dec 2021 17:54:24 +0000 Subject: [PATCH 3/8] Test activity logging for Tasks --- metecho/api/tests/jobs.py | 166 ++++++++++++++++++++++-------------- metecho/api/tests/models.py | 98 ++++++++++++++++----- metecho/api/tests/views.py | 40 ++++++++- 3 files changed, 217 insertions(+), 87 deletions(-) diff --git a/metecho/api/tests/jobs.py b/metecho/api/tests/jobs.py index bcbba32a9..926437a68 100644 --- a/metecho/api/tests/jobs.py +++ b/metecho/api/tests/jobs.py @@ -30,7 +30,7 @@ submit_review, user_reassign, ) -from ..models import ScratchOrgType +from ..models import ScratchOrgType, TaskActivityType Author = namedtuple("Author", ("avatar_url", "login")) Commit = namedtuple( @@ -391,33 +391,69 @@ def test_get_unsaved_changes(scratch_org_factory): assert scratch_org.latest_revision_numbers == {"TypeOne": {"NameOne": 10}} -def test_create_branches_on_github_then_create_scratch_org(): - # Not a great test, but not a complicated function. 
- with ExitStack() as stack: - stack.enter_context(patch(f"{PATCH_ROOT}.local_github_checkout")) - _create_branches_on_github = stack.enter_context( - patch(f"{PATCH_ROOT}._create_branches_on_github") - ) - _create_branches_on_github.return_value = "this_branch" - _create_org_and_run_flow = stack.enter_context( - patch(f"{PATCH_ROOT}._create_org_and_run_flow") - ) - stack.enter_context(patch(f"{PATCH_ROOT}.get_scheduler")) - get_repo_info = stack.enter_context(patch(f"{PATCH_ROOT}.get_repo_info")) - latest_sha = MagicMock() - latest_sha.return_value = "abcd1234" - repository = MagicMock() - repository.branch.return_value = MagicMock(latest_sha=latest_sha) - get_repo_info.return_value = repository +@pytest.mark.parametrize( + "org_type, activity_type", + ( + (ScratchOrgType.DEV, TaskActivityType.DEV_ORG_CREATED), + (ScratchOrgType.QA, TaskActivityType.TEST_ORG_CREATED), + ), +) +@pytest.mark.django_db +def test_create_branches_on_github_then_create_scratch_org( + mocker, scratch_org_factory, user_factory, org_type, activity_type +): + mocker.patch(f"{PATCH_ROOT}.local_github_checkout") + _create_branches_on_github = mocker.patch( + f"{PATCH_ROOT}._create_branches_on_github" + ) + _create_branches_on_github.return_value = "this_branch" + _create_org_and_run_flow = mocker.patch(f"{PATCH_ROOT}._create_org_and_run_flow") + mocker.patch(f"{PATCH_ROOT}.get_scheduler") + get_repo_info = mocker.patch(f"{PATCH_ROOT}.get_repo_info") + latest_sha = MagicMock() + latest_sha.return_value = "abcd1234" + repository = MagicMock() + repository.branch.return_value = MagicMock(latest_sha=latest_sha) + get_repo_info.return_value = repository - org = MagicMock(task=None, epic=MagicMock()) - org.parent = MagicMock(branch_name="", latest_sha="") - create_branches_on_github_then_create_scratch_org( - scratch_org=org, originating_user_id=None - ) + user = user_factory() + org = scratch_org_factory(org_type=org_type, task__epic__project__repo_id=1) - assert _create_branches_on_github.called - assert _create_org_and_run_flow.called + create_branches_on_github_then_create_scratch_org( + scratch_org=org, originating_user_id=user.id + ) + + assert _create_branches_on_github.called + assert _create_org_and_run_flow.called + assert org.task.activities.filter( + type=activity_type, collaborator_id=user.github_id + ).exists(), org.task.activities.all() + + +@pytest.mark.django_db +def test_create_branches_on_github_then_create_scratch_org__no_task(mocker): + mocker.patch(f"{PATCH_ROOT}.local_github_checkout") + _create_branches_on_github = mocker.patch( + f"{PATCH_ROOT}._create_branches_on_github" + ) + _create_branches_on_github.return_value = "this_branch" + _create_org_and_run_flow = mocker.patch(f"{PATCH_ROOT}._create_org_and_run_flow") + mocker.patch(f"{PATCH_ROOT}.get_scheduler") + get_repo_info = mocker.patch(f"{PATCH_ROOT}.get_repo_info") + latest_sha = MagicMock() + latest_sha.return_value = "abcd1234" + repository = MagicMock() + repository.branch.return_value = MagicMock(latest_sha=latest_sha) + get_repo_info.return_value = repository + org = MagicMock(task=None, epic=MagicMock()) + org.parent = MagicMock(branch_name="", latest_sha="") + + create_branches_on_github_then_create_scratch_org( + scratch_org=org, originating_user_id=None + ) + + assert _create_branches_on_github.called + assert _create_org_and_run_flow.called @pytest.mark.django_db @@ -575,45 +611,49 @@ def test_error(self, mocker, caplog, user_factory, git_hub_repository_factory): @pytest.mark.django_db -def 
test_commit_changes_from_org(scratch_org_factory, user_factory): - scratch_org = scratch_org_factory() +def test_commit_changes_from_org(mocker, scratch_org_factory, user_factory): + scratch_org = scratch_org_factory(task__epic__project__repo_id=1) user = user_factory() - with ExitStack() as stack: - commit_changes_to_github = stack.enter_context( - patch(f"{PATCH_ROOT}.commit_changes_to_github") - ) - get_latest_revision_numbers = stack.enter_context( - patch(f"{PATCH_ROOT}.get_latest_revision_numbers") - ) - get_latest_revision_numbers.return_value = { - "name": {"member": 1, "member2": 1}, - "name1": {"member": 1, "member2": 1}, - } - get_repo_info = stack.enter_context(patch(f"{PATCH_ROOT}.get_repo_info")) - commit = MagicMock( - sha="12345", - html_url="https://github.com/test/user/foo", - commit=MagicMock(author={"date": now()}), - ) - repository = MagicMock() - repository.branch.return_value = MagicMock(commit=commit) - get_repo_info.return_value = repository - - desired_changes = {"name": ["member"]} - commit_message = "test message" - target_directory = "src" - assert scratch_org.latest_revision_numbers == {} - commit_changes_from_org( - scratch_org=scratch_org, - user=user, - desired_changes=desired_changes, - commit_message=commit_message, - target_directory=target_directory, - originating_user_id=None, - ) + commit_changes_to_github = mocker.patch(f"{PATCH_ROOT}.commit_changes_to_github") + get_latest_revision_numbers = mocker.patch( + f"{PATCH_ROOT}.get_latest_revision_numbers" + ) + get_latest_revision_numbers.return_value = { + "name": {"member": 1, "member2": 1}, + "name1": {"member": 1, "member2": 1}, + } + get_repo_info = mocker.patch(f"{PATCH_ROOT}.get_repo_info") + commit = MagicMock( + sha="1234" * 10, # 40 characters + html_url="https://github.com/test/user/foo", + commit=MagicMock(author={"date": now()}), + ) + repository = MagicMock() + repository.branch.return_value = MagicMock(commit=commit) + get_repo_info.return_value = repository + + desired_changes = {"name": ["member"]} + commit_message = "test message" + target_directory = "src" + assert scratch_org.latest_revision_numbers == {} + commit_changes_from_org( + scratch_org=scratch_org, + user=user, + desired_changes=desired_changes, + commit_message=commit_message, + target_directory=target_directory, + originating_user_id=user.id, + ) - assert commit_changes_to_github.called - assert scratch_org.latest_revision_numbers == {"name": {"member": 1}} + assert commit_changes_to_github.called + assert scratch_org.latest_revision_numbers == {"name": {"member": 1}} + assert scratch_org.task.activities.filter( + type=TaskActivityType.CHANGES_RETRIEVED, + collaborator_id=user.github_id, + link_title="1234123", + description="test message", + scratch_org=scratch_org, + ).exists(), scratch_org.task.activities.all() # TODO: this should be bundled with each function, not all error-handling together. 
diff --git a/metecho/api/tests/models.py b/metecho/api/tests/models.py index 9f56ccf33..54c0a5366 100644 --- a/metecho/api/tests/models.py +++ b/metecho/api/tests/models.py @@ -12,6 +12,9 @@ EpicStatus, ScratchOrgType, Task, + TaskActivity, + TaskActivityType, + TaskReviewStatus, TaskStatus, user_logged_in_handler, ) @@ -246,22 +249,52 @@ def test_notify_changed(self, task_factory): assert async_to_sync.called def test_finalize_status_completed( - self, task_factory, pull_request_payload_factory + self, mocker, task, pull_request_payload_factory ): - with ExitStack() as stack: - async_to_sync = stack.enter_context( - patch("metecho.api.model_mixins.async_to_sync") - ) + async_to_sync = mocker.patch("metecho.api.model_mixins.async_to_sync") - task = task_factory() - task.finalize_status_completed( - pull_request_payload_factory(number=123), originating_user_id=None - ) + task.finalize_status_completed( + pull_request_payload_factory(number=123), originating_user_id=None + ) - task.refresh_from_db() - assert async_to_sync.called - assert task.pr_number == 123 - assert task.status == TaskStatus.COMPLETED + task.refresh_from_db() + assert async_to_sync.called + assert task.pr_number == 123 + assert task.status == TaskStatus.COMPLETED + assert task.activities.filter( + type=TaskActivityType.MERGED, collaborator_id="" + ).exists(), task.activities.all() + + def test_finalize_pr_closed(self, mocker, task, pull_request_payload_factory): + async_to_sync = mocker.patch("metecho.api.model_mixins.async_to_sync") + + task.finalize_pr_closed( + pull_request_payload_factory(number=123), originating_user_id=None + ) + + task.refresh_from_db() + assert async_to_sync.called + assert task.pr_number == 123 + assert task.status == TaskStatus.CANCELED + assert task.activities.filter( + type=TaskActivityType.CLOSED, collaborator_id="" + ).exists(), task.activities.all() + + def test_finalize_pr_opened(self, mocker, task, pull_request_payload_factory): + async_to_sync = mocker.patch("metecho.api.model_mixins.async_to_sync") + + pr = pull_request_payload_factory(number=123, title="A new PR") + task.finalize_pr_opened(pr, originating_user_id=None) + + task.refresh_from_db() + assert async_to_sync.called + assert task.pr_number == 123 + assert task.status == TaskStatus.IN_PROGRESS + assert task.activities.filter( + type=TaskActivityType.SUBMITTED_FOR_TESTING, + collaborator_id="", + description="A new PR", + ).exists(), task.activities.all() def test_finalize_task_update(self, task_factory): with ExitStack() as stack: @@ -320,19 +353,31 @@ def test_queue_submit_review(self, user_factory, task_factory): assert submit_review_job.delay.called - def test_finalize_submit_review(self, task_factory): + @pytest.mark.parametrize( + "review_status, activity_type", + ( + (TaskReviewStatus.APPROVED, TaskActivityType.APPROVED), + (TaskReviewStatus.CHANGES_REQUESTED, TaskActivityType.CHANGES_REQUESTED), + ), + ) + def test_finalize_submit_review( + self, mocker, task_factory, user_factory, review_status, activity_type + ): now = datetime(2020, 12, 31, 12, 0) - with ExitStack() as stack: - async_to_sync = stack.enter_context( - patch("metecho.api.model_mixins.async_to_sync") - ) + async_to_sync = mocker.patch("metecho.api.model_mixins.async_to_sync") - task = task_factory(commits=[{"id": "123"}]) - task.finalize_submit_review(now, sha="123", originating_user_id=None) + user = user_factory() + task = task_factory(commits=[{"id": "123"}]) + task.finalize_submit_review( + now, sha="123", status=review_status, 
originating_user_id=user.id + ) - assert async_to_sync.called - assert task.review_sha == "123" - assert task.review_valid + assert async_to_sync.called + assert task.review_sha == "123" + assert task.review_valid + assert task.activities.filter( + type=activity_type, collaborator_id=user.github_id + ).exists(), task.activities.all() def test_finalize_submit_review__delete_org( self, task_factory, scratch_org_factory @@ -371,6 +416,7 @@ def test_finalize_submit_review__error(self, task_factory): assert async_to_sync.called assert not task.review_valid + assert not task.activities.exists() def test_soft_delete_cascade(self, task_factory, scratch_org_factory): task = task_factory() @@ -952,6 +998,12 @@ def test_str(self, git_hub_repository_factory): assert str(gh_repo) == "https://github.com/test/repo.git" +class TestTaskActivity: + def test_str(self): + activity = TaskActivity(type=TaskActivityType.APPROVED) + assert str(activity) == TaskActivityType.APPROVED.label + + @pytest.mark.django_db def test_login_handler(user_factory): user = user_factory() diff --git a/metecho/api/tests/views.py b/metecho/api/tests/views.py index 7988eb6f8..337c25081 100644 --- a/metecho/api/tests/views.py +++ b/metecho/api/tests/views.py @@ -12,7 +12,7 @@ from metecho.api.serializers import EpicSerializer, TaskSerializer -from ..models import ScratchOrgType +from ..models import ScratchOrgType, Task, TaskActivityType Branch = namedtuple("Branch", ["name"]) @@ -879,6 +879,20 @@ def test_create__dev_org( assert task_data["assigned_dev"] == client.user.github_id, task_data + def test_create__log_activity( + self, client, git_hub_repository_factory, scratch_org_factory, epic_factory + ): + repo = git_hub_repository_factory(permissions={"push": True}, user=client.user) + epic = epic_factory(project__repo_id=repo.repo_id) + data = {"name": "Task 1", "epic": str(epic.id), "org_config_name": "dev"} + + response = client.post(reverse("task-list"), data=data) + task = Task.objects.get(id=response.json()["id"]) + + assert task.activities.filter( + type=TaskActivityType.CREATED, collaborator_id=client.user.github_id + ).exists(), task.activities.all() + def test_create_pr(self, client, task_factory): with ExitStack() as stack: task = task_factory() @@ -1075,6 +1089,30 @@ def test_assignees(self, client, git_hub_repository_factory, task_factory): task.refresh_from_db() assert task.assigned_dev == "123456" assert task.assigned_qa == "123456" + assert task.activities.filter( + type=TaskActivityType.DEV_ASSIGNED, collaborator_id="123456" + ).exists(), task.activities.all() + assert task.activities.filter( + type=TaskActivityType.TESTER_ASSIGNED, collaborator_id="123456" + ).exists(), task.activities.all() + + def test_unassign_activity(self, client, git_hub_repository_factory, task_factory): + repo = git_hub_repository_factory(permissions={"push": True}, user=client.user) + task = task_factory( + epic__project__repo_id=repo.repo_id, + epic__project__github_users=[ + {"id": "123456", "permissions": {"push": True}} + ], + ) + data = {"assigned_dev": None, "assigned_qa": None} + client.post(reverse("task-assignees", args=[task.id]), data, format="json") + + assert task.activities.filter( + type=TaskActivityType.DEV_UNASSIGNED, collaborator_id="" + ).exists(), task.activities.all() + assert task.activities.filter( + type=TaskActivityType.TESTER_UNASSIGNED, collaborator_id="" + ).exists(), task.activities.all() @pytest.mark.django_db From c10cf0d3c92d3b33c2e3749c6462f57f2c0fb4c9 Mon Sep 17 00:00:00 2001 From: Ed Rivas Date: Wed, 15 
Dec 2021 18:13:29 +0000 Subject: [PATCH 4/8] Log user IDs on GitHub-initiated PR events --- metecho/api/hook_serializers.py | 6 ++++++ metecho/api/models.py | 9 +++++++-- metecho/api/tests/models.py | 20 ++++++++++---------- metecho/conftest.py | 1 + 4 files changed, 24 insertions(+), 12 deletions(-) diff --git a/metecho/api/hook_serializers.py b/metecho/api/hook_serializers.py index 9aad0f3b0..1f02700ef 100644 --- a/metecho/api/hook_serializers.py +++ b/metecho/api/hook_serializers.py @@ -32,12 +32,18 @@ class PrBranchSerializer(serializers.Serializer): # All other fields are ignored by default. +class UserSerializer(serializers.Serializer): + id = serializers.IntegerField() + login = serializers.CharField() + + class PrSerializer(serializers.Serializer): merged = serializers.BooleanField(required=False) head = PrBranchSerializer() base = PrBranchSerializer() number = serializers.IntegerField() title = serializers.CharField() + user = UserSerializer() # All other fields are ignored by default. diff --git a/metecho/api/models.py b/metecho/api/models.py index ed89504b2..2b1cda126 100644 --- a/metecho/api/models.py +++ b/metecho/api/models.py @@ -949,7 +949,9 @@ def finalize_status_completed(self, pr, *, originating_user_id): self.epic.has_unmerged_commits = True # This will save the epic, too: self.save(force_epic_save=True) - self.log_activity(type=TaskActivityType.MERGED, user_id=originating_user_id) + self.log_activity( + type=TaskActivityType.MERGED, collaborator_id=pr["user"]["id"] + ) self.notify_changed(originating_user_id=originating_user_id) def finalize_pr_closed(self, pr, *, originating_user_id): @@ -958,7 +960,9 @@ def finalize_pr_closed(self, pr, *, originating_user_id): self.pr_is_open = False self.review_valid = False self.save() - self.log_activity(type=TaskActivityType.CLOSED, user_id=originating_user_id) + self.log_activity( + type=TaskActivityType.CLOSED, collaborator_id=pr["user"]["id"] + ) self.notify_changed(originating_user_id=originating_user_id) def finalize_pr_opened(self, pr, *, originating_user_id): @@ -973,6 +977,7 @@ def finalize_pr_opened(self, pr, *, originating_user_id): link_title=f"PR{pr['number']}", link_url=self.pr_url, description=pr["title"], + collaborator_id=pr["user"]["id"], ) self.notify_changed(originating_user_id=originating_user_id) diff --git a/metecho/api/tests/models.py b/metecho/api/tests/models.py index 54c0a5366..4e857f916 100644 --- a/metecho/api/tests/models.py +++ b/metecho/api/tests/models.py @@ -253,37 +253,37 @@ def test_finalize_status_completed( ): async_to_sync = mocker.patch("metecho.api.model_mixins.async_to_sync") - task.finalize_status_completed( - pull_request_payload_factory(number=123), originating_user_id=None - ) + pr = pull_request_payload_factory(number=123, user={"id": 456}) + task.finalize_status_completed(pr, originating_user_id=None) task.refresh_from_db() assert async_to_sync.called assert task.pr_number == 123 assert task.status == TaskStatus.COMPLETED assert task.activities.filter( - type=TaskActivityType.MERGED, collaborator_id="" + type=TaskActivityType.MERGED, collaborator_id="456" ).exists(), task.activities.all() def test_finalize_pr_closed(self, mocker, task, pull_request_payload_factory): async_to_sync = mocker.patch("metecho.api.model_mixins.async_to_sync") - task.finalize_pr_closed( - pull_request_payload_factory(number=123), originating_user_id=None - ) + pr = pull_request_payload_factory(number=123, user={"id": 456}) + task.finalize_pr_closed(pr, originating_user_id=None) task.refresh_from_db() assert 
async_to_sync.called assert task.pr_number == 123 assert task.status == TaskStatus.CANCELED assert task.activities.filter( - type=TaskActivityType.CLOSED, collaborator_id="" + type=TaskActivityType.CLOSED, collaborator_id="456" ).exists(), task.activities.all() def test_finalize_pr_opened(self, mocker, task, pull_request_payload_factory): async_to_sync = mocker.patch("metecho.api.model_mixins.async_to_sync") - pr = pull_request_payload_factory(number=123, title="A new PR") + pr = pull_request_payload_factory( + number=123, title="A new PR", user={"id": 456} + ) task.finalize_pr_opened(pr, originating_user_id=None) task.refresh_from_db() @@ -292,7 +292,7 @@ def test_finalize_pr_opened(self, mocker, task, pull_request_payload_factory): assert task.status == TaskStatus.IN_PROGRESS assert task.activities.filter( type=TaskActivityType.SUBMITTED_FOR_TESTING, - collaborator_id="", + collaborator_id="456", description="A new PR", ).exists(), task.activities.all() diff --git a/metecho/conftest.py b/metecho/conftest.py index e72b6208f..d22f22b46 100644 --- a/metecho/conftest.py +++ b/metecho/conftest.py @@ -132,6 +132,7 @@ class PullRequestPayloadFactory(factory.base.DictFactory): merged = False head = {"ref": "head-ref", "sha": "head-sha"} base = {"ref": "base-ref", "sha": "base-sha"} + user = {"id": 123, "login": "octocat"} @pytest.fixture From af1877c431aa539c204b6d15ef543333c789f7ff Mon Sep 17 00:00:00 2001 From: Ed Rivas Date: Thu, 17 Feb 2022 01:23:54 +0000 Subject: [PATCH 5/8] Fix tests --- metecho/conftest.py | 1 + 1 file changed, 1 insertion(+) diff --git a/metecho/conftest.py b/metecho/conftest.py index c19710d23..f49a818a4 100644 --- a/metecho/conftest.py +++ b/metecho/conftest.py @@ -151,6 +151,7 @@ class PullRequestPayloadFactory(factory.base.DictFactory): user = {"id": 123, "login": "octocat"} +@register class ShortIssueFactory(factory.StubFactory): """ Stub for github3.issues.issue.ShortIssue From 480628ed8b5a4de1f81dd925d736f586e5396313 Mon Sep 17 00:00:00 2001 From: Ed Rivas Date: Thu, 17 Feb 2022 02:12:24 +0000 Subject: [PATCH 6/8] Add TaskActivity endpoints --- config/settings/base.py | 1 + docs/api/schema.yml | 109 +++++++++++++++++++++++++++++++++++++ metecho/api/filters.py | 8 ++- metecho/api/serializers.py | 17 ++++++ metecho/api/tests/views.py | 56 +++++++++++++++++++ metecho/api/urls.py | 2 + metecho/api/views.py | 13 +++++ metecho/conftest.py | 20 ++++++- 8 files changed, 224 insertions(+), 2 deletions(-) diff --git a/config/settings/base.py b/config/settings/base.py index 40aa72713..52eb79799 100644 --- a/config/settings/base.py +++ b/config/settings/base.py @@ -448,6 +448,7 @@ def env(name, default=NoDefaultValue, type_=str): "TaskStatusEnum": "metecho.api.models.TaskStatus.choices", "EpicStatusEnum": "metecho.api.models.EpicStatus.choices", "ReviewStatusEnum": "metecho.api.models.TaskReviewStatus.choices", + "TaskActivityTypeEnum": "metecho.api.models.TaskActivityType.choices", }, "SERVE_INCLUDE_SCHEMA": False, # Don't include schema view in docs } diff --git a/docs/api/schema.yml b/docs/api/schema.yml index fd8fe8e12..2b0028b73 100644 --- a/docs/api/schema.yml +++ b/docs/api/schema.yml @@ -742,6 +742,54 @@ paths: schema: $ref: '#/components/schemas/ScratchOrg' description: '' + /api/task-activities/: + get: + operationId: task_activities_list + description: Activity log related to project Tasks. 
+ parameters: + - in: query + name: task + schema: + type: string + format: HashID + tags: + - task-activities + security: + - tokenAuth: [] + - cookieAuth: [] + responses: + '200': + content: + application/json: + schema: + type: array + items: + $ref: '#/components/schemas/TaskActivity' + description: '' + /api/task-activities/{id}/: + get: + operationId: task_activities_retrieve + description: Activity log related to project Tasks. + parameters: + - in: path + name: id + schema: + type: string + format: HashID + description: A unique integer value identifying this task activity. + required: true + tags: + - task-activities + security: + - tokenAuth: [] + - cookieAuth: [] + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/TaskActivity' + description: '' /api/tasks/: get: operationId: tasks_list @@ -2399,6 +2447,67 @@ components: - root_project - slug - status + TaskActivity: + type: object + properties: + id: + type: string + readOnly: true + type: + allOf: + - $ref: '#/components/schemas/TaskActivityTypeEum' + readOnly: true + link_title: + type: string + readOnly: true + link_url: + type: string + format: uri + readOnly: true + description: + type: string + readOnly: true + task: + type: string + format: HashID + readOnly: true + collaborator_id: + type: string + readOnly: true + scratch_org: + type: string + format: HashID + readOnly: true + nullable: true + required: + - collaborator_id + - description + - id + - link_title + - link_url + - scratch_org + - task + - type + TaskActivityTypeEum: + enum: + - Merged + - Approved + - Closed + - Test Org created + - Test Org deleted + - Dev Org created + - Dev Org deleted + - Submitted for testing + - Changes retrieved + - Changes requested + - Tester accepted + - Tester assigned + - Tester unassigned + - Developer accepted + - Developer assigned + - Developer unassigned + - Created + type: string TaskAssignee: type: object properties: diff --git a/metecho/api/filters.py b/metecho/api/filters.py index c4cc1d61e..9b1357f38 100644 --- a/metecho/api/filters.py +++ b/metecho/api/filters.py @@ -2,7 +2,7 @@ from django.utils.translation import gettext_lazy as _ from django_filters import rest_framework as filters -from .models import Epic, GitHubIssue, Project, ScratchOrg, Task +from .models import Epic, GitHubIssue, Project, ScratchOrg, Task, TaskActivity def slug_is_active(queryset, name, value): @@ -66,6 +66,12 @@ def filter_project(self, queryset, name, project): return queryset.filter(Q(project=project) | Q(epic__project=project)) +class TaskActivityFilter(filters.FilterSet): + class Meta: + model = TaskActivity + fields = ("task",) + + class ScratchOrgFilter(filters.FilterSet): class Meta: model = ScratchOrg diff --git a/metecho/api/serializers.py b/metecho/api/serializers.py index 99abc9b73..7d2062c17 100644 --- a/metecho/api/serializers.py +++ b/metecho/api/serializers.py @@ -20,6 +20,7 @@ ScratchOrgType, SiteProfile, Task, + TaskActivity, TaskActivityType, TaskReviewStatus, ) @@ -777,6 +778,22 @@ def get_matching_assigned_user(self, ghid: int) -> Optional[User]: return getattr(sa, "user", None) +class TaskActivitySerializer(HashIdModelSerializer): + class Meta: + model = TaskActivity + read_only_fields = ( + "id", + "type", + "link_title", + "link_url", + "description", + "task", + "collaborator_id", + "scratch_org", + ) + fields = read_only_fields + + class CreatePrSerializer(serializers.Serializer): title = serializers.CharField() critical_changes = serializers.CharField(allow_blank=True) diff 
--git a/metecho/api/tests/views.py b/metecho/api/tests/views.py index c2493037b..cda638067 100644 --- a/metecho/api/tests/views.py +++ b/metecho/api/tests/views.py @@ -1245,6 +1245,62 @@ def test_unassign_activity(self, client, git_hub_repository_factory, task_factor ).exists(), task.activities.all() +@pytest.mark.django_db +class TestTaskActivityViewSet: + @pytest.mark.parametrize( + "method, check", + ( + ("get", status.is_success), + ("post", status.is_client_error), + ("put", status.is_client_error), + ("patch", status.is_client_error), + ("delete", status.is_client_error), + ), + ) + def test_list_access(self, client, method, check): + response = getattr(client, method)(reverse("task-activity-list")) + assert check(response.status_code) + + @pytest.mark.parametrize( + "method, check", + ( + ("get", status.is_success), + ("post", status.is_client_error), + ("put", status.is_client_error), + ("patch", status.is_client_error), + ("delete", status.is_client_error), + ), + ) + def test_detail_access(self, client, task_activity, method, check): + response = getattr(client, method)( + reverse("task-activity-detail", args=[task_activity.id]) + ) + assert check(response.status_code) + + def test_response(self, client, task_activity): + response = client.get(reverse("task-activity-detail", args=[task_activity.id])) + assert tuple(response.json().keys()) == ( + "id", + "type", + "link_title", + "link_url", + "description", + "task", + "collaborator_id", + "scratch_org", + ) + + def test_filters__task(self, client, task_activity_factory): + task1 = task_activity_factory().task_id + task2 = task_activity_factory().task_id + + response = client.get(reverse("task-activity-list"), data={"task": task1}) + assert [obj["task"] for obj in response.json()] == [task1] + + response = client.get(reverse("task-activity-list"), data={"task": task2}) + assert [obj["task"] for obj in response.json()] == [task2] + + @pytest.mark.django_db class TestEpicViewSet: def test_get(self, client, epic_factory): diff --git a/metecho/api/urls.py b/metecho/api/urls.py index da4633519..b13841a30 100644 --- a/metecho/api/urls.py +++ b/metecho/api/urls.py @@ -10,6 +10,7 @@ HookView, ProjectViewSet, ScratchOrgViewSet, + TaskActivityViewSet, TaskViewSet, UserViewSet, ) @@ -21,6 +22,7 @@ router.register("projects", ProjectViewSet, basename="project") router.register("epics", EpicViewSet, basename="epic") router.register("tasks", TaskViewSet, basename="task") +router.register("task-activities", TaskActivityViewSet, basename="task-activity") router.register("scratch-orgs", ScratchOrgViewSet, basename="scratch-org") urlpatterns = router.urls + [ path("hook/", HookView.as_view(), name="hook"), diff --git a/metecho/api/views.py b/metecho/api/views.py index d5290e397..d3953a46d 100644 --- a/metecho/api/views.py +++ b/metecho/api/views.py @@ -22,6 +22,7 @@ GitHubIssueFilter, ProjectFilter, ScratchOrgFilter, + TaskActivityFilter, TaskFilter, ) from .hook_serializers import ( @@ -37,6 +38,7 @@ ScratchOrg, ScratchOrgType, Task, + TaskActivity, ) from .paginators import CustomPaginator from .serializers import ( @@ -52,6 +54,7 @@ ProjectSerializer, ReviewSerializer, ScratchOrgSerializer, + TaskActivitySerializer, TaskAssigneeSerializer, TaskSerializer, ) @@ -412,6 +415,16 @@ def assignees(self, request, pk=None): return Response(self.get_serializer(task).data) +class TaskActivityViewSet(viewsets.ReadOnlyModelViewSet): + """Activity log related to project Tasks.""" + + permission_classes = (IsAuthenticated,) + queryset = 
TaskActivity.objects.all() + serializer_class = TaskActivitySerializer + filter_backends = (DjangoFilterBackend,) + filterset_class = TaskActivityFilter + + class ScratchOrgViewSet( mixins.CreateModelMixin, mixins.RetrieveModelMixin, diff --git a/metecho/conftest.py b/metecho/conftest.py index f49a818a4..a0b968b23 100644 --- a/metecho/conftest.py +++ b/metecho/conftest.py @@ -7,7 +7,16 @@ from rest_framework.test import APIClient from sfdo_template_helpers.crypto import fernet_encrypt -from .api.models import Epic, GitHubIssue, GitHubRepository, Project, ScratchOrg, Task +from .api.models import ( + Epic, + GitHubIssue, + GitHubRepository, + Project, + ScratchOrg, + Task, + TaskActivity, + TaskActivityType, +) User = get_user_model() @@ -129,6 +138,15 @@ class Meta: project = factory.SubFactory(ProjectFactory) +@register +class TaskActivityFactory(factory.django.DjangoModelFactory): + class Meta: + model = TaskActivity + + type = TaskActivityType.CREATED + task = factory.SubFactory(TaskFactory) + + @register class ScratchOrgFactory(factory.django.DjangoModelFactory): class Meta: From 427ee5ff1ef52f2fa37317b84db4ef05b7d2eb74 Mon Sep 17 00:00:00 2001 From: Ed Rivas Date: Thu, 17 Feb 2022 02:17:44 +0000 Subject: [PATCH 7/8] Update schema --- docs/api/schema.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/api/schema.yml b/docs/api/schema.yml index 2b0028b73..052e1bf9c 100644 --- a/docs/api/schema.yml +++ b/docs/api/schema.yml @@ -2455,7 +2455,7 @@ components: readOnly: true type: allOf: - - $ref: '#/components/schemas/TaskActivityTypeEum' + - $ref: '#/components/schemas/TaskActivityTypeEnum' readOnly: true link_title: type: string @@ -2488,7 +2488,7 @@ components: - scratch_org - task - type - TaskActivityTypeEum: + TaskActivityTypeEnum: enum: - Merged - Approved From f4dc5070862b21ce9aa19591610de0ddb3c53261 Mon Sep 17 00:00:00 2001 From: Ed Rivas Date: Fri, 18 Feb 2022 18:11:55 +0000 Subject: [PATCH 8/8] Push notifications for TASK_ACTIVITY_LOGGED events --- docs/api/schema.yml | 5 +++++ metecho/api/models.py | 25 +++++++++++++++++++++++-- metecho/api/serializers.py | 1 + metecho/api/tests/views.py | 1 + metecho/consumers.py | 2 +- metecho/tests/consumers.py | 33 +++++++++++++++++++++++++++++++++ 6 files changed, 64 insertions(+), 3 deletions(-) diff --git a/docs/api/schema.yml b/docs/api/schema.yml index 052e1bf9c..fb0865836 100644 --- a/docs/api/schema.yml +++ b/docs/api/schema.yml @@ -2453,6 +2453,10 @@ components: id: type: string readOnly: true + created_at: + type: string + format: date-time + readOnly: true type: allOf: - $ref: '#/components/schemas/TaskActivityTypeEnum' @@ -2481,6 +2485,7 @@ components: nullable: true required: - collaborator_id + - created_at - description - id - link_title diff --git a/metecho/api/models.py b/metecho/api/models.py index 0fc0c57b5..cab3c0bd9 100644 --- a/metecho/api/models.py +++ b/metecho/api/models.py @@ -941,7 +941,14 @@ def log_activity(self, *, user_id=None, **kwargs): kwargs["collaborator_id"] = getattr( User.objects.filter(id=user_id).first(), "github_id", "" ) - return self.activities.create(**kwargs) + activity = self.activities.create(**kwargs) + # Send a WS notification with the new activity on this Task's channel + activity.notify_changed( + type_="TASK_ACTIVITY_LOGGED", + originating_user_id=None, + group_name=CHANNELS_GROUP_NAME.format(model="task", id=self.id), + ) + return activity def try_to_notify_assigned_user(self): # This takes the tester (a.k.a. 
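Worth noting: log_activity now does two things, persisting the TaskActivity row and broadcasting it to the owning Task's channel group via CHANNELS_GROUP_NAME.format(model="task", id=self.id). A hedged sketch of a call site, where the Task lookup and field values are purely illustrative:

from metecho.api.models import Task, TaskActivityType

task = Task.objects.get(pk="1234567")  # placeholder HashID
task.log_activity(
    type=TaskActivityType.CREATED,
    user_id=None,  # no originating user, so collaborator_id is stored blank
    description="Task created from a GitHub issue",  # illustrative text
)
# Subscribers to this Task's channel group receive a TASK_ACTIVITY_LOGGED event.
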
assigned_qa) and sends them an @@ -1525,7 +1532,7 @@ def notify_org_provisioning(self, originating_user_id): ) -class TaskActivity(HashIdMixin, TimestampsMixin): +class TaskActivity(HashIdMixin, TimestampsMixin, PushMixin): """Keep track of activities like re-assignments, org creations, commits, etc""" type = StringField(choices=TaskActivityType.choices) @@ -1551,6 +1558,20 @@ class Meta: def __str__(self) -> str: return self.get_type_display() + # begin PushMixin configuration: + + def get_serialized_representation(self, user): + from .serializers import TaskActivitySerializer + + return TaskActivitySerializer( + self, context=self._create_context_with_user(user) + ).data + + def subscribable_by(self, user): # pragma: nocover + return True + + # end PushMixin configuration + @receiver(user_logged_in) def user_logged_in_handler(sender, *, user, **kwargs): diff --git a/metecho/api/serializers.py b/metecho/api/serializers.py index 7d2062c17..307f145d3 100644 --- a/metecho/api/serializers.py +++ b/metecho/api/serializers.py @@ -783,6 +783,7 @@ class Meta: model = TaskActivity read_only_fields = ( "id", + "created_at", "type", "link_title", "link_url", diff --git a/metecho/api/tests/views.py b/metecho/api/tests/views.py index cda638067..8de48be0f 100644 --- a/metecho/api/tests/views.py +++ b/metecho/api/tests/views.py @@ -1281,6 +1281,7 @@ def test_response(self, client, task_activity): response = client.get(reverse("task-activity-detail", args=[task_activity.id])) assert tuple(response.json().keys()) == ( "id", + "created_at", "type", "link_title", "link_url", diff --git a/metecho/consumers.py b/metecho/consumers.py index 536d217c7..d6dd23a83 100644 --- a/metecho/consumers.py +++ b/metecho/consumers.py @@ -10,7 +10,7 @@ from .api.constants import CHANNELS_GROUP_NAME, LIST from .consumer_utils import clear_message_semaphore -KNOWN_MODELS = {"user", "project", "epic", "task", "scratchorg"} +KNOWN_MODELS = {"user", "project", "epic", "task", "taskactivity", "scratchorg"} class Actions(Enum): diff --git a/metecho/tests/consumers.py b/metecho/tests/consumers.py index e4ea18d41..f61d90aa2 100644 --- a/metecho/tests/consumers.py +++ b/metecho/tests/consumers.py @@ -8,6 +8,7 @@ EpicSerializer, ProjectSerializer, ScratchOrgSerializer, + TaskActivitySerializer, TaskSerializer, ) from ..consumers import PushNotificationConsumer @@ -140,6 +141,38 @@ async def test_push_notification_consumer__task(user_factory, task_factory): await communicator.disconnect() +@pytest.mark.django_db +async def test_push_notification_consumer__task_activity( + user_factory, task_activity_factory +): + user = await database_sync_to_async(user_factory)() + task_activity = await database_sync_to_async(task_activity_factory)() + + communicator = WebsocketCommunicator(websockets, "/ws/notifications/") + communicator.scope["user"] = user + connected, _ = await communicator.connect() + assert connected + + await communicator.send_json_to( + {"model": "taskactivity", "id": task_activity.id, "action": "SUBSCRIBE"} + ) + response = await communicator.receive_json_from() + assert "ok" in response + + await push_message_about_instance( + task_activity, + {"type": "TEST_MESSAGE", "payload": {"originating_user_id": "abc"}}, + ) + response = await communicator.receive_json_from() + model = await serialize_model(TaskActivitySerializer, task_activity, user) + assert response == { + "type": "TEST_MESSAGE", + "payload": {"originating_user_id": "abc", "model": model}, + } + + await communicator.disconnect() + + @pytest.mark.django_db async def 
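Because "taskactivity" is now among KNOWN_MODELS and log_activity broadcasts on the Task's group, a client can watch for new activity by subscribing to the Task itself. A rough sketch using the third-party websockets package, assuming the /ws/notifications/ path from the tests, a local host, an already-authenticated connection, and that the serialized activity arrives under payload["model"] as in the consumer tests above:

import asyncio
import json

import websockets  # third-party client library, used only for illustration


async def watch_task_activity(task_id: str) -> None:
    # A real client would also carry the authenticated session cookie.
    async with websockets.connect("ws://localhost:8000/ws/notifications/") as ws:
        await ws.send(
            json.dumps({"model": "task", "id": task_id, "action": "SUBSCRIBE"})
        )
        while True:
            message = json.loads(await ws.recv())
            if message.get("type") == "TASK_ACTIVITY_LOGGED":
                activity = message["payload"]["model"]
                print(activity["type"], activity["description"])


asyncio.run(watch_task_activity("1234567"))  # placeholder Task HashID
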
test_push_notification_consumer__scratch_org( user_factory, scratch_org_factory