diff --git a/config/settings/base.py b/config/settings/base.py
index 89196bbcf..96f239f41 100644
--- a/config/settings/base.py
+++ b/config/settings/base.py
@@ -366,6 +366,10 @@ def nl_separated_bytes_list(val: str) -> List[bytes]:
 }
 RQ_EXCEPTION_HANDLERS = ["metaci.build.exceptions.maybe_requeue_job"]
 CRON_JOBS = {
+    "execute_active_release_cohorts": {
+        "func": "metaci.release.tasks.execute_active_release_cohorts_job",
+        "cron_string": "* * * * *",
+    },
     "autoscale": {
         "func": "metaci.build.autoscaling.autoscale",
         "cron_string": "* * * * *",
diff --git a/metaci/build/models.py b/metaci/build/models.py
index 8d3c129f3..2a1ddbb87 100644
--- a/metaci/build/models.py
+++ b/metaci/build/models.py
@@ -114,6 +114,9 @@ def get_for_user_or_404(self, user, query, perms=None):
 
 
 class Build(models.Model):
+    FAILED_STATUSES = [BUILD_STATUSES.error, BUILD_STATUSES.fail]
+    COMPLETED_STATUSES = [BUILD_STATUSES.success, *FAILED_STATUSES]
+
     repo = models.ForeignKey(
         "repository.Repository", related_name="builds", on_delete=models.CASCADE
     )
@@ -728,6 +731,12 @@ def _get_flow_options(self) -> dict:
         if push_time:
             task_options["start_time"] = push_time.isoformat()
 
+        if self.build.plan.role == "publish_installer" and self.build.release:
+            # An AttributeError propagates here if the Release has no
+            # production_push_date set; publish_installer builds require one.
+            publish_date = self.build.release.production_push_date.isoformat()
+            options["publish_date"] = publish_date
+
         return options
 
     def set_commit_status(self):
diff --git a/metaci/build/tests/test_models.py b/metaci/build/tests/test_models.py
index fa9d4a3ae..1cb801c21 100644
--- a/metaci/build/tests/test_models.py
+++ b/metaci/build/tests/test_models.py
@@ -270,14 +270,48 @@ def test_get_flow_options__push_production(self):
         build_flow.build.release = Release(
             repo=build_flow.build.repo,
             change_case_template=change_case_template,
+            version_number="1.0",
         )
-        build_flow.build.release.version_number = "1.0"
         build_flow.build.release.save()
         options = build_flow._get_flow_options()
         assert options["push_all"]["version"] == "1.0"
         expected = f"{datetime.date.today().isoformat()}T21:00:00+00:00"
         assert options["push_all"]["start_time"] == expected
 
+    def test_get_flow_options__publish_installer(self):
+        build_flow = BuildFlowFactory()
+        build_flow.build.plan = PlanFactory(
+            role="publish_installer", change_traffic_control=True
+        )
+        build_flow.build.plan.save()
+        build_flow.build.repo = RepositoryFactory(
+            default_implementation_steps=[
+                {
+                    "role": "publish_installer",
+                    "duration": 10,
+                    "push_time": 21,
+                    "start_time": 8,
+                    "start_date_offset": 0,
+                },
+            ],
+        )
+        build_flow.build.repo.save()
+        planrepo = PlanRepositoryFactory(
+            plan=build_flow.build.plan, repo=build_flow.build.repo
+        )
+        planrepo.save()
+        change_case_template = ChangeCaseTemplate()
+        change_case_template.save()
+        publish_date = datetime.date.today() + datetime.timedelta(days=6)
+        build_flow.build.release = Release(
+            repo=build_flow.build.repo,
+            change_case_template=change_case_template,
+            production_push_date=publish_date,
+            version_number="1.0",
+        )
+        options = build_flow._get_flow_options()
+        assert options["publish_date"] == publish_date.isoformat()
+
 
 def detach_logger(model):
     for handler in model.logger.handlers:
diff --git a/metaci/plan/migrations/0041_auto_20211123_2247.py b/metaci/plan/migrations/0041_auto_20211123_2247.py
new file mode 100644
index 000000000..f01bceeda
--- /dev/null
+++ b/metaci/plan/migrations/0041_auto_20211123_2247.py
@@ -0,0 +1,18 @@
+# Generated by Django 3.1.13 on 2021-11-23 22:47
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('plan', '0040_plan_commit_status_regex'),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name='plan',
+            name='role',
+            field=models.CharField(choices=[('beta_release', 'Beta Release'), ('beta_test', 'Beta Test'), ('deploy', 'Deployment'), ('feature', 'Feature Test'), ('feature_robot', 'Feature Test Robot'), ('publish_installer', 'Publish Installer'), ('other', 'Other'), ('push_sandbox', 'Push Sandbox'), ('push_production', 'Push Production'), ('qa', 'QA Org'), ('release_deploy', 'Release Deploy'), ('release', 'Release'), ('release_test', 'Release Test'), ('scratch', 'Scratch Org')], max_length=17),
+        ),
+    ]
diff --git a/metaci/plan/migrations/0043_merge_20211130_2313.py b/metaci/plan/migrations/0043_merge_20211130_2313.py
new file mode 100644
index 000000000..accbacd99
--- /dev/null
+++ b/metaci/plan/migrations/0043_merge_20211130_2313.py
@@ -0,0 +1,14 @@
+# Generated by Django 3.1.13 on 2021-11-30 23:13
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('plan', '0042_auto_20211117_0113'),
+        ('plan', '0041_auto_20211123_2247'),
+    ]
+
+    operations = [
+    ]
diff --git a/metaci/plan/models.py b/metaci/plan/models.py
index ab8d22e6e..dd300a20a 100644
--- a/metaci/plan/models.py
+++ b/metaci/plan/models.py
@@ -24,6 +24,7 @@
     ("deploy", "Deployment"),
     ("feature", "Feature Test"),
     ("feature_robot", "Feature Test Robot"),
+    ("publish_installer", "Publish Installer"),
     ("other", "Other"),
     ("push_sandbox", "Push Sandbox"),
     ("push_production", "Push Production"),
@@ -83,7 +84,7 @@ class Plan(models.Model):
         through_fields=("plan", "repo"),
     )
     trigger = models.CharField(max_length=8, choices=TRIGGER_TYPES)
-    role = models.CharField(max_length=16, choices=BUILD_ROLES)
+    role = models.CharField(max_length=17, choices=BUILD_ROLES)
     queue = models.CharField(max_length=16, choices=QUEUES, default="default")
     regex = models.CharField(max_length=255, null=True, blank=True)
     commit_status_regex = models.CharField(
diff --git a/metaci/release/migrations/0026_auto_20211123_1841.py b/metaci/release/migrations/0026_auto_20211123_1841.py
new file mode 100644
index 000000000..25d552586
--- /dev/null
+++ b/metaci/release/migrations/0026_auto_20211123_1841.py
@@ -0,0 +1,23 @@
+# Generated by Django 3.1.13 on 2021-11-23 18:41
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('release', '0025_auto_20211117_0126'),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name='releasecohort',
+            name='dependency_graph',
+            field=models.JSONField(blank=True, null=True),
+        ),
+        migrations.AddField(
+            model_name='releasecohort',
+            name='error_message',
+            field=models.TextField(blank=True, null=True),
+        ),
+    ]
diff --git a/metaci/release/models.py b/metaci/release/models.py
index 48bfb3df7..33871ac41 100644
--- a/metaci/release/models.py
+++ b/metaci/release/models.py
@@ -32,6 +32,8 @@ class ReleaseCohort(models.Model):
     )
     merge_freeze_start = models.DateTimeField(_("Merge Freeze Start Time"))
     merge_freeze_end = models.DateTimeField(_("Merge Freeze End Time"))
+    error_message = models.TextField(null=True, blank=True)
+    dependency_graph = models.JSONField(null=True, blank=True)
 
     def __str__(self):
         return self.name
@@ -147,6 +149,8 @@ class Release(StatusModel):
         ("waiting", _("Waiting")),
         ("blocked", _("Blocked")),
     )
+    FAILED_STATUSES = [STATUS.failed]
+    COMPLETED_STATUSES = [STATUS.completed, *FAILED_STATUSES]
     created = AutoCreatedField(_("created"))
     modified = AutoLastModifiedField(_("modified"))
     repo = models.ForeignKey(
diff --git a/metaci/release/tasks.py b/metaci/release/tasks.py
index 897861b01..e027e1795 100644
--- a/metaci/release/tasks.py
+++ b/metaci/release/tasks.py
@@ -1,7 +1,11 @@
 from collections import defaultdict
 from datetime import datetime, timezone
-from typing import List
+from typing import DefaultDict, List, Optional
 
+from cumulusci.core.dependencies.dependencies import GitHubDynamicDependency
+from cumulusci.core.dependencies.github import get_remote_project_config
+from cumulusci.core.github import get_github_api_for_repo
+from cumulusci.utils.git import split_repo_url
 from django.conf import settings
 from django.db.models.query import QuerySet
 from django.db.models.signals import post_delete, post_save
@@ -12,11 +16,16 @@
 from github3.repos.repo import Repository as GitHubRepository
 
 from metaci.build.models import BUILD_STATUSES, Build
-from metaci.plan.models import PlanRepository
+from metaci.cumulusci.keychain import GitHubSettingsKeychain
+from metaci.plan.models import Plan, PlanRepository
 from metaci.release.models import Release, ReleaseCohort
 from metaci.repository.models import Repository
 
 
+class DependencyGraphError(Exception):
+    pass
+
+
 def _run_planrepo_for_release(release: Release, planrepo: PlanRepository):
     build = Build(
         repo=release.repo,
@@ -55,13 +64,11 @@ def _run_release_builds(release: Release):
     Inspect a Release and run the next appropriate Build and maintain Release
     Status automatically. Triggered by a cronjob every minute.
     """
-    FAILED_STATUSES = [BUILD_STATUSES.error, BUILD_STATUSES.fail]
-    COMPLETED_STATUSES = [BUILD_STATUSES.success, *FAILED_STATUSES]
 
     # NOTE: bug where MetaCI builds can hang in In Progress forever
     # will be painful here.
     def running(builds: List[Build]) -> bool:
-        return any(b.status not in COMPLETED_STATUSES for b in builds)
+        return any(b.status not in Build.COMPLETED_STATUSES for b in builds)
 
     def last_succeeded(builds: List[Build]) -> bool:
         return (
@@ -74,11 +81,11 @@ def last_failed(builds: List[Build]) -> bool:
         return (
             bool(builds)
             and not running(builds)
-            and builds[-1].status in FAILED_STATUSES
+            and builds[-1].status in Build.FAILED_STATUSES
         )
 
     def any_failed(builds: List[Build]) -> bool:
-        return any(b.status in FAILED_STATUSES for b in builds)
+        return any(b.status in Build.FAILED_STATUSES for b in builds)
 
     if release.status in [Release.STATUS.failed, Release.STATUS.completed]:
         # Release manager must manually set the status back to In Progress if they're attempting
@@ -145,6 +152,185 @@ def any_failed(builds: List[Build]) -> bool:
     # The release is running - no new builds required.
 
 
+# Construct an object we can use as a ProjectConfig equivalent
+# for the `flatten()` method. All it needs is the .logger property
+# and the method `get_repo_from_url()`.
+# This is a bit fragile. TODO: refactor flatten() et al to
+# accept an explicitly limited context object.
+class NonProjectConfig:
+    def get_repo_from_url(self, url: str) -> Optional[GitHubRepository]:
+        owner, name = split_repo_url(url)
+
+        return get_github_api_for_repo(
+            GitHubSettingsKeychain(), owner, name
+        ).repository(owner, name)
+
+
+def get_dependency_graph(
+    releases: List[Release],
+) -> DefaultDict[str, List[str]]:
+    """Turn a list of Releases into a dependency graph, mapping GitHub repo URLs
+    to other GitHub repo URLs on which they depend. Note that the return value
+    may include repo URLs that are not part of this Release Cohort or list of
+    Releases."""
+    deps = defaultdict(list)
+    to_process = [(r.repo.url, r.created_from_commit) for r in releases]
+    context = NonProjectConfig()
+
+    while True:
+        try:
+            (this_dep_url, this_dep_commit) = to_process.pop(0)
+        except IndexError:
+            break
+
+        # Construct a dependency representing this Release.
+        this_dep = GitHubDynamicDependency(github=this_dep_url, ref=this_dep_commit)
+
+        # We're only interested in dependencies on other GitHub repos (== Releases)
+        transitive_deps = [
+            td
+            for td in this_dep.flatten(context)
+            if isinstance(td, GitHubDynamicDependency)
+        ]
+        for transitive_dep in transitive_deps:
+            # Add this specific dependency relationship to the graph
+            deps[this_dep_url].append(transitive_dep.github)
+
+            if transitive_dep.github in deps:
+                # Already processed
+                continue
+
+            if (
+                transitive_dep.github not in deps
+                and all(url != transitive_dep.github for url, _ in to_process)
+            ):
+                # We need to process this transitive dependency.
+
+                # Find the ref for this dependency
+                releases_for_transitive_dep = [
+                    r for r in releases if r.repo.url == transitive_dep.github
+                ]
+                if len(releases_for_transitive_dep) > 1:
+                    raise DependencyGraphError(
+                        f"More than one Release with repo {transitive_dep.github} in Release Cohort."
+                    )
+
+                to_process.append(
+                    (
+                        transitive_dep.github,
+                        releases_for_transitive_dep[0].created_from_commit
+                        if releases_for_transitive_dep
+                        else None,
+                    )
+                )
+
+    return deps
+
+
+def create_dependency_tree(rc: ReleaseCohort):
+    try:
+        graph = get_dependency_graph(rc.releases.all())
+    except DependencyGraphError as e:
+        rc.status = ReleaseCohort.STATUS.failed
+        rc.error_message = str(e)
+        rc.save()
+        return
+
+    rc.dependency_graph = graph
+    rc.save()
+
+
+def advance_releases(rc: ReleaseCohort):
+    dependency_graph = defaultdict(list, rc.dependency_graph or {})
+    releases = rc.releases.all()
+    for release in releases:
+        if release.status not in Release.COMPLETED_STATUSES:
+            # Find this Release's dependencies and check if they're satisfied.
+            deps = dependency_graph[release.repo.url]
+            if release.status == Release.STATUS.inprogress or all_deps_satisfied(
+                deps, dependency_graph, releases
+            ):
+                # This Release is ready to advance.
+                _run_release_builds(release)
+
+
+def execute_active_release_cohorts():
+    # First, identify Release Cohorts that need their dependency trees created.
+    for rc in ReleaseCohort.objects.filter(
+        status=ReleaseCohort.STATUS.approved, dependency_graph__isnull=True
+    ):
+        create_dependency_tree(rc)
+
+    publish_installer_plans = Plan.objects.filter(role="publish_installer", active=True)
+
+    # Next, identify in-progress Release Cohorts that have reached a successful conclusion.
+    # Release Cohorts whose component Releases fail are updated to a failure state by Release automation.
+    for rc in ReleaseCohort.objects.filter(status=ReleaseCohort.STATUS.active).exclude(
+        releases__in=Release.objects.exclude(status=Release.STATUS.completed)
+    ):
+        rc.status = ReleaseCohort.STATUS.completed
+        rc.save()
+
+        if publish_installer_plans.count() == 1:
+            run_publish_installer_plans(rc, publish_installer_plans.first())
+
+    # Next, identify in-progress Release Cohorts that need to be advanced.
+    for rc in ReleaseCohort.objects.filter(
+        status=ReleaseCohort.STATUS.active
+    ).prefetch_related("releases"):
+        # This Release Cohort can potentially advance. Grab our dependency graph,
+        # then iterate through Releases that are ready to advance and call
+        # the function that advances them.
+        advance_releases(rc)
+
+
+execute_active_release_cohorts_job = job(execute_active_release_cohorts)
+
+
+# Run publish_installer for every repo that has a release in this cohort
+# if that product has a metadeploy plan in its cumulusci.yml
+def run_publish_installer_plans(rc: ReleaseCohort, publish_installer_plan: Plan):
+    for release in rc.releases.all():
+        if release_has_plans(release):
+            build = Build(
+                repo=release.repo,
+                plan=publish_installer_plan,
+                commit=release.created_from_commit,
+                build_type="auto",
+                release=release,
+                release_relationship_type="automation",
+            )
+            build.save()
+
+
+def release_has_plans(release: Release) -> bool:
+    github_repo = release.repo.get_github_api()
+    config = get_remote_project_config(
+        github_repo, release.created_from_commit
+    ) # TODO: exception handling
+    print(config)
+    return len(config.get("plans", {})) > 0
+
+
+def all_deps_satisfied(
+    deps: List[str], graph: DefaultDict[str, List[str]], releases: List[Release]
+) -> bool:
+    """Recursively walk the dependency tree to validate that all dependencies are
+    either complete or out of scope."""
+
+    releases_dict = {r.repo.url: r for r in releases}
+
+    return all(
+        releases_dict[d].status == Release.STATUS.completed
+        for d in deps
+        if d in releases_dict
+    ) and all(
+        all_deps_satisfied(graph[d], graph, releases)
+        for d in deps
+        if d not in releases_dict
+    )
+
+
 @job
 def update_cohort_status() -> str:
     """Run every minute to update Release Cohorts to Active once they pass their start date
@@ -157,17 +343,9 @@ def _update_release_cohorts() -> str:
     now = datetime.now(tz=timezone.utc)
 
     # Signals will trigger the updating of merge freezes upon save.
-    names_ended = []
-    for rc in ReleaseCohort.objects.filter(
-        status=ReleaseCohort.STATUS.active, merge_freeze_end__lt=now
-    ).all():
-        rc.status = ReleaseCohort.STATUS.completed
-        rc.save()
-        names_ended.append(rc.name)
-
     names_started = []
     for rc in ReleaseCohort.objects.filter(
-        status=ReleaseCohort.STATUS.planned,
+        status=ReleaseCohort.STATUS.approved,
         merge_freeze_start__lt=now,
         merge_freeze_end__gt=now,
     ).all():
@@ -175,7 +353,9 @@
         rc.save()
         names_started.append(rc.name)
 
-    return f"Enabled merge freeze on {', '.join(names_started)} and ended merge freeze on {', '.join(names_ended)}."
+    # Moving into a completed status is handled by release process automation.
+
+    return f"Enabled merge freeze on {', '.join(names_started)}."
 
 
 def set_merge_freeze_status(repo: Repository, *, freeze: bool):
diff --git a/metaci/release/tests/test_tasks.py b/metaci/release/tests/test_tasks.py
index f6625d0bd..6de1ed5ab 100644
--- a/metaci/release/tests/test_tasks.py
+++ b/metaci/release/tests/test_tasks.py
@@ -1,5 +1,7 @@
 import unittest
+from collections import defaultdict
 from datetime import datetime, timedelta, timezone
+from unittest.mock import Mock, call
 
 import pytest
 from django.conf import settings
@@ -11,9 +13,15 @@
                                          ReleaseCohortFactory, ReleaseFactory,
                                          RepositoryFactory)
 from metaci.release.models import Release, ReleaseCohort
-from metaci.release.tasks import (_run_planrepo_for_release,
+from metaci.release.tasks import (DependencyGraphError,
+                                  _run_planrepo_for_release,
                                   _run_release_builds, _update_release_cohorts,
+                                  advance_releases, all_deps_satisfied,
+                                  create_dependency_tree,
+                                  execute_active_release_cohorts,
+                                  release_has_plans,
                                   release_merge_freeze_if_safe,
+                                  run_publish_installer_plans,
                                   set_merge_freeze_status)
 
 
@@ -255,12 +263,6 @@ def test_run_release_builds__succeeded_release_no_action(rpr_mock, smfs_mock):
 
 @pytest.mark.django_db
 def test_update_release_cohorts():
-    cohort_ended = ReleaseCohortFactory()
-    cohort_ended.merge_freeze_end = datetime.now(tz=timezone.utc) - timedelta(
-        minutes=20
-    )
-    cohort_ended.save()
-
     cohort_started = ReleaseCohortFactory()
     cohort_started.merge_freeze_start = datetime.now(tz=timezone.utc) - timedelta(
         minutes=20
@@ -268,12 +270,11 @@
     cohort_started.merge_freeze_end = datetime.now(tz=timezone.utc) + timedelta(
         minutes=20
     )
-    cohort_started.status = ReleaseCohort.STATUS.planned
+    cohort_started.status = ReleaseCohort.STATUS.approved
    cohort_started.save()
 
     assert (
-        _update_release_cohorts()
-        == f"Enabled merge freeze on {cohort_started.name} and ended merge freeze on {cohort_ended.name}."
+        _update_release_cohorts() == f"Enabled merge freeze on {cohort_started.name}."
     )
 
 
@@ -420,3 +421,269 @@ def test_release_merge_freeze_if_safe__safe(smfs_mock):
     release_merge_freeze_if_safe(release.repo)
 
     smfs_mock.assert_called_once_with(release.repo, freeze=False)
+
+
+def test_all_deps_satisfied():
+    a = Mock()
+    b = Mock()
+    c = Mock()
+
+    # Build a mock release tree where the middle link (b)
+    # is not being released in this Cohort.
+    # C depends on B depends on A.
+    a.repo.url = "foo"
+    b.repo.url = "bar"
+    c.repo.url = "spam"
+    a.status = Release.STATUS.completed
+    c.status = Release.STATUS.blocked
+
+    # Build the dependency graph, a map from GitHub URL
+    # to a set of dependency GitHub URLs.
+    graph = defaultdict(list)
+    graph[b.repo.url].append(a.repo.url)
+    graph[c.repo.url].append(b.repo.url)
+
+    # We only have releases for A and C. We're asking,
+    # "Is C ready to start?"
+    assert all_deps_satisfied(graph[c.repo.url], graph, [a, c]) is True
+
+    # Validate behavior with empty dependency lists
+    assert all_deps_satisfied([], graph, [a, c]) is True
+    assert all_deps_satisfied(graph[a.repo.url], graph, [a, c]) is True
+
+    # Validate the negative case
+    a.status = Release.STATUS.failed
+    assert all_deps_satisfied(graph[c.repo.url], graph, [a, c]) is False
+
+
+@pytest.mark.django_db
+@unittest.mock.patch("metaci.release.tasks.get_dependency_graph")
+def test_create_dependency_tree(get_dependency_graph):
+    graph = defaultdict(list)
+    graph["foo"].append("bar")
+    get_dependency_graph.return_value = graph
+    rc = ReleaseCohortFactory()
+
+    create_dependency_tree(rc)
+    assert rc.dependency_graph == {"foo": ["bar"]}
+
+
+@pytest.mark.django_db
+@unittest.mock.patch("metaci.release.tasks.get_dependency_graph")
+def test_create_dependency_tree__failure(get_dependency_graph):
+    get_dependency_graph.side_effect = DependencyGraphError("foo")
+    rc = ReleaseCohortFactory()
+
+    create_dependency_tree(rc)
+    assert rc.error_message == str(DependencyGraphError("foo"))
+    assert rc.status == ReleaseCohort.STATUS.failed
+
+
+@pytest.mark.django_db
+@unittest.mock.patch("metaci.release.tasks._run_release_builds")
+@unittest.mock.patch("metaci.release.tasks.set_merge_freeze_status")
+def test_advance_releases(set_merge_freeze_status, run_release_builds):
+    graph = defaultdict(list)
+    graph["spam"].append("foo")
+    graph["baz"].append("bar")
+    rc = ReleaseCohortFactory(dependency_graph=graph)
+    _ = ReleaseFactory(
+        repo__url="foo", release_cohort=rc, status=Release.STATUS.completed
+    )
+    r2 = ReleaseFactory(
+        repo__url="bar", release_cohort=rc, status=Release.STATUS.inprogress
+    )
+    r3 = ReleaseFactory(
+        repo__url="spam", release_cohort=rc, status=Release.STATUS.blocked
+    )
+    _ = ReleaseFactory(
+        repo__url="baz", release_cohort=rc, status=Release.STATUS.blocked
+    )
+
+    advance_releases(rc)
+
+    run_release_builds.assert_has_calls([call(r2), call(r3)], any_order=True)
+
+
+@pytest.mark.django_db
+@unittest.mock.patch("metaci.release.tasks.get_dependency_graph")
+@unittest.mock.patch("metaci.release.tasks.set_merge_freeze_status")
+def test_execute_active_release_cohorts__creates_dependency_trees(
+    smfs_mock,
+    get_dependency_graph_mock,
+):
+    rc = ReleaseCohortFactory(status=ReleaseCohort.STATUS.approved)
+    _ = ReleaseFactory(
+        repo__url="foo", release_cohort=rc, status=Release.STATUS.waiting
+    )
+    get_dependency_graph_mock.return_value = {}
+
+    execute_active_release_cohorts()
+    rc.refresh_from_db()
+
+    assert rc.dependency_graph == {}
+
+
+@pytest.mark.django_db
+@unittest.mock.patch("metaci.release.tasks.set_merge_freeze_status")
+@unittest.mock.patch("metaci.release.tasks.advance_releases")
+def test_execute_active_release_cohorts__completes_finished_cohorts(
+    advance_releases_mock, smfs_mock
+):
+    rc = ReleaseCohortFactory(status=ReleaseCohort.STATUS.active, dependency_graph={})
+    _ = ReleaseFactory(
+        repo__url="foo", release_cohort=rc, status=Release.STATUS.completed
+    )
+
+    rc_progress = ReleaseCohortFactory(
+        status=ReleaseCohort.STATUS.active, dependency_graph={}
+    )
+    _ = ReleaseFactory(
+        repo__url="bar", release_cohort=rc_progress, status=Release.STATUS.blocked
+    )
+    _ = ReleaseFactory(
+        repo__url="baz", release_cohort=rc_progress, status=Release.STATUS.completed
+    )
+
+    execute_active_release_cohorts()
+    rc.refresh_from_db()
+    rc_progress.refresh_from_db()
+
+    assert rc.status == ReleaseCohort.STATUS.completed
+    assert rc_progress.status == ReleaseCohort.STATUS.active
+
+
+@pytest.mark.django_db
+@unittest.mock.patch("metaci.release.tasks.advance_releases")
+@unittest.mock.patch("metaci.release.tasks.set_merge_freeze_status")
+def test_execute_active_release_cohorts__advances_release_cohorts(
+    smfs_mock,
+    advance_releases_mock,
+):
+    rc = ReleaseCohortFactory(status=ReleaseCohort.STATUS.active, dependency_graph={})
+    _ = ReleaseFactory(
+        repo__url="foo", release_cohort=rc, status=Release.STATUS.inprogress
+    )
+    _ = ReleaseCohortFactory(status=ReleaseCohort.STATUS.completed, dependency_graph={})
+
+    execute_active_release_cohorts()
+
+    advance_releases_mock.assert_called_once_with(rc)
+
+
+@pytest.mark.django_db
+@unittest.mock.patch("metaci.release.tasks.run_publish_installer_plans")
+@unittest.mock.patch("metaci.release.tasks.set_merge_freeze_status")
+def test_execute_active_release_cohorts__run_publish_installer_plans(
+    smfs_mock,
+    run_publish_installer_plans_mock,
+):
+    other_plan = PlanFactory(role="release", active=True)
+    publish_installer_plan = PlanFactory(role="publish_installer", active=True)
+    rc = ReleaseCohortFactory(status=ReleaseCohort.STATUS.active)
+    _ = ReleaseFactory(
+        repo__url="foo", release_cohort=rc, status=Release.STATUS.completed
+    )
+
+    execute_active_release_cohorts()
+
+    run_publish_installer_plans_mock.assert_called_once_with(rc, publish_installer_plan)
+
+
+@pytest.mark.django_db
+@unittest.mock.patch("metaci.release.tasks.release_has_plans")
+@unittest.mock.patch("metaci.release.tasks.set_merge_freeze_status")
+def test_run_publish_installer_plans__with_cumulusci_yml_plans(
+    smfs_mock,
+    release_has_plans_mock,
+):
+    publish_installer_plan = PlanFactory(role="publish_installer", active=True)
+    rc = ReleaseCohortFactory(status=ReleaseCohort.STATUS.active)
+    release = ReleaseFactory(
+        repo__url="foo",
+        release_cohort=rc,
+        status=Release.STATUS.completed,
+        created_from_commit="abc",
+    )
+    release_has_plans_mock.return_value = True
+
+    assert Build.objects.count() == 0
+
+    run_publish_installer_plans(rc, publish_installer_plan)
+
+    assert Build.objects.count() == 1
+    build = Build.objects.first()
+    assert build.release == release
+    assert build.plan == publish_installer_plan
+
+    release_has_plans_mock.assert_called_once_with(release)
+
+
+@pytest.mark.django_db
+@unittest.mock.patch("metaci.release.tasks.release_has_plans")
+@unittest.mock.patch("metaci.release.tasks.set_merge_freeze_status")
+def test_run_publish_installer_plans__without_cumulusci_yml_plans(
+    smfs_mock,
+    release_has_plans_mock,
+):
+    publish_installer_plan = PlanFactory(role="publish_installer", active=True)
+    rc = ReleaseCohortFactory(status=ReleaseCohort.STATUS.active)
+    release = ReleaseFactory(
+        repo__url="foo",
+        release_cohort=rc,
+        status=Release.STATUS.completed,
+        created_from_commit="abc",
+    )
+    release_has_plans_mock.return_value = False
+
+    assert Build.objects.count() == 0
+
+    run_publish_installer_plans(rc, publish_installer_plan)
+
+    assert Build.objects.count() == 0
+
+    release_has_plans_mock.assert_called_once_with(release)
+
+
+@pytest.mark.django_db
+@unittest.mock.patch("metaci.release.tasks.get_remote_project_config")
+@unittest.mock.patch("metaci.release.tasks.set_merge_freeze_status")
+def test_release_has_plans__with_plans(
+    smfs_mock,
+    get_remote_project_config_mock,
+):
+    rc = ReleaseCohortFactory(status=ReleaseCohort.STATUS.active)
+    repo = RepositoryFactory()
+    repo.get_github_api = unittest.mock.Mock()
+    release = ReleaseFactory(
+        repo=repo,
+        repo__url="foo",
+        release_cohort=rc,
+        status=Release.STATUS.completed,
+        created_from_commit="abc",
+    )
+    get_remote_project_config_mock.return_value = {"plans": ["abc", "123"]}
+
+    assert release_has_plans(release)
+
+
+@pytest.mark.django_db
+@unittest.mock.patch("metaci.release.tasks.get_remote_project_config")
+@unittest.mock.patch("metaci.release.tasks.set_merge_freeze_status")
+def test_release_has_plans__without_plans(
+    smfs_mock,
+    get_remote_project_config_mock,
+):
+    rc = ReleaseCohortFactory(status=ReleaseCohort.STATUS.active)
+    repo = RepositoryFactory()
+    repo.get_github_api = unittest.mock.Mock()
+    release = ReleaseFactory(
+        repo=repo,
+        repo__url="foo",
+        release_cohort=rc,
+        status=Release.STATUS.completed,
+        created_from_commit="abc",
+    )
+    get_remote_project_config_mock.return_value = {}
+
+    assert not release_has_plans(release)
diff --git a/package.json b/package.json
index 598dd0301..ab3f6dd2b 100644
--- a/package.json
+++ b/package.json
@@ -110,7 +110,7 @@
   },
   "scripts": {
     "webpack:serve": "webpack serve --config webpack.dev.js",
-    "django:serve": "python manage.py runserver 0.0.0.0:8000",
+    "django:serve": "python manage.py runserver 0.0.0.0:${PORT:-8000}",
     "redis:clear": "redis-cli FLUSHALL",
     "rq:work": "python manage.py metaci_rqworker short",
     "rq:scheduler": "python manage.py metaci_rqscheduler --queue short",