def test_get_sorted_events(self):
    """Events sort by relative offset first, then by delivery hour within the same offset."""
    # build a campaign with three events scheduled relative to the planting date
    reminders = Campaign.create(self.org, self.user, "Planting Reminders", self.farmers)
    reminder_flow = self.create_flow()

    week1_1pm = CampaignEvent.create_flow_event(
        self.org, self.admin, reminders, self.planting_date,
        offset=1, unit='W', flow=reminder_flow, delivery_hour='13',
    )
    week1_9am = CampaignEvent.create_flow_event(
        self.org, self.admin, reminders, self.planting_date,
        offset=1, unit='W', flow=reminder_flow, delivery_hour='9',
    )
    week2_1am = CampaignEvent.create_flow_event(
        self.org, self.admin, reminders, self.planting_date,
        offset=2, unit='W', flow=reminder_flow, delivery_hour='1',
    )

    # within offset 1, 9am precedes 1pm; offset 2 comes last
    self.assertEqual(reminders.get_sorted_events(), [week1_9am, week1_1pm, week2_1am])

    # now add an event pointing at a flow saved under an old spec version (3)
    flow_json = self.get_flow_json('call_me_maybe')['definition']
    old_flow = Flow.create_instance(dict(
        name='Call Me Maybe', org=self.org, flow_type=Flow.MESSAGE,
        created_by=self.admin, modified_by=self.admin, saved_by=self.admin,
        version_number=3,
    ))
    FlowRevision.create_instance(dict(
        flow=old_flow, definition=flow_json, spec_version=3, revision=1,
        created_by=self.admin, modified_by=self.admin,
    ))
    week2_5am = CampaignEvent.create_flow_event(
        self.org, self.admin, reminders, self.planting_date,
        offset=2, unit='W', flow=old_flow, delivery_hour='5',
    )

    # the in-memory instance still shows the old version number...
    self.assertEqual(old_flow.version_number, 3)
    self.assertEqual(reminders.get_sorted_events(), [week1_9am, week1_1pm, week2_1am, week2_5am])

    # ...but after reloading from the DB the flow has been migrated to the
    # current export version (presumably triggered by sorting the events)
    old_flow.refresh_from_db()
    self.assertNotEqual(old_flow.version_number, 3)
    self.assertEqual(old_flow.version_number, get_current_export_version())
def migrate_flows(min_version=None):  # pragma: no cover
    """
    Migrate all active flows below the target spec version up to that version.

    Args:
        min_version: spec version to migrate to; defaults to the current export version.

    Returns:
        True if every flow migrated cleanly (or none needed migration), False otherwise.
    """
    import traceback

    to_version = min_version or get_current_export_version()

    # get all flows below the min version
    old_versions = Flow.get_versions_before(to_version)
    flows_to_migrate = Flow.objects.filter(is_active=True, version_number__in=old_versions)

    flow_ids = list(flows_to_migrate.values_list("id", flat=True))
    total = len(flow_ids)

    if not total:
        print("All flows up to date")
        return True

    print("Found %d flows to migrate to %s..." % (total, to_version))

    num_updated = 0
    errored = []

    # migrate in batches of 1000 ids to bound per-query memory
    for id_batch in chunk_list(flow_ids, 1000):
        for flow in Flow.objects.filter(id__in=id_batch):
            try:
                flow.ensure_current_version(min_version=to_version)
                num_updated += 1
            except Exception:
                # don't let one broken flow abort the run, but surface the
                # traceback so the failure is diagnosable (it was previously
                # swallowed, leaving only the flow name)
                print("Unable to migrate flow '%s' (#%d)" % (flow.name, flow.id))
                print(traceback.format_exc())
                errored.append(flow)

    print(" > Flows migrated: %d of %d (%d errored)" % (num_updated, total, len(errored)))

    if errored:
        print(" > Errored flows: %s" % (", ".join(str(e.id) for e in errored)))

    return len(errored) == 0
def validate(self, data):
    """Resolve each submitted step's node UUID against the referenced flow revision."""
    steps = data.get("steps")
    revision = data.get("revision", data.get("version"))

    if not revision:  # pragma: needs cover
        raise serializers.ValidationError("Missing 'revision' field")

    # load the specific revision of the flow
    flow_revision = self.flow_obj.revisions.filter(revision=revision).first()
    if not flow_revision:
        raise serializers.ValidationError("Invalid revision: %s" % revision)

    # make sure we are operating off a current spec
    definition = FlowRevision.migrate_definition(
        flow_revision.definition, self.flow_obj, get_current_export_version()
    )

    for step in steps:
        # a step carrying a "rule" key lands on a ruleset, otherwise an actionset
        container = "rule_sets" if "rule" in step else "action_sets"

        node_obj = None
        for node_json in definition[container]:
            if node_json["uuid"] == step["node"]:
                node_obj = FlowRunWriteSerializer.RevisionNode(self.flow_obj, node_json)
                break

        if node_obj is None:
            raise serializers.ValidationError(
                "No such node with UUID %s in flow '%s'" % (step["node"], self.flow_obj.name)
            )

        rule = step.get("rule", None)
        if rule:
            media = rule.get("media", None)
            if media:
                # media values look like "<type>:<path>"
                media_type, media_path = media.split(":", 1)
                if media_type != "geo":
                    media_type_parts = media_type.split("/")
                    error = None
                    if len(media_type_parts) != 2:
                        error = (media_type, media)
                    if media_type_parts[0] not in Msg.MEDIA_TYPES:
                        error = (media_type_parts[0], media)
                    if error:
                        raise serializers.ValidationError("Invalid media type '%s': %s" % error)

        step["node"] = node_obj

    return data
def migrate_flows(min_version=None):  # pragma: no cover
    """
    Bring every active flow below the target spec version up to that version.

    Args:
        min_version: spec version to migrate to; defaults to the current export version.

    Returns:
        True when no flow errored during migration, False otherwise.
    """
    to_version = min_version or get_current_export_version()

    # collect the ids of active flows still saved at an older spec version
    stale_versions = Flow.get_versions_before(to_version)
    flow_ids = list(
        Flow.objects.filter(is_active=True, version_number__in=stale_versions).values_list("id", flat=True)
    )
    total = len(flow_ids)

    if total == 0:
        print("All flows up to date")
        return True

    print(f"Found {len(flow_ids)} flows to migrate to {to_version}...")

    num_updated = 0
    num_errored = 0

    # process ids in batches of 1000 to keep queryset memory bounded
    for batch in chunk_list(flow_ids, 1000):
        for flow in Flow.objects.filter(id__in=batch):
            try:
                flow.ensure_current_version(min_version=to_version)
            except Exception:
                # a broken flow shouldn't abort the run; log and keep going
                print(f"Unable to migrate flow '{flow.name}' ({str(flow.uuid)}):")
                print(traceback.format_exc())
                num_errored += 1
            else:
                num_updated += 1

    print(f" > Flows migrated: {num_updated} of {total} ({num_errored} errored)")

    return num_errored == 0
def validate(self, data):
    """Validate submitted run steps against a stored (JSON string) flow revision."""

    class VersionNode:
        """Wraps a node from a migrated definition, tagged as ruleset or actionset."""

        def __init__(self, node, is_ruleset):
            self.node = node
            self.uuid = node['uuid']
            self.ruleset = is_ruleset

        def is_ruleset(self):
            return self.ruleset

        def is_pause(self):
            # imported here to avoid a circular import at module load
            from temba.flows.models import RuleSet

            return self.node['ruleset_type'] in RuleSet.TYPE_WAIT

        def get_step_type(self):
            return FlowStep.TYPE_RULE_SET if self.is_ruleset() else FlowStep.TYPE_ACTION_SET

    steps = data.get('steps')
    revision = data.get('revision', data.get('version'))

    if not revision:  # pragma: needs cover
        raise serializers.ValidationError("Missing 'revision' field")

    flow_revision = self.flow_obj.revisions.filter(revision=revision).first()
    if not flow_revision:
        raise serializers.ValidationError("Invalid revision: %s" % revision)

    # revisions store the definition serialized as a JSON string
    definition = json.loads(flow_revision.definition)

    # make sure we are operating off a current spec
    definition = FlowRevision.migrate_definition(definition, self.flow_obj, get_current_export_version())

    for step in steps:
        # a step carrying a 'rule' key belongs to a ruleset, otherwise an actionset
        is_rule_step = 'rule' in step

        node_obj = None
        for candidate in definition['rule_sets' if is_rule_step else 'action_sets']:
            if candidate['uuid'] == step['node']:
                node_obj = VersionNode(candidate, is_rule_step)
                break

        if node_obj is None:
            raise serializers.ValidationError(
                "No such node with UUID %s in flow '%s'" % (step['node'], self.flow_obj.name)
            )

        rule = step.get('rule', None)
        if rule:
            media = rule.get('media', None)
            if media:
                # media values look like '<type>:<path>'
                media_type, media_path = media.split(':', 1)
                if media_type != 'geo':
                    media_type_parts = media_type.split('/')
                    error = None
                    if len(media_type_parts) != 2:
                        error = (media_type, media)
                    if media_type_parts[0] not in Msg.MEDIA_TYPES:
                        error = (media_type_parts[0], media)
                    if error:
                        raise serializers.ValidationError("Invalid media type '%s': %s" % error)

        step['node'] = node_obj

    return data
def validate(self, data):
    """Validate submitted run steps against a stored flow revision."""

    class VersionNode:
        """Light wrapper over a definition node, tagged as ruleset or actionset."""

        def __init__(self, node, is_ruleset):
            self.node = node
            self.uuid = node["uuid"]
            self.ruleset = is_ruleset

        def is_ruleset(self):
            return self.ruleset

        def is_pause(self):
            return self.node["ruleset_type"] in RuleSet.TYPE_WAIT

    steps = data.get("steps")
    revision = data.get("revision", data.get("version"))

    if not revision:  # pragma: needs cover
        raise serializers.ValidationError("Missing 'revision' field")

    flow_revision = self.flow_obj.revisions.filter(revision=revision).first()
    if not flow_revision:
        raise serializers.ValidationError("Invalid revision: %s" % revision)

    # make sure we are operating off a current spec
    definition = FlowRevision.migrate_definition(
        flow_revision.definition, self.flow_obj, get_current_export_version()
    )

    for step in steps:
        # a step carrying a "rule" key belongs to a ruleset, otherwise an actionset
        has_rule = "rule" in step

        node_obj = None
        for json_node in definition["rule_sets" if has_rule else "action_sets"]:
            if json_node["uuid"] == step["node"]:
                node_obj = VersionNode(json_node, has_rule)
                break

        if node_obj is None:
            raise serializers.ValidationError(
                "No such node with UUID %s in flow '%s'" % (step["node"], self.flow_obj.name)
            )

        rule = step.get("rule", None)
        if rule:
            media = rule.get("media", None)
            if media:
                # media values look like "<type>:<path>"
                media_type, media_path = media.split(":", 1)
                if media_type != "geo":
                    media_type_parts = media_type.split("/")
                    error = None
                    if len(media_type_parts) != 2:
                        error = (media_type, media)
                    if media_type_parts[0] not in Msg.MEDIA_TYPES:
                        error = (media_type_parts[0], media)
                    if error:
                        raise serializers.ValidationError("Invalid media type '%s': %s" % error)

        step["node"] = node_obj

    return data