def get_user_groups(self, user):
        """
        Get the groups this user is authorized under for this Arena
        @param user: Email address of authenticated user
        @type user: str
        @return: List of groups assigned to the user
        @rtype: list
        """
        user_groups = []
        for group in self.UserGroups.ALL_GROUPS:
            if user in self.admin_info[group]:
                user_groups.append(group)

        if not user_groups and user not in self.admin_info[self.UserGroups.PENDING]:
            cloud_log(
                LogIDs.USER_AUTHORIZATION,
                f"Unauthorized User: {user}. Adding to pending authorization",
                LOG_LEVELS.ERROR)
            self.admin_info[self.UserGroups.PENDING].append(user)
            ds_client.put(self.admin_info)

        cloud_log(LogIDs.USER_AUTHORIZATION,
                  f"{user} logged in under groups {user_groups}",
                  LOG_LEVELS.INFO)
        return user_groups

    def _parse_container(self):
        container_info = self.build_spec['container_info']
        assessment = self.build_spec.get('assessment', None)
        host_name = self.build_spec['workout'].get('host_name', None)
        if not host_name:
            error_message = f"Container build does not contain a host name in its specification."
            cloud_log(LogIDs.MAIN_APP, error_message, LOG_LEVELS.ERROR)
            raise InvalidBuildSpecification(error_message)
        container_url = f"https://{host_name}{dns_suffix}"
        for i in range(self.build_count):
            student_name = self.student_names[i] if self.student_names else None
            student_email = self.student_emails[i] if self.student_emails else None

            cloud_ready_spec = {
                'unit_id': self.unit_id,
                'build_type': BuildTypes.CONTAINER,
                'type': self.spec_name,
                'workout_name': self.workout_name,
                'expiration': self.workout_length,
                'time_expiry': self.time_expiry,
                'container_url': container_url,
                'student_instructions_url': self.student_instructions_url,
                'time_created': datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
                'timestamp': str(calendar.timegm(time.gmtime())),
                'complete': False,
                'container_info': container_info,
                'assessment': assessment,
                'state': BUILD_STATES.RUNNING,
                'instructor_email': self.email,
                'student_name': student_name,
                'registration_required': self.registration_required,
                'student_email': student_email
            }
            self.cloud_ready_specs.append(cloud_ready_spec)

    def commit_to_cloud(self):
        """
        Commits the parsed workout specification for multiple student builds to the cloud datastore.
        This first stores the unit to the datastore and then all the individual builds
        @return: None
        """
        cloud_log(LogIDs.MAIN_APP, f"Creating unit {self.unit_id}",
                  LOG_LEVELS.INFO)
        new_unit = datastore.Entity(
            ds_client.key('cybergym-unit', self.unit_id))
        new_unit.update(self.new_unit)

        workout_ids = []
        for cloud_ready_spec in self.cloud_ready_specs:
            workout_id = ''.join(
                random.choice(string.ascii_lowercase) for i in range(10))
            workout_ids.append(workout_id)
            new_workout = datastore.Entity(
                ds_client.key('cybergym-workout', workout_id))
            new_workout.update(cloud_ready_spec)
            ds_client.put(new_workout)
            # Store the server specifications for compute workouts
            if self.build_type == BuildTypes.COMPUTE:
                self._commit_workout_servers(workout_id, new_workout)
        new_unit['workouts'] = workout_ids
        ds_client.put(new_unit)
        # If this is an arena, then store all server configurations at this time.
        if self.build_type == BuildTypes.ARENA:
            CompetitionServerSpecToCloud(
                unit=new_unit,
                workout_ids=workout_ids,
                workout_specs=self.cloud_ready_specs).commit_to_cloud()

        return {'unit_id': self.unit_id, 'build_type': self.build_type}
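
A minimal standalone sketch of the datastore write pattern used in commit_to_cloud above, assuming the google-cloud-datastore client; the kind, ID length, and fields are illustrative only.

from google.cloud import datastore
import random
import string

ds_client = datastore.Client()
# Generate a random 10-letter workout ID, as in the loop above
workout_id = ''.join(random.choice(string.ascii_lowercase) for _ in range(10))
new_workout = datastore.Entity(ds_client.key('cybergym-workout', workout_id))
new_workout.update({'complete': False, 'state': 'RUNNING'})
ds_client.put(new_workout)
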
Example #4
    def validate_machine_images(self):
        """
        Validate that every image in self.image_list exists in the current cloud project.
        Returns: Dict with 'status' and 'bad_images' when all images exist; raises ImageNotFound
                 if at least one image does not exist for the project.
        """
        if self.image_list:
            images_state = {
                'status': True,
                'bad_images': [],
            }
            for image in self.image_list:
                try:
                    compute.images().get(project=project,
                                         image=image).execute()
                except HttpError as e:
                    # Image doesn't exist; record the bad image and log the event
                    images_state['bad_images'].append(image)
                    logger.error('%s' % e.error_details[0]["message"])
            # One or more images in image_list do not exist in the current project
            if len(images_state['bad_images']) > 0:
                error_message = f"Compute images not found! The following compute images do not exist in the " \
                                f"cloud project: {images_state['bad_images']}"
                cloud_log(LogIDs.MAIN_APP, error_message, LOG_LEVELS.ERROR)
                raise self.ImageNotFound(error_message)
            # All images exist in the current project
            return images_state
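
A standalone sketch of the image-existence check used in validate_machine_images, assuming the googleapiclient Compute Engine discovery client; the project and image names are placeholders.

from googleapiclient import discovery
from googleapiclient.errors import HttpError

compute = discovery.build('compute', 'v1')

def image_exists(project_id, image_name):
    """Return True if the compute image exists in the project, False on an HttpError (e.g. 404)."""
    try:
        compute.images().get(project=project_id, image=image_name).execute()
        return True
    except HttpError:
        return False

print(image_exists('my-project', 'cybergym-kali'))
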
Example #5
def login():         
    if request.method == 'POST':
        user_data = request.get_json(force=True)
        if 'user_email' in user_data:
            arena_auth = ArenaAuthorizer()
            user_groups = arena_auth.get_user_groups(user_data['user_email'])
            if user_groups:
                session['user_email'] = user_data['user_email']
                session['user_groups'] = user_groups
                cloud_log(LogIDs.MAIN_APP, f"User {user_data['user_email']} logged in", LOG_LEVELS.INFO)
                return json.dumps({"redirect": "/home"})
        return json.dumps({'redirect': '/unauthorized'})
    return render_template('login.html', auth_config=auth_config, error_resp='403')
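
A hedged usage sketch of the login route above, assuming the application is running locally and the route is registered at /login; the URL and email address are illustrative.

import requests

resp = requests.post("http://localhost:5000/login",
                     json={"user_email": "instructor@example.com"})
print(resp.json())  # {"redirect": "/home"} for authorized users, {"redirect": "/unauthorized"} otherwise
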
    def _get_build_allocation(self):
        """
        Identify the build allocation among the parent and all child projects based on available resources and
        identified quotas. The number of workouts to build is taken from self.remaining_count, and the resulting
        split, e.g. {'<parent>': 80, '<child1>': 300, '<child2>': 300}, is stored in self.allocation.
        @return: True if the requested builds fit across the available projects, otherwise False
        @rtype: bool
        """
        # First, get all available projects.
        available = {project: self.MAX_BUILDS}
        admin_info = ds_client.get(
            ds_client.key(AdminInfoEntity.KIND, 'cybergym'))
        if AdminInfoEntity.Entities.CHILD_PROJECTS in admin_info:
            for child_project in admin_info[
                    AdminInfoEntity.Entities.CHILD_PROJECTS]:
                available[child_project] = self.MAX_BUILDS

        # Next find all active workouts for the application
        query_workouts = ds_client.query(kind='cybergym-workout')
        query_workouts.add_filter('active', '=', True)

        # Determine the available build left for each parent and children projects.
        for workout in list(query_workouts.fetch()):
            workout_project = workout.get('build_project_location', project)
            if workout_project not in available:
                cloud_log(
                    LogIDs.MAIN_APP,
                    f"Error in workout specification. The project {workout_project} is not "
                    f"a valid project for this application.", LOG_LEVELS.ERROR)
                return False
            available[workout_project] -= 1

        # Set the build allocation for this project based on availability.
        for _project in available:
            workouts_available = available[_project]
            if workouts_available > 0:
                _project_allocation = min(self.remaining_count,
                                          workouts_available)
                self.allocation[_project] = _project_allocation
                self.remaining_count -= _project_allocation

        if self.remaining_count > 0:
            cloud_log(
                LogIDs.MAIN_APP,
                "Error: Not enough available resources to complete the build!",
                LOG_LEVELS.ERROR)
            return False
        else:
            return True
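
A small worked sketch of the greedy allocation in _get_build_allocation, with illustrative numbers: a cap of 100 builds per project, one parent and one child project, and 30 workouts already active in the parent.

MAX_BUILDS = 100
available = {'parent-project': MAX_BUILDS - 30, 'child-project': MAX_BUILDS}
remaining_count = 120
allocation = {}
for _project, capacity in available.items():
    if capacity > 0:
        take = min(remaining_count, capacity)
        allocation[_project] = take
        remaining_count -= take
# allocation == {'parent-project': 70, 'child-project': 50} and remaining_count == 0,
# so the request fits and the method would return True.
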
Example #7
def build_workout():
    """
    Initiates a cloud build for the specified workout ID
    @return: None
    """
    data = request.get_json(force=True)
    workout_id = data.get('workout_id', None)
    if not workout_id:
        # Return a 404 rather than redirecting to the literal URL "404" (assumes flask.abort is imported)
        abort(404)

    publisher = pubsub_v1.PublisherClient()
    topic_path = publisher.topic_path(project, workout_globals.ps_build_workout_topic)
    publisher.publish(topic_path, data=b'Cyber Gym Workout', workout_id=workout_id)

    cloud_log(workout_id, f"Student initiated cloud build for workout {workout_id}", LOG_LEVELS.INFO)
    return 'Workout Built'
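
A minimal standalone sketch of the Pub/Sub publish call in build_workout, assuming the google-cloud-pubsub client; the project, topic, and workout ID are placeholders.

from google.cloud import pubsub_v1

publisher = pubsub_v1.PublisherClient()
topic_path = publisher.topic_path('my-project', 'build-workout-topic')
# Attributes such as workout_id are passed as keyword arguments and must be strings
future = publisher.publish(topic_path, data=b'Cyber Gym Workout', workout_id='abcdefghij')
print(future.result())  # message ID once the publish is acknowledged
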
    def __init__(self, unit, workout_ids, workout_specs):
        self.unit_id = unit.key.name
        self.unit = unit
        self.workout_specs = workout_specs
        self.workout_ids = workout_ids
        self.arena = self.unit.get('arena', None)
        if not self.arena:
            cloud_log(
                self.unit_id,
                f"Build operation failed. No unit {self.unit_id} exists in the data store",
                LOG_LEVELS.ERROR)
            raise LookupError
        self.student_entry_type = self.arena['student_entry_type']
        self.student_entry_server = self.arena['student_entry']
        self.student_entry_username = self.arena['student_entry_username']
        self.student_entry_password = self.arena['student_entry_password']
        self.network_type = self.arena['student_network_type']

    def __init__(self):
        self.admin_info = ds_client.get(
            ds_client.key('cybergym-admin-info', 'cybergym'))
        if self.UserGroups.ADMINS not in self.admin_info:
            # Assumes the project config helper is called as myconfig.get_variable(...)
            admin_email = myconfig.get_variable('admin_email')
            if not admin_email:
                cloud_log(LogIDs.MAIN_APP,
                          "Error: Admin Email is not set for this project!",
                          LOG_LEVELS.ERROR)
            else:
                self.admin_info[self.UserGroups.ADMINS] = [admin_email]
        if self.UserGroups.AUTHORIZED not in self.admin_info:
            self.admin_info[self.UserGroups.AUTHORIZED] = []
        if self.UserGroups.STUDENTS not in self.admin_info:
            self.admin_info[self.UserGroups.STUDENTS] = []
        if self.UserGroups.PENDING not in self.admin_info:
            self.admin_info[self.UserGroups.PENDING] = []
        ds_client.put(self.admin_info)
Example #10
    def commit_to_cloud(self):
        """
        Store the student entry server specification for the given workout.
        @return: None
        """
        self._prepare_guac_connections()

        guac_startup_script = self._get_guac_startup_script(
            self.guac_connections)
        student_entry_ip = self._get_student_entry_ip_address(self.build, self.student_entry_network) \
            if not self.student_entry_ip_address else self.student_entry_ip_address

        if not student_entry_ip:
            cloud_log(
                LogIDs.MAIN_APP,
                "Could not find available IP address for student entry guacamole server",
                LOG_LEVELS.ERROR)
            raise LookupError

        server_spec = {
            'name': "student-guacamole",
            'image': self.STUDENT_ENTRY_IMAGE,
            'tags': {'items': ['student-entry']},
            'machine_type': 'n1-standard-1',
            'nics': [{
                "network": self.student_entry_network,
                "subnet": "default",
                "external_NAT": True,
                "internal_IP": student_entry_ip
            }],
            'guacamole_startup_script': guac_startup_script
        }
        self._update_build_spec()
        ServerSpecToCloud(server_spec, self.build_id,
                          student_entry=True).commit_to_cloud()

def parse_workout_yaml(yaml_filename):
    """
    Pull and validate yaml contents from a designated project cloud bucket
    :param yaml_filename: Both the workout type and the filename of the yaml file in the cloud bucket
    :return: The yaml string from the cloud bucket
    """
    bucket = storage_client.get_bucket(workout_globals.yaml_bucket)
    blob = bucket.get_blob(workout_globals.yaml_folder + yaml_filename +
                           ".yaml")
    if not blob:
        cloud_log(LogIDs.MAIN_APP, f"YAML file not found: {yaml_filename}",
                  LOG_LEVELS.ERROR)
        raise FileNotFoundError
    yaml_string = blob.download_as_string()
    try:
        y = yaml.safe_load(yaml_string)
    except yaml.YAMLError as err:
        cloud_log(LogIDs.MAIN_APP,
                  f"Error parsing specified yaml file: {yaml_filename}",
                  LOG_LEVELS.ERROR)
        if hasattr(err, 'problem_mark'):
            mark = err.problem_mark
            cloud_log(LogIDs.MAIN_APP,
                      f"Error position: ({mark.line + 1}:{mark.column + 1})",
                      LOG_LEVELS.ERROR)
        raise
    return y
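
A hedged sketch of the YAML error handling in parse_workout_yaml, run against an in-memory string instead of a cloud bucket; the sample content is intentionally malformed.

import yaml

try:
    yaml.safe_load("workout:\n  name: [unclosed")
except yaml.YAMLError as err:
    if hasattr(err, 'problem_mark'):
        mark = err.problem_mark
        print(f"Error position: ({mark.line + 1}:{mark.column + 1})")
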
    def _validate_build_spec(self):
        """
        Use the marshmallow schemas to validate the yaml-loaded build specification before using it.
        @return: None
        """
        validation_errors = {}
        if self.build_type == BuildTypes.COMPUTE:
            validation_errors = WorkoutComputeSchema().validate(self.build_spec)
        elif self.build_type == BuildTypes.CONTAINER:
            validation_errors = WorkoutContainerSchema().validate(self.build_spec)
        elif self.build_type == BuildTypes.ARENA:
            validation_errors = ArenaSchema().validate(self.build_spec)
        # Schema().validate() returns a dict of errors rather than raising, so check the result directly
        if validation_errors:
            error_message = f"Error when trying to load build specification of type {self.build_type}. " \
                            f"Validation errors: {validation_errors}"
            cloud_log(LogIDs.MAIN_APP, error_message, LOG_LEVELS.ERROR)
            raise InvalidBuildSpecification(error_message)
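
A minimal sketch of the marshmallow validation pattern above, using a throwaway schema; Schema().validate() returns a dict of errors rather than raising, which is why _validate_build_spec checks the returned dict.

from marshmallow import Schema, fields

class DemoWorkoutSchema(Schema):
    workout_name = fields.Str(required=True)

errors = DemoWorkoutSchema().validate({})
print(errors)  # {'workout_name': ['Missing data for required field.']}
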
Example #13
    def commit_to_cloud(self):
        config = {'name': self.server_name}
        if self.machine_type:
            config['machineType'] = f"zones/{zone}/machineTypes/{self.machine_type}"
        if self.tags:
            config['tags'] = self.tags
        if self.build_type != ComputeBuildTypes.MACHINE_IMAGE:
            image_response = compute.images().get(
                project=project, image=self.custom_image).execute()
            source_disk_image = image_response['selfLink']
            config['disks'] = [{
                'boot': True,
                'autoDelete': True,
                'initializeParams': {
                    'sourceImage': source_disk_image,
                }
            }]

        if self.networks:
            network_interfaces = []
            for network in self.networks:
                add_network_interface = {
                    'network': f'projects/{project}/global/networks/{network["network"]}',
                    'subnetwork': f'regions/us-central1/subnetworks/{network["subnet"]}'
                }
                # Only attach an external NAT access config when one is requested, rather than
                # appending a null entry to accessConfigs.
                if network.get("external_NAT", None):
                    add_network_interface['accessConfigs'] = [{
                        'type': 'ONE_TO_ONE_NAT',
                        'name': 'External NAT'
                    }]
                if 'internal_IP' in network:
                    add_network_interface['networkIP'] = network["internal_IP"]
                if 'aliasIpRanges' in network:
                    add_network_interface['aliasIpRanges'] = network['aliasIpRanges']
                network_interfaces.append(add_network_interface)
            config['networkInterfaces'] = network_interfaces
        # Allow the instance to access cloud storage and logging.
        config['serviceAccounts'] = [{
            'email': 'default',
            'scopes': [
                'https://www.googleapis.com/auth/devstorage.read_write',
                'https://www.googleapis.com/auth/logging.write'
            ]
        }]

        if self.meta_data:
            config['metadata'] = {'items': [self.meta_data]}
        if self.sshkey:
            if 'metadata' in config and config['metadata'] and 'items' in config['metadata']:
                config['metadata']['items'].append({
                    "key": "ssh-keys",
                    "value": self.sshkey
                })
            else:
                config['metadata'] = {
                    'items': [{
                        "key": "ssh-keys",
                        "value": self.sshkey
                    }]
                }

        if self.network_routing:
            config["canIpForward"] = True

        if self.add_disk:
            new_disk = {
                "mode": "READ_WRITE",
                "boot": False,
                "autoDelete": True,
                "source": "projects/" + project + "/zones/" + zone + "/disks/" +
                          self.server_name + "-disk"
            }
            # setdefault ensures machine-image builds, which add no boot disk above, still get a disks list
            config.setdefault('disks', []).append(new_disk)

        if self.min_cpu_platform:
            config['minCpuPlatform'] = self.min_cpu_platform

        new_server = datastore.Entity(
            key=ds_client.key('cybergym-server', self.server_name),
            exclude_from_indexes=['guacamole_startup_script'])

        new_server.update({
            'name': self.server_name,
            'workout': self.build_id,
            'build_type': self.build_type,
            'machine_image': self.machine_image,
            'config': config,
            'add_disk': self.add_disk,
            'state': BUILD_STATES.READY,
            'state-timestamp': str(calendar.timegm(time.gmtime())),
            'student_entry': self.student_entry,
            'guacamole_startup_script': self.guacamole_startup_script,
            'snapshot': self.snapshot
        })

        if self.options:
            self._process_server_options(new_server, self.options)

        try:
            ds_client.put(new_server)
        except Exception:
            cloud_log(LogIDs.MAIN_APP,
                      f"Error storing server config for {self.server_name}",
                      LOG_LEVELS.ERROR)
            raise
Example #14
    def __init__(self,
                 server_spec,
                 build_id,
                 startup_scripts=None,
                 student_entry=False,
                 options=None):
        """
        Initialize the server specification from a build specification.
        @param server_spec: The raw build specification for the server.
        @type server_spec: dict
        @param build_id: The ID associated with the build
        @type build_id: str
        @param startup_scripts: The startup scripts for all servers associated with this build
        @type startup_scripts: dict
        @param student_entry: Whether this is a student entry server
        @type student_entry: bool
        @param options: Additional options to include with the server
        @type options: dict
        """
        base_name = server_spec['name']
        self.server_name = f"{build_id}-{base_name}"
        # First check to see if a server configuration with this name already exists; if so, log the conflict and raise
        exists_check = ds_client.query(kind='cybergym-server')
        exists_check.add_filter("name", "=", self.server_name)
        # The query iterator only populates num_results after iteration, so materialize one result to test existence
        if list(exists_check.fetch(limit=1)):
            cloud_log(
                LogIDs.MAIN_APP,
                f"The server {self.server_name} already exists. Skipping configuration.",
                LOG_LEVELS.ERROR)
            raise ReferenceError

        self.build_id = build_id
        self.custom_image = server_spec.get('image', None)
        self.build_type = server_spec.get("build_type", None)
        self.machine_image = server_spec.get("machine_image", None)
        self.sshkey = server_spec.get("sshkey", None)
        self.tags = server_spec.get('tags', None)
        self.machine_type = server_spec.get("machine_type", "n1-standard-1")
        self.network_routing = server_spec.get("network_routing", None)
        self.min_cpu_platform = server_spec.get("minCpuPlatform", None)
        self.add_disk = server_spec.get("add_disk", None)
        self.options = server_spec.get("options", None)
        self.snapshot = server_spec.get('snapshot', None)
        # Add the network configuration
        self.networks = []
        for n in server_spec['nics']:
            n['external_NAT'] = n.get('external_NAT', False)
            nic = {
                "network": f"{build_id}-{n['network']}",
                "internal_IP": n['internal_IP'],
                "subnet": f"{build_id}-{n['network']}-{n['subnet']}",
                "external_NAT": n['external_NAT']
            }
            # Nested VMs are sometimes used for vulnerable servers. This adds those specified IP addresses as
            # aliases to the NIC
            if 'IP_aliases' in n and n['IP_aliases']:
                alias_ip_ranges = []
                for ipaddr in n['IP_aliases']:
                    alias_ip_ranges.append({"ipCidrRange": ipaddr})
                nic['aliasIpRanges'] = alias_ip_ranges
            self.networks.append(nic)
        # Competitions may have meta_data defined, but compute workouts use assessments. First, check for meta_data
        # if this is a competition, and then look for startup scripts which have been identified from the assessment
        self.meta_data = server_spec.get("meta_data", None)
        if not self.meta_data and startup_scripts and base_name in startup_scripts:
            self.meta_data = startup_scripts[base_name]
        self.student_entry = student_entry
        self.guacamole_startup_script = server_spec.get('guacamole_startup_script', None)
        self.options = options