Esempio n. 1
0
def _generate_filename(directory, filename):
    """Generate the path at which to the save a file uploaded by the user.

    For now, this just generates a random UUID (version 4), then appends the
    same extension found from the input filename.

    """
    extension = filename.rsplit('.', 1)[1]
    uuid = random_uuid().hex  # the string containing just the hex characters
    filename = os.path.join(directory, '{0}.{1}'.format(uuid, extension))
    while os.path.exists(filename):
        uuid = random_uuid().hex
        filename = os.path.join(directory, '{0}.{1}'.format(uuid, extension))
    return filename
Esempio n. 2
0
async def sender(client_stream):
    """Push a fresh UUID-derived payload to *client_stream* every 0.15 s.

    Runs forever; each payload is the URN form of a random UUID with the
    'urn:uuid:' prefix stripped, encoded to bytes.
    """
    print("sender: starting")
    while True:
        payload = random_uuid().urn[9:].encode()
        print(f"sender: sending {payload}")
        await client_stream.send_all(payload)
        await trio.sleep(0.15)
Esempio n. 3
0
    def handle_request(self, event):
        """Create a voting FSM for *event* and schedule its showdown.

        Derives a tracking id from the event arguments, uses a random
        UUID string as this node's vote, registers the FSM under the
        tracking id and forces a showdown after 10 seconds.
        """
        tracking_id = _determine_tracking_id(event.arguments)
        vote = str(random_uuid())

        def announce(_):
            # Broadcast our vote to the other participants.
            log.msg('Voting %r for request with tracking-id %r' %
                    (vote, tracking_id))
            self.broadcaster._sendEvent('vote',
                                        data=vote,
                                        tracking_id=tracking_id,
                                        target=event.target)

        def count_fold(_):
            METRICS['voting_folds'] += 1

        def dispose(_):
            # Drop the finished FSM so the states table does not grow forever.
            del self.states[tracking_id]
            log.msg('Cleaned up fsm for %s, %d left in memory' %
                    (event.target, len(self.states)))

        perform = functools.partial(self.perform_request, event)
        self.states[tracking_id] = create_voting_fsm(
            tracking_id, vote, announce, perform, count_fold, dispose)

        reactor.callLater(10, self.states[tracking_id].showdown)
Esempio n. 4
0
    def handle_request(self, event):
        """Create a voting FSM for the incoming request and schedule its showdown.

        A tracking id is derived from the event's arguments and a random
        UUID string serves as this node's vote.  The FSM is stored in
        ``self.states`` under the tracking id; its ``showdown`` is forced
        10 seconds later via the reactor.
        """
        tracking_id = _determine_tracking_id(event.arguments)
        # str() of a random (version-4) UUID is this node's vote value.
        vote = str(random_uuid())

        def broadcast_vote(_):
            # Announce our vote to the other participants.
            log.msg('Voting %r for request with tracking-id %r' %
                    (vote, tracking_id))
            self.broadcaster._sendEvent('vote',
                                        data=vote,
                                        tracking_id=tracking_id,
                                        target=event.target)

        def cleanup_fsm(_):
            # Drop the finished FSM so the states table does not grow forever.
            del self.states[tracking_id]
            log.msg('Cleaned up fsm for %s, %d left in memory' % (event.target, len(self.states)))

        def fold(_):
            # Count abandoned votes for monitoring.
            METRICS['voting_folds'] += 1

        self.states[tracking_id] = create_voting_fsm(tracking_id,
                                                     vote,
                                                     broadcast_vote,
                                                     functools.partial(
                                                         self.perform_request, event),
                                                     fold,
                                                     cleanup_fsm)

        # Force a resolution even if not every vote arrives in time.
        reactor.callLater(10, self.states[tracking_id].showdown)
Esempio n. 5
0
    def create_job(self, **kwargs) -> RequestedJob:
        """
        Create and return a new job object that has been saved to the backend store.

        Since this class works with ::class:`RequestedJob` objects, a new object must receive a
        ::class:`SchedulerRequestMessage` as a parameter.  This is in the ``request`` keyword arg.

        Parameters
        ----------
        kwargs
            Implementation-specific keyed parameters for creating appropriate job objects (see *Keyword Args* section).

        Keyword Args
        ------------
        request : SchedulerRequestMessage
            The originating request for the job.

        Returns
        -------
        RequestedJob
            The newly created job object.

        Raises
        ------
        KeyError
            If the required ``request`` keyword argument is absent.
        """
        job_obj = RequestedJob(job_request=kwargs['request'])
        # (Dead commented-out allocation/key-pair handling removed.)
        # Assign a fresh random (version-4) UUID as the job id, then persist.
        job_obj.job_id = random_uuid()
        self.save_job(job_obj)
        return job_obj
Esempio n. 6
0
def compare_btc_bch_node():
    """Verify a BCH-priced node quote costs more satoshis than the BTC one.

    Requests two one-day node quotes with identical parameters (same
    random UUID), differing only in currency, and asserts the ordering.
    """
    node_uuid = str(random_uuid())
    common = dict(days=1, uuid=node_uuid, cloudinit='#!/bin/true')
    btc_node = SporeStack.node(**common)
    bch_node = SporeStack.node(currency='bch', **common)
    assert btc_node.satoshis < bch_node.satoshis
Esempio n. 7
0
def get_or_create_token(username):
    """Return the token mapped to *username*, minting and saving one if absent.

    The token store is a persisted dict keyed by token with the username
    as value; a linear scan finds an existing entry.
    """
    mapping = load_dict("token_to_username")
    existing = next(
        (token for token, user in mapping.items() if user == username), None)
    if existing is not None:
        return existing
    fresh_token = str(random_uuid())
    mapping[fresh_token] = username
    save_dict(mapping, "token_to_username")
    return fresh_token
Esempio n. 8
0
def start_upload(sha1, size):
    """Create a File record backed by a fresh Google Storage blob.

    A random UUID names the blob; the record starts with the upload
    unfinished and zero bytes received.  Returns the datastore id of the
    new record.
    """
    read_path = '/gs/acid-cloud/' + str(random_uuid()) + '.block'
    write_path = files.gs.create(read_path)
    record = File(read_path=read_path,
                  write_path=write_path,
                  size=size,
                  sha1=sha1.lower(),
                  visit_count=0,
                  upload_finished=False,
                  current_size=0)
    record.put()
    return record.key().id()
Esempio n. 9
0
    def create_job(self, **kwargs) -> RequestedJob:
        """
        Create and return a new job object that has been saved to the backend store.

        Since this class works with ::class:`RequestedJob` objects, a new object must receive a
        ::class:`SchedulerRequestMessage` as a parameter.  This is in the ``request`` keyword arg.

        Parameters
        ----------
        kwargs
            Implementation-specific keyed parameters for creating appropriate job objects (see *Keyword Args* section).

        Keyword Args
        ------------
        request : SchedulerRequestMessage
            The originating request for the job.
        job_id : str, UUID, None
            Optional value to try use for the job's id, falling back to random if not present, invalid, or already used.

        Returns
        -------
        RequestedJob
            The newly created job object.
        """
        job_obj = RequestedJob(job_request=kwargs['request'])
        # Prefer the caller-supplied id when it is present, parses as a
        # UUID, and is not already bound to a stored job; otherwise
        # generate a random one.
        try:
            job_uuid = kwargs['job_id'] if isinstance(
                kwargs['job_id'], UUID) else UUID(str(kwargs['job_id']))
            if not self._does_redis_key_exist(
                    self._get_job_key_for_id(job_uuid)):
                job_obj.job_id = job_uuid
            else:
                job_obj.job_id = random_uuid()
        # The original bare ``except:`` also swallowed SystemExit and
        # KeyboardInterrupt; only the expected failure modes of the
        # lookup/parse above are caught now.
        except (KeyError, ValueError, AttributeError, TypeError):
            job_obj.job_id = random_uuid()

        self.save_job(job_obj)
        return job_obj
Esempio n. 10
0
def log_process_stderr(stderr_obj, err_msg: str = None) -> str:
    """Log stderr output from process

    Side effects:
        - Makes directory (if doesn't exist)
        - Writes to logfile (if error message)

    Args:
        stderr_obj: Stderr object (anything exposing ``readline``), or a
            plain string already holding the stderr text
        err_msg (str): Custom error message; ``None`` is treated as ''

    Returns:
        str: errors ('' when the process produced no stderr output)
    """
    err = ''
    # ``err_msg`` defaults to None; the original concatenated it directly,
    # raising TypeError whenever the argument was omitted.
    err_base: str = \
        (err_msg or '') + '\n\nFor more information, search for error details by ' \
                          'id "{}" in logfile "{}".\n'

    try:
        # NOTE: the original attempted ``log_msg += line.encode('utf-8')``,
        # but str += bytes always raises TypeError, so str(line) is what
        # effectively ran; do that directly (joining once also avoids the
        # quadratic += accumulation).
        pieces = []
        for line in iter(stderr_obj.readline, ''):
            pieces.append(str(line))
        log_msg = ''.join(pieces)
    except AttributeError:
        # stderr_obj has no .readline: treat it as the stderr text itself.
        log_msg = stderr_obj

    if log_msg:
        # Tag this error block with a unique id so users can grep for it.
        uuid = str(random_uuid())
        log_msg_open: str = '<error id="{}" datetime="{}">' \
            .format(uuid, str(datetime.now()))
        log_msg_close: str = '</' + log_msg_open[1:]
        log_msg = '\n\n' + \
                  log_msg_open + '\n' + \
                  log_msg + '\n' + \
                  log_msg_close
        if not os.path.exists(LOGS_DIR):
            os.mkdir(LOGS_DIR)
        with open(ERROR_LOG_PATH, 'a') as log:
            log.writelines(log_msg)
        err = err_base.format(uuid, ERROR_LOG_PATH)

    return err
Esempio n. 11
0
def upload_image():
    """Handle an image-upload form POST.

    Saves the original under a sanitised (or random-UUID) name, creates a
    resized copy, and returns both relative paths as JSON.  Aborts with
    400 on an invalid form or an extension-less filename.
    """
    form = ImageUploadForm()
    if not form.validate():
        abort(400)

    client_name, ext = os.path.splitext(form.image.data.filename)
    if not ext:
        abort(400)

    # Fall back to a random UUID when sanitising leaves nothing usable.
    dest_name = secure_filename(form.name.data) or str(random_uuid())
    dest_path = os.path.join('images', 'original', dest_name + ext)

    # TODO : handle name conflicts
    location = os.path.join(app.config['UPLOAD_FOLDER'], dest_path)
    form.image.data.save(location)
    # resize the image
    resized_path = resize_image(location,
                                app.config['NYLOG_IMAGES_LARGE_WIDTH'])

    return jsonify({'original': dest_path, 'resized': resized_path})
Esempio n. 12
0
def upload_image():
    """Handle an image-upload form POST: store the original and a resized copy.

    Returns a JSON payload with the relative paths of the original and
    the resized image; aborts with 400 on an invalid form or a filename
    without an extension.
    """
    form = ImageUploadForm()
    if form.validate():
        client_name, ext = os.path.splitext(form.image.data.filename)
        # Reject uploads whose filename carries no extension.
        if not(ext):
            abort(400)

        # Sanitise the user-chosen name; fall back to a random UUID when
        # sanitising leaves nothing usable.
        dest_name = secure_filename(form.name.data)
        if not(dest_name):
            dest_name = str(random_uuid())
        dest_path = os.path.join('images', 'original', dest_name + ext)

        # TODO : handle name conflicts
        location = os.path.join(app.config['UPLOAD_FOLDER'], dest_path)
        form.image.data.save(location)
        # resize the image
        resized_path = resize_image(location, app.config['NYLOG_IMAGES_LARGE_WIDTH'])

        return jsonify({'original' : dest_path,
                        'resized' : resized_path})
    else:
        abort(400)
Esempio n. 13
0
def send_mail(receiver, subject, body):
    """Send a plain-text e-mail through the configured SMTP relay.

    Args:
        receiver: destination address for the ``To`` header.
        subject: subject line.
        body: plain-text message body.
    """
    msg = EmailMessage()
    # Message-Id: random UUID at the configured sender's domain.
    msg.add_header(
        "Message-Id",
        "<{uuid}@{domain}>".format(uuid=random_uuid(),
                                   domain=app.config["SMTP_FROM"].split(
                                       "@", 1)[1]))
    msg.add_header("Date", datetime.now().strftime("%c"))
    msg.add_header("Subject", subject)
    msg.add_header("From", app.config["SMTP_FROM"])
    msg.add_header("To", receiver)
    msg.set_content(body)

    hostname = app.config["SMTP_HOST"]
    port = app.config["SMTP_PORT"]

    if app.config["SMTP_SSL"]:
        conn = SMTP_SSL(hostname, port)
    else:
        conn = SMTP(hostname, port)

    # The original leaked the connection when login/send raised; always
    # close it.
    try:
        conn.login(app.config["SMTP_USER"], app.config["SMTP_PASS"])
        conn.send_message(msg)
    finally:
        conn.quit()
Esempio n. 14
0
    def _add_or_update_endpoint(self, action, name, version, request_data):
        '''
        Add or update an endpoint.

        Coroutine-style handler (results are signalled by raising
        ``gen.Return``): validates the endpoint name, serialises
        concurrent updates through the ``add_or_updating_endpoint``
        setting, copies the staged query object into place and records
        the change in the TabPy state.

        Parameters
        ----------
        action : str
            ``'add'`` to create the endpoint, anything else to update it.
        name : str
            Endpoint name (letters, digits, underscore, hyphen, whitespace).
        version
            Version forwarded to the state update and used when computing
            the on-disk target path.
        request_data : dict
            Optional keys: ``description``, ``docstring``, ``type``,
            ``methods``, ``dependencies``, ``target``, ``schema``,
            ``src_path``.
        '''
        self.logger.log(logging.DEBUG, f'Adding/updating model {name}...')

        _name_checker = _compile(r'^[a-zA-Z0-9-_\s]+$')
        if not isinstance(name, str):
            msg = 'Endpoint name must be a string'
            self.logger.log(logging.CRITICAL, msg)
            raise TypeError(msg)

        if not _name_checker.match(name):
            raise gen.Return('endpoint name can only contain: a-z, A-Z, 0-9,'
                             ' underscore, hyphens and spaces.')

        # Simple mutual-exclusion flag: only one add/update may run at a time.
        if self.settings.get('add_or_updating_endpoint'):
            msg = ('Another endpoint update is already in progress'
                   ', please wait a while and try again')
            self.logger.log(logging.CRITICAL, msg)
            raise RuntimeError(msg)

        # Mark the update as in progress; cleared again in the finally block.
        request_uuid = random_uuid()
        self.settings['add_or_updating_endpoint'] = request_uuid
        try:
            # Pull the optional fields out of the request payload.
            description = (request_data['description']
                           if 'description' in request_data else None)
            if 'docstring' in request_data:
                # Un-escape the docstring (it arrives with escaped newlines etc.).
                docstring = str(
                    bytes(request_data['docstring'],
                          "utf-8").decode('unicode_escape'))
            else:
                docstring = None
            endpoint_type = (request_data['type']
                             if 'type' in request_data else None)
            methods = (request_data['methods']
                       if 'methods' in request_data else [])
            dependencies = (request_data['dependencies']
                            if 'dependencies' in request_data else None)
            target = (request_data['target']
                      if 'target' in request_data else None)
            schema = (request_data['schema']
                      if 'schema' in request_data else None)

            src_path = (request_data['src_path']
                        if 'src_path' in request_data else None)
            target_path = get_query_object_path(
                self.settings[SettingsParameters.StateFilePath], name, version)
            self.logger.log(logging.DEBUG,
                            f'Checking source path {src_path}...')
            _path_checker = _compile(r'^[\\\:a-zA-Z0-9-_~\s/\.\(\)]+$')
            # copy from staging
            if src_path:
                if not isinstance(request_data['src_path'], str):
                    raise gen.Return("src_path must be a string.")
                if not _path_checker.match(src_path):
                    raise gen.Return(
                        'Endpoint source path name can only contain: '
                        'a-z, A-Z, 0-9, underscore, hyphens and spaces.')

                yield self._copy_po_future(src_path, target_path)
            elif endpoint_type != 'alias':
                raise gen.Return("src_path is required to add/update an "
                                 "endpoint.")

            # alias special logic:
            if endpoint_type == 'alias':
                # An alias simply points at (and depends on) its target.
                if not target:
                    raise gen.Return('Target is required for alias endpoint.')
                dependencies = [target]

            # update local config
            try:
                if action == 'add':
                    self.tabpy_state.add_endpoint(name=name,
                                                  description=description,
                                                  docstring=docstring,
                                                  endpoint_type=endpoint_type,
                                                  methods=methods,
                                                  dependencies=dependencies,
                                                  target=target,
                                                  schema=schema)
                else:
                    self.tabpy_state.update_endpoint(
                        name=name,
                        description=description,
                        docstring=docstring,
                        endpoint_type=endpoint_type,
                        methods=methods,
                        dependencies=dependencies,
                        target=target,
                        schema=schema,
                        version=version)

            except Exception as e:
                raise gen.Return(f'Error when changing TabPy state: {e}')

            # Propagate the new state to the running python service.
            on_state_change(self.settings, self.tabpy_state,
                            self.python_service, self.logger)

        finally:
            # Always release the in-progress flag.
            self.settings['add_or_updating_endpoint'] = None
Esempio n. 15
0
	def persist(self, filepaths):
		"""Store each filepath under a fresh random id, yielding the ids.

		For every path in *filepaths*, a random UUID string is inserted
		into the backing table together with the path, and that id is
		yielded to the caller (lazily, as a generator).
		"""
		for path in filepaths:
			record_id = str(random_uuid())
			self._table.insert({'id': record_id, 'filepath': path})
			yield record_id
Esempio n. 16
0
    def create_note(self, pos_um=None, size_um=None):
        """Create a new note on this notepage and record the revision.

        Parameters:
            pos_um: optional (left, top) position in micrometres; when
                None a position is computed automatically.
            size_um: optional (width, height) in micrometres; when None
                DEFAULT_NOTE_WIDTH/DEFAULT_NOTE_HEIGHT are used.

        Returns: the newly created note node.

        Raises InvalidParameterError for malformed pos_um/size_um and
        PermissionDeniedError when the user may not edit this notepage.
        """
        log.debug("notepage %r: create_note(pos_um=%r, size_um=%r)",
                  self.full_name, pos_um, size_um)

        # NOTE: the original wrote ``isinstance(pos_um[0]), (...)`` — a
        # misplaced parenthesis calling isinstance() with one argument
        # (TypeError at runtime); these are the intended checks.
        if not (pos_um is None or
                (isinstance(pos_um, (list, tuple)) and
                 len(pos_um) == 2 and
                 isinstance(pos_um[0], (int, float, long)) and
                 isinstance(pos_um[1], (int, float, long)))):
            log.error("create_note: invalid pos_um value %r", pos_um)
            raise InvalidParameterError("pos_um must be null or (left, top)")

        if not (size_um is None or
                (isinstance(size_um, (list, tuple)) and
                 len(size_um) == 2 and
                 isinstance(size_um[0], (int, float, long)) and
                 isinstance(size_um[1], (int, float, long)))):
            log.error("create_note: invalid size_um value %r", size_um)
            raise InvalidParameterError(
                "size_um must be null or (width, height)")

        if not self.access(PERM_EDIT_DOCUMENT):
            # ``notepage_name`` was an undefined name here (NameError);
            # use the page's own full name instead.
            raise PermissionDeniedError("No permission to edit notepage %s" %
                                        self.full_name)

        if size_um is None:
            width_um = DEFAULT_NOTE_WIDTH
            height_um = DEFAULT_NOTE_HEIGHT
        else:
            width_um, height_um = size_um

        if pos_um is None:
            x_pos_um, y_pos_um = self._calculate_note_position()
        else:
            x_pos_um, y_pos_um = pos_um

        # The z-index will be one greater than all other note z-index values.
        try:
            z_index = 1 + max([child.z_index for child in self.children])
        except ValueError:
            # No children
            z_index = 0

        session = _request_db_session()
        now = datetime.utcnow()

        # Use a random UUID for the name
        note_name = str(random_uuid())

        # Create the DAO holding the note.
        note_dao = dao.Note(
            node_type_id=dao.NODE_TYPE_ID_NOTE,
            parent_node_id=self._dao.node_id,
            node_name=note_name,
            is_active=True,
            inherit_permissions=True,
            contents_markdown="",
            x_pos_um=x_pos_um,
            y_pos_um=y_pos_um,
            width_um=width_um,
            height_um=height_um,
            z_index=z_index,
            revision_id=0)
        session.add(note_dao)
        session.flush()
        note = FilesystemNode._from_dao(note_dao, parent=self)

        # Mark that a change was made to the notepage
        self._dao.edit_time_utc = now
        session.add(self._dao)
        session.flush()

        # Record the change: the stored delta_to_previous undoes this
        # edit (removes the note).
        rev = dao.NotepageRevision(
            node_id=self.node_id,
            revision_id=self._dao.revision_id,
            delta_to_previous=json.dumps(
                [{'action': 'remove_note',
                  'note_id': note_dao.node_id}]),
            editor_user_id=_request_user_id(),
            edit_time_utc=now)
        session.add(rev)
        session.flush()

        # Invalidate our cache of children, if present.
        if hasattr(self, "_children"):
            del self._children

        return note
Esempio n. 17
0
    def _add_or_update_endpoint(self, action, name, version, request_data):
        '''
        Add or update an endpoint.

        Validates the endpoint name, uses the ``add_or_updating_endpoint``
        setting as a simple lock against concurrent updates, copies the
        staged query object into place and records the change in the
        TabPy state.  Coroutine-style: results are signalled by raising
        ``gen.Return``.  ``action`` is ``'add'`` to create the endpoint,
        anything else to update it; ``request_data`` may carry
        description, docstring, type, methods, dependencies, target,
        schema and src_path.
        '''
        # NOTE(review): non-raw regex literal — '\ ' relies on legacy
        # backslash handling; preserved as-is.
        _name_checker = _compile('^[a-zA-Z0-9-_\ ]+$')
        if not isinstance(name, basestring):
            raise TypeError("Endpoint name must be a string or unicode")

        if not _name_checker.match(name):
            raise gen.Return('endpoint name can only contain: a-z, A-Z, 0-9,'
                             ' underscore, hyphens and spaces.')

        # Simple mutual-exclusion flag: only one add/update may run at once.
        if self.settings.get('add_or_updating_endpoint'):
            raise RuntimeError(
                "Another endpoint update is already in progress, "
                "please wait a while and try again")

        # Mark the update as in progress; cleared again in the finally block.
        request_uuid = random_uuid()
        self.settings['add_or_updating_endpoint'] = request_uuid
        try:
            # Optional request fields (None / [] when absent).
            description = request_data[
                'description'] if 'description' in request_data else None
            docstring = request_data[
                'docstring'] if 'docstring' in request_data else None
            endpoint_type = request_data[
                'type'] if 'type' in request_data else None
            methods = request_data[
                'methods'] if 'methods' in request_data else []
            dependencies = request_data[
                'dependencies'] if 'dependencies' in request_data else None
            target = request_data[
                'target'] if 'target' in request_data else None
            schema = request_data[
                'schema'] if 'schema' in request_data else None

            src_path = request_data[
                'src_path'] if 'src_path' in request_data else None
            target_path = get_query_object_path(
                self.settings['state_file_path'], name, version)
            _path_checker = _compile('^[\\a-zA-Z0-9-_\ /]+$')
            # copy from staging
            if src_path:
                if not isinstance(request_data['src_path'], basestring):
                    raise gen.Return("src_path must be a string.")
                if not _path_checker.match(src_path):
                    raise gen.Return(
                        'Endpoint name can only contain: a-z, A-Z, 0-9,underscore, hyphens and spaces.'
                    )

                yield self._copy_po_future(src_path, target_path)
            elif endpoint_type != 'alias':
                raise gen.Return(
                    "src_path is required to add/update an endpoint.")

            # alias special logic:
            if endpoint_type == 'alias':
                # An alias simply points at (and depends on) its target.
                if not target:
                    raise gen.Return('Target is required for alias endpoint.')
                dependencies = [target]

            # update local config
            try:
                if action == 'add':
                    self.tabpy.add_endpoint(name=name,
                                            description=description,
                                            docstring=docstring,
                                            endpoint_type=endpoint_type,
                                            methods=methods,
                                            dependencies=dependencies,
                                            target=target,
                                            schema=schema)
                else:
                    self.tabpy.update_endpoint(name=name,
                                               description=description,
                                               docstring=docstring,
                                               endpoint_type=endpoint_type,
                                               methods=methods,
                                               dependencies=dependencies,
                                               target=target,
                                               schema=schema,
                                               version=version)

            except Exception as e:
                raise gen.Return("Error when changing TabPy state: %s" % e)

            on_state_change(self.settings)

        finally:
            # Always release the lock flag.
            self.settings['add_or_updating_endpoint'] = None
Esempio n. 18
0
def main():
    """Entry point for the SporeStack CLI.

    Pulls the live node options and launch profiles from the API, builds
    the argparse command tree (spawn / list / ssh / json_extractor /
    sporestackfile_helper), then dispatches to whichever handler the
    chosen subparser registered via ``set_defaults(func=...)``.
    """
    options = sporestack.node_options()
    launch_profiles = sporestack.node_get_launch_profile('index')

    class CustomFormatter(argparse.ArgumentDefaultsHelpFormatter,
                          argparse.RawTextHelpFormatter):
        """
        This makes help honor newlines and shows defaults.
        https://bugs.python.org/issue21633
        http://stackoverflow.com/questions/18462610/
        """
        pass

    parser = argparse.ArgumentParser(description='SporeStack.com CLI.')
    # Build the per-flag help texts from the live API metadata.
    launch_help = ''
    for profile in launch_profiles:
        launch_help += '{}: {}: {}\n'.format(profile['name'],
                                             profile['human_name'],
                                             profile['description'])
    osid_help = ''
    for osid in sorted(options['osid'], key=int):
        name = options['osid'][osid]['name']
        osid_help += '{}: {}\n'.format(osid, name)
    dcid_help = ''
    for dcid in sorted(options['dcid'], key=int):
        name = options['dcid'][dcid]['name']
        dcid_help += '{}: {}\n'.format(dcid, name)
    flavor_help = ''
    for flavor in sorted(options['flavor'], key=int):
        help_line = '{}: RAM: {}, VCPUs: {}, DISK: {}\n'
        ram = options['flavor'][flavor]['ram']
        disk = options['flavor'][flavor]['disk']
        vcpus = options['flavor'][flavor]['vcpu_count']
        flavor_help += help_line.format(flavor, ram, vcpus, disk)
    subparser = parser.add_subparsers()
    spawn_subparser = subparser.add_parser('spawn',
                                           help='Spawns a node.',
                                           formatter_class=CustomFormatter)
    spawn_subparser.set_defaults(func=spawn_wrapper)
    list_subparser = subparser.add_parser('list', help='Lists nodes.')
    # NOTE(review): ``func=list`` resolves to whatever ``list`` names at
    # module scope — presumably a module-level handler shadowing the
    # builtin; verify against the rest of the file.
    list_subparser.set_defaults(func=list)
    ssh_subparser = subparser.add_parser('ssh', help='Connect to node.')
    ssh_subparser.set_defaults(func=ssh_wrapper)
    ssh_subparser.add_argument('uuid', help='UUID of node to connect to.')
    ssh_subparser.add_argument('--stdin',
                               help='Send to stdin and return stdout',
                               default=None)

    json_extractor_help = 'Helps you extract fields from json files.'
    json_extractor_subparser = subparser.add_parser('json_extractor',
                                                    help=json_extractor_help)
    json_extractor_subparser.set_defaults(func=json_extractor_wrapper)
    json_extractor_subparser.add_argument('json_file', help='json file.')
    json_extractor_subparser.add_argument('json_key', help='json key.')

    ssfh_help = 'Helps you write sporestack.json files.'
    ssfh_subparser = subparser.add_parser('sporestackfile_helper',
                                          help=ssfh_help)
    ssfh_subparser.set_defaults(func=sporestackfile_helper_wrapper)
    ssfh_subparser.add_argument('--cloudinit', help='cloudinit data.')
    ssfh_subparser.add_argument('--startupscript', help='startup script file.')
    ssfh_subparser.add_argument('--postlaunch',
                                help='postlaunch script file.',
                                default=None)
    ssfh_subparser.add_argument('--days', help='Days', default=1, type=int)
    ssfh_subparser.add_argument('--name', help='Name')
    ssfh_subparser.add_argument('--human_name', help='Human readable name')
    ssfh_subparser.add_argument('--description', help='Description')
    ssfh_subparser.add_argument('--osid',
                                help='OSID',
                                required=True,
                                type=int,
                                default=None)
    ssfh_subparser.add_argument('--dcid', help='DCID', type=int, default=None)
    # NOTE(review): help text 'DCID' on --flavor looks copy-pasted; it
    # should presumably read 'Flavor'.
    ssfh_subparser.add_argument('--flavor', help='DCID', type=int, default=29)

    spawn_subparser.add_argument('--osid',
                                 help=osid_help,
                                 type=int,
                                 default=230)
    spawn_subparser.add_argument('--dcid', help=dcid_help, type=int, default=3)
    spawn_subparser.add_argument('--flavor',
                                 help=flavor_help,
                                 type=int,
                                 default=29)
    spawn_subparser.add_argument('--days',
                                 help='Days to live: 1-28.',
                                 type=int,
                                 default=1)
    # The hidden --uuid default is generated once, when the parser is
    # built, not per spawn invocation.
    spawn_subparser.add_argument('--uuid',
                                 help=argparse.SUPPRESS,
                                 default=str(random_uuid()))
    spawn_subparser.add_argument('--endpoint',
                                 help=argparse.SUPPRESS,
                                 default=None)
    spawn_subparser.add_argument('--paycode',
                                 help=argparse.SUPPRESS,
                                 default=None)
    default_ssh_key_path = '{}/.ssh/id_rsa.pub'.format(os.getenv('HOME'))
    spawn_subparser.add_argument('--ssh_key',
                                 help='SSH public key.',
                                 default=default_ssh_key_path)
    spawn_subparser.add_argument('--launch', help=launch_help, default=None)
    spawn_subparser.add_argument('--sporestackfile',
                                 help='SporeStack JSON file.',
                                 default=None)
    spawn_subparser.add_argument('--cloudinit',
                                 help='cloudinit file.',
                                 default=None)
    spawn_subparser.add_argument('--group',
                                 help='Arbitrary group to associate node with',
                                 default=None)
    args = parser.parse_args()
    # This calls the function or wrapper function, depending on what we set
    # above.
    args.func(args)
Esempio n. 19
0
    def _add_or_update_endpoint(self, action, name, version, request_data):
        '''
        Add or update an endpoint.

        Python-2/3 compatible variant (``unicode``, ``string_escape``):
        validates the endpoint name, uses the
        ``add_or_updating_endpoint`` setting as a simple lock, copies the
        staged query object into place and records the change in the
        TabPy state.  Results are signalled by raising ``gen.Return``.
        '''
        # NOTE(review): non-raw regex literal — '\ ' relies on legacy
        # backslash handling; preserved as-is.
        _name_checker = _compile('^[a-zA-Z0-9-_\ ]+$')
        if not isinstance(name, (str,unicode)):
            raise TypeError("Endpoint name must be a string or unicode")

        if not _name_checker.match(name):
            raise gen.Return('endpoint name can only contain: a-z, A-Z, 0-9,'
            ' underscore, hyphens and spaces.')

        # Simple mutual-exclusion flag: only one add/update may run at once.
        if self.settings.get('add_or_updating_endpoint'):
            raise RuntimeError("Another endpoint update is already in progress, "
                                "please wait a while and try again")

        # Mark the update as in progress; cleared again in the finally block.
        request_uuid = random_uuid()
        self.settings['add_or_updating_endpoint'] = request_uuid
        try:
            # Optional request fields (None / [] when absent).
            description = request_data['description'] if 'description' in request_data else None
            if 'docstring' in request_data:
                # Un-escape the docstring; mechanism differs by Python version.
                if sys.version_info > (3, 0):
                    docstring = str(bytes(request_data['docstring'], "utf-8").decode('unicode_escape'))
                else:
                    docstring = request_data['docstring'].decode('string_escape')
            else:
                docstring=None
            endpoint_type = request_data['type'] if 'type' in request_data else None
            methods = request_data['methods'] if 'methods' in request_data else []
            dependencies = request_data['dependencies'] if 'dependencies' in request_data else None
            target = request_data['target'] if 'target' in request_data else None
            schema = request_data['schema'] if 'schema' in request_data else None

            src_path = request_data['src_path'] if 'src_path' in request_data else None
            target_path = get_query_object_path(self.settings['state_file_path'], name, version)
            _path_checker = _compile('^[\\a-zA-Z0-9-_\ /]+$')
            # copy from staging
            if src_path:
                if not isinstance(request_data['src_path'], (str,unicode)):
                    raise gen.Return("src_path must be a string.")
                if not _path_checker.match(src_path):
                    raise gen.Return('Endpoint name can only contain: a-z, A-Z, 0-9,underscore, hyphens and spaces.')

                yield self._copy_po_future(src_path, target_path)
            elif endpoint_type != 'alias':
                    raise gen.Return("src_path is required to add/update an endpoint.")

            # alias special logic:
            if endpoint_type == 'alias':
                # An alias simply points at (and depends on) its target.
                if not target:
                    raise gen.Return('Target is required for alias endpoint.')
                dependencies = [target]

            # update local config
            try:
                if action == 'add':
                    self.tabpy.add_endpoint(
                        name=name,
                        description=description,
                        docstring=docstring,
                        endpoint_type=endpoint_type,
                        methods=methods,
                        dependencies=dependencies,
                        target=target,
                        schema=schema)
                else:
                    self.tabpy.update_endpoint(
                        name=name,
                        description=description,
                        docstring=docstring,
                        endpoint_type=endpoint_type,
                        methods=methods,
                        dependencies=dependencies,
                        target=target,
                        schema=schema,
                        version=version)

            except Exception as e:
                raise gen.Return("Error when changing TabPy state: %s" % e)

            on_state_change(self.settings)

        finally:
            # Always release the lock flag.
            self.settings['add_or_updating_endpoint'] = None
Esempio n. 20
0
    def _add_or_update_endpoint(self, action, name, version, request_data):
        """
        Add a new endpoint or update an existing one.

        Parameters
        ----------
        action : str
            ``"add"`` creates the endpoint; any other value updates it.
        name : str
            Endpoint name. Only letters, digits, underscore, hyphen and
            spaces are accepted.
        version
            Version identifier forwarded to ``update_endpoint``.
        request_data : dict
            Request body. Recognised keys: ``description``, ``docstring``,
            ``type``, ``methods``, ``dependencies``, ``target``, ``schema``
            and ``src_path``; all are optional here.

        Notes
        -----
        Runs as a Tornado generator coroutine (note the ``yield`` below).
        Error results are signalled by raising ``gen.Return`` with a
        message rather than by raising ordinary exceptions; only the name
        type check and the concurrent-update check raise real exceptions.
        """
        self.logger.log(logging.DEBUG, f"Adding/updating model {name}...")

        # Whitelist of characters allowed in an endpoint name.
        _name_checker = _compile(r"^[a-zA-Z0-9-_\s]+$")
        if not isinstance(name, str):
            msg = "Endpoint name must be a string"
            self.logger.log(logging.CRITICAL, msg)
            raise TypeError(msg)

        if not _name_checker.match(name):
            raise gen.Return("endpoint name can only contain: a-z, A-Z, 0-9,"
                             " underscore, hyphens and spaces.")

        # Crude mutual exclusion: only one add/update may be in flight.
        if self.settings.get("add_or_updating_endpoint"):
            msg = ("Another endpoint update is already in progress"
                   ", please wait a while and try again")
            self.logger.log(logging.CRITICAL, msg)
            raise RuntimeError(msg)

        # Mark this update as in progress; always cleared in the finally
        # block below, even on error paths.
        request_uuid = random_uuid()
        self.settings["add_or_updating_endpoint"] = request_uuid
        try:
            description = (request_data["description"]
                           if "description" in request_data else None)
            if "docstring" in request_data:
                # Round-trip through bytes to turn escape sequences such as
                # "\\n" in the request payload into real characters.
                docstring = str(
                    bytes(request_data["docstring"],
                          "utf-8").decode("unicode_escape"))
            else:
                docstring = None
            endpoint_type = request_data[
                "type"] if "type" in request_data else None
            methods = request_data[
                "methods"] if "methods" in request_data else []
            dependencies = (request_data["dependencies"]
                            if "dependencies" in request_data else None)
            target = request_data[
                "target"] if "target" in request_data else None
            schema = request_data[
                "schema"] if "schema" in request_data else None

            src_path = request_data[
                "src_path"] if "src_path" in request_data else None
            target_path = get_query_object_path(
                self.settings[SettingsParameters.StateFilePath], name, version)
            self.logger.log(logging.DEBUG,
                            f"Checking source path {src_path}...")
            # Path whitelist is wider than the name whitelist: backslash,
            # colon, tilde, dot and parentheses are also allowed —
            # presumably to accommodate Windows-style paths (TODO confirm).
            _path_checker = _compile(r"^[\\\:a-zA-Z0-9-_~\s/\.\(\)]+$")
            # copy from staging
            if src_path:
                if not isinstance(request_data["src_path"], str):
                    raise gen.Return("src_path must be a string.")
                if not _path_checker.match(src_path):
                    raise gen.Return(
                        "Endpoint source path name can only contain: "
                        "a-z, A-Z, 0-9, underscore, hyphens and spaces.")

                yield self._copy_po_future(src_path, target_path)
            elif endpoint_type != "alias":
                raise gen.Return("src_path is required to add/update an "
                                 "endpoint.")

            # alias special logic: an alias carries no source of its own;
            # it points at (and depends on) its target endpoint.
            if endpoint_type == "alias":
                if not target:
                    raise gen.Return("Target is required for alias endpoint.")
                dependencies = [target]

            # update local config
            try:
                if action == "add":
                    self.tabpy_state.add_endpoint(
                        name=name,
                        description=description,
                        docstring=docstring,
                        endpoint_type=endpoint_type,
                        methods=methods,
                        dependencies=dependencies,
                        target=target,
                        schema=schema,
                    )
                else:
                    self.tabpy_state.update_endpoint(
                        name=name,
                        description=description,
                        docstring=docstring,
                        endpoint_type=endpoint_type,
                        methods=methods,
                        dependencies=dependencies,
                        target=target,
                        schema=schema,
                        version=version,
                    )

            except Exception as e:
                raise gen.Return(f"Error when changing TabPy state: {e}")

            # Propagate the state change to the rest of the service.
            on_state_change(self.settings, self.tabpy_state,
                            self.python_service, self.logger)

        finally:
            # Release the in-progress flag no matter how we exited.
            self.settings["add_or_updating_endpoint"] = None
 def random_uuid(self):
     """Return a freshly generated version-4 UUID as a string.

     Delegates to the module-level ``random_uuid`` alias for
     ``uuid.uuid4``; handy when minting Consul ACL tokens.
     """
     return str(random_uuid())
Esempio n. 22
0
def test_node():
    """Spawn a one-day node and sanity-check its quoted price in satoshis."""
    node_uuid = str(random_uuid())
    node = SporeStack.node(days=1, uuid=node_uuid, cloudinit='#!/bin/true')
    assert 10000 < node.satoshis < 55000
Esempio n. 23
0
def main():
    """Build the SporeStack CLI argument parser and dispatch the chosen command.

    Each subcommand registers its handler via ``set_defaults(func=...)``;
    after parsing, the selected handler is invoked with the parsed args.
    """
    cli = argparse.ArgumentParser(description='SporeStack.com CLI.')
    cli.add_argument(
        '--version',
        action='version',
        version='SporeStack {version}'.format(version=__version__))
    cli.add_argument('--endpoint',
                     help='Use alternate SporeStack endpoint.',
                     default='https://sporestack.com')
    commands = cli.add_subparsers(title='Command', dest='command')
    commands.required = True
    defaults_formatter = argparse.ArgumentDefaultsHelpFormatter

    # 'list': enumerate existing nodes.
    list_cmd = commands.add_parser('list', help='Lists nodes.')
    list_cmd.set_defaults(func=list)

    # 'options': show available flavors, OSes, datacenters, etc.
    options_help = 'Show node options (flavor, osid, etc.)'
    options_cmd = commands.add_parser('options', help=options_help)
    options_cmd.set_defaults(func=options)

    # 'ssh': open a shell (or run a one-off command) on a node.
    ssh_cmd = commands.add_parser('ssh', help='Connect to node.')
    ssh_cmd.set_defaults(func=ssh_wrapper)
    ssh_cmd.add_argument('uuid', help='UUID of node to connect to.')
    ssh_cmd.add_argument('--stdin',
                         help='Send to stdin and return stdout',
                         default=None)
    ssh_cmd.add_argument('--command',
                         help='Command to run over SSH',
                         default=None)
    ssh_cmd.add_argument('--ssh_user',
                         help='Connect as user over SSH.',
                         default='root')

    # 'node_info': dump a node's metadata, optionally a single attribute.
    node_info_cmd = commands.add_parser('node_info',
                                        help='Return info about a node.')
    node_info_cmd.set_defaults(func=node_info_wrapper)
    node_info_cmd.add_argument('uuid', help='UUID of node to connect to.')
    node_info_cmd.add_argument('--attribute',
                               help='Which attribute you want to return.',
                               default=None)

    # 'json_extractor': pull a single key out of a JSON document.
    extractor_help = 'Helps you extract fields from json files.'
    extractor_cmd = commands.add_parser('json_extractor',
                                        help=extractor_help)
    extractor_cmd.set_defaults(func=json_extractor_wrapper)
    extractor_cmd.add_argument('json_file', help='json file.')
    extractor_cmd.add_argument('json_key', help='json key.')

    # 'sporestackfile_helper': author sporestack.json launch profiles.
    helper_help = 'Helps you write sporestack.json files.'
    helper_cmd = commands.add_parser('sporestackfile_helper',
                                     help=helper_help)
    helper_cmd.set_defaults(func=sporestackfile_helper_wrapper)
    helper_cmd.add_argument('--cloudinit',
                            help='cloudinit data.',
                            default=None)
    helper_cmd.add_argument('--startupscript', help='startup script file.')
    helper_cmd.add_argument('--postlaunch',
                            help='postlaunch script file.',
                            default=None)
    helper_cmd.add_argument('--days', help='Days', default=1, type=int)
    helper_cmd.add_argument('--name', help='Name', required=True)
    helper_cmd.add_argument('--human_name',
                            help='Human readable name',
                            required=True)
    helper_cmd.add_argument('--description',
                            help='Description Markdown text file')
    helper_cmd.add_argument('--osid',
                            help='OSID',
                            required=True,
                            type=int,
                            default=None)
    helper_cmd.add_argument('--dcid', help='DCID', type=int, default=None)
    helper_cmd.add_argument('--flavor',
                            help='Flavor',
                            type=int,
                            default=None)
    helper_cmd.add_argument('--mimetype',
                            help='Suggested MIME type of stdout',
                            default='text/plain')

    # 'spawn': create a new node.
    spawn_cmd = commands.add_parser('spawn',
                                    help='Spawns a node.',
                                    formatter_class=defaults_formatter)
    spawn_cmd.set_defaults(func=spawn_wrapper)

    spawn_cmd.add_argument('--osid',
                           help='Operating System ID',
                           type=int,
                           default=None)
    spawn_cmd.add_argument('--dcid', help='Datacenter ID', default=None)
    spawn_cmd.add_argument('--flavor',
                           help='Flavor ID',
                           type=int,
                           default=None)
    spawn_cmd.add_argument('--days',
                           help='Days to live: 1-28.',
                           type=int,
                           default=1)
    # Hidden flag: a random UUID is minted when the parser is built,
    # unless the caller overrides it.
    spawn_cmd.add_argument('--uuid',
                           help=argparse.SUPPRESS,
                           default=str(random_uuid()))
    spawn_cmd.add_argument('--paycode',
                           help=argparse.SUPPRESS,
                           default=None)
    ssh_key_default = '{}/.ssh/id_rsa.pub'.format(os.getenv('HOME'))
    spawn_cmd.add_argument('--ssh_key',
                           help='SSH public key.',
                           default=ssh_key_default)
    spawn_cmd.add_argument('--ssh',
                           help='Connect after spawning',
                           action='store_true',
                           default=False)
    spawn_cmd.add_argument('--ssh_user',
                           help='Connect as user over SSH.',
                           default='root')
    spawn_cmd.add_argument('--launch',
                           help='Launch profile',
                           default=None)
    spawn_cmd.add_argument('--sporestackfile',
                           help='SporeStack JSON file.',
                           default=None)
    spawn_cmd.add_argument('--startupscript',
                           help='startup script file.',
                           default=None)
    spawn_cmd.add_argument('--cloudinit',
                           help='cloudinit file.',
                           default=None)
    spawn_cmd.add_argument('--ipxe',
                           help='Set if startup script is iPXE type.',
                           action='store_true',
                           default=False)
    spawn_cmd.add_argument('--ipxe_chain_url',
                           help='iPXE URL to chainload.',
                           default=None)
    spawn_cmd.add_argument('--group',
                           help='Arbitrary group to associate node with',
                           default=None)
    spawn_cmd.add_argument('--currency',
                           help='Cryptocurrency to pay with',
                           default='bch')
    wallet_help = 'Run payment with (command) (address) (satoshis)'
    spawn_cmd.add_argument('--wallet_command',
                           help=wallet_help,
                           default=None)

    # 'topup': extend an existing node's lifetime.
    topup_cmd = commands.add_parser('topup',
                                    help='Top up a node.',
                                    formatter_class=defaults_formatter)
    topup_cmd.set_defaults(func=topup)
    topup_cmd.add_argument('--uuid',
                           help='UUID to top up.',
                           required=True)
    topup_cmd.add_argument('--days',
                           help='Additional days to live: 1-28.',
                           type=int,
                           required=True)
    topup_cmd.add_argument('--paycode',
                           help=argparse.SUPPRESS,
                           default=None)
    topup_cmd.add_argument('--currency',
                           help='Cryptocurrency to pay with',
                           default='bch')
    wallet_help = 'Run payment with (command) (address) (satoshis)'
    topup_cmd.add_argument('--wallet_command',
                           help=wallet_help,
                           default=None)

    cli_args = cli.parse_args()
    # Dispatch to whichever handler the selected subcommand registered.
    cli_args.func(cli_args)
Esempio n. 24
0
def test_bad_currency():
    """Attempt to spawn a node paying in fiat ('usd').

    NOTE(review): there is no assertion after the call -- presumably the
    API is expected to reject 'usd' (e.g. by raising), but that is not
    verified here. Confirm the intended failure mode and assert on it.
    """
    uuid = str(random_uuid())
    node = SporeStack.node(days=1,
                           uuid=uuid,
                           cloudinit='#!/bin/true',
                           currency='usd')
Esempio n. 25
0
def register(event_id):
    """Register a new student for an event, charging them via Stripe.

    Looks up the event, rejects duplicate emails, applies an optional
    discount code, then creates a Stripe customer and charges the
    (possibly discounted) price.

    NOTE(review): Python 2 syntax (``except X, e``). Also, no success
    response or ``user.save()`` is visible after a successful charge --
    the function appears truncated or unfinished here; confirm against
    the original module.
    """
    user = None
    discount = False
    sg = sendgrid.SendGridClient('gopilot', app.config["SENDGRID_PASS"])

    event = Event.find_event(event_id)
    if not event:
        return "Event not found", 404

    price = event.price

    if hasattr(request, 'json') and request.json and 'user' in request.json:
        # Reject registration if the email already has an account.
        if User.objects(email=request.json['user']['email']).first():
            return json.dumps({
                "status":
                "failed",
                "reason":
                "email",
                "message":
                "Your email already has a Pilot account."
            }), 400, jsonType

        print("has user")
        # Build the new (incomplete) student record; completion_token is
        # a random hex string presumably used in a follow-up email link.
        user = Student()
        user.name = request.json['user']['name']
        user.email = request.json['user']['email']
        user.complete = False
        user.completion_token = random_uuid().hex

        if 'discount' in request.json and request.json['discount'] != False:
            print("has discount")
            # user.save()
            discount = checkDiscount(request.json['discount'])
            if discount:
                price -= discount

        print("Charging user %s" % price)
        if 'stripe_token' in request.json:
            print("has stripe")
            stripe.api_key = app.config['STRIPE_KEY']

            # Create the Stripe customer from the client-side card token.
            try:
                customer = stripe.Customer.create(
                    source=request.json['stripe_token'],
                    description=user.name,
                    email=user.email)
            except stripe.CardError, e:
                app.logger.error("Customer Card Error: " + str(e))
                err = e.json_body['error']
                return json.dumps({
                    "status":
                    "failed",
                    "reason":
                    err['param'] if ('param' in err) else 'customer',
                    "message":
                    err['message']
                }), 400, jsonType

            user.stripe_id = customer.id
            # Charge the customer; Stripe amounts are in cents.
            try:
                stripe.Charge.create(
                    amount=(price * 100),  ## Cents
                    currency="usd",
                    customer=customer.id,
                    description="Registration for " + event.name)
            except stripe.CardError, e:
                print("Charge Card Error", e)
                err = e.json_body['error']
                return json.dumps({
                    "status":
                    "failed",
                    "reason":
                    err['param'] if ('param' in err) else 'charge',
                    "message":
                    err['message']
                }), 400, jsonType
Esempio n. 26
0
Just some basic tests.

Kinda broken.

Very broken.
"""

from uuid import uuid4 as random_uuid
import os

import sporestack

# Startup/cloud-init payload: records the boot time to /date, giving a
# cheap way to tell whether the script actually ran on the node.
script = """#!/bin/sh
            date > /date
         """
# Fresh v4 UUID for the second spawn attempt below.
new_uuid = str(random_uuid())

# Assumes the default RSA public key location; this fails if $HOME is
# unset or the key lives elsewhere -- TODO confirm acceptable for these
# smoke tests.
ssh_key_path = '{}/.ssh/id_rsa.pub'.format(os.getenv('HOME'))

with open(ssh_key_path) as ssh_key_file:
    sshkey = ssh_key_file.read()

# Spawn twice: once with a fixed, hard-coded UUID (presumably to test
# replay/idempotent behavior -- confirm) and once with a fresh UUID.
for unique in ['3b69d7c9-ad4d-4d31-b04e-b224f02de4d4',
               new_uuid]:
    node = sporestack.node(days=1,
                           sshkey=sshkey,
                           unique=unique,
                           cloudinit=script,
                           startupscript=script)
    print('Payment status: ' + str(node.payment_status))
    print('Creation status: ' + str(node.creation_status))
Esempio n. 27
0
    def _add_or_update_endpoint(self, action, name, version, request_data):
        '''
        Add a new endpoint or update an existing one.

        :param action: 'add' creates the endpoint; any other value
                       updates it.
        :param name: endpoint name; only letters, digits, underscore,
                     hyphen and spaces are accepted.
        :param version: version identifier forwarded to update_endpoint.
        :param request_data: dict-like request body; recognised keys are
                             description, docstring, type, methods,
                             dependencies, target, schema and src_path.

        Runs as a Tornado generator coroutine; error results are
        signalled by raising ``gen.Return`` with a message.
        '''
        logging.debug("Adding/updating model {}...".format(name))
        # Raw string so \s reaches the regex engine untouched.
        _name_checker = _compile(r'^[a-zA-Z0-9-_\s]+$')
        if not isinstance(name, (str, unicode)):
            log_and_raise("Endpoint name must be a string or unicode",
                          TypeError)

        if not _name_checker.match(name):
            raise gen.Return('endpoint name can only contain: a-z, A-Z, 0-9,'
                             ' underscore, hyphens and spaces.')

        # Crude mutual exclusion: only one add/update may be in flight.
        if self.settings.get('add_or_updating_endpoint'):
            log_and_raise(
                "Another endpoint update is already in progress"
                ", please wait a while and try again", RuntimeError)

        # Mark this update as in progress; cleared in the finally block.
        request_uuid = random_uuid()
        self.settings['add_or_updating_endpoint'] = request_uuid
        try:
            description = (request_data['description']
                           if 'description' in request_data else None)
            if 'docstring' in request_data:
                # Turn escape sequences such as "\\n" in the payload into
                # real characters (py3 vs py2 paths).
                if sys.version_info > (3, 0):
                    docstring = str(
                        bytes(request_data['docstring'],
                              "utf-8").decode('unicode_escape'))
                else:
                    docstring = request_data['docstring'].decode(
                        'string_escape')
            else:
                docstring = None
            endpoint_type = (request_data['type']
                             if 'type' in request_data else None)
            methods = (request_data['methods']
                       if 'methods' in request_data else [])
            dependencies = (request_data['dependencies']
                            if 'dependencies' in request_data else None)
            target = (request_data['target']
                      if 'target' in request_data else None)
            schema = (request_data['schema']
                      if 'schema' in request_data else None)

            src_path = (request_data['src_path']
                        if 'src_path' in request_data else None)
            target_path = get_query_object_path(
                self.settings[SettingsParameters.StateFilePath], name, version)
            # BUG FIX: the previous pattern '^[\\a-zA-Z0-9-_\\s/]+$' put
            # the escape \a (BEL, \x07) into the character class, creating
            # the range \x07-'z' which accepted nearly any printable
            # character (including '.', so '..' path traversal passed
            # validation). The raw string below escapes the backslash
            # itself, so the class allows only a literal backslash,
            # letters, digits, '-', '_', whitespace and '/'.
            _path_checker = _compile(r'^[\\a-zA-Z0-9-_\s/]+$')
            # copy from staging
            if src_path:
                if not isinstance(request_data['src_path'], (str, unicode)):
                    raise gen.Return("src_path must be a string.")
                if not _path_checker.match(src_path):
                    raise gen.Return('Endpoint name can only contain: a-z, A-'
                                     'Z, 0-9,underscore, hyphens and spaces.')

                yield self._copy_po_future(src_path, target_path)
            elif endpoint_type != 'alias':
                raise gen.Return("src_path is required to add/update an "
                                 "endpoint.")

            # alias special logic: an alias carries no source of its own;
            # it points at (and depends on) its target endpoint.
            if endpoint_type == 'alias':
                if not target:
                    raise gen.Return('Target is required for alias endpoint.')
                dependencies = [target]

            # update local config
            try:
                if action == 'add':
                    self.tabpy_state.add_endpoint(name=name,
                                                  description=description,
                                                  docstring=docstring,
                                                  endpoint_type=endpoint_type,
                                                  methods=methods,
                                                  dependencies=dependencies,
                                                  target=target,
                                                  schema=schema)
                else:
                    self.tabpy_state.update_endpoint(
                        name=name,
                        description=description,
                        docstring=docstring,
                        endpoint_type=endpoint_type,
                        methods=methods,
                        dependencies=dependencies,
                        target=target,
                        schema=schema,
                        version=version)

            except Exception as e:
                raise gen.Return("Error when changing TabPy state: %s" % e)

            # Propagate the state change to the rest of the service.
            on_state_change(self.settings, self.tabpy_state,
                            self.python_service)

        finally:
            # Release the in-progress flag no matter how we exited.
            self.settings['add_or_updating_endpoint'] = None
Esempio n. 28
0
    def _extractor(self, msg_object, msg_number):
        """
        Extract parts from email object

        Walks every (possibly nested) MIME part of the message: text
        parts are re-wrapped as UTF-8 MIMEText, images are queued for
        compression, and any other attachment is saved to a temp folder
        and pushed to OwnCloud, collecting a share link.

        :param msg_object: (required) email object
        :type msg_object: email.message.Message instance

        :param msg_number: identifier carried through into the result
                           dict unchanged

        :return: simple message representation
                 eg. { 'uuid': {str} 'message_uuid',
                       'links': {list of tuples} [('file_name', 'owncloud_link'), ...],
                       'parts': {list of objects} [email.message.MIMEBase, ...],
                       'attach': {list of tuples} [('path/to/file', 'content_type'), ...],
                       'object': {object} email.message.Message }
        :rtype: dict

        """

        # message ID -- random hex string used to namespace attachment
        # filenames on disk
        uuid = random_uuid().hex

        links = list()
        parts = list()
        attach = list()

        # 1-based counter appended to each saved attachment's filename
        attachments_counter = 1

        # iterate over ALL message parts (include nested)
        for part in self._extract_parts(msg_object):
            content_type = part.get_content_type()
            base, spec = content_type.split('/', 1)

            # text or html = save 'as is'
            if base == 'text':
                charset = part.get_content_charset()

                # NOTE(review): text parts with no declared charset are
                # silently dropped -- confirm that is intended.
                if charset is not None:
                    payload = (part.get_payload(decode=True)).decode(charset)

                    if spec == 'plain':
                        new_part = MIMEText(payload, 'plain', 'utf-8')
                        parts.append(new_part)

                    if spec == 'html':
                        new_part = MIMEText(payload, 'html', 'utf-8')
                        parts.append(new_part)

                continue

            # save attach to temporary folder, named
            # <uuid>_<counter>.<original extension>
            # NOTE(review): get_filename() can return None for anonymous
            # parts, which would break _decode/split below -- confirm
            # upstream guarantees a filename.
            attach_name = self._decode(part.get_filename())
            attach_path = os.path.join(
                self._place_to, uuid + '_' + str(attachments_counter) + '.' +
                attach_name.split('.')[-1])

            # NOTE(review): isdir() on the target file path looks like it
            # was meant to be an existence check -- confirm.
            if not os.path.isdir(attach_path):
                payload = part.get_payload(decode=True)

                try:
                    if payload is not None:
                        with open(attach_path, mode='wb') as file_pointer:
                            file_pointer.write(payload)

                except Exception:
                    # best-effort: log the failure and keep processing
                    # the remaining parts
                    formatted_lines = traceback.format_exc().splitlines()
                    self.logger.error('EXCEPTION: \n{exc} PART: {part}'.format(
                        exc='\n'.join(formatted_lines), part=payload))

            # increment attachments counter
            attachments_counter += 1

            # image = append to compression list
            if base == 'image':
                attach.append((attach_path, content_type))
                continue

            # not image = upload to OwnCloud, return link to file
            else:
                link = self._owncloud.push_file(attach_path,
                                                '/' + self._project + '/')

                if link:
                    links.append((attach_name, link))

                    # uploaded successfully: delete the local temp copy
                    try:
                        os.remove(attach_path)
                    except Exception:
                        formatted_lines = traceback.format_exc().splitlines()
                        self.logger.error('EXCEPTION: \n{exc}'.format(
                            exc='\n'.join(formatted_lines)))

                continue

        return {
            'uuid': uuid,
            'links': links,
            'parts': parts,
            'attach': attach,
            'object': msg_object,
            'msg_number': msg_number
        }