    def __init__(self, user_id, user_role, user_status):
        self.id = generateRandomKey(42)
        self.user_id = user_id
        self.user_role = user_role
        self.user_status = user_status

        GLSettings.sessions.set(self.id, self)
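The constructor above registers the new session in a shared keyed store under a 42-character random id. As a minimal sketch (assuming GLSettings.sessions also exposes a dict-like get(), which these snippets do not show), a handler could later resolve a client-supplied id back to the session object:

def lookup_session(session_id):
    # Hypothetical helper: look up the session registered by __init__ above;
    # returns None when the id is unknown or the session has been evicted.
    return GLSettings.sessions.get(session_id)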
Example #2
def fsops_pgp_encrypt(fpath, recipient_pgp):
    """
    return
        path of encrypted file,
        length of the encrypted file

    this function is used to encrypt a file for a specific recipient.
    commonly 'receiver_desc' is expected as second argument;
    anyhow a simpler dict can be used.

    required keys are checked on top
    """
    gpoj = GLBPGP()

    try:
        gpoj.load_key(recipient_pgp['pgp_key_public'])

        filepath = os.path.join(Settings.submission_path, fpath)

        with SecureFile(filepath) as f:
            encrypted_file_path = os.path.join(
                os.path.abspath(Settings.submission_path),
                "pgp_encrypted-%s" % generateRandomKey(16))
            _, encrypted_file_size = gpoj.encrypt_file(
                recipient_pgp['pgp_key_fingerprint'], f, encrypted_file_path)

    except:
        raise

    finally:
        # the finally clause always runs, even if the except block
        # returns or re-raises
        gpoj.destroy_environment()

    return encrypted_file_path, encrypted_file_size
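A minimal caller sketch for fsops_pgp_encrypt above; the two dict keys match the fields the function actually reads, while the key material, fingerprint and file name are placeholders:

# Hypothetical caller: encrypt one stored submission file for a recipient.
recipient = {
    'pgp_key_public': '-----BEGIN PGP PUBLIC KEY BLOCK----- ...',      # placeholder
    'pgp_key_fingerprint': '0123456789ABCDEF0123456789ABCDEF01234567'  # placeholder
}

encrypted_path, encrypted_size = fsops_pgp_encrypt('example-submission.data', recipient)
print("wrote %d bytes to %s" % (encrypted_size, encrypted_path))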
Example #3
    def __init__(self, user_id, user_role, user_status):
        self.id = generateRandomKey(42)
        self.user_id = user_id
        self.user_role = user_role
        self.user_status = user_status

        GLSessions.set(self.id, self)
Example #4
    def generate_token_challenge(self, challenges_dict=None):
        # initialization
        self.human_captcha = False
        self.proof_of_work = False

        if challenges_dict is None:
            challenges_dict = {'human_captcha': False, 'proof_of_work': False}

            if Alarm.stress_levels['activity'] >= 1:
                challenges_dict['human_captcha'] = True and GLSettings.memory_copy.enable_captcha

            # a proof of work is always required (if enabled at node level)
            challenges_dict['proof_of_work'] = GLSettings.memory_copy.enable_proof_of_work

        if challenges_dict['human_captcha']:
            random_a = randint(0, 99)
            random_b = randint(0, 99)

            self.human_captcha = {
                'question': u"%d + %d" % (random_a, random_b),
                'answer': u"%d" % (random_a + random_b)
            }

        if challenges_dict['proof_of_work']:
            self.proof_of_work = {'question': generateRandomKey(20)}
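Because the captcha stores both the question and the expected answer on the token, verification reduces to a string comparison. A minimal sketch, assuming the client echoes the answer back as text and that the answer is kept as the string built above (later examples store it as an integer plus a 'solved' flag instead); the helper name is hypothetical:

def check_human_captcha(token, client_answer):
    # Hypothetical verifier for the challenge generated above. When no captcha
    # was requested, human_captcha is simply False and the check passes.
    if not token.human_captcha:
        return True
    return token.human_captcha['answer'] == u"%s" % client_answer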
Example #5
    def __init__(self, user_id, user_role, user_status):
        self.user_id = user_id
        self.user_role = user_role
        self.user_status = user_status
        tempobj.TempObj.__init__(self,
                                 GLSettings.sessions,
                                 generateRandomKey(42),
                                 GLSettings.defaults.authentication_lifetime,
                                 reactor_override)
Example #6
    def get_file_upload(self):
        try:
            if len(self.request.files) != 1:
                raise errors.InvalidInputFormat(
                    "cannot accept more than a file upload at once")

            chunk_size = len(self.request.files['file'][0]['body'])
            total_file_size = int(self.request.arguments['flowTotalSize'][0]) if 'flowTotalSize' in self.request.arguments else chunk_size
            flow_identifier = self.request.arguments['flowIdentifier'][0] if 'flowIdentifier' in self.request.arguments else generateRandomKey(10)

            if ((chunk_size / (1024 * 1024)) > GLSettings.memory_copy.maximum_filesize or
                (total_file_size / (1024 * 1024)) > GLSettings.memory_copy.maximum_filesize):
                log.err("File upload request rejected: file too big")
                raise errors.FileTooBig(GLSettings.memory_copy.maximum_filesize)

            if flow_identifier not in GLUploads:
                f = GLSecureTemporaryFile(GLSettings.tmp_upload_path)
                GLUploads[flow_identifier] = f
            else:
                f = GLUploads[flow_identifier]

            f.write(self.request.files['file'][0]['body'])

            if 'flowChunkNumber' in self.request.arguments and 'flowTotalChunks' in self.request.arguments:
                if self.request.arguments['flowChunkNumber'][0] != self.request.arguments['flowTotalChunks'][0]:
                    return None

            uploaded_file = {}
            uploaded_file['filename'] = self.request.files['file'][0]['filename']
            uploaded_file['content_type'] = self.request.files['file'][0]['content_type']
            uploaded_file['body_len'] = total_file_size
            uploaded_file['body_filepath'] = f.filepath
            uploaded_file['body'] = f

            upload_time = time.time() - f.creation_date

            track_handler(self)

            return uploaded_file

        except errors.FileTooBig:
            raise  # propagate the exception

        except Exception as exc:
            log.err("Error while handling file upload %s" % exc)
            return None
Example #7
    def get_file_upload(self):
        try:
            chunk_size = len(self.request.args['file'][0])
            total_file_size = int(self.request.args['flowTotalSize'][0]) if 'flowTotalSize' in self.request.args else chunk_size
            flow_identifier = self.request.args['flowIdentifier'][0] if 'flowIdentifier' in self.request.args else generateRandomKey(10)

            if ((chunk_size / (1024 * 1024)) > GLSettings.memory_copy.maximum_filesize or
                (total_file_size / (1024 * 1024)) > GLSettings.memory_copy.maximum_filesize):
                log.err("File upload request rejected: file too big")
                raise errors.FileTooBig(GLSettings.memory_copy.maximum_filesize)

            if flow_identifier not in GLUploads:
                f = GLSecureTemporaryFile(GLSettings.tmp_upload_path)
                GLUploads[flow_identifier] = f
            else:
                f = GLUploads[flow_identifier]

            f.write(self.request.args['file'][0])

            if 'flowChunkNumber' in self.request.args and 'flowTotalChunks' in self.request.args:
                if self.request.args['flowChunkNumber'][0] != self.request.args['flowTotalChunks'][0]:
                    return None

            mime_type, encoding = mimetypes.guess_type(
                self.request.args['flowFilename'][0])

            uploaded_file = {
                'name': self.request.args['flowFilename'][0],
                'type': mime_type,
                'size': total_file_size,
                'path': f.filepath,
                'body': f,
                'description': self.request.args.get('description', [''])[0]
            }

            return uploaded_file

        except errors.FileTooBig:
            raise  # propagate the exception

        except Exception as exc:
            log.err("Error while handling file upload %s" % exc)
            return None
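Both upload handlers above follow the flow.js chunked-upload convention: every request carries one chunk plus the flowIdentifier, flowChunkNumber, flowTotalChunks and flowTotalSize parameters, chunks sharing a flowIdentifier are appended to the same temporary file, and the uploaded_file descriptor is only returned for the final chunk. A minimal client-side sketch of how such requests could be produced (only the parameter names come from the handlers above; the rest is hypothetical):

CHUNK_SIZE = 1024 * 1024

def iter_flow_requests(data, identifier):
    # Yield one (params, chunk) pair per request; the server appends each
    # chunk to the temporary file registered under 'flowIdentifier' and
    # returns the assembled descriptor only when the last chunk arrives.
    chunks = [data[i:i + CHUNK_SIZE] for i in range(0, len(data), CHUNK_SIZE)] or [b'']
    for number, chunk in enumerate(chunks, start=1):
        params = {
            'flowIdentifier': identifier,
            'flowChunkNumber': number,
            'flowTotalChunks': len(chunks),
            'flowTotalSize': len(data)
        }
        yield params, chunk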
Example #8
    def __init__(self, token_kind, uses=MAX_USES):
        """
        token_kind assumes currently only value 'submission.

        we plan to add other kinds like 'file'.

        """

        if reactor_override:
            reactor = reactor_override
        else:
            reactor = None

        self.kind = token_kind

        # both 'validity' variables need to be expressed in seconds
        self.start_validity_secs = GLSettings.memory_copy.submission_minimum_delay
        self.end_validity_secs = GLSettings.memory_copy.submission_maximum_ttl

        # Reminder: this is just for developers, because if a clean house
        # is a sign of a wasted life, a Token object without a shortcut
        # is a sign of a psycho life. (vecnish!)
        if GLSettings.devel_mode:
            self.start_validity_secs = 0

        self.remaining_uses = uses

        # creation_date of token assignment
        self.creation_date = datetime.utcnow()

        # to keep track of the uploaded files associated with this token
        self.uploaded_files = []

        self.id = generateRandomKey(42)

        # initialization of token configuration
        self.human_captcha = False
        self.graph_captcha = False
        self.proof_of_work = False

        self.generate_token_challenge()

        TempObj.__init__(self,
                         TokenList.token_dict,
                         # token ID:
                         self.id,
                         # seconds of validity:
                         self.start_validity_secs + self.end_validity_secs,
                         reactor)
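The token above becomes usable only after start_validity_secs have elapsed and is kept alive for start_validity_secs + end_validity_secs in total, which is the lifetime handed to TempObj. A minimal sketch of the corresponding validity check (the helper name is hypothetical):

from datetime import datetime, timedelta

def token_is_usable(token):
    # Hypothetical check mirroring the two validity values set in __init__:
    # not usable before start_validity_secs, nor after the full lifetime.
    now = datetime.utcnow()
    not_before = token.creation_date + timedelta(seconds=token.start_validity_secs)
    not_after = token.creation_date + timedelta(
        seconds=token.start_validity_secs + token.end_validity_secs)
    return not_before <= now <= not_after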
Example #9
    def __init__(self, token_kind="submission", uses=MAX_USES):
        self.id = generateRandomKey(42)
        self.kind = token_kind
        self.remaining_uses = uses
        self.creation_date = datetime.utcnow()

        # Keeps track of the uploaded files associated with this token
        self.uploaded_files = []

        # The token challenges in their default state
        self.human_captcha = {'solved': True}
        self.proof_of_work = {'solved': True}
        self.generate_token_challenges()

        TokenList.set(self.id, self)
Example #10
    def __init__(self, token_kind="submission", uses=MAX_USES):
        self.id = generateRandomKey(42)
        self.kind = token_kind
        self.remaining_uses = uses
        self.creation_date = datetime.utcnow()

        # Keeps track of the uploaded files associated with this token
        self.uploaded_files = []

        # The token challenges in their default state
        self.human_captcha = {'solved': True}
        self.proof_of_work = {'solved': True}
        self.generate_token_challenges()

        TokenList.set(self.id, self)
Example #11
    def generate_token_challenges(self):
        if Alarm.stress_levels['activity'] >= 1 and GLSettings.memory_copy.enable_captcha:
            random_a = randint(0, 99)
            random_b = randint(0, 99)

            self.human_captcha = {
                'question': u"%d + %d" % (random_a, random_b),
                'answer': random_a + random_b,
                'solved': False
            }

        if GLSettings.memory_copy.enable_proof_of_work:
            self.proof_of_work = {
                'question': generateRandomKey(20),
                'solved': False
            }
Example #12
    def generate_token_challenges(self):
        if Alarm.stress_levels['activity'] >= 1 and GLSettings.memory_copy.enable_captcha:
            random_a = randint(0, 99)
            random_b = randint(0, 99)

            self.human_captcha = {
                'question': u"%d + %d" % (random_a, random_b),
                'answer': random_a + random_b,
                'solved': False
            }

        if GLSettings.memory_copy.enable_proof_of_work:
            self.proof_of_work = {
                'question': generateRandomKey(20),
                'solved': False
            }
Example #13
    def generate_token_challenges(self):
        if Alarm.stress_levels['activity'] >= 1 and State.tenant_cache[1].enable_captcha:
            random_a = SystemRandom().randrange(100)
            random_b = SystemRandom().randrange(100)

            self.human_captcha = {
                'question': u"%d + %d" % (random_a, random_b),
                'answer': random_a + random_b,
                'solved': False
            }

        if State.tenant_cache[1].enable_proof_of_work:
            self.proof_of_work = {
                'question': generateRandomKey(20),
                'solved': False
            }
Example #14
    def migrate_InternalFile(self):
        old_objs = self.store_old.find(self.model_from['InternalFile'])
        for old_obj in old_objs:
            new_obj = self.model_to['InternalFile']()
            for _, v in new_obj._storm_columns.items():
                if v.name == 'processing_attempts':
                    new_obj.processing_attempts = 0
                    continue

                if v.name == 'file_path':
                    new_obj.file_path = os.path.join(
                        GLSettings.submission_path,
                        "%s.aes" % generateRandomKey(16))
                    continue

                setattr(new_obj, v.name, getattr(old_obj, v.name))

            self.store_new.add(new_obj)
Example #15
    def __init__(self, token_kind, uses=MAX_USES):
        """
        token_kind assumes currently only value 'submission.

        we plan to add other kinds like 'file'.

        """
        self.id = generateRandomKey(42)

        self.kind = token_kind

        # both 'validity' variables need to be expressed in seconds
        self.start_validity_secs = GLSettings.memory_copy.submission_minimum_delay
        self.end_validity_secs = GLSettings.memory_copy.submission_maximum_ttl

        # Reminder: this is just for developers, because if a clean house
        # is a sign of a wasted life, a Token object without a shortcut
        # is a sign of a psycho life. (vecnish!)
        if GLSettings.devel_mode:
            self.start_validity_secs = 0

        self.remaining_uses = uses

        # creation_date of token assignment
        self.creation_date = datetime.utcnow()

        # to keep track of the uploaded files associated with this token
        self.uploaded_files = []

        # initialization of token configuration
        self.human_captcha = False
        self.graph_captcha = False
        self.proof_of_work = False

        self.generate_token_challenge()

        TokenList.set(self.id, self)
Example #16
    def generate_token_challenge(self, challenges_dict=None):
        # initialization
        self.human_captcha = False
        self.graph_captcha = False
        self.proof_of_work = False

        if challenges_dict is None:
            challenges_dict = {
                'human_captcha': False,
                'graph_captcha': False,
                'proof_of_work': False
            }

            if Alarm.stress_levels['activity'] >= 1:
                challenges_dict['human_captcha'] = True and GLSettings.memory_copy.enable_captcha

            # a proof of work is always required (if enabled at node level)
            challenges_dict['proof_of_work'] = GLSettings.memory_copy.enable_proof_of_work

        if challenges_dict['human_captcha']:
            random_a = randint(0, 99)
            random_b = randint(0, 99)

            self.human_captcha = {
                'question': u"%d + %d" % (random_a, random_b),
                'answer': u"%d" % (random_a + random_b)
            }

        if challenges_dict['graph_captcha']:
            # still not implemented
            pass

        if challenges_dict['proof_of_work']:
            self.proof_of_work = {
                'question': generateRandomKey(20)
            }
Example #17
    def get_file_upload(self):
        try:
            if len(self.request.files) != 1:
                raise errors.InvalidInputFormat("cannot accept more than a file upload at once")

            chunk_size = len(self.request.files['file'][0]['body'])
            total_file_size = int(self.request.arguments['flowTotalSize'][0]) if 'flowTotalSize' in self.request.arguments else chunk_size
            flow_identifier = self.request.arguments['flowIdentifier'][0] if 'flowIdentifier' in self.request.arguments else generateRandomKey(10)

            if ((chunk_size / (1024 * 1024)) > GLSettings.memory_copy.maximum_filesize or
                (total_file_size / (1024 * 1024)) > GLSettings.memory_copy.maximum_filesize):
                log.err("File upload request rejected: file too big")
                raise errors.FileTooBig(GLSettings.memory_copy.maximum_filesize)

            if flow_identifier not in GLUploads:
                f = GLSecureTemporaryFile(GLSettings.tmp_upload_path)
                GLUploads[flow_identifier] = f
            else:
                f = GLUploads[flow_identifier]

            f.write(self.request.files['file'][0]['body'])

            if 'flowChunkNumber' in self.request.arguments and 'flowTotalChunks' in self.request.arguments:
                if self.request.arguments['flowChunkNumber'][0] != self.request.arguments['flowTotalChunks'][0]:
                    return None

            uploaded_file = {
                'filename': self.request.files['file'][0]['filename'],
                'content_type': self.request.files['file'][0]['content_type'],
                'body_len': total_file_size,
                'body_filepath': f.filepath,
                'body': f
            }

            self.request._start_time = f.creation_date
            track_handler(self)

            return uploaded_file

        except errors.FileTooBig:
            raise  # propagate the exception

        except Exception as exc:
            log.err("Error while handling file upload %s" % exc)
            return None
Example #18
    def migrate_InternalFile(self):
        old_objs = self.store_old.find(self.model_from['InternalFile'])
        for old_obj in old_objs:
            new_obj = self.model_to['InternalFile']()
            for _, v in new_obj._storm_columns.iteritems():
                if v.name == 'processing_attempts':
                    new_obj.processing_attempts = 0
                    continue

                if v.name == 'file_path':
                    new_obj.file_path = os.path.join(GLSettings.submission_path, "%s.aes" % generateRandomKey(16))
                    continue

                setattr(new_obj, v.name, getattr(old_obj, v.name))

            self.store_new.add(new_obj)
Example #19
    def get_file_upload(self):
        try:
            if len(self.request.files) != 1:
                raise errors.InvalidInputFormat("cannot accept more than a file upload at once")

            chunk_size = len(self.request.files["file"][0]["body"])
            total_file_size = (
                int(self.request.arguments["flowTotalSize"][0])
                if "flowTotalSize" in self.request.arguments
                else chunk_size
            )
            flow_identifier = (
                self.request.arguments["flowIdentifier"][0]
                if "flowIdentifier" in self.request.arguments
                else generateRandomKey(10)
            )

            if (chunk_size / (1024 * 1024)) > GLSettings.memory_copy.maximum_filesize or (
                total_file_size / (1024 * 1024)
            ) > GLSettings.memory_copy.maximum_filesize:
                log.err("File upload request rejected: file too big")
                raise errors.FileTooBig(GLSettings.memory_copy.maximum_filesize)

            if flow_identifier not in GLUploads:
                f = GLSecureTemporaryFile(GLSettings.tmp_upload_path)
                GLUploads[flow_identifier] = f
            else:
                f = GLUploads[flow_identifier]

            f.write(self.request.files["file"][0]["body"])

            if "flowChunkNumber" in self.request.arguments and "flowTotalChunks" in self.request.arguments:
                if self.request.arguments["flowChunkNumber"][0] != self.request.arguments["flowTotalChunks"][0]:
                    return None

            uploaded_file = {}
            uploaded_file["filename"] = self.request.files["file"][0]["filename"]
            uploaded_file["content_type"] = self.request.files["file"][0]["content_type"]
            uploaded_file["body_len"] = total_file_size
            uploaded_file["body_filepath"] = f.filepath
            uploaded_file["body"] = f

            upload_time = time.time() - f.creation_date

            # file uploads work on a chunk basis, so the upload is counted
            # once as a whole here in get_file_upload()
            for event in outcoming_event_monitored:
                if (
                    event["status_checker"](self._status_code)
                    and event["method"] == self.request.method
                    and event["handler_check"](self.request.uri)
                ):
                    EventTrack(event, upload_time)

            return uploaded_file

        except errors.FileTooBig:
            raise  # propagate the exception

        except Exception as exc:
            log.err("Error while handling file upload %s" % exc)
            return None
Example #20
    def get_file_upload(self):
        try:
            if len(self.request.files) != 1:
                raise errors.InvalidInputFormat("cannot accept more than a file upload at once")

            chunk_size = len(self.request.files['file'][0]['body'])
            total_file_size = int(self.request.arguments['flowTotalSize'][0]) if 'flowTotalSize' in self.request.arguments else chunk_size
            flow_identifier = self.request.arguments['flowIdentifier'][0] if 'flowIdentifier' in self.request.arguments else generateRandomKey(10)

            if ((chunk_size / (1024 * 1024)) > GLSettings.memory_copy.maximum_filesize or
                (total_file_size / (1024 * 1024)) > GLSettings.memory_copy.maximum_filesize):
                log.err("File upload request rejected: file too big")
                raise errors.FileTooBig(GLSettings.memory_copy.maximum_filesize)

            if flow_identifier not in GLUploads:
                f = GLSecureTemporaryFile(GLSettings.tmp_upload_path)
                GLUploads[flow_identifier] = f
            else:
                f = GLUploads[flow_identifier]

            f.write(self.request.files['file'][0]['body'])

            if 'flowChunkNumber' in self.request.arguments and 'flowTotalChunks' in self.request.arguments:
                if self.request.arguments['flowChunkNumber'][0] != self.request.arguments['flowTotalChunks'][0]:
                    return None

            uploaded_file = {}
            uploaded_file['filename'] = self.request.files['file'][0]['filename']
            uploaded_file['content_type'] = self.request.files['file'][0]['content_type']
            uploaded_file['body_len'] = total_file_size
            uploaded_file['body_filepath'] = f.filepath
            uploaded_file['body'] = f

            upload_time = time.time() - f.creation_date

            # file uploads work on a chunk basis, so the upload is counted
            # once as a whole here in get_file_upload()
            for event in outcoming_event_monitored:
                if event['status_checker'](self._status_code) and \
                        event['method'] == self.request.method and \
                        event['handler_check'](self.request.uri):
                    EventTrack(event, upload_time)

            return uploaded_file

        except errors.FileTooBig:
            raise  # propagate the exception

        except Exception as exc:
            log.err("Error while handling file upload %s" % exc)
            return None
Example #21
def fsops_pgp_encrypt(fpath, recipient_pgp):
    """
    return
        path of encrypted file,
        length of the encrypted file

    this function is used to encrypt a file for a specific recipient.
    commonly 'receiver_desc' is expected as second argument;
    anyhow a simpler dict can be used.

    required keys are checked on top
    """
    gpoj = GLBPGP()

    try:
        gpoj.load_key(recipient_pgp['pgp_key_public'])

        filepath = os.path.join(GLSettings.submission_path, fpath)

        with GLSecureFile(filepath) as f:
            encrypted_file_path = os.path.join(os.path.abspath(GLSettings.submission_path), "pgp_encrypted-%s" % generateRandomKey(16))
            _, encrypted_file_size = gpoj.encrypt_file(recipient_pgp['pgp_key_fingerprint'], f, encrypted_file_path)

    except:
        raise

    finally:
        # the finally clause always runs, even if the except block
        # returns or re-raises
        gpoj.destroy_environment()

    return encrypted_file_path, encrypted_file_size
Example #22
    def generate_proof_of_work(self):
        if State.tenant_cache[1].enable_proof_of_work:
            self.proof_of_work = {
                'question': generateRandomKey(20),
                'solved': False
            }
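The proof-of-work challenge above is just a 20-character random question; none of these snippets show how it is solved or verified. One common scheme (an assumption here, not taken from the examples) is to require a numeric nonce such that the hash of question plus nonce ends in a zero byte:

from hashlib import sha256

def verify_proof_of_work(question, nonce):
    # Illustrative acceptance rule only: sha256(question + nonce) must end
    # in a zero byte; the real rule is not shown in these snippets.
    digest = sha256(("%s%d" % (question, nonce)).encode()).digest()
    return digest[-1:] == b"\x00"

def solve_proof_of_work(question):
    # Brute-force the nonce; a one-byte target needs ~256 attempts on average.
    nonce = 0
    while not verify_proof_of_work(question, nonce):
        nonce += 1
    return nonce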