Example #1
def _new_sid(req):
    """
    Make a number based on current time, pid, remote ip
    and two random ints, then hash with md5. This should
    be fairly unique and very difficult to guess.

    @param req: the mod_python request object.
    @type req: mod_python request object.
    @return: the session identifier.
    @rtype: 32-character hexadecimal string

    @warning: The current implementation of _new_sid returns an
        md5 hexdigest string. To avoid a possible directory traversal
        attack in FileSession the sid is validated using
        the _check_sid() method and the compiled regex
        validate_sid_re. The sid will be accepted only if len(sid) == 32
        and it only contains the characters 0-9 and a-f.

        If you change this implementation of _new_sid, make sure to also
        change the validation scheme, as well as the test_Session_illegal_sid()
        unit test in test/test.py.
    """
    return uuid4().hex
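    # NOTE: the legacy time/pid/ip-based implementation below is unreachable
    # after the return above; it is kept only for reference.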

    the_time = long(time.time() * 10000)
    pid = os.getpid()
    random_generator = _get_generator()
    rnd1 = random_generator.randint(0, 999999999)
    rnd2 = random_generator.randint(0, 999999999)
    remote_ip = req.remote_ip

    return md5("%d%d%d%d%s" %
               (the_time, pid, rnd1, rnd2, remote_ip)).hexdigest()
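The warning in the docstring above describes how the sid is validated; a minimal sketch of that scheme is given here, with the exact implementation assumed (the names validate_sid_re and _check_sid are taken from the docstring):

import re

# A valid sid is exactly 32 lowercase hexadecimal characters.
validate_sid_re = re.compile(r'[0-9a-f]{32}$')

def _check_sid(sid):
    # Accept the sid only if it is 32 characters of 0-9/a-f, which rules out
    # path components such as "../" in FileSession file names.
    return len(sid) == 32 and validate_sid_re.match(sid) is not None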
Example #2
def get_search_query_id(**kwargs):
    """
    Returns a unique query identifier.
    """
    p = kwargs.get('p', '')
    f = kwargs.get('f', '')
    cc = kwargs.get('cc', '')
    wl = kwargs.get('wl', '')
    return md5(repr((p, f, cc, wl))).hexdigest()
Example #3
def get_search_query_id(**kwargs):
    """Return unique query indentifier."""
    p = kwargs.get('p', '').strip()
    f = kwargs.get('f', '')
    cc = kwargs.get('cc', '')
    wl = kwargs.get('wl', '')
    so = kwargs.get('so', '')
    sf = kwargs.get('sf', '')
    return md5(repr((p, f, cc, wl, sf, so))).hexdigest()
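A standalone sketch of the same idea: hashing repr() of the parameter tuple gives a deterministic, fixed-length identifier for a search query (hashlib is used directly here instead of the project's md5 wrapper):

from hashlib import md5

def query_id(p='', f='', cc='', wl=''):
    # repr() of the tuple is a stable textual form of the arguments;
    # its md5 hex digest is a fixed-length, deterministic identifier.
    return md5(repr((p, f, cc, wl)).encode('utf-8')).hexdigest()

assert query_id(p='ellis', f='author') == query_id(p='ellis', f='author')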
Example #4
 def create(cls, kind, params, cookie_timeout=timedelta(days=1), onetime=False):
     """Create cookie with given params."""
     expiration = datetime.today() + cookie_timeout
     data = (kind, params, expiration, onetime)
     password = md5(str(random())).hexdigest()
     cookie = cls(expiration=expiration, kind=kind, onetime=int(onetime))
     cookie._data = mysql_aes_encrypt(dumps(data), password)
     db.session.add(cookie)
     db.session.commit()
     db.session.refresh(cookie)
     return password[:16] + hex(cookie.id)[2:-1] + password[-16:]
Example #5
def auto_version_url(file_path):
    """ Appends modification time of the file to the request URL in order for the
        browser to refresh the cache when file changes

        @param file_path: path to the file, e.g js/foo.js
        @return: file_path with modification time appended to URL
    """
    file_md5 = ""
    try:
        file_md5 = md5(open(cfg.get('CFG_WEBDIR') + os.sep + file_path).read()).hexdigest()
    except IOError:
        pass
    return file_path + "?%s" % file_md5
Example #6
def auto_version_url(file_path):
    """ Appends modification time of the file to the request URL in order for the
        browser to refresh the cache when file changes

        @param file_path: path to the file, e.g js/foo.js
        @return: file_path with modification time appended to URL
    """
    file_md5 = ""
    try:
        file_md5 = md5(
            open(cfg.get('CFG_WEBDIR') + os.sep +
                 file_path).read()).hexdigest()
    except IOError:
        pass
    return file_path + "?%s" % file_md5
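A self-contained sketch of the cache-busting idea used by the two auto_version_url examples above; webdir and file_path are hypothetical arguments standing in for the cfg lookup:

import os
from hashlib import md5

def content_hash_url(webdir, file_path):
    # Append a digest of the file contents: the query string (and so the URL
    # seen by the browser) changes exactly when the file content changes.
    digest = ""
    try:
        with open(os.path.join(webdir, file_path), 'rb') as fh:
            digest = md5(fh.read()).hexdigest()
    except IOError:
        pass
    return "%s?%s" % (file_path, digest)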
Example #7
 def create(cls, kind, params, cookie_timeout=timedelta(days=1),
            onetime=False):
     """Create cookie with given params."""
     expiration = datetime.today() + cookie_timeout
     data = (kind, params, expiration, onetime)
     password = md5(str(random())).hexdigest()
     cookie = cls(
         expiration=expiration,
         kind=kind,
         onetime=int(onetime),
     )
     cookie._data = db.func.aes_encrypt(dumps(data), password)
     db.session.add(cookie)
     db.session.commit()
     db.session.refresh(cookie)
     return password[:16]+hex(cookie.id)[2:-1]+password[-16:]
Example #8
def mail_cookie_create_common(kind, params, cookie_timeout=timedelta(days=1), onetime=False):
    """Create a unique url to be sent via email to access this authorization
    @param kind: kind of authorization (e.g. 'pw_reset', 'mail_activation', 'role')
    @param params: whatever parameters are needed
    @param cookie_timeout: for how long the url will be valid
    @param onetime: whether to remove the cookie after it has been used.
    """
    assert(kind in _authorizations_kind)
    expiration = datetime.today()+cookie_timeout
    data = (kind, params, expiration, onetime)
    password = md5(str(random())).hexdigest()
    cookie_id = run_sql('INSERT INTO accMAILCOOKIE (data,expiration,kind,onetime) VALUES '
        '(AES_ENCRYPT(%s, %s),%s,%s,%s)',
        (dumps(data), password, expiration.strftime(_datetime_format), kind, onetime))
    cookie = password[:16]+hex(cookie_id)[2:-1]+password[-16:]
    return cookie
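The value returned above embeds the hex-encoded row id between the first and last 16 characters of the random password; a hedged sketch of how a consumer could take such a token apart (the actual checking function is not part of this listing):

def split_mail_cookie(cookie):
    # The first and last 16 hex characters are the two halves of the AES
    # password; whatever sits between them is the row id in hexadecimal.
    password = cookie[:16] + cookie[-16:]
    cookie_id = int(cookie[16:-16], 16)
    return cookie_id, password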
Example #9
def make_cache_key(custom_kbs_files=None):
    """Create cache key for kbs caches instances

    This function generates a unique key for a given set of arguments.

    The files dictionary is transformed like this:
    {'journal': '/var/journal.kb', 'books': '/var/books.kb'}
    to
    "journal=/var/journal.kb;books=/var/books.kb"

    Then _inspire is appended if we are an INSPIRE site.
    """
    if custom_kbs_files:
        serialized_args = ('%s=%s' % v for v in iteritems(custom_kbs_files))
        serialized_args = ';'.join(serialized_args)
    else:
        serialized_args = "default"
    cache_key = md5(serialized_args).digest()
    return cache_key
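An illustration of the serialisation described in the docstring (plain dict.items() is used here instead of six's iteritems, and the ordering depends on dict iteration order):

custom_kbs_files = {'journal': '/var/journal.kb', 'books': '/var/books.kb'}
serialized_args = ';'.join('%s=%s' % item for item in custom_kbs_files.items())
# e.g. "journal=/var/journal.kb;books=/var/books.kb"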
Example #10
def _new_sid(req):
    """
    Make a number based on current time, pid, remote ip
    and two random ints, then hash with md5. This should
    be fairly unique and very difficult to guess.

    @param req: the mod_python request object.
    @type req: mod_python request object.
    @return: the session identifier.
    @rtype: 32-character hexadecimal string

    @warning: The current implementation of _new_sid returns an
        md5 hexdigest string. To avoid a possible directory traversal
        attack in FileSession the sid is validated using
        the _check_sid() method and the compiled regex
        validate_sid_re. The sid will be accepted only if len(sid) == 32
        and it only contains the characters 0-9 and a-f.

        If you change this implementation of _new_sid, make sure to also
        change the validation scheme, as well as the test_Session_illegal_sid()
        unit test in test/test.py.
    """
    return uuid4().hex
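    # NOTE: the legacy time/pid/ip-based implementation below is unreachable
    # after the return above; it is kept only for reference.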

    the_time = long(time.time()*10000)
    pid = os.getpid()
    random_generator = _get_generator()
    rnd1 = random_generator.randint(0, 999999999)
    rnd2 = random_generator.randint(0, 999999999)
    remote_ip = req.remote_ip

    return md5("%d%d%d%d%s" % (
        the_time,
        pid,
        rnd1,
        rnd2,
        remote_ip)
    ).hexdigest()
Example #11
def document_upload(req=None, folder="", matching="", mode="", exec_date="", exec_time="", ln=CFG_SITE_LANG, priority="1", email_logs_to=None):
    """ Take files from the given directory and upload them with the appropiate mode.
    @parameters:
        + folder: Folder where the files to upload are stored
        + matching: How to match file names with record fields (report number, barcode,...)
        + mode: Upload mode (append, revise, replace)
    @return: tuple (file, error code)
        file: file name causing the error to notify the user
        error code:
            1 - More than one possible recID, ambiguous behaviour
            2 - No records match that file name
            3 - File already exists
    """
    import sys
    from invenio.legacy.bibdocfile.api import BibRecDocs, file_strip_ext
    from invenio.utils.hash import md5
    import shutil
    from invenio.legacy.search_engine import perform_request_search, \
                                      search_pattern, \
                                      guess_collection_of_a_record
    _ = gettext_set_language(ln)
    errors = []
    info = [0, []] # Number of files read, name of the files
    try:
        files = os.listdir(folder)
    except OSError as error:
        errors.append(("", error))
        return errors, info
    err_desc = {1: _("More than one possible recID, ambiguous behaviour"), 2: _("No records match that file name"),
                3: _("File already exists"), 4: _("A file with the same name and format already exists")}
    # Create directory DONE/ if doesn't exist
    folder = (folder[-1] == "/") and folder or (folder + "/")
    files_done_dir = folder + "DONE/"
    try:
        os.mkdir(files_done_dir)
    except OSError:
        # Directory exists or no write permission
        pass
    for docfile in files:
        if os.path.isfile(os.path.join(folder, docfile)):
            info[0] += 1
            identifier = file_strip_ext(docfile)
            extension = docfile[len(identifier):]
            rec_id = None
            if identifier:
                rec_id = search_pattern(p=identifier, f=matching, m='e')
            if not rec_id:
                errors.append((docfile, err_desc[2]))
                continue
            elif len(rec_id) > 1:
                errors.append((docfile, err_desc[1]))
                continue
            else:
                rec_id = str(list(rec_id)[0])
            rec_info = BibRecDocs(rec_id)
            if rec_info.bibdocs:
                for bibdoc in rec_info.bibdocs:
                    attached_files = bibdoc.list_all_files()
                    file_md5 = md5(open(os.path.join(folder, docfile), "rb").read()).hexdigest()
                    num_errors = len(errors)
                    for attached_file in attached_files:
                        if attached_file.checksum == file_md5:
                            errors.append((docfile, err_desc[3]))
                            break
                        elif attached_file.get_full_name() == docfile:
                            errors.append((docfile, err_desc[4]))
                            break
                if len(errors) > num_errors:
                    continue
            # Check if user has rights to upload file
            if req is not None:
                file_collection = guess_collection_of_a_record(int(rec_id))
                auth_code, auth_message = acc_authorize_action(req, 'runbatchuploader', collection=file_collection)
                if auth_code != 0:
                    error_msg = _("No rights to upload to collection '%(x_name)s'", x_name=file_collection)
                    errors.append((docfile, error_msg))
                    continue
            # Move document to be uploaded to temporary folder
            (fd, tmp_file) = tempfile.mkstemp(prefix=identifier + "_" + time.strftime("%Y%m%d%H%M%S", time.localtime()) + "_", suffix=extension, dir=CFG_TMPSHAREDDIR)
            shutil.copy(os.path.join(folder, docfile), tmp_file)
            # Create MARC temporary file with FFT tag and call bibupload
            (fd, filename) = tempfile.mkstemp(prefix=identifier + '_', dir=CFG_TMPSHAREDDIR)
            filedesc = os.fdopen(fd, 'w')
            marc_content = """ <record>
                                    <controlfield tag="001">%(rec_id)s</controlfield>
                                        <datafield tag="FFT" ind1=" " ind2=" ">
                                            <subfield code="n">%(name)s</subfield>
                                            <subfield code="a">%(path)s</subfield>
                                        </datafield>
                               </record> """ % {'rec_id': rec_id,
                                                'name': encode_for_xml(identifier),
                                                'path': encode_for_xml(tmp_file),
                                                }
            filedesc.write(marc_content)
            filedesc.close()
            info[1].append(docfile)
            user = ""
            if req is not None:
                user_info = collect_user_info(req)
                user = user_info['nickname']
            if not user:
                user = "******"
            # Execute bibupload with the appropriate mode

            task_arguments = ('bibupload', user, "--" + mode,
                              "--priority=" + priority, "-N", "batchupload")

            if exec_date:
                date = '--runtime=' + "\'" + exec_date + ' ' + exec_time + "\'"
                task_arguments += (date, )
            if email_logs_to:
                task_arguments += ("--email-logs-to", email_logs_to)
            task_arguments += (filename, )

            jobid = task_low_level_submission(*task_arguments)

            # write batch upload history
            run_sql("""INSERT INTO hstBATCHUPLOAD (user, submitdate,
                    filename, execdate, id_schTASK, batch_mode)
                    VALUES (%s, NOW(), %s, %s, %s, "document")""",
                    (user_info['nickname'], docfile,
                    exec_date != "" and (exec_date + ' ' + exec_time)
                    or time.strftime("%Y-%m-%d %H:%M:%S"), str(jobid)))

            # Move file to DONE folder
            done_filename = docfile + "_" + time.strftime("%Y%m%d%H%M%S", time.localtime()) + "_" + str(jobid)
            try:
                os.rename(os.path.join(folder, docfile), os.path.join(files_done_dir, done_filename))
            except OSError:
                errors.append('MoveError')
    return errors, info
Example #12
def document_upload(req=None,
                    folder="",
                    matching="",
                    mode="",
                    exec_date="",
                    exec_time="",
                    ln=CFG_SITE_LANG,
                    priority="1",
                    email_logs_to=None):
    """ Take files from the given directory and upload them with the appropiate mode.
    @parameters:
        + folder: Folder where the files to upload are stored
        + matching: How to match file names with record fields (report number, barcode,...)
        + mode: Upload mode (append, revise, replace)
    @return: tuple (file, error code)
        file: file name causing the error to notify the user
        error code:
            1 - More than one possible recID, ambiguous behaviour
            2 - No records match that file name
            3 - File already exists
    """
    import sys
    from invenio.legacy.bibdocfile.api import BibRecDocs, file_strip_ext
    from invenio.utils.hash import md5
    import shutil
    from invenio.legacy.search_engine import perform_request_search, \
                                      search_pattern, \
                                      guess_collection_of_a_record
    _ = gettext_set_language(ln)
    errors = []
    info = [0, []]  # Number of files read, name of the files
    try:
        files = os.listdir(folder)
    except OSError as error:
        errors.append(("", error))
        return errors, info
    err_desc = {
        1: _("More than one possible recID, ambiguous behaviour"),
        2: _("No records match that file name"),
        3: _("File already exists"),
        4: _("A file with the same name and format already exists")
    }
    # Create directory DONE/ if doesn't exist
    folder = (folder[-1] == "/") and folder or (folder + "/")
    files_done_dir = folder + "DONE/"
    try:
        os.mkdir(files_done_dir)
    except OSError:
        # Directory exists or no write permission
        pass
    for docfile in files:
        if os.path.isfile(os.path.join(folder, docfile)):
            info[0] += 1
            identifier = file_strip_ext(docfile)
            extension = docfile[len(identifier):]
            rec_id = None
            if identifier:
                rec_id = search_pattern(p=identifier, f=matching, m='e')
            if not rec_id:
                errors.append((docfile, err_desc[2]))
                continue
            elif len(rec_id) > 1:
                errors.append((docfile, err_desc[1]))
                continue
            else:
                rec_id = str(list(rec_id)[0])
            rec_info = BibRecDocs(rec_id)
            if rec_info.bibdocs:
                for bibdoc in rec_info.bibdocs:
                    attached_files = bibdoc.list_all_files()
                    file_md5 = md5(
                        open(os.path.join(folder, docfile),
                             "rb").read()).hexdigest()
                    num_errors = len(errors)
                    for attached_file in attached_files:
                        if attached_file.checksum == file_md5:
                            errors.append((docfile, err_desc[3]))
                            break
                        elif attached_file.get_full_name() == docfile:
                            errors.append((docfile, err_desc[4]))
                            break
                if len(errors) > num_errors:
                    continue
            # Check if user has rights to upload file
            if req is not None:
                file_collection = guess_collection_of_a_record(int(rec_id))
                auth_code, auth_message = acc_authorize_action(
                    req, 'runbatchuploader', collection=file_collection)
                if auth_code != 0:
                    error_msg = _(
                        "No rights to upload to collection '%(x_name)s'",
                        x_name=file_collection)
                    errors.append((docfile, error_msg))
                    continue
            # Move document to be uploaded to temporary folder
            (fd, tmp_file) = tempfile.mkstemp(
                prefix=identifier + "_" +
                time.strftime("%Y%m%d%H%M%S", time.localtime()) + "_",
                suffix=extension,
                dir=CFG_TMPSHAREDDIR)
            shutil.copy(os.path.join(folder, docfile), tmp_file)
            # Create MARC temporary file with FFT tag and call bibupload
            (fd, filename) = tempfile.mkstemp(prefix=identifier + '_',
                                              dir=CFG_TMPSHAREDDIR)
            filedesc = os.fdopen(fd, 'w')
            marc_content = """ <record>
                                    <controlfield tag="001">%(rec_id)s</controlfield>
                                        <datafield tag="FFT" ind1=" " ind2=" ">
                                            <subfield code="n">%(name)s</subfield>
                                            <subfield code="a">%(path)s</subfield>
                                        </datafield>
                               </record> """ % {
                'rec_id': rec_id,
                'name': encode_for_xml(identifier),
                'path': encode_for_xml(tmp_file),
            }
            filedesc.write(marc_content)
            filedesc.close()
            info[1].append(docfile)
            user = ""
            if req is not None:
                user_info = collect_user_info(req)
                user = user_info['nickname']
            if not user:
                user = "******"
            # Execute bibupload with the appropriate mode

            task_arguments = ('bibupload', user, "--" + mode,
                              "--priority=" + priority, "-N", "batchupload")

            if exec_date:
                date = '--runtime=' + "\'" + exec_date + ' ' + exec_time + "\'"
                task_arguments += (date, )
            if email_logs_to:
                task_arguments += ("--email-logs-to", email_logs_to)
            task_arguments += (filename, )

            jobid = task_low_level_submission(*task_arguments)

            # write batch upload history
            # Use the already-resolved 'user' name here: user_info is not set
            # when req is None.
            run_sql(
                """INSERT INTO hstBATCHUPLOAD (user, submitdate,
                    filename, execdate, id_schTASK, batch_mode)
                    VALUES (%s, NOW(), %s, %s, %s, "document")""",
                (user, docfile, exec_date != "" and
                 (exec_date + ' ' + exec_time)
                 or time.strftime("%Y-%m-%d %H:%M:%S"), str(jobid)))

            # Move file to DONE folder
            done_filename = docfile + "_" + time.strftime(
                "%Y%m%d%H%M%S", time.localtime()) + "_" + str(jobid)
            try:
                os.rename(os.path.join(folder, docfile),
                          os.path.join(files_done_dir, done_filename))
            except OSError:
                errors.append('MoveError')
    return errors, info
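A self-contained sketch of the duplicate test used in the document_upload examples above: a file is treated as already attached when its md5 checksum matches one on record (path and known_checksums are hypothetical names):

from hashlib import md5

def is_duplicate(path, known_checksums):
    # Hash the file content and compare it against the checksums already
    # stored for the record's attached files.
    with open(path, 'rb') as fh:
        return md5(fh.read()).hexdigest() in known_checksums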
Example #13
 def hash(self, password):
     if db.engine.name != 'mysql':
         return md5(password).digest()
     email = self.__clause_element__().table.columns.email
     return db.func.aes_encrypt(email, password)
Example #14
 def hash(self, password):
     """Generate a hashed version of the password."""
     if db.engine.name != 'mysql':
         return md5(password).digest()
     email = self.__clause_element__().table.columns.email
     return db.func.aes_encrypt(email, password)
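On non-MySQL backends the method above falls back to a raw md5 digest of the password; a tiny standalone illustration of what that branch stores (16 raw bytes, not the 32-character hex string):

from hashlib import md5

stored = md5(b'secret').digest()  # raw digest, unlike hexdigest()
assert len(stored) == 16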
Example #15
def _get_record_hash(link):
    """
    Generate a record hash including CFG_SITE_URL so that
    if CFG_SITE_URL is updated, the QR-code image is invalidated.
    """
    return md5(link).hexdigest()[:8].lower()
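A standalone sketch of the short record hash; the link value is hypothetical, built from CFG_SITE_URL plus a record path as the docstring describes:

from hashlib import md5

link = "https://example.org/record/1234"  # hypothetical CFG_SITE_URL + record path
short_hash = md5(link.encode('utf-8')).hexdigest()[:8].lower()
# Changing CFG_SITE_URL changes short_hash and so invalidates cached QR images.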
Example #16
 def test_md5(self):
     self.assertEqual(
         md5('').hexdigest(), 'd41d8cd98f00b204e9800998ecf8427e')
     self.assertEqual(
         md5('test').hexdigest(), '098f6bcd4621d373cade4e832627b4f6')
Example #17
 def test_md5(self):
     self.assertEqual(md5("").hexdigest(), "d41d8cd98f00b204e9800998ecf8427e")
     self.assertEqual(md5("test").hexdigest(), "098f6bcd4621d373cade4e832627b4f6")