Example #1
 def print_blocks(self, blocks):
     if len(blocks) == 0:
         return
     print(utils.get_hash(blocks[0])[:8], end="")
     for i in range(1, len(blocks)):
         print("->", utils.get_hash(blocks[i])[:8], end="")
     print()
Example #2
def create_relations():
    # keep track of all the models used in the DB
    models = [User, RunSettings]

    # wipe the database
    print('wiping the database')
    db = SqliteDatabase('data.db')
    try:
        db.drop_tables(models)
    except OperationalError:
        pass

    # create the tables in the DB
    print('\ncreating models in the DB:')
    for i in models:
        print(i)
        i.create_table()

    # add a user
    print('\nadding a user')
    admin_user = User(mail="*****@*****.**", password=get_hash('1'), port=56001)
    admin_user.save()

    test_user = User(mail="*****@*****.**", password=get_hash('1'), port=9701)
    test_user.save()
Example #3
 def print_chain(self):
     with self.mutex:
         if len(self.blocks) == 0:
             return
         print(utils.get_hash(self.blocks[0])[:8], end="")
         for i in range(1, len(self.blocks)):
             print("->", utils.get_hash(self.blocks[i])[:8], end="")
         print()
Example #4
def process_app_data(metadata, cmd_string, input_filepath, stdout, stderr, return_code):
  """
  <Purpose>
    Execute the given command and redirect input (as necessary).

  <Arguments>
    metadata:
      The metadata dictonary.

    cmd_string:
      A string of the command that was provided to execute the build or test 
      (e.g. "python test.py", "make").

    input_filepath:
      The filepath of the file from which stdin should be read; will be None if 
      no explicit file specified

    stdout:
      A string representing the stdout from the command that was run.

    stderr:
      A string representing the stderr from the command that was run.

    return_code:
      An integer representing the return code from the command that was run.

  <Exceptions>
    TBD.

  <Return>
    None.
  """

  metadata['application']['command'] = cmd_string
  metadata['application']['return_code'] = return_code

  cwd = os.getcwd()

  # For stdin, stdout and stderr, write each to a file, hash it, and store
  # the hash and filepath in the metadata
  if input_filepath:
    saved_input_path = os.path.join(cwd,"in")
    shutil.copyfile(input_filepath, saved_input_path)
    metadata['application']['input_hash'] = utils.get_hash(saved_input_path)
    metadata['application']['input_path'] = saved_input_path
  else:
    metadata['application']['input_hash'] = None
    metadata['application']['input_path'] = None

  saved_output_path = os.path.join(cwd,"out")
  utils.write_to_file(stdout, saved_output_path)
  metadata['application']['output_hash'] = utils.get_hash(saved_output_path)
  metadata['application']['output_path'] = saved_output_path
  saved_err_path = os.path.join(cwd,"err")
  utils.write_to_file(stderr, saved_err_path)
  metadata['application']['err_hash'] = utils.get_hash(saved_err_path)
  metadata['application']['err_path'] = saved_err_path
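The snippet above leans on two helpers from its utils module that are not shown. A minimal sketch of what they might look like, assuming get_hash returns a hex digest of a file's contents (the hash algorithm here is an assumption):

import hashlib

def get_hash(filepath):
    # Hypothetical sketch: hash a file's contents in chunks.
    # The real utils.get_hash may use a different algorithm.
    sha = hashlib.sha256()
    with open(filepath, 'rb') as f:
        for chunk in iter(lambda: f.read(8192), b''):
            sha.update(chunk)
    return sha.hexdigest()

def write_to_file(data, filepath):
    # Hypothetical sketch: persist a captured stream to disk.
    with open(filepath, 'w') as f:
        f.write(data)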
Example #5
 def update_verified_txns(self):
     with self.verified_txns_mutex:
         self.verified_txout_used = {}
         self.verified_txout = {}
         with self.mutex:
             for blk in self.blocks:
                 for txn in blk.tr_list:
                     self.verified_txout_used[utils.get_hash(txn)] = [False] * len(txn.txouts)
                     self.verified_txout[utils.get_hash(txn)] = txn.txouts
                     for txin in txn.txins:
                         if txin.pre_tx_hash is not None:
                             self.verified_txout_used[txin.pre_tx_hash][txin.pre_txout_idx] = True
Example #6
def predict():
    data = json.loads(request.data.decode())
    mandatory_items = ['problem', 'question', 'answer']
    for item in mandatory_items:
        if item not in data.keys():
            return jsonify({'result': 'Fill in all text fields.'})
        if data[item] == '':
            return jsonify({'result': 'Empty field: %s' % item})
    instance = {}
    instance['pd_hash'] = get_hash(data['problem'])
    instance['qu_hash'] = get_hash(data['question'])
    instance['answer'] = data['answer']
    prediction = get_prediction(landmarks, instance)
    return jsonify({'result': prediction})
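For context, a client for this Flask endpoint might look like the sketch below; the URL and the /predict route are assumptions, since the snippet does not show the app's routing.

import json
import requests  # third-party HTTP client

payload = {'problem': '2 + 2', 'question': 'What is the sum?', 'answer': '4'}
# Hypothetical URL and route: adjust to wherever the app actually runs.
resp = requests.post('http://localhost:5000/predict', data=json.dumps(payload))
print(resp.json()['result'])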
Example #7
 def handle_send_block(self, blk):
     if not self.verify_block(blk):
         return
     with self.mutex:
         idx = len(self.blocks)
         if idx != blk.idx:
             return
         if idx > 0:
             pre_hash = utils.get_hash(self.blocks[-1])
         else:
             pre_hash = utils.get_hash(None)
         if pre_hash != blk.pre_hash:
             return
         self.blocks.append(blk)
     return
Example #8
 def get_irpath(self):
     """
     Gets the path for the saved calibration joblib model
     :return: path for calibration model
     """
     fn = utils.get_hash(self.detector_dict) + '_ir.joblib'
     return os.path.join(self.gift_basepath, self.calmodelsubpath, fn)
Example #9
 def get_caldatapath(self, scratch_dirpath):
     """
     Gets the path for the saved calibration data
     :return: path for calibration data
     """
     fn = utils.get_hash(self.detector_dict) + '_cal.p'
     return os.path.join(scratch_dirpath, fn)
Example #10
def log_in():
    data = request.form
    if NAME not in data:
        return utils.get_extended_error_by_code(1, NAME)
    if PASSWORD not in data:
        return utils.get_extended_error_by_code(1, PASSWORD)
    name = data[NAME]
    password = data[PASSWORD]
    exists = User.query.filter_by(name=name).count() != 0
    if not exists:
        return utils.get_error_by_code(3)
    user = User.query.filter_by(name=name).first()
    if utils.get_hash(password + user.salt) != user.password_hash:
        return utils.get_error_by_code(3)
    if not user.is_activated():
        return utils.get_error_by_code(15)
    token = Token.query.filter_by(user_id=user.id).first()
    if token is not None:
        db.session.delete(token)
    token = Token(name, password, user.id)
    db.session.add(token)
    db.session.flush()
    db.session.refresh(token)
    result_token = token.token
    db.session.commit()
    return utils.RESPONSE_FORMAT % utils.get_log_in_response(
        user.id, result_token)
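The comparison get_hash(password + user.salt) != user.password_hash implies that registration stores a random per-user salt next to the hash. A minimal sketch of the matching registration-side logic (the helper name is illustrative, not from the source):

import secrets

def make_credentials(password):
    # Hypothetical helper: generate a random salt and hash the salted
    # password the same way log_in() verifies it.
    salt = secrets.token_hex(16)
    password_hash = utils.get_hash(password + salt)
    return salt, password_hash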
Example #11
File: api.py Project: 7aske/blog
def api_post_comment(postid):
    if request.method == "POST":
        comment = commentutils.request_to_comment(request)
        if comment is not None:
            post = get_db().db.posts.find_one_or_404({"id": postid})
            post["comments"].append(comment)
            if "mailer" in config.sections():
                commentutils.mail_commenters(post, comment)
            get_db().db.posts.update_one(
                {"id": postid}, {"$set": {
                    "comments": post["comments"]
                }})
            return json.dumps(commentutils.comment_to_json(comment)), 201
        else:
            return "Not Found", 404
    elif request.method == "GET":
        post = get_db().db.posts.find_one_or_404({"id": postid})
        addr = request.headers.get("X-Forwarded-For",
                                   default=request.remote_addr)
        voter = get_db().db.voters.find_one({"voter": utils.get_hash(addr)})
        for comment in post["comments"]:
            comment["date_posted"] = comment["date_posted"].strftime(TIME_FMT)
        res = Response()
        res.set_data(
            json.dumps({
                "voter": voter,
                "comments": post["comments"]
            },
                       default=lambda o: str(o)))
        return res, 200
    else:
        return "Bad Request", 400
Example #12
def forget(request):
    # Comment out in production
    if settings.EMAIL_HOST_PASSWORD == "":
        raise ValueError(
            "Email password is missing. Set password in EMAIL_HOST_PASSWORD at settings.py")

    email = request.POST.get("email")
    try:
        user = User.objects.filter(
            Q(email=email) | Q(username=email)
        ).first()
        code = get_code()
        if Code.objects.filter(user=user).count() > 0:
            obj = Code.objects.filter(user=user).first()
        else:
            obj = Code()
        obj.code = get_hash(code)
        obj.user = user
        obj.save()
        username = user.username
        subject = "Bugbinder | Reset Password."
        message = f"Dear {username},\nYou recently requested to reset your password for your Bugbinder account.\n\nCODE: {code}\n\nIf you didn't request a password reset, please ignore this email.\n\nThanks,\nBugbinder"
        async_send_mail(subject, message, settings.EMAIL_HOST_USER, user.email)
        return JsonResponse({'status': 200})
    except Exception:
        return JsonResponse({'status': 403})
Example #13
    def handle_full_block(self, packet, diff):

        dif = self.BLOCK_SIZE - self.dest_packetSize[packet.dest]
        if dif == packet.size():
            self.dest_packetList[packet.dest].append(
                tcp_packet.Packet(packet.src, packet.dest, True, packet.is_fin,
                                  packet.payload[:dif]))
        else:
            self.dest_packetList[packet.dest].append(
                tcp_packet.Packet(packet.src, packet.dest, True, False,
                                  packet.payload[:dif]))
        self.dest_block[
            packet.dest] = self.dest_block[packet.dest] + packet.payload[:dif]
        hash = utils.get_hash(self.dest_block[packet.dest])
        if hash in self.hash_packetList.keys():
            comp_packet = tcp_packet.Packet(packet.src, packet.dest, False,
                                            packet.is_fin, hash)
            self.send(comp_packet, self.wan_port)
        else:
            self.hash_packetList[hash] = self.dest_packetList[packet.dest]
            self.send_code(packet, self.dest_packetList[packet.dest])
        self.dest_packetSize[packet.dest] = diff

        self.dest_block[packet.dest] = ""
        self.dest_packetList[packet.dest] = []
        if diff != 0:
            self.dest_block[packet.dest] = packet.payload[dif:]
            self.dest_packetList[packet.dest].append(
                tcp_packet.Packet(packet.src, packet.dest, True, packet.is_fin,
                                  packet.payload[dif:]))
Example #14
 def determine_if_hashed(self, block, packet_key, is_fin, is_remote):
     hash_code = utils.get_hash(block)
     if hash_code in self.hash_payloads:
         self.send_hash(hash_code, packet_key, is_fin, is_remote)
     else:
         self.add_block_to_hashes(block)
         self.send_block(block, packet_key, is_fin, is_remote)
Example #15
    def register(self, data):
        # check all fields
        if not check_all_parameters(data, [
                'id', 'name', 'surname', 'email', 'school_id', 'password',
                'education'
        ]):
            return json.dumps({"error": "Not enough data"}), 400
        if 'phd' not in data:
            data['phd'] = False
        # check fields that can be NULL
        data['patronymic'] = check_for_null(data, 'patronymic')
        data['phone'] = check_for_null(data, 'phone')

        # hash password
        data['password'] = get_hash(data['password'])

        # try to add to db
        try:
            sql = "INSERT INTO teachers (teacher_id, name, surname, patronymic, phd, email, phone, school_id, education, password) " \
                  "VALUES ('%s', '%s','%s', %s, '%s', '%s', %s, '%s', '%s','%s');" % (
                      data['id'], data['name'], data['surname'],
                      data['patronymic'], data['phd'],
                      data['email'], data['phone'],
                      data['school_id'], data['education'],
                      data['password'])
            self.db.execute(sql)
        except Exception as e:
            return get_error(e, 1)
        return "ok", 201
Example #16
def save_data(user_id):
    data = request.json

    send_to_ledger(data['predictors'], user_id, data['provider'])

    # TODO: move to a proper place
    db_connection = psycopg2.connect("dbname=postgres user=khomenkos")
    cursor = db_connection.cursor()

    for predictor in data['predictors']:
        # TODO: proper save
        query = """INSERT INTO t_user_interests (a_user_id,
                        a_type,
                        a_interest,
                        a_probability) VALUES ('%s', '%s', '%s', %f)""" % (
            get_hash(user_id), data['type'], predictor['prediction_class'],
            predictor['probability'])

        cursor.execute(query)

    db_connection.commit()
    db_connection.close()

    return jsonify(
        {'status': f'pretending to save data for user_id {user_id}'})
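The "TODO: proper save" most likely refers to the %-formatted SQL, which is open to injection. A sketch of the same insert using psycopg2's parameter binding, with behavior otherwise unchanged:

query = """INSERT INTO t_user_interests (a_user_id, a_type, a_interest, a_probability)
           VALUES (%s, %s, %s, %s)"""
cursor.execute(query, (get_hash(user_id), data['type'],
                       predictor['prediction_class'], predictor['probability']))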
Example #17
    def register(self, data):
        # check all fields
        if not check_all_parameters(data, [
                'id', 'name', 'surname', 'school_id', 'password', 'email',
                'class'
        ]):
            return json.dumps({"error": "Not enough data"}), 400

        # check fields that can be NULL

        data['patronymic'] = check_for_null(data, 'patronymic')
        data['phone'] = check_for_null(data, 'phone')
        data['birth_date'] = check_for_null(data, 'birth_date')

        # hash password
        data['password'] = get_hash(data['password'])

        # try to add to db
        try:
            sql = "INSERT INTO pupils (student_id,name, surname, patronymic, class, email, phone, birth_date, school_id, password) " \
                  "VALUES ('%s','%s', '%s', %s, '%s', '%s', %s, %s, '%s', '%s');" % (
                      data['id'], data['name'], data['surname'],
                      data['patronymic'], data['class'],
                      data['email'], data['phone'],
                      data['birth_date'], data['school_id'],
                      data['password'])
            self.db.execute(sql)
        except Exception as e:
            return get_error(e)
        return json.dumps({"data": True}), 201
Example #18
def bytes_to_hashed_chunk(s):
    if len(s) > settings.CHUNK_SIZE:
        logger.error("Too big a byteseq to fit in one chunk")
        return

    b = bytearray()

    # Marker for the beginning of the chunk
    b += constant_to_bytes(Constant.BEG_CHUNK)

    # "Size block"
    b += constant_to_bytes(Constant.BEG_SIZE)
    b += uint_to_bytes(len(s))
    b += constant_to_bytes(Constant.END_SIZE)

    # "Data block"
    b += constant_to_bytes(Constant.BEG_DATA)
    b += s
    b += constant_to_bytes(Constant.END_DATA)

    # "Hash block"
    b += constant_to_bytes(Constant.BEG_HASH)
    b += utils.get_hash(s)
    b += constant_to_bytes(Constant.END_HASH)

    # Marker for the end of the chunk
    b += constant_to_bytes(Constant.END_CHUNK)

    # logger.debug("Constructed chunk {} for byteseq {}"
    #  .format(pretty_print(b), pretty_print(s)))
    return bytes(b)
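Example #45 below shows the receiving side (recv_chunk) parsing this framing. A quick usage sketch, assuming the payload fits in settings.CHUNK_SIZE and sock is a connected socket (illustrative):

chunk = bytes_to_hashed_chunk(b"hello")
# Layout: BEG_CHUNK | BEG_SIZE size END_SIZE | BEG_DATA data END_DATA
#         | BEG_HASH hash END_HASH | END_CHUNK
sock.sendall(chunk)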
Example #20
    def new_block(self):
        """
        Mining a new block.
        """
        chain = self.chain
        header = {
            'version': self.version,
            'ts': time.time(),
            'prev_hash': '' if not chain else chain[-1]['hash'],
            'nonce': '0',
            'target': self.get_target(),
            # TODO merkle root
        }
        # TODO does the coinbase tx affect the merkle root?
        coinbase_tx = self.new_tx('0', self.address, 50, 0)
        tx_pool = self.tx_pool
        # pop from the shared pool to keep this thread-safe
        txs = [coinbase_tx] + [tx_pool.pop(0) for _ in range(len(tx_pool))]

        block = dict(header)

        # guess a nonce; this can take a long time
        nonce = utils.get_nonce(header)
        block['nonce'] = nonce
        block['hash'] = utils.get_hash(block)
        block['tx'] = txs
        block['confirmation'] = 1
        chain.append(block)
        self.broadcast('block', block)

        # update the coinbase reward on the ledger after the block is found
        if not self.update_ledger([coinbase_tx]):
            print('update ledger error???')
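utils.get_nonce(header) is not shown. A plausible sketch of a hashcash-style proof-of-work search, assuming utils.get_hash returns a hex string and 'target' is a hex threshold; the project's real search may differ:

def get_nonce(header):
    # Hypothetical sketch: try nonces until the header hash falls
    # below the target (lexicographic comparison of hex digests).
    nonce = 0
    while True:
        trial = dict(header, nonce=str(nonce))
        if get_hash(trial) < header['target']:
            return str(nonce)
        nonce += 1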
Example #21
 def get_wpath(self):
     """
     Gets the path to the saved linear model
     :return: path for the pickle file
     """
     fn = utils.get_hash(self.detector_dict) + '_lr.joblib'
     return os.path.join(self.gift_basepath, 'calibration', 'fitted', fn)
Example #22
    def compute_att(self, model_filepath, examples_dirpath, scratch_dirpath):
        """
        Run the attack described by the detectors' attack dict (if any)
        :param model_filepath: standard input argument
        :param examples_dirpath: standard input argument
        :param scratch_dirpath: standard input argument
        :return: None
        """

        attfn = self.get_attfn()
        if attfn is None:
            return
        attpath = os.path.join(scratch_dirpath, attfn)
        if os.path.exists(attpath):
            return

        att_dict = self.detector_dict['att_dict']

        assert 'type' in att_dict, 'Need to specify an attack type'
        assert att_dict['type'] in ['l1', 'filt'], 'attack type must be l1 or filt'

        print('starting attack on ', model_filepath, "with attack", utils.get_hash(att_dict))

        if att_dict['type'] == 'l1':
            dump_adv.dump_model_l1(model_filepath, examples_dirpath, attpath,
                                   random_examples_dirpath=self.imagenet_path, **att_dict['kwargs'])
        else:
            dump_adv.dump_model_filt_attack(model_filepath, examples_dirpath, attpath,
                                            random_examples_dirpath=self.imagenet_path, **att_dict['kwargs'])
Example #23
def get_transcript_segments(fname):
    transcript_segments = [] # list. each element is (start_time, end_time, text, uid). times are in seconds.
    pattern = re.compile(r"\d\d:\d\d:\d\d - \d\d:\d\d:\d\d", re.ASCII)

    doc = docx.Document(fname)
    for para in doc.paragraphs:
        text = para.text
        text = text.strip()
        if text=="":
            continue
        if not pattern.match(text[:19]):
            continue
        start_time = convert_to_seconds(text[:8])
        end_time   = convert_to_seconds(text[11:19])
        transcript = text[19:]
        for punct in punctuations:
            transcript = transcript.replace(punct, "")
        if transcript=="":
            continue
        transcript = transcript.strip()

        # see reason for not using global speaker ID by searching for "bold" in http://kaldi-asr.org/doc/data_prep.html
        utt_id = str(uuid.uuid4())[:8]
        uid = "{}-{}-{}".format(get_hash(transcript)[:8], "speaker1", str(uuid.uuid4())[:8])
        #uid = "{}-{}-{}".format(get_hash(transcript)[:8], utt_id, utt_id)

        if start_time >= end_time:
            continue

        transcript_segments.append((start_time, end_time, transcript, uid))
    return transcript_segments
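convert_to_seconds is assumed to parse an HH:MM:SS stamp into seconds. A minimal sketch:

def convert_to_seconds(timestamp):
    # Hypothetical sketch: "00:01:30" -> 90 seconds.
    hours, minutes, seconds = (int(part) for part in timestamp.split(":"))
    return hours * 3600 + minutes * 60 + seconds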
Example #24
 def flush_buffer_client(self, flow):
     curr_buffer = self.flows_to_buffers[flow]
     hashed = utils.get_hash(curr_buffer)
     if hashed is not None:
         cache = self.caches
         curr_buffer = self.flows_to_buffers[flow]
         cache[hashed] = curr_buffer
     self.reset_buffer(flow)
Example #25
    def detect_mag(self, model_filepath, examples_dirpath, scratch_dirpath):
        """
        Compute the raw (uncalibrated) detection score for a model
        :param model_filepath: standard input argument
        :param examples_dirpath: standard input argument
        :param scratch_dirpath: standard input argument
        :return: detection score
        """

        self.compute_att(model_filepath, examples_dirpath, scratch_dirpath)
        attpath = os.path.join(scratch_dirpath, self.get_attfn())

        print("computing blur score on", model_filepath, 'with detector', utils.get_hash(self.detector_dict),
              'and attack', utils.get_hash(self.detector_dict['att_dict']))
        mag = blur_detector.get_blur_mag(attpath, sigma=2.0)

        return mag
Example #26
 def get_attfn(self):
     """
     Gets the path for the detectors' attack numpy file
     :return: path for the numpy file or None if there is no attack dict
     """
     if 'att_dict' in self.detector_dict:
         return utils.get_hash(self.detector_dict['att_dict']) + '_att.npy'
     else:
         return None
Example #27
 def flush_buffer(self, flow):
     # Caches whatever is leftover in the buffer. Then deletes buffer.
     # Used when FIN packet is seen.
     curr_buffer = self.flows_to_buffers[flow]
     cache = self.caches
     if (len(curr_buffer) > 0):
         hashed = utils.get_hash(curr_buffer)
         cache[hashed] = curr_buffer
     self.delete_buffer(flow)
Example #28
 def compute_hash(self, flow):
     # Returns hash for a flow. Returns None if not enough data to hash.
     curr_buffer = self.flows_to_buffers[flow]
     curr_ptr = self.buffer_pointers[flow]
     hashed = None
     if self.is_long_enough(curr_buffer):
         to_hash = curr_buffer[curr_ptr:curr_ptr + self.WINDOW_SIZE]
         hashed = utils.get_hash(to_hash)
     return hashed
Example #29
 def handle_flow_fin_client(self, flow):
     # Caches whatever is leftover in the buffer. Then closes the flow.
     curr_buffer = self.flows_to_buffers[flow]
     hashed = utils.get_hash(curr_buffer)
     if hashed is not None:
         curr_buffer = self.flows_to_buffers[flow]
         cache = self.caches
         cache[hashed] = curr_buffer
     self.close_flow(flow)
Example #30
    def get_topic_hash(self):
        topic_hash = self.cleaned_data.get('topic_hash', None)

        if topic_hash:
            return topic_hash

        return utils.get_hash((smart_bytes(self.cleaned_data['title']),
                               smart_bytes('category-{}'.format(
                                   self.cleaned_data['category'].pk))))
Example #31
    def get_one_passport(self, username, password):
        # look up the account by username and password
        try:
            passport = self.get(username=username, password=get_hash(password))
        except self.model.DoesNotExist:

            # the account does not exist
            passport = None
        return passport
Example #32
 def output(self):
     print("Block:", self.idx, "ver: ", self.ver, "time_stamp: ", self.ts,
           "fee", self.fee, "thresh", self.thresh)
     print("pre_hash: ", self.pre_hash[:8])
     print("addr: ", self.addr[:8])
     print("mr_root: ", self.mt_root[:8])
     print("nonce: ", self.nonce)
     print("hash: ", utils.get_hash(self)[:8])
     print("Fin.")
Example #33
 def add(self, name):
     '''add new file info to db'''
     hash_value = utils.get_hash(name)
     size = str(os.path.getsize(name))
     mtime = str(os.path.getmtime(name))
     with self.connect() as conn:
         cur = conn.cursor()
         cur.execute('INSERT INTO hash_table VALUES (?,?,?,?)',(hash_value, name, size, mtime))
         print 'Added: ',name.encode('utf-8')
         conn.commit()
Example #34
    def send_sized_msg(self, msg, send_hash=True, retry=5):
        msg = msg.encode()
        msg_len = len(msg)
        s_msg_len = len(str(msg_len))

        # The order is:
        # size_size, size, msg, hash
        self.sock.sendall(str(s_msg_len).zfill(4).encode())
        self.sock.sendall(str(msg_len).encode())
        self.sock.sendall(msg)
        self.sock.sendall(utils.get_hash(msg).encode())
Example #35
    def download(self, temp_ver, store_metadata=True):
        """
        Retrieve the given template version

        Args:
            temp_ver (TemplateVersion): template version to retrieve
            store_metadata (bool): If set to ``False``, will not refresh the
                local metadata with the retrieved one

        Returns:
            None
        """
        dest = self._prefixed(temp_ver.name)
        temp_dest = '%s.tmp' % dest

        with utils.LockFile(dest + '.lock'):
            # Image was downloaded while we were waiting
            if os.path.exists(dest):
                return

            temp_ver.download(temp_dest)
            if store_metadata:
                with open('%s.metadata' % dest, 'w') as f:
                    utils.json_dump(temp_ver.get_metadata(), f)

            sha1 = utils.get_hash(temp_dest)
            if temp_ver.get_hash() != sha1:
                raise RuntimeError(
                    'Image %s does not match the expected hash %s' % (
                        temp_ver.name,
                        sha1,
                    )
                )

            with open('%s.hash' % dest, 'w') as f:
                f.write(sha1)

            with log_utils.LogTask('Convert image', logger=LOGGER):
                result = utils.run_command(
                    [
                        'qemu-img',
                        'convert',
                        '-O',
                        'raw',
                        temp_dest,
                        dest,
                    ],
                )

                os.unlink(temp_dest)
                if result:
                    raise RuntimeError(result.err)
Example #36
def login():
    if request.method == 'POST':
        mail = request.form['email']
        password = request.form['password']
        q = None
        try:
            q = User.get(User.mail == mail)
        except DoesNotExist:
            pass

        if q and get_hash(password) == q.password:
            session['user_mail'] = mail
            return redirect(url_for('htm_settings'))

    return redirect(url_for('htm_main'))
Example #37
    def create(username, password, is_staff=False):
        """Create user"""

        assert username and password

        if not db.users.find_one({'username': username}):
            fields = {
                'username': username,
                'password': utils.get_hash(password),
                'is_staff': is_staff
            }

            db.users.insert(fields)
        else:
            raise UsernameExists
Example #38
    def logon(username, password):
        """Try to log on user using specified username and password"""

        assert username and password

        fields = {
            'username': username,
            'password': utils.get_hash(password)
        }

        user = db.users.find_one(fields)

        if user is not None:
            return User(user['username'])
        else:
            raise UserNotFound
Example #39
 def __init__(
     self,
     subred,
     ragent=None,
     metadata_dir="./.crawler-data/subreddit-data/",
     ignored_domains=["reddit.com", "imgur.com", "youtube.com"],
     ignored_content_types=["image", "application/pdf"],
 ):
     self.subred = subred
     self.ragent = ragent
     self.metadata_dir = metadata_dir
     self.ignored_domains = set(ignored_domains)
     dname = self.metadata_dir + "/" + utils.get_hash(self.subred)
     if not os.path.isdir(dname):
         os.makedirs(dname)
     self.fname = dname + "/data"
     self.ignored_content_types = ignored_content_types
Example #40
    def _create_graph_head(self, producer, program_file, machine_model):
        graph = ET.Element('graph')
        graph.set('edgedefault', 'directed')

        # Create data elements that describe witness
        graph.append(self._create_data_element('witness-type', 'violation_witness'))
        graph.append(self._create_data_element('sourcecodelang', 'C'))
        graph.append(self._create_data_element('producer', producer))
        graph.append(self._create_data_element('specification', 'CHECK( init(main()), LTL(G ! call(__VERIFIER_error())) )'))
        #graph.append(self._create_data_element('testfile', test_file))
        #timestamp = utils.get_time()
        #graph.append(self._create_data_element('creationtime', timestamp))
        graph.append(self._create_data_element('programfile', program_file))
        filehash = utils.get_hash(program_file)
        graph.append(self._create_data_element('programhash', filehash))
        graph.append(self._create_data_element('architecture', machine_model.witness_key))

        return graph
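_create_data_element is not shown; in the GraphML witness format, each entry is a <data key="...">value</data> element. A sketch of what the helper might look like (an assumption, since it is not in the snippet):

def _create_data_element(self, key, value):
    # Hypothetical sketch: build a GraphML <data key="...">value</data> node.
    data = ET.Element('data')
    data.set('key', key)
    data.text = str(value)
    return data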
Example #41
    def recv_sized_msg(self):
        # Wire format (mirrors send_sized_msg): a 4-digit field carries the
        # size of the size field, then come the size, the message and its hash.
        msg_size_size = self.recv_int()
        self.logger.debug("Size of msg_size: {}".format(msg_size_size))
        msg_size = self.recv_int(num_chars=msg_size_size)
        self.logger.debug("msg_size: {}".format(msg_size))

        msg = self.recv_string(msg_size)
        actual_hash = self.recv_string(load_config.get_hash_len(), cust_label="hash").decode()
        computed_hash = utils.get_hash(msg)

        if actual_hash == computed_hash:
            self.logger.debug("Actual and computed hashes match (both {})".format(actual_hash))
        else:
            self.logger.error("Hash mismatch! Actual: {}, computed: {}".format(actual_hash, computed_hash))

        return msg
Example #42
        def __do_iteration(data: List[str]) -> str:
            new_data = []
            len_data = len(data)

            for i in range(0, len_data, 2):
                row = data[i]

                if (i + 1) == len_data:
                    row += data[i]
                else:
                    row += data[i + 1]

                new_data.append(get_hash(row))

            if len(new_data) == 1:
                return new_data[0]

            return __do_iteration(new_data)
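__do_iteration pairwise-hashes one level of the tree and recurses, pairing the last element with itself on odd-length levels, until a single hash (the Merkle root) remains. A usage sketch, assuming get_hash maps a string to a hex digest and the helper were exposed at module level:

leaf_hashes = [get_hash(tx) for tx in ["tx-a", "tx-b", "tx-c"]]
merkle_root = __do_iteration(leaf_hashes)  # "tx-c" is paired with itself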
Example #43
 def fetch(self, url):
     fname = self.storage_dir + "/" + utils.get_hash(url)
     if not os.path.isfile(fname):
         try_cnt = 0
         while try_cnt < self.retry:
             try:
                 logger.debug("Fetching: %s" % url)
                 content = urllib2.urlopen(url).read()
                 f = open(fname, "w")
                 f.write(content)
                 f.close()
                 break
             except urllib2.URLError, e:
                 print "Error while fetching URL: %s. Error: %s" % (url, str(e))
                 try_cnt = try_cnt + 1
                 time.sleep(self.retry_wait)
         if try_cnt >= self.retry:
             print "Failed to fetch URL: %s" % (url)
             return None
     return fname
Example #44
 def classify(self, url, content):
     words = utils.read_cached(self.cache_dir + "/" + utils.get_hash(url), self.preprocessor.process, content)
     argmax = {"label":None, "prob":None}
     probs = {}
     for label,label_data in self.labels_metadata.iteritems():
         prob_sum=0.0
         for word in words:
             tot_doc_count = len(label_data["doc_ids"])
             if word in label_data["word_doc_counts"]:
                 prob = math.log(float(label_data["word_doc_counts"][word])/tot_doc_count)
                 logger.debug("Found word: %s. Prob %f. Count in the %s data: %d" % (word,prob, label, label_data["word_doc_counts"][word]))
                 prob_sum = prob_sum + prob
             else:
                 prob_sum = prob_sum + math.log(self.missing_prob)
         probs[label] = prob_sum
         if not argmax["prob"] or prob_sum > argmax["prob"]:
             argmax["prob"] = prob_sum
             argmax["label"] = label
     logger.debug("URl: %s, debuginfo: %s " % (url,str(probs)))
     return argmax
Example #45
def recv_chunk(conn, num_retries):
    tries_left = num_retries
    while tries_left > 0:
        try:
            # Assume that the previous send failed, and skip over as much of
            # the stream as is required.
            consume_till_next(Constant.BEG_CHUNK, conn)

            match_next(Constant.BEG_SIZE, conn)
            size = recv_uint(conn)
            match_next(Constant.END_SIZE, conn)

            match_next(Constant.BEG_DATA, conn)
            data = conn.recv(size)
            match_next(Constant.END_DATA, conn)

            match_next(Constant.BEG_HASH, conn)
            actual_hash = conn.recv(settings.HASH_LEN)
            match_next(Constant.END_HASH, conn)

            match_next(Constant.END_CHUNK, conn)
            calc_hash = utils.get_hash(data)

            if calc_hash == actual_hash:

                logger.debug("Chunk received, hashes match (both {})"
                             .format(pretty_print(calc_hash)))
                send_info(Constant.INFO_RECV_OK, conn)
                return data
            else:
                send_info(Constant.INFO_HASH_MISMATCH, conn)
                raise LoggedException("Hash mismatch!")
        except LoggedException as le:
            tries_left -= 1
            le.log()
Example #46
 def train(self, source_url, content, label):
     docid = utils.get_hash(source_url)
     words = utils.read_cached(self.cache_dir + "/" + docid, self.preprocessor.process, content)
     self.__train_with_words(docid, words, label)
Example #47
 def get_header_hash(self) -> str:
     return get_hash(self.__previous_block_hash + self.__merkle_root)
Example #48
 def get_header_hash(self) -> str:
     return get_hash(self.chain_seed)