Example #1
def test_process_comment_timezone(database, reddit):
    user = database.get_or_add_user(user_name="Watchful1")
    user.timezone = "America/Los_Angeles"

    username = "******"
    comment_id = utils.random_id()
    thread_id = utils.random_id()
    created = utils.datetime_now()
    comment = reddit_test.RedditObject(
        body=f"{static.TRIGGER}! 1 day",
        author=username,
        created=created,
        id=comment_id,
        link_id="t3_" + thread_id,
        permalink=f"/r/test/{thread_id}/_/{comment_id}/",
        subreddit="test")
    reddit.add_comment(comment)

    comments.process_comment(comment.get_pushshift_dict(), reddit, database)
    result = comment.get_first_child().body

    assert "default time zone" in result
    assert "`America/Los_Angeles`" in result

    reminders = database.get_all_user_reminders(username)
    assert reminders[0].target_date == created + timedelta(hours=24)
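None of the excerpts on this page include the `random_id` helper itself, so the exact format of the generated ids is not shown here. As a rough, hypothetical sketch (an assumption, not code from any of these projects), such a helper usually amounts to sampling from an alphanumeric alphabet:

import random
import string

def random_id(length=6):
    # Hypothetical helper: return a random lowercase alphanumeric id of the
    # requested length, e.g. "k3x9q1". Not taken from any project on this page.
    alphabet = string.ascii_lowercase + string.digits
    return "".join(random.choices(alphabet, k=length))

Several of the examples below pass an explicit length (`utils.random_id(12)`, `random_id(req.app.config.id_length)`), which is why the sketch takes a `length` parameter.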
Example #2
def test_process_comment(database, reddit):
    created = utils.datetime_now()
    username = "******"
    comment_id = utils.random_id()
    thread_id = utils.random_id()
    comment = reddit_test.RedditObject(
        body=f"{static.TRIGGER}! 1 day",
        author=username,
        created=created,
        id=comment_id,
        link_id="t3_" + thread_id,
        permalink=f"/r/test/{thread_id}/_/{comment_id}/",
        subreddit="test")

    reddit.add_comment(comment)

    comments.process_comment(comment.get_pushshift_dict(), reddit, database)
    result = comment.get_first_child().body

    assert "CLICK THIS LINK" in result

    reminders = database.get_all_user_reminders(username)
    assert len(reminders) == 1
    assert reminders[0].user.name == username
    assert reminders[0].message is None
    assert reminders[0].source == utils.reddit_link(comment.permalink)
    assert reminders[0].requested_date == created
    assert reminders[0].target_date == created + timedelta(hours=24)
    assert reminders[0].id is not None
    assert reminders[0].recurrence is None
Example #3
def test_comment_in_thread(database, reddit):
    comment_id = utils.random_id()
    thread_id = utils.random_id()
    comment = reddit_test.RedditObject(
        body=f"{static.TRIGGER}! 1 day",
        author="Watchful1",
        created=utils.datetime_now(),
        id=comment_id,
        link_id="t3_" + thread_id,
        permalink=f"/r/test/{thread_id}/_/{comment_id}/",
        subreddit="test")
    reddit.add_comment(comment)

    comments.process_comment(comment.get_pushshift_dict(), reddit, database)

    comment_id_2 = utils.random_id()
    comment_2 = reddit_test.RedditObject(
        body=f"{static.TRIGGER}! 1 day",
        author="Watchful1",
        created=utils.datetime_now(),
        id=comment_id_2,
        link_id="t3_" + thread_id,
        permalink=f"/r/test/{thread_id}/_/{comment_id_2}/")
    reddit.add_comment(comment_2)

    comments.process_comment(comment_2.get_pushshift_dict(), reddit, database)

    assert len(comment_2.children) == 0
    assert len(reddit.sent_messages) == 1
    assert reddit.sent_messages[0].author.name == static.ACCOUNT_NAME
    assert "I've already replied to another comment in this thread" in reddit.sent_messages[
        0].body
Example #4
    def create_job(self):
        table = self.dynamo_connector.Table(self.db_table)
        try:
            lambdaArn = self.body['lambdaArn']
            time = self.body['time']
        except KeyError as exc:
            raise HTTPError(status=400,
                            message='Missing required body fields: %s' % exc)

        try:
            parse(time)
        except ValueError as exc:
            raise HTTPError(status=400,
                            message='Invalid date format: %s' % exc)

        db_item = {
            'jobid': utils.random_id(),
            'lambdaArn': lambdaArn,
            'time': time,
            'created_at': str(datetime.now()),
            'modified_at': str(datetime.now())
        }

        try:
            table.put_item(
                Item=db_item
            )

        except Exception as exc:
            warning_string = "Error creating new At Job DB entry {}"
            LOG.warning(warning_string.format(lambdaArn), exc_info=exc)
            raise

        return db_item
Example #5
    def __init__(self,
                 body=None,
                 author=None,
                 created=None,
                 id=None,
                 permalink=None,
                 link_id=None,
                 prefix="t4",
                 subreddit=None):
        self.body = body
        if isinstance(author, User):
            self.author = author
        else:
            self.author = User(author)
        if id is None:
            self.id = utils.random_id()
        else:
            self.id = id
        self.fullname = f"{prefix}_{self.id}"
        if created is None:
            self.created_utc = utils.datetime_now().timestamp()
        else:
            self.created_utc = created.timestamp()
        self.permalink = permalink
        self.link_id = link_id
        self.subreddit = subreddit

        self.parent = None
        self.children = []
Example #6
    def _add_random_state(self):
        """
        Creates a random new state
        """

        new_id = utils.random_id()
        state_ids = list(self.states.keys()) + [new_id]
        look = random.choice([True, False])
        if look:
            look_high_action = random.choice(['cooperate', 'defect'])
            look_low_action = random.choice(['cooperate', 'defect'])
        else:
            look_high_action = look_low_action = random.choice(['cooperate', 'defect'])


        state_kwargs = {
            'strategy_set': {
                'next': random.choice(state_ids),
            },
            'action_set': {
                'look': look,
                'high': look_high_action,
                'low': look_low_action,
            },
            'id': new_id,
        }

        self.states[new_id] = State(**state_kwargs)

        return new_id
Example #7
def test_add_recurring_reminder(database, reddit):
    created = utils.datetime_now()
    username = "******"
    keyword = "reminderstring"
    id = utils.random_id()
    message = reddit_test.RedditObject(
        body=f"[{keyword}]\n{static.TRIGGER_RECURRING}! 1 day",
        author=username,
        created=created,
        id=id)

    messages.process_message(message, reddit, database)
    result = message.get_first_child().body

    assert "reminderstring" in result
    assert "and then every `1 day`" in result

    assert "This time has already passed" not in result
    assert "Could not find a time in message" not in result
    assert "Could not parse date" not in result
    assert "Can't use a default for a recurring reminder" not in result
    assert "I got the same date rather than one after it" not in result
    assert "I got a date before that rather than one after it" not in result

    reminders = database.get_all_user_reminders(username)
    assert len(reminders) == 1
    assert reminders[0].user.name == username
    assert reminders[0].message == keyword
    assert reminders[0].source == utils.message_link(id)
    assert reminders[0].requested_date == created
    assert reminders[0].target_date == created + timedelta(hours=24)
    assert reminders[0].id is not None
    assert reminders[0].recurrence == "1 day"
Example #8
def test_add_reminder(database, reddit):
    created = utils.datetime_now()
    username = "******"
    keyword = "reminderstring"
    id = utils.random_id()
    message = reddit_test.RedditObject(
        body=f"[{keyword}]\n{static.TRIGGER}! 1 day",
        author=username,
        created=created,
        id=id)

    messages.process_message(message, reddit, database)
    result = message.get_first_child().body

    assert "reminderstring" in result

    assert "This time has already passed" not in result
    assert "Could not find a time in message" not in result
    assert "Could not parse date" not in result

    reminders = database.get_user_reminders(username)
    assert len(reminders) == 1
    assert reminders[0].user == username
    assert reminders[0].message == keyword
    assert reminders[0].source == utils.message_link(id)
    assert reminders[0].requested_date == created
    assert reminders[0].target_date == created + timedelta(hours=24)
    assert reminders[0].db_id is not None
Example #9
    def create_job(self):
        table = self.dynamo_connector.Table(self.db_table)
        try:
            lambdaArn = self.body['lambdaArn']
            time = self.body['time']
        except KeyError as exc:
            raise HTTPError(status=400,
                            message='Missing required body fields: %s' % exc)

        try:
            parse(time)
        except ValueError as exc:
            raise HTTPError(status=400,
                            message='Invalid date format: %s' % exc)

        db_item = {
            'jobid': utils.random_id(),
            'lambdaArn': lambdaArn,
            'time': time,
            'created_at': str(datetime.now()),
            'modified_at': str(datetime.now())
        }

        try:
            table.put_item(Item=db_item)

        except Exception as exc:
            warning_string = "Error creating new At Job DB entry {}"
            LOG.warning(warning_string.format(lambdaArn), exc_info=exc)
            raise

        return db_item
Example #10
def test_commenting_deleted(database, reddit):
    comment_id = utils.random_id()
    thread_id = utils.random_id()
    comment = reddit_test.RedditObject(
        body=f"{static.TRIGGER}! 1 day",
        author="Watchful1",
        created=utils.datetime_now(),
        id=comment_id,
        link_id="t3_" + thread_id,
        permalink=f"/r/test/{thread_id}/_/{comment_id}/",
        subreddit="test")
    comments.process_comment(comment.get_pushshift_dict(), reddit, database)

    assert len(comment.children) == 0
    assert len(reddit.sent_messages) == 1
    assert "it was deleted before I could get to it" in reddit.sent_messages[
        0].body
Example #11
    def eval_gen(self, xs, ys):
        """
        Builds the inference graph and runs iterative decoding for evaluation.
        :param xs: tuple of (x: (N, T1), seqlens, sents1)
        :param ys: tuple of (decoder_inputs, y, y_seqlen, sents2)
        :return:
            y_hat: (N, T2), merged summaries, logits
        """
        x, seqlens, sents1 = xs
        decoder_inputs, y, y_seqlen, sents2 = ys

        decoder_inputs = tf.ones(
            (tf.shape(xs[0])[0], 1), tf.int32) * self.token2idx["<s>"]
        ys = (decoder_inputs, y, y_seqlen, sents2)

        logging.info("Inference graph is being built. Please be patient.")

        for _ in tqdm(range(3)):
            memory, sents1, src_masks = self.encode(xs, False)
            # memory_ = tf.to_int32(tf.argmax(memory, axis=-1))
            # memory_ = random_id(memory)

            logits, y_hat, y, sents2 = self.decode(ys, memory, src_masks,
                                                   False)
            if tf.reduce_sum(y_hat, 1) == self.token2idx["<pad>"]: break

            # concat input
            _x = tf.concat((x, random_id(logits)), 1)
            xs = (_x, seqlens, sents1)

            _decoder_inputs = tf.concat((decoder_inputs, random_id(logits)), 1)
            ys = (_decoder_inputs, y, y_seqlen, sents2)

        # monitor a random sample
        n = tf.random_uniform((), 0, tf.shape(y_hat)[0] - 1, tf.int32)
        sent1 = sents1[n]
        pred = convert_idx_to_token_tensor(y_hat[n], self.idx2token)
        sent2 = sents2[n]

        tf.summary.text("sent1", sent1)
        tf.summary.text("pred", pred)
        tf.summary.text("sent2", sent2)
        summaries = tf.summary.merge_all()

        return y_hat, summaries, logits
Example #12
async def process_save(message: types.Message, state: FSMContext):
    if not message.photo:
        await message.answer('Отправьте валидное фото')  # "Please send a valid photo"
        return  # bail out, otherwise message.photo[-1] below would raise IndexError
    async with state.proxy() as data:
        db.add_fake(random_id(), data['name'], data['age'], data['gender'],
                    data['city'], data['occupation'], data['about'],
                    message.photo[-1].file_id)
        await state.finish()
        # "Fake profile added. Press /fake to add another"
        await message.answer('Фейк добавлен. Нажмите /fake чтобы добавить ещё')
Example #13
def test_commenting_banned(database, reddit):
    reddit.ban_subreddit("test")

    comment_id = utils.random_id()
    thread_id = utils.random_id()
    comment = reddit_test.RedditObject(
        body=f"{static.TRIGGER}! 1 day",
        author="Watchful1",
        created=utils.datetime_now(),
        id=comment_id,
        link_id="t3_" + thread_id,
        permalink=f"/r/test/{thread_id}/_/{comment_id}/",
        subreddit="test")
    reddit.add_comment(comment)
    comments.process_comment(comment.get_pushshift_dict(), reddit, database)

    assert len(comment.children) == 0
    assert len(reddit.sent_messages) == 1
    assert "I'm not allowed to reply in this subreddit" in reddit.sent_messages[
        0].body
Example #14
def test_commenting_locked(database, reddit):
    thread_id = utils.random_id()

    reddit.lock_thread(thread_id)

    comment_id = utils.random_id()
    comment = reddit_test.RedditObject(
        body=f"{static.TRIGGER}! 1 day",
        author="Watchful1",
        created=utils.datetime_now(),
        id=comment_id,
        link_id="t3_" + thread_id,
        permalink=f"/r/test/{thread_id}/_/{comment_id}/",
        subreddit="test")
    reddit.add_comment(comment)
    comments.process_comment(comment.get_pushshift_dict(), reddit, database)

    assert len(comment.children) == 0
    assert len(reddit.sent_messages) == 1
    assert "the thread is locked" in reddit.sent_messages[0].body
Example #15
def save_docs(idxs, docs, params, name):
    '''Save the idxs and docs with pickle.
       You can load them again by calling get_docs with the same name.'''
    docs = np.array(docs).astype(int)
    idxs = np.array(idxs).astype(int)
    # only keep the parameters relevant to the preprocessing
    params = filter_dict(params, prec.default_params.keys())
    params["docs_id"] = random_id()
    file_path = get_docs_path(name)
    with open(file_path, "wb") as file:
        pickle.dump((idxs, docs, params), file)
Example #16
File: db.py Project: sjahu/imiji
    def gen_unique_id(self):
        """
        Generate a unique alphanumeric id for the image

        The length of the id is controlled by self.id_size, set during class init by ID_SIZE config property.
        """
        for _ in range(5):
            id = utils.random_id(self.id_size)
            if self.images.find_one({"id": id}) is None and \
               self.galleries.find_one({"id": id}) is None:
                return id
        raise Exception("Failed to generate unique ID!")
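The method above is a retry-until-unique pattern: draw a random id, check both MongoDB collections, and give up after five attempts. As a rough generic sketch (hypothetical code, not from the imiji project; `exists` stands in for the two `find_one` checks):

import random
import string

def unique_random_id(exists, size=6, attempts=5):
    # Hypothetical generalization of gen_unique_id above: `exists(candidate)`
    # should return True if the id is already taken.
    alphabet = string.ascii_letters + string.digits
    for _ in range(attempts):
        candidate = "".join(random.choices(alphabet, k=size))
        if not exists(candidate):
            return candidate
    raise RuntimeError(f"Failed to generate a unique ID after {attempts} attempts")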
Example #17
async def _shorten(req):
    data = await req.post()
    url = data.get("input")
    if url is None:
        raise web.HTTPBadRequest()

    if not re.match(r"^https?:\/\/.+\..+$", url):
        raise web.HTTPBadRequest()

    url_id = random_id(req.app.config.id_length)
    await req.app.db.urls.insert_one({"_id": url_id, "url": url})

    return web.Response(text=url_id)
Example #18
    def Initialization(self,
                       request: environment_pb2.InitializationRequest,
                       context
                       ) -> environment_pb2.InitializationResponse:

        environment_parameters = from_bytes_dict(request.environment_parameters)
        environment_id = random_id()
        environment = self.environment_class(**environment_parameters)
        with self.environments_lock:
            self.environments[environment_id] = environment

        print(f'started environment with id={environment_id}, parameters={environment_parameters}')
        return environment_pb2.InitializationResponse(success=True, environment_id=environment_id)
Example #19
def test_process_cakeday_comment(database, reddit):
    username = "******"
    user = reddit_test.User(
        username,
        utils.parse_datetime_string("2015-05-05 15:25:17").timestamp())
    reddit.add_user(user)
    created = utils.parse_datetime_string("2019-01-05 11:00:00")
    comment_id = utils.random_id()
    thread_id = utils.random_id()
    comment = reddit_test.RedditObject(
        body=f"{static.TRIGGER_CAKEDAY}!",
        author=username,
        created=created,
        id=comment_id,
        link_id="t3_" + thread_id,
        permalink=f"/r/test/{thread_id}/_/{comment_id}/",
        subreddit="test")

    reddit.add_comment(comment)

    utils.debug_time = utils.parse_datetime_string("2019-01-05 12:00:00")
    comments.process_comment(comment.get_pushshift_dict(), reddit, database)
    result = comment.get_first_child().body

    assert "to remind you of your cakeday" in result

    reminders = database.get_all_user_reminders(username)
    assert len(reminders) == 1
    assert reminders[0].user.name == username
    assert reminders[0].source == utils.reddit_link(comment.permalink)
    assert reminders[0].requested_date == created
    assert reminders[0].target_date == utils.parse_datetime_string(
        "2019-05-05 15:25:17")
    assert reminders[0].id is not None
    assert reminders[0].recurrence == "1 year"
    assert reminders[0].message == "Happy Cakeday!"
Example #20
async def _shorten(req):
    data = await req.post()
    text = data.get("input")
    filename = data.get("filename")
    if text is None:
        raise web.HTTPBadRequest()

    text_id = random_id(req.app.config.id_length)
    await req.app.db.text.insert_one({
        "_id": text_id,
        "text": text,
        "filename": filename
    })

    return web.Response(text=text_id)
Example #21
File: main.py Project: tbsd/hehmda
def registration():
    # Read the login, password, and password confirmation
    data = request.get_json(force=True)
    new_login = data['new_login']
    new_password = data['new_password']
    new_repeat_password = data['new_repeat_password']
    new_nickname = data['new_nickname']
    # Check that the login is not already taken and that the two passwords match
    if users.find({"login": new_login}).count() == 0:
        new_id = random_id()
        while users.find_one({"id": new_id}):
            new_id = random_id()
        token = random_string()
        response = make_response()
        if new_password == new_repeat_password:
            password_hash = hashlib.md5(
                new_password.strip().encode('utf-8'))
            users.insert_one({
                "id": new_id,
                "login": new_login,
                "password_hash": password_hash.hexdigest(),
                "nickname": new_nickname,
                "chat_list": [],
                "contacts": [],
                "session": token
            })
            response.set_cookie('session', token)
            return json_util.dumps({'session': token})
        return json_util.dumps({
            'code': 400,
            'status_msg': 'Пароли не совпадают.'  # "Passwords do not match."
        })
    return json_util.dumps({
        'code': 400,
        'status_msg': 'Такой логин уже занят.'  # "This login is already taken."
    })
Example #22
def create_member(name: str, photo: str, parent: str) -> str:
    command = "INSERT INTO arbre VALUES (?, ?, ?, ?)"
    # create member
    id = random_id()  # gen ID for the new member
    cursor.execute(command, (
        id,
        name,
        photo,
        parent,
    ))
    try:
        conn.commit()  # commit changes to db
        member = {"id": id, "name": name, "photo": photo, "parent": parent}
        return member
    except Exception:
        return "Member creation failed"
Example #23
    def join(self, known_node):
        """
        Run by a node when it wants to join the network.

        http://xlattice.sourceforge.net/components/protocol/kademlia/specs.html#join
        """

        # When a new node is created, ping some known_node
        logger.info("Pinging %r", known_node)

        try:
            yield from self.ping(known_node, self.identifier)
        except socket.timeout:
            logger.warning("Could not ping %r", known_node)
            return

        # Try to find all peers close to myself
        # (this'll update my routing table)
        yield from self.lookup_node(self.identifier)

        # Pinging all neighbors will update their routing tables
        logger.info("Pinging all neighbors")
        yield from self.ping_all_neighbors()

        try:
            # Check if my public key is already in the network
            yield from self.get(self.identifier)
        except KeyError:
            # Store my information onto the network
            # (allowing others to find me)
            yield from self.put(self.identifier,
                                (self.socket_addr, self.pub_key))

            logger.info("Sending my genesis transaction %r",
                        self.ledger.genesis_tx)
            yield from self.add_tx_to_ledger(
                known_node, self.identifier,
                self.ledger.genesis_tx)  # add it to the ledger of bootstrapper

            ledger_bootstrap = yield from self.get_ledger(
                known_node, self.identifier)  # get the bootstrapper's ledger
            logger.info("Got Ledger %r", ledger_bootstrap)
            self.ledger.record = ledger_bootstrap.record  # replace my ledger with that of bootstrappers

            yield from self.broadcast(
                random_id(), 'add_tx_to_ledger', self.identifier, self.ledger.
                genesis_tx)  # broadcast my genesis transaction to everyone
Example #24
    def __init__(self, initial_strategy=None, states=None, initial_state=None):
        if states is None:
            self.states = {}
        else:
            self.states = states

        if initial_state is None:
            self.initial_state = utils.random_id()
        else:
            self.initial_state = initial_state
        
        self.state = self.initial_state
        self.type = 2
        self.payoffs = 0

        if initial_strategy:
            self._set_initial(initial_strategy)
Example #25
    def __init__(self, game):
        self.game = game
        self.x = 3
        self.y = 21
        self.rotation = 0

        self.shape_cache = {}
        self.shape_cache[0] = self.shape
        for i in reversed(range(3)):
            self.shape_cache[i + 1] = rotate_cw(self.shape_cache[(i + 2) % 4])

        self.last_successful_movement = None
        self.last_rotation_info = None
        self.phase = 'drop'
        self.locking_start = 0
        self.tick = 0
        self.move_counter = 0
        self.id = random_id()
Example #26
    def _add_random_state(self):
        """
        Creates a random new state
        """

        action = random.choice(['continue', 'exit'])
        num_states = len(self.states)
        new_id = utils.random_id()
        ids = list(self.states.keys()) + [new_id]

        state_kwargs = {
            'strategy_set': {
                'looked': {
                    'low': {
                        'defect': random.choice(ids),
                        'cooperate': random.choice(ids),
                    },
                    'high': {
                        'defect': random.choice(ids),
                        'cooperate': random.choice(ids),
                    },
                },
                'nolook': {
                    'low': {
                        'defect': random.choice(ids),
                        'cooperate': random.choice(ids),
                    },
                    'high': {
                        'defect': random.choice(ids),
                        'cooperate': random.choice(ids),
                    },
                },
            },
            'action_set': {
                'action': action,
            },
            'id': new_id,
        }

        self.states[new_id] = State(**state_kwargs)

        return new_id
Example #27
    def __init__(self, alpha=3, k=20, identifier=None):

        # Initialize DatagramRPCProtocol
        super(KademliaNode, self).__init__()

        # TODO: Make the node id a function of node's public key
        # Just like Bitcoin wallet IDs use HASH160
        if identifier is None:
            identifier = random_id()

        self.identifier = identifier

        # Constants from the kademlia protocol
        self.k = k
        self.alpha = alpha

        # Each node has their own dictionary
        self.storage = {}

        # The k-bucket based kademlia routing table
        self.routing_table = RoutingTable(self.identifier, k=self.k)
Example #28
def test_add_reminder_no_date(database, reddit):
    created = utils.datetime_now()
    username = "******"
    id = utils.random_id()
    message = reddit_test.RedditObject(
        body=f"{static.TRIGGER}! \"error test\"",
        author=username,
        created=created,
        id=id)

    messages.process_message(message, reddit, database)
    result = message.get_first_child().body

    assert "This time has already passed" not in result
    assert "Could not find a time in message, defaulting to one day" in result

    reminders = database.get_all_user_reminders(username)
    assert len(reminders) == 1
    assert reminders[0].user.name == username
    assert reminders[0].message == "error test"
    assert reminders[0].source == utils.message_link(id)
    assert reminders[0].requested_date == created
    assert reminders[0].target_date == created + timedelta(hours=24)
    assert reminders[0].id is not None
Example #29
    def __init__(self, torrent):
        self.torrent = torrent
        self.peer_id = "-DE13F0-" + utils.random_id(12)
        self.download_key = utils.random_id(12)
        self.port = 12394
Example #30
def test_update_incorrect_comments(database, reddit):
    comment_id1 = utils.random_id()
    thread_id1 = utils.random_id()
    comment1 = reddit_test.RedditObject(
        body=f"{static.TRIGGER}! 1 day",
        author="Watchful1",
        created=utils.datetime_now(),
        id=comment_id1,
        link_id="t3_" + thread_id1,
        permalink=f"/r/test/{thread_id1}/_/{comment_id1}/",
        subreddit="test")
    reddit.add_comment(comment1)
    comments.process_comment(comment1.get_pushshift_dict(), reddit, database)

    comment_id2 = utils.random_id()
    thread_id2 = utils.random_id()
    comment2 = reddit_test.RedditObject(
        body=f"{static.TRIGGER}! 1 day",
        author="Watchful1",
        created=utils.datetime_now(),
        id=comment_id2,
        link_id="t3_" + thread_id2,
        permalink=f"/r/test/{thread_id2}/_/{comment_id2}/",
        subreddit="test")
    reddit.add_comment(comment2)
    comments.process_comment(comment2.get_pushshift_dict(), reddit, database)

    comment_id3 = utils.random_id()
    thread_id3 = utils.random_id()
    comment3 = reddit_test.RedditObject(
        body=f"{static.TRIGGER}! 1 day",
        author="Watchful1",
        created=utils.datetime_now(),
        id=comment_id3,
        link_id="t3_" + thread_id3,
        permalink=f"/r/test/{thread_id3}/_/{comment_id3}/",
        subreddit="test")
    reddit.add_comment(comment3)
    comments.process_comment(comment3.get_pushshift_dict(), reddit, database)

    reminders = [
        Reminder(
            source="https://www.reddit.com/message/messages/XXXXX",
            message=utils.reddit_link(comment1.permalink),
            user=database.get_or_add_user("Watchful1"),
            requested_date=utils.parse_datetime_string("2019-01-01 04:00:00"),
            target_date=utils.parse_datetime_string("2019-01-05 05:00:00")),
        Reminder(
            source="https://www.reddit.com/message/messages/XXXXX",
            message=utils.reddit_link(comment1.permalink),
            user=database.get_or_add_user("Watchful1"),
            requested_date=utils.parse_datetime_string("2019-01-01 04:00:00"),
            target_date=utils.parse_datetime_string("2019-01-06 05:00:00")),
        Reminder(
            source="https://www.reddit.com/message/messages/XXXXX",
            message=utils.reddit_link(comment1.permalink),
            user=database.get_or_add_user("Watchful1"),
            requested_date=utils.parse_datetime_string("2019-01-01 04:00:00"),
            target_date=utils.parse_datetime_string("2019-01-07 05:00:00")),
        Reminder(
            source="https://www.reddit.com/message/messages/XXXXX",
            message=utils.reddit_link(comment2.permalink),
            user=database.get_or_add_user("Watchful1"),
            requested_date=utils.parse_datetime_string("2019-01-01 04:00:00"),
            target_date=utils.parse_datetime_string("2019-01-08 05:00:00")),
        Reminder(
            source="https://www.reddit.com/message/messages/XXXXX",
            message=utils.reddit_link(comment2.permalink),
            user=database.get_or_add_user("Watchful1"),
            requested_date=utils.parse_datetime_string("2019-01-01 04:00:00"),
            target_date=utils.parse_datetime_string("2019-01-09 05:00:00"))
    ]
    for reminder in reminders:
        database.add_reminder(reminder)

    comments.update_comments(reddit, database)

    assert "3 OTHERS CLICKED THIS LINK" in reddit.get_comment(
        comment_id1).get_first_child().body
    assert "2 OTHERS CLICKED THIS LINK" in reddit.get_comment(
        comment_id2).get_first_child().body
    assert "CLICK THIS LINK" in reddit.get_comment(
        comment_id3).get_first_child().body
Example #31
def two_phase_protocol(node):
    logger = logging.getLogger('node')
    while True:
        if (node.isbusy[0]):  # if involved in some transaction
            txs = node.isbusy[1]  # get that transaction

            if txs[0].sender == node.identifier:  # if current node is the sender
                """Phase 1"""
                print("I am sender")

                digital_signature = sign_msg(node.pvt_key, repr(txs))
                logger.info("Generated Digital Signature %r",
                            digital_signature)
                senders_pub_key = (yield from node.get(txs[0].sender))[1]

                receiver_sock = (yield from node.get(txs[0].receiver))[0]
                receiver_status = yield from node.become_receiver(
                    receiver_sock, node.identifier, txs)

                witness_sock = (yield from node.get(txs[0].witness))[0]
                witness_status = yield from node.become_witness(
                    witness_sock, node.identifier, txs)

                if receiver_status == "busy" or witness_status == "busy":
                    logger.info("Phase 1 failed, aborting transaction!")

                    # Send abort to both receiver & witness
                    receiver_abort = yield from node.abort_tx(
                        receiver_sock, node.identifier, txs)
                    witness_abort = yield from node.abort_tx(
                        witness_sock, node.identifier, txs)

                    # If both of them have aborted then I'll abort too
                    if (witness_abort == "aborted"
                            and receiver_abort == "aborted"):
                        yield from node.abort_tx(node.socket_addr,
                                                 node.identifier, txs)
                else:
                    """ Phase 2 """
                    logger.info("Phase 1 complete - Now entering Phase 2")

                    # Send commit to both receiver & witness
                    receiver_commit = yield from node.commit_tx(
                        receiver_sock, node.identifier, txs, digital_signature,
                        senders_pub_key)
                    witness_commit = yield from node.commit_tx(
                        witness_sock, node.identifier, txs, digital_signature,
                        senders_pub_key)

                    if (witness_commit == "committed"
                            and receiver_commit == "committed"):
                        logger.info("Phase 2 complete")
                        yield from node.commit_tx(
                            node.socket_addr, node.identifier, txs,
                            digital_signature,
                            senders_pub_key)  # Commit transaction
                        yield from node.broadcast(random_id(), 'commit_tx',
                                                  node.identifier, txs,
                                                  digital_signature,
                                                  senders_pub_key)
                        node.isbusy = (False, None)

                    else:
                        receiver_abort = yield from node.abort_tx(
                            receiver_sock, node.identifier,
                            txs)  # send abort to receiver
                        witness_abort = yield from node.abort_tx(
                            witness_sock, node.identifier,
                            txs)  # send abort to witness

                        if (witness_abort == "aborted"
                                and receiver_abort == "aborted"):
                            yield from node.abort_tx(
                                node.socket_addr, node.identifier,
                                txs)  # send abort to itself (the sender)
                # do the work of sender

            elif txs[0].receiver == node.identifier:
                print("I am receiver")
                # do the work of receiver

            # Do the work of the witness
            elif txs[0].witness == node.identifier:
                print("I am witness")

        yield from asyncio.sleep(1)
Example #32
    def _set_initial(self, strat='alle'):
        """
        Sets initial strategy
        Options are:
            alle
            allc
            only_cwol
        """

        if strat == 'alle':
            strategy_set = {
                    'looked':{
                        'low': {
                            'defect': self.initial_state,
                            'cooperate': self.initial_state,
                            },
                        'high': {
                            'defect': self.initial_state,
                            'cooperate': self.initial_state,
                        },
                    },
                    'nolook': {
                        'low': {
                            'defect': self.initial_state,
                            'cooperate': self.initial_state,
                            },
                        'high': {
                            'defect': self.initial_state,
                            'cooperate': self.initial_state,
                            },
                        }
                    }

            action_set = {
                        'action': 'exit',
                    }

            self.states[self.initial_state] = State(strategy_set=copy.deepcopy(strategy_set), action_set=copy.deepcopy(action_set), id=self.initial_state)
            self.state = self.initial_state

        elif strat == 'allc':
            strategy_set = {
                    'looked':{
                        'low': {
                            'defect': self.initial_state,
                            'cooperate': self.initial_state,
                            },
                        'high': {
                            'defect': self.initial_state,
                            'cooperate': self.initial_state,
                        },
                    },
                    'nolook': {
                        'low': {
                            'defect': self.initial_state,
                            'cooperate': self.initial_state,
                            },
                        'high': {
                            'defect': self.initial_state,
                            'cooperate': self.initial_state,
                            },
                        }
                    }

            action_set = {
                        'action': 'continue',
                    }


            self.states[self.initial_state] = State(strategy_set=copy.deepcopy(strategy_set), action_set=copy.deepcopy(action_set), id=self.initial_state)
            self.state = self.initial_state

        elif strat == 'only_cwol':
            id_2 = utils.random_id()
            strategy_set = {
                    'looked':{
                        'low': {
                            'defect': self.initial_state,
                            'cooperate': self.initial_state,
                            },
                        'high': {
                            'defect': self.initial_state,
                            'cooperate': self.initial_state,
                        },
                    },
                    'nolook': {
                        'low': {
                            'defect': self.initial_state,
                            'cooperate': id_2,
                            },
                        'high': {
                            'defect': self.initial_state,
                            'cooperate': id_2,
                            },
                        }
                    }

            action_set = {
                        'action': 'exit',
                    }

            strategy_set2 = {
                    'looked':{
                        'low': {
                            'defect': self.initial_state,
                            'cooperate': self.initial_state,
                            },
                        'high': {
                            'defect': self.initial_state,
                            'cooperate': self.initial_state,
                        },
                    },
                    'nolook': {
                        'low': {
                            'defect': self.initial_state,
                            'cooperate': id_2,
                            },
                        'high': {
                            'defect': self.initial_state,
                            'cooperate': id_2,
                            },
                        }
                    }

            action_set2 = {
                        'action': 'continue',
                    }

            self.states[self.initial_state] = State(strategy_set=copy.deepcopy(strategy_set), action_set=copy.deepcopy(action_set), id=self.initial_state)
            self.states[id_2] = State(strategy_set=copy.deepcopy(strategy_set2), action_set=copy.deepcopy(action_set2), id=id_2)
            self.state = self.initial_state
Example #33
def run_training(df_name, model_name, is_GPU=True, params=None):

    default_params = {
        "nb_epochs": 10,
        "my_patience": 4,
        "batch_size": 80,
        "optimizer": "adam",
        "learning_rate": 0.01,
        "momentum": 0.9,
        "nesterov": True,
        "activation": "linear",
        "drop_rate": 0.3,
        "n_units": 50,
        "roll2vec": True,
        "embs_multiplier": 1,
        "multi_dense": True,
        "dense_acti": "linear",
        "full_pred": True,
    }
    params = merge_params(params, default_params)

    docs, target, params_data = data.get_dataset(df_name)
    params = merge_params(
        params_data,
        params)  # force the parameters to match those of the dataset
    X_train, X_test, y_train, y_test = train_test_split(docs,
                                                        target,
                                                        test_size=0.3)
    params["split_id"] = random_id()  # id to identify the split later

    # = = = = = fitting the model on 4 targets = = = = #

    # Building the models
    embeddings = data.get_embeddings(roll2vec=params["roll2vec"],
                                     multiplier=params["embs_multiplier"])
    print("### EMBS SHAPE : {} ###".format(embeddings.shape))
    model = HAN(embeddings,
                docs.shape,
                is_GPU=is_GPU,
                activation=params["activation"],
                drop_rate=params["drop_rate"],
                n_units=params["n_units"],
                multi_dense=params["multi_dense"],
                dense_acti=params["dense_acti"],
                full_pred=params["full_pred"])

    if params["optimizer"] == 'sgd':
        decay_rate = params["learning_rate"] / params["nb_epochs"]
        my_optimizer = optimizers.SGD(lr=params["learning_rate"],
                                      decay=decay_rate,
                                      momentum=params["momentum"],
                                      nesterov=params["nesterov"])
    elif params["optimizer"] == 'adam':
        my_optimizer = optimizers.Adam()
    elif params["optimizer"] == 'nadam':
        my_optimizer = optimizers.Nadam()

    model.compile(loss='mean_squared_error',
                  optimizer=my_optimizer,
                  metrics=['mae'])

    # Training for each target
    params["train_id"] = random_id()
    n_target = 1 if params["full_pred"] else 4
    for tgt in range(n_target):
        t0 = time.process_time()
        # = = = = = training = = = = =

        early_stopping = EarlyStopping(monitor='val_loss',
                                       patience=params["my_patience"],
                                       mode='min')

        # save model corresponding to best epoch
        if params["full_pred"]: tgt = "full"
        model_file = os.path.join(
            data.data_path, "models/",
            "{}_{}_{}_model.h5".format(model_name, df_name, tgt))
        checkpointer = ModelCheckpoint(filepath=model_file,
                                       verbose=1,
                                       save_best_only=True,
                                       save_weights_only=True)

        my_callbacks = [early_stopping, checkpointer]

        y_train_tgt = y_train if params["full_pred"] else y_train[tgt]
        y_test_tgt = y_test if params["full_pred"] else y_test[tgt]
        model.fit(X_train,
                  y_train_tgt,
                  batch_size=params["batch_size"],
                  epochs=params["nb_epochs"],
                  validation_data=(X_test, y_test_tgt),
                  callbacks=my_callbacks)

        T = time.process_time() - t0
        hist = model.history.history
        scores = get_scores(hist)
        scores["T"] = time.process_time() - t0

        data.save_perf(params, scores, tgt)
        print("################ {} minutes spent...###########".format(
            round(T / 60)))

    return params
Example #34
    def __init__(self, torrent):
        self.torrent = torrent
        self.peer_id = "-DE13F0-" + utils.random_id(12)
        self.download_key = utils.random_id(12)
        self.port = random.randint(1025, 65535)
Example #35
async def node_repl(node):

    while True:

        line = await ainput(">>> ")

        # Since it reads the "\n" when you press enter
        line = line.strip()

        if not line:
            continue

        # Handle arguments with spaces etc.
        line = list(shlex.shlex(line))

        cmd = line[0]
        args = line[1:]

        if cmd in ['id']:
            "Print id of a node"

            if len(args) == 1:
                peer_name = args[0]
                peer_socket = get_sock_from_name(args[0])
                try:
                    peer_id = await node.ping(peer_socket, node.identifier)
                    print("%s's id is %d" % (peer_name, peer_id))
                except socket.timeout:
                    print("Failed to ping node %s" % args[0])
            else:
                print("My id is %d" % node.identifier)

        # TODO: Print hash table of a particular node
        elif cmd in ['ht', 'hash_table']:
            "Print my hash table"

            print(node.storage_str())

        # TODO: Print routing table of a particular node
        elif cmd in ['rt', 'routing_table']:
            "Print my routing table"

            print(node.routing_table)

        elif cmd in ['put']:
            "Store a (key, value) pair on the network DHT"

            if (len(args) != 2):
                print("Expected 2 arguments, %d given" % len(args))
            else:
                num = await node.put(args[0], args[1], hashed=False)
                print("Value stored at %d node(s)" % num)

        elif cmd in ['get']:
            "Access a previously stored value by its key"

            if (len(args) != 1):
                print("Expected 1 argument, %d given" % len(args))
            else:
                try:
                    value = await node.get(args[0], hashed=False)
                    print(value)
                except KeyError:
                    print("Key not found")

        elif cmd in ['sa', 'send_amount']:
            "Send bitcoins to a node"

            if (len(args) != 4):
                print("Expected 4 arguments, %d given" % len(args))
            else:
                try:
                    sender_sock = get_sock_from_name(args[0])
                    receiver_sock = get_sock_from_name(args[1])
                    witness_sock = get_sock_from_name(args[2])

                    receiver_id = await node.ping(receiver_sock,
                                                  node.identifier)
                    witness_id = await node.ping(witness_sock, node.identifier)
                    amount = int(args[3])

                    reply = await node.request(sender_sock,
                                               "send_amount", node.identifier,
                                               int(receiver_id),
                                               int(witness_id), amount)
                    print(reply)

                except Exception as e:
                    print("Exception Caught : ", e)

        elif cmd in ['?', 'help']:
            "List commands"

            # Find left-justification factor
            ljust = max(map(len, HELP_DICT.keys()))
            for cmd, doc in HELP_DICT.items():
                print(cmd.ljust(ljust) + " : " + doc)

            print()

        elif cmd in ['bd', 'brd', 'bc', 'broadcast']:
            "Broadcast an RPC over the network"

            if (len(args) < 1):
                print("Expected atleast 1 argument, %d given" % len(args))
            else:
                await node.broadcast(random_id(), args[0], node.identifier,
                                     *args[1:])

        elif cmd in ['ld', 'ledger']:
            "Pretty print the ledger."

            print(node.ledger)

        else:
            print("Please enter valid input.\nType help to see commands")