Example #1
def build(data, status_code=200):
    data = json.dumps({
        'error': None,
        'status_code': status_code,
        'result': util.to_dict(data),
    })
    return Response(data, status=status_code, content_type='application/json')
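A minimal usage sketch for Example #1, assuming the helper runs in a Flask app (the Response signature matches Flask's); the util stand-in and the /ping route are hypothetical:

import json
from flask import Flask, Response

app = Flask(__name__)

class util:
    # hypothetical stand-in for the example's util module
    @staticmethod
    def to_dict(obj):
        return obj if isinstance(obj, dict) else vars(obj)

def build(data, status_code=200):
    # same helper as Example #1 above
    body = json.dumps({
        'error': None,
        'status_code': status_code,
        'result': util.to_dict(data),
    })
    return Response(body, status=status_code, content_type='application/json')

@app.route('/ping')
def ping():
    # responds with {"error": null, "status_code": 200, "result": {"pong": true}}
    return build({'pong': True})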
Example #2
    def mine(self, fuel, chain):
        if self.node:
            # get 3 transactions from the mempool
            try:
                block = Block()
                for i in range(3):
                    # walk back from the newest mempool entry
                    zeroTransaction = chain.mempool[-(i + 1)]
                    if zeroTransaction not in chain.verified:
                        transactionDetails = chain.mempoolData[zeroTransaction]
                        if self.verification(transactionDetails,
                                             zeroTransaction):
                            # managing wallets
                            sender = transactionDetails['sender']
                            receiver = transactionDetails['receiver']
                            amount = transactionDetails['amount']
                            sender.wallet -= amount
                            receiver.wallet += amount
                            prevBlock = chain.chaindata[chain.prevHash]
                            # removing the transaction from the mempool
                            chain.mempool.remove(zeroTransaction)
                            chain.verified.append(zeroTransaction)

                            # adding the transaction to the block
                            block.transactions.append(zeroTransaction)
                            block.transactionsData[zeroTransaction] = transactionDetails

                # preparing the block
                block.index = prevBlock['index'] + 1
                block.prevHash = chain.prevHash
                block.timestamp = formatTime(datetime.now())

                # proof of work: search for a hash with the required number
                # of leading zeros, giving up once the fuel budget runs out
                count = 0
                while count < fuel:
                    block.nonce += 1
                    blockHash = block()
                    if blockHash.startswith('0' * chain.difficulty):
                        # linking the block to the chain
                        chain.chain.append(blockHash)
                        chain.chaindata[blockHash] = to_dict(block)
                        chain.prevHash = blockHash
                        chain.submit(self, blockHash)
                        break
                    count += 1

            except IndexError:
                print('Not Enough Transactions to mine')
        else:
            print('Mine Function is not allowed for Non-Node Clients')
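The nonce search in the mine method is plain hash-based proof of work: keep hashing with an incremented nonce until the digest has enough leading zeros or the fuel budget is exhausted. A self-contained sketch of the same idea (all names here are illustrative, not from the example's codebase):

import hashlib
from typing import Optional

def mine_nonce(payload: str, difficulty: int, fuel: int) -> Optional[int]:
    # try up to `fuel` nonces; return the first one whose SHA-256
    # digest starts with `difficulty` leading zeros, else None
    for nonce in range(fuel):
        digest = hashlib.sha256(f"{payload}{nonce}".encode()).hexdigest()
        if digest.startswith("0" * difficulty):
            return nonce
    return None

print(mine_nonce("block-data", difficulty=3, fuel=1_000_000))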
Example #3
def step_impl(context):
    service_model = to_dict(getattr(context, 'raw_service_model', None))
    service_image = ServiceImage(getattr(context, 'image', 'dummy-service'))
    socket_address = {
        'ip': '192.168.10.7',
        'external_port': '9000',
        'internal_port': '9000'
    }
    volume_name = getattr(context, 'image', 'x-us-field:34').split(':')[0] + '-config'
    context.service_data = ServiceData(
        service_model,
        getattr(context, 'volume_enabled', 'False'),
    ).generate(service_image, socket_address, volume_name=volume_name)
Example #4
    def get(self):
        name = self.request.get('name')
        if name:
            metric = Metric.get_by_name(name)
            if metric:
                self.write_json(util.to_dict(metric))
            else:
                default_description = "<h3>" + name + "</h3>"
                self.write_json({
                    'survey': self.default_survey,
                    'rubric': self.default_rubric,
                    'description': default_description
                })
        else:
            logging.error('Metric request had no name')
            self.write_json({'error': 'a name is required'})
Example #5
def step_impl(context):
    compose_model = to_dict(context.raw_compose_model)

    if 'volumes' in compose_model:
        context.previous_volumes = deepcopy(compose_model['volumes'])

    service_image = ServiceImage(
        getattr(context, 'service_image', 'x-us-field:34'))
    proposed_address = {
        'ip': getattr(context, 'ip_address', None),
        'external_port': getattr(context, 'external_port', None),
        'internal_port': getattr(context, 'internal_port', None)
    }
    service_data = ServiceData(
        volume_enabled=getattr(context, 'volume_enabled', False))
    context.compose, context.service_name = Compose(
        compose_model, service_data).generate(service_image, proposed_address)
Example #6
    def __refresh_devices(self):
        try:
            # reconcile the in-memory device pool with local storage, keyed by id
            stored_devices = to_dict(
                self.__local_storage.read(Discovery.__devices_table[0]), "id")
            new_devices, missing_devices, existing_devices = diff(
                self.__device_pool, stored_devices)
            if new_devices:
                for device_id in new_devices:
                    self.__handle_new_device(device_id, stored_devices[device_id])
            if missing_devices:
                for device_id in missing_devices:
                    self.__handle_missing_device(device_id)
            if existing_devices:
                for device_id in existing_devices:
                    self.__handle_existing_device(device_id, stored_devices[device_id])
        except Exception as ex:
            logger.error("refreshing devices failed - {}".format(ex))
Example #7
    def init(self):
        gen0 = Client()
        gen1 = Client()
        genesis_transaction = Transactions(gen0, gen1, 3)
        genesis_transaction(self)

        # verify the genesis transaction
        transaction = self.mempool[-1]
        transactionData = self.mempoolData[transaction]

        # getting signature and public key from the transaction data
        transactionSignature = transactionData['signature']
        transactionVerifyKey = transactionData['sender'].publickey

        if gen0.verification(transactionData, transaction):
            # managing accounts
            transactionData['sender'].wallet -= transactionData['amount']
            transactionData['receiver'].wallet += transactionData['amount']

            # deleting the transaction from the mempool
            self.mempool.remove(transaction)
            self.verified.append(transaction)
            genesis = Block()
            genesis.transactions.append(transaction)
            genesis.transactionsData[transaction] = transactionData
            genesis.index = 0
            genesis.timestamp = formatTime(datetime.now())
            genesis.prevHash = "Genesis"

            # mining the genesis block
            proof = genesis()
            while not proof.startswith('0' * self.difficulty):
                genesis.nonce += 1
                proof = genesis()
            self.chain.append(proof)
            self.chaindata[proof] = to_dict(genesis)
            self.prevHash = genesis.genHash(genesis.nonce)
            print('Blockchain is initialised')
Example #8
    def to_dict(self):
        # recursively serialize every attribute via util.to_dict
        return {key: util.to_dict(value)
                for key, value in self.__dict__.items()}
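Example #8 delegates to util.to_dict for every attribute. A self-contained sketch of what such a recursive converter can look like (hypothetical; the real util.to_dict may handle more types):

def to_dict(value):
    # recursively convert objects into JSON-friendly structures
    if hasattr(value, "__dict__"):
        return {k: to_dict(v) for k, v in vars(value).items()}
    if isinstance(value, dict):
        return {k: to_dict(v) for k, v in value.items()}
    if isinstance(value, (list, tuple, set)):
        return [to_dict(v) for v in value]
    return value  # primitives pass through unchanged

class Point:
    def __init__(self, x, y):
        self.x, self.y = x, y

print(to_dict({"p": Point(1, 2), "tags": ("a", "b")}))
# {'p': {'x': 1, 'y': 2}, 'tags': ['a', 'b']}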
Example #9
def model_in_service_data(context, raw_service_model):
    service_model = to_dict(raw_service_model)
    service_data = deepcopy(context.service_data)
    rec_merge(service_data, service_model)
    return service_model.items() <= service_data.items()
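The final comparison works because dict views are set-like in Python: a.items() <= b.items() is True exactly when every key/value pair of a also appears in b. For example:

small = {"image": "nginx", "port": 80}
large = {"image": "nginx", "port": 80, "volume": "web-config"}
print(small.items() <= large.items())  # True: small is contained in large
print(large.items() <= small.items())  # False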
Example #10
    def __refresh_local_storage(self):
        try:
            logger.info("refreshing local storage ...")
            local_devices = to_dict(
                self.__local_storage.read(Discovery.__devices_table[0]), "id")
            remote_devices = get_cloud_devices(*get_cloud_credentials())
            new_devices, missing_devices, existing_devices = diff(
                local_devices, remote_devices)
            if new_devices:
                for device_id in new_devices:
                    logger.info("adding record for '{}' ...".format(device_id))
                    try:
                        self.__local_storage.create(
                            Discovery.__devices_table[0],
                            {"id": device_id, **remote_devices[device_id]})
                    except Exception as ex:
                        logger.error("adding record for '{}' failed - {}".format(
                            device_id, ex))
            if missing_devices:
                for device_id in missing_devices:
                    try:
                        device_data = self.__local_storage.read(
                            Discovery.__devices_table[0], id=device_id)
                        age = time.time() - float(device_data[0]["last_seen"])
                        if age > conf.Discovery.grace_period:
                            logger.info(
                                "removing record for '{}' due to exceeded grace period ..."
                                .format(device_id))
                            try:
                                self.__local_storage.delete(
                                    Discovery.__devices_table[0], id=device_id)
                            except Exception as ex:
                                logger.error("removing record for '{}' failed - {}".format(
                                    device_id, ex))
                        else:
                            logger.info("remaining grace period for missing '{}': {}s".format(
                                device_id, conf.Discovery.grace_period - age))
                    except Exception as ex:
                        logger.error("can't calculate grace period for missing '{}' - {}".format(
                            device_id, ex))
            if existing_devices:
                for device_id in existing_devices:
                    logger.info("updating record for '{}' ...".format(device_id))
                    try:
                        self.__local_storage.update(
                            Discovery.__devices_table[0],
                            remote_devices[device_id],
                            id=device_id)
                    except Exception as ex:
                        logger.error("updating record for '{}' failed - {}".format(
                            device_id, ex))
        except Exception as ex:
            logger.error("refreshing local storage failed - {}".format(ex))
Example #11
def load_reductions(filename: str) -> Dict:
    return to_dict(transform_reductions(load_tsv(filename)), "slug")
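Several examples on this page (here, #6, and #10) call to_dict(records, key) to index a sequence of records by one of their fields. A plausible sketch of that keyed variant (an assumption; the real helper may differ):

from typing import Any, Dict, Iterable

def to_dict(records: Iterable[Dict[str, Any]], key: str) -> Dict[Any, Dict[str, Any]]:
    # index an iterable of mappings by one of their fields;
    # a later record with a duplicate key overwrites an earlier one
    return {record[key]: record for record in records}

rows = [{"slug": "co2", "value": 1.5}, {"slug": "ch4", "value": 0.3}]
print(to_dict(rows, "slug"))
# {'co2': {'slug': 'co2', 'value': 1.5}, 'ch4': {'slug': 'ch4', 'value': 0.3}}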
Example #12
def main(args):
    split_path = os.path.join(args.out, SPLIT_DIR)
    if not os.path.exists(split_path):
        os.makedirs(split_path)

    profile = pd.read_csv(args.input)
    profile["id"] = (profile["reviewerID"].map(str) + "-" +
                     profile["asin"].map(str) + "-" +
                     profile["unixReviewTime"].map(str))
    reviews = (
        profile.groupby(["reviewerID", "asin", "unixReviewTime", "id"])
        [["reviewerID", "asin", "unixReviewTime", "id"]]
        .nunique()
        .drop(columns=["reviewerID", "asin", "unixReviewTime", "id"])
        .reset_index()
        .sort_values(by=["reviewerID", "unixReviewTime"], ascending=False)
        .reset_index(drop=True)
        .reset_index()
    )
    maxIdx = reviews.groupby(["reviewerID"])["index"].max()
    minIdx = reviews.groupby(["reviewerID"])["index"].min()
    reviews["maxIdx"] = reviews["reviewerID"].map(maxIdx)
    reviews["minIdx"] = reviews["reviewerID"].map(minIdx)
    reviews["ordered"] = reviews["index"].map(int) - reviews["minIdx"].map(
        int) + 1
    reviews["count"] = reviews["maxIdx"].map(int) - reviews["minIdx"].map(
        int) + 1
    reviews["pct"] = 1.0 * reviews["ordered"].map(
        float) / reviews["count"].map(float)

    test_pct = float(args.ratio_test)
    validation_pct = float(args.ratio_test + args.ratio_validation)

    print("Getting list of train/validation/test instances")
    testIdx = list(reviews[(reviews["pct"] <= test_pct)]["id"])
    validationIdx = list(reviews[((reviews["pct"] > test_pct) &
                                  (reviews["pct"] <= validation_pct))]["id"])
    trainIdx = list(reviews[(reviews["pct"] > validation_pct)]["id"])

    test = profile[(profile["id"].isin(testIdx))]
    validation = profile[(profile["id"].isin(validationIdx))]
    train = profile[(profile["id"].isin(trainIdx))]

    print("Filtering out unseen data in validation/test")
    users, items, aspects, opinions = (
        list(train["reviewerID"]),
        list(train["asin"]),
        list(train["aspect"]),
        list(train["opinion"]),
    )
    test = test[(test["reviewerID"].isin(users)
                 & test["asin"].isin(items)
                 & test["opinion"].isin(opinions)
                 & test["aspect"].isin(aspects))]
    validation = validation[(validation["reviewerID"].isin(users)
                             & validation["asin"].isin(items)
                             & validation["opinion"].isin(opinions)
                             & validation["aspect"].isin(aspects))]

    with open(os.path.join(args.out, SPLITTED_INFO_FILENAME), "w") as f:
        f.write("count_by,n_train,n_validation,n_test\n")
        f.write("%s,%d,%d,%d\n" %
                ("n_sentence", len(train), len(validation), len(test)))
        f.write("%s,%d,%d,%d\n" % (
            "n_review",
            len(set(train["id"])),
            len(set(validation["id"])),
            len(set(test["id"])),
        ))
        f.write("\n")
        f.write("# aspect: %d\n" % len(set(aspects)))
        f.write("# opinion: %d\n" % len(set(opinions)))

    test.drop(columns=["id"]).to_csv(
        os.path.join(args.out, SPLITTED_TEST_FILE), index=False)
    validation.drop(columns=["id"]).to_csv(
        os.path.join(args.out, SPLITTED_VALIDATION_FILE), index=False)
    train_file = os.path.join(args.out, SPLITTED_TRAIN_FILE)
    train.drop(columns=["id"]).to_csv(train_file, index=False)

    # export dictionary: users, items, aspects, opinions
    save_dict(to_dict(users), os.path.join(args.out, USER_DICT_FILENAME))
    save_dict(to_dict(items), os.path.join(args.out, ITEM_DICT_FILENAME))
    save_dict(to_dict(aspects), os.path.join(args.out, ASPECT_DICT_FILENAME))
    save_dict(to_dict(opinions), os.path.join(args.out, OPINION_DICT_FILENAME))

    # export data for EFM/MTER
    print("Exporting data for EFM/MTER training")
    train[["reviewerID", "asin", "overall", "unixReviewTime"
           ]].drop_duplicates().to_csv(os.path.join(args.out, "train.txt"),
                                       header=False,
                                       index=False)
    test[["reviewerID", "asin", "overall", "unixReviewTime"
          ]].drop_duplicates().to_csv(os.path.join(args.out, "test.txt"),
                                      header=False,
                                      index=False)
    profile["aspect_sentiment"] = (profile["aspect"].map(str) + ":" +
                                   profile["opinion"].map(str) + ":" +
                                   profile["sentiment"].map(str))
    sentiment = (profile.groupby(
        ["reviewerID", "asin"])["aspect_sentiment"].apply(list).reset_index())
    sentiment["aspect_sentiment"] = sentiment["aspect_sentiment"].apply(
        lambda x: ",".join(x))
    with open(os.path.join(args.out, "sentiment.txt"), "w") as f:
        for row in sentiment.itertuples():
            f.write("{}\n".format(",".join([row[1], row[2], row[3]])))

    print("Done")