Example #1
    def aggregate_sigs_secure(signatures, public_keys, message_hashes):
        """
        Aggregate signatures using the secure method, which calculates
        exponents based on public keys, and raises each signature to an
        exponent before multiplying them together. This is secure against
        rogue public key attack, but is slower than simple aggregation.
        """
        if (len(signatures) != len(public_keys)
                or len(public_keys) != len(message_hashes)):
            raise Exception("Invalid number of keys")
        mh_pub_sigs = [(message_hashes[i], public_keys[i], signatures[i])
                       for i in range(len(signatures))]

        # Sort by message hash + pk
        mh_pub_sigs.sort()

        computed_Ts = BLS.hash_pks(len(public_keys), public_keys)

        # Raise each sig to a power of each t,
        # and multiply all together into agg_sig
        ec = public_keys[0].ec
        agg_sig = JacobianPoint(Fq2.one(ec.q), Fq2.one(ec.q), Fq2.zero(ec.q),
                                True, ec)

        for i, (_, _, signature) in enumerate(mh_pub_sigs):
            agg_sig += signature * computed_Ts[i]

        return Signature.from_g2(agg_sig)
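
A minimal usage sketch of the secure path, assuming the public bls_py entry
points exercised in the tests below (PrivateKey.from_seed, BLS.aggregate_sigs,
BLS.verify); two keys signing the same message force the exponent-weighted
aggregation that resists rogue-public-key attacks:

    # Hedged sketch: both signatures cover the same message, so aggregation
    # must take the secure (exponentiated) path implemented above.
    sk1 = PrivateKey.from_seed(bytes([1, 2, 3, 4, 5]))
    sk2 = PrivateKey.from_seed(bytes([1, 2, 3, 4, 5, 6]))
    msg = bytes([7, 8, 9])
    agg = BLS.aggregate_sigs([sk1.sign(msg), sk2.sign(msg)])
    assert BLS.verify(agg)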
    def reply_commit(blockchain, logger, values):
        logger.debug("in reply_commit fxn")
        signature = values.get('tc_signed')
        node_address = values.get('address')
        if signature is None or node_address is None:
            return jsonify("Error: invalid json received, Bad request"), 400
        if node_address not in blockchain.public_key_list:
            return jsonify("Bad request"), 400

        signature = BLS.deserialize(signature, Signature)
        hash_of_priority_block = blockchain.proposed_block.get_hash()
        # the hex digest is ASCII, so encode() gives the same bytes as
        # the original ord() loop
        msg = hash_of_priority_block.encode()

        signature.set_aggregation_info(
            AggregationInfo.from_msg(
                PublicKey.from_bytes(
                    bytes(blockchain.public_key_list[node_address],
                          "ISO-8859-1")), msg))
        verify_sign = signature.verify()

        if verify_sign:
            logger.debug("reply commit signature verified")
            blockchain.commit_accepted[node_address] = signature
            # print("commit accepted by ", len(blockchain.commit_accepted))
            return jsonify("True"), 200
        else:
            logger.warning("reply commit signature tempered")
            return jsonify("False"), 300
Example #3
def bls_search(lc, target_ID, save_path):
    """
    Perform bls analysis using foreman-mackey's bls.py function
    """
    durations = np.linspace(0.05, 0.2, 22) * u.day
    model = BLS(lc.time * u.day, lc.flux)
    results = model.autopower(durations, frequency_factor=5.0)

    # Find the period and epoch of the peak
    index = np.argmax(results.power)
    period = results.period[index]
    t0 = results.transit_time[index]
    duration = results.duration[index]
    transit_info = model.compute_stats(period, duration, t0)

    epoch = transit_info['transit_times'][0]

    fig, ax = plt.subplots(1, 1, figsize=(8, 4))

    # Highlight the harmonics of the peak period
    ax.axvline(period.value, alpha=0.4, lw=3)
    for n in range(2, 10):
        ax.axvline(n * period.value, alpha=0.4, lw=1, linestyle="dashed")
        ax.axvline(period.value / n, alpha=0.4, lw=1, linestyle="dashed")

    # Plot the periodogram
    ax.plot(results.period, results.power, "k", lw=0.5)

    ax.set_xlim(results.period.min().value, results.period.max().value)
    ax.set_xlabel("period [days]")
    ax.set_ylabel("log likelihood")
    ax.set_title('{} - BLS Periodogram'.format(target_ID))
    #plt.savefig(save_path + '{} - BLS Periodogram.png'.format(target_ID))
    #    plt.close(fig)

    # Fold by most significant period
    phase_fold_plot(lc.time * u.day,
                    lc.flux,
                    period,
                    epoch,
                    target_ID,
                    save_path,
                    title='{} Lightcurve folded by {} days'.format(
                        target_ID, period))

    return results, transit_info
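
A self-contained sketch of the same peak-finding steps on synthetic data,
assuming BLS here is astropy's BoxLeastSquares (which matches the autopower
and compute_stats calls above):

    import numpy as np
    import astropy.units as u
    from astropy.timeseries import BoxLeastSquares

    rng = np.random.default_rng(0)
    t = np.linspace(0, 27.0, 2000)                    # days
    flux = 1.0 + 1e-4 * rng.standard_normal(t.size)
    flux[(t % 3.5) < 0.1] -= 0.01                     # injected 3.5 d transits

    model = BoxLeastSquares(t * u.day, flux)
    results = model.autopower(0.1 * u.day, frequency_factor=5.0)
    peak = np.argmax(results.power)
    print(results.period[peak], results.transit_time[peak])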
Example #4
def test2():
    seed = bytes([1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
    seed2 = bytes([1, 20, 102, 229, 1, 157])

    sk = PrivateKey.from_seed(seed)
    sk_cp = PrivateKey.from_seed(seed)
    sk2 = PrivateKey.from_seed(seed2)
    pk = sk.get_public_key()
    pk2 = sk2.get_public_key()
    assert(sk == sk_cp)
    assert(sk != sk2)
    assert(pk.get_fingerprint() == 0xddad59bb)

    pk2_ser = pk2.serialize()
    pk2_copy = PublicKey.from_bytes(pk2_ser)
    assert(pk2 == pk2_copy)
    assert(pk != pk2)
    assert(pk2.size() == 48)
    assert(sk2.size() == 32)

    message = bytes("this is the message", "utf-8")
    sig = sk.sign(message)
    sig_ser = sig.serialize()
    sig_cp = Signature.from_bytes(sig_ser)
    a1 = AggregationInfo.from_msg(pk, message)
    sig_cp.set_aggregation_info(a1)
    a2 = sig_cp.get_aggregation_info()
    assert(a1 == a2)
    sig2 = sk2.sign(message)

    assert(sig.size() == 96)
    assert(sig != sig2)
    assert(sig == sig_cp)

    sig_agg = BLS.aggregate_sigs([sig, sig2])

    result = BLS.verify(sig_cp)
    result2 = BLS.verify(sig2)
    result3 = BLS.verify(sig_agg)
    assert(result)
    assert(result2)
    assert(result3)

    sk2 = sk
def boxleastsq(BJD, flux, mindur=0.5, maxdur=10.0):
    '''
    Box least squares module that uses bls.py from:
        https://github.com/dfm/bls.py

    Input:
        BJD     : array  - barycentric Julian dates.
        flux    : array  - the normalized flux.
        mindur  : float  - the minimum transit duration (hours).
        maxdur  : float  - the maximum transit duration (hours).

    Output:
        BLSdict : dict   - dictionary containing period, mid-transit time,
                           transit duration, transit depth, and the error
                           on the depth.
        results : object - the results from the BLS.
    '''

    ## BOX LEAST SQUARES
    durations = np.linspace(mindur, maxdur, 10) * u.hour
    model = BLS(BJD * u.day, flux)
    results = model.autopower(durations,
                              minimum_n_transit=2,
                              frequency_factor=5.0)

    index = np.argmax(results.power)
    period = results.period[index].value
    t0 = results.transit_time[index].value
    dur = results.duration[index].value
    dep = results.depth[index]
    errdep = results.depth_err[index]

    stats = model.compute_stats(period, dur, t0)
    dep_even = stats['depth_even'][0]
    dep_odd = stats['depth_odd'][0]

    BLSdict = {
        'period': period,
        'midtransit_time': t0,
        'duration': dur,
        'depth': dep,
        'errdepth': errdep,
        'depth_even': dep_even,
        'depth_odd': dep_odd
    }

    return BLSdict, results
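
Hedged usage sketch for boxleastsq, assuming BJD and flux are plain 1-D numpy
arrays as the docstring states:

    BLSdict, results = boxleastsq(BJD, flux)
    print(BLSdict['period'], BLSdict['midtransit_time'], BLSdict['depth'])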
    def verified_commit(blockchain, logger, values):
        block_hash = values.get('block')
        if block_hash is None:
            return jsonify("tempered Data"), 401
        block = blockchain.commit_verified_list.get(block_hash)
        if block is None:
            return jsonify("verification block missing"), 402
        signers = values.get('n_list')
        co_sig = values.get('co_sig')
        if (signers is None) or (co_sig is None):
            return jsonify("tempered block data"), 403
        co_sig = BLS.deserialize(co_sig, Signature)
        flag = block.verify_block(blockchain)
        if not flag:
            return jsonify("invalid block!"), 301
        node_address = block.harvester
        block_hash_hexdigest = block.get_hash()
        if node_address in blockchain.public_key_list:
            if len(signers) / len(blockchain.public_key_list) > 0.66:

                # the hex digest is ASCII, so encode() gives the same
                # bytes as the original ord() loop
                msg = block_hash_hexdigest.encode()
                agg_info_list = []
                for node in signers:
                    if node in blockchain.public_key_list:
                        agg_info = AggregationInfo.from_msg(
                            PublicKey.from_bytes(
                                bytes(blockchain.public_key_list[node],
                                      "ISO-8859-1")), msg)
                        agg_info_list.append(agg_info)
                    else:
                        return jsonify("BlockChain couldn't updated "), 302

                agg_public_key = AggregationInfo.merge_infos(agg_info_list)
                co_sig.set_aggregation_info(agg_public_key)
                verify_signature = co_sig.verify()

                if verify_signature:
                    logger.debug("hey you verified commit block" +
                                 block.get_hash())
                    block.signers = signers
                    block.signature = values.get('co_sig')
                    blockchain.update_blockchain(block)
                    return jsonify("BlockChain should be updated "), 200
                else:
                    return jsonify("BlockChain couldn't updated "), 303
            else:
                logger.warning("you didn't get majority")
                return jsonify("BlockChain couldn't updated "), 304
        logger.debug("node address didn't exists")
        return jsonify("BlockChain couldn't updated "), 305
Example #7
    def do_bls(self):
        """
        """
        self.bls_time = self.time[30:-20]
        self.bls_flux = self.det_flux[30:-20]

        durations = np.linspace(0.05, 0.2, 10)
        bls_model = BLS(self.bls_time, self.bls_flux)
        bls_results = bls_model.autopower(durations, frequency_factor=5.0)
        self.bls_results = bls_results

        index = np.argmax(bls_results.power)
        bls_period = bls_results.period[index]
        bls_t0 = bls_results.transit_time[index]
        bls_duration = bls_results.duration[index]
        bls_depth = bls_results.depth[index]

        self.bls_model = bls_model
        self.bls_period = bls_period
        self.bls_t0 = bls_t0
        self.bls_depth = bls_depth
        self.bls_duration = bls_duration
 def commit(blockchain, logger, values):
     block_hash = values.get('block')
     signers = values.get('n_list')
     co_sig = values.get('co_sig')
     if block_hash is None or co_sig is None:
         return jsonify("tempered Data"), 300
     # co_sig = bytes(co_sig, "ISO-8859-1")
     # co_sig = Signature.from_bytes(co_sig)
     block = blockchain.proposal_verified_list.get(block_hash)
     if (block is None) or (signers is None):
         return jsonify("tempered Data"), 301
     # temp_co_sig = []
     # if type(co_sig) == list:
     #     for each_sig in co_sig:
     #         each_sig = bytes(each_sig, "ISO-8859-1")
     #         each_sig = Signature.from_bytes(each_sig)
     #         temp_co_sig.append(each_sig)
     #
     #     co_sig = Signature.aggregate(temp_co_sig)
     #     co_sig = str(co_sig.serialize(), "ISO-8859-1")
     node_address = block.harvester
     co_sig = BLS.deserialize(co_sig, Signature)
     block_hash_hash_digest = sha256(str(block_hash).encode()).hexdigest()
     if node_address in blockchain.public_key_list and block_hash_hash_digest not in blockchain.broadcast_list:
         if len(signers) / len(blockchain.public_key_list) > 0.66:
             _block = copy(block)
             _block.signature = co_sig
             _block.signers = signers
             if _block.verify_block_signature(blockchain):
                 blockchain.broadcast_list.append(block_hash_hash_digest)
                 blockchain.broadcast_it("/commit", values)
                 logger.debug("commit fxn call using hash: " + block_hash)
                 blockchain.commit_queue[block_hash] = block
                 # args must be a tuple, not a set
                 threading.Thread(
                     target=blockchain.sign_commit_block,
                     args=(blockchain.sign_commit_block_delay,)).start()
                 return jsonify("BlockChain should be updated "), 200
             else:
                 logger.warning(
                     "given signature for commit verification is tampered")
                 return jsonify("Blockchain couldn't be updated"), 201
         else:
             logger.warning("you did not get majority")
             return jsonify("BlockChain couldn't updated "), 202
     logger.warning("tempered data or retransmitted data")
     return jsonify("BlockChain couldn't updated "), 203
Example #9
def test_vectors():
    sk1 = PrivateKey.from_seed(bytes([1, 2, 3, 4, 5]))
    pk1 = sk1.get_public_key()
    sig1 = sk1.sign(bytes([7, 8, 9]))

    sk2 = PrivateKey.from_seed(bytes([1, 2, 3, 4, 5, 6]))
    pk2 = sk2.get_public_key()
    sig2 = sk2.sign(bytes([7, 8, 9]))
    assert(sk1.serialize() == bytes.fromhex("022fb42c08c12de3a6af053880199806532e79515f94e83461612101f9412f9e"))
    assert(pk1.get_fingerprint() == 0x26d53247)
    assert(pk2.get_fingerprint() == 0x289bb56e)
    assert(sig1.serialize() == bytes.fromhex("93eb2e1cb5efcfb31f2c08b235e8203a67265bc6a13d9f0ab77727293b74a357ff0459ac210dc851fcb8a60cb7d393a419915cfcf83908ddbeac32039aaa3e8fea82efcb3ba4f740f20c76df5e97109b57370ae32d9b70d256a98942e5806065"))
    assert(sig2.serialize() == bytes.fromhex("975b5daa64b915be19b5ac6d47bc1c2fc832d2fb8ca3e95c4805d8216f95cf2bdbb36cc23645f52040e381550727db420b523b57d494959e0e8c0c6060c46cf173872897f14d43b2ac2aec52fc7b46c02c5699ff7a10beba24d3ced4e89c821e"))

    agg_sig = BLS.aggregate_sigs([sig1, sig2])
    agg_pk = BLS.aggregate_pub_keys([pk1, pk2], True)
    agg_sk = BLS.aggregate_priv_keys([sk1, sk2], [pk1, pk2], True)
    assert(agg_sig.serialize() == bytes.fromhex("0a638495c1403b25be391ed44c0ab013390026b5892c796a85ede46310ff7d0e0671f86ebe0e8f56bee80f28eb6d999c0a418c5fc52debac8fc338784cd32b76338d629dc2b4045a5833a357809795ef55ee3e9bee532edfc1d9c443bf5bc658"))
    assert(agg_sk.sign(bytes([7, 8, 9])).serialize() == agg_sig.serialize())


    assert(BLS.verify(sig1))
    assert(BLS.verify(agg_sig))

    agg_sig.set_aggregation_info(AggregationInfo.from_msg(agg_pk, bytes([7, 8, 9])))
    assert(BLS.verify(agg_sig))

    sig1.set_aggregation_info(sig2.aggregation_info)
    assert(not BLS.verify(sig1))

    sig3 = sk1.sign(bytes([1, 2, 3]))
    sig4 = sk1.sign(bytes([1, 2, 3, 4]))
    sig5 = sk2.sign(bytes([1, 2]))


    agg_sig2 = BLS.aggregate_sigs([sig3, sig4, sig5])
    assert(BLS.verify(agg_sig2))
    assert(agg_sig2.serialize() == bytes.fromhex("8b11daf73cd05f2fe27809b74a7b4c65b1bb79cc1066bdf839d96b97e073c1a635d2ec048e0801b4a208118fdbbb63a516bab8755cc8d850862eeaa099540cd83621ff9db97b4ada857ef54c50715486217bd2ecb4517e05ab49380c041e159b"))
Example #10
    def aggregate(public_keys, secure):
        """
        Aggregates public keys together
        """
        if len(public_keys) < 1:
            raise Exception("Invalid number of keys")
        public_keys.sort()

        computed_Ts = BLS.hash_pks(len(public_keys), public_keys)

        ec = public_keys[0].value.ec
        sum_keys = JacobianPoint(Fq.one(ec.q), Fq.one(ec.q),
                                 Fq.zero(ec.q), True, ec)
        for i in range(len(public_keys)):
            addend = public_keys[i].value
            if secure:
                addend *= computed_Ts[i]
            sum_keys += addend

        return PublicKey.from_g1(sum_keys)
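
The tests in Examples #9 and #20 reach this method through
BLS.aggregate_pub_keys; a minimal secure-aggregation sketch using that entry
point:

    sk1 = PrivateKey.from_seed(bytes([1, 2, 3, 4, 5]))
    sk2 = PrivateKey.from_seed(bytes([1, 2, 3, 4, 5, 6]))
    # secure=True applies the hash-derived exponents computed above
    agg_pk = BLS.aggregate_pub_keys(
        [sk1.get_public_key(), sk2.get_public_key()], True)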
 def reply_proposed(blockchain, logger, values):
     logger.debug("in reply_proposed fxn")
     signature = values.get('p_signed')
     node_address = values.get('address')
     if signature is None or node_address is None:
         return jsonify("Error: invalid json received, Bad request"), 200
     if node_address not in blockchain.public_key_list:
         return jsonify("Bad request"), 200
     signature = BLS.deserialize(signature, Signature)
     _block = copy(blockchain.proposed_block)
     _block.signature = signature
     _block.signers = [node_address]
     if _block.verify_block_signature(blockchain):
         logger.debug("reply proposal signature verified")
         blockchain.proposal_accepted[node_address] = signature
         return jsonify("proposal reply signature verified"), 200
     else:
         logger.warning("proposal reply signature tempered")
         return jsonify("proposal reply signature tempered"), 400
Example #12
    def verify_block_signature(self, blockchain):
        if type(self.txn) == list:
            txn_hash = self.txn
            txn_hash.sort()
        elif type(self.txn) == dict:
            txn_hash = list(self.txn.keys())
            txn_hash.sort()
        else:
            return False
        msg = dumps({
            "index": self.index,
            "harvester": self.harvester,
            "previous_hash": self.previous_hash,
            "txn": txn_hash,
            "signature": "",
            "signers": "",
            "timestamp": self.timestamp,
        })
        # dumps() returns ASCII by default (ensure_ascii=True), so encode()
        # gives the same bytes as the original ord() loop
        msg = msg.encode()

        if not isinstance(self.signature, Signature):
            self.signature = BLS.deserialize(self.signature, Signature)

        agg_info_list = []
        for node in self.signers:
            if node in blockchain.public_key_list:
                agg_info = AggregationInfo.from_msg(
                    PublicKey.from_bytes(
                        bytes(blockchain.public_key_list[node], "ISO-8859-1")),
                    msg)
                agg_info_list.append(agg_info)
            else:
                return False
        agg_public_key = AggregationInfo.merge_infos(agg_info_list)
        self.signature.set_aggregation_info(agg_public_key)
        return self.signature.verify()
    def secure_merge_infos(colliding_infos):
        """
        Infos are merged together with combination of exponents
        """

        # Groups are sorted by message, then pk, then exponent.
        # Each info object (and all of its exponents) will be
        # exponentiated by one of the Ts
        colliding_infos.sort()

        sorted_keys = []
        for info in colliding_infos:
            sorted_keys.extend(info.tree.keys())
        sorted_keys.sort()
        sorted_pks = [public_key for (message_hash, public_key) in sorted_keys]
        computed_Ts = BLS.hash_pks(len(colliding_infos), sorted_pks)

        # Group order, exponents can be reduced mod the order
        order = sorted_pks[0].value.ec.n

        new_tree = {}
        for i in range(len(colliding_infos)):
            for key, value in colliding_infos[i].tree.items():
                if key not in new_tree:
                    # This message & pk have not been included yet
                    new_tree[key] = (value * computed_Ts[i]) % order
                else:
                    # This message and pk are already included, so multiply
                    addend = value * computed_Ts[i]
                    new_tree[key] = (new_tree[key] + addend) % order
        mh_pubkeys = [k for k, v in new_tree.items()]
        mh_pubkeys.sort()
        message_hashes = [
            message_hash for (message_hash, public_key) in mh_pubkeys
        ]
        public_keys = [public_key for (message_hash, public_key) in mh_pubkeys]
        return AggregationInfo(new_tree, message_hashes, public_keys)
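
End to end, this is what lets a verifier rebuild the aggregation info for an
aggregate over a shared message. A sketch grounded in the test flow of
Examples #9 and #20, assuming merge_infos dispatches to the secure variant
above when the (message, pk) pairs collide on the message:

    msg = bytes([7, 8, 9])
    sk1 = PrivateKey.from_seed(bytes([1, 2, 3, 4, 5]))
    sk2 = PrivateKey.from_seed(bytes([1, 2, 3, 4, 5, 6]))
    agg_sig = BLS.aggregate_sigs([sk1.sign(msg), sk2.sign(msg)])
    a1 = AggregationInfo.from_msg(sk1.get_public_key(), msg)
    a2 = AggregationInfo.from_msg(sk2.get_public_key(), msg)
    agg_sig.set_aggregation_info(AggregationInfo.merge_infos([a1, a2]))
    assert BLS.verify(agg_sig)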
Example #14
    def verify_offline_block_signature(self, blockchain):
        if type(self.txn) == list:
            txn_hash = self.txn
            txn_hash.sort()
        elif type(self.txn) == dict:
            txn_hash = list(self.txn.keys())
            txn_hash.sort()
        else:
            return False
        block = copy(self)
        block.signature = ""
        block.signers = ""
        block.txn = txn_hash
        msg = block.get_hash()

        # the hex digest is ASCII, so encode() gives the same bytes as
        # the original ord() loop
        msg = msg.encode()

        if not isinstance(self.signature, Signature):
            self.signature = BLS.deserialize(self.signature, Signature)

        agg_info_list = []
        for node in self.signers:
            if node in blockchain.public_key_list:
                agg_info = AggregationInfo.from_msg(
                    PublicKey.from_bytes(
                        bytes(blockchain.public_key_list[node], "ISO-8859-1")),
                    msg)
                agg_info_list.append(agg_info)
            else:
                return False
        agg_public_key = AggregationInfo.merge_infos(agg_info_list)
        self.signature.set_aggregation_info(agg_public_key)
        return self.signature.verify()
Example #15
    def aggregate(private_keys, public_keys, secure):
        """
        Aggregates private keys together
        """
        if not secure:
            sum_keys = sum(pk.value for pk in private_keys) % default_ec.n

        else:
            if not public_keys:
                raise Exception("Must include public keys in secure" +
                                " aggregation")
            if len(private_keys) != len(public_keys):
                raise Exception("Invalid number of keys")

            priv_pub_keys = sorted(zip(public_keys, private_keys))
            computed_Ts = BLS.hash_pks(len(private_keys), public_keys)
            n = public_keys[0].value.ec.n

            sum_keys = 0
            for i, (_, privkey) in enumerate(priv_pub_keys):
                sum_keys += privkey.value * computed_Ts[i]
                sum_keys %= n

        return PrivateKey.from_bytes(sum_keys.to_bytes(32, "big"))
Example #16
    def aggregate(signatures):
        """
        Aggregates many (aggregate) signatures, using a combination of simple
        and secure aggregation. Signatures are grouped based on which ones
        share common messages, and these are all merged securely.
        """
        public_keys = []  # List of lists
        message_hashes = []  # List of lists

        for signature in signatures:
            if signature.aggregation_info.empty():
                raise Exception(
                    "Each signature must have a valid aggregation info")
            public_keys.append(signature.aggregation_info.public_keys)
            message_hashes.append(signature.aggregation_info.message_hashes)

        # Find colliding vectors, save colliding messages
        messages_set = set()
        colliding_messages_set = set()

        for msg_vector in message_hashes:
            messages_set_local = set()
            for msg in msg_vector:
                if msg in messages_set and msg not in messages_set_local:
                    colliding_messages_set.add(msg)
                messages_set.add(msg)
                messages_set_local.add(msg)

        if len(colliding_messages_set) == 0:
            # There are no colliding messages between the groups, so we
            # will just aggregate them all simply. Note that we assume
            # that every group is a valid aggregate signature. If an invalid
            # or insecure signature is given, an invalid signature will
            # be created. We don't verify for performance reasons.
            final_sig = Signature.aggregate_sigs_simple(signatures)
            aggregation_infos = [sig.aggregation_info for sig in signatures]
            final_agg_info = AggregationInfo.merge_infos(aggregation_infos)
            final_sig.set_aggregation_info(final_agg_info)
            return final_sig

        # There are groups that share messages, therefore we need
        # to use a secure form of aggregation. First we find which
        # groups collide, and securely aggregate these. Then, we
        # use simple aggregation at the end.
        colliding_sigs = []
        non_colliding_sigs = []
        colliding_message_hashes = []  # List of lists
        colliding_public_keys = []  # List of lists

        for i in range(len(signatures)):
            group_collides = False
            for msg in message_hashes[i]:
                if msg in colliding_messages_set:
                    group_collides = True
                    colliding_sigs.append(signatures[i])
                    colliding_message_hashes.append(message_hashes[i])
                    colliding_public_keys.append(public_keys[i])
                    break
            if not group_collides:
                non_colliding_sigs.append(signatures[i])

        # Arrange all signatures, sorted by their aggregation info
        colliding_sigs.sort(key=lambda s: s.aggregation_info)

        # Arrange all public keys in sorted order, by (m, pk)
        sort_keys_sorted = []
        for i in range(len(colliding_public_keys)):
            for j in range(len(colliding_public_keys[i])):
                sort_keys_sorted.append((colliding_message_hashes[i][j],
                                         colliding_public_keys[i][j]))
        sort_keys_sorted.sort()
        sorted_public_keys = [pk for (mh, pk) in sort_keys_sorted]

        computed_Ts = BLS.hash_pks(len(colliding_sigs), sorted_public_keys)

        # Raise each sig to a power of each t,
        # and multiply all together into agg_sig
        ec = sorted_public_keys[0].value.ec
        agg_sig = JacobianPoint(Fq2.one(ec.q), Fq2.one(ec.q), Fq2.zero(ec.q),
                                True, ec)

        for i, signature in enumerate(colliding_sigs):
            agg_sig += signature.value * computed_Ts[i]

        for signature in non_colliding_sigs:
            agg_sig += signature.value

        final_sig = Signature.from_g2(agg_sig)
        aggregation_infos = [sig.aggregation_info for sig in signatures]
        final_agg_info = AggregationInfo.merge_infos(aggregation_infos)
        final_sig.set_aggregation_info(final_agg_info)

        return final_sig
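
A sketch of the two dispatch paths, assuming BLS.aggregate_sigs is the public
wrapper for this method (as the tests suggest): distinct messages take the
simple path, a shared message forces the secure path.

    sk1 = PrivateKey.from_seed(bytes([1, 2, 3, 4, 5]))
    sk2 = PrivateKey.from_seed(bytes([1, 2, 3, 4, 5, 6]))

    # No colliding messages: simple aggregation.
    simple = BLS.aggregate_sigs([sk1.sign(bytes([1])), sk2.sign(bytes([2]))])

    # Same message from both keys: colliding, secure aggregation.
    secure = BLS.aggregate_sigs([sk1.sign(bytes([3])), sk2.sign(bytes([3]))])

    assert BLS.verify(simple) and BLS.verify(secure)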
    def check_proposed(blockchain, logger, values):
        block = values.get('block')
        sig = values.get('sig')
        a = values.get('int_a')
        b = values.get('int_b')

        if (block is None) or (sig is None) or (a is None) or (b is None):
            return jsonify("Error: invalid json data received"), 301
        # sig = bytes(sig, "ISO-8859-1")
        # sig = Signature.from_bytes(sig)
        block = Block.create_block(block)
        flag = block.verify_block(blockchain)
        node_address = block.harvester
        if not flag:

            url = "http://" + node_address + "/check_conflict"
            json_data = {'node_address': blockchain.address}
            try:
                response = requests.post(url, json=json_data)
                blockchain.logger.info("----conflict response----")
                blockchain.logger.info(response.status_code)
                blockchain.logger.info(response.json())
                blockchain.logger.info("****conflict response****")
            except requests.RequestException:
                pass
            print(node_address, "block could not be verified")
            return jsonify("invalid block!"), 302

        # leader verification
        blockchain.logger.info("----fp----")
        blockchain.logger.info("before_true_leader " + node_address)
        blockchain.logger.info("first_sign_hash " +
                               sha256(a.encode()).hexdigest())
        blockchain.logger.info("second_sign_hash " +
                               sha256(b.encode()).hexdigest())
        blockchain.logger.info("****fp****")
        is_verified_leader = blockchain.true_leader(a, b, node_address)

        if not is_verified_leader:
            print("invalid leader", node_address)
            return jsonify("invalid leader!"), 303
        sig = BLS.deserialize(sig, Signature)
        # verify that the proposed block comes from a valid validator (node)
        # with a valid signature
        if (node_address in blockchain.public_key_list
                and block.get_hash() not in blockchain.broadcast_list):
            _block = copy(block)
            _block.signature = sig
            _block.signers = [node_address]
            if _block.verify_block_signature(blockchain):

                # print("received proposed block, signature verified")

                blockchain.broadcast_list.append(block.get_hash())
                blockchain.broadcast_it("/proposed", values)
                logger.debug("proposed fxn call using hash: " +
                             block.get_hash())

                blockchain.proposal_queue[a] = block
                # args must be a tuple, not a set
                threading.Thread(
                    target=blockchain.sign_proposed_block,
                    args=(blockchain.sign_proposed_block_delay,)).start()
            else:
                print("verify_block_signature")
                return jsonify("block signature couldn't verified"), 304
        return jsonify("True"), 200
Example #18
def test_vectors2():
    m1 = bytes([1, 2, 3, 40])
    m2 = bytes([5, 6, 70, 201])
    m3 = bytes([9, 10, 11, 12, 13])
    m4 = bytes([15, 63, 244, 92, 0, 1])

    sk1 = PrivateKey.from_seed(bytes([1, 2, 3, 4, 5]))
    sk2 = PrivateKey.from_seed(bytes([1, 2, 3, 4, 5, 6]))

    sig1 = sk1.sign(m1)
    sig2 = sk2.sign(m2)
    sig3 = sk2.sign(m1)
    sig4 = sk1.sign(m3)
    sig5 = sk1.sign(m1)
    sig6 = sk1.sign(m4)

    sig_L = BLS.aggregate_sigs([sig1, sig2])
    sig_R = BLS.aggregate_sigs([sig3, sig4, sig5])
    assert(BLS.verify(sig_L))
    assert(BLS.verify(sig_R))

    sig_final = BLS.aggregate_sigs([sig_L, sig_R, sig6])
    assert(sig_final.serialize() == bytes.fromhex("07969958fbf82e65bd13ba0749990764cac81cf10d923af9fdd2723f1e3910c3fdb874a67f9d511bb7e4920f8c01232b12e2fb5e64a7c2d177a475dab5c3729ca1f580301ccdef809c57a8846890265d195b694fa414a2a3aa55c32837fddd80"))
    assert(BLS.verify(sig_final))
    quotient = sig_final.divide_by([sig2, sig5, sig6])
    assert(BLS.verify(quotient))
    assert(BLS.verify(sig_final))
    assert(quotient.serialize() == bytes.fromhex("8ebc8a73a2291e689ce51769ff87e517be6089fd0627b2ce3cd2f0ee1ce134b39c4da40928954175014e9bbe623d845d0bdba8bfd2a85af9507ddf145579480132b676f027381314d983a63842fcc7bf5c8c088461e3ebb04dcf86b431d6238f"))
    assert(quotient.divide_by([]) == quotient)
    # a bare except would also swallow the assert, so use a flag instead
    threw = False
    try:
        quotient.divide_by([sig6])
    except Exception:
        threw = True
    assert threw  # should fail: [sig6] is not a subset of the quotient
    sig_final.divide_by([sig1]) # Should not throw
    threw = False
    try:
        sig_final.divide_by([sig_L])  # should throw: sig_L is not unique
    except Exception:
        threw = True
    assert threw

    # Divide by aggregate
    sig7 = sk2.sign(m3)
    sig8 = sk2.sign(m4)
    sig_R2 = BLS.aggregate_sigs([sig7, sig8])
    sig_final2 = BLS.aggregate_sigs([sig_final, sig_R2])
    quotient2 = sig_final2.divide_by([sig_R2])
    assert(BLS.verify(quotient2))
    assert(quotient2.serialize() == bytes.fromhex("06af6930bd06838f2e4b00b62911fb290245cce503ccf5bfc2901459897731dd08fc4c56dbde75a11677ccfbfa61ab8b14735fddc66a02b7aeebb54ab9a41488f89f641d83d4515c4dd20dfcf28cbbccb1472c327f0780be3a90c005c58a47d3"))
Example #19
    def __init__(self, infile, inject=False, **kwargs):
        self.d = d = pf.getdata(infile, 1)
        m = isfinite(d.flux_1) & (~(d.mflags_1 & 2**3).astype(bool))
        m &= ~binary_dilation((d.quality & 2**20) != 0)

        self.Kp = pf.getval(infile, 'kepmag')
        self.Kp = self.Kp if not isinstance(self.Kp, Undefined) else nan

        self.tm = MA(supersampling=12, nthr=1)
        self.em = MA(supersampling=10, nldc=0, nthr=1)

        self.epic = int(basename(infile).split('_')[1])
        self.time = d.time[m]
        self.flux = (d.flux_1[m] - d.trend_t_1[m] + nanmedian(d.trend_t_1[m]) -
                     d.trend_p_1[m] + nanmedian(d.trend_p_1[m]))
        self.mflux = nanmedian(self.flux)
        self.flux /= self.mflux
        self.flux_e = d.error_1[m] / abs(self.mflux)

        self.flux_r = d.flux_1[m] / self.mflux
        self.trend_t = d.trend_t_1[m] / self.mflux
        self.trend_p = d.trend_p_1[m] / self.mflux

        self.period_range = kwargs.get(
            'period_range', (0.7, 0.98 * (self.time.max() - self.time.min())))
        self.nbin = kwargs.get('nbin', 900)
        self.qmin = kwargs.get('qmin', 0.002)
        self.qmax = kwargs.get('qmax', 0.115)
        self.nf = kwargs.get('nfreq', 10000)

        self.bls = BLS(self.time,
                       self.flux,
                       self.flux_e,
                       period_range=self.period_range,
                       nbin=self.nbin,
                       qmin=self.qmin,
                       qmax=self.qmax,
                       nf=self.nf,
                       pmean='running_median')

        def ndist(p=0.302):
            return 1. - 2 * abs(((self.bls.period - p) % p) / p - 0.5)

        def cmask(s=0.05):
            return 1. - np.exp(-ndist() / s)

        self.bls.pmul = cmask()

        try:
            ar = acor(self.flux_r)[0]
            ac = acor(self.flux)[0]
            ap = acor(self.trend_p)[0]
            at = acor(self.trend_t)[0]
        except RuntimeError:
            ar, ac, ap, at = nan, nan, nan, nan
        self.lcinfo = array(
            (self.epic, self.mflux, self.flux.std(), nan, nan, ar, ac, ap, at),
            dtype=dt_lcinfo)

        self._rbls = None
        self._rtrf = None
        self._rvar = None
        self._rtoe = None
        self._rpol = None
        self._recl = None

        ## Transit fit pv [k u t0 p a i]
        self._pv_bls = None
        self._pv_trf = None

        self.period = None
        self.zero_epoch = None
        self.duration = None
Example #20
def test1():
    seed = bytes([0, 50, 6, 244, 24, 199, 1, 25, 52, 88, 192,
                  19, 18, 12, 89, 6, 220, 18, 102, 58, 209,
                  82, 12, 62, 89, 110, 182, 9, 44, 20, 254, 22])
    sk = PrivateKey.from_seed(seed)
    pk = sk.get_public_key()

    msg = bytes([100, 2, 254, 88, 90, 45, 23])

    sig = sk.sign(msg)

    sk_bytes = sk.serialize()
    pk_bytes = pk.serialize()
    sig_bytes = sig.serialize()

    sk = PrivateKey.from_bytes(sk_bytes)
    pk = PublicKey.from_bytes(pk_bytes)
    sig = Signature.from_bytes(sig_bytes)

    sig.set_aggregation_info(AggregationInfo.from_msg(pk, msg))
    assert(BLS.verify(sig))

    seed = bytes([1]) + seed[1:]
    sk1 = PrivateKey.from_seed(seed)
    seed = bytes([2]) + seed[1:]
    sk2 = PrivateKey.from_seed(seed)

    pk1 = sk1.get_public_key()
    sig1 = sk1.sign(msg)

    pk2 = sk2.get_public_key()
    sig2 = sk2.sign(msg)

    agg_sig = BLS.aggregate_sigs([sig1, sig2])
    agg_pubkey = BLS.aggregate_pub_keys([pk1, pk2], True)

    agg_sig.set_aggregation_info(AggregationInfo.from_msg(agg_pubkey, msg))
    assert(BLS.verify(agg_sig))

    seed = bytes([3]) + seed[1:]
    sk3 = PrivateKey.from_seed(seed)
    pk3 = sk3.get_public_key()
    msg2 = bytes([100, 2, 254, 88, 90, 45, 23])

    sig1 = sk1.sign(msg)
    sig2 = sk2.sign(msg)
    sig3 = sk3.sign(msg2)
    agg_sig_l = BLS.aggregate_sigs([sig1, sig2])
    agg_sig_final = BLS.aggregate_sigs([agg_sig_l, sig3])

    sig_bytes = agg_sig_final.serialize()

    agg_sig_final = Signature.from_bytes(sig_bytes)
    a1 = AggregationInfo.from_msg(pk1, msg)
    a2 = AggregationInfo.from_msg(pk2, msg)
    a3 = AggregationInfo.from_msg(pk3, msg2)
    a1a2 = AggregationInfo.merge_infos([a1, a2])
    a_final = AggregationInfo.merge_infos([a1a2, a3])
    print(a_final)
    agg_sig_final.set_aggregation_info(a_final)
    assert(BLS.verify(agg_sig_final))

    assert(BLS.verify(agg_sig_l))
    agg_sig_final = agg_sig_final.divide_by([agg_sig_l])

    assert(BLS.verify(agg_sig_final))

    agg_sk = BLS.aggregate_priv_keys([sk1, sk2], [pk1, pk2], True)
    agg_sk.sign(msg)

    seed = bytes([1, 50, 6, 244, 24, 199, 1, 25, 52, 88, 192,
                  19, 18, 12, 89, 6, 220, 18, 102, 58, 209,
                  82, 12, 62, 89, 110, 182, 9, 44, 20, 254, 22])

    esk = ExtendedPrivateKey.from_seed(seed)
    epk = esk.get_extended_public_key()

    sk_child = esk.private_child(0).private_child(5)
    pk_child = epk.public_child(0).public_child(5)

    assert(sk_child.get_extended_public_key() == pk_child)
Example #21
     
     near_peak_or_trough = np.array(near_peak_or_trough)
     
     t_cut = lc_30min.time[~near_peak_or_trough]
     flux_cut = combined_flux[~near_peak_or_trough]
     flux_err_cut = lc_30min.flux_err[~near_peak_or_trough]
     
 #    flux = combined_flux
 #    frequency, power = LombScargle(lc_30min.time, flux).autopower()
 #    plt.plot(frequency, power)
 #    i = np.argmax(power)
 #    freq_rot = frequency[i]
 #    p_rot = 1/freq_rot
     
     durations = np.linspace(0.05, 0.2, 22) * u.day
     model = BLS(lc_30min.time*u.day, combined_flux)
 #    model = BLS(lc_30min.time*u.day, BLS_flux)
     results = model.autopower(durations, frequency_factor=5.0)
     
     # Find the period and epoch of the peak
     rot_index = np.argmax(results.power)
     rot_period = results.period[rot_index]
     rot_t0 = results.transit_time[rot_index]
 
 ##    p_rot = 3.933 # AB Pic
 ##    p_rot = 3.6 # HIP 1113
 ##    p_rot = 4.36 # HIP 1993
 ##    p_rot = 3.405 # HIP 105388
 ##    p_rot = 3.9 # HIP 32235
 ##    p_rot = 2.67 # AO Men
 ##    p_rot = 0.57045 # 2MASS J23261069-7323498
Example #22
def test_threshold_instance(T, N):
    commitments = []
    # fragments[i][j] = fragment held by player i,
    #                   received from player j
    fragments = [[None] * N for _ in range(N)]
    secrets = []

    # Step 1 : PrivateKey.new_threshold
    for player in range(N):
        secret_key, commi, frags = PrivateKey.new_threshold(T, N)
        for target, frag in enumerate(frags):
            fragments[target][player] = frag
        commitments.append(commi)
        secrets.append(secret_key)

    # Step 2 : Threshold.verify_secret_fragment
    for player_source in range(1, N+1):
        for player_target in range(1, N+1):
            assert Threshold.verify_secret_fragment(
                T, fragments[player_target - 1][player_source - 1],
                player_target, commitments[player_source - 1])

    # Step 3 : master_pubkey = BLS.aggregate_pub_keys(...)
    #          secret_share = BLS.aggregate_priv_keys(...)
    master_pubkey = BLS.aggregate_pub_keys(
        [PublicKey.from_g1(cpoly[0].to_jacobian())
         for cpoly in commitments],
        False)

    secret_shares = [BLS.aggregate_priv_keys(map(PrivateKey, row), None, False)
                     for row in fragments]

    master_privkey = BLS.aggregate_priv_keys(secrets, None, False)
    msg = bytes("Test", "utf-8")
    signature_actual = master_privkey.sign(msg)

    # Step 4 : sig_share = secret_share.sign_threshold(...)
    # Check every combination of T players
    for X in combinations(range(1, N+1), T):
        # X: a list of T indices like [1, 2, 5]

        # Check underlying secret key is correct
        r = Threshold.interpolate_at_zero(X,
                [secret_shares[x-1].value for x in X])
        secret_cand = PrivateKey(r)
        assert secret_cand == master_privkey

        # Check signatures
        signature_shares = [secret_shares[x-1].sign_threshold(msg, x, X)
                            for x in X]
        signature_cand = BLS.aggregate_sigs_simple(signature_shares)
        assert signature_cand == signature_actual

    # Check that the signature actually verifies the message
    agg_info = AggregationInfo.from_msg(master_pubkey, msg)
    signature_actual.set_aggregation_info(agg_info)
    assert BLS.verify(signature_actual)

    # Step 4b : Alternatively, we can add the lagrange coefficients
    # to 'unit' signatures.
    for X in combinations(range(1, N+1), T):
        # X: a list of T indices like [1, 2, 5]

        # Check signatures
        signature_shares = [secret_shares[x-1].sign(msg) for x in X]
        signature_cand = Threshold.aggregate_unit_sigs(signature_shares, X, T)
        assert signature_cand == signature_actual
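
For reference, Threshold.interpolate_at_zero is presumably standard Lagrange
interpolation evaluated at x = 0 over the prime group order; an illustrative
stand-alone version (an assumption about the library's internals, not its
actual code):

    def lagrange_at_zero(X, Y, n):
        # Evaluate the unique degree-(len(X)-1) polynomial through the
        # points (x_i, y_i) at x = 0, working modulo the prime n.
        total = 0
        for i, (xi, yi) in enumerate(zip(X, Y)):
            num, den = 1, 1
            for j, xj in enumerate(X):
                if j != i:
                    num = (num * -xj) % n        # product of (0 - x_j)
                    den = (den * (xi - xj)) % n  # product of (x_i - x_j)
            total = (total + yi * num * pow(den, -1, n)) % n
        return total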
Example #23
## Use the default
#lc3 = tpf2.to_lightcurve(aperture_mask=tpf.pipeline_mask).flatten(window_length=1001)
#lc3 = lc3[(lc3.time < 1346) | (lc3.time > 1350)].remove_outliers(6).fold(period=6.27, phase=0.4).bin(10)
#
## Use a custom aperture
#custom_lc3 = tpf2.to_lightcurve(aperture_mask=aperture_mask).flatten(window_length=1001)
#custom_lc3 = custom_lc3[(custom_lc3.time < 1346) | (custom_lc3.time > 1350)].remove_outliers(6).fold(period=6.27, phase=0.4).bin(10)
#
## Plot both
#ax = lc3.errorbar(label='Default aperture')
#custom_lc3.errorbar(ax=ax, label='Custom aperture')

############################ bls.py stuff #####################################
durations = np.linspace(0.05, 0.2, 22) * u.day
model = BLS(lc.time * u.day, TESSflatten_lc)
results = model.autopower(durations, frequency_factor=5.0)

# Find the period and epoch of the peak
index = np.argmax(results.power)
period = results.period[index]
t0 = results.transit_time[index]
duration = results.duration[index]
transit_info = model.compute_stats(period, duration, t0)

epoch = transit_info['transit_times'][0]

fig, ax = plt.subplots(1, 1, figsize=(8, 4))

# Highlight the harmonics of the peak period
ax.axvline(period.value, alpha=0.4, lw=3)
Example #24
    stmodel = ((tmodel - tmodel.min()) / max(tmodel - tmodel.min()))[:2000]
    intrans = -1 * (stmodel[stmodel < 1] - 1)
    xp = np.linspace(0, 1, len(intrans))
    fmodel = lambda x: np.interp(x, xp, intrans)

    # search for periodic signals
    # compute the phase space to search
    pdata = BLLS(t, data, fmodel, np.linspace(2, 10, 250),
                 np.linspace(0.05, 0.15, 50))
    snr = np.array(pdata.m) * -1 / np.array(pdata.std)

    # figure out BLS scaling to SNR
    bls = BLS(t,
              data,
              np.ones(dataerr.shape[0]),
              period_range=(2, 10),
              q_range=(0.05, 0.15),
              nf=500,
              nbin=100)
    res = bls()
    periods = 1. / bls.freqs

    fig = plt.figure()
    ax0 = plt.subplot2grid((1, 3), (0, 0), colspan=2)
    ax1 = plt.subplot2grid((1, 3), (0, 2))
    #ax2 = plt.subplot2grid((2,2),(1,1))

    ax0.plot(t, data, 'k.', alpha=0.5)
    ax0.set_title("Test Data")
    ax0.set_ylabel("Relative Flux")
    ax0.set_xlabel("Time [day]")
        plt.title(
            '{} LombScargle Periodogram for original lc'.format(target_ID))
        #ls_plot.show(block=True)
        ls_fig.savefig(save_path +
                       '{} - Lomb-Scargle Periodogram for original lc.png'.
                       format(target_ID))
        plt.close(ls_fig)
        i = np.argmax(power)
        freq_rot = freq[i]
        p_rot = 1 / freq_rot
        print('Rotation Period = {:.3f}d'.format(p_rot))

        # From BLS
        durations = np.linspace(0.05, 0.2, 100) * u.day
        #model = BoxLeastSquares(lc_30min.time*u.day, normalized_flux)
        model = BLS(lc_30min.time * u.day, normalized_flux)
        results = model.autopower(durations, frequency_factor=5.0)
        rot_index = np.argmax(results.power)
        rot_period = results.period[rot_index]
        rot_t0 = results.transit_time[rot_index]
        print("Rotation Period from BLS of original = {}d".format(rot_period))

        ########################### batman stuff ######################################
        #      type_of_planet = 'Hot Jupiter'
        #     stellar_type = 'F or G'
        #params = batman.TransitParams()       #object to store transit parameters
        #print("batman works y'all")
        #params.t0 = -4.5                      #time of inferior conjunction
        #params.per = 8.0                      #orbital period (days) - try 0.5, 1, 2, 4, 8 & 10d periods
        # Change for type of star
        #params.rp = 0.05                      #planet radius (in units of stellar radii) - Try between 0.01 and 0.1 (F/G) or 0.025 to 0.18 (K/M)
Example #26
TESSflatten_fig = plt.figure()
TESSflatten_lc = lc[:, 1]
plt.scatter(lc[:, 0], TESSflatten_lc, c='k', s=1, label='TESSflatten flux')
#plt.scatter(p1_times, p1_marker_y, c = 'r', s = 5, label = 'Planet 1')
#plt.scatter(p2_times, p2_marker_y, c = 'g', s = 5, label = 'Planet 2')
plt.title('{} with TESSflatten - Sector {}'.format(target_ID, sector))
plt.ylabel('Normalized Flux')
plt.xlabel('Time - 2457000 [BTJD days]')
#plt.savefig(save_path + '{} - Sector {} - TESSflatten lightcurve.png'.format(target_ID, tpf.sector))
#plt.close(TESSflatten_fig)
plt.show()

# Create periodogram
durations = np.linspace(0.05, 0.2, 22) * u.day
model = BLS(lc[:, 0] * u.day, lc[:, 1])
results = model.autopower(durations, frequency_factor=5.0)

# Find the period and epoch of the peak
index = np.argmax(results.power)
period = results.period[index]
t0 = results.transit_time[index]
duration = results.duration[index]
transit_info = model.compute_stats(period, duration, t0)

epoch = transit_info['transit_times'][0]

fig, ax = plt.subplots(1, 1, figsize=(8, 4))

# Highlight the harmonics of the peak period
ax.axvline(period.value, alpha=0.4, lw=3)