Example #1
File: photo.py Project: yqingp/Zeus
    def upload_post_photo(self, path_to_photo):
        # Open the app and fetch the timeline page before posting.
        # If the timeline does not respond, the account status is updated and we bail out.
        if not self.login.open_app():
            return

        time.sleep(random.randint(3, 10))

        self.reload_user(self.username) # TODO

        time.sleep(random.randint(10, 20))
        self.location_search()
        self.qp_batch_fetch()

        time.sleep(random.randint(5, 10))
        upload_id = str(int(time.time() * 1000))
        some_number = str(random.randint(-2 ** 10, 2 ** 10))
        waterfall_id = Signature.generate_UUID(True)

        def upload():
            # Retry until the upload succeeds. Note that this recurses without
            # a limit, so a persistently failing upload will eventually hit
            # Python's recursion limit.
            response = self.upload_photo(path_to_photo, upload_id, some_number,
                                         waterfall_id, force_resize=True, post=True)
            if not response:
                return upload()
            return True

        upload()

        self.session.set_headers(retry_context=True, is_post=True, auth=True)
        width, height = self.get_image_size(path_to_photo)

        data = dict()
        data['timezone_offset'] = self.geo.timezone_offset
        data['_csrftoken'] = self.session.get_csrftoken()
        data['media_folder'] = 'Download'
        data['source_type'] = '4'
        data['_uid'] = self.account.get('user_id')
        data['device_id'] = self.device.get('android_device_id')
        data['_uuid'] = self.device.get('uuid')
        data['creation_logger_session_id'] = Signature.generate_UUID(True)
        # data['location'] = json.dumps({}, separators=(",",":")) # TODO
        # data['suggested_venue_position'] = '-1' # TODO
        data['caption'] = ''
        data['upload_id'] = upload_id
        data['device'] = self.get_device_details()
        data['edits'] = {
            "crop_original_size": [width * 1.0, height * 1.0],
            "crop_center": [0.0, -0.0], # TODO
            "crop_zoom": 1.0,
        }
        data['extra'] = {"source_width": width, "source_height": height}
        # data['is_suggested_venue'] = 'False' # TODO

        try:
            is_photo_posted = self.request.send_request(endpoint=Constants.API_URL1 + 'media/configure/',
                                                        post=data,
                                                        session=self.session
                                                        )

            if is_photo_posted:
                print(colored('*** PHOTO POSTED SUCCESSFULLY ***', 'green', attrs=['bold']))
            else:
                print(colored('*** POSTING PHOTO ATTEMPT FAILED ***', 'red', attrs=['bold']))
        except Exception:
            print(colored('SOMETHING WENT WRONG WHEN POSTING THE PHOTO', 'red', attrs=['bold']))
        update_cookie(self.account.get('username'), self.request.cookie)
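The inline upload() helper above retries by recursing with no upper bound. A bounded variant is sketched below; max_attempts is an illustrative parameter, not part of the original project, and the self.upload_photo signature is assumed to be the one shown above.

        def upload_with_retry(max_attempts=3):
            # Bounded retry loop (illustrative sketch, not the project's code).
            for attempt in range(max_attempts):
                response = self.upload_photo(path_to_photo, upload_id, some_number,
                                             waterfall_id, force_resize=True, post=True)
                if response:
                    return True
                time.sleep(random.randint(3, 10))  # back off between attempts
            return False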
Example #2
 def test_create(self):
     signature_id = Signature.create('Signature Test', self.card_id).id()
     signature = Signature.find_by_id(signature_id)
     self.assertEqual('Signature Test', signature.name)
     self.assertEqual(self.card_id, signature.card.key().id())
Example #3
 def sign_prehashed(self, h):
     r = hash_to_point_prehashed_Fq2(h).to_jacobian()
     aggregation_info = AggregationInfo.from_msg_hash(
         self.get_public_key(), h)
     return Signature.from_g2(self.value * r, aggregation_info)
Example #4
File: photo.py Project: yqingp/Zeus
    def set_profile_photo(self, path_to_photo):

        # Open the app or log in again in order to change the profile photo (this uses the 'loginapi' module).
        self.login.login(False)

        time.sleep(random.randint(4, 10))  # TODO

        self.reload_user(self.username)

        self.feed_user_story(1)
        self.feed_user_story(2)
        self.profile_su_badge()
        self.users_info(self.account.get('user_id'))
        self.qp_batch_fetch()
        self.highlights()
        self.profile_archive_badge()
        self.get_invite_suggestions()

        time.sleep(random.randint(5, 15))
        self.location_search() # TODO

        upload_id = str(int(time.time() * 1000))
        some_number = str(random.randint(-2 ** 10, 2 ** 10))
        waterfall_id = Signature.generate_UUID(True)

        time.sleep(random.randint(10, 15))
        # A failed get_upload_photo only warns; a failed upload_photo aborts.
        try:
            if not self.get_upload_photo(upload_id, some_number, waterfall_id):
                raise Exception('MISSING OR INCOMPATIBLE PHOTO')
        except Exception as e:
            print(e)

        try:
            if not self.upload_photo(path_to_photo, upload_id, some_number, waterfall_id, force_resize=True):
                raise Exception('MISSING OR INCOMPATIBLE PHOTO')
        except Exception as e:
            print(e)
            raise

        data = dict()
        data['_csrftoken'] = self.session.get_csrftoken()
        data['_uuid'] = self.device.get('uuid')
        data['use_fbuploader'] = 'true'
        data['upload_id'] = upload_id  # TODO

        self.session.set_headers(is_post=True)

        is_photo_uploaded = self.request.send_request(endpoint=Constants.API_URL1 + 'accounts/change_profile_picture/',
                                                      post=data,
                                                      with_signature=False,
                                                      session=self.session)

        if is_photo_uploaded:
            print(colored('*** YOUR PROFILE PICTURE IS CHANGED ***', 'green', attrs=['bold']))
        else:
            print(colored('*** CHANGING PROFILE PICTURE ATTEMPT FAILED ***', 'red', attrs=['bold']))

        try:
            update_cookie(self.account.get('username'), self.request.cookie)
        except Exception:
            pass  # Failing to persist the cookie is not fatal here.
Example #5
    def aggregate_sigs(signatures):
        """
        Aggregates many (aggregate) signatures, using a combination of simple
        and secure aggregation. Signatures are grouped based on which ones
        share common messages, and these are all merged securely.
        """
        public_keys = []  # List of lists
        message_hashes = []  # List of lists

        for signature in signatures:
            if signature.aggregation_info.empty():
                raise Exception(
                    "Each signature must have a valid aggregation info")
            public_keys.append(signature.aggregation_info.public_keys)
            message_hashes.append(signature.aggregation_info.message_hashes)

        # Find colliding vectors, save colliding messages
        messages_set = set()
        colliding_messages_set = set()

        for msg_vector in message_hashes:
            messages_set_local = set()
            for msg in msg_vector:
                if msg in messages_set and msg not in messages_set_local:
                    colliding_messages_set.add(msg)
                messages_set.add(msg)
                messages_set_local.add(msg)

        if len(colliding_messages_set) == 0:
            # There are no colliding messages between the groups, so we
            # will just aggregate them all simply. Note that we assume
            # that every group is a valid aggregate signature. If an invalid
            # or insecure signature is given, an invalid signature will
            # be created. We don't verify for performance reasons.
            final_sig = BLS.aggregate_sigs_simple(signatures)
            aggregation_infos = [sig.aggregation_info for sig in signatures]
            final_agg_info = AggregationInfo.merge_infos(aggregation_infos)
            final_sig.set_aggregation_info(final_agg_info)
            return final_sig

        # There are groups that share messages, therefore we need
        # to use a secure form of aggregation. First we find which
        # groups collide, and securely aggregate these. Then, we
        # use simple aggregation at the end.
        colliding_sigs = []
        non_colliding_sigs = []
        colliding_message_hashes = []  # List of lists
        colliding_public_keys = []  # List of lists

        for i in range(len(signatures)):
            group_collides = False
            for msg in message_hashes[i]:
                if msg in colliding_messages_set:
                    group_collides = True
                    colliding_sigs.append(signatures[i])
                    colliding_message_hashes.append(message_hashes[i])
                    colliding_public_keys.append(public_keys[i])
                    break
            if not group_collides:
                non_colliding_sigs.append(signatures[i])

        # Arrange all signatures, sorted by their aggregation info
        colliding_sigs.sort(key=lambda s: s.aggregation_info)

        # Arrange all public keys in sorted order, by (m, pk)
        sort_keys_sorted = []
        for i in range(len(colliding_public_keys)):
            for j in range(len(colliding_public_keys[i])):
                sort_keys_sorted.append((colliding_message_hashes[i][j],
                                         colliding_public_keys[i][j]))
        sort_keys_sorted.sort()
        sorted_public_keys = [pk for (mh, pk) in sort_keys_sorted]

        computed_Ts = hash_pks(len(colliding_sigs), sorted_public_keys)

        # Raise each sig to a power of each t,
        # and multiply all together into agg_sig
        ec = sorted_public_keys[0].value.ec
        agg_sig = JacobianPoint(Fq2.one(ec.q), Fq2.one(ec.q), Fq2.zero(ec.q),
                                True, ec)

        for i, signature in enumerate(colliding_sigs):
            agg_sig += signature.value * computed_Ts[i]

        for signature in non_colliding_sigs:
            agg_sig += signature.value

        final_sig = Signature.from_g2(agg_sig)
        aggregation_infos = [sig.aggregation_info for sig in signatures]
        final_agg_info = AggregationInfo.merge_infos(aggregation_infos)
        final_sig.set_aggregation_info(final_agg_info)

        return final_sig
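The collision-grouping step above is the core of the routine: signature groups whose message-hash vectors share any message must be aggregated securely, while the rest can be aggregated simply. The following standalone sketch isolates just that grouping logic with plain Python sets (split_by_collision is an illustrative helper name, not part of the library):

def split_by_collision(message_hashes):
    # message_hashes: list of lists, one vector of message hashes per signature.
    seen = set()
    colliding = set()
    for vector in message_hashes:
        local = set()
        for msg in vector:
            # A message collides only if it also appears in a *different* vector.
            if msg in seen and msg not in local:
                colliding.add(msg)
            seen.add(msg)
            local.add(msg)
    colliding_idx = [i for i, vector in enumerate(message_hashes)
                     if any(msg in colliding for msg in vector)]
    non_colliding_idx = [i for i in range(len(message_hashes))
                         if i not in colliding_idx]
    return colliding_idx, non_colliding_idx


# Vectors 0 and 2 share b'm1', so they need secure aggregation; vector 1 does not.
print(split_by_collision([[b'm1', b'm2'], [b'm3'], [b'm1']]))  # ([0, 2], [1])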
Example #6
 def sign(self, m):
     r = hash_to_point_Fq2(m).to_jacobian()
     aggregation_info = AggregationInfo.from_msg(self.get_public_key(), m)
     return Signature.from_g2(self.value * r, aggregation_info)
Example #7
    def send_request(self,
                     endpoint=None,
                     post=None,
                     headers=None,
                     with_signature=True,
                     extra_sig=None,
                     timeout=None,
                     account=None,
                     device=None,
                     session=None,
                     params=None,
                     must_respond=False):
        print(colored(endpoint, 'blue', attrs=['bold']))

        logging.basicConfig(filename='logfile.log',
                            level=logging.DEBUG,
                            format='%(asctime)s %(levelname)s %(message)s')
        logger = logging.getLogger('Instagram')
        logger.info(session.headers)
        self.cookie = self.get_cookie_string(session)

        try:
            if post is not None:
                if with_signature:
                    post = json.dumps(post, separators=(',', ':'))
                    post = Signature.generate_signature_data(post)
                    if extra_sig is not None and extra_sig != []:
                        post += "&".join(extra_sig)
                try:
                    session.headers['Content-Length'] = str(
                        len(urlencode(post)))
                except Exception:
                    # post is already a plain string here (signed body), so use its raw length.
                    session.headers['Content-Length'] = str(len(post))

                if 'Content-Encoding' in session.headers.keys():
                    post = json.dumps(post, separators=(',', ':'))
                    post = gzip.compress(post.encode())
                response = session.post(
                    endpoint,
                    data=post,
                    params=params,
                    timeout=30 if timeout is None else timeout,
                    verify=False)

            else:
                response = session.get(
                    endpoint,
                    params=params,
                    timeout=30 if timeout is None else timeout,
                    verify=False)

        except ProxyError as e:
            logger.exception(e)
            print(colored('PROXY ERROR', 'red', attrs=['bold']))
            if must_respond:
                raise
        except TimeoutError as e:
            logger.exception(e)
            print(colored('REQUEST TIMED OUT', 'red', attrs=['bold']))
            if must_respond:
                raise
        except ConnectTimeout as e:
            logger.exception(e)
            print(colored('CONNECT TIMEOUT ERROR', 'red', attrs=['bold']))
            if must_respond:
                raise
        except ConnectionError as e:
            logger.exception(e)
            print(colored('CONNECTION ERROR', 'red', attrs=['bold']))
            if must_respond:
                raise
        except Exception as e:
            print(e)
            print(
                colored('AN UNEXPECTED REQUEST ERROR OCCURRED',
                        'red',
                        attrs=['bold']))
            logger.exception(e)
            if must_respond:
                raise

        else:
            # Print headers to check
            # for k, v in session.headers.items():
            #     print(k + ": " + str(v))
            self.last_response = response
            try:
                print(
                    colored('RESPONSE : ' + str(response.content.decode()),
                            'magenta',
                            attrs=['bold']))
            except Exception as e:
                print(e)
                print('COULD NOT DECODE RESPONSE CONTENT')

            try:
                self.last_json = json.loads(response.text)
            except json.decoder.JSONDecodeError:
                print(colored('RESPONSE UNAVAILABLE', 'red', attrs=['bold']))

            if response.status_code == 200:
                print(
                    colored('STATUS_CODE : ' + str(response.status_code),
                            'green',
                            attrs=['bold']))
                return True

            elif response.status_code == 400:
                print(
                    colored('STATUS_CODE : ' + str(response.status_code),
                            'red',
                            attrs=['bold']))
                # self.last_response = response
                # self.last_json = json.loads(response.text)
                return False
            # elif response.status_code == 429:
            #     # if we come to this error, add 5 minutes of sleep every time we hit the 429 error (a.k.a. soft ban), keep increasing until we are unbanned
            #     if timeout is None:
            #         timeout = 0
            #     timeout += 1
            #     logger.warning(
            #         "That means 'too many requests'. I'll go to sleep "
            #         "for {} seconds.".format(timeout * 15)
            #     )
            #     time.sleep(timeout * 15)
            #     return self.send_request(endpoint, post, headers, with_signature, extra_sig, timeout)
            else:
                # raise Exception("Cannot receive successful response !")
                print(
                    colored('STATUS_CODE : ' + str(response.status_code),
                            'red',
                            attrs=['bold']))
                print("Cannot receive successful response !")
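send_request returns True on HTTP 200, False on HTTP 400, and None for other status codes or transport errors (unless must_respond is set, in which case the exception is re-raised). The call sites in Examples #1 and #4 exercise it roughly as follows (a sketch only; the payload is illustrative):

        # Sketch of a typical POST through send_request (unsigned body), mirroring Example #4.
        data = {
            '_csrftoken': self.session.get_csrftoken(),
            '_uuid': self.device.get('uuid'),
            'upload_id': upload_id,
        }
        self.session.set_headers(is_post=True)
        ok = self.request.send_request(endpoint=Constants.API_URL1 + 'accounts/change_profile_picture/',
                                       post=data,
                                       with_signature=False,
                                       session=self.session,
                                       must_respond=False)
        if not ok:
            print('request failed or returned a non-200 status')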
Example #8
from signature import Signature, ServerType
from tornado.concurrent import run_on_executor
from concurrent.futures import ThreadPoolExecutor
from middleware import *
from event_loop import event_loop

tornado.options.define('ip_blacklist', default=list(), type=list)
tornado.options.define('ip_whitelist', default=list(), type=list)
tornado.options.define('ip_risk_times', default=dict(), type=dict)
tornado.options.define('request_count', default=dict(), type=dict)
tornado.options.define('global_request_count', default=0, type=int)
model_path = "model"
system_config = Config(conf_path="config.yaml",
                       model_path=model_path,
                       graph_path="graph")
sign = Signature(ServerType.TORNADO, system_config)
arithmetic = Arithmetic()
semaphore = asyncio.Semaphore(500)

scheduler = BackgroundScheduler(timezone=utc)


class BaseHandler(RequestHandler):
    def __init__(self, application, request, **kwargs):
        super().__init__(application, request, **kwargs)
        self.exception = Response(system_config.response_def_map)
        self.executor = ThreadPoolExecutor(workers)
        self.image_utils = ImageUtils(system_config)

    @property
    def request_incr(self):
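The excerpt is cut off inside the request_incr property. Going only by the counters defined with tornado.options above, a minimal sketch of what such a property might do follows; this completion is a guess, not the project's actual code:

    @property
    def request_incr(self):
        # Hypothetical completion: bump the global counter defined via
        # tornado.options.define('global_request_count', ...) above.
        tornado.options.options.global_request_count += 1
        return tornado.options.options.global_request_count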
Example #9
from gevent.pywsgi import WSGIServer
from geventwebsocket.handler import WebSocketHandler
from config import Config
from utils import ImageUtils
from constants import Response
from interface import InterfaceManager
from signature import Signature, ServerType, InvalidUsage
from watchdog.observers import Observer
from event_handler import FileEventHandler

# The order cannot be changed: this must run before Flask is imported.
# monkey.patch_all()

app = Flask(__name__)
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
sign = Signature(ServerType.FLASK)
_except = Response()


@cache.cached(timeout=30)
@app.before_request
def before_request():
    try:
        # Fetch the authentication credentials from your own data store here;
        # the hard-coded values below only illustrate the expected format.
        sign.set_auth([{
            'accessKey': system_config.access_key,
            'secretKey': system_config.secret_key
        }])
    except Exception:
        # Replace Exception with the specific exception type you need to handle.
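The snippet is truncated inside the except branch. A minimal sketch of how such a before_request hook might finish is shown below, rejecting the request when credentials cannot be loaded; this completion is an assumption, not the project's code, and it uses flask.abort rather than the project's own InvalidUsage class.

from flask import abort

@app.before_request
def before_request_sketch():
    # Hypothetical completion of the truncated hook above.
    try:
        sign.set_auth([{
            'accessKey': system_config.access_key,
            'secretKey': system_config.secret_key
        }])
    except Exception:
        # Replace Exception with the specific error raised by your credential store.
        abort(401)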
Example #10
def test_threshold_instance(T, N):
    commitments = []
    # fragments[i][j] = fragment held by player i,
    #                   received from player j
    fragments = [[None] * N for _ in range(N)]
    secrets = []

    # Step 1 : Threshold.create
    for player in range(N):
        secret_key, commi, frags = Threshold.create(T, N)
        for target, frag in enumerate(frags):
            fragments[target][player] = frag
        commitments.append(commi)
        secrets.append(secret_key)

    # Step 2 : Threshold.verify_secret_fragment
    for player_source in range(1, N + 1):
        for player_target in range(1, N + 1):
            assert Threshold.verify_secret_fragment(
                player_target, fragments[player_target - 1][player_source - 1],
                commitments[player_source - 1], T)

    # Step 3 : master_pubkey = PublicKey.aggregate(...)
    #          secret_share = PrivateKey.aggregate(...)
    master_pubkey = PublicKey.aggregate(
        [PublicKey.from_g1(cpoly[0].to_jacobian()) for cpoly in commitments],
        False)

    secret_shares = [
        PrivateKey.aggregate(map(PrivateKey, row), None, False)
        for row in fragments
    ]

    master_privkey = PrivateKey.aggregate(secrets, None, False)
    msg = 'Test'
    signature_actual = master_privkey.sign(msg)

    # Step 4 : sig_share = Threshold.sign_with_coefficient(...)
    # Check every combination of T players
    for X in combinations(range(1, N + 1), T):
        # X: a list of T indices like [1, 2, 5]

        # Check underlying secret key is correct
        r = Threshold.interpolate_at_zero(
            X, [secret_shares[x - 1].value for x in X])
        secret_cand = PrivateKey(r)
        assert secret_cand == master_privkey

        # Check signatures
        signature_shares = [
            Threshold.sign_with_coefficient(secret_shares[x - 1], msg, x, X)
            for x in X
        ]
        signature_cand = Signature.aggregate_sigs_simple(signature_shares)
        assert signature_cand == signature_actual

    # Check that the signature actually verifies the message
    agg_info = AggregationInfo.from_msg(master_pubkey, msg)
    signature_actual.set_aggregation_info(agg_info)
    assert signature_actual.verify()

    # Step 4b : Alternatively, we can add the lagrange coefficients
    # to 'unit' signatures.
    for X in combinations(range(1, N + 1), T):
        # X: a list of T indices like [1, 2, 5]

        # Check signatures
        signature_shares = [secret_shares[x - 1].sign(msg) for x in X]
        signature_cand = Threshold.aggregate_unit_sigs(signature_shares, X, T)
        assert signature_cand == signature_actual
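test_threshold_instance walks the full T-of-N flow: each player deals fragments, every fragment is verified against the dealer's commitment, the shares are recombined, and any T of the N shares must reproduce the master signature. A plausible way to drive it (the parameter pairs are illustrative):

def test_threshold():
    # Any T <= N should exercise the same flow; these pairs are only examples.
    for (t, n) in [(1, 1), (1, 2), (2, 2), (2, 3), (3, 5)]:
        test_threshold_instance(t, n)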
Example #11
def test1():
    seed = bytes([
        0, 50, 6, 244, 24, 199, 1, 25, 52, 88, 192, 19, 18, 12, 89, 6, 220, 18,
        102, 58, 209, 82, 12, 62, 89, 110, 182, 9, 44, 20, 254, 22
    ])
    sk = PrivateKey.from_seed(seed)
    pk = sk.get_public_key()

    msg = bytes([100, 2, 254, 88, 90, 45, 23])

    sig = sk.sign(msg)

    sk_bytes = sk.serialize()
    pk_bytes = pk.serialize()
    sig_bytes = sig.serialize()

    sk = PrivateKey.from_bytes(sk_bytes)
    pk = PublicKey.from_bytes(pk_bytes)
    sig = Signature.from_bytes(sig_bytes)

    sig.set_aggregation_info(AggregationInfo.from_msg(pk, msg))
    ok = sig.verify()
    assert (ok)

    seed = bytes([1]) + seed[1:]
    sk1 = PrivateKey.from_seed(seed)
    seed = bytes([2]) + seed[1:]
    sk2 = PrivateKey.from_seed(seed)

    pk1 = sk1.get_public_key()
    sig1 = sk1.sign(msg)

    pk2 = sk2.get_public_key()
    sig2 = sk2.sign(msg)

    agg_sig = Signature.aggregate([sig1, sig2])
    agg_pubkey = PublicKey.aggregate([pk1, pk2], False)

    agg_sig.set_aggregation_info(AggregationInfo.from_msg(agg_pubkey, msg))
    assert (agg_sig.verify())

    seed = bytes([3]) + seed[1:]
    sk3 = PrivateKey.from_seed(seed)
    pk3 = sk3.get_public_key()
    msg2 = bytes([100, 2, 254, 88, 90, 45, 23])

    sig1 = sk1.sign(msg)
    sig2 = sk2.sign(msg)
    sig3 = sk3.sign(msg2)
    agg_sig_l = Signature.aggregate([sig1, sig2])
    agg_sig_final = Signature.aggregate([agg_sig_l, sig3])

    sig_bytes = agg_sig_final.serialize()

    agg_sig_final = Signature.from_bytes(sig_bytes)
    a1 = AggregationInfo.from_msg(pk1, msg)
    a2 = AggregationInfo.from_msg(pk2, msg)
    a3 = AggregationInfo.from_msg(pk3, msg2)
    a1a2 = AggregationInfo.merge_infos([a1, a2])
    a_final = AggregationInfo.merge_infos([a1a2, a3])
    print(a_final)
    agg_sig_final.set_aggregation_info(a_final)
    ok = agg_sig_final.verify()

    ok = agg_sig_l.verify()
    agg_sig_final = agg_sig_final.divide_by([agg_sig_l])

    ok = agg_sig_final.verify()

    agg_sk = PrivateKey.aggregate([sk1, sk2], [pk1, pk2])
    agg_sk.sign(msg)

    seed = bytes([
        1, 50, 6, 244, 24, 199, 1, 25, 52, 88, 192, 19, 18, 12, 89, 6, 220, 18,
        102, 58, 209, 82, 12, 62, 89, 110, 182, 9, 44, 20, 254, 22
    ])

    esk = ExtendedPrivateKey.from_seed(seed)
    epk = esk.get_extended_public_key()

    sk_child = esk.private_child(0).private_child(5)
    pk_child = epk.public_child(0).public_child(5)

    buffer1 = pk_child.serialize()
    buffer2 = sk_child.serialize()

    print(len(buffer1), buffer1)
    print(len(buffer2), buffer2)
    assert (sk_child.get_extended_public_key() == pk_child)