Exemple #1
0
def getBusinessesInfo(searchTerm, lat, lng, radius):
    '''
    Finds all nearby businesses around a given location on a map via the Yelp API.
    We are using Yelp because it can return up to 1000 unique businesses rather than 60 from Google.
    It seems pretty feasible that a person can walk to >60 unique restaurants within a half hour in a city.
    
    Example Call:
        getBusinessesInfo("delis", 37.786882, -122.399972, 1000)

    Args:
        searchTerm (a string): what kind of business the user is searching for
        lat (a float): the latitude of where the user is
        lng (a float): the longitude of where the user is
        radius (an int): the search radius in meters - how far a person is willing to walk from their location


    Returns:
        A dictionary where each key is the unique id string of a business and the value is a dictionary with the following:
            - name (string): name of the business
            - lat (float): latitude location of the business
            - lng (float): longitude location of the business
            - distance (float): how far the distance is from the query latitude and longitude in meters
    '''
    # Yelp returns at most 50 businesses per request, so page through
    # the results with the &offset parameter below.
    limit = 50
    url = "https://api.yelp.com/v3/businesses/search?" + \
          "term=" + searchTerm + \
          "&limit=" + str(limit) + \
          "&radius=" + str(radius) + \
          "&latitude=" + str(lat) + \
          "&longitude=" + str(lng)
    print url
    offset = 0
    d = {}

    # Seed total > offset so the first request always fires; the real
    # total is taken from the first response.
    total = 1
    while offset < total:
        r = requests.get(url + "&offset=" + str(offset), headers=HEADERS)
        # NOTE(review): the local name 'json' shadows the json module here.
        json = byteify(r.json())
        while "total" not in json:
            # A response without "total" means Yelp rate-limited us;
            # back off one second and retry the same page.
            time.sleep(1)
            r = requests.get(url + "&offset=" + str(offset), headers=HEADERS)
            json = byteify(r.json())
            print "waiting"
            print json

        total = json["total"]
        businesses = json["businesses"]
        # Accumulates results keyed by business id into d (see Returns).
        addBusinessesToDict(businesses, radius, d)
        print "fetched from", offset, "entry, got", len(
            businesses), "entries out of", total

        offset += limit

    return d
    def export_swagger_files(self, instance, target_dir):
        """Export the Swagger definition of every API of *instance* into
        *target_dir*, one <basePath-derived name>.json file per API.

        Args:
            instance: instance identifier understood by the token factory.
            target_dir: directory the JSON files are written to (with or
                without a trailing path separator).

        Returns:
            True on success, False on the first failed REST call.
        """
        sas_token = self._token_factory.get_sas_token(instance)
        base_url = self._token_factory.get_base_url(instance)
        api_version = self._token_factory.get_api_version()

        apis_res = requests.get(base_url + 'apis' + api_version,
                                headers = {'Authorization': sas_token})
        if (200 != apis_res.status_code):
            print "Could not retrieve APIs."
            print apis_res.text
            return False

        apis_json = byteify(json.loads(apis_res.text))
        for api_def in apis_json['value']:
            api_url = api_def['id']

            # api_def['id'] starts with '/'; strip it when appending to base_url.
            swagger_res = requests.get(base_url + api_url[1:] + api_version + '&export=true', 
                                       headers={'Authorization': sas_token,
                                                'Accept': 'application/vnd.swagger.doc+json'})
            if (200 != swagger_res.status_code):
                print "Could not export Swagger definition."
                print swagger_res.text
                return False

            # Derive the file name from the API's basePath: '/a/b' -> 'a_b'.
            swagger = json.loads(swagger_res.text)
            id_name = swagger['basePath'].replace('/', '_')
            if (id_name.startswith('_')):
                id_name = id_name[1:]
            sep = ''
            if not target_dir.endswith(os.sep):
                sep = os.sep
            with open(target_dir + sep + id_name + ".json", 'w') as outfile:
                json.dump(swagger, outfile, indent=4)
                
        return True
Exemple #3
0
 def download_prices(self):
     """Fetch the option chain for this expiry from Google Finance and
     populate self.calls / self.puts.

     NOTE(review): fix_output and byteify are module helpers not visible
     here — presumably they repair the endpoint's non-standard JSON and
     convert unicode to byte strings (Python 2); confirm in this module.
     """
     url = "http://www.google.com/finance/option_chain?q=%s&expd=%d&expm=%d&expy=%d&output=json" % \
           (self.parent_stock.get_symbol_for_url(), self.expiryDay, self.expiryMonth, self.expiryYear)
     raw_data = fix_output(urllib2.urlopen(url).read())
     option_data = byteify(json.loads(raw_data))
     self.calls = self._get_prices('call', option_data)
     self.puts = self._get_prices('put', option_data)
Exemple #4
0
    def get_scm_sas_token(self, instance):
        """Return a URL-quoted SAS token for the git (SCM) repository of
        *instance*, enabling git access first when it is disabled.

        Returns:
            The quoted token string on success; the raw HTTP error body
            (a str) when the initial GET fails; False when enabling git
            access fails.
        """
        rest_token = self.get_sas_token(instance)

        git_access = requests.get(self.get_base_url(instance) +
                                  'tenant/access/git' + self.get_api_version(),
                                  headers={'Authorization': rest_token})

        if (requests.codes.ok != git_access.status_code):
            # NOTE(review): returns the error text instead of raising —
            # callers must distinguish this from a real token.
            return git_access.text

        git_data = byteify(json.loads(git_access.text))

        if not git_data['enabled']:
            print "Enabling git repository..."
            enable_res = requests.patch(self.get_base_url(instance) +
                                        'tenant/access/git' +
                                        self.get_api_version(),
                                        headers={'Authorization': rest_token},
                                        json={'enabled': True})
            if (204 != enable_res.status_code):
                print "Failed to enable git access!"
                return False
            # Retry now that git access has been enabled.
            return self.get_scm_sas_token(instance)

        return urllib.quote_plus(
            self.get_sas_token_internal(git_data['id'],
                                        git_data['primaryKey']))
Exemple #5
0
    def get_params(internal_entity_id, cache, log):
        """Return the wrapped parameter structure for *internal_entity_id*
        from the cache.

        The id must be an internal id — from one of the 'id' columns of the
        cache tables, NOT from Oracle.

        Args:
            internal_entity_id: internal module/entity id (>= 0).
            cache: SQLAlchemy-style session used to query ParamsCached.
            log: logger exposing .error().

        Returns:
            The wrapped parameter structure on success, None when the id is
            not cached or the query fails, or -2 on invalid input.
        """
        if internal_entity_id < 0 or cache is None:
            log.error('ERROR: get_module - input parameters error')
            return -2

        try:
            cached_params = cache.query(ParamsCached).filter(
                ParamsCached.id == internal_entity_id).first()
            if cached_params is None:
                return None
            # The cached payload is a JSON list of dicts, but
            # ParamsBuilder.buildParameterStructure accepts ModuleitemFull
            # instances, so convert the dicts to objects instead of writing
            # a second, dict-based params builder.
            dict_params = byteify(json.loads(cached_params.data))
            obj_params = convert_module_dict2obj(dict_params, log)
            wrapped_params = ParamsBuilder.buildParameterStructure(
                log, obj_params, cached_params.id, set_default=False)
            print('from cache')
            return wrapped_params

        except Exception as e:
            # BUG FIX: the original used e.args[0], which raises IndexError
            # for argument-less exceptions (and TypeError when args[0] is
            # not a string), masking the real error; str(e) is always safe.
            msg = 'ERROR: Query get_params() Error: ' + str(e)
            log.error(msg)
            return None
Exemple #6
0
 def ask_service(self, trip):
     """
         Makes a request to the webservice, and returns the result

         NOTE(review): despite the docstring, every path here returns
         None — only the sibling get_route() returns route data.
     """
     logger.debug("Trip going from %s To %s" % (str(trip[0]), str(trip[1])))
     # Use the first algorithm the selector offers.
     chosenAlgo = AlgorithmSelector.pickFirst()
     full_url = self.create_url(trip, chosenAlgo)
     req = requests.get(full_url[0], params=full_url[1])
     logger.info("Full url used for connection is: ")
     logger.info(req.url)
     try:
         graphy_json = req.json()
         logger.info("The route returned by the webservice is")
         logger.debug(graphy_json)
         graphy_directions = utils.byteify(graphy_json)
         logger.info("Distance of returned route: \
                 %s" % (graphy_directions['distance']))
         trip_time_in_milliseconds = graphy_directions['time']
         logger.debug("Estimated time of trip in ms: \
                 %s" % (trip_time_in_milliseconds))
         trip_time_in_mins = ((trip_time_in_milliseconds / 1000) / 60)
         logger.info("Estimated trip time: %s" % (trip_time_in_mins))
     except Exception as e:
         logger.warn("Could not retrieve data from the webservice")
         # BUG FIX: exceptions have no .str() method; the original
         # 'e.str()' raised AttributeError inside the handler, hiding
         # the real error.  Use str(e), as get_route() already does.
         logger.warn("Reason: %s" % (str(e)))
    def get_route(self, trip):
        """Query the routing webservice for *trip* (origin, destination)
        and return [distance_in_metres, time_in_minutes]; returns None
        implicitly when the response cannot be decoded."""
        logger.debug("Trip going from %s To %s" % (str(trip[0]), str(trip[1])))
        algorithm = AlgorithmSelector.pickDijkstra()
        logger.info("Using algorithm: %s" % (algorithm,))
        endpoint = self.create_url(trip, algorithm)
        response = requests.get(endpoint[0], params=endpoint[1])
        logger.debug("Full url used for connection is: ")
        logger.debug(response.url)
        try:
            payload = response.json()
            logger.debug("The route returned by the webservice is")
            logger.debug(payload)
            directions = utils.byteify(payload)
            distance_metres = directions['distance']
            logger.debug("Distance of returned route: %s metres" % (distance_metres,))
            time_ms = directions['time']
            logger.debug("Estimated time of trip in ms: %s" % (time_ms,))
            time_mins = (time_ms / 1000) / 60
            logger.debug("Estimated trip time: %s mins" % (time_mins,))
            return [distance_metres, time_mins]
        except Exception as e:
            logger.warn("Could not retrieve data from the webservice")
            logger.warn("Reason: %s" % (str(e),))
 def _json_load_byteified(self, file_handle):
     """Deserialize JSON from the open *file_handle*, converting every
     string via byteify; raise WrongObjectException on malformed input."""
     try:
         # object_hook byteifies dicts during parsing; the outer call
         # handles non-dict top-level values.
         parsed = json.load(file_handle, object_hook=byteify)
         return byteify(parsed, ignore_dicts=True)
     except ValueError:
         raise WrongObjectException("Can't load JSON")
Exemple #9
0
 def __call__(self, line):
     """Parse one JSON line and concatenate, in registration order, the
     output of every processor applied to its corresponding field."""
     record = byteify(json.loads(line.strip()))
     collected = []
     for key in self.d_processors:
         processor = self.d_processors[key]
         collected.extend(processor(record[key]))
     return collected
    def extract_swaggerfiles_to_file(self, instance, swaggerfiles_file,
                                     swaggerfiles_dir):
        """Export every API of *instance* as a Swagger JSON file and write
        an index document listing them.

        Each API is exported via the management REST API and saved as
        <basePath-derived name>.json under the base config dir (plus
        *swaggerfiles_dir*, if given).  *swaggerfiles_file* receives an
        index of the form {'swaggerFiles': [{'serviceUrl', 'swagger'}, ...]}
        which update_swagger() can consume later.

        Returns:
            True on success, False on the first failed REST call.
        """
        sas_token = self._token_factory.get_sas_token(instance)
        base_url = self._token_factory.get_base_url(instance)
        api_version = self._token_factory.get_api_version()

        apis_res = requests.get(base_url + 'apis' + api_version,
                                headers={'Authorization': sas_token})
        if (200 != apis_res.status_code):
            print "Could not retrieve APIs."
            print apis_res.text
            return False

        swaggerfiles_json = {'swaggerFiles': []}
        swaggerfiles_list = swaggerfiles_json['swaggerFiles']
        apis_json = byteify(json.loads(apis_res.text))
        for api_def in apis_json['value']:
            api_url = api_def['id']

            # api_def['id'] starts with '/'; strip it before appending.
            swagger_res = requests.get(base_url + api_url[1:] + api_version +
                                       '&export=true',
                                       headers={
                                           'Authorization':
                                           sas_token,
                                           'Accept':
                                           'application/vnd.swagger.doc+json'
                                       })
            if (200 != swagger_res.status_code):
                print "Could not export Swagger definition."
                print swagger_res.text
                return False

            # Derive the file name from the API's basePath: '/a/b' -> 'a_b'.
            swagger = json.loads(swagger_res.text)
            id_name = swagger['basePath'].replace('/', '_')
            if (id_name.startswith('_')):
                id_name = id_name[1:]

            target_dir = self._base_config_dir
            if swaggerfiles_dir:
                target_dir = os.path.join(target_dir, swaggerfiles_dir)
            target_file = id_name + '.json'
            with open(os.path.join(target_dir, target_file), 'w') as outfile:
                json.dump(swagger, outfile, indent=4)

            # The index stores the path relative to the base config dir.
            local_file_name = target_file
            if swaggerfiles_dir:
                local_file_name = os.path.join(swaggerfiles_dir, target_file)

            swaggerfiles_list.append({
                'serviceUrl': api_def['serviceUrl'],
                'swagger': local_file_name
            })

        with open(swaggerfiles_file, 'w') as outfile:
            json.dump(swaggerfiles_json, outfile, indent=4)

        return True
    def update_swagger(self, instance, swaggerfiles):
        """Update existing APIs of *instance* from local Swagger files.

        *swaggerfiles* is an index dict of the form
        {'swaggerFiles': [{'serviceUrl', 'swagger'}, ...]} (as produced by
        extract_swaggerfiles_to_file()).  Each entry is matched to an
        existing API by serviceUrl; APIs that do not yet exist on the
        instance must be imported once via the Web UI first.

        Returns:
            True on success, False on the first failure.
        """
        sas_token = self._token_factory.get_sas_token(instance)
        base_url = self._token_factory.get_base_url(instance)
        api_version = self._token_factory.get_api_version()
        # First, find the ids of the APIs.
        api_res = requests.get(base_url + 'apis' + api_version,
                               headers = {'Authorization': sas_token})
        if (200 != api_res.status_code):
            print "Could not retrieve API information (/api endpoint)."
            print api_res.text
            return False

        apis_json = byteify(json.loads(api_res.text))

        # serviceUrl -> API id, and serviceUrl -> full API definition.
        api_id_bag = {}
        api_bag = {}
        for api_def in apis_json['value']:
            api_url = api_def['serviceUrl']
            api_id_url = api_def['id'] # /apis/3498734a389f7bc83749837493
            api_id = api_id_url[api_id_url.index('/', 2) + 1:]
            api_name = api_def['name']
            print "Found API '" + api_name + "' (id " + api_id + ")."
            api_id_bag[api_url] = api_id
            api_bag[api_url] = api_def

        for swaggerfile in swaggerfiles['swaggerFiles']:
            print "Updating '" + swaggerfile['swagger'] + "'."
            swagger_url = swaggerfile['serviceUrl']
            if swagger_url not in api_id_bag:
                print "Could not find serviceUrl '" + swagger_url + "'. Is it a new API? Import it once first in the Web UI."
                return False

            api_id = api_id_bag[swagger_url]
            swagger_json = self.__load_swagger(instance, swaggerfile['swagger'])
            swag_res = requests.put(base_url + 'apis/' + api_id + api_version + '&import=true',
                                    headers={'Authorization': sas_token,
                                             'If-Match': '*',
                                             'Content-Type': 'application/vnd.swagger.doc+json'},
                                    json = swagger_json)
            if (204 != swag_res.status_code):
                print "Updating the API did not succeed."
                print swag_res.status_code
                return False
            # Re-update the API definition because the Swagger import overwrites the serviceUrl
            api_res = requests.patch(base_url + 'apis/' + api_id + api_version,
                                     headers = {'Authorization': sas_token,
                                                'If-Match': '*'},
                                     json = api_bag[swagger_url])
            if (204 != api_res.status_code):
                print "Could not update serviceUrl (next update will break!)."
                print api_res.text
                return False
            print "Update succeeded."

        return True
    def _json_loads_byteified(self, json_text):
        """Parse *json_text* and return the result with all strings
        converted via byteify.

        Raises:
            WrongObjectException: if json_text is not valid JSON.
        """
        try:
            # object_hook byteifies dicts during parsing; the outer call
            # handles non-dict top-level values.
            return byteify(
                json.loads(json_text, object_hook=byteify),
                ignore_dicts=True
            )
            # BUG FIX: the original had an unreachable 'return res' here;
            # 'res' was never defined — removed.
        except ValueError:
            raise WrongObjectException("Can't load json")
    def __load_swagger(self, instance, swagger_file):
        """Load a Swagger definition from *swagger_file* and prepare it
        for import into *instance*.

        Raises:
            LookupError: if 'basePath' or 'schemes' is missing.
        """
        with open(self.__resolve_file(swagger_file), 'r') as json_file:
            swagger_json = byteify(json.loads(json_file.read()))

        # Mandatory for importing swagger: host is forced to the target
        # instance; basePath and schemes must already be present.
        swagger_json['host'] = self._token_factory.get_host(instance)
        if 'basePath' not in swagger_json:
            raise LookupError("Could not find 'basePath' property.")
        if 'schemes' not in swagger_json:
            raise LookupError("Could not find 'schemes' property.")
        return swagger_json
    def upsert_certificates(self, instance, certificate_infos):
        sas_token = self._token_factory.get_sas_token(instance)
        base_url = self._token_factory.get_base_url(instance)
        api_version = self._token_factory.get_api_version()

        sha1_bucket = {}
        for certificate_info in certificate_infos:
            fingerprint = apim_openssl.pkcs12_fingerprint_local(
                certificate_info['fileName'], certificate_info['password'],
                self._base_config_dir)
            sha1_bucket[fingerprint] = certificate_info

        certs_res = requests.get(base_url + 'certificates' + api_version,
                                 headers={'Authorization': sas_token})
        if (200 != certs_res.status_code):
            print certs_res.text
            return False

        fingerprint_bucket = {}
        certs_json = byteify(json.loads(certs_res.text))
        for cert in certs_json['value']:
            print "Certificate: " + cert['id'] + ", " + cert[u'subject']
            thumbprint = cert['thumbprint']
            cid_string = cert['id']  # /certificates/{unique id}
            cid = cid_string[cid_string.index('/', 2) +
                             1:]  # /certificates/{unique id} -- pick unique id
            print "cid: " + cid
            fingerprint_bucket[thumbprint] = cid

        print fingerprint_bucket

        for fingerprint in fingerprint_bucket:
            if not fingerprint in sha1_bucket:
                cid = fingerprint_bucket[fingerprint]
                print "Will delete cert with fingerprint '" + fingerprint + "' (cid " + cid + ")."
                if not self.__delete_certificate(base_url, sas_token,
                                                 api_version, cid):
                    return False
                print "Deleted cid '" + cid + "'."

        for cert in sha1_bucket:
            if not fingerprint in fingerprint_bucket:
                print "Will add cert '" + sha1_bucket[cert][
                    'fileName'] + "' (fingerprint " + cert + ")"
                if not self.__add_certificate(base_url, sas_token, api_version,
                                              sha1_bucket[fingerprint],
                                              fingerprint):
                    return False
                print "Added cert '" + sha1_bucket[fingerprint][
                    'fileName'] + "'"
            else:
                print "Found certificate with fingerprint '" + fingerprint + "'."

        return True
    def __load_swagger(self, instance, swagger_file):
        """Read a Swagger definition from disk, stamp it with the target
        instance's host and validate the mandatory import properties."""
        resolved_path = self.__resolve_file(swagger_file)
        with open(resolved_path, 'r') as handle:
            definition = byteify(json.loads(handle.read()))

        # Mandatory for importing swagger: host is always overwritten,
        # basePath and schemes must already be present.
        definition['host'] = self._token_factory.get_host(instance)
        if 'basePath' not in definition:
            raise LookupError("Could not find 'basePath' property.")
        if 'schemes' not in definition:
            raise LookupError("Could not find 'schemes' property.")
        return definition
Exemple #16
0
 def __call__(self, line):
     """Parse one JSON line into self.size parallel columns.

     Returns None unless every key in self.keys is present; otherwise each
     expected key's value is run through self.line_processing and its
     elements are appended column-wise."""
     record = byteify(json.loads(line.strip()))
     if len(set(record.keys()) & self.keys) != len(self.keys):
         return None
     columns = [[] for _ in range(self.size)]
     for key in record:
         if key not in self.keys:
             continue
         processed = self.line_processing(record[key])
         assert len(processed) == len(columns)
         for idx, value in enumerate(processed):
             columns[idx].append(value)
     return columns
 def get_admin_sso_link(self, instance):
     """Create and return a single-sign-on URL for user id 1 (presumably
     the built-in administrator) of *instance*.

     Raises:
         RuntimeError: if the REST call does not return 200.
     """
     rest_token = self.get_sas_token(instance)

     sso_res = requests.post(self.get_base_url(instance) + 'users/1/generateSsoUrl' + self.get_api_version(),
                             headers = {'Authorization': rest_token})
     if (200 != sso_res.status_code):
         print "Could not create SSO URL for administrator."
         print sso_res.text
         raise RuntimeError("Could not create SSO URL for administrator")

     sso_json = byteify(json.loads(sso_res.text))
     return sso_json['value']
    def exec_async_operation(self, instance, operation, json_payload):
        """Start a long-running APIM *operation* and poll until it finishes.

        POSTs *json_payload* to base_url + operation; the service must
        answer 202 Accepted with a Location header, which is then polled
        every 5 seconds until the reported status leaves 'InProgress'.

        Returns:
            True when the final status is 'Succeeded', False otherwise.
        """
        sas_token = self._token_factory.get_sas_token(instance)
        base_url = self._token_factory.get_base_url(instance)
        api_version = self._token_factory.get_api_version()

        save_req = requests.post(base_url + operation + api_version,
                                 headers={
                                     'Authorization': sas_token,
                                     'Content-Type': 'application/json'
                                 },
                                 json=json_payload)

        if (202 != save_req.status_code):
            print "Failed to execute operation."
            print "Return Code: " + str(save_req.status_code)
            print save_req.text

            return False

        # 202 Accepted: the Location header points at the status resource.
        location = save_req.headers['Location']
        print location

        ready = False

        while not ready:
            time.sleep(5)
            status_req = requests.get(location,
                                      headers={'Authorization': sas_token})

            if (status_req.status_code > 299):
                print "Fetching the status of the process failed:"
                print status_req.text
                print "Status Code: " + str(status_req.status_code)

                return False

            status_json = byteify(json.loads(status_req.text))
            statusString = status_json['status']

            print "Operation Status: " + statusString

            if (statusString == "InProgress"):
                continue
            if (statusString == "Succeeded"):
                ready = True
                break
            if (statusString == "Failed"):
                print "Operation failed! See status text for more information:"
            else:
                print "Unexpected status string: '" + statusString + "'. Exiting."
            # 'Failed' and unknown statuses share this error path.
            print status_req.text
            return False
        return True
    def extract_swaggerfiles_to_file(self, instance, swaggerfiles_file, swaggerfiles_dir):
        """Export every API of *instance* as a Swagger JSON file and write
        an index document listing them.

        Each API is exported via the management REST API and saved as
        <basePath-derived name>.json under the base config dir (plus
        *swaggerfiles_dir*, if given).  *swaggerfiles_file* receives an
        index of the form {'swaggerFiles': [{'serviceUrl', 'swagger'}, ...]}.

        Returns:
            True on success, False on the first failed REST call.
        """
        sas_token = self._token_factory.get_sas_token(instance)
        base_url = self._token_factory.get_base_url(instance)
        api_version = self._token_factory.get_api_version()

        apis_res = requests.get(base_url + 'apis' + api_version,
                                headers = {'Authorization': sas_token})
        if (200 != apis_res.status_code):
            print "Could not retrieve APIs."
            print apis_res.text
            return False

        swaggerfiles_json = {
            'swaggerFiles': []
        }
        swaggerfiles_list = swaggerfiles_json['swaggerFiles']
        apis_json = byteify(json.loads(apis_res.text))
        for api_def in apis_json['value']:
            api_url = api_def['id']

            # api_def['id'] starts with '/'; strip it before appending.
            swagger_res = requests.get(base_url + api_url[1:] + api_version + '&export=true', 
                                       headers={'Authorization': sas_token,
                                                'Accept': 'application/vnd.swagger.doc+json'})
            if (200 != swagger_res.status_code):
                print "Could not export Swagger definition."
                print swagger_res.text
                return False

            # Derive the file name from the API's basePath: '/a/b' -> 'a_b'.
            swagger = json.loads(swagger_res.text)
            id_name = swagger['basePath'].replace('/', '_')
            if (id_name.startswith('_')):
                id_name = id_name[1:]

            target_dir = self._base_config_dir
            if swaggerfiles_dir:
                target_dir = os.path.join(target_dir, swaggerfiles_dir)
            target_file = id_name + '.json'
            with open(os.path.join(target_dir, target_file), 'w') as outfile:
                json.dump(swagger, outfile, indent=4)

            # The index stores the path relative to the base config dir.
            local_file_name = target_file
            if swaggerfiles_dir:
                local_file_name = os.path.join(swaggerfiles_dir, target_file)

            swaggerfiles_list.append({
                'serviceUrl': api_def['serviceUrl'],
                'swagger': local_file_name
            })

        with open(swaggerfiles_file, 'w') as outfile:
            json.dump(swaggerfiles_json, outfile, indent=4)

        return True
Exemple #20
0
def extract(input_path, self_name):
    """Read every .jsonl chat dump in *input_path* into the module-level
    'messages' dict.

    For each file, messages[<file name without .jsonl>] gets the lowercased
    text of the incoming messages (m['out'] == False) and
    messages[self_name] accumulates the lowercased text of the outgoing
    ones (m['out'] == True) across all files.

    Returns:
        The (mutated) module-level 'messages' dict.
    """
    input_files = os.listdir(input_path)
    messages[self_name] = []

    for f in input_files:
        # BUG FIX: the original concatenated 'input_path + f', which only
        # worked when input_path carried a trailing separator;
        # os.path.join handles both cases.
        with open(os.path.join(input_path, f)) as data_file:
            dump_lines = data_file.readlines()
            dump_json = [utils.byteify(json.loads(l)) for l in dump_lines]
            name = f.replace('.jsonl', '')
            messages[name] = [m['text'].lower() for m in dump_json if 'text' in m and m['out'] == False]
            messages[self_name].extend([m['text'].lower() for m in dump_json if 'text' in m and m['out'] == True])

    return messages
Exemple #21
0
def get_recs():
    """Request recommendations from the Tinder API and return the
    'results' list of the decoded response.

    Side effect: the raw response text is JSON-dumped to 'data.txt' for
    later inspection (kept from the original implementation).
    """
    tinder_headers = {'X-Auth-Token': tinder_token,
                      'Authorization': 'Token token="{0}"'.format(tinder_token).encode('ascii', 'ignore'),
                      'locale': 'en-GB'
                      }
    r = requests.post(base_url+recs_endpoint, headers = tinder_headers)
    with open('data.txt', 'w') as outfile:
        json.dump(r.text, outfile, sort_keys=True, indent=4)
    # BUG FIX/CLEANUP: the original re-read data.txt into unused locals
    # (recs_json, recs_json2) and bound the decoded payload to 'dict',
    # shadowing the builtin; decode the response once under a proper name.
    payload = json.loads(r.text)
    return payload['results']
Exemple #22
0
    def get_admin_sso_link(self, instance):
        """Create and return a single-sign-on URL for user id 1 (presumably
        the built-in administrator) of *instance*.

        Raises:
            RuntimeError: if the REST call does not return 200.
        """
        rest_token = self.get_sas_token(instance)

        sso_res = requests.post(self.get_base_url(instance) +
                                'users/1/generateSsoUrl' +
                                self.get_api_version(),
                                headers={'Authorization': rest_token})
        if (200 != sso_res.status_code):
            print "Could not create SSO URL for administrator."
            print sso_res.text
            raise RuntimeError("Could not create SSO URL for administrator")

        sso_json = byteify(json.loads(sso_res.text))
        return sso_json['value']
Exemple #23
0
def activity_create():
    """Flask view: insert the POSTed seminar form data into
    static/data/activities.json (kept sorted by date, newest first) and
    redirect to the activities page.

    NOTE(review): the 'encoding' keyword of json.load/json.dump is
    Python 2 only; this function will not run unmodified on Python 3.
    """
    data = request.form
    seminar = {u"date": data['date'], u"name": data['name'], u"speaker": data['speaker'], u"paper": data['paper'], u"paper_url": data['paper_url']}

    with open(os.path.join(APP_STATIC, 'data', 'activities.json'), 'r') as f:
        seminar_list = json.load(f, encoding='utf-8')
        seminar_list["activities"].insert(0, seminar)
        seminar_list["activities"].sort(key=lambda x: x['date'], reverse=True)
        seminar_list = byteify(seminar_list)
    # print seminar_list

    with open(os.path.join(APP_STATIC, 'data', 'activities.json'), 'w') as f:
        json.dump(seminar_list, f, ensure_ascii=False, encoding='utf-8', indent=2)
    return redirect(url_for('activities', lang_code=get_locale()))
    def exec_async_operation(self, instance, operation, json_payload):
        """Start a long-running APIM *operation* and poll until it finishes.

        POSTs *json_payload* to base_url + operation; the service must
        answer 202 Accepted with a Location header, which is then polled
        every 5 seconds until the reported status leaves 'InProgress'.

        Returns:
            True when the final status is 'Succeeded', False otherwise.
        """
        sas_token = self._token_factory.get_sas_token(instance)
        base_url = self._token_factory.get_base_url(instance)
        api_version = self._token_factory.get_api_version()

        save_req = requests.post(base_url + operation + api_version, 
            headers = {'Authorization': sas_token, 'Content-Type': 'application/json' },
            json = json_payload)

        if (202 != save_req.status_code):
            print "Failed to execute operation."
            print "Return Code: " + str(save_req.status_code)
            print save_req.text

            return False

        # 202 Accepted: the Location header points at the status resource.
        location = save_req.headers['Location']
        print location

        ready = False

        while not ready:
            time.sleep(5)
            status_req = requests.get(location, headers={'Authorization': sas_token})

            if (status_req.status_code > 299):
                print "Fetching the status of the process failed:"
                print status_req.text
                print "Status Code: " + str(status_req.status_code)

                return False

            status_json = byteify(json.loads(status_req.text))
            statusString = status_json['status']

            print "Operation Status: " + statusString

            if (statusString == "InProgress"):
                continue
            if (statusString == "Succeeded"):
                ready = True
                break
            if (statusString == "Failed"):
                print "Operation failed! See status text for more information:"
            else:
                print "Unexpected status string: '" + statusString + "'. Exiting."
            # 'Failed' and unknown statuses share this error path.
            print status_req.text
            return False
        return True
Exemple #25
0
def get_vocab(min_count):
    """Build the vocabulary file from the train and dev data.

    Counts every whitespace-separated word in the configured languages
    across config.train_data and config.dev_data, then writes the words
    occurring strictly more than *min_count* times to config.words_path,
    most frequent first (one word per line).
    """
    files = [config.train_data, config.dev_data]
    count = defaultdict(int)
    for file_path in files:
        # 'with' guarantees the handle is closed (the original leaked it).
        with open(file_path) as infile:
            for line in infile:
                a = byteify(json.loads(line.strip()))
                for lang in a:
                    if lang not in config.langs:
                        continue
                    text = a[lang]
                    for word in text.split():
                        count[word] += 1
    # items() (rather than the Py2-only iteritems()) yields the same pairs
    # and also works on Python 3.  Keep only words seen > min_count times.
    count = sorted([x for x in count.items() if x[1] > min_count],
                   key=lambda z: z[1], reverse=True)
    with open(config.words_path, 'w') as outfile:
        for w, c in count:
            outfile.write(w + '\n')
    def test_new_1(self):
        """ Post new contract with all data

        For every contract created by _test_OK(delete=True), load the
        reference contract from data/contracts/<contractId>.json, strip
        the server-generated metadata fields from the response, and
        compare the diff against the stored .diff fixture.
        """
        new_contracts = self._test_OK(delete=True)
        for new_contract in new_contracts:
            contract_filename = os.path.join('data', 'contracts',
                                             '{contractId}.json'.format(**{'contractId': new_contract['contractId']}))
            contract_filename_diff = os.path.join('contracts',
                                                  '{contractId}.diff'.format(**{'contractId': new_contract['contractId']}))
            contract = EmpoweringTinyClient.EmpoweringContract()
            contract.load_from_file(contract_filename)
            # Drop server-generated bookkeeping fields before diffing.
            remove_from_dictionary(new_contract, ['_id', '_etag', '_created', '_updated', '_version', '_links'])

            new_diff = str(diff(byteify(new_contract), contract.root))
            # [:-1] drops the fixture file's trailing newline.
            test_diff = open(os.path.join('data', contract_filename_diff)).read()
            self.assertEqual(new_diff, test_diff[:-1])
    def upsert_certificates(self, instance, certificate_infos):
        sas_token = self._token_factory.get_sas_token(instance)
        base_url = self._token_factory.get_base_url(instance)
        api_version = self._token_factory.get_api_version()
        
        sha1_bucket = {}
        for certificate_info in certificate_infos:
            fingerprint = apim_openssl.pkcs12_fingerprint_local(certificate_info['fileName'], certificate_info['password'], self._base_config_dir)
            sha1_bucket[fingerprint] = certificate_info
        
        certs_res = requests.get(base_url + 'certificates' + api_version, 
                                headers = {'Authorization': sas_token})
        if (200 != certs_res.status_code):
            print certs_res.text
            return False
        
        fingerprint_bucket = {}
        certs_json = byteify(json.loads(certs_res.text))
        for cert in certs_json['value']:
            print "Certificate: " + cert['id'] + ", " + cert[u'subject']
            thumbprint = cert['thumbprint']
            cid_string = cert['id'] # /certificates/{unique id}
            cid = cid_string[cid_string.index('/', 2) + 1:] # /certificates/{unique id} -- pick unique id
            print "cid: " + cid
            fingerprint_bucket[thumbprint] = cid

        print fingerprint_bucket

        for fingerprint in fingerprint_bucket:
            if not fingerprint in sha1_bucket:
                cid = fingerprint_bucket[fingerprint]
                print "Will delete cert with fingerprint '" + fingerprint + "' (cid " + cid + ")."
                if not self.__delete_certificate(base_url, sas_token, api_version, cid):
                    return False
                print "Deleted cid '" + cid + "'."
        
        for cert in sha1_bucket:
            if not fingerprint in fingerprint_bucket:
                print "Will add cert '" + sha1_bucket[cert]['fileName'] + "' (fingerprint " + cert + ")"
                if not self.__add_certificate(base_url, sas_token, api_version, sha1_bucket[fingerprint], fingerprint):
                    return False
                print "Added cert '" + sha1_bucket[fingerprint]['fileName'] + "'"
            else:
                print "Found certificate with fingerprint '" + fingerprint + "'."

        return True
Exemple #28
0
def get_recs():
    tinder_headers = {'X-Auth-Token': tinder_token,
                      'Authorization': 'Token token="{0}"'.format(tinder_token).encode('ascii', 'ignore'),
                       'locale': 'en-GB'
                      }
    r = requests.post(base_url+recs_endpoint, headers = tinder_headers)
    with open('data.txt', 'w') as outfile:
        json.dump(r.text, outfile, sort_keys=True, indent=4)
    with open('data.txt') as data_file:
        recs_json = json.load(data_file)
    recs_json2 = utils.byteify(recs_json)
    print r.url
    print r.headers
    print r.request
    print r.status_code
    dict = json.loads(r.text)
    #print dict
    return dict['results']
    def test_get_1(self):
        """Create a contract, fetch it back, and compare against a stored diff.

        Loads a reference contract from disk, posts it through the client,
        retrieves it again, strips server-generated metadata fields, and
        verifies the remaining difference matches the recorded .diff fixture.
        """
        contract_filename = 'test_new_contract1.json'
        contract_filename_diff = 'test_new_contract1.diff'
        new_contract = uempowering.EmpoweringContract()
        new_contract.load_from_file(os.path.join('data', contract_filename))
        self.client.add_contract(new_contract.dump())
        contract = self.client.get_contract(new_contract.root['contractId'])
        # Server-side bookkeeping fields are not part of the contract payload.
        remove_from_dictionary(contract, ['_id', '_etag', '_created', '_updated', '_version', '_links'])

        new_diff = str(diff(byteify(new_contract.root), contract))
        # 'with' guarantees the fixture handle is closed (was leaked before).
        with open(os.path.join('data', contract_filename_diff)) as diff_file:
            test_diff = diff_file.read()
        # The fixture ends with a trailing newline; drop it before comparing.
        self.assertEqual(new_diff, test_diff[:-1])

        if contract and isinstance(contract, dict):
            # Register the created contract for teardown cleanup.
            self.to_delete.append((contract.get('contractId'),
                                   contract.get('_etag')))
    def upsert_properties(self, instance, properties):
        """Synchronize APIm properties with the desired-state dict *properties*.

        Existing properties are updated, missing ones inserted, and
        properties present in APIm but absent from *properties* are deleted.

        Returns:
            True on success, False on the first failed operation.
        """
        sas_token = self._token_factory.get_sas_token(instance)
        base_url = self._token_factory.get_base_url(instance)
        api_version = self._token_factory.get_api_version()

        prop_get = requests.get(base_url + 'properties' + api_version,
                                headers={'Authorization': sas_token})
        if (200 != prop_get.status_code):
            print "Could not get properties from '" + instance + "'."
            print prop_get.text
            return False

        props_json = byteify(json.loads(prop_get.text))

        # Map property name -> unique id (strip the '/properties/' prefix
        # from the full resource id).
        prop_id_bag = {}
        for prop in props_json['value']:
            prop_id = prop['id']
            prop_id_bag[prop['name']] = prop_id[prop_id.index('/', 2) + 1:]

        for prop_name in properties:
            if prop_name in prop_id_bag:
                print "Updating '" + prop_name + "'"
                if not self.__update_property(
                        base_url, sas_token, api_version, prop_name,
                        prop_id_bag[prop_name], properties[prop_name]):
                    return False
            else:
                print "Inserting '" + prop_name + "'"
                if not self.__insert_property(base_url, sas_token, api_version,
                                              prop_name,
                                              properties[prop_name]):
                    return False

        for prop in props_json['value']:
            prop_name = prop['name']
            if not prop_name in properties:
                # Property in APIm, not in JSON, delete it
                print "Deleting property '" + prop_name + "'."
                if not self.__delete_property(base_url, sas_token, api_version,
                                              prop_name,
                                              prop_id_bag[prop_name]):
                    return False

        return True
Exemple #31
0
def get_recs():
    """Fetch Tinder recommendations from the hard-coded recs endpoint.

    The raw response body is round-tripped through 'data.txt' (kept on
    disk as a debugging artifact) before being parsed.

    Returns:
        The 'results' list from the parsed response body.
    """
    url2 = 'https://api.gotinder.com/user/recs'

    tinder_headers2 = {
        'X-Auth-Token':
        tinder_token,
        'Authorization':
        'Token token="{0}"'.format(tinder_token).encode('ascii', 'ignore'),
        'locale':
        'en-GB'
    }
    r = requests.post(url2, headers=tinder_headers2)
    with open('data.txt', 'w') as outfile:
        json.dump(r.text, outfile, sort_keys=True, indent=4)
    with open('data.txt') as data_file:
        recs_json = json.load(data_file)
    recs_json2 = utils.byteify(recs_json)
    # 'payload' rather than 'dict': avoid shadowing the builtin type.
    payload = json.loads(recs_json2)
    return payload['results']
Exemple #32
0
def get_messages(min_word_length, remove_stopwords, adjust_self):
    """Load (or rebuild) the filtered Telegram dump and vectorize it.

    Reads cached messages from FILTERED_DUMP_FILE when present; otherwise
    re-extracts them from RAW_DATA_PATH and writes the cache. Prints a
    per-conversation message count summary.

    Returns:
        The (message, key) pairs produced by utils.messages_to_vectors.
    """
    raw_messages, messages = [], []
    if os.path.isfile(FILTERED_DUMP_FILE):
        with open(FILTERED_DUMP_FILE) as file:
            raw_messages = utils.byteify(json.load(file, encoding='utf-8'))
    else:
        raw_messages = extract_messages.extract(RAW_DATA_PATH, SELF_NAME)
        extract_messages.dump(FILTERED_DUMP_FILE)

    messages = utils.messages_to_vectors(raw_messages, min_word_length,
                                         remove_stopwords, adjust_self)
    print 'Number of messages\n----'
    c = 0
    for key in raw_messages:
        # Count the vectors attributed to this conversation key.
        count = len([(m, k) for (m, k) in messages if k == key])
        print key + ': ' + str(count)
        c += count
    print 'Total: ' + str(c) + '\n----'

    return messages
    def extract_properties(self, instance):
        sas_token = self._token_factory.get_sas_token(instance)
        base_url = self._token_factory.get_base_url(instance)
        api_version = self._token_factory.get_api_version()

        prop_get = requests.get(base_url + 'properties' + api_version,
                                headers={'Authorization': sas_token})
        if (200 != prop_get.status_code):
            print "Could not get properties from '" + instance + "'."
            print prop_get.text
            return False

        props_json = byteify(json.loads(prop_get.text))
        props = {}
        for prop in props_json['value']:
            props[prop['name']] = {
                "value": prop['value'],
                "tags": prop['tags'],
                "secret": prop['secret']
            }
        return props
Exemple #34
0
def request(url, check, close=True, redirect=True, error=False, proxy=None, post=None, headers=None, mobile=False, XHR=False, limit=None, referer=None, cookie=None, compression=True, output='', timeout='30'):
    """Fetch *url* via client.request, retrying through proxies on failure.

    A response is accepted when *check* occurs in its string form (or the
    response is empty). If the direct request fails the check, up to three
    randomly chosen proxy prefixes from get() are tried with the quoted URL
    appended. Returns client.request's result, or None if everything fails.
    """
    try:
        r = client.request(url, close=close, redirect=redirect, proxy=proxy, post=post, headers=headers, mobile=mobile, XHR=XHR, limit=limit, referer=referer, cookie=cookie, compression=compression, output=output, timeout=timeout)
        # When the caller asked for errors to be surfaced, return as-is.
        if r is not None and error is not False: return r
        if check in str(r) or str(r) == '': return r

        # Shuffle the proxy list (twice) and keep the first three.
        proxies = sorted(get(), key=lambda x: random.random())
        proxies = sorted(proxies, key=lambda x: random.random())
        proxies = proxies[:3]

        for p in proxies:
            p += urllib.quote_plus(url)
            if post is not None:
                if isinstance(post, dict):
                    post = utils.byteify(post)
                    post = urllib.urlencode(post)
                p += urllib.quote_plus('?%s' % post)
            r = client.request(p, close=close, redirect=redirect, proxy=proxy, headers=headers, mobile=mobile, XHR=XHR, limit=limit, referer=referer, cookie=cookie, compression=compression, output=output, timeout='20')
            if check in str(r) or str(r) == '': return r
    except:
        # Deliberate best-effort: any failure falls through to implicit None.
        pass
 def extract_properties(self, instance):
     """Fetch all properties of *instance* from the APIm REST API.

     Returns:
         A dict mapping property name to {'value', 'tags', 'secret'},
         or False if the HTTP request fails.
     """
     sas_token = self._token_factory.get_sas_token(instance)
     base_url = self._token_factory.get_base_url(instance)
     api_version = self._token_factory.get_api_version()
     
     prop_get = requests.get(base_url + 'properties' + api_version,
                             headers = {'Authorization': sas_token})
     if (200 != prop_get.status_code):
         print "Could not get properties from '" + instance + "'."
         print prop_get.text
         return False
     
     props_json = byteify(json.loads(prop_get.text))
     props = {}
     for prop in props_json['value']:
         props[prop['name']] = {
             "value": prop['value'],
             "tags": prop['tags'],
             "secret": prop['secret']
         }
     return props
    def export_swagger_files(self, instance, target_dir):
        """Export every API's Swagger definition from APIm into *target_dir*.

        One JSON file is written per API, named after its basePath (leading
        slash stripped, remaining '/' replaced by '_').

        Returns:
            True on success, False on the first failed REST call.
        """
        sas_token = self._token_factory.get_sas_token(instance)
        base_url = self._token_factory.get_base_url(instance)
        api_version = self._token_factory.get_api_version()

        apis_res = requests.get(base_url + 'apis' + api_version,
                                headers={'Authorization': sas_token})
        if (200 != apis_res.status_code):
            print "Could not retrieve APIs."
            print apis_res.text
            return False

        apis_json = byteify(json.loads(apis_res.text))
        for api_def in apis_json['value']:
            api_url = api_def['id']

            # '&export=true' plus the swagger Accept header requests the raw
            # Swagger document rather than the APIm API metadata.
            swagger_res = requests.get(base_url + api_url[1:] + api_version +
                                       '&export=true',
                                       headers={
                                           'Authorization':
                                           sas_token,
                                           'Accept':
                                           'application/vnd.swagger.doc+json'
                                       })
            if (200 != swagger_res.status_code):
                print "Could not export Swagger definition."
                print swagger_res.text
                return False

            swagger = json.loads(swagger_res.text)
            # Derive the output file name from the API's basePath.
            id_name = swagger['basePath'].replace('/', '_')
            if (id_name.startswith('_')):
                id_name = id_name[1:]
            sep = ''
            if not target_dir.endswith(os.sep):
                sep = os.sep
            with open(target_dir + sep + id_name + ".json", 'w') as outfile:
                json.dump(swagger, outfile, indent=4)

        return True
Exemple #37
0
    def __download_expiry_dates(self):
        """Query Google Finance for option expiry dates for self.symbol.

        Tries each market prefix in self.MARKETS until a response contains
        an 'expirations' key; the working symbol form is remembered in
        self.symbol_for_url.

        Returns:
            The parsed JSON dict, or None when no market yields expiry dates.
        """
        option_dates = None
        base_url = "http://www.google.com/finance/option_chain?q=%s&output=json"
        for market in self.MARKETS:
            adjusted_symbol = self.symbol
            if len(market) > 0:
                # add the market as a prefix separated by a colon;  ex. 'NYSE:V'
                adjusted_symbol = '%s%%3A%s' % (market, self.symbol)

            url = base_url % adjusted_symbol
            logger.info('Attempting to get expiry dates for [%s] with URL: [%s]' % (self.symbol, url))
            # fix_output presumably normalizes Google's not-quite-JSON reply
            # into parseable JSON -- TODO confirm against its definition.
            raw_data = fix_output(urllib2.urlopen(url).read())
            option_dates = byteify(json.loads(raw_data))

            if 'expirations' in option_dates:
                self.symbol_for_url = adjusted_symbol
                break

            logger.info('No expiry dates found for symbol [%s] with market [%s]' % (self.symbol, market))
            option_dates = None

        return option_dates
    def get_scm_sas_token(self, instance):
        """Return a URL-encoded SAS token for the instance's git (SCM) access.

        If the git repository is currently disabled it is enabled first and
        the call retries itself once. On an unexpected status code the raw
        response text is returned; returns False if enabling git fails.
        """
        rest_token = self.get_sas_token(instance)
        
        git_access = requests.get(self.get_base_url(instance) + 'tenant/access/git' + self.get_api_version(), 
            headers = {'Authorization': rest_token})
        
        if (requests.codes.ok != git_access.status_code):
            return git_access.text
        
        git_data = byteify(json.loads(git_access.text))
        
        if not git_data['enabled']:
            print "Enabling git repository..."
            enable_res = requests.patch(self.get_base_url(instance) + 'tenant/access/git' + self.get_api_version(),
                                        headers = {'Authorization': rest_token},
                                        json = {'enabled': True})
            if (204 != enable_res.status_code):
                print "Failed to enable git access!"
                return False
            # Retry now that git access has been enabled.
            return self.get_scm_sas_token(instance)

        return urllib.quote_plus(self.get_sas_token_internal(git_data['id'], git_data['primaryKey']))
    def upsert_properties(self, instance, properties):
        """Synchronize APIm properties with the desired-state dict *properties*.

        Existing properties are updated, missing ones inserted, and
        properties present in APIm but absent from *properties* are deleted.

        Returns:
            True on success, False on the first failed operation.
        """
        sas_token = self._token_factory.get_sas_token(instance)
        base_url = self._token_factory.get_base_url(instance)
        api_version = self._token_factory.get_api_version()
        
        prop_get = requests.get(base_url + 'properties' + api_version,
                                headers = {'Authorization': sas_token})
        if (200 != prop_get.status_code):
            print "Could not get properties from '" + instance + "'."
            print prop_get.text
            return False
        
        props_json = byteify(json.loads(prop_get.text))
        
        # Map property name -> unique id (strip the '/properties/' prefix).
        prop_id_bag = {}
        for prop in props_json['value']:
            prop_id = prop['id']
            prop_id_bag[prop['name']] = prop_id[prop_id.index('/', 2) + 1:]

        for prop_name in properties:
            if prop_name in prop_id_bag:
                print "Updating '" + prop_name + "'"
                if not self.__update_property(base_url, sas_token, api_version, prop_name, prop_id_bag[prop_name], properties[prop_name]):
                    return False
            else:
                print "Inserting '" + prop_name + "'"
                if not self.__insert_property(base_url, sas_token, api_version, prop_name, properties[prop_name]):
                    return False

        for prop in props_json['value']:
            prop_name = prop['name']
            if not prop_name in properties:
                # Property in APIm, not in JSON, delete it
                print "Deleting property '" + prop_name + "'."
                if not self.__delete_property(base_url, sas_token, api_version, prop_name, prop_id_bag[prop_name]):
                    return False

        return True
    def extract_certificates_to_file(self, instance, certificates_file):
        """Write the instance's certificate inventory to *certificates_file*.

        Each entry records the certificate subject as 'fileName' and its
        thumbprint as 'password', mirroring the upload-file layout.

        Returns:
            True on success, False if the certificate listing fails.
        """
        sas_token = self._token_factory.get_sas_token(instance)
        base_url = self._token_factory.get_base_url(instance)
        api_version = self._token_factory.get_api_version()

        cert_res = requests.get(base_url + 'certificates' + api_version,
                                headers = {'Authorization': sas_token})
        if 200 != cert_res.status_code:
            print "Certificate extraction failed!"
            print cert_res.text
            return False
            
        cert_json = byteify(json.loads(cert_res.text))
        certs_file_json = { 'certificates': [] }
        cert_list = certs_file_json['certificates']
        for cert in cert_json['value']:
            cert_list.append({
                'fileName': cert['subject'],
                'password': cert['thumbprint']
            })
        with open (certificates_file, 'w') as outfile:
            json.dump(certs_file_json, outfile, indent=4)
            
        return True
    def extract_certificates_to_file(self, instance, certificates_file):
        sas_token = self._token_factory.get_sas_token(instance)
        base_url = self._token_factory.get_base_url(instance)
        api_version = self._token_factory.get_api_version()

        cert_res = requests.get(base_url + 'certificates' + api_version,
                                headers={'Authorization': sas_token})
        if 200 != cert_res.status_code:
            print "Certificate extraction failed!"
            print cert_res.text
            return False

        cert_json = byteify(json.loads(cert_res.text))
        certs_file_json = {'certificates': []}
        cert_list = certs_file_json['certificates']
        for cert in cert_json['value']:
            cert_list.append({
                'fileName': cert['subject'],
                'password': cert['thumbprint']
            })
        with open(certificates_file, 'w') as outfile:
            json.dump(certs_file_json, outfile, indent=4)

        return True
Exemple #42
0
def activity_create():
    """Insert a new seminar activity from the submitted form data.

    Reads the activity fields from the request form, prepends the entry to
    the static activities.json (re-sorted newest-first by date string), and
    redirects to the activities page for the current locale.
    """
    data = request.form
    seminar = {
        u"date": data['date'],
        u"name": data['name'],
        u"speaker": data['speaker'],
        u"paper": data['paper'],
        u"paper_url": data['paper_url']
    }

    with open(os.path.join(APP_STATIC, 'data', 'activities.json'), 'r') as f:
        seminar_list = json.load(f, encoding='utf-8')
        seminar_list["activities"].insert(0, seminar)
        # Keep the list ordered newest-first even if the new entry is older.
        seminar_list["activities"].sort(key=lambda x: x['date'], reverse=True)
        seminar_list = byteify(seminar_list)
    # print seminar_list

    with open(os.path.join(APP_STATIC, 'data', 'activities.json'), 'w') as f:
        json.dump(seminar_list,
                  f,
                  ensure_ascii=False,
                  encoding='utf-8',
                  indent=2)
    return redirect(url_for('activities', lang_code=get_locale()))
def create_token_factory_from_file(instances_file):
    """Build a TokenFactory from a JSON instances file.

    Unicode strings are converted with byteify and environment placeholders
    are resolved via replace_env before construction.
    """
    with open(instances_file, 'r') as json_file:
        raw_config = json.load(json_file)
    return TokenFactory(replace_env(byteify(raw_config)))
 def upsert_certificates_from_file(self, instance, certificates_file):
     """Load a certificates JSON file (env placeholders resolved) and upsert its 'certificates' list."""
     with open(self.__resolve_file(certificates_file), 'r') as json_file:
         json_certificates = replace_env(byteify(json.loads(json_file.read())))
     return self.upsert_certificates(instance, json_certificates['certificates'])
    def update_swagger(self, instance, swaggerfiles):
        """Re-import Swagger definitions into existing APIm APIs.

        For each entry in swaggerfiles['swaggerFiles'], the matching API is
        located by its serviceUrl, the Swagger document is PUT with
        '&import=true', and the original API definition is PATCHed back
        because the import overwrites the serviceUrl.

        Returns:
            True on success, False on the first failure (including a
            serviceUrl with no pre-existing API).
        """
        sas_token = self._token_factory.get_sas_token(instance)
        base_url = self._token_factory.get_base_url(instance)
        api_version = self._token_factory.get_api_version()
        # First, find the ids of the APIs.
        api_res = requests.get(base_url + 'apis' + api_version,
                               headers={'Authorization': sas_token})
        if (200 != api_res.status_code):
            print "Could not retrieve API information (/api endpoint)."
            print api_res.text
            return False

        apis_json = byteify(json.loads(api_res.text))

        # Index the known APIs by serviceUrl: id for the REST path, and the
        # full definition for restoring after import.
        api_id_bag = {}
        api_bag = {}
        for api_def in apis_json['value']:
            api_url = api_def['serviceUrl']
            api_id_url = api_def['id']  # /apis/3498734a389f7bc83749837493
            api_id = api_id_url[api_id_url.index('/', 2) + 1:]
            api_name = api_def['name']
            print "Found API '" + api_name + "' (id " + api_id + ")."
            api_id_bag[api_url] = api_id
            api_bag[api_url] = api_def

        for swaggerfile in swaggerfiles['swaggerFiles']:
            print "Updating '" + swaggerfile['swagger'] + "'."
            swagger_url = swaggerfile['serviceUrl']
            if swagger_url not in api_id_bag:
                print "Could not find serviceUrl '" + swagger_url + "'. Is it a new API? Import it once first in the Web UI."
                return False

            api_id = api_id_bag[swagger_url]
            swagger_json = self.__load_swagger(instance,
                                               swaggerfile['swagger'])
            swag_res = requests.put(base_url + 'apis/' + api_id + api_version +
                                    '&import=true',
                                    headers={
                                        'Authorization':
                                        sas_token,
                                        'If-Match':
                                        '*',
                                        'Content-Type':
                                        'application/vnd.swagger.doc+json'
                                    },
                                    json=swagger_json)
            if (204 != swag_res.status_code):
                print "Updating the API did not succeed."
                print swag_res.status_code
                return False
            # Re-update the API definition because the Swagger import overwrites the serviceUrl
            api_res = requests.patch(base_url + 'apis/' + api_id + api_version,
                                     headers={
                                         'Authorization': sas_token,
                                         'If-Match': '*'
                                     },
                                     json=api_bag[swagger_url])
            if (204 != api_res.status_code):
                print "Could not update serviceUrl (next update will break!)."
                print api_res.text
                return False
            print "Update succeeded."

        return True
Exemple #46
0
def create_token_factory_from_file(instances_file):
    """Load instance definitions from *instances_file* and wrap them in a TokenFactory."""
    with open(instances_file, 'r') as json_file:
        contents = json_file.read()
    instances = byteify(json.loads(contents))
    return TokenFactory(replace_env(instances))
    def serialize(self, obj):
        """Serialize *obj*'s serializable form to a JSON string."""
        payload = byteify(obj.serializable)
        return json.dumps(payload)
 def upsert_properties_from_file(self, instance, properties_file):
     """Load a properties JSON file (env placeholders resolved) and upsert it into *instance*."""
     with open(self.__resolve_file(properties_file), 'r') as json_file:
         json_properties = replace_env(byteify(json.loads(json_file.read())))
     return self.upsert_properties(instance, json_properties)
 def upsert_certificates_from_file(self, instance, certificates_file):
     """Load a certificates JSON file (env placeholders resolved) and upsert its 'certificates' list."""
     with open(self.__resolve_file(certificates_file), 'r') as json_file:
         json_certificates = replace_env(
             byteify(json.loads(json_file.read())))
     return self.upsert_certificates(instance,
                                     json_certificates['certificates'])
 def update_swagger_from_file(self, instance, swaggerfiles_file):
     """Load a swagger-files JSON descriptor and apply it via update_swagger."""
     with open(swaggerfiles_file, 'r') as json_file:
         swaggerfiles = replace_env(byteify(json.loads(json_file.read())))
     return self.update_swagger(instance, swaggerfiles)
 def upsert_properties_from_file(self, instance, properties_file):
     """Load a properties JSON file (env placeholders resolved) and upsert it into *instance*."""
     with open(self.__resolve_file(properties_file), 'r') as json_file:
         json_properties = replace_env(byteify(json.loads(
             json_file.read())))
     return self.upsert_properties(instance, json_properties)
Exemple #52
0
def request(url, close=True, redirect=True, error=False, proxy=None, post=None, headers=None, mobile=False, XHR=False, limit=None, referer=None, cookie=None, compression=True, output='', timeout='30'):
    """Scraper HTTP fetch built on urllib2 with anti-bot workarounds.

    Builds an opener chain (proxy, cookie jar, optional unverified SSL,
    optional no-redirect), fills in browser-like headers, and retries
    through Cloudflare ('cf-browser-verification'), Sucuri and
    Blazingfast challenges when detected in the response body.

    The *output* switch selects what is returned: '' (body, default),
    'cookie', 'geturl', 'headers', 'chunk', 'file_size', or 'extended'
    (a (body, code, response_headers, request_headers, cookie) tuple).
    *limit* caps the number of KiB read ('0' means 224 KiB). Returns None
    on unhandled errors when *error* is False.
    """
    try:
        if not url:
            return

        handlers = []

        # Optional HTTP proxy.
        if not proxy == None:
            handlers += [urllib2.ProxyHandler({'http':'%s' % (proxy)}), urllib2.HTTPHandler]
            opener = urllib2.build_opener(*handlers)
            opener = urllib2.install_opener(opener)


        # A cookie jar is needed when cookies must be returned or the
        # connection is kept open.
        if output == 'cookie' or output == 'extended' or not close == True:
            cookies = cookielib.LWPCookieJar()
            handlers += [urllib2.HTTPHandler(), urllib2.HTTPSHandler(), urllib2.HTTPCookieProcessor(cookies)]
            opener = urllib2.build_opener(*handlers)
            opener = urllib2.install_opener(opener)

        # These Python releases enforce certificate validation; disable it
        # to keep scraping hosts with bad certs working.
        if (2, 7, 8) < sys.version_info < (2, 7, 12):
            try:
                import ssl; ssl_context = ssl.create_default_context()
                ssl_context.check_hostname = False
                ssl_context.verify_mode = ssl.CERT_NONE
                handlers += [urllib2.HTTPSHandler(context=ssl_context)]
                opener = urllib2.build_opener(*handlers)
                opener = urllib2.install_opener(opener)
            except:
                pass

        if url.startswith('//'): url = 'http:' + url

        # Merge caller headers with browser-like defaults (only filled in
        # when the caller did not supply them).
        _headers ={}
        try: _headers.update(headers)
        except: pass
        if 'User-Agent' in _headers:
            pass
        elif not mobile == True:
            #headers['User-Agent'] = agent()
            _headers['User-Agent'] = cache.get(randomagent, 1)
        else:
            _headers['User-Agent'] = 'Apple-iPhone/701.341'
        if 'Referer' in _headers:
            pass
        elif referer is not None:
            _headers['Referer'] = referer
        if not 'Accept-Language' in _headers:
            _headers['Accept-Language'] = 'en-US'
        if 'X-Requested-With' in _headers:
            pass
        elif XHR == True:
            _headers['X-Requested-With'] = 'XMLHttpRequest'
        if 'Cookie' in _headers:
            pass
        elif not cookie == None:
            _headers['Cookie'] = cookie
        if 'Accept-Encoding' in _headers:
            pass
        elif compression and limit is None:
            _headers['Accept-Encoding'] = 'gzip'


        if redirect == False:

            #old implementation
            #class NoRedirection(urllib2.HTTPErrorProcessor):
            #    def http_response(self, request, response): return response

            #opener = urllib2.build_opener(NoRedirection)
            #opener = urllib2.install_opener(opener)

            # Return the 3xx response itself instead of following it.
            class NoRedirectHandler(urllib2.HTTPRedirectHandler):
                def http_error_302(self, req, fp, code, msg, headers):
                    infourl = urllib.addinfourl(fp, headers, req.get_full_url())
                    infourl.status = code
                    infourl.code = code
                    return infourl
                http_error_300 = http_error_302
                http_error_301 = http_error_302
                http_error_303 = http_error_302
                http_error_307 = http_error_302

            opener = urllib2.build_opener(NoRedirectHandler())
            urllib2.install_opener(opener)

            try: del _headers['Referer']
            except: pass

        # A dict POST body is urlencoded; other types are passed through.
        if isinstance(post, dict):
            post = utils.byteify(post)
            post = urllib.urlencode(post)

        url = utils.byteify(url)

        request = urllib2.Request(url, data=post)
        _add_request_header(request, _headers)


        try:
            response = urllib2.urlopen(request, timeout=int(timeout))
        except urllib2.HTTPError as response:

            # 503 may be a Cloudflare browser-verification challenge:
            # solve it via cfcookie and retry with the resulting cookie.
            if response.code == 503:
                cf_result = response.read(5242880)
                try: encoding = response.info().getheader('Content-Encoding')
                except: encoding = None
                if encoding == 'gzip':
                    cf_result = gzip.GzipFile(fileobj=StringIO.StringIO(cf_result)).read()

                if 'cf-browser-verification' in cf_result:

                    netloc = '%s://%s' % (urlparse.urlparse(url).scheme, urlparse.urlparse(url).netloc)
                    
                    if not netloc.endswith('/'): netloc += '/'

                    ua = _headers['User-Agent']

                    cf = cache.get(cfcookie().get, 168, netloc, ua, timeout)

                    _headers['Cookie'] = cf

                    request = urllib2.Request(url, data=post)
                    _add_request_header(request, _headers)

                    response = urllib2.urlopen(request, timeout=int(timeout))
                else:
                    log_utils.log('Request-Error (%s): %s' % (str(response.code), url), 'DEBUG')
                    if error == False: return
            else:
                log_utils.log('Request-Error (%s): %s' % (str(response.code), url), 'DEBUG')
                if error == False: return


        # Alternate outputs that do not need the body.
        if output == 'cookie':
            try: result = '; '.join(['%s=%s' % (i.name, i.value) for i in cookies])
            except: pass
            try: result = cf
            except: pass
            if close == True: response.close()
            return result

        elif output == 'geturl':
            result = response.geturl()
            if close == True: response.close()
            return result

        elif output == 'headers':
            result = response.headers
            if close == True: response.close()
            return result

        elif output == 'chunk':
            try: content = int(response.headers['Content-Length'])
            except: content = (2049 * 1024)
            if content < (2048 * 1024): return
            result = response.read(16 * 1024)
            if close == True: response.close()
            return result

        elif output == 'file_size':
            try: content = int(response.headers['Content-Length'])
            except: content = '0'
            response.close()
            return content
        
        # Read the body, honoring the KiB limit (max 5 MiB by default).
        if limit == '0':
            result = response.read(224 * 1024)
        elif not limit == None:
            result = response.read(int(limit) * 1024)
        else:
            result = response.read(5242880)

        try: encoding = response.info().getheader('Content-Encoding')
        except: encoding = None
        if encoding == 'gzip':
            result = gzip.GzipFile(fileobj=StringIO.StringIO(result)).read()


        # Sucuri cloudproxy challenge: compute the cookie and re-fetch.
        if 'sucuri_cloudproxy_js' in result:
            su = sucuri().get(result)

            _headers['Cookie'] = su

            request = urllib2.Request(url, data=post)
            _add_request_header(request, _headers)

            response = urllib2.urlopen(request, timeout=int(timeout))

            if limit == '0':
                result = response.read(224 * 1024)
            elif not limit == None:
                result = response.read(int(limit) * 1024)
            else:
                result = response.read(5242880)

            try: encoding = response.info().getheader('Content-Encoding')
            except: encoding = None
            if encoding == 'gzip':
                result = gzip.GzipFile(fileobj=StringIO.StringIO(result)).read()

        # Blazingfast challenge: fetch the cookie and retry the request.
        if 'Blazingfast.io' in result and 'xhr.open' in result:
            netloc = '%s://%s' % (urlparse.urlparse(url).scheme, urlparse.urlparse(url).netloc)
            ua = _headers['User-Agent']
            _headers['Cookie'] = cache.get(bfcookie().get, 168, netloc, ua, timeout)

            result = _basic_request(url, headers=_headers, post=post, timeout=timeout, limit=limit)

        if output == 'extended':
            try: response_headers = dict([(item[0].title(), item[1]) for item in response.info().items()])
            except: response_headers = response.headers
            response_code = str(response.code)
            try: cookie = '; '.join(['%s=%s' % (i.name, i.value) for i in cookies])
            except: pass
            try: cookie = cf
            except: pass
            if close == True: response.close()
            return (result, response_code, response_headers, _headers, cookie)
        else:
            if close == True: response.close()
            return result
    except Exception as e:
        log_utils.log('Request-Error: (%s) => %s' % (str(e), url), 'DEBUG')
        return
 def update_swagger_from_file(self, instance, swaggerfiles_file):
     """Load a swagger-files JSON descriptor and apply it via update_swagger."""
     with open(swaggerfiles_file, 'r') as json_file:
         swaggerfiles = replace_env(byteify(json.loads(json_file.read())))
     return self.update_swagger(instance, swaggerfiles)
Exemple #54
0
import utils
import random
import extract_messages

FASTTEXT_TRAIN_FILE = './data/fasttext.train.txt'
FASTTEXT_TEST_FILE = './data/fasttext.test.txt'
FASTTEXT_TEST_VALIDATION_FILE = './data/fasttext.test_validation.txt'
FILTERED_DUMP_FILE = './data/filtered_dump.json'
RAW_DATA_PATH = './telegram-history-dump/output/json/'
SELF_NAME = 'Ferdinand_Muetsch'

messages = []

if os.path.isfile(FILTERED_DUMP_FILE):
    with open(FILTERED_DUMP_FILE) as file:
        messages = utils.byteify(json.load(file, encoding='utf-8'))
else:
    messages = extract_messages.extract(RAW_DATA_PATH, SELF_NAME)
    extract_messages.dump(FILTERED_DUMP_FILE)

lines = []
f_train = open(FASTTEXT_TRAIN_FILE, 'w')
f_test = open(FASTTEXT_TEST_FILE, 'w')
f_validation = open(FASTTEXT_TEST_VALIDATION_FILE, 'w')

for key in messages:
    for msg in messages[key]:
        lines.append('__label__' + key + ' ' + msg.replace('\n', ' ') + '\n')

random.shuffle(lines)