Example #1
    def __init__(self, username, password, key, logfile, verbose=False):
        self.inbox = pydelicious.dlcs_feed('user_inbox', format='rss',
                                           username=username, key=key)
        self.api = pydelicious.DeliciousAPI(username, password)
        self.posters = json_load(pydelicious.json_network(username))
        self.logfile = logfile
        self.verbose = verbose
Example #2
    def update_status(self):
        playerids = None

        if self.secret is None:
            playerids = MultiplayerHandler.session_status(self.session_id)
        else:
            url = 'http://%s/api/v1/multiplayer/status/session/%s' % (self.server, self.session_id)
            try:
                f = urlopen(url)
                try:
                    response = json_load(f)
                    # pylint: disable=E1103
                    if response['ok']:
                        data = response.get('data', None)
                        if data is not None:
                            playerids = data.get('playerids', None)
                    # pylint: enable=E1103
                finally:
                    f.close()
            except URLError:
                # Switch to internal server
                self.server = None
                self.secret = None
                return
            except KeyError:
                return

        playerids = set(playerids or [])
        players = self.players
        for player_id in players.keys():
            if player_id not in playerids:
                del players[player_id]
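The parsing above only reads response['ok'] and, when present, response['data']['playerids'], so the external status endpoint is expected to answer with JSON of roughly this shape (reconstructed from the code, not from any service documentation):

from json import loads as json_loads

sample_body = '{"ok": true, "data": {"playerids": ["player-1", "player-2"]}}'
response = json_loads(sample_body)
assert response['ok'] is True
assert response['data']['playerids'] == ['player-1', 'player-2']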
Example #3
def merge(source_files, output_filename="default.json", output_metrics=True):
    """Utility function to merge JSON assets."""
    LOG.info("%i assets -> %s", len(source_files), output_filename)
    merged = { }
    for i, f in enumerate(source_files):
        LOG.info("Processing:%03i:%s", i + 1, f)
        try:
            with open(f, 'r') as source:
                j = json_load(source)
                if isinstance(j, dict):
                    merged = merge_dictionaries(j, merged)
                else:
                    merged = j
        except IOError as e:
            LOG.error("Failed processing: %s", f)
            LOG.error('  >> %s', e)
    try:
        with open(output_filename, 'w') as target:
            LOG.info("Writing:%s", output_filename)
            json_encoder.FLOAT_REPR = float_to_string
            json_dump(merged, target, sort_keys=True, separators=(',', ':'))
    except IOError as e:
        LOG.error('Failed processing: %s', output_filename)
        LOG.error('  >> %s', e)
    else:
        if output_metrics:
            log_metrics(merged)
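merge_dictionaries comes from the surrounding project and its implementation is not shown here; a minimal recursive merge along the following lines (an illustrative sketch, not the project's code) matches how it is called above, with the first argument winning on conflicting keys:

def merge_dictionaries_sketch(source, target):
    # Recursively fold `source` into `target`; `source` wins on conflicts.
    for key, value in source.items():
        if isinstance(value, dict) and isinstance(target.get(key), dict):
            merge_dictionaries_sketch(value, target[key])
        else:
            target[key] = value
    return target

assert merge_dictionaries_sketch({'a': {'x': 1}}, {'a': {'y': 2}, 'b': 3}) == \
    {'a': {'x': 1, 'y': 2}, 'b': 3}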
Example #4
    def update_status(self):
        playerids = None

        if self.secret is None:
            playerids = MultiplayerHandler.session_status(self.session_id)
        else:
            url = 'http://%s/api/v1/multiplayer/status/session/%s' % (
                self.server, self.session_id)
            try:
                f = urlopen(url)
                try:
                    response = json_load(f)
                    # pylint: disable=E1103
                    if response['ok']:
                        data = response.get('data', None)
                        if data is not None:
                            playerids = data.get('playerids', None)
                    # pylint: enable=E1103
                finally:
                    f.close()
            except URLError:
                # Switch to internal server
                self.server = None
                self.secret = None
                return
            except KeyError:
                return

        playerids = set(playerids or [])
        players = self.players
        for player_id in players.keys():
            if player_id not in playerids:
                del players[player_id]
Example #5
def get_settings_definition():
    settings_filename = os.path.join(os.path.dirname(__file__),
                                     'application_settings.json')

    with open(settings_filename) as f:
        settings = json_load(f, object_pairs_hook=OrderedDict)

    return flatten_settings_definition(settings)
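Example #5 passes object_pairs_hook=OrderedDict so the settings keep the order they have in application_settings.json. A standalone illustration of that parameter, assuming json_load is the stdlib json.load / simplejson.load as in Example #20:

from collections import OrderedDict
from json import loads as json_loads

ordered = json_loads('{"b": 1, "a": 2}', object_pairs_hook=OrderedDict)
assert isinstance(ordered, OrderedDict)
assert list(ordered.keys()) == ['b', 'a']  # file order is preserved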
Example #6
def generate_data(country='US'):
    if not os.path.isfile(TOWER_FILE) or not os.path.isfile(AP_FILE):
        tower_data = JSONTupleKeyedDict()
        ap_data = JSONTupleKeyedDict()
        cell_gen = random_cell(country=country)
        wifi_gen = random_ap(country=country)
        for i in range(TESTING_CELL_SUBSET):
            lat = random.randint(BBOX[country]['min_lat'],
                                 BBOX[country]['max_lat'])
            lat = float(lat) / FACTOR
            lon = random.randint(BBOX[country]['min_lon'],
                                 BBOX[country]['max_lon'])
            lon = float(lon) / FACTOR
            tower_data[(lat, lon)] = []
            ap_data[(lat, lon)] = []

            for x in range(random.randint(1, 5)):
                rcell = cell_gen.next()
                data = {
                    "radio": rcell['radio'],
                    "mcc": rcell['mcc'],
                    "mnc": rcell['mnc'],
                    "lac": rcell['lac'],
                    "cid": rcell['cid']
                }
                if data not in tower_data[(lat, lon)]:
                    tower_data[(lat, lon)].append(data)

            for x in range(random.randint(1, 20)):
                rapp = wifi_gen.next()
                ap_data[(lat, lon)].append({"key": rapp['key']})

        with open(TOWER_FILE, 'w') as f:
            f.write(json.dumps(tower_data, cls=LocationDictEncoder))
        with open(AP_FILE, 'w') as f:
            f.write(json.dumps(ap_data, cls=LocationDictEncoder))
    else:
        ap_data = json_load(open(AP_FILE), object_hook=JSONLocationDictDecoder)
        tower_data = json_load(open(TOWER_FILE),
                               object_hook=JSONLocationDictDecoder)

    return tower_data.items(), ap_data.items()
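JSONTupleKeyedDict, LocationDictEncoder and JSONLocationDictDecoder are project-specific helpers; the underlying issue is that JSON object keys must be strings, so the (lat, lon) tuple keys have to be converted on encode and restored on decode. A generic sketch of that round trip (hypothetical helper names, not the project's code):

import json

def encode_tuple_keys(d):
    # Serialise each (lat, lon) tuple key as a "lat,lon" string.
    return dict(('%s,%s' % key, value) for key, value in d.items())

def decode_tuple_keys(d):
    # Restore the tuple keys when loading.
    return dict((tuple(float(part) for part in key.split(',')), value)
                for key, value in d.items())

data = {(37.5, -122.3): [{'key': 'ab:cd:ef'}]}
restored = decode_tuple_keys(json.loads(json.dumps(encode_tuple_keys(data))))
assert restored == data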
Example #7
    def read_metadata_cache(self):
        try:
            file_name = self.get_meta_data_path()
            gzip_file = GzipFile(filename=file_name, mode='rb')
            meta_data_cache = json_load(gzip_file)
            gzip_file.close()
            cache_time = stat(file_name).st_mtime
        except IOError:
            cache_time = -1
            meta_data_cache = {}
        return cache_time, meta_data_cache
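Examples #7 and #9 only show the read side; under the same assumption (the cache lives as gzip-compressed JSON at the path returned by get_meta_data_path), a write-side counterpart could look like this (illustrative sketch, not the original class's code):

from gzip import GzipFile
from json import dumps as json_dumps

def write_metadata_cache(file_name, meta_data_cache):
    # Mirror of read_metadata_cache: serialise the cache and gzip-compress it.
    gzip_file = GzipFile(filename=file_name, mode='wb')
    try:
        gzip_file.write(json_dumps(meta_data_cache).encode('utf-8'))
    finally:
        gzip_file.close()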
Example #8
def generate_data(country='US'):
    if not os.path.isfile(TOWER_FILE) or not os.path.isfile(AP_FILE):
        tower_data = JSONTupleKeyedDict()
        ap_data = JSONTupleKeyedDict()
        cell_gen = random_cell(country=country)
        wifi_gen = random_ap(country=country)
        for i in range(TESTING_CELL_SUBSET):
            lat = random.randint(
                BBOX[country]['min_lat'], BBOX[country]['max_lat'])
            lat = float(lat) / FACTOR
            lon = random.randint(
                BBOX[country]['min_lon'], BBOX[country]['max_lon'])
            lon = float(lon) / FACTOR
            tower_data[(lat, lon)] = []
            ap_data[(lat, lon)] = []

            for x in range(random.randint(1, 5)):
                rcell = cell_gen.next()
                data = {"radio": rcell['radio'],
                        "mcc": rcell['mcc'],
                        "mnc": rcell['mnc'],
                        "lac": rcell['lac'],
                        "cid": rcell['cid']}
                if data not in tower_data[(lat, lon)]:
                    tower_data[(lat, lon)].append(data)

            for x in range(random.randint(1, 20)):
                rapp = wifi_gen.next()
                ap_data[(lat, lon)].append({"key": rapp['key']})

        with open(TOWER_FILE, 'w') as f:
            f.write(json.dumps(tower_data, cls=LocationDictEncoder))
        with open(AP_FILE, 'w') as f:
            f.write(json.dumps(ap_data, cls=LocationDictEncoder))
    else:
        ap_data = json_load(open(AP_FILE),
                            object_hook=JSONLocationDictDecoder)
        tower_data = json_load(open(TOWER_FILE),
                               object_hook=JSONLocationDictDecoder)

    return tower_data.items(), ap_data.items()
Example #9
    def read_metadata_cache(self):
        try:
            file_name = self.get_meta_data_path()
            gzip_file = GzipFile(filename=file_name,
                                 mode='rb')
            meta_data_cache = json_load(gzip_file)
            gzip_file.close()
            cache_time = stat(file_name).st_mtime
        except IOError:
            cache_time = -1
            meta_data_cache = {}
        return cache_time, meta_data_cache
Example #10
def standard_include(infiles):
    """Load and merge all the ``infiles``."""
    if infiles:
        definitions = {}
        for infile in infiles:
            if path_exists(infile):
                with open(infile, 'r') as infile_file:
                    infile_json = json_load(infile_file)
                    definitions = merge_dictionaries(infile_json, definitions)
            else:
                LOG.error('Missing file: %s', infile)
        return JsonAsset(definitions=definitions)
    else:
        return JsonAsset()
    return None
Example #12
    def load_hashes(self, project):
        hashes = set()

        try:
            # Files containing cached hashes are stored in a folder called "__cached_hashes__".
            # The name of the file contains the creation time
            # so we skip files that are too old
            hashes_folder = join(self.cache_dir, self._cached_hash_folder)

            stale_time = long(time() - self._cached_hash_ttl) # 30 days

            for file_path in iglob(join(hashes_folder, '*.json')):
                delete_file = True

                try:
                    file_time = long(splitext(basename(file_path))[0])
                    if stale_time < file_time:
                        file_obj = open(file_path, 'rb')
                        hashes_meta = json_load(file_obj)
                        file_obj.close()
                        # pylint: disable=E1103
                        hashes_version = hashes_meta.get('version', 0)
                        if 2 <= hashes_version:
                            cached_hashes = hashes_meta.get('hashes', None)
                            if cached_hashes:
                                delete_file = False
                                hashes_host = hashes_meta.get('host', None)
                                if hashes_host == self.hub_pool.host:
                                    hashes.update(cached_hashes)
                        # pylint: enable=E1103
                except (TypeError, ValueError):
                    pass

                if delete_file:
                    LOG.info('Deleting stale cache file: %s', file_path)
                    remove(file_path)

        except (IOError, error):
            pass
        except Exception as e:
            LOG.error(str(e))

        hashes.update(self.request_hashes(project))

        return hashes
Example #14
    def save_hashes(self, hashes):
        try:
            hashes_folder = join(self.cache_dir, self._cached_hash_folder)
            try:
                makedirs(hashes_folder)
            except OSError as e:
                if e.errno != EEXIST:
                    LOG.error(str(e))
                    return

            # Load existing cache and only save the delta
            for file_path in iglob(join(hashes_folder, '*.json')):
                try:
                    file_obj = open(file_path, 'rb')
                    hashes_meta = json_load(file_obj)
                    file_obj.close()
                    hashes_host = hashes_meta['host']
                    if hashes_host == self.hub_pool.host:
                        hashes.difference_update(hashes_meta['hashes'])
                except (IOError, TypeError, ValueError, KeyError,
                        AttributeError):
                    pass

            if hashes:
                try:
                    file_path = join(hashes_folder, '%d.json' % long(time()))
                    file_obj = open(file_path, 'wb')
                    hashes_meta = {
                        'version': 2,
                        'host': self.hub_pool.host,
                        'hashes': list(hashes)
                    }
                    json_dump(hashes_meta, file_obj, separators=(',', ':'))
                    file_obj.close()
                except IOError:
                    pass

        # pylint: disable=W0703
        except Exception as e:
            LOG.error(str(e))
Example #15
def main():
    output = ''
    exit_code = 3
    url = "http://api.antizapret.info/get.php?item=%s&type=json" % cliargs.host
    result = None
    try:
        result = urllib2.urlopen(url, timeout=cliargs.timeout)
    except (urllib2.URLError, urllib2.HTTPError) as err_msg:
        output = 'Antizapret.info error: %s' % err_msg
        exit_code = 3
    if result is not None:
        # json_load expects a file-like object, so pass the response itself
        data = json_load(result)
        if 'register' in data:
            if data['register'] is None:
                print('OK %s is not found in zapret-info.gov.ru. Updated: %s'
                      % (cliargs.host, data['updateTime']))
                exit(0)
            for i in data['register']:
                output += ('\n%s is found in the register. For more information see: %s'
                           % (i['url'], i['proof']))
                exit_code = 2
    print(output)
    exit(exit_code)
Example #16
    def save_hashes(self, hashes):
        try:
            hashes_folder = join(self.cache_dir, self._cached_hash_folder)
            try:
                makedirs(hashes_folder)
            except OSError as e:
                if e.errno != EEXIST:
                    LOG.error(str(e))
                    return

            # Load existing cache and only save the delta
            for file_path in iglob(join(hashes_folder, '*.json')):
                try:
                    file_obj = open(file_path, 'rb')
                    hashes_meta = json_load(file_obj)
                    file_obj.close()
                    hashes_host = hashes_meta['host']
                    if hashes_host == self.hub_pool.host:
                        hashes.difference_update(hashes_meta['hashes'])
                except (IOError, TypeError, ValueError, KeyError, AttributeError):
                    pass

            if hashes:
                try:
                    file_path = join(hashes_folder, '%d.json' % long(time()))
                    file_obj = open(file_path, 'wb')
                    hashes_meta = {'version': 2,
                                   'host': self.hub_pool.host,
                                   'hashes': list(hashes)}
                    json_dump(hashes_meta, file_obj, separators=(',', ':'))
                    file_obj.close()
                except IOError:
                    pass

        # pylint: disable=W0703
        except Exception as e:
            LOG.error(str(e))
Example #17
    def merge(self, other):
        merged = False
        if self.secret is None:
            merged = MultiplayerHandler.merge_sessions(self.session_id, other.session_id)
        else:
            hmac = _calculate_merge_session_hmac(self.secret, self.session_id, other.session_id)
            url = 'http://%s/api/v1/multiplayer/session/merge/%s/%s/%s' % (self.server,
                                                                           self.session_id,
                                                                           other.session_id,
                                                                           hmac)
            try:
                f = urlopen(url)
                try:
                    response = json_load(f)
                    # pylint: disable=E1103
                    merged = response['ok']
                    # pylint: enable=E1103
                finally:
                    f.close()
            except (URLError, KeyError):
                pass
        if merged:
            self.players.update(other.players)
        return merged
Example #18
    def merge(self, other):
        merged = False
        if self.secret is None:
            merged = MultiplayerHandler.merge_sessions(self.session_id,
                                                       other.session_id)
        else:
            hmac = _calculate_merge_session_hmac(self.secret, self.session_id,
                                                 other.session_id)
            url = 'http://%s/api/v1/multiplayer/session/merge/%s/%s/%s' % (
                self.server, self.session_id, other.session_id, hmac)
            try:
                f = urlopen(url)
                try:
                    response = json_load(f)
                    # pylint: disable=E1103
                    merged = response['ok']
                    # pylint: enable=E1103
                finally:
                    f.close()
            except (URLError, KeyError):
                pass
        if merged:
            self.players.update(other.players)
        return merged
Example #19
def load_json(filename):
    f = open(filename, 'rb')
    data = json_load(f)
    f.close()
    return data
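Example #19 works but never closes the file if json_load raises; the same helper written with a context manager closes it on every path (assuming json_load is the stdlib/simplejson load, as in Example #20):

from json import load as json_load

def load_json(filename):
    # The file is closed even if parsing raises.
    with open(filename, 'rb') as f:
        return json_load(f)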
Example #20
import os
import sys

from simplejson import load as json_load

from matplotlib import pyplot as plt

if len(sys.argv) != 2:
    print("Invalid arguments: " + str(len(sys.argv)))
    exit(1)

config_file = sys.argv[1]
if not os.path.isfile(config_file):
    print(f"File {config_file} not found")
    exit(1)

with open(config_file, 'r') as f:
    config = json_load(f)

DATASET_FILES = config['DATASET_FILES']
MAX_DEPTH1 = config['MAX_DEPTH_MIN']
MAX_DEPTH2 = config['MAX_DEPTH_MAX']
MAX_DEPTH_STEP = config['MAX_DEPTH_STEP']
SEED1 = config['SEED1']
SEED2 = config['SEED2']
OUTLIERS = config['OUTLIERS_PERCENT']
O_STEP = config['OUTLIERS_STEP']
PCA1 = config['PCA1']
PCA2 = config['PCA2']
PCA_STEP = config['PCA_STEP']
SAVE_DIR_ = config['SAVE_DIR']
MULTIPLICIDAD = config['MULTIPLICIDAD']
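Example #20 checks that the config file exists but not that every key is present, so a missing entry surfaces as a KeyError at the assignments above. A minimal config covering all the keys the script reads can be generated like this (all values are placeholders):

import json

sample_config = {
    "DATASET_FILES": ["data/dataset.csv"],  # placeholder path
    "MAX_DEPTH_MIN": 1, "MAX_DEPTH_MAX": 10, "MAX_DEPTH_STEP": 1,
    "SEED1": 0, "SEED2": 42,
    "OUTLIERS_PERCENT": 5, "OUTLIERS_STEP": 1,
    "PCA1": 2, "PCA2": 10, "PCA_STEP": 2,
    "SAVE_DIR": "results/",
    "MULTIPLICIDAD": 1,
}
with open("config.json", "w") as f:
    json.dump(sample_config, f, indent=2)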
Example #21
    def check(self):
        """Check inbox for new items."""
        
        logfh = None
        
        if os.access(self.logfile, os.F_OK):
            if self.verbose:
                print "[LOG] Log file found. Trying to resume...",
            try:
                # read in previous log data for resuming
                logfh = open(self.logfile, 'r')
                logfh.readline()
                logfh.close()
                if self.verbose:
                    print "OK"
            except IOError:
                # most probably, the log file does not exist (yet)
                if self.verbose:
                    print "failed"
        else:
            # log file does not exist, so there isn't any resume data
            # to read in
            pass

        try:
            if self.verbose:
                print "[LOG] Open log file for appending...",
            logfh = open(self.logfile, 'a')
            if self.verbose:
                print "done"
        except IOError:
            if self.verbose:
                print "failed"
            print "[LOG] ERROR: could not open log file for appending"
            self._cleanup()
            return
        
        # query metadata about each entry from delicious.com
        for index, entry in enumerate(self.inbox.entries):
            url = entry.link
            urlmd5 = md5.new(url).hexdigest()
            if entry.author in self.posters:
                if self.verbose:
                    logfh.write("[LOG] %s Processing entry #%s: '%s'\n" % \
                        (strftime("%Y-%m-%d %H:%M:%S"), index + 1, url))
                try:
                    sleep(1) # be nice and wait 1 sec between connects 
                    urlinfo = json_load(pydelicious.dlcs_feed('urlinfo',
                                urlmd5=urlmd5))
                    if urlinfo:
                        urlinfo = urlinfo[0]
                    else:
                        urlinfo = {}
                    title = urlinfo['title']
                    top_tags = urlinfo['top_tags'] or []
                    tagstr = 'via:%s ' % entry.author + \
                        ' '.join([tag.replace(' ','_') for tag in top_tags]) 
                    self.api.posts_add(url, title, tags=tagstr.strip())
                    if self.verbose:
                        logfh.write("[LOG] %s Saved %s\n" % \
                            (strftime("%Y-%m-%d %H:%M:%S"), url))
                except KeyError:
                    pass
                except pydelicious.DeliciousItemExistsError:
                    if self.verbose:
                        logfh.write("[LOG] %s %s already added\n" % \
                            (strftime("%Y-%m-%d %H:%M:%S"), url))
                except:
                    logfh.write("[LOG] %s ERROR: %s\n" % \
                        (strftime("%Y-%m-%d %H:%M:%S"), sys.exc_info()[0]))
                    # clean up
                    logfh.close()
                    raise
            else:
                logfh.write("[LOG] %s ERROR: %s not in network-aborting %s\n" % :\
                    (strftime("%Y-%m-%d %H:%M:%S"), entry.author, url))
                
        # clean up
        logfh.close()