Example 1
    def session_info(self, where, name):
        """
		Get information on the specified session

		If where is '', will auto try the webscarab temp dir
		If where is not '', will try session nicknames 
		@param where: directory name where the sessions are located
		@param name: specific name of subdirectory containing desired session

		@return: C{dict} containing id, domains, transactions, date, seentids
			and whether the session is currently active
		"""
        if not where: where = self.get_tmpdir()
        if not opexists(opjoin(where, name)):
            wstmp = 'webscarab%s.tmp' % name
            if not opexists(opjoin(where, wstmp)):
                return {}
            name = wstmp
        session = {}
        wsdir = opjoin(where, name)
        session['id'] = wsdir
        m = re.search(r'webscarab(\d+).tmp', wsdir)
        if m:
            session['nickname'] = m.group(1)
        elif wsdir.rfind(os.sep) > -1:
            # use only the last path component as the nickname
            session['nickname'] = wsdir[wsdir.rfind(os.sep) + 1:]
        session['domains'] = self.domains_in_dir(wsdir)
        session['transactions'] = self.transactions(wsdir)
        session['date'] = os.path.getctime(wsdir)
        session['seentids'] = []
        if os.path.exists(opjoin(wsdir, 'conversationlog')):
            session['active'] = False
        else:
            session['active'] = True
        return session
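
Note that none of these snippets show their imports; from the way opexists and opjoin are called, they are almost certainly aliases for os.path.exists and os.path.join. A minimal sketch of that assumption, together with the webscarab<N>.tmp naming convention session_info falls back to (the paths here are hypothetical):

# Assumed aliases, not shown in any of the snippets on this page.
from os.path import exists as opexists, join as opjoin

# session_info('', '12') falls back to the webscarab<N>.tmp naming scheme,
# so the directory lookup it performs is roughly:
where = '/tmp'                        # stand-in for self.get_tmpdir()
name = 'webscarab12.tmp'              # hypothetical session directory
print(opexists(opjoin(where, name)))
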
Example 2
        def _test_without_symlink():
            builder = SmallBodiesFileBuilder(self.FILEPATH)
            builder.create_small_body_file()

            self.assertTrue(opexists(self.FILEPATH))
            self.assertFalse(opexists(self.SYMLINK))

            os.remove(self.FILEPATH)
Example 3
def extract_trans(t):
    # Only extract successful (2xx) responses that actually carry a body.
    if not 199 < int(t['code']) < 300: return
    if 'respbody' not in t: return
    if not t['respbody']: return
    try:
        if not opexists(Extract): os.mkdir(Extract)
        pdir = opjoin(Extract, t['hostname'] + '_' + t['port'])
        if not opexists(pdir):
            os.mkdir(pdir)
        # Recreate the request path under the per-host directory (strip the leading '/').
        os.makedirs(opjoin(pdir, t['dir'][1:]))
    except OSError as e:
        if e.errno != 17:  # 17 == EEXIST: the directory already exists, which is fine
            log.error('Error: %s' % e.strerror)
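
extract_trans() never documents its argument; judging from the fields it reads, a transaction record looks roughly like the dict below. Only the key names come from the function body, the values are placeholders.

# Illustrative transaction record for extract_trans(); key names come from the
# function above, the values are made up.
t = {
    'code': '200',              # must be a 2xx status to pass the first guard
    'respbody': '<html>...',    # must be present and non-empty
    'hostname': 'example.com',  # combined with port for the per-host directory
    'port': '80',
    'dir': '/images/logo/',     # leading '/' is stripped before os.makedirs()
}
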
Example 4
    def exec(self):
        """
        Loads resonances and makes list of files contains aggregated the
        resonances' id by asteroid.
        """
        if self._integration.state == _IntegrationState.calc:
            for builder in self._builders:
                planets = builder.planets
                logging.debug('Load resonances for %s' % ', '.join(planets))
                folder = self._integration.get_agres_folder(planets)
                if opexists(folder):
                    rmtree(folder)
                makedirs(folder)

                for i, asteroid_buffer in enumerate(self.get_asteroid_list_gen()):
                    aggregated_resonances = load_resonances(
                        RESONANCE_FILEPATH, asteroid_buffer, builder, True)

                    asteroids_without_resonances = [
                        x for x in aggregated_resonances.keys() if not aggregated_resonances[x]
                    ]
                    for key in asteroids_without_resonances:
                        del aggregated_resonances[key]

                    if asteroids_without_resonances:
                        logging.info('Asteroids %s have no resonances with axis variation: %f',
                                     ' '.join(asteroids_without_resonances), builder.axis_swing)

                    if aggregated_resonances:
                        filename = opjoin(folder, 'agres-%i.json' % i)
                        with open(filename, 'w') as fd:
                            json.dump(aggregated_resonances, fd)

                        self._integration.files_with_aggregated_asteroids.append(filename)
            self._integration.save(self._state)
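
The agres-<i>.json files written above are plain JSON dumps of the aggregated_resonances mapping, so they can be read back with json.load; a small sketch, with a hypothetical filename:

import json

# Read back one of the files written by exec() above; the top level is a
# mapping of asteroid -> its non-empty resonances (a list, per the docstring).
with open('agres-0.json') as fd:    # hypothetical path inside the agres folder
    aggregated = json.load(fd)
for asteroid, resonances in aggregated.items():
    print(asteroid, len(resonances))
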
Example 5
    def __init__(self, catalog: str):
        if opexists(self.state_file):
            self.open()
        else:
            self._state = _IntegrationState.start
        self.catalog = catalog
        self.files_with_aggregated_asteroids = []
Example 6
def _get_last_computed_asteroid(catalog: str, in_path: str) -> int:
    """
    Return sequence number in catalog of first asteroid that doesn't have
    A*.aei file in pointed_path.
    """
    i = 0
    for i, name in enumerate(asteroid_names_gen(catalog)):
        path = opjoin(in_path, 'A%s.aei' % name)
        if not opexists(path):
            break
    return i
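
A hypothetical use of the helper above: resume an interrupted integration from the first asteroid whose A*.aei output is missing. The catalog name and directory below are placeholders.

# 'allnum.cat' and './aei' are placeholder arguments for this sketch.
start = _get_last_computed_asteroid('allnum.cat', './aei')
print('resume integration from catalog position', start)
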
Example 7
    def sessions(self, where):
        """
		Get a list of sessions available in the provided location
		If the provided location is a session directory, figure that out

		@return: C{list} of C{session_info} C{dict}s
		"""
        if self._sessioncache: return self._sessioncache
        sl = []
        if not where: where = self.get_tmpdir()
        if not where: return None
        if not opexists(where): return None
        if opexists(opjoin(where, 'conversations')):
            return [self.session_info(where, '')]
        for d in os.listdir(where):
            if (re.search(r"^webscarab(\d+).tmp", d)
                    or opexists(opjoin(where, d, 'conversations'))):
                sl.append(self.session_info(where, d))
        self._sessioncache = sl
        return sl
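
sessions() returns None for an unusable location, so callers need a guard before iterating; a sketch, where parser stands in for an instance of the class that defines sessions() and session_info() above:

# `parser` is a stand-in for an instance of the class shown above.
found = parser.sessions('') or []    # None (bad location) becomes an empty list
for s in found:
    state = 'active' if s['active'] else 'finished'
    print(s.get('nickname', s['id']), state, len(s['transactions']))
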
Example 8
def _get_from_s3(filepaths: List[str]) -> List[str]:
    """Download the S3 entries in filepaths to S3_FILES_DIR and return their local paths."""
    new_paths = []
    if any(is_s3(x) for x in filepaths):
        conn = S3Connection(CONFIG['s3']['access_key'], CONFIG['s3']['secret_key'])
        bucket = conn.get_bucket(BUCKET)
        for path in filepaths:
            if not is_s3(path):
                continue
            start = path.index(BUCKET)
            filename = path[start + len(BUCKET) + 1:]
            if not opexists(S3_FILES_DIR):
                makedirs(S3_FILES_DIR)
            local_path = opjoin(S3_FILES_DIR, basename(filename))
            if not opexists(local_path):
                s3key = bucket.get_key(filename, validate=False)
                with open(local_path, 'wb') as f:
                    s3key.get_contents_to_file(f)
                if not is_tarfile(local_path):
                    raise FilepathInvalidException('%s is not tar. Local copy %s' %
                                                   (path, local_path))
            new_paths.append(local_path)
    return new_paths
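
_get_from_s3() only downloads and validates the tarballs; unpacking them is left to the caller. A possible follow-up step, with filepaths and the extraction directory as placeholders:

import tarfile

# Unpack the tarballs that _get_from_s3() pulled into S3_FILES_DIR;
# `filepaths` and 'extracted' are placeholders for this sketch.
for local_path in _get_from_s3(filepaths):
    with tarfile.open(local_path) as archive:
        archive.extractall('extracted')
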
Example 9
    def _build_from_dirs(self, for_name: str) -> str:
        res = None
        if self._is_recursive:
            for path_base in self._dirs:
                for filepath in glob.iglob(opjoin(path_base, '**', for_name), recursive=True):
                    res = filepath
                    break
        else:
            for path_base in self._dirs:
                filepath = opjoin(path_base, for_name)
                if opexists(filepath):
                    res = filepath

        return res
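
_build_from_dirs() returns None when nothing matches, despite its str annotation, so callers have to handle that case; builder below stands in for an instance of the class defining it:

# `builder` is a stand-in for an instance of the class shown above.
aei_path = builder._build_from_dirs('A1.aei')
if aei_path is None:
    print('A1.aei was not found in any of the configured directories')
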
Example 10
    def __del__(self):
        if opexists(EXTRACT_PATH):
            shutil.rmtree(EXTRACT_PATH)

        if self._is_clear_downloaded and opexists(S3_FILES_DIR):
            shutil.rmtree(S3_FILES_DIR)
Example 11
    def tearDown(self):
        if opexists(self.FILEPATH):
            os.remove(self.FILEPATH)