Example #1
 def job(self, run: "Run", payload: dict, device: Device) -> dict:
     now = controller.strip_all(controller.get_time())
     source = Path.cwd() / "projects" / "migrations" / f"backup_{now}.tgz"
     controller.migrate_export(
         Path.cwd(), {"import_export_types": import_classes, "name": f"backup_{now}"}
     )
     with open_tar(source, "w:gz") as tar:
         tar.add(
             Path.cwd() / "projects" / "migrations" / f"backup_{now}", arcname="/"
         )
     ssh_client = SSHClient()
     ssh_client.set_missing_host_key_policy(AutoAddPolicy())
     ssh_client.connect(
         device.ip_address,
         username=device.username,
         password=device.password,
         look_for_keys=False,
     )
     destination = f"{run.sub(run.destination_path, locals())}/backup_{now}.tgz"
     run.transfer_file(ssh_client, [(source, destination)])
     ssh_client.close()
     if run.delete_folder:
         rmtree(Path.cwd() / "projects" / "migrations" / f"backup_{now}")
     if run.delete_archive:
         remove(source)
     return {
         "success": True,
         "result": f"backup stored in {destination} ({device.ip_address})",
     }
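These job snippets (here and in the examples below) are shown without their import headers; the names they rely on presumably resolve along these lines (paramiko assumed for the SSH client; project-specific helpers such as controller, strip_all and fetch_all omitted):

# Presumed imports for the job snippets (not shown on the source page);
# project helpers (controller, strip_all, fetch_all, ...) are omitted.
from datetime import datetime
from json import dump
from os import makedirs, remove
from pathlib import Path
from shutil import rmtree
from tarfile import open as open_tar

from paramiko import AutoAddPolicy, SSHClient  # assumed SSH library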
Example #2
 def job(self, payload: dict, device: Device) -> dict:
     path_backup = Path.cwd() / "logs" / "job_logs"
     now = strip_all(str(datetime.now()))
     path_dir = path_backup / f"logs_{now}"
     source = path_backup / f"logs_{now}.tgz"
     makedirs(path_dir)
     for job in fetch_all("Job"):
         with open(path_dir / f"{job.name}.json", "w") as log_file:
             dump(job.logs, log_file)
     with open_tar(source, "w:gz") as tar:
         tar.add(path_dir, arcname="/")
     ssh_client = SSHClient()
     ssh_client.set_missing_host_key_policy(AutoAddPolicy())
     ssh_client.connect(
         device.ip_address,
         username=device.username,
         password=device.password,
         look_for_keys=False,
     )
     destination = f"{self.destination_path}/logs_{now}.tgz"
     self.transfer_file(ssh_client, [(source, destination)])
     ssh_client.close()
     if self.delete_folder:
         rmtree(path_dir)
     if self.delete_archive:
         remove(source)
     return {
         "success": True,
         "result": f"logs stored in {destination} ({device.ip_address})",
     }
Example #3
def restore(options):
    if options.files and options.backup_file is None:
        raise Exception('no backup file specified')
    if options.files:
        if exists('campaign-data') or \
                exists('simics-workspace/gold-checkpoints'):
            if input('existing data will be deleted before restore '
                     'operation, continue? [Y/n]: ') in ('n', 'N', 'no', 'No',
                                                         'NO'):
                return
            if exists('campaign-data'):
                rmtree('campaign-data')
            if exists('simics-workspace/gold-checkpoints'):
                rmtree('simics-workspace/gold-checkpoints')
        print('restoring files...', end='')
        stdout.flush()
        with open_tar(options.backup_file, 'r:gz') as backup:
            backup.extractall()
        print('done')
    for item in listdir('campaign-data'):
        if '.sql' in item:
            print('restoring database from {}...'.format(item))
            restore_database(options, join('campaign-data', item))
            print('database restored')
            remove(join('campaign-data', item))
            break
    else:
        print('could not find .sql file to restore')
Example #4
 def job(self, payload: dict, device: Device) -> dict:
     now = strip_all(str(datetime.now()))
     source = Path.cwd() / "migrations" / f"backup_{now}.tgz"
     migrate_export(
         Path.cwd(),
         {
             "import_export_types": list(import_properties),
             "name": f"backup_{now}"
         },
     )
     with open_tar(source, "w:gz") as tar:
         tar.add(Path.cwd() / "migrations" / f"backup_{now}", arcname="/")
     ssh_client = SSHClient()
     ssh_client.set_missing_host_key_policy(AutoAddPolicy())
     ssh_client.connect(
         device.ip_address,
         username=device.username,
         password=device.password,
         look_for_keys=False,
     )
     destination = f"{self.destination_path}/backup_{now}.tgz"
     self.transfer_file(ssh_client, [(source, destination)])
     ssh_client.close()
     if self.delete_folder:
         rmtree(Path.cwd() / "migrations" / f"backup_{now}")
     if self.delete_archive:
         remove(source)
     return {
         "success": True,
         "result": f"backup stored in {destination} ({device.ip_address})",
     }
Example #5
 def job(self, device, _):
     path_backup = Path.cwd() / 'logs' / 'job_logs'
     now = strip_all(str(datetime.now()))
     path_dir = path_backup / f'logs_{now}'
     source = path_backup / f'logs_{now}.tgz'
     makedirs(path_dir)
     for job in fetch_all('Job'):
         with open(path_dir / f'{job.name}.json', 'w') as log_file:
             dump(job.logs, log_file)
     with open_tar(source, 'w:gz') as tar:
         tar.add(path_dir, arcname='/')
     ssh_client = SSHClient()
     ssh_client.set_missing_host_key_policy(AutoAddPolicy())
     ssh_client.connect(device.ip_address,
                        username=device.username,
                        password=device.password,
                        look_for_keys=False)
     destination = f'{self.destination_path}/logs_{now}.tgz'
     self.transfer_file(ssh_client, source, destination)
     ssh_client.close()
     if self.delete_folder:
         rmtree(path_dir)
     if self.delete_archive:
         remove(source)
     return {
         'success': True,
         'result': f'logs stored in {destination} ({device.ip_address})'
     }
Example #6
def retrieve_render(handle, folder):
    """Performs a retrieve request on the server with the given handle
    
    The data is decompressed and written in a new subfolder inside 'folder'
    
    Returns th parsed response"""

    data = {
        "pass": password,
        "command": command,
        "action": "retrieve",
        "data": handle,
    }

    # get the tar.gz from the server
    res = None
    try:
        res = post(server_address, data=data, **extra_params)
    except Exception:
        pass

    payload = try_parse_res(res)

    if payload["code"] >= 0:
        # decompress the gzip layer
        decompressed_file = decompress_gzip(bytearray.fromhex(payload["data"]))
        file_like = BytesIO(decompressed_file)

        # extract the tar; the context manager closes the archive
        with open_tar(fileobj=file_like, mode="r") as tar:
            tar.extractall(folder)

    return payload
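decompress_gzip and try_parse_res above are project helpers; for the decompression step the standard library is enough. A minimal sketch, assuming the payload is the hex-encoded gzipped tar used above:

from gzip import decompress
from io import BytesIO
from tarfile import open as open_tar

def extract_hex_gzip_tar(hex_data, folder):
    # hex string -> gzip bytes -> raw tar bytes -> extracted files
    raw_tar = decompress(bytes.fromhex(hex_data))
    with open_tar(fileobj=BytesIO(raw_tar), mode="r") as tar:
        tar.extractall(folder)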
Example #7
 def job(self, device, _):
     now = strip_all(str(datetime.now()))
     source = Path.cwd() / 'migrations' / f'backup_{now}.tgz'
     migrate_export(Path.cwd(), {
         'import_export_types': list(import_properties),
         'name': f'backup_{now}'
     })
     with open_tar(source, 'w:gz') as tar:
         tar.add(
             Path.cwd() / 'migrations' / f'backup_{now}',
             arcname='/'
         )
     ssh_client = SSHClient()
     ssh_client.set_missing_host_key_policy(AutoAddPolicy())
     ssh_client.connect(
         device.ip_address,
         username=device.username,
         password=device.password,
         look_for_keys=False
     )
     destination = f'{self.destination_path}/backup_{now}.tgz'
     self.transfer_file(ssh_client, source, destination)
     ssh_client.close()
     if self.delete_folder:
         rmtree(Path.cwd() / 'migrations' / f'backup_{now}')
     if self.delete_archive:
         remove(source)
     return {
         'success': True,
         'result': f'backup stored in {destination} ({device.ip_address})'
     }
Example #8
 def import_jobs(self, **kwargs: Any) -> None:
     jobs = kwargs["jobs_to_import"]
     path = self.path / "projects" / "exported_jobs"
     for file in scandir(path / "services"):
         if file.name == ".gitkeep" or file.name not in jobs:
             continue
         with open(file.path, "r") as instance_file:
             instance = yaml.load(instance_file)
             model = instance.pop("type")
             factory(model, **self.objectify(model, instance))
     Session.commit()
     for workflow in listdir(path / "workflows"):
         if workflow == ".gitkeep" or workflow not in jobs:
             continue
         workflow_name = workflow.split(".")[0]
         with open_tar(path / "workflows" / workflow) as tar_file:
             tar_file.extractall(path=path / "workflows")
         for instance_type in ("jobs", "workflow", "edges"):
             path_job = path / "workflows" / workflow_name / instance_type
             for file in scandir(path_job):
                 with open(path_job / file.name, "r") as instance_file:
                     instance = yaml.load(instance_file)
                     model = instance.pop("type")
                     factory(model, **self.objectify(model, instance))
             Session.commit()
         rmtree(path / "workflows" / workflow_name)
Example #9
 def _extract(self):
     with open_tar(self.path) as tar:
         for name in tar.getnames():
             path = self.base_path / name
             if path.exists():
                 continue
             logger.info(f'extracting {path}')
             tar.extract(name, path=self.base_path)
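_extract above trusts member names as-is; extract and extractall will follow "../" or absolute paths on Pythons that predate the 3.12 filter= argument. A hardening sketch under that assumption:

from pathlib import Path
from tarfile import open as open_tar

def safe_extract(archive, base_path):
    base = Path(base_path).resolve()
    with open_tar(archive) as tar:
        for member in tar.getmembers():
            target = (base / member.name).resolve()
            # refuse members that would land outside base_path
            if target != base and base not in target.parents:
                raise ValueError(f"unsafe member path: {member.name}")
        tar.extractall(path=base)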
Example #10
def backup(options):
    def traverse_directory(directory, archive=None, progress=None):
        num_items = 0
        for item in listdir(directory):
            if isdir(join(directory, item)):
                num_items += traverse_directory(join(directory, item), archive,
                                                progress)
            else:
                num_items += 1
                if archive is not None:
                    try:
                        archive.add(join(directory, item))
                    except FileNotFoundError:
                        pass
                if progress is not None:
                    progress[0] += 1
                    progress[1].update(progress[0])
        return num_items
    if not exists('backups'):
        mkdir('backups')
    if not exists('campaign-data'):
        mkdir('campaign-data')
    sql_backup = 'campaign-data/{}.sql'.format(options.db_name)
    print('dumping database...')
    backup_database(options, sql_backup)
    print('database dumped')
    if options.files:
        backup_name = 'backups/{}_{}'.format(
            '-'.join([
                '{:02}'.format(unit) for unit in datetime.now().timetuple()[:3]
            ]), '-'.join([
                '{:02}'.format(unit)
                for unit in datetime.now().timetuple()[3:6]
            ]))
        num_items = 0
        directories = ['campaign-data']
        if exists('simics-workspace/gold-checkpoints'):
            directories.append('simics-workspace/gold-checkpoints')
        print('discovering files to archive')
        for directory in directories:
            num_items += traverse_directory(directory)
        print('archiving files...')
        with open_tar('{}.tar.gz'.format(backup_name), 'w:gz') \
            as backup, ProgressBar(max_value=num_items, widgets=[
                Percentage(), ' (',
                SimpleProgress(format='%(value)d/%(max_value)d'), ') ', Bar(),
                ' ', Timer()]) as progress_bar:
            progress = [0, progress_bar]
            for directory in directories:
                traverse_directory(directory, backup, progress)
        remove(sql_backup)
        print('backup complete')
Example #11
 def import_service(self, archive):
     service_name = archive.split(".")[0]
     path = self.path / "files" / "services"
     with open_tar(path / archive) as tar_file:
         tar_file.extractall(path=path)
         status = self.migration_import(
             folder="services",
             name=service_name,
             import_export_types=["service", "workflow_edge"],
         )
     rmtree(path / service_name)
     return status
Example #12
def tarball_data(tar_name):
    """Extract a tarball for test usage

    This fixture extracts a tarball, and returns the path to the extracted
    files.

    :see: `tar_name`
    """
    data_dir = path.join(path.dirname(__file__), 'data', 'git')
    with open_tar(path.join(data_dir, tar_name + '.tar'), 'r:') as tar:
        with TemporaryDirectory() as temp_dir:
            tar.extractall(temp_dir)
            yield str(path.join(temp_dir, tar_name))
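The generator above reads like a pytest yield fixture; a minimal self-contained variant using pytest's built-in tmp_path (the fixture and tarball names here are hypothetical):

import pytest
from pathlib import Path
from tarfile import open as open_tar

@pytest.fixture
def extracted_repo(tmp_path):
    # 'repo.tar' is a hypothetical archive under the test data directory
    data_dir = Path(__file__).parent / 'data' / 'git'
    with open_tar(data_dir / 'repo.tar', 'r:') as tar:
        tar.extractall(tmp_path)
    yield tmp_path / 'repo'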
Example #13
 def import_service(self, archive):
     service_name = archive.split(".")[0]
     path = self.path / "files" / "services"
     with open_tar(path / archive) as tar_file:
         tar_file.extractall(path=path)
         status = self.migration_import(
             folder="services",
             name=service_name,
             import_export_types=["service", "workflow_edge"],
             skip_pool_update=True,
             skip_model_update=True,
             update_pools=True,
         )
     rmtree(path / service_name, ignore_errors=True)
     return status
Example #14
def tarball_data(tar_name):
    """Extract a tarball for test usage

    This fixture extracts a tarball, and returns the path to the extracted
    files.

    :see: `tar_name`
    """
    data_dir = path.join(path.dirname(path.abspath(__file__)), 'data', 'git')
    with closing(open_tar(path.join(data_dir, tar_name + '.tar'))) as tar:
        try:
            temp_dir = mkdtemp()
            tar.extractall(temp_dir)
            yield str(path.join(temp_dir, tar_name))
        finally:
            rmtree(temp_dir)
Example #15
 def export_service(self, service_id):
     service = db.fetch("service", id=service_id)
     path = Path(self.path / "files" / "services" / service.filename)
     path.mkdir(parents=True, exist_ok=True)
     services = service.deep_services if service.type == "workflow" else [service]
     services = [service.to_dict(export=True) for service in services]
     for service_dict in services:
         for relation in ("devices", "pools", "events"):
             service_dict.pop(relation)
     with open(path / "service.yaml", "w") as file:
         yaml.dump(services, file)
     if service.type == "workflow":
         with open(path / "workflow_edge.yaml", "w") as file:
             yaml.dump(
                 [edge.to_dict(export=True) for edge in service.deep_edges], file
             )
     with open_tar(f"{path}.tgz", "w:gz") as tar:
         tar.add(path, arcname=service.filename)
     rmtree(path, ignore_errors=True)
Example #16
def index_dataset_basic(path_to_dataset, db_session, max_movies_rated):
    """
    Load a small sub-set of the Netflix data into the database.
    
    """
    # Loading the movies:
    movies_path = os.path.join(path_to_dataset, "movie_titles.txt")
    for movie in get_movies(movies_path):
        db_session.add(movie)
    
    # Loading the ratings:
    training_set = os.path.join(path_to_dataset, "training_set")
    if not os.path.isdir(training_set):
        # The ratings haven't been extracted. Let's do it!
        try:
            tar_file = open_tar(training_set + ".tar")
            tar_file.extractall(path_to_dataset)
        except TarError as exc:
            raise IndexingError("Could not open training file: %s" % exc)
        finally:
            # assumed cleanup; the rest of this block is cut off in the source
            if "tar_file" in locals():
                tar_file.close()
Example #18
    def tarfile(self, format, filename, content_type):
        from .root.histogram import Histogram
        from .combination import Combination
        imgformat = "eps"

        tarred_contents = StringIO()
        with closing(open_tar(mode="w" + format, fileobj=tarred_contents)) as tar:
            for key, context in self.resource_to_render.indexed_contexts:
                if not context_renderable_as(context, imgformat):
                    continue
                name = "/".join(map(str, key))
                content = context.rendered(imgformat).content.body

                info = TarInfo(name=name + "." + imgformat)
                info.size = len(content)
                tar.addfile(tarinfo=info, fileobj=StringIO(content))

        return Response(tarred_contents.getvalue(), content_type=content_type,
                        content_disposition=("Content-Disposition: attachment; filename={0};"
                                             .format(filename)))
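Examples #18 and #23 are Python 2, where StringIO can hold the tar stream. On Python 3 the stream must be binary; a minimal in-memory equivalent:

from io import BytesIO
from tarfile import TarInfo, open as open_tar

buffer = BytesIO()
with open_tar(mode="w:gz", fileobj=buffer) as tar:
    body = b"rendered image bytes"       # placeholder content
    info = TarInfo(name="plots/figure.eps")
    info.size = len(body)
    tar.addfile(tarinfo=info, fileobj=BytesIO(body))
archive_bytes = buffer.getvalue()        # bytes of the finished .tar.gz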
Example #19
def unpack_tar_file(directory):
    """
    Extract the files from results.tar (located inside 'directory') into that
    directory, then remove the results.tar file.
    :param directory: (string)
    :return:
    """
    message(mode='INFO', text='    Unpack')
    tar_file = adapt_path('{}results.tar'.format(directory))

    try:
        chdir(directory)
        tar = open_tar(tar_file)
    except FileNotFoundError:
        message(mode='ERROR', text='No tar file')
    except Exception as err:
        message(mode='ERROR', text="{}".format(err))
    else:
        tar.extractall()
        tar.close()
        remove(tar_file)
Example #20
 def export_job(self, job_id: str) -> None:
     job = fetch("Job", id=job_id)
     if job.type == "Workflow":
         path = self.path / "projects" / "exported_jobs" / "workflows" / job.filename
         path.mkdir(parents=True, exist_ok=True)
         for instance_type in ("jobs", "workflow", "edges"):
             Path(path / instance_type).mkdir(parents=True, exist_ok=True)
         for sub_job in job.jobs:
             with open(path / "jobs" / f"{sub_job.filename}.yaml",
                       "w") as file:
                 sub_job_as_dict = sub_job.to_dict(export=True)
                 for relation in ("devices", "pools", "events"):
                     sub_job_as_dict.pop(relation)
                 if sub_job.type == "Workflow":
                     sub_job_as_dict["type"] = "Workflow"
                 yaml.dump(sub_job_as_dict, file)
         for edge in job.edges:
             name = self.strip_all(
                 f"{edge.workflow}{edge.source}{edge.destination}")
             with open(path / "edges" / f"{name}.yaml", "w") as file:
                 edge = {
                     **edge.to_dict(export=True), "type": "WorkflowEdge"
                 }
                 yaml.dump(edge, file)
         with open(path / "workflow" / f"{job.filename}.yaml", "w") as file:
             job_as_dict = job.to_dict(export=True)
             for relation in ("devices", "pools", "events"):
                 job_as_dict.pop(relation)
             yaml.dump({**job_as_dict, "type": "Workflow"}, file)
         with open_tar(f"{path}.tgz", "w:gz") as tar:
             tar.add(path, arcname=job.filename)
         rmtree(path)
     else:
         path = self.path / "projects" / "exported_jobs" / "services"
         with open(path / f"{job.filename}.yaml", "w") as file:
             job_as_dict = job.to_dict(export=True)
             for relation in ("devices", "pools", "events"):
                 job_as_dict.pop(relation)
             yaml.dump(job_as_dict, file)
Example #21
def pack_tar_file(directory):
    """
    Pack files whose extensions are listed in configurationShared.outputFileEndings
    into results.tar inside 'directory'; beforehand, remove the old tar file if it exists.
    :param directory: (string)
    :return:
    """
    message(mode='INFO', text='    Pack')
    tar_file = adapt_path('{}results.tar'.format(directory))

    try:
        if path.isfile(tar_file):  # remove old tar file if it exists
            remove(tar_file)
        chdir(directory)
        tar = open_tar(tar_file, 'w')
    except Exception as err:
        message(mode='ERROR', text="{}".format(err))
    else:
        for extension in outputFileEndings:
            for file in listdir(directory):
                if file.endswith('.{}'.format(extension)):
                    tar.add(file)
        tar.close()
Example #22
 def export_service(self, service_id):
     service = db.fetch("service", id=service_id)
     path = Path(self.path / "files" / "services" / service.filename)
     path.mkdir(parents=True, exist_ok=True)
     services = service.deep_services if service.type == "workflow" else [
         service
     ]
     exclude = ("target_devices", "target_pools", "pools", "events")
     services = [
         service.to_dict(export=True,
                         private_properties=True,
                         exclude=exclude) for service in services
     ]
     with open(path / "service.yaml", "w") as file:
         yaml.dump(services, file)
     if service.type == "workflow":
         edges = [edge.to_dict(export=True) for edge in service.deep_edges]
         with open(path / "workflow_edge.yaml", "w") as file:
             yaml.dump(edges, file)
     with open_tar(f"{path}.tgz", "w:gz") as tar:
         tar.add(path, arcname=service.filename)
     rmtree(path, ignore_errors=True)
     return path
Example #23
    def tarfile(self, format, filename, content_type):
        from .root.histogram import Histogram
        from .combination import Combination
        imgformat = "eps"

        tarred_contents = StringIO()
        with closing(open_tar(mode="w" + format,
                              fileobj=tarred_contents)) as tar:
            for key, context in self.resource_to_render.indexed_contexts:
                if not context_renderable_as(context, imgformat):
                    continue
                name = "/".join(map(str, key))
                content = context.rendered(imgformat).content.body

                info = TarInfo(name=name + "." + imgformat)
                info.size = len(content)
                tar.addfile(tarinfo=info, fileobj=StringIO(content))

        return Response(
            tarred_contents.getvalue(),
            content_type=content_type,
            content_disposition=(
                "Content-Disposition: attachment; filename={0};".format(
                    filename)))
Example #24
    def __init__(self, embedding_file, vocab):
        self._vocab = vocab
        with open_tar(embedding_file, 'r') as fin:
            with closing(fin.extractfile('parameters')) as f:
                params = json.loads(f.read())
                self.N = params['N']
                self.K = params['K']

            # numpy.float was removed in NumPy 1.24; use an explicit dtype
            self.mu = numpy.empty([self.N, self.K], dtype=numpy.float64)
            self.context = numpy.empty([self.N, self.K], dtype=numpy.float64)
            self.sigma = numpy.empty([2 * self.N, self.K], dtype=numpy.float64)

            with closing(fin.extractfile('word_mu')) as f:
                for i, line in enumerate(f):
                    vec = line.strip().split()[1:]
                    self.mu[i,:] = [float(ele) for ele in vec]
            with closing(fin.extractfile('mu_context')) as f:
                for i, line in enumerate(f):
                    vec = line.strip().split()
                    self.context[i,:] = [float(ele) for ele in vec]
            with closing(fin.extractfile('sigma')) as f:
                for i, line in enumerate(f):
                    vec = line.strip().split()
                    self.sigma[i,:] = [float(ele) for ele in vec]
Example #25
#!/usr/bin/env python3

from compileall import compile_dir
from os import chdir, remove, walk
from os.path import abspath, dirname, join
from tarfile import open as open_tar

top_dir = dirname(dirname(abspath(__file__)))
chdir(top_dir)
# legacy=True writes .pyc files next to their sources so they get picked up below
compile_dir('src', force=True, quiet=1, legacy=True)

with open_tar('drseus.tar.gz', 'w:gz') as package:
    package.add('scripts/install_dependencies.sh', 'install_dependencies.sh')
    package.add('scripts/setup_environment.sh', 'setup_environment.sh')
    package.add('drseus.py')
    package.add('README.md')
    for root, dirs, files in walk(join(top_dir, 'src')):
        if 'migrations' not in root and '__pycache__' not in root:
            for file_ in files:
                if not file_.endswith('.py'):
                    package.add(join(root.replace(top_dir + '/', ''), file_))
                if file_.endswith('.pyc'):
                    remove(join(root, file_))
Example #26
 def _generate_state(self, sm):
     with self.handle.follow(sm).make_path() as r:
         with open_tar(str(r), "r") as tp:
             yield tp
Example #27
def get_test_filenames(test_path, force_download):
    """Return the test file names
    If the test files does'nt exists, we download it
    """

    uris = {'http://download.wikimedia.org/qualitywiki/latest':
            [('qualitywiki-latest-stub-articles.xml', '.gz'),      #~  3.1 KB
             ('qualitywiki-latest-stub-meta-current.xml', '.gz'),  #~ 11.0 KB
             ('qualitywiki-latest-stub-meta-history.xml', '.gz')], #~ 28.9 KB
            'http://download.wikimedia.org/tawiki/latest':
            [('tawiki-latest-stub-articles.xml', '.gz'),           #~ 1.2 MB
             ('tawiki-latest-stub-meta-history.xml', '.gz')],      #~ 7.3 MB
            'http://www.w3.org/XML/Test/': [('xmlts20080205', '.tar.gz')]
            }
    compressed_dir_path = join(test_path, 'compressed_files')

    if force_download is True:
        if lfs.exists(compressed_dir_path):
            print 'Remove compressed directory ', compressed_dir_path
            lfs.remove(compressed_dir_path)
            for names in uris.itervalues():
                for (name, ext) in names:
                    path = join(test_path, name)
                    if lfs.exists(path):
                        print 'Remove %s file' % path
                        lfs.remove(path)

    # test directory
    if lfs.exists(test_path) is False:
        lfs.make_folder(test_path)

    # compressed directory
    if lfs.exists(compressed_dir_path) is False:
        lfs.make_folder(compressed_dir_path)
    else:
        lfs.open(compressed_dir_path)

    test_dir_filenames = lfs.get_names(test_path)
    for base_uri, names in uris.iteritems():
        for (name, ext) in names:
            if test_dir_filenames.count(name):
                continue
            compressed_dest = join(compressed_dir_path, '%s%s' % (name, ext))
            # check if tarball already exists
            if lfs.exists(compressed_dest) is False:
                src = join(base_uri, '%s%s' % (name, ext))
                print 'GET %s file' % src
                dest = join(test_path, name)
                if vfs.exists(src) is False:
                    print "%s uri does not exist" % src
                    continue
                src_file = vfs.open(src)
                # save Gzip file
                compressed_dest_file = lfs.make_file(compressed_dest)
                compressed_dest_file.write(src_file.read())
                compressed_dest_file.close()
                src_file.close()
            print 'Extract file %s' % compressed_dest
            # Uncompressed File Path
            if name == 'xmlts20080205':
                # uncompress only xmlconf.xml file
                tar = open_tar(compressed_dest)
                xmlconf_file = tar.extractfile('xmlconf/xmlconf.xml')
                ucf_path = join(test_path, name)
                ucf_file = lfs.make_file(ucf_path)
                ucf_file.write(xmlconf_file.read())
                ucf_file.close()
            else:
                # untar Gzip file
                compressed_dest_file = lfs.open(compressed_dest)
                gzip_file = GzipFile(compressed_dest)
                ucf_path = join(test_path, name)
                ucf_file = lfs.make_file(ucf_path)
                ucf_file.write(gzip_file.read())
                compressed_dest_file.close()
                gzip_file.close()
                ucf_file.close()

    tests = []
    # update test dir name
    test_dir_filenames = lfs.get_names(test_path)
    for filename in test_dir_filenames:
        real_path = join(test_path, filename)
        if lfs.is_file(real_path):
            bytes = lfs.get_size(real_path)
            tests.append((real_path, filename, bytes,
                          get_string_size(bytes)))
    tests.sort(key=lambda x: x[2])
    return tests
Example #28
def results_page(request, campaign_id=None):
    error_title = None
    error_message = None
    result_filter = None
    if campaign_id is not None:
        campaign = models.campaign.objects.get(id=campaign_id)
    else:
        campaign = None
    if request.method == 'GET' and 'view_output' in request.GET and \
            'view_all' not in request.GET and 'select_box' in request.GET:
        result_ids = map(int, dict(request.GET)['select_box'])
        results = models.result.objects.filter(
            id__in=result_ids).order_by('-id')
    else:
        if campaign_id is not None:
            campaign_items_ = campaign_items
            output_file = 'campaign-data/{}/gold_{}'.format(
                campaign_id, campaign.output_file)
            if exists(output_file) and guess_type(output_file)[0] is not None:
                output_file = True
            else:
                output_file = False
            results = campaign.result_set.all()
        else:
            campaign_items_ = None
            output_file = True
            results = models.result.objects.all()
        result_filter = filters.result(request.GET, queryset=results)
        if not result_filter.qs.count() and results.count():
            error_title = 'Filter Error'
            error_message = 'Filter did not return any results and was ignored.'
            result_filter = filters.result(None, queryset=results)
        else:
            results = result_filter.qs.order_by('-id')
    if request.method == 'GET' and 'view_output' in request.GET:
        if 'view_dut_output' in request.GET:
            if 'view_download' in request.GET:
                temp_file = TemporaryFile()
                start = perf_counter()
                with open_tar(fileobj=temp_file, mode='w:gz') as archive:
                    for result in results:
                        with BytesIO(result.dut_output.encode('utf-8')) as \
                                byte_file:
                            info = TarInfo('{}_dut_output.txt'.format(
                                result.id))
                            info.size = len(result.dut_output)
                            archive.addfile(info, byte_file)
                print('archive created', round(perf_counter() - start, 2),
                      'seconds')
                response = FileResponse(
                    temp_file, content_type='application/x-compressed')
                response['Content-Disposition'] = \
                    'attachment; filename=dut_outputs.tar.gz'
                response['Content-Length'] = temp_file.tell()
                temp_file.seek(0)
                return response
            else:
                return render(
                    request, 'output.html', {
                        'campaign': campaign,
                        'campaign_items': campaign_items if campaign else None,
                        'navigation_items': navigation_items,
                        'results': results,
                        'type': 'dut_output'
                    })
        elif 'view_aux_output' in request.GET:
            if 'view_download' in request.GET:
                temp_file = TemporaryFile()
                start = perf_counter()
                with open_tar(fileobj=temp_file, mode='w:gz') as archive:
                    for result in results:
                        with BytesIO(result.aux_output.encode('utf-8')) as \
                                byte_file:
                            info = TarInfo('{}_aux_output.txt'.format(
                                result.id))
                            info.size = len(result.aux_output)
                            archive.addfile(info, byte_file)
                print('archive created', round(perf_counter() - start, 2),
                      'seconds')
                response = FileResponse(
                    temp_file, content_type='application/x-compressed')
                response['Content-Disposition'] = \
                    'attachment; filename=aux_outputs.tar.gz'
                response['Content-Length'] = temp_file.tell()
                temp_file.seek(0)
                return response
            else:
                return render(
                    request, 'output.html', {
                        'campaign': campaign,
                        'campaign_items': campaign_items if campaign else None,
                        'navigation_items': navigation_items,
                        'results': results,
                        'type': 'aux_output'
                    })
        elif 'view_debugger_output' in request.GET:
            if 'view_download' in request.GET:
                temp_file = TemporaryFile()
                start = perf_counter()
                with open_tar(fileobj=temp_file, mode='w:gz') as archive:
                    for result in results:
                        with BytesIO(
                                result.debugger_output.encode('utf-8')) as \
                                byte_file:
                            info = TarInfo('{}_debugger_output.txt'.format(
                                result.id))
                            info.size = len(result.debugger_output)
                            archive.addfile(info, byte_file)
                print('archive created', round(perf_counter() - start, 2),
                      'seconds')
                response = FileResponse(
                    temp_file, content_type='application/x-compressed')
                response['Content-Disposition'] = \
                    'attachment; filename=debugger_outputs.tar.gz'
                response['Content-Length'] = temp_file.tell()
                temp_file.seek(0)
                return response
            else:
                return render(
                    request, 'output.html', {
                        'campaign': campaign,
                        'campaign_items': campaign_items if campaign else None,
                        'navigation_items': navigation_items,
                        'results': results,
                        'type': 'debugger_output'
                    })
        elif 'view_output_file' in request.GET:
            result_ids = []
            for result in results:
                if exists('campaign-data/{}/results/{}/{}'.format(
                        result.campaign_id, result.id,
                        result.campaign.output_file)):
                    result_ids.append(result.id)
            results = models.result.objects.filter(
                id__in=result_ids).order_by('-id')
            if 'view_download' in request.GET:
                temp_file = TemporaryFile()
                start = perf_counter()
                with open_tar(fileobj=temp_file, mode='w:gz') as archive:
                    for result in results:
                        archive.add(
                            'campaign-data/{}/results/{}/{}'.format(
                                result.campaign_id, result.id,
                                result.campaign.output_file),
                            '{}_{}'.format(result.id,
                                           result.campaign.output_file))
                print('archive created', round(perf_counter() - start, 2),
                      'seconds')
                response = FileResponse(
                    temp_file, content_type='application/x-compressed')
                response['Content-Disposition'] = \
                    'attachment; filename=output_files.tar.gz'
                response['Content-Length'] = temp_file.tell()
                temp_file.seek(0)
                return response
            else:
                return render(
                    request, 'output.html', {
                        'campaign': campaign,
                        'campaign_items': campaign_items if campaign else None,
                        'navigation_items': navigation_items,
                        'results': results,
                        'type': 'output_file'
                    })
        elif 'view_log_file' in request.GET:
            if 'view_download' in request.GET:
                temp_file = TemporaryFile()
                start = perf_counter()
                with open_tar(fileobj=temp_file, mode='w:gz') as archive:
                    for result in results:
                        for log_file in result.campaign.log_files:
                            archive.add(
                                'campaign-data/{}/results/{}/{}'.format(
                                    result.campaign_id, result.id, log_file),
                                '{}_{}'.format(result.id, log_file))
                print('archive created', round(perf_counter() - start, 2),
                      'seconds')
                response = FileResponse(
                    temp_file, content_type='application/x-compressed')
                response['Content-Disposition'] = \
                    'attachment; filename=log_files.tar.gz'
                response['Content-Length'] = temp_file.tell()
                temp_file.seek(0)
                return response
            else:
                return render(
                    request, 'output.html', {
                        'campaign': campaign,
                        'campaign_items': campaign_items if campaign else None,
                        'navigation_items': navigation_items,
                        'results': results,
                        'type': 'log_file'
                    })
    elif request.method == 'POST':
        if 'new_outcome_category' in request.POST:
            results.values('outcome_category').update(
                outcome_category=request.POST['new_outcome_category'])
        elif 'new_outcome' in request.POST:
            results.values('outcome').update(
                outcome=request.POST['new_outcome'])
        elif 'delete' in request.POST and 'results[]' in request.POST:
            result_ids = [
                int(result_id) for result_id in dict(request.POST)['results[]']
            ]
            results_to_delete = models.result.objects.filter(id__in=result_ids)
            for result in results_to_delete:
                if exists('campaign-data/{}/results/{}'.format(
                        result.campaign_id, result.id)):
                    rmtree('campaign-data/{}/results/{}'.format(
                        result.campaign_id, result.id))
            results_to_delete.delete()
        elif 'delete_all' in request.POST:
            for result in results:
                if exists('campaign-data/{}/results/{}'.format(
                        result.campaign_id, result.id)):
                    rmtree('campaign-data/{}/results/{}'.format(
                        result.campaign_id, result.id))
            results.delete()
            if campaign_id:
                return redirect('/campaign/{}/results'.format(campaign_id))
            else:
                return redirect('/results')
    result_table = tables.results(results)
    RequestConfig(request, paginate={
        'per_page': table_length
    }).configure(result_table)
    return render(
        request, 'results.html', {
            'campaign': campaign,
            'campaign_items': campaign_items_,
            'error_message': error_message,
            'error_title': error_title,
            'filter': result_filter,
            'filter_tabs': True,
            'navigation_items': navigation_items,
            'output_file': output_file,
            'result_count': '{:,}'.format(results.count()),
            'result_table': result_table
        })
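Each download branch above repeats one pattern: write the archive into a TemporaryFile, record its size with tell(), rewind, then stream it. A condensed sketch of that pattern (FileResponse from Django; the build_archive callback is hypothetical):

from tempfile import TemporaryFile
from django.http import FileResponse

def tar_download(build_archive, filename):
    temp_file = TemporaryFile()
    build_archive(temp_file)              # writes a 'w:gz' tar into temp_file
    size = temp_file.tell()               # total bytes written so far
    temp_file.seek(0)                     # rewind before streaming
    response = FileResponse(temp_file, content_type='application/x-compressed')
    response['Content-Disposition'] = 'attachment; filename={}'.format(filename)
    response['Content-Length'] = size
    return response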
Example #29
def result_page(request, result_id):
    result = models.result.objects.get(id=result_id)
    if request.method == 'GET':
        if 'get_dut_output' in request.GET:
            response = HttpResponse(result.dut_output,
                                    content_type='text/plain')
            response['Content-Disposition'] = \
                'attachment; filename="{}_dut_output.txt"'.format(
                    result_id)
            return response
        elif 'get_debugger_output' in request.GET:
            response = HttpResponse(result.debugger_output,
                                    content_type='text/plain')
            response['Content-Disposition'] = \
                'attachment; filename="{}_debugger_output.txt"'.format(
                    result_id)
            return response
        elif 'get_aux_output' in request.GET:
            response = HttpResponse(result.aux_output,
                                    content_type='text/plain')
            response['Content-Disposition'] = \
                'attachment; filename="{}_aux_output.txt"'.format(
                    result_id)
            return response
        elif 'get_output_file' in request.GET:
            response = get_file(result.campaign.output_file, result_id)
            response['Content-Disposition'] = \
                'attachment; filename={}_{}'.format(
                    result_id, result.campaign.output_file)
            return response
        elif 'get_log_file' in request.GET:
            temp_file = TemporaryFile()
            with open_tar(fileobj=temp_file, mode='w:gz') as archive:
                for log_file in result.campaign.log_files:
                    archive.add(
                        'campaign-data/{}/results/{}/{}'.format(
                            result.campaign_id, result.id, log_file),
                        '{}_{}'.format(result.id, log_file))
            response = FileResponse(temp_file,
                                    content_type='application/x-compressed')
            response['Content-Disposition'] = \
                'attachment; filename={}_log_files.tar.gz'.format(result.id)
            response['Content-Length'] = temp_file.tell()
            temp_file.seek(0)
            return response
    campaign_items_ = [(item[0],
                        '/campaign/{}/{}'.format(result.campaign_id,
                                                 item[1]), item[2], item[3])
                       for item in campaign_items]
    if result.campaign.output_file:
        output_file = 'campaign-data/{}/results/{}/{}'.format(
            result.campaign_id, result_id, result.campaign.output_file)
        output_file = \
            exists(output_file) and guess_type(output_file)[0] is not None
    else:
        output_file = False
    result_table = tables.result(models.result.objects.filter(id=result_id))
    events = result.event_set.all()
    event_table = tables.event(events)
    if request.method == 'POST' and 'launch' in request.POST:
        Popen([
            argv[0], '--campaign_id',
            str(result.campaign_id), 'regenerate', result_id
        ])
    if request.method == 'POST' and 'save' in request.POST:
        result.outcome = request.POST['outcome']
        result.outcome_category = request.POST['outcome_category']
        result.save()
    elif request.method == 'POST' and 'delete' in request.POST:
        if exists('campaign-data/{}/results/{}'.format(result.campaign_id,
                                                       result.id)):
            rmtree('campaign-data/{}/results/{}'.format(
                result.campaign_id, result.id))
        result.delete()
        return HttpResponse('Result deleted')
    injections = result.injection_set.all()
    if result.campaign.simics:
        if injections.count():
            injection_table = tables.injection(injections)
        else:
            injection_table = None
        register_diffs = result.simics_register_diff_set.all()
        register_filter = filters.simics_register_diff(request.GET,
                                                       queryset=register_diffs)
        register_diff_count = register_filter.qs.count()
        register_table = tables.simics_register_diff(register_filter.qs)
        RequestConfig(request, paginate={
            'per_page': table_length
        }).configure(register_table)
        memory_diffs = result.simics_memory_diff_set.all()
        memory_diff_count = memory_diffs.count()
        memory_table = tables.simics_memory_diff(memory_diffs)
        RequestConfig(request, paginate={
            'per_page': table_length
        }).configure(memory_table)
    else:
        register_filter = None
        memory_diff_count = 0
        memory_table = None
        register_diff_count = 0
        register_table = None
        if injections.count():
            injection_table = tables.injection(injections)
        else:
            injection_table = None
    RequestConfig(request, paginate=False).configure(result_table)
    RequestConfig(request, paginate=False).configure(event_table)
    if injection_table:
        RequestConfig(request, paginate=False).configure(injection_table)
    return render(
        request, 'result.html', {
            'campaign_items': campaign_items_,
            'event_count': '{:,}'.format(events.count()),
            'event_table': event_table,
            'filter': register_filter,
            'injection_table': injection_table,
            'memory_diff_count': '{:,}'.format(memory_diff_count),
            'memory_table': memory_table,
            'navigation_items': navigation_items,
            'output_file': output_file,
            'register_diff_count': '{:,}'.format(register_diff_count),
            'register_table': register_table,
            'result': result,
            'result_table': result_table
        })