def write_footer(about=None, date=None):
    """Write a <footer> for an Hveto page

    Parameters
    ----------
    about : `str`, optional
        path of about page to link
    date : `datetime.datetime`, optional
        the datetime representing when this analysis was generated,
        defaults to `~datetime.datetime.now`

    Returns
    -------
    page : `~glue.markup.page`
        the markup object containing the footer HTML
    """
    page = markup.page()
    page.twotags.append('footer')
    markup.element('footer', parent=page)(class_='footer')
    page.div(class_='container')
    # write user/time for analysis
    if date is None:
        date = datetime.datetime.now().replace(second=0, microsecond=0)
    version = get_versions()['version']
    commit = get_versions()['full-revisionid']
    url = 'https://github.com/hveto/hveto/tree/%s' % commit
    hlink = markup.oneliner.a('Hveto version %s' % version, href=url,
                              target='_blank')
    page.p('Page generated using %s by %s at %s'
           % (hlink, getuser(), date))
    # link to 'about'
    if about is not None:
        page.a('How was this page generated?', href=about)
    markup.element('footer', parent=page).close()
    return page
def parse_command_line():
    parser = argparse.ArgumentParser(
        description='Tool for downloading and installing WebDriver binaries. '
                    'Version: {}'.format(get_versions()["version"]),
    )
    parser.add_argument(
        'browser',
        help='Browser to download the corresponding WebDriver binary. '
             'Valid values are: {0}. Optionally specify a version number of '
             'the WebDriver binary as follows: \'browser:version\' e.g. '
             '\'chrome:2.39\'. If no version number is specified, the latest '
             'available version of the WebDriver binary will be downloaded.'
             .format(', '.join(DOWNLOADERS.keys())),
        nargs='+')
    parser.add_argument(
        '--downloadpath', '-d',
        action='store', dest='downloadpath', metavar='F', default=None,
        help='Where to download the webdriver binaries')
    parser.add_argument(
        '--linkpath', '-l',
        action='store', dest='linkpath', metavar='F', default=None,
        help='Where to link the webdriver binary to. Set to "AUTO" if you '
             'need some intelligence to decide where to place the final '
             'webdriver binary. If set to "SKIP", no link/copy is done.')
    parser.add_argument(
        '--os', '-o',
        action='store', dest='os_name', choices=OS_NAMES, metavar='OSNAME',
        default=None,
        help='Overrides OS detection with the given OS name. '
             'Values: {0}'.format(', '.join(OS_NAMES)))
    parser.add_argument(
        '--bitness', '-b',
        action='store', dest='bitness', choices=BITNESS, metavar='BITS',
        default=None,
        help='Overrides bitness detection with the given value. '
             'Values: {0}'.format(', '.join(BITNESS)))
    parser.add_argument(
        '--version', action='version',
        version='%(prog)s {}'.format(get_versions()["version"]))
    return parser.parse_args()
def create_event(self, outcome, outcome_detail_note):
    log = self.taskobj.log

    if log not in [EventIP] or not self.event_type:
        # check if log is an event class
        return

    event_type = EventType.objects.get(eventType=self.event_type)
    application = self.taskobj

    event = log.objects.create(
        eventType=event_type,
        eventOutcome=outcome,
        eventVersion=get_versions()['version'],
        eventOutcomeDetailNote=truncate(outcome_detail_note, 1024),
        eventApplication=application,
        linkingAgentIdentifierValue=self.taskobj.responsible,
    )

    if log == EventIP:
        if not isinstance(self.taskobj.information_package, InformationPackage):
            raise AttributeError(
                "An IP is required to be set on the task to create an IP event"
            )

        event.linkingObjectIdentifierValue = self.taskobj.information_package
        event.save(update_fields=['linkingObjectIdentifierValue'])
def get_version():
    import sys
    sys.path.append(str(CURRENT_DIR.parent))
    from _version import get_versions

    v = get_versions()
    return v.get("closest-tag", v["version"])
def upload(self, request, pk=None):
    ip = self.get_object()
    ip.State = "Uploading"
    ip.save()

    dst, _ = find_destination('content', ip.get_profile('sip').structure)
    if dst is None:
        dst = ''

    if request.method == 'GET':
        path = os.path.join(dst, request.GET.get('flowRelativePath', ''))
        chunk_nr = request.GET.get('flowChunkNumber')
        chunk_path = "%s_%s" % (path, chunk_nr)

        if os.path.exists(os.path.join(ip.ObjectPath, chunk_path)):
            return HttpResponse(status=200)
        return HttpResponse(status=204)

    if request.method == 'POST':
        path = os.path.join(dst, request.data.get('flowRelativePath', ''))
        chunk_nr = request.data.get('flowChunkNumber')
        chunk_path = "%s_%s" % (path, chunk_nr)
        chunk_path = os.path.join(ip.ObjectPath, chunk_path)
        chunk = request.FILES['file']

        if not os.path.exists(os.path.dirname(chunk_path)):
            mkdir_p(os.path.dirname(chunk_path))

        # write this chunk to its own temporary file
        # (renamed from `dst` to avoid shadowing the destination dir above)
        with open(chunk_path, 'wb+') as chunk_dst:
            for c in chunk.chunks():
                chunk_dst.write(c)

        if chunk_nr == request.data.get('flowTotalChunks'):
            # last chunk received: merge all chunk files into the final file
            path = os.path.join(ip.ObjectPath, path)
            with open(path, 'wb') as f:
                for chunk_file in glob.glob('%s_*' % path):
                    # read in binary mode and close the handle promptly
                    with open(chunk_file, 'rb') as cf:
                        f.write(cf.read())
                    os.remove(chunk_file)

            event_type = EventType.objects.get(eventType=10120)
            agent = request.user
            create_event(event_type, 0, "Uploaded %s" % path,
                         get_versions()['version'], agent, ip=ip)

        return Response("Uploaded files")
def get(self, request):
    context = {}

    # Flags in settings: Their expected and actual values.
    SETTINGS_FLAGS = [
        ('DEBUG', False),
        ('LANGUAGE_CODE', None),
        ('TIME_ZONE', None),
    ]

    context['python'] = '.'.join(str(x) for x in sys.version_info[:3])
    context['platform'] = {
        'os': platform.system(),
        'release': platform.release(),
        'version': platform.version(),
        'mac_version': platform.mac_ver(),
        'win_version': platform.win32_ver(),
        'linux_dist': platform.linux_distribution(),
    }
    context['hostname'] = socket.gethostname()
    context['version'] = get_versions()
    context['core_version'] = get_core_versions()
    context['time_checked'] = timezone.now()
    context['database'] = get_database_info()

    try:
        context['elasticsearch'] = get_elasticsearch_info()
    except KeyError:
        pass

    context['redis'] = get_redis_info()
    context['rabbitmq'] = get_rabbitmq_info()
    context['workers'] = get_workers()
    context['python_packages'] = pip_freeze()

    context['settings_flags'] = []
    for name, expected in SETTINGS_FLAGS:
        actual_setting = getattr(settings, name, None)
        if expected is not None:
            unexpected = expected != actual_setting
        else:
            unexpected = False
        context['settings_flags'].append({
            'name': name,
            'unexpected': unexpected,
            'actual': actual_setting
        })

    return Response(context)
def determine_version_and_release():
    version_data = get_versions()
    version = version_data['version']
    commit = version_data['full-revisionid'][:8]

    if version.endswith("dirty"):
        # strip the trailing ".dirty" marker
        version = version[:-6]

    splitted = version.split('+')
    if len(splitted) > 1 and not splitted[1].startswith('0'):
        version = "unstable development version " + commit
    else:
        version = splitted[0]

    # version and release are returned as the same string
    return version, version
def get_version():
    version = versioneer.get_versions()["version"]
    major_version = re.findall(MAJOR_VER_REGEX, version)[0]
    minor_version = re.findall(MINOR_VER_REGEX, version)[0]
    patch_version = re.findall(PATCH_VER_REGEX, version)[0]
    short_version = major_version + "." + minor_version + "." + patch_version

    version_dict = {}
    version_dict["full"] = version
    version_dict["major"] = major_version
    version_dict["minor"] = minor_version
    version_dict["patch"] = patch_version
    version_dict["short"] = short_version
    return version_dict
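# Note: MAJOR_VER_REGEX, MINOR_VER_REGEX and PATCH_VER_REGEX are defined
# elsewhere in the module the snippet above comes from. Hypothetical
# definitions, assuming a "major.minor.patch"-prefixed version string as
# produced by versioneer from a semver-style tag:
MAJOR_VER_REGEX = r"^(\d+)\.\d+\.\d+"
MINOR_VER_REGEX = r"^\d+\.(\d+)\.\d+"
PATCH_VER_REGEX = r"^\d+\.\d+\.(\d+)"
# e.g. re.findall(MAJOR_VER_REGEX, "1.2.3+4.gabc1234")[0] == "1"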
def create_event(self, task_id, status, args, kwargs, retval, einfo):
    if status == celery_states.SUCCESS:
        outcome = 0
        kwargs.pop('_options', {})
        outcome_detail_note = self.event_outcome_success(*args, **kwargs)
    else:
        outcome = 1
        outcome_detail_note = einfo.traceback

    return EventIP(
        eventType_id=self.event_type,
        eventOutcome=outcome,
        eventVersion=get_versions()['version'],
        eventOutcomeDetailNote=truncate(outcome_detail_note, 1024),
        eventApplication_id=task_id,
        linkingAgentIdentifierValue_id=self.responsible,
        linkingObjectIdentifierValue_id=self.ip,
    )
def versions_from_versioneer():
    # Ideally, we will interrogate versioneer to find out the version of the
    # project we are building. Note that we can't simply look at
    # project.__version__ as we need the version string pre-build, so the
    # package may not be importable.
    for dir_ in dirs_containing_file('_version.py'):
        sys.path.insert(0, dir_)
        try:
            import _version
            yield _version.get_versions()['version']
        except Exception as e:
            print(e)
        finally:
            if '_version' in sys.modules:
                sys.modules.pop('_version')
            sys.path.pop(0)
def __init__(self, *args, **kwargs):
    super().__init__(*args, **kwargs)
    path = Path(__file__).parent
    uic.loadUi(path / "gui/main_form.ui", self)
    self.label_logo.setProperty("pixmap", path / "gui/5-ID_TopAlign.png")
    self.setProperty("windowIcon", path / "gui/5-ID_TopAlign.png")

    ver = _version.get_versions()
    ver_str = f"version: {ver['version'][:3]} {ver['date'].split('T')[0]}"
    self.label_version.setProperty("text", ver_str)

    self.pushButton_stop.setProperty("enabled", False)
    self.setContentsMargins(20, 0, 20, 20)

    self.pushButton_currentid.released.connect(self.update_scanid)
    self.pushButton_plus1.released.connect(self.update_scanid_plus1)
    self.pushButton_browse.released.connect(self.get_dir)
    self.pushButton_start.released.connect(self.start_loop)
    self.pushButton_stop.released.connect(self.stop_loop)
    self.pushButton_batchfit.released.connect(self.get_conf_H5_dirs)
def get(self, request):
    context = {}
    cwd = settings.BASE_DIR

    # Shell commands: Name and command
    SHELL_COMMANDS = [
        ('hostname', 'hostname'),
        ('mysql_version', 'mysql --version'),
        ('python_packages', 'pip freeze'),
    ]

    # Flags in settings: Their expected and actual values.
    SETTINGS_FLAGS = [
        ('DEBUG', False),
        ('LANGUAGE_CODE', None),
        ('TIME_ZONE', None),
    ]

    context['version'] = get_versions()['version']
    context['time_checked'] = timezone.now()

    for name, cmd in SHELL_COMMANDS:
        context[name] = run_shell_command(cmd, cwd)
    context['python_packages'] = context['python_packages'].split('\n')

    context['settings_flags'] = []
    for name, expected in SETTINGS_FLAGS:
        actual_setting = getattr(settings, name, None)
        if expected is not None:
            unexpected = expected != actual_setting
        else:
            unexpected = False
        context['settings_flags'].append({
            'name': name,
            'unexpected': unexpected,
            'actual': actual_setting
        })

    return Response(context)
# The encoding of source files.
#source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'skultrafast'
copyright = u'2012, Till Stensitzki'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
from _version import get_versions
release = get_versions()['version']
del get_versions

# The short X.Y version.
version = '.'.join(release.split('.')[:2])

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
master_doc = 'contents'
project = u'AlphaTwirl'
copyright = u'2018, Tai Sakuma'
author = u'Tai Sakuma'

##__________________________________________________________________||
# import alphatwirl/_version.py without importing alphatwirl
path = os.path.dirname(os.path.abspath('.'))
path = os.path.join(path, 'alphatwirl')
sys.path.insert(0, path)
del path
from _version import get_versions
sys.path.pop()

version = '.'.join(get_versions()['version'].split('.')[0:2])   # e.g., '0.11'
release = '.'.join(get_versions()['version'].split('+')[0:1])   # e.g., '0.11.0.dev'

##__________________________________________________________________||
language = None
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
pygments_style = 'sphinx'
todo_include_todos = True
def create_ip(self, request, pk=None):
    """
    Creates the specified information package

    Args:
        pk: The primary key (id) of the information package to create

    Returns:
        None
    """
    ip = self.get_object()
    sa = ip.SubmissionAgreement
    agent = request.user

    if ip.State != "Uploaded":
        raise ValueError(
            "The IP (%s) is in the state '%s' but should be 'Uploaded'" % (pk, ip.State)
        )

    validators = request.data.get('validators', {})
    validate_xml_file = validators.get('validate_xml_file', False)
    validate_file_format = validators.get('validate_file_format', False)
    validate_integrity = validators.get('validate_integrity', False)
    validate_logical_physical_representation = validators.get(
        'validate_logical_physical_representation', False)

    container_format = ip.get_container_format()

    main_step = ProcessStep.objects.create(
        name="Create SIP",
    )

    t0 = ProcessTask.objects.create(
        name="preingest.tasks.UpdateIPStatus",
        params={
            "ip": ip,
            "status": "Creating",
        },
        processstep_pos=0,
        log=EventIP,
        information_package=ip,
        responsible=self.request.user,
    )

    start_create_sip_step = ProcessStep.objects.create(
        name="Update IP Status",
        parent_step_pos=0
    )
    start_create_sip_step.tasks.add(t0)

    event_type = EventType.objects.get(eventType=10200)
    create_event(event_type, 0, "Created SIP", get_versions()['version'],
                 agent, ip=ip)

    prepare_path = Path.objects.get(
        entity="path_preingest_prepare"
    ).value
    reception_path = Path.objects.get(
        entity="path_preingest_reception"
    ).value

    ip_prepare_path = os.path.join(prepare_path, str(ip.pk))
    ip_reception_path = os.path.join(reception_path, str(ip.pk))

    events_path = os.path.join(ip_prepare_path, "ipevents.xml")

    structure = ip.get_profile('sip').structure
    info = ip.get_profile('sip').fill_specification_data(sa, ip)

    # ensure premis is created before mets
    filesToCreate = OrderedDict()

    if ip.profile_locked('preservation_metadata'):
        premis_profile = ip.get_profile('preservation_metadata')
        premis_dir, premis_name = find_destination("preservation_description_file", structure)
        premis_path = os.path.join(ip.ObjectPath, premis_dir, premis_name)
        filesToCreate[premis_path] = premis_profile.specification

    mets_dir, mets_name = find_destination("mets_file", structure)
    mets_path = os.path.join(ip.ObjectPath, mets_dir, mets_name)
    filesToCreate[mets_path] = ip.get_profile('sip').specification

    generate_xml_step = ProcessStep.objects.create(
        name="Generate XML",
        parent_step_pos=1
    )

    for fname, template in filesToCreate.iteritems():
        dirname = os.path.dirname(fname)
        t = ProcessTask.objects.create(
            name="ESSArch_Core.tasks.DownloadSchemas",
            params={
                "template": template,
                "dirname": dirname,
                "structure": structure,
                "root": ip.ObjectPath,
            },
            processstep_pos=1,
            log=EventIP,
            information_package=ip,
            responsible=self.request.user,
        )
        generate_xml_step.tasks.add(t)

    t = ProcessTask.objects.create(
        name="preingest.tasks.GenerateXML",
        params={
            "info": info,
            "filesToCreate": filesToCreate,
            "folderToParse": ip_prepare_path,
            "algorithm": ip.get_checksum_algorithm(),
        },
        processstep_pos=3,
        log=EventIP,
        information_package=ip,
        responsible=self.request.user,
    )
    generate_xml_step.tasks.add(t)

    if any(validators.itervalues()):
        validate_step = ProcessStep.objects.create(
            name="Validation",
            parent_step=main_step,
            parent_step_pos=2,
        )

        if validate_xml_file:
            validate_step.tasks.add(
                ProcessTask.objects.create(
                    name="preingest.tasks.ValidateXMLFile",
                    params={
                        "xml_filename": mets_path,
                    },
                    processstep_pos=1,
                    log=EventIP,
                    information_package=ip,
                    responsible=self.request.user,
                )
            )

            if ip.profile_locked("preservation_metadata"):
                validate_step.tasks.add(
                    ProcessTask.objects.create(
                        name="preingest.tasks.ValidateXMLFile",
                        params={
                            "xml_filename": premis_path,
                        },
                        processstep_pos=2,
                        log=EventIP,
                        information_package=ip,
                        responsible=self.request.user,
                    )
                )

        if validate_logical_physical_representation:
            validate_step.tasks.add(
                ProcessTask.objects.create(
                    name="preingest.tasks.ValidateLogicalPhysicalRepresentation",
                    params={
                        "dirname": ip.ObjectPath,
                        "xmlfile": mets_path,
                    },
                    processstep_pos=3,
                    log=EventIP,
                    information_package=ip,
                    responsible=self.request.user,
                )
            )

        validate_step.tasks.add(
            ProcessTask.objects.create(
                name="preingest.tasks.ValidateFiles",
                params={
                    "ip": ip,
                    "xmlfile": mets_path,
                    "validate_fileformat": validate_file_format,
                    "validate_integrity": validate_integrity,
                },
                processstep_pos=4,
                log=EventIP,
                information_package=ip,
                responsible=self.request.user,
            )
        )

        validate_step.save()

    info = {
        "_OBJID": str(ip.pk),
        "_OBJLABEL": ip.Label
    }

    filesToCreate = OrderedDict()
    filesToCreate[events_path] = get_event_spec()

    create_sip_step = ProcessStep.objects.create(
        name="Create SIP",
        parent_step_pos=3
    )

    for fname, template in filesToCreate.iteritems():
        dirname = os.path.dirname(fname)
        create_sip_step.tasks.add(ProcessTask.objects.create(
            name="ESSArch_Core.tasks.DownloadSchemas",
            params={
                "template": template,
                "dirname": dirname,
                "structure": structure,
                "root": ip.ObjectPath,
            },
            processstep_pos=-1,
            log=EventIP,
            information_package=ip,
            responsible=self.request.user,
        ))

    create_sip_step.tasks.add(ProcessTask.objects.create(
        name="preingest.tasks.GenerateXML",
        params={
            "info": info,
            "filesToCreate": filesToCreate,
            "algorithm": ip.get_checksum_algorithm(),
        },
        processstep_pos=0,
        log=EventIP,
        information_package=ip,
        responsible=self.request.user,
    ))

    create_sip_step.tasks.add(ProcessTask.objects.create(
        name="preingest.tasks.AppendEvents",
        params={
            "filename": events_path,
        },
        processstep_pos=1,
        log=EventIP,
        information_package=ip,
        responsible=self.request.user,
    ))

    spec = {
        "-name": "object",
        "-namespace": "premis",
        "-children": [
            {
                "-name": "objectIdentifier",
                "-namespace": "premis",
                "-children": [
                    {
                        "-name": "objectIdentifierType",
                        "-namespace": "premis",
                        "#content": [{"var": "FIDType"}],
                        "-children": []
                    },
                    {
                        "-name": "objectIdentifierValue",
                        "-namespace": "premis",
                        "#content": [{"var": "FID"}],
                        "-children": []
                    }
                ]
            },
            {
                "-name": "objectCharacteristics",
                "-namespace": "premis",
                "-children": [
                    {
                        "-name": "format",
                        "-namespace": "premis",
                        "-children": [
                            {
                                "-name": "formatDesignation",
                                "-namespace": "premis",
                                "-children": [
                                    {
                                        "-name": "formatName",
                                        "-namespace": "premis",
                                        "#content": [{"var": "FFormatName"}],
                                        "-children": []
                                    }
                                ]
                            }
                        ]
                    }
                ]
            },
            {
                "-name": "storage",
                "-namespace": "premis",
                "-children": [
                    {
                        "-name": "contentLocation",
                        "-namespace": "premis",
                        "-children": [
                            {
                                "-name": "contentLocationType",
                                "-namespace": "premis",
                                "#content": [{"var": "FLocationType"}],
                                "-children": []
                            },
                            {
                                "-name": "contentLocationValue",
                                "-namespace": "premis",
                                "#content": [{"text": "file:///%s.%s" % (ip.pk, container_format.lower())}],
                                "-children": []
                            }
                        ]
                    }
                ]
            }
        ],
        "-attr": [
            {
                "-name": "type",
                '-namespace': 'xsi',
                "-req": "1",
                "#content": [{"text": "premis:file"}]
            }
        ],
    }

    info = {
        'FIDType': "UUID",
        'FID': ip.ObjectIdentifierValue,
        'FFormatName': container_format.upper(),
        'FLocationType': 'URI',
        'FName': ip.ObjectPath,
    }

    create_sip_step.tasks.add(ProcessTask.objects.create(
        name="ESSArch_Core.tasks.InsertXML",
        params={
            "filename": events_path,
            "elementToAppendTo": "premis",
            "spec": spec,
            "info": info,
            "index": 0
        },
        processstep_pos=2,
        information_package=ip,
        responsible=self.request.user,
    ))

    if validate_xml_file:
        create_sip_step.tasks.add(
            ProcessTask.objects.create(
                name="preingest.tasks.ValidateXMLFile",
                params={
                    "xml_filename": events_path,
                },
                processstep_pos=3,
                log=EventIP,
                information_package=ip,
                responsible=self.request.user,
            )
        )

    if container_format.lower() == 'zip':
        zipname = os.path.join(ip_reception_path) + '.zip'
        container_task = ProcessTask.objects.create(
            name="preingest.tasks.CreateZIP",
            params={
                "dirname": ip_prepare_path,
                "zipname": zipname,
            },
            processstep_pos=4,
            log=EventIP,
            information_package=ip,
            responsible=self.request.user,
        )
    else:
        tarname = os.path.join(ip_reception_path) + '.tar'
        container_task = ProcessTask.objects.create(
            name="preingest.tasks.CreateTAR",
            params={
                "dirname": ip_prepare_path,
                "tarname": tarname,
            },
            processstep_pos=4,
            log=EventIP,
            information_package=ip,
            responsible=self.request.user,
        )

    create_sip_step.tasks.add(container_task)

    create_sip_step.tasks.add(
        ProcessTask.objects.create(
            name="preingest.tasks.DeleteFiles",
            params={
                "path": ip.ObjectPath
            },
            processstep_pos=45,
            log=EventIP,
            information_package=ip,
            responsible=self.request.user,
        )
    )

    create_sip_step.tasks.add(
        ProcessTask.objects.create(
            name="preingest.tasks.UpdateIPPath",
            params={
                "ip": ip,
            },
            result_params={
                "path": container_task.pk
            },
            processstep_pos=50,
            log=EventIP,
            information_package=ip,
            responsible=self.request.user,
        )
    )

    create_sip_step.tasks.add(
        ProcessTask.objects.create(
            name="preingest.tasks.UpdateIPStatus",
            params={
                "ip": ip,
                "status": "Created",
            },
            processstep_pos=60,
            log=EventIP,
            information_package=ip,
            responsible=self.request.user,
        )
    )

    create_sip_step.save()

    main_step.child_steps.add(
        start_create_sip_step, generate_xml_step, create_sip_step
    )
    main_step.information_package = ip
    main_step.save()
    main_step.run()

    return Response({'status': 'creating ip'})
def build(source_dir, target_dir, package_name=None, versionNumber=None):
    '''
    Create a release of a MicroDrop plugin source directory in the target
    directory path.

    Skip the following patterns:

     - ``bld.bat``
     - ``.conda-recipe/*``
     - ``.git/*``

    Parameters
    ----------
    source_dir : str
        Source directory.
    target_dir : str
        Target directory.
    package_name : str, optional
        Name of plugin Conda package (defaults to name of :data:`target_dir`).
    '''
    source_dir = ph.path(source_dir).realpath()
    target_dir = ph.path(target_dir).realpath()
    target_dir.makedirs_p()
    source_archive = source_dir.joinpath(source_dir.name + '.zip')
    if package_name is None:
        package_name = str(target_dir.name)

    logger.info('Source directory: %s', source_dir)
    logger.info('Source archive: %s', source_archive)
    logger.info('Target directory: %s', target_dir)
    logger.info('Package name: %s', package_name)

    # Export git archive, which substitutes version expressions in
    # `_version.py` to reflect the state (i.e., revision and tag info) of the
    # git repository.
    sp.check_call(['git', 'archive', '-o', source_archive, 'HEAD'],
                  shell=True)

    # Extract exported git archive to Conda MicroDrop plugins directory.
    with zipfile.ZipFile(source_archive, 'r') as zip_ref:
        zip_ref.extractall(target_dir)

    # Delete Conda build recipe from installed package.
    target_dir.joinpath('.conda-recipe').rmtree()

    # Delete git-related files from installed package.
    for p in target_dir.files('.git*'):
        p.remove()

    # Write package information to (legacy) `properties.yml` file.
    original_dir = ph.path(os.getcwd())
    try:
        os.chdir(source_dir)
        if versionNumber is None:
            import _version as v
    finally:
        os.chdir(original_dir)

    # Create properties dictionary object (cast types, e.g., `ph.path`, to
    # strings for cleaner YAML dump).
    if versionNumber is None:
        properties = {'package_name': package_name,
                      'plugin_name': str(target_dir.name),
                      'version': v.get_versions()['version'],
                      'versioneer': v.get_versions()}
    else:
        properties = {'package_name': package_name,
                      'plugin_name': str(target_dir.name),
                      'version': versionNumber,
                      'versioneer': "?"}

    with target_dir.joinpath('properties.yml').open('w') as properties_yml:
        # Dump properties to YAML-formatted file. Setting
        # `default_flow_style=False` writes each property on a separate line
        # (cosmetic change only).
        yaml.dump(properties, properties_yml, default_flow_style=False)
def version():
    from _version import get_versions

    version = get_versions()
    if version['error']:
        raise RuntimeError(version['error'])
    return version['version'].split('+')[0]
from __future__ import print_function

from csv import reader, DictReader
import exifread as er
import os
from os import path
import shutil
from sys import exit
from time import strptime, strftime, mktime, localtime, struct_time, time
from voluptuous import Required, Schema, MultipleInvalid
from itertools import cycle
from inspect import isclass
import logging

# versioneer
from _version import get_versions
__version__ = get_versions()['version']
del get_versions

EXIF_DATE_TAG = "Image DateTime"
EXIF_DATE_FMT = "%Y:%m:%d %H:%M:%S"

TS_V1_FMT = ("%Y/%Y_%m/%Y_%m_%d/%Y_%m_%d_%H/"
             "{tsname:s}_%Y_%m_%d_%H_%M_%S_{n:02d}.{ext:s}")
TS_V2_FMT = ("%Y/%Y_%m/%Y_%m_%d/%Y_%m_%d_%H/"
             "{tsname:s}_%Y_%m_%d_%H_%M_%S_{n:02d}.{ext:s}")
TS_DATE_FMT = "%Y_%m_%d_%H_%M_%S"
TS_FMT = TS_V1_FMT
TS_NAME_FMT = "{expt:s}-{loc:s}-C{cam:02d}~{res:s}-{step:s}"

FULLRES_CONSTANTS = {"original", "orig", "fullres"}
IMAGE_TYPE_CONSTANTS = {"raw", "jpg"}
RAW_FORMATS = {"cr2", "nef", "tif", "tiff"}
IMAGE_SUBFOLDERS = {"raw", "jpg", "png", "tiff", "nef", "cr2"}
import errno

from rest_framework import filters, serializers

from ESSArch_Core.api.serializers import DynamicHyperlinkedModelSerializer
from ESSArch_Core.auth.serializers import UserSerializer
from ESSArch_Core.ip.models import InformationPackage, Order
from ESSArch_Core.ip.serializers import (
    AgentSerializer,
    InformationPackageSerializer as CoreInformationPackageSerializer,
    WorkareaSerializer,
)
from ESSArch_Core.profiles.models import SubmissionAgreement
from _version import get_versions
from configuration.serializers import ArchivePolicySerializer

VERSION = get_versions()['version']


class InformationPackageSerializer(CoreInformationPackageSerializer):
    workarea = serializers.SerializerMethodField()
    aic = serializers.PrimaryKeyRelatedField(queryset=InformationPackage.objects.all())
    first_generation = serializers.SerializerMethodField()
    last_generation = serializers.SerializerMethodField()
    agents = serializers.SerializerMethodField()

    def get_first_generation(self, obj):
        if hasattr(obj, 'first_generation'):
            return obj.first_generation

        return obj.is_first_generation()
# The master toctree document.
master_doc = 'index'

# General information about the project.
project = 'postpic'
copyright = '2017, the postpic developers'
author = 'the postpic developers'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
sys.path.insert(0, os.path.abspath('../../postpic'))
import _version
version = _version.get_versions()['version']
# The full version, including alpha/beta/rc tags.
release = version
#os.chdir('doc')

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# These patterns also affect html_static_path and html_extra_path.
exclude_patterns = []
def essarch_version():
    return get_versions()['version']
# -*- coding: utf-8 -*-
from _version import get_versions
__version__ = get_versions()["version"]
del get_versions
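# For reference: the dict returned by a versioneer-generated get_versions()
# has a small, stable set of keys. A sketch of its typical shape (the values
# below are illustrative only; the "date" key is absent in older versioneer
# releases):
#
# {
#     "version": "1.2.0+3.gabc1234.dirty",   # PEP 440-style version string
#     "full-revisionid": "abc1234...",        # full git commit hash
#     "dirty": True,                          # uncommitted changes present?
#     "error": None,                          # non-None if lookup failed
#     "date": "2019-01-01T00:00:00+0000",     # commit date
# }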
    'matplotlib.sphinxext.plot_directive',
    'sphinx.ext.autodoc',
    'sphinx.ext.doctest',
    'sphinx.ext.coverage',
    'sphinx.ext.mathjax',
    'sphinx.ext.napoleon',
    'sphinx.ext.intersphinx',
    'sphinx.ext.extlinks'
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The master toctree document.
master_doc = 'index'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
v = get_versions()['version']
# Strip out all after the commit offset and hash
if 'dirty' in v:
    v = v[:v.find('dirty') - 1]
# full release number, including alpha/beta/rc tags and versioneer offsets
release = v
if '+' in v:
    v = v[:v.find('+')]
# shorter version M.m.p
version = v
del get_versions, v

# General information about the project.
project = 'serpentTools'
devTeamLink = ('https://github.com/CORE-GATECH-GROUP/serpent-tools/graphs/'
def command_version(self, args):
    from _version import get_versions
    print get_versions()['version']
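# The snippet above uses the Python 2 print statement; a Python 3
# equivalent of the same command would be:
def command_version(self, args):
    from _version import get_versions
    print(get_versions()['version'])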
def cli(args=None):
    import importlib.resources as importlib_resources

    if not args:
        args = sys.argv[1:]

    command = args[0]

    if command == "jobscript.sh":
        print(importlib_resources.read_text(__package__, 'jobscript.sh'), end='')
        return

    if command in ["help", "-h", "--help"]:
        from _version import get_versions
        print("Snakeobjects %s\n" % (get_versions()['version']))
        if len(args) == 1:
            print("Available commands are:\n\t", "\n\t".join(helpData.keys()), "\n", sep="")
            # print("Typical sequence of commands is describe, prepareTest, prepare, run:\n")
            for cmd, hs in helpData.items():
                print(cmd)
                print('#' * len(cmd))
                print(hs)
                print()
        elif len(args) == 2:
            hCmd = args[1]
            if hCmd in helpData:
                print(helpData[hCmd])
            else:
                print("The command", hCmd, "is unknown")
                return 1
        else:
            print("Help accepts at most one argument.")
            return 1
        return

    from snakeobjects import __version__
    if command == "version":
        print(__version__)
        return

    from snakeobjects import Project, ObjectGraph, load_object_graph, graph
    import yaml
    from importlib.util import spec_from_file_location, module_from_spec

    proj = Project()
    print("# WORKING ON PROJECT", proj.directory)
    print("# WITH PIPELINE", proj.get_pipeline_directory())

    if command in ["prepare", "prepareTest"]:
        bldObjGraphPy = proj.get_pipeline_directory() + "/build_object_graph.py"
        if os.path.isfile(bldObjGraphPy):
            spec = spec_from_file_location("build_object_graph", bldObjGraphPy)
            foo = module_from_spec(spec)
            spec.loader.exec_module(foo)
            newObjectGraph = ObjectGraph()
            foo.run(proj, newObjectGraph, *args[1:])
        else:
            print(f'ERROR: There is no {bldObjGraphPy}')
            exit(1)

        if command == "prepareTest":
            print("Current graph stats")
            print("+++++++++++++++++++")
            proj.objectGraph.print_stats()
            print("\n")
            print("New graph stats")
            print("+++++++++++++++")
            newObjectGraph.print_stats()
        else:
            proj.objectGraph = newObjectGraph
            proj.save_object_graph()
            proj.prepare_objects()
    elif command in ["prepareObjects"]:
        proj.prepare_objects()
    elif command == "run":
        sargs = ['snakemake',
                 '-s', proj.directory + '/objects/.snakeobjects/main.snakefile',
                 '-d', proj.directory + '/objects']
        if "default_snakemake_args" in proj.parameters:
            sargs += proj.parameters["default_snakemake_args"].split()
        sargs += args[1:]
        # os.chdir(proj.directory + '/objects')
        print("UPDATING ENVIRONMENT:")
        print("export SO_PROJECT=", proj.directory, sep="")
        print("export SO_PIPELINE=", proj.get_pipeline_directory(), sep="")
        print("export PATH=$SO_PIPELINE:$PATH", sep="")
        print("RUNNING:", " ".join(sargs))
        os.environ['SO_PROJECT'] = proj.directory
        os.environ['SO_PIPELINE'] = proj.get_pipeline_directory()
        os.environ['PATH'] = proj.get_pipeline_directory() + ":" + os.environ['PATH']
        os.execvp('snakemake', sargs)
    elif command == "submit":
        sargs = []
        if "default_snakemake_args" in proj.parameters:
            sargs += proj.parameters["default_snakemake_args"].split()
        else:
            raise ProjectException("No profile specified")
        sargs += args[1:]
        profile = sargs[sargs.index('--profile') + 1]
        if not os.path.exists(profile):
            raise ProjectException("Profile not found %s" % profile)
        if not os.path.exists(profile + "/config.yaml"):
            raise ProjectException("No config.yaml in %s" % profile)
        pr_config = load_yaml(profile + "/config.yaml")
        if "cluster" not in pr_config:
            raise ProjectException("cluster is not specified in %s" % (profile + "/config.yaml"))
        cmd = pr_config["cluster"]
        # os.chdir(proj.directory + '/objects')
        print("UPDATING ENVIRONMENT:")
        print("export SO_PROJECT=", proj.directory, sep="")
        print("export SO_PIPELINE=", proj.get_pipeline_directory(), sep="")
        print("export PATH=$SO_PIPELINE:$PATH", sep="")
        print("RUNNING:", " ".join(sargs))
        os.environ['SO_PROJECT'] = proj.directory
        os.environ['SO_PIPELINE'] = proj.get_pipeline_directory()
        os.environ['PATH'] = proj.get_pipeline_directory() + ":" + os.environ['PATH']
        if os.system('sobjects jobscript.sh >$SO_PROJECT/objects/.snakeobjects/jobscript.sh'):
            raise ProjectException("sobjects jobscript.sh failed")
        with open(proj.directory + '/objects/.snakeobjects/jobscript.sh', 'a') as js:
            for k, v in pr_config.items():
                if k not in 'jobname jobscript cluster cluster-status'.split(' '):
                    js.write('--' + str(k) + ' ' + str(v) + ' ')
            js.write(' '.join(args[1:]))
        os.system("%s/%s" % (profile, cmd) + " $SO_PROJECT/objects/.snakeobjects/jobscript.sh")
        # os.execvp('python', [profile + "/" + cmd, "$SO_PROJECT/objects/.snakeobjects/jobscript.sh"])
    elif command == "describe":
        print("Project parameters:")
        for k, v in proj.parameters.items():
            print(f"\t{k}: {v}")
        proj.objectGraph.print_stats()
    elif command == "graph":
        print(args, file=sys.stderr)
        graph.driver(proj.objectGraph, args)
    else:
        print("Don't know the command:", command)
        return 1
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
from pathlib import Path
import sys
sys.path.insert(0, str(Path().resolve().parent / 'scopetools'))
from _version import get_versions

# -- Project information -----------------------------------------------------

project = 'SCOPE-tools'
copyright = '2020, Singleron Biotechnologies'
author = 'Singleron Biotechnologies'

# The full version, including alpha/beta/rc tags
version = get_versions()['version'].split('+')[0]
release = get_versions()['version'].split('+')[0]

# -- General configuration ---------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = []

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'Cooperative Lane Management and Traffic flow Optimisation'
copyright = u'2018, Malte Aschermann'
author = u'Malte Aschermann'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = _version.get_versions()['version']
# The full version, including alpha/beta/rc tags.
release = _version.get_versions()['version']

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# These patterns also affect html_static_path and html_extra_path.
exclude_patterns = []
# -- Path setup --------------------------------------------------------------

import os
import sys

sys.path.append(os.path.dirname(os.path.dirname(__file__)))
sys.path.append(os.path.join(os.path.dirname(__file__), "../openscm"))
from _version import get_versions  # isort:skip # append path before

# -- Project information -----------------------------------------------------

project = "OpenSCM"
copyright = "2018-2019, Robert Gieseke, Zebedee Nicholls, Sven Willner"
author = "Robert Gieseke, Zebedee Nicholls, Sven Willner"

version = get_versions()["version"]  # The short X.Y version
release = version  # The full version, including alpha/beta/rc tags

# -- General configuration ---------------------------------------------------

exclude_patterns = ["build", "Thumbs.db", ".DS_Store"]
extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.coverage",
    "sphinx.ext.intersphinx",
    "sphinx.ext.napoleon",
    "sphinx_autodoc_typehints",  # must be after sphinx.ext.napoleon
]
language = "en"
master_doc = "index"
        self.test_args = []
        self.test_suite = True

    def run_tests(self):
        # Run nose ensuring that argv simulates running nosetests directly
        import nose
        nose.run_exit(argv=['nosetests'])


commands = {}
commands["test"] = NoseTestCommand

setup(
    name='pyaff4',
    long_description=long_description,
    version=_version.get_versions()["pep440"],
    cmdclass=commands,
    description='Python Advanced Forensic Format Version 4 library.',
    author='Michael Cohen',
    author_email='*****@*****.**',
    url='https://www.aff4.org/',
    packages=['pyaff4'],
    package_dir={"pyaff4": "pyaff4"},
    install_requires=[
        "aff4-snappy == 0.5",
        "rdflib >= 4.2.1",
        "intervaltree >= 2.1.0",
    ],
    extras_require=dict(
        cloud="google-api-python-client"
    )
sys.path.append(os.path.dirname(os.path.dirname(__file__)))
sys.path.append(
    os.path.join(os.path.dirname(__file__), "../../src/openscm_twolayermodel"))
from _version import get_versions  # isort:skip # append path before

# -- Project information -----------------------------------------------------

project = "OpenSCM Two Layer Model"
authors = ", ".join(["Chris Smith", "Zeb Nicholls"])
copyright_year = "2020"
copyright = "{}, {}".format(copyright_year, authors)
author = authors

# The short X.Y version
version = get_versions()["version"].split("+")[0]
# The full version, including alpha/beta/rc tags
release = get_versions()["version"]

# -- General configuration ---------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.coverage",
    "sphinx.ext.intersphinx",
    "sphinx.ext.napoleon",  # pass numpy style docstrings
    "nbsphinx",
    "sphinx.ext.mathjax",
def __init__(self, application="ESSArch", agent_role=""):
    Handler.__init__(self)
    self.application = application
    self.agent_role = agent_role
    self.version = get_versions()['version']
from _version import get_versions
__version__ = get_versions()['version']
del get_versions
def main():
    from _version import get_versions
    __version__ = get_versions()['version']
    print(__version__)
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
"""Meta-script for pulling in all Rekall components."""
__author__ = "Michael Cohen <*****@*****.**>"

import os
import sys
import subprocess

from setuptools import setup
from setuptools.command.install import install as _install
from setuptools.command.develop import develop as _develop

import _version

VERSION = _version.get_versions()

rekall_description = "Rekall Memory Forensic Framework"

# This is a metapackage which pulls in the dependencies. There are two main
# installation scenarios:
#
# 1) We get installed from PyPi from our own sdist. In this case we need to
#    declare dependencies on the released PyPi packages.
#
# 2) We get run from the root of the source tree (e.g. checked out from git).
#    In this case we need to declare the setup.py as a dependency so it gets
#    installed first.

class install(_install):
    def do_egg_install(self):
import os
import sys

sys.path.append(os.path.dirname(os.path.dirname(__file__)))
sys.path.append(os.path.join(os.path.dirname(__file__), "../pymagicc"))
from _version import get_versions

# -- Project information -----------------------------------------------------

project = "Pymagicc"
copyright = "2018, Pymagicc Authors"
author = "Robert Gieseke, Zeb Nicholls, Jared Lewis, Sven Willner, Matthias Mengel"

# The short X.Y version
version = get_versions()["version"]
# The full version, including alpha/beta/rc tags
release = version

# -- General configuration ---------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.autosummary",
# The master toctree document.
master_doc = 'index'

# General information about the project.
project = 'postpic'
copyright = '2020, the postpic developers'
author = 'the postpic developers'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
sys.path.insert(0, os.path.abspath('../../postpic'))
import _version
version = _version.get_versions()['version']
# The full version, including alpha/beta/rc tags.
release = version
#os.chdir('doc')

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# These patterns also affect html_static_path and html_extra_path.
exclude_patterns = []
# """Meta-script for pulling in all Rekall components.""" from __future__ import print_function __author__ = "Michael Cohen <*****@*****.**>" import io import os import sys import subprocess from setuptools import setup from setuptools.command.install import install as _install from setuptools.command.develop import develop as _develop import _version VERSION = _version.get_versions() rekall_description = "Rekall Memory Forensic Framework" # This is a metapackage which pulls in the dependencies. There are two main # installation scenarios: # 1) We get installed from PyPi from our own sdist. In this case we need to # declare dependencies on the released PyPi packages. # 2) We get run from the root of the source tree (e.g. checked out from git). In # this case we need to declare the setup.py as a dependency so it gets installed # first. class install(_install):
from keys import (
    BadSignatureError, BadPrefixError,
    create_keypair, SigningKey, VerifyingKey,
    remove_prefix, to_ascii, from_ascii,
)

(
    BadSignatureError, BadPrefixError,
    create_keypair, SigningKey, VerifyingKey,
    remove_prefix, to_ascii, from_ascii,
)  # hush pyflakes

from _version import get_versions
__version__ = get_versions()["version"]
del get_versions