Example #1
0
  def _DetectExecutedBinaries(file_name):
    """Detect executed binaries from an strace output file.

    Args:
      file_name: An strace output file.

    Returns:
      A set of the absolute paths of binaries executed during an strace run.
      On a read error, returns whatever was collected before the failure.
    """
    executed_binaries = set()
    try:
      with open(file_name, 'r') as strace_file:
        for line in strace_file:
          match = BinaryLauncher._EXEC_EXPRESSION.search(line)
          if match:
            binary = match.group(1)
            if binary[0] != '/':
              logging.error('Expecting an absolute path to a binary, found %s '
                            'instead.', binary)
            else:
              executed_binaries.add(binary)
      # Removed the redundant strace_file.close(): the `with` statement
      # already closes the file on exit.
      return executed_binaries
    except (IOError, OSError) as err:
      logging.error('Could not read strace file %s: %s', file_name, err)
      return executed_binaries
Example #2
0
def handle(data, con, apikey=None):
  """Dispatch a JSON command message to its handler and return a JSON reply.

  Args:
    data: JSON-encoded request string; expected to carry a 'cmd' key and,
      when an apikey is configured, a matching 'key' entry.
    con: connection object passed through to the selected handler.
    apikey: optional API key; when not None, requests must authenticate.

  Returns:
    JSON-encoded handler result, or a {'r': 'fail', ...} reply on
    authentication failure or handler error.
  """
  d = json.loads(data)

  handlers = {'import': importit, 'ping': ping,
      'listimported': listimported, 'slice': sliceit,
      'listdone': listdone, 'getdone': getdone,
      'importconfig': importconfig, 'listconfig': listconfigs,
      'listprogress': listprogress, 'getstats': getstats,
      'journal': getjournal, 'del': wipefile, 'wait': waitfor}

  # Fall back to a no-op handler for unknown or missing commands.
  # (dict.has_key() was removed in Python 3; `in` works in both.)
  hndlr = noop
  cmd = 'noop'
  if 'cmd' in d and d['cmd'] in handlers:
    cmd = d['cmd']
    hndlr = handlers[cmd]

  logging.info('cmd: ' + cmd)

  if apikey is not None:
    # Missing key -> d.get('key') is None, which can never equal a
    # non-None apikey, so both absent and wrong keys fail here.
    if d.get('key') != apikey:
      logging.info('authentication failed for "{}" key!'.format(
        d.get('key', '')))
      return json.dumps({'r': 'fail',
        'm': 'authentication failed. incorrect apikey'})

  try:
    r = hndlr(d, con)
    result = json.dumps(r)
  except Exception as e:
    logging.error(str(e))
    result = json.dumps({u'm': unicode(e), u'r': u'fail'})
  logaccess(len(data), len(result), unicode(cmd), con)

  return result
Example #3
0
    def contour(self, data):
        """
        Overlay a contour-plot on the diffraction image.

        @param data: 2darray with the 2theta values in radians...
        """
        if self.fig is None:
            logging.warning("No diffraction image available => not showing the contour")
            return

        # Strip previously-overlaid images/contours, keeping the base image.
        while len(self.msp.images) > 1:
            self.msp.images.pop()
        while len(self.ct.images) > 1:
            self.ct.images.pop()
        while len(self.ct.collections) > 0:
            self.ct.collections.pop()

        # Derive 2-theta contour levels from the calibrant d-spacings when
        # both d-spacings and wavelength are known.
        if self.points.dSpacing and self.points._wavelength:
            d_spacings = numpy.array(self.points.dSpacing)
            angles = list(2.0 * numpy.arcsin(5e9 * self.points._wavelength / d_spacings))
        else:
            angles = None

        try:
            xlim = self.ax.get_xlim()
            ylim = self.ax.get_ylim()
            self.ct.contour(data, levels=angles)
            self.ax.set_xlim(xlim)
            self.ax.set_ylim(ylim)
            print("Visually check that the curve overlays with the Debye-Sherrer rings of the image")
            print("Check also for correct indexing of rings")
        except MemoryError:
            logging.error("Sorry but your computer does NOT have enough memory to display the 2-theta contour plot")
        self.fig.show()
Example #4
0
def remove(name):
    """
    Remove a snippet with a given name
    If there is no such snippet, return '404: Snippet Not Found'.
    """
    # Fixed the log message: the original placeholder "remove({!r}" was
    # missing its closing parenthesis.
    logging.error("FIXME: Unimplemented - remove({!r})".format(name))
    return ""
Example #5
0
 def execute(self, email_models):
     """Merge per-destiny table data from all email models into the last
     model's data, then forward the merged result to each destiny plugin.

     Args:
         email_models: list of email model objects; the last one acts as
             the accumulator for the merge.  No-op when empty.
     """
     logging.debug("In Destiny::execute()")
     if not email_models:
         return
     emails_id = []
     destinations = {}
     # Seed the merge target with the last model's data for every plugin.
     # NOTE(review): this appends the last model's id once per plugin, so
     # emails_id can contain duplicates -- verify that is intended.
     for destiny in self._plugins.keys():
         destinations.setdefault(destiny, email_models[-1].get(destiny))
         emails_id.append(email_models[-1].email_id())
     # Fold every other model's table rows into the accumulator tables,
     # keyed by matching table name.
     for email_model in email_models[:-1]:
         for destiny in self._plugins.keys():
             d_tables = destinations.get(destiny).get("tables")
             for d_table in d_tables:
                 for k, v in d_table.iteritems():
                     m_tables = email_model.get(destiny).get("tables")
                     for m_table in m_tables:
                         if k in m_table:
                             d_table.setdefault(k, []).extend(m_table[k])
         emails_id.append(email_model.email_id())
     # Hand the merged models to each forwarder; on failure, queue every
     # involved email id as errored and continue with the next forwarder.
     for destiny, models in destinations.iteritems():
         for forward in self._plugins.get(destiny):
             try:
                 forward.execute(models)
             except Exception, e:
                 logging.error("!! Error-execute: %s" % (str(e),))
                 logging.info("Add emails in queure error: %s" % str(emails_id))
                 for email_id in emails_id:
                     self.add_email_error(email_id)
                 continue
Example #6
0
 def act(self, force_act=False, action=None, skip_responses=False):
     """
     returns:
         (action, response) tuple.  response type depends on the action that was performed.

     NOTE(review): the visible block ends inside the response-handling
     section and appears to be truncated.
     """
     if not force_act:
         # Respect the global is_tweeting kill-switch unless explicitly forced.
         config = ConfigurationAccessor.get_or_create()
         if config and (config.is_tweeting is not None) and (not safe_int(config.is_tweeting)):
             logging.debug("config.is_tweeting is False; hiding")
             return ()

     result = []
     responded = False
     if not skip_responses:
         try:
             direct, response = self.respond()
             if (direct or response):
                 # a response to a direct message or mention was generated
                 responded = True
                 if direct:
                     result.append(direct.AsDict())
                 if response:
                     result.append(response.AsDict())
         except Exception, e:
             logging.error(e)
Example #7
0
    def call(self, function, params=None):
        """Perform a rate-limited GET against the configured REST API.

        Args:
            function: API method name, appended to the base url.
            params: optional dict of query parameters.

        Returns:
            Parsed JSON payload, or None when the request/decoding failed
            or the service reported an error (a dict containing 'ruid').
        """
        self.requestPerMinute += 1
        now = datetime.utcnow()

        # Throttle: sleep out the rest of the current minute once over the
        # limit.  NOTE(review): the counter only resets below when the
        # minute changes -- confirm this interacts correctly with the sleep.
        if self.requestPerMinute >= self.requestLimit:
            waittime = 60 - now.second
            logging.warning("Limit for request per minute exceeded. Waiting for: {0} sec.".format(waittime))
            time.sleep(waittime)
            now = datetime.utcnow()

        if self.checkMinute != now.minute:
            self.requestPerMinute = 0
            self.checkMinute = now.minute

        payload = ''
        try:
            # Build the '?k=v&...' query string (empty when no params given).
            p = "" if not params else '?' + "&".join(
                ["{key}={value}".format(key=k, value=v) for (k, v) in params.iteritems()])
            url = "{base}.{func}{params}".format(base=self.baseConfig["url"], func=function, params=p)
            logging.debug("{0} {1} API call:{2}".format(self.checkMinute, self.requestPerMinute, url))
            request = urllib2.Request(url, None, self.baseConfig["headers"])
            stream = urllib2.urlopen(request)
            payload = stream.read()
            data = json.loads(payload)
            # A dict carrying 'ruid' marks an API-level error response.
            if isinstance(data, dict) and 'ruid' in data:
                logging.error('Api call failed with error: {0} Code: {1}'.format(data['message'], data['code']))
                return None
            return data

        except Exception as e:
            logging.error('Error: {0} Context: {1}'.format(e, payload))
            return None
    def get(self):
        """Serve an app's APN certificate/key, encrypted for the jabber cloud.

        Authenticates via the X-Nuntiuz-Secret header, warns by mail/log when
        the certificate is close to expiry, and writes the encrypted payload.
        """
        settings = get_server_settings()
        secret = self.request.headers.get("X-Nuntiuz-Secret", None)
        if secret != settings.jabberSecret:
            logging.error("Received unauthenticated apple certificate request, ignoring ...")
            return

        app_id = self.request.get("id")
        if not app_id:
            return
        app = get_app_by_id(app_id)
        if not app:
            return

        expiry = app.apple_push_cert_valid_until
        # Mail the dashboard 30 days out; start logging errors 15 days out.
        if expiry < now() + 30 * DAY:
            send_mail(settings.dashboardEmail,
                      settings.supportWorkers,
                      "The APN cert of %s is about to expire" % app_id,
                      "The APN cert of %s is valid until %s GMT" % (app_id, time.ctime(expiry)))
        if expiry < now() + 15 * DAY:
            logging.error("The APN cert of %s is valid until %s GMT" % (app_id, time.ctime(expiry)))

        blob = json.dumps(dict(cert=app.apple_push_cert, key=app.apple_push_key, valid_until=expiry))
        self.response.headers['Content-Type'] = 'application/binary'
        _, encrypted = encrypt_for_jabber_cloud(secret, blob)
        self.response.write(encrypted)
Example #9
0
def local(command):
    """
    Runs a shell command locally.

    Intended to be a close-enough dropin replacement for Fabric's local() command.
    """
    # Remember where we were so we can restore it afterwards.
    previous_dir = os.getcwd()

    # switch to wherever the lcd context manager tells us to be
    logging.debug("Changing directory to [%s]" % lcd_directory)
    os.chdir(lcd_directory)
    try:
        exec_logger = logging.getLogger("exec")
        exec_logger.debug("Running: [%s]" % command)
        captured = []
        proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
        # Stream stdout line by line, logging and collecting as we go.
        for raw_line in iter(proc.stdout.readline, ""):
            stripped = raw_line.rstrip()
            captured.append(stripped)
            exec_logger.debug(stripped)

        proc.communicate()
        if proc.returncode != 0:
            logging.error(captured)
            raise Exception("Return code was non-zero for command [%s]" % command)
        return captured
    finally:
        # go back to the old directory no matter what happened
        logging.debug("Changing directory back to [%s]" % previous_dir)
        os.chdir(previous_dir)
Example #10
0
 def virsh_capabilities(option):
     """Run 'virsh capabilities' with extra options and report the outcome.

     Args:
         option: extra command-line text appended to the virsh invocation.

     Returns:
         (exit_status, stripped stdout) tuple; stderr and status are logged.
     """
     result = utils.run("virsh capabilities  %s" % option, ignore_status=True)
     stdout = result.stdout.strip()
     logging.info("Output: %s", stdout)
     logging.error("Error: %s", result.stderr.strip())
     logging.info("Status: %d", result.exit_status)
     return result.exit_status, stdout
Example #11
0
	def do_POST(self):
		"""Handle a JSON POST: validate content type, parse the body, check
		the API token, then dispatch the remaining fields to the handler.
		"""
		if self.headers['content-type'] != CONTENT_TYPE:
			self.send_error(400, 'bad content type')
			self.end_headers()
			return
		try:
			content_length = int(self.headers['content-length'])
			content = self.rfile.read(content_length).decode()
			data = json.loads(content)
		# TypeError added: a missing content-length header makes int(None)
		# raise TypeError, which previously escaped as a 500.
		except (TypeError, ValueError) as err:
			self.send_error(400, 'bad json', str(err))
			self.end_headers()
			return
		# isinstance is the idiomatic type check (was: type(data) is not dict).
		if not isinstance(data, dict) or '_token' not in data:
			self.send_error(401, 'missing api token')
			self.end_headers()
			return
		if data.pop('_token') not in self.server.token:
			self.send_error(401, 'invalid api token')
			self.end_headers()
			return
		try:
			self.server.handle(**data)
		except Exception as err:
			logging.error('{}: {}'.format(type(err).__name__, err))
			self.send_error(400, 'bad parameters')
		else:
			self.send_response(201, 'value received')
		self.end_headers()
Example #12
0
    def queryHTTP (self, data):
        """POST a JSON-RPC request to the namecoin daemon over self.con.

        Args:
            data: request body to send.

        Returns:
            The raw response body, or None when the request failed.
        """
        result = None

        try:
            self.con.putrequest("POST", "/")
            self.con.putheader("Connection", "Keep-Alive")
            self.con.putheader("User-Agent", "bitmessage")
            self.con.putheader("Host", self.host)
            self.con.putheader("Content-Type", "application/json")
            self.con.putheader("Content-Length", str(len(data)))
            self.con.putheader("Accept", "application/json")
            authstr = "%s:%s" % (self.user, self.password)
            self.con.putheader("Authorization", "Basic %s" % base64.b64encode (authstr))
            self.con.endheaders()
            self.con.send(data)
            try:
                resp = self.con.getresponse()
                result = resp.read()
                if resp.status != 200:
                    # Format eagerly: Exception() does not apply logging-style
                    # %-args, so the original message was never interpolated.
                    raise Exception("Namecoin returned status %i: %s" % (resp.status, resp.reason))
            # Narrowed from a bare `except:`, which would also swallow
            # KeyboardInterrupt/SystemExit.
            except Exception:
                logger.error("HTTP receive error")
        except Exception:
            logger.error("HTTP connection error", exc_info=True)

        return result
Example #13
0
    def test (self):
        """Check connectivity to the configured namecoin backend.

        Returns:
            ('success', message) or ('failed', message) tuple describing the
            outcome of the connection test.
        """
        try:
            if self.nmctype == "namecoind":
                res = self.callRPC ("getinfo", [])
                vers = res["version"]

                # Decompose the packed integer version into parts.
                # Explicit floor division: plain '/' floors ints on Python 2
                # but yields floats on Python 3, breaking the %d formats.
                v3 = vers % 100
                vers = vers // 100
                v2 = vers % 100
                vers = vers // 100
                v1 = vers
                if v3 == 0:
                  versStr = "0.%d.%d" % (v1, v2)
                else:
                  versStr = "0.%d.%d.%d" % (v1, v2, v3)
                return ('success',  tr._translate("MainWindow",'Success!  Namecoind version %1 running.').arg(unicode(versStr)) )

            elif self.nmctype == "nmcontrol":
                res = self.callRPC ("data", ["status"])
                prefix = "Plugin data running"
                if ("reply" in res) and res["reply"][:len(prefix)] == prefix:
                    return ('success', tr._translate("MainWindow",'Success!  NMControll is up and running.'))

                logger.error("Unexpected nmcontrol reply: %s", res)
                return ('failed',  tr._translate("MainWindow",'Couldn\'t understand NMControl.'))

            else:
                # Only the two backend types above are supported.
                assert False

        except Exception:
            logger.exception("Namecoin connection test failure")
            return ('failed', "The connection to namecoin failed.")
Example #14
0
 def plus(self, start, t):
     '''@return start time + t work time (positive or negative)

     Raises:
         ValueError: when start does not fall within working hours.
     '''
     start = datetimef(start, self.start)
     if not self.isworktime(start):
         logging.error('%s is not in worktime' % start)
         # The original bare `raise` had no active exception to re-raise and
         # would itself crash with RuntimeError; raise explicitly instead.
         raise ValueError('%s is not in worktime' % start)
     # Number of whole working days covered by t, then the sub-day remainder.
     days = timedelta_div(t, self.delta)
     res = start
     while days >= 1:
         res = self.nextworkday(res)
         days = days - 1
     while days <= -1:
         res = self.prevworkday(res)
         days = days + 1

     remaining = timedelta_mul(self.delta, days)  # less than one day of work
     day = res.date()
     start = datetimef(day, self.start)
     end = datetimef(day, self.end)
     if (res + remaining) < start:  # skip to previous day
         remaining = (res + remaining) - start  # in full time
         res = datetimef(self.prevworkday(day), self.end)
     if (res + remaining) > end:  # skip to next day
         remaining = (res + remaining) - end  # in full time
         res = datetimef(self.nextworkday(day), self.start)
     return res + remaining
Example #15
0
	def post(self):
		username = self.request.get('quotationrEmail')

		user = self.user_model.get_by_auth_id(username)
		if not user:
			logging.info('Could not find any user entry for username %s', username)
			self.response.out.write('fail:::cant find email')
			return

		user_id = user.get_id()
		token = self.user_model.create_signup_token(user_id)

		verification_url = self.uri_for('verification', type='p', user_id=user_id,
			signup_token=token, _full=True)

		
		logging.error(verification_url)
		
		mail.send_mail(sender="Quotationr <jason@quotationr.com>",
                to=user.email_address,
                subject="Reset Your Quotationr Password",
                body="Please click the following link to reset your Quotationr password:\n\n " + verification_url)
		
		#self.response.out.write('success:::' + user.email_address)
		self.response.out.write('success:::email sent')
Example #16
0
def creatorProcess(work, jobCacheDir):
    """
    _creatorProcess_

    Creator work areas and pickle job objects

    Unpacks the fields of a job-creation work unit and raises
    JobCreatorException when a critical key is missing.
    NOTE(review): the block visible here appears to be only the beginning
    of the function; the rest is outside this excerpt.
    """
    createWorkArea  = CreateWorkArea()

    try:
        # Pull the pieces of the work unit; .get() calls without an explicit
        # default return None when the key is absent.
        wmbsJobGroup = work.get('jobGroup')
        workflow     = work.get('workflow')
        wmWorkload   = work.get('wmWorkload')
        wmTaskName   = work.get('wmTaskName')
        sandbox      = work.get('sandbox')
        owner        = work.get('owner')
        ownerDN      = work.get('ownerDN',None)
        ownerGroup   = work.get('ownerGroup','')
        ownerRole    = work.get('ownerRole','')
        scramArch    = work.get('scramArch', None)
        swVersion    = work.get('swVersion', None)
        agentNumber  = work.get('agentNumber', 0)

        # Fall back to the plain owner when no DN was supplied.
        if ownerDN == None:
            ownerDN = owner

        jobNumber    = work.get('jobNumber', 0)
    except KeyError, ex:
        msg =  "Could not find critical key-value in work input.\n"
        msg += str(ex)
        logging.error(msg)
        raise JobCreatorException(msg)
Example #17
0
	def get(self):
		"""Render the alerts page listing the current user's traffic alerts."""
		alertFlag = None
		dynamicFlag = True
		userID = str(self.user_info['user_id'])
		alertHTML = ""
		q = Alert.all()
		q.filter('userID =', userID)
		for alert in q:
			alertFlag = True
			# Email alerts show the address; SMS alerts show (areacode) prefix-suffix.
			if alert.trafficAlert == 'email':
				contact = alert.email
			else:
				contact = "(" + alert.areacode + ")" + " " + alert.prefix + "-" + alert.suffix

			alertHTML += "<tr id='row" + str(alert.key()) + "'>"
			alertHTML += "<td><input class='alertCheckbox' type='checkbox' id='" + str(alert.key()) + "'/></td>"
			alertHTML += "<td>" + contact + "</td></tr>"

		# Was logging.error(): dumping the rendered table rows is debug
		# tracing, not an error condition.
		logging.debug(alertHTML)
		# Dropped the unused 'template_values' dict; 'params' is what is rendered.
		params = { 'bodyID' : 'alertsPage', 'alertHTML' : alertHTML, 'alerts' : alertFlag, 'dynamic' : dynamicFlag }
		self.render_template('alerts.html', params)
Example #18
0
def retrieveJobSplitParams(wmWorkload, task):
    """
    _retrieveJobSplitParams_

    Retrieve job splitting parameters from the workflow.  The way this is
    setup currently sucks, we have to know all the job splitting parameters
    up front.  The following are currently supported:
        files_per_job
        min_merge_size
        max_merge_size
        max_merge_events
    """
    # Fallback used whenever the workload or the task cannot be resolved.
    defaultParams = {"files_per_job": 5}

    if not wmWorkload:
        logging.error("Could not find wmWorkload for splitting")
        return defaultParams

    taskObj = wmWorkload.getTaskByPath(task)
    if taskObj:
        return taskObj.jobSplittingParameters()
    return defaultParams
Example #19
0
def gather(suffix, options):
    """Yield domains listed in the CSV named by options['url'].

    Args:
      suffix: unused here; part of the common gatherer interface.
      options: dict; 'url' may be a remote http(s) URL or a local path.

    Yields:
      Domains parsed from the (downloaded or local) CSV file.
    """
    url = options.get("url")
    if url is None:
        # logging.warn is a deprecated alias of logging.warning.
        logging.warning("A --url is required. (Can be a local path.)")
        exit(1)

    # remote URL
    if url.startswith(("http:", "https:")):
        # Though it's saved in cache/, it will be downloaded every time.
        remote_path = os.path.join(utils.cache_dir(), "url.csv")

        try:
            response = requests.get(url)
            utils.write(response.text, remote_path)
        # Narrowed from a bare `except:` so Ctrl-C/SystemExit still propagate.
        except Exception:
            logging.error("Remote URL not downloaded successfully.")
            print(utils.format_last_exception())
            exit(1)

    # local path
    else:
        remote_path = url

    for domain in utils.load_domains(remote_path):
        yield domain
Example #20
0
    def generateCreateFailedReports(self, createFailedJobs):
        """
        _generateCreateFailedReports_

        Create and store FWJR for the  jobs that failed on creation
        leaving meaningful information about what happened with them
        """
        if not createFailedJobs:
            return

        fjrsToSave = []
        for failedJob in createFailedJobs:
            report = Report()
            defaultMsg = "There is a condition which assures that this job will fail if it's submitted"
            report.addError("CreationFailure", 99305, "CreationFailure", failedJob.get("failedReason", defaultMsg))
            jobCache = failedJob.getCache()
            try:
                fjrPath = os.path.join(jobCache, "Report.0.pkl")
                report.save(fjrPath)
                fjrsToSave.append({"jobid": failedJob["id"], "fwjrpath": fjrPath})
                failedJob["fwjr"] = report
            except Exception:
                # logging.exception records the traceback (the original
                # logging.error discarded it) and uses lazy %-args.
                logging.exception("Something went wrong while saving the report for  job %s", failedJob["id"])

        myThread = threading.currentThread()
        self.setFWJRPath.execute(binds = fjrsToSave, conn = myThread.transaction.conn, transaction = True)

        return
Example #21
0
    def post_config_change(self, method):
        """Apply routing and firewall follow-up work after this address changed.

        Args:
            method: "add" or "delete" -- the change applied to the device.
        """
        route = CsRoute()
        if method == "add":
            route.add_table(self.dev)
            route.add_route(self.dev, str(self.address["network"]))
        elif method == "delete":
            logging.warn("delete route not implemented")

        self.fw_router()
        self.fw_vpcrouter()

        # On deletion nw_type will no longer be known
        if self.get_type() in ["guest"] and self.config.is_vpc():

            CsDevice(self.dev, self.config).configure_rp()

            logging.error(
                "Not able to setup source-nat for a regular router yet")
            dns = CsDnsmasq(self)
            dns.add_firewall_rules()
            app = CsApache(self)
            app.setup()

        cmdline = self.config.cmdline()
        # If redundant then this is dealt with by the master backup functions
        if self.get_type() in ["guest"] and not cmdline.is_redundant():
            # NOTE(review): return value unused; start() is called for its side effect.
            pwdsvc = CsPasswdSvc(self.address['public_ip']).start()

        # Public VPC interfaces with source-nat get forward/SNAT iptables rules.
        if self.get_type() == "public" and self.config.is_vpc():
            if self.address["source_nat"]:
                vpccidr = cmdline.get_vpccidr()
                self.fw.append(
                    ["filter", "", "-A FORWARD -s %s ! -d %s -j ACCEPT" % (vpccidr, vpccidr)])
                self.fw.append(
                    ["nat", "", "-A POSTROUTING -j SNAT -o %s --to-source %s" % (self.dev, self.address['public_ip'])])
    def clean(self):
        """This form must always be valid: defaults replace incomplete or
        invalid data, and any validation errors are discarded."""
        if self._errors:
            # The form is always considered valid, so log and drop the errors.
            logging.error(str(self._errors))
            self._errors = {}

        in_data = self.get_pruned_data()
        # Exactly one of answer/comment present means a permalink request.
        if ('answer' in in_data) ^ ('comment' in in_data):
            out_data = {
                'is_permalink': True,
                'show_page': None,
                'answer_sort_method': 'votes',
                'show_comment': in_data.get('comment', None),
                'show_answer': in_data.get('answer', None),
            }
        else:
            out_data = {
                'is_permalink': False,
                'show_page': in_data.get('page', 1),
                'answer_sort_method': in_data.get('sort', self.default_sort_method),
                'show_comment': None,
                'show_answer': None,
            }
        self.cleaned_data = out_data
        return out_data
Example #23
0
def compile(input, output=None, flags=None):
    """Minify *input* by shelling out to YUI Compressor (java).

    NOTE(review): the previous docstring described Closure Compiler and a
    'source_paths' argument; the code actually runs yuicompressor-2.4.7
    with the parameters below.

    Args:
      input: path of the file to compress.
      output: optional output path; it is svn-locked before the run and
        passed to yuicompressor via -o.
      flags: optional list of extra command-line flags.

    Returns:
      The output path, or None when the Java version check fails.
    """

    # User friendly version check.
    if not (distutils.version.LooseVersion(_GetJavaVersion()) >=
                distutils.version.LooseVersion('1.6')):
        logging.error('Requires Java 1.6 or higher. '
                      'Please visit http://www.java.com/getjava')
        return

    svn.try_lock(output)

    args = ['java', '-jar', os.path.dirname(__file__) + '/lib/yuicompressor-2.4.7.jar', input, '--line-break', '1000',
            '--charset', 'gb2312']

    if output:
        args += ['-o', output]

    if flags:
        args += flags

    command.run(' '.join(args), show_log=True)

    return output
Example #24
0
File: dist.py Project: louiz/botan
    def content_rewriter():
        """Yield *contents* line by line, validating the version stamp and
        substituting the release metadata placeholders.

        NOTE(review): generator closing over `contents`, `target_version`,
        `version_re`, `version_parts`, `rev_id`, `rel_date` and `args` from
        the enclosing scope; their definitions are not visible here.
        """
        for line in contents:

            if target_version != "HEAD":
                match = version_re.match(line)
                if match:
                    # Map version-part name to its index within the tag.
                    name_to_idx = {"major": 0, "minor": 1, "patch": 2}
                    in_tag = int(version_parts[name_to_idx[match.group(1)]])
                    in_file = int(match.group(2))

                    # The file's version parts must agree with the release tag.
                    if in_tag != in_file:
                        logging.error(
                            'Version number part "%s" in botan_version.py does not match tag %s'
                            % (match.group(1), target_version)
                        )
                        raise Exception("Bad botan_version.py")

            # Substitute the release placeholders; pass all other lines through.
            if line == "release_vc_rev = None\n":
                yield "release_vc_rev = 'git:%s'\n" % (rev_id)
            elif line == "release_datestamp = 0\n":
                yield "release_datestamp = %d\n" % (rel_date)
            elif line == "release_type = 'unreleased'\n":
                if args[0] == "snapshot":
                    yield "release_type = 'snapshot'\n"
                else:
                    yield "release_type = 'released'\n"
            else:
                yield line
Example #25
0
def gql_json_parser(query_obj, form_id):
    """Convert form-element entities into a component list for the client.

    Args:
      query_obj: iterable of form-element entities exposing input_type,
        caption, options and key() (datastore-style -- TODO confirm schema).
      form_id: id embedded as a hidden field in the generated form.

    Returns:
      List of dicts describing form components, terminated by the hidden
      form_id field and a submit button.
    """
    all_components = []
    for e in query_obj:
        # Parenthesized explicitly: the original condition parsed as
        # `radiobuttons or (checkbox and options)`, so a radiobuttons element
        # with no options fell into this branch and json.loads(None) raised.
        if e.input_type in ('radiobuttons', 'checkbox') and e.options:
            # Was logging.error(): dumping options is debug tracing.
            logging.debug(e.options)
            opts = json.loads(e.options)
            radio_values = []
            for elm in opts:
                if e.input_type == 'radiobuttons':
                    field = {"type": "radio"}
                if e.input_type == 'checkbox':
                    field = {"type": "checkbox"}
                field["name"] = str(e.key().id())
                field["caption"] = elm.capitalize()
                field["value"] = elm
                field["id"] = elm
                radio_values.append(field)
            form_components = { "type": 'div',"data-role": 'fieldcontain',
                                "html": { "type": 'fieldset',"data-role": "controlgroup", "caption": e.caption, "data-type": "horizontal", "data-mini": "true", "html":  radio_values }}
        elif e.input_type == "h2":
            form_components = { "type": e.input_type, "html": e.caption}
        else:
            form_components = {"name": str(e.key().id()), "id": str(e.key().id()), "type": e.input_type, "caption": e.caption}
        if e.input_type == 'file':
            form_components['class'] = 'image_file'
        all_components.append(form_components)
    all_components.append({"type": "hidden","name": "form_id", "value": str(form_id)})
    all_components.append({"type": "submit", "value": "Spara checklistan!"})
    return all_components
Example #26
0
  def submit_job_description(self, job):
    """Creates and executes a Dataflow job-create request.

    Args:
      job: object whose .proto attribute is the Job message to submit.

    Returns:
      The service response -- a Job proto that includes the new job's id.

    Raises:
      Re-raises exceptions.BadStatusCodeError after logging the details.
    """
    request = dataflow.DataflowProjectsLocationsJobsCreateRequest()
    request.projectId = self.google_cloud_options.project
    request.location = self.google_cloud_options.region
    request.job = job.proto

    try:
      response = self._client.projects_locations_jobs.Create(request)
    except exceptions.BadStatusCodeError as e:
      logging.error('HTTP status %d trying to create job'
                    ' at dataflow service endpoint %s',
                    e.response.status,
                    self.google_cloud_options.dataflow_endpoint)
      logging.fatal('details of server error: %s', e)
      raise
    logging.info('Create job: %s', response)
    # The response is a Job proto with the id for the new job.
    logging.info('Created job with id: [%s]', response.id)
    logging.info(
        'To access the Dataflow monitoring console, please navigate to '
        'https://console.developers.google.com/project/%s/dataflow/job/%s',
        self.google_cloud_options.project, response.id)

    return response
Example #27
0
    def Adjustment(self):
        """ adjustment & blunder removing

            :returns: adjusted coordinates or None
        """
        # Iterate: adjust, then drop the worst blunder until the adjustment
        # is clean or it fails outright.
        last_res = None
        while True:
            res, blunder = self.g.adjust()
            # `x not in y` / `is not None` replace the `not x in y` /
            # `not x is None` anti-idioms; behavior is unchanged.
            if res is None or 'east' not in res[0] or 'north' not in res[0] or \
                              'elev' not in res[0]:
                # adjustment failed or too many blunders
                if last_res is not None:
                    logging.warning("blunders are not fully removed")
                    res = last_res
                else:
                    logging.error("adjustment failed")
                break
            elif blunder['std-residual'] < 1.0:
                logging.info("blunders removed")
                break
            else:
                logging.info("%s - %s observation removed" % (blunder['from'], blunder['to']))
                self.g.remove_observation(blunder['from'], blunder['to'])
                last_res = res
        return res
Example #28
0
def buildLinkBlock( linkDoc, context, dataType ):
	"""Build the HTML block for one saved link.

	Args:
		linkDoc: link document exposing link, title, name, urlHash,
			favicon and a space-separated tags string.
		context: "tags" links the title to the user page by raw link;
			anything else links to the per-user link page.
		dataType: currently unused (see the commented-out branch below).

	Returns:
		HTML string for the link block.
	"""
	dynamicHTML = "<div class='quotationrContainer'>"
	dynamicHTML += "<p class='quotationrSource'>"
	
	if context == "tags":
		dynamicHTML+= "<a class='quoteSource' href='/users/" + linkDoc.link + "'>" + linkDoc.title + "</a>"
	else:
		dynamicHTML+= "<a class='quoteSource' href='/users/" + linkDoc.name + "/link/" + linkDoc.urlHash + "'>" + linkDoc.title + "</a>"
		
	# Was logging.error(): these are development traces, not errors.
	logging.debug(linkDoc.favicon)
	logging.debug(linkDoc.link)
	dynamicHTML+= "<a href='" + linkDoc.link + "'><img class='favicon' onerror='if (this.src != &quot;/images/icon16.png&quot;) this.src = &quot;/images/icon16.png&quot;;' width=16 src='" + linkDoc.favicon + "' /></a>"
	dynamicHTML += "</p>"
	tagHTML = ""
	
	#if dataType == "tags":
	for tag in linkDoc.tags.split(' '):
		tagHTML += "<a href='/tags/" + tag + "'>" + tag + "</a>"
	
	dynamicHTML += "<div class='quotationrWidget'>"
	dynamicHTML += "<span class='quotationrTags'>"
	dynamicHTML += tagHTML + "</span>"
	# NOTE(review): stray closing </p> with no matching opening tag --
	# preserved as-is since the client markup may depend on it; confirm.
	dynamicHTML += "</p>"
	
	dynamicHTML += "</div>"
	dynamicHTML+= "</div>"
	dynamicHTML+= "<hr />"
	return dynamicHTML
Example #29
0
    def massif_contour(self, data):
        """
        Overlay the massif mask on top of the diffraction image.

        @param data: 2darray mask of detected massif pixels.
        """
        if self.fig is None:
            logging.error("No diffraction image available => not showing the contour")
            return

        # Build an RGBA overlay from the boolean mask (same value on all channels).
        inverted = 100 * (1 - data.astype("uint8"))
        mask = numpy.zeros((data.shape[0], data.shape[1], 4), dtype="uint8")
        for channel in range(4):
            mask[:, :, channel] = inverted

        # Drop previously overlaid images, keeping the base image.
        while len(self.msp.images) > 1:
            self.msp.images.pop()
        try:
            xlim = self.ax.get_xlim()
            ylim = self.ax.get_ylim()
            self.msp.imshow(mask, cmap="gray", origin="lower", interpolation="nearest")
            self.ax.set_xlim(xlim)
            self.ax.set_ylim(ylim)
        except MemoryError:
            logging.error("Sorry but your computer does NOT have enough memory to display the massif plot")
        self.fig.canvas.draw()
Example #30
0
def unregister_ui(review_ui):
    """Unregister a previously registered Review UI class.

    Only :py:class:`FileAttachmentReviewUI` subclasses are supported, and
    the class must have been registered beforehand.

    Args:
        review_ui (type):
            The Review UI to unregister.

    Raises:
        TypeError:
            The provided Review UI class is not of a compatible type.

        ValueError:
            The provided Review UI was not previously registered.
    """
    if not issubclass(review_ui, FileAttachmentReviewUI):
        raise TypeError('Only FileAttachmentReviewUI subclasses can be '
                        'unregistered')

    if review_ui in _file_attachment_review_uis:
        _file_attachment_review_uis.remove(review_ui)
    else:
        logging.error('Failed to unregister missing review UI %r' % review_ui)
        raise ValueError('This review UI was not previously registered')