    def run(self):
        pid = psutil.Process(os.getpid())
        update = False

        try:
            log.info(json.dumps({"rsk" : "[RSKLOG]", "tag" : "[PROCESS]", "start" : Interfaces.timestamper.time(), "data" : {"threads" : [{"pid" : x[0], "cpu_percent" : psutil.Process(x[0]).cpu_percent()} for x in pid.threads()], "memory_percent" : pid.memory_percent()}}))
            if self.registry.last_block:
                current_prevhash = "%064x" % self.registry.last_block.hashPrevBlock
            else:
                current_prevhash = None

            prevhash = yield self.bitcoin_rpc.prevhash()
            start = Interfaces.timestamper.time()
            
            if prevhash and prevhash != current_prevhash:
                logid = util.id_generator()
                log.info("New block! Prevhash: %s" % prevhash)
                update = True
                log.info(json.dumps({"uuid" : logid, "rsk" : "[RSKLOG]", "tag" : "[BTC_NEW_BLOCK_PARENT]", "start" : start, "elapsed" : Interfaces.timestamper.time() - start}))

            elif Interfaces.timestamper.time() - self.registry.last_update >= settings.MERKLE_REFRESH_INTERVAL:
                log.info("Merkle update! Prevhash: %s" % prevhash)
                update = True

            if update:
                self.registry.update_block()
                log.info(json.dumps({"rsk" : "[RSKLOG]", "tag" : "[BTC_NEW_WORK_UNIT]", "uuid" : util.id_generator(), "start" : start, "elapsed" : Interfaces.timestamper.time() - start}))

        except Exception:
            log.exception("UpdateWatchdog.run failed")
        finally:
            self.schedule()
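All of these examples call some form of util.id_generator. The helper itself is not shown on this page; most projects use the familiar random-string recipe, so a plausible minimal sketch (the exact signature and alphabet are assumptions, not any one project's actual code) is:

import random
import string

def id_generator(size=6, chars=string.ascii_uppercase + string.digits):
    # build a random alphanumeric ID such as 'G7K2QX'; callers on this
    # page pass sizes like 10, 16, or 32 when they need longer tokens
    return ''.join(random.choice(chars) for _ in range(size))

This matches what the test examples further down assert: a configurable length and purely alphanumeric output.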
Example #2
def import_items(conn, track_import):
    logger.info("start import %s", track_import["filename"])
    file_id = track_import["fileId"]
    url = "http://www.xiniudata.com/file/" + file_id
    saved_name = "logs/" + util.id_generator(16) + ".xlsx"

    r = requests.get(url)
    with open(saved_name, "wb") as fp:
        fp.write(r.content)

    w = xlrd.open_workbook(saved_name, on_demand=True)
    table = w.sheets()[0]
    nrows = table.nrows
    # skip the header row
    for i in range(1, nrows):
        row = table.row_values(i)
        try:
            if track_import["type"] == 82001:
                name = None
                if len(row) >= 1:
                    name = row[0].strip()
                full_name = None
                if len(row) >= 2:
                    full_name = row[1].strip()
                if (name is None or name=="") and (full_name is None or full_name==""):
                    continue

                logger.info("import company, name: %s, fullname: %s", name, full_name)
                import_one_project(conn, track_import, name, full_name)
            elif track_import["type"] == 82002:
                if len(row) == 0:
                    continue
                name = row[0].strip()
                if name is None or name == "":
                    continue
                logger.info("import investor, name: %s", name)
                import_one_investor(conn, track_import, name)
        except Exception:
            traceback.print_exc()

    # per the queries below: status 84001 = found, 84002 = duplicate, > 84002 = unmatched
    cnt_result = conn.get("select count(*) cnt from track_import_item where trackImportId=%s", track_import["id"])
    found_cnt_result = conn.get("select count(*) cnt from track_import_item where trackImportId=%s and status=84001",
                                track_import["id"])
    dup_cnt_result = conn.get("select count(*) cnt from track_import_item where trackImportId=%s and status=84002",
                                track_import["id"])
    unmatch_cnt_result = conn.get("select count(*) cnt from track_import_item where trackImportId=%s and status>84002",
                                track_import["id"])
    conn.update("update track_import set processStatus=%s, cnt=%s, dupCnt=%s, unMatchCnt=%s, foundCnt=%s "
                "where id=%s",
                83003,
                cnt_result["cnt"],
                dup_cnt_result["cnt"],
                unmatch_cnt_result["cnt"],
                found_cnt_result["cnt"],
                track_import["id"])

    os.remove(saved_name)
    logger.info("end import.")
def Fc_glycan_rmsd( working, working_Fc_glycan_chains, native, native_Fc_glycan_chains, decoy_num, dump_dir ):
    """
    :param working: decoy Pose()
    :param working_Fc_glycan_chains: list( the chain id's for the working Fc glycan ). Ex = [ 'H', 'I' ]
    :param native: native Pose()
    :param native_Fc_glycan_chains: list( the chain id's for the native Fc glycan ). Ex = [ 'D', 'E' ]
    :param decoy_num: int( the number of the decoy for use when dumping its Fc glycan )
    :param dump_dir: str( /path/to/dump_dir for the temp pdb files made. Files will be deleted )
    :return: float( Fc glycan rmsd )
    """
    # imports
    import os
    from pyrosetta import Pose
    from rosetta.core.scoring import non_peptide_heavy_atom_RMSD
    from antibody_functions import load_pose
    from util import dump_pdb_by_chain, id_generator


    # get temporary files to work with
    id = id_generator()
    working_filename = os.path.join( dump_dir, "%s_temp_working_just_glyc%s.pdb" % ( id, str( decoy_num ) ) )
    native_filename = os.path.join( dump_dir, "%s_temp_native_just_glyc%s.pdb" % ( id, str( decoy_num ) ) )

    # dump out the Fc glycans by their chain id's
    dump_pdb_by_chain( working_filename, working, working_Fc_glycan_chains, decoy_num, dump_dir = dump_dir )
    dump_pdb_by_chain( native_filename, native, native_Fc_glycan_chains, decoy_num, dump_dir = dump_dir )

    # load in the Fc glycans
    just_Fc_glycan = Pose()
    try:
        just_Fc_glycan.assign( load_pose( working_filename ) )
    except:
        pass

    native_just_Fc_glycan = Pose()
    try:
        native_just_Fc_glycan.assign( load_pose( native_filename ) )
    except:
        pass

    # calculate the glycan rmsd
    try:
        glycan_rmsd = non_peptide_heavy_atom_RMSD( just_Fc_glycan, native_just_Fc_glycan )
    except:
        glycan_rmsd = "nan"

    # delete the files
    try:
        os.remove( working_filename )
        os.remove( native_filename )
    except OSError:
        pass

    return glycan_rmsd
Example #4
    def run(self):
        if self.rootstock_rpc.active:
            start = Interfaces.timestamper.time()
            rsk_update = False
            try:
                if Interfaces.timestamper.time() - self.registry.rsk_last_update >= self.timer:
                    rsk_update = True
                    log.info(json.dumps({
                        "uuid": util.id_generator(),
                        "rsk": "[RSKLOG]",
                        "tag": "[RSK_NEW_BLOCK_PARENT]",
                        "start": start,
                        "elapsed": Interfaces.timestamper.time() - start
                    }))
                if rsk_update:
                    self.registry.rsk_update_block()
                    log.info(json.dumps({
                        "rsk": "[RSKLOG]",
                        "tag": "[RSK_NEW_WORK_UNIT]",
                        "uuid": util.id_generator(),
                        "start": start,
                        "elapsed": Interfaces.timestamper.time() - start
                    }))
            except Exception:
                log.exception("RSKUpdateWatchdog.run failed")
            finally:
                self.schedule()
                yield self.yielder()
        else:
            rsk_update = True
            self.shutdown()
Example #5
	def sendOrderEvent1(self, honeySessId, storeSessId):
		orderId = "OD" + util.order_id_generator()
		token = util.id_generator(32)
		headers = self.getRequestHeaders()
		referrerUrl = "https://www.flipkart.com/orderresponse?reference_id=" + orderId + "&token=" + token + "&src=or&pr=1"
		payload = {"src": self.getSrc(),"exv": self.getExvField(), "events":[{"store":{"id":"7364884674316784684","session_id": storeSessId},"cashback_offer":{"offer":{}},"cart":{"order_id":"null","price":0},"user_hbc":1479176847,"checkout":{"stage":"confirmation"},"icon":"active","referrer_url": referrerUrl ,"session_id": honeySessId,"code":"ext009001"}]}
		eventResponse = requests.post(self.genericEvent, data=json.dumps(payload), headers=headers, cookies= self.getCookie())
		return eventResponse
Example #6
    def is_crawl_success(self, url, content):
        content = util.html_encode(content)

        if content.find("站长帮手网") > 0:
            return True
        if content.find("暂无数据") > 0:
            return True
        if content.find("为无效的域名格式") > 0:
            return True

        if content.find("HTTP Error 400. The request URL is invalid.") > 0:
            return True

        if content.find("您的查询量比较大") > 0:
            logger.info("您的查询量比较大")

            if len(login_users) < 100:
                while True:
                    opener = urllib2.build_opener()
                    username = util.id_generator(10)
                    data = {
                        "username":username,
                        "password": "******",
                        "confirmpassword": "******",
                        "opaction":"reg",
                        "qq":"",
                        "isqqopen":"1",
                        "email":"*****@*****.**" % username
                    }

                    data = urllib.urlencode(data)
                    logger.info(data)
                    headers = {
                        "Referer": "http://my.links.cn/reg.asp"
                    }
                    user_agent = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/600.8.9 (KHTML, like Gecko) Version/8.0.8 Safari/600.8.9'
                    headers['User-Agent'] = user_agent

                    try:
                        request= urllib2.Request("http://my.links.cn/regpost.asp", data, headers)
                        r = opener.open(request, timeout=30)
                        try:
                            content = util.html_encode(r.read())
                            #logger.info(content)
                            login_users.append({"name":username, "pwd":"ann123456", "date":datetime.datetime.now()})
                            logger.info(login_users)
                            break
                        except Exception:
                            traceback.print_exc()
                    except Exception:
                        traceback.print_exc()
            return False
Example #7
	def sendOrderEvent2(self, honeySessId, storeSessId):
		orderId = "OD" + util.order_id_generator()
		token = util.id_generator(32)
		headers = self.getRequestHeaders()
		referrerUrl = "https://www.flipkart.com/orderresponse?reference_id=" + orderId + "&token=" + token + "&src=or&pr=1"
		payload = {"src": self.getSrc(),"exv": self.getExvField(), "events":[{"matches":[{"matched_string":"Order ID"},{"matched_string":"order has been placed"},{"matched_string":"Thank you for your order"}],"previous_url":"https://www.flipkart.com/checkout/init","store":{"id":"7364884674316784684","session_id": storeSessId},"referrer_url": referrerUrl ,"session_id": honeySessId,"code":"ext009003"}]}
		eventResponse = requests.post(self.genericEvent, data=json.dumps(payload), headers=headers, cookies= self.getCookie())
		print eventResponse.text
		print eventResponse.cookies
		return eventResponse
Example #8
def main():
    print("Opening image...")
    input_img = Image.open(argparams.image_input_path)

    print("Converting to RGBA...")
    input_img = input_img.convert('RGBA')

    print("Rotating image...")
    input_img = input_img.rotate(argparams.angle, expand=True)

    print("Getting data...")
    data = input_img.load()

    print("Getting pixels...")
    pixels = []
    for y in range(input_img.size[1]):
        pixels.append([])
        for x in range(input_img.size[0]):
            pixels[y].append(data[x, y])

    print("Determining intervals...")
    intervals = argparams.interval_function(pixels, argparams)

    print("Sorting pixels...")
    sorted_pixels = sorter.sort_image(pixels, intervals, argparams)

    print("Placing pixels in output image...")
    output_img = Image.new('RGBA', input_img.size)
    for y in range(output_img.size[1]):
        for x in range(output_img.size[0]):
            output_img.putpixel((x, y), sorted_pixels[y][x])

    if argparams.angle != 0:
        print("Rotating output image back to original orientation...")
        output_img = output_img.rotate(-argparams.angle, expand=True)

        print("Crop image to apropriate size...")
        output_img = util.crop_to(output_img,
                                  Image.open(argparams.image_input_path))

    print("Saving image...")
    #output_img.save(argparams.output_image_path)
    print(argparams.output_image_path.split(".")[-1])
    output_img.save(argparams.output_image_path + "/" + util.id_generator() +
                    ".png")

    print("Done! ", argparams.output_image_path)
Example #9
def add_text_image(area_name, base_id, blob, width, height):
	if not textarea_exists(area_name):
		raise IntegrityError('No such text area: ' + area_name)
	
	cursor = get_connection().cursor()
	counter = 0
	upload_date = time.mktime(datetime.datetime.now().timetuple())
	
	for id in util.id_generator(base_id):
		cursor.execute('select count(*) from text_image where id = ? and version = ? and text_area_name = ?', [id, 'new', area_name])
		
		if cursor.fetchone()[0] == 0: break
	
	uploaded_image_id = add_uploaded_image(blob, width, height)
	cursor.execute('insert into text_image(id, version, text_area_name, uploaded_image_id) values (?, ?, ?, ?)', (id, 'new', area_name, uploaded_image_id))
	
	return { 'image-id': id }
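Unlike the other examples, this one iterates over util.id_generator(base_id), so here the helper must be a generator that yields candidate IDs derived from base_id until the loop finds an unused one. A hypothetical sketch of such a generator (the suffix scheme is an assumption):

def id_generator(base_id):
    # yield the base ID first, then numbered variants, until the caller breaks
    yield base_id
    suffix = 2
    while True:
        yield "%s-%d" % (base_id, suffix)
        suffix += 1

Example #10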
    def build_broadcast_args(self, rsk_job=False):
        '''Build the parameters of the mining.notify call. All clients
        may receive the same params, because they include their unique
        extranonce1 in the coinbase, so every coinbase_hash (and
        therefore every merkle_root) will be unique as well.'''
        logid = util.id_generator()
        start = Interfaces.timestamper.time()
        job_id = self.job_id
        prevhash = binascii.hexlify(self.prevhash_bin)
        (coinb1, coinb2) = [binascii.hexlify(x) for x in self.vtx[0]._serialized]
        merkle_branch = [binascii.hexlify(x) for x in self.merkletree._steps]
        version = binascii.hexlify(struct.pack(">i", self.nVersion))
        nbits = binascii.hexlify(struct.pack(">I", self.nBits))
        ntime = binascii.hexlify(struct.pack(">I", self.curtime))
        clean_jobs = True
        return (job_id, prevhash, coinb1, coinb2, merkle_branch, version,
                nbits, ntime, clean_jobs, rsk_job)
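For context, the client-side counterpart of these parameters rebuilds the coinbase around its own extranonce values and folds the coinbase hash through merkle_branch. A rough sketch under the usual Stratum conventions (the helper names are hypothetical):

import binascii
import hashlib

def doublesha(data):
    # Bitcoin's double SHA-256
    return hashlib.sha256(hashlib.sha256(data).digest()).digest()

def merkle_root_from_branch(coinbase_hash, merkle_branch):
    # fold the coinbase hash with each branch step, left to right,
    # which is how a miner consumes merkle_branch from mining.notify
    root = coinbase_hash
    for step in merkle_branch:
        root = doublesha(root + binascii.unhexlify(step))
    return root

# coinbase_bin = unhexlify(coinb1) + extranonce1_bin + extranonce2_bin + unhexlify(coinb2)
# merkle_root  = merkle_root_from_branch(doublesha(coinbase_bin), merkle_branch)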
Example #11
def add_question():
    if request.method == 'POST':
        id = util.id_generator()
        submission_time = util.get_time()
        # view_number = request.form[]
        # vote_number = request.form[]
        title = request.form['question_title']
        message = request.form['question']
        # image = request.form[]
        questions = connection.get_data('question.csv', PATH)
        data_to_save = [id, submission_time, title, message, "image"]

        # question_to_save = questions.append(data_to_save)

        connection.save_data(PATH, 'question.csv', data_to_save, 'a')
        data = connection.get_data('question.csv', PATH)
        return render_template('list.html', data=data, TITLE=TITLE, ID=ID)

    return render_template('add_question.html')
Example #12
    def _rsk_fill_data(self, data):
        '''
        Helper function for filling out the Bitcoin RPC's RSK data
        '''
        start = Interfaces.timestamper.time()
        logid = util.id_generator()
        self.rootstock_rpc.rsk_notify = data['notify']
        self.rootstock_rpc.rsk_blockhashformergedmining = data['blockHashForMergedMining']
        self.rootstock_rpc.rsk_last_header = self.rootstock_rpc.rsk_header
        self.rootstock_rpc.rsk_miner_fees = data['feesPaidToMiner']
        self.rootstock_rpc.rsk_last_parent_hash = self.rootstock_rpc.rsk_parent_hash
        self.rootstock_rpc.rsk_parent_hash = data['parentBlockHash']
        self.rootstock_rpc.rsk_header = self._rsk_genheader(self.rootstock_rpc.rsk_blockhashformergedmining)
        if settings.RSK_DEV_MODE:
            self.rootstock_rpc.rsk_target = int(settings.RSK_DEV_TARGET)
        else:
            self.rootstock_rpc.rsk_target = int(data['target'], 16)
Example #13
def add_gallery_image(area_name, base_id, blob, width, height, title, comment):
	if not galleryarea_exists(area_name):
		raise IntegrityError('No such gallery: ' + area_name)
	
	cursor = get_connection().cursor()
	upload_date = time.mktime(datetime.datetime.now().timetuple())
	
	for id in util.id_generator(base_id):
		cursor.execute('select count(*) from gallery_image where id = ? and version = ? and gallery_area_name = ?', [id, 'new', area_name])
		
		if cursor.fetchone()[0] == 0: break
	
	cursor.execute('select max(position) from gallery_image where gallery_area_name = ? and version = ?', [area_name, 'new'])
	res = cursor.fetchone()[0]
	next_pos = 0 if res is None else res + 1
	
	uploaded_image_id = add_uploaded_image(blob, width, height)
	cursor.execute('insert into gallery_image(id, position, title, comment, version, gallery_area_name, uploaded_image_id) values (?, ?, ?, ?, ?, ?, ?)', (id, next_pos, title, comment, 'new', area_name, uploaded_image_id))
	
	return { 'image-id': id }
Example #14
    def update_block(self):
        '''Registry calls the getblocktemplate() RPC
        and builds a new block template.'''

        if self.update_in_progress:
            # Block has been already detected
            return

        self.update_in_progress = True
        self.last_update = Interfaces.timestamper.time()
        btc_block_received_start = Interfaces.timestamper.time()
        btc_block_received_id = util.id_generator()
        log.info(
            json.dumps({
                "rsk": "[RSKLOG]",
                "tag": "[BTC_BLOCK_RECEIVED_START]",
                "start": btc_block_received_start,
                "elapsed": 0,
                "uuid": btc_block_received_id
            }))
        d = self.bitcoin_rpc.getblocktemplate()
        d.addCallback(self._update_block, btc_block_received_id)
        d.addErrback(self._update_block_failed)
Example #15
    def rsk_update_block(self):
        try:
            currentTime = Interfaces.timestamper.time()
            if self.rsk_update_in_progress and not (currentTime - self.rsk_last_update > 3):
                return
            self.rsk_last_update = currentTime
            self.rsk_update_in_progress = True
            rsk_block_received_id = util.id_generator()
            log.info(json.dumps({
                "rsk": "[RSKLOG]",
                "tag": "[RSK_BLOCK_RECEIVED_START]",
                "start": Interfaces.timestamper.time(),
                "elapsed": 0,
                "uuid": rsk_block_received_id
            }))
            rsk = self.rootstock_rpc.getwork()
            rsk.addCallback(self._rsk_getwork, rsk_block_received_id)
            rsk.addErrback(self._rsk_getwork_err)
        except AttributeError as e:
            if "'NoneType' object has no attribute 'getwork'" in str(e):
                pass  # RSK dropped recently, so we let this pass
Example #16
def Fc_glycan_metrics( working, native, working_Fc_glycan_chains, native_Fc_glycan_chains, sf, decoy_num, dump_dir ):
    """
    Return the glycan RMSD contribution of the two Fc glycans in 3ay4 (may work for other PDBs, but I don't know yet)
    Fc_glycan_buried_sasa = complex with Fc glycan - ( complex without Fc glycan + just Fc glycan )
    hbonds contributed by Fc glycans = total hbonds in Pose - total hbonds in Pose without Fc glycans - just Fc glycan hbonds
    :param working: decoy Pose()
    :param native: native Pose()
    :param working_Fc_glycan_chains: list( the chain id's for the working Fc glycan ). Ex = [ 'H', 'I' ]
    :param native_Fc_glycan_chains: list( the chain id's for the native Fc glycan ). Ex = [ 'D', 'E' ]
    :param sf: ScoreFunction
    :param decoy_num: int( the number of the decoy for use when dumping its Fc glycan )
    :param dump_dir: str( /path/to/dump_dir for the temp pdb files made. Files will be deleted )
    :return: obj( DataHolder that contains Fc_glycan_rmsd, Fc_glycan_tot_score, Fc_glycan_buried_sasa, and Fc_glycan_internal_hbonds, Fc_glycan_hbonds_contributed )
    """
    #################
    #### IMPORTS ####
    #################

    # Rosetta functions
    from pyrosetta import Pose
    from rosetta.core.scoring import non_peptide_heavy_atom_RMSD, \
        calc_total_sasa

    # Rosetta functions I wrote out
    from antibody_functions import load_pose, DataHolder

    # utility functions
    import os
    from util import dump_pdb_by_chain, id_generator
    from pyrosetta.toolbox import get_hbonds

    # for use in SASA calculations
    probe_size = 1.4


    # get glycan rmsd (not using the above function because I want to use the glycan poses for something else)
    # get temporary files to work with
    id = id_generator()
    working_filename = os.path.join( dump_dir, "%s_temp_working_just_glyc%s.pdb" % ( id, str( decoy_num ) ) )
    native_filename = os.path.join( dump_dir, "%s_temp_native_just_glyc%s.pdb" % ( id, str( decoy_num ) ) )

    # dump out the Fc glycans by their chain id's
    dump_pdb_by_chain( working_filename, working, working_Fc_glycan_chains, decoy_num, dump_dir = dump_dir )
    dump_pdb_by_chain( native_filename, native, native_Fc_glycan_chains, decoy_num, dump_dir = dump_dir )

    # load in the Fc glycans
    working_just_Fc_glycan = Pose()
    try:
        working_just_Fc_glycan.assign( load_pose( working_filename ) )
    except:
        pass

    native_just_Fc_glycan = Pose()
    try:
        native_just_Fc_glycan.assign( load_pose( native_filename ) )
    except:
        pass

    # calculate the glycan rmsd
    try:
        glycan_rmsd = non_peptide_heavy_atom_RMSD( working_just_Fc_glycan, native_just_Fc_glycan )
    except:
        glycan_rmsd = "nan"

    # get the metrics associated with just the Fc glycan
    # score them first so as to gain access to the hbonds data
    working_Fc_glycan_tot_score = sf( working_just_Fc_glycan )
    native_Fc_glycan_tot_score = sf( native_just_Fc_glycan )

    # SASA of just the glycan
    working_Fc_glycan_sasa = calc_total_sasa( working_just_Fc_glycan, probe_size )
    native_Fc_glycan_sasa = calc_total_sasa( native_just_Fc_glycan, probe_size )

    # num hbonds in Fc glycan
    working_Fc_glycan_internal_hbonds = get_hbonds( working_just_Fc_glycan ).nhbonds()
    native_Fc_glycan_internal_hbonds = get_hbonds( native_just_Fc_glycan ).nhbonds()

    # delete the files
    try:
        os.remove( working_filename )
        os.remove( native_filename )
    except OSError:
        pass


    # now move to metrics requiring the removal of the glycan from the complex
    # get temporary files to work with
    id = id_generator()
    working_filename = os.path.join( dump_dir, "%s_working_no_glyc_%s.pdb" % ( id, str( decoy_num ) ) )
    native_filename = os.path.join( dump_dir, "%s_native_no_glyc_%s.pdb" % ( id, str( decoy_num ) ) )

    # get the chain id's of everything excluding the passed Fc glycan chain id's
    working_pose_chains = []
    for res in working:
        chain_id = working.pdb_info().chain( res.seqpos() )
        if ( chain_id not in working_pose_chains ) and ( chain_id not in working_Fc_glycan_chains ):
            working_pose_chains.append( chain_id )
    native_pose_chains = []
    for res in native:
        chain_id = native.pdb_info().chain( res.seqpos() )
        if ( chain_id not in native_pose_chains ) and ( chain_id not in native_Fc_glycan_chains ):
            native_pose_chains.append( chain_id )

    # dump out the pose without its Fc glycans by the chain id's
    dump_pdb_by_chain( working_filename, working, working_pose_chains, decoy_num, dump_dir = dump_dir )
    dump_pdb_by_chain( native_filename, native, native_pose_chains, decoy_num, dump_dir = dump_dir )

    # load in the working Pose without the Fc glycans
    working_complex_no_Fc_glycan = Pose()
    native_complex_no_Fc_glycan = Pose()
    try:
        working_complex_no_Fc_glycan.assign( load_pose( working_filename ) )
        native_complex_no_Fc_glycan.assign( load_pose( native_filename ) )
    except:
        pass

    # score the Poses so their hbond energies get updated
    sf( working )
    sf( working_complex_no_Fc_glycan )
    sf( native )
    sf( native_complex_no_Fc_glycan )

    # get the number of hbonds in the Pose without the Fc glycans
    # working
    working_with_Fc_glycan_hbonds = get_hbonds( working )
    working_no_Fc_glycan_hbonds = get_hbonds( working_complex_no_Fc_glycan )
    working_Fc_glycan_hbonds_contributed = working_with_Fc_glycan_hbonds.nhbonds() - working_no_Fc_glycan_hbonds.nhbonds() - working_Fc_glycan_internal_hbonds

    # native
    native_with_Fc_glycan_hbonds = get_hbonds( native )
    native_no_Fc_glycan_hbonds = get_hbonds( native_complex_no_Fc_glycan )
    native_Fc_glycan_hbonds_contributed = native_with_Fc_glycan_hbonds.nhbonds() - native_no_Fc_glycan_hbonds.nhbonds() - native_Fc_glycan_internal_hbonds

    # get the SASA contributed by the presence of the Fc glycan
    # working
    working_with_Fc_glycan_sasa = calc_total_sasa( working, probe_size )
    working_no_Fc_glycan_sasa = calc_total_sasa( working_complex_no_Fc_glycan, probe_size )
    working_Fc_glycan_sasa_contributed = working_with_Fc_glycan_sasa - ( working_no_Fc_glycan_sasa + working_Fc_glycan_sasa )

    # native
    native_with_Fc_glycan_sasa = calc_total_sasa( native, probe_size )
    native_no_Fc_glycan_sasa = calc_total_sasa( native_complex_no_Fc_glycan, probe_size )
    native_Fc_glycan_sasa_contributed = native_with_Fc_glycan_sasa - ( native_no_Fc_glycan_sasa + native_Fc_glycan_sasa )

    # delete the files
    try:
        os.remove( working_filename )
        os.remove( native_filename )
    except OSError:
        pass

    # store data in the DataHolder and return it
    data = DataHolder()
    data.Fc_glycan_rmsd = glycan_rmsd
    data.Fc_glycan_tot_score = working_Fc_glycan_tot_score
    data.native_Fc_glycan_tot_score = native_Fc_glycan_tot_score
    data.Fc_glycan_internal_hbonds = working_Fc_glycan_internal_hbonds
    data.native_Fc_glycan_internal_hbonds = native_Fc_glycan_internal_hbonds
    data.Fc_glycan_hbonds_contributed = working_Fc_glycan_hbonds_contributed
    data.native_Fc_glycan_hbonds_contributed = native_Fc_glycan_hbonds_contributed
    data.Fc_glycan_sasa_contributed = working_Fc_glycan_sasa_contributed
    data.native_Fc_glycan_sasa_contributed = native_Fc_glycan_sasa_contributed
    data.probe_size = probe_size

    return data
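The buried-SASA bookkeeping from the docstring, restated as a tiny hypothetical helper (the argument names are mine, not the module's):

def glycan_buried_sasa(complex_sasa, complex_no_glycan_sasa, glycan_alone_sasa):
    # complex with Fc glycan - ( complex without Fc glycan + just Fc glycan );
    # the more negative the result, the more surface the glycan contact buries
    return complex_sasa - ( complex_no_glycan_sasa + glycan_alone_sasa )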
Example #17
    def get_ID():
        """
        :return: A random ID
        """
        return id_generator(size=6)
Example #18
def read_output_image_path():
    return __args.output if __args.output else util.id_generator() + ".png"
Example #19
    def test_id_length(self):
        x = id_generator(size=5)
        assert len(x) == 5
Example #20
    def test_id_content(self):
        x = id_generator()
        assert x.isalnum()
Example #21
    def start_new_game(self):
        self.id = util.id_generator()
        self.state = 'Waiting for players'
Example #22
    def submit_share(self, job_id, worker_name, extranonce1_bin, extranonce2,
                     ntime, nonce, difficulty):
        '''Check parameters and finalize block template. If it leads
           to valid block candidate, asynchronously submits the block
           back to the bitcoin network.

            - extranonce1_bin is binary. No checks performed, it should be from session data
            - job_id, extranonce2, ntime, nonce - in hex form sent by the client
            - difficulty - decimal number from session, again no checks performed
            - submitblock_callback - reference to method which receive result of submitblock()
        '''
        global rsk_last_received_share_time
        global rsk_submitted_shares
        start = Interfaces.timestamper.time()
        logid = util.id_generator()
        log.info(
            json.dumps({
                "rsk": "[RSKLOG]",
                "tag": "[SHARE_RECEIVED_START]",
                "uuid": logid,
                "start": start,
                "elapsed": 0
            }))
        # Check if extranonce2 looks correctly. extranonce2 is in hex form...
        if len(extranonce2) != self.extranonce2_size * 2:
            raise SubmitException(
                "Incorrect size of extranonce2. Expected %d chars" %
                (self.extranonce2_size * 2))

        # Check for job
        job = self.get_job(job_id)
        if job is None:
            raise SubmitException("Job '%s' not found" % job_id)

        # Check if ntime looks correct
        if len(ntime) != 8:
            raise SubmitException("Incorrect size of ntime. Expected 8 chars")

        if not job.check_ntime(int(ntime, 16)):
            raise SubmitException("Ntime out of range")

        # Check nonce
        if len(nonce) != 8:
            raise SubmitException("Incorrect size of nonce. Expected 8 chars")

        # Convert from hex to binary
        extranonce2_bin = binascii.unhexlify(extranonce2)
        ntime_bin = binascii.unhexlify(ntime)
        nonce_bin = binascii.unhexlify(nonce)

        # Check for duplicated submit
        if not job.register_submit(extranonce1_bin, extranonce2_bin, ntime_bin,
                                   nonce_bin):
            log.info("Duplicate from %s, (%s %s %s %s)" % \
                    (worker_name, binascii.hexlify(extranonce1_bin), extranonce2, ntime, nonce))
            raise SubmitException("Duplicate share")

        # Now let's do the hard work!
        # ---------------------------

        # 1. Build coinbase
        coinbase_bin = job.serialize_coinbase(extranonce1_bin, extranonce2_bin)
        coinbase_hash = util.doublesha(coinbase_bin)

        # 2. Calculate merkle root
        merkle_root_bin = job.merkletree.withFirst(coinbase_hash)
        merkle_root_int = util.uint256_from_str(merkle_root_bin)

        # 3. Serialize header with given merkle, ntime and nonce
        header_bin = job.serialize_header(merkle_root_int, ntime_bin,
                                          nonce_bin)

        # 4. Reverse header and compare it with target of the user

        # header is 80 bytes (20 words of 4 bytes, each word byte-swapped)
        header_le = ''.join(
            [header_bin[i * 4:i * 4 + 4][::-1] for i in range(0, 20)])
        hash_bin = util.doublesha(header_le)

        hash_int = util.uint256_from_str(hash_bin)
        block_hash_hex = "%064x" % hash_int
        header_hex = binascii.hexlify(header_bin)

        log.info(
            json.dumps({
                "rsk": "[RSKLOG]",
                "tag": "[SHARE_RECEIVED_HEX]",
                "uuid": logid,
                "start": Interfaces.timestamper.time(),
                "elapsed": 0,
                "data": block_hash_hex
            }))

        if not settings.RSK_DEV_MODE:
            target_user = self.diff_to_target(difficulty)
            if hash_int > target_user:
                raise SubmitException("Share is above target")

        # Mostly for debugging purposes
        target_info = self.diff_to_target(100000)
        if hash_int <= target_info:
            log.info("Yay, share with diff above 100000")

        # 5. Compare hash with target of the network
        log.info("Hash_Int: %s, Job.Target %s" % (hash_int, job.target))
        btc_solution = hash_int <= job.target
        rsk_solution = False

        if self.rootstock_rpc is not None:
            rsk_solution = hash_int <= self.rootstock_rpc.rsk_target and self._is_rsk_tag_in_coinbase(
                coinbase_bin)

        on_submit_rsk = None
        on_submit = None

        if btc_solution or rsk_solution:
            log.info("We found a block candidate! %s" % block_hash_hex)
            job.finalize(merkle_root_int, extranonce1_bin, extranonce2_bin,
                         int(ntime, 16), int(nonce, 16))

            if btc_solution:
                serialized = binascii.hexlify(job.serialize())
                on_submit = self.bitcoin_rpc.submitblock(serialized)
                log.info(
                    json.dumps({
                        "rsk": "[RSKLOG]",
                        "tag": "[BTC_SUBMITBLOCK]",
                        "uuid": util.id_generator(),
                        "start": start,
                        "elapsed": Interfaces.timestamper.time(),
                        "data": block_hash_hex
                    }))

            if rsk_solution:
                if rsk_last_received_share_time is None:
                    rsk_last_received_share_time = int(round(time() * 1000))
                    rsk_submitted_shares = 0
                last_received_share_time_now = int(round(time() * 1000))
                if last_received_share_time_now - rsk_last_received_share_time >= 1000:
                    rsk_submitted_shares = 0
                    rsk_last_received_share_time = last_received_share_time_now

                if last_received_share_time_now - rsk_last_received_share_time < 1000 and rsk_submitted_shares < 3:
                    rsk_submitted_shares += 1
                else:
                    return (header_hex, block_hash_hex, on_submit,
                            on_submit_rsk)

                serialized = binascii.hexlify(job.serialize())

                block_header_hex = binascii.hexlify(header_le)
                coinbase_hex = binascii.hexlify(coinbase_bin)
                coinbase_hash_hex = binascii.hexlify(coinbase_hash)
                merkle_hashes_array = [
                    binascii.hexlify(x) for x in job.merkletree._steps
                ]
                merkle_hashes_array.insert(0, coinbase_hash_hex)
                merkle_hashes = ' '.join(merkle_hashes_array)
                txn_count = hex(len(merkle_hashes_array))[2:]

                on_submit_rsk = self.rootstock_rpc.submitBitcoinBlockPartialMerkle(
                    block_hash_hex, block_header_hex, coinbase_hex,
                    merkle_hashes, txn_count)

                log.info(
                    json.dumps({
                        "rsk": "[RSKLOG]",
                        "tag": "[RSK_SUBMITBLOCK]",
                        "uuid": util.id_generator(),
                        "start": start,
                        "elapsed": Interfaces.timestamper.time(),
                        "data": block_hash_hex
                    }))

        return (header_hex, block_hash_hex, on_submit, on_submit_rsk)
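diff_to_target is not shown in this listing; under the common pool convention it scales the difficulty-1 target down by the share difficulty, roughly:

# the standard difficulty-1 target used by Bitcoin pools (an assumption:
# the actual constant lives elsewhere in this codebase)
DIFF1_TARGET = 0x00000000ffff0000000000000000000000000000000000000000000000000000

def diff_to_target(difficulty):
    # a share at difficulty d must hash at or below target(1) / d;
    # integer division preserves the full 256-bit precision
    return DIFF1_TARGET // int(difficulty)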
Example #23
    def __init__(self,
                    R,
                    B,
                    num_features,
                    num_classes,
                    seed=0,
                    tag=None,
                    save_path=None,
                    load_hash=False):
        """
        Parameters:

        R - number of sub-classifiers
        B - number of merged classes in each sub-classifier
        num_features - number of features in the input dataset
        num_classes - number of classes in the input dataset
        tag - unique tag to identify the directory that contains models, if not
                specified, a new tag will be generated and a new directory
                named with this tag and parameters R, B will be created.
        save_path - path to save the directory that contains models, hash
                        parameters and precomputed probabilities. If tag is
                        provided, the program will try to load the data from
                        the directory specified by tag.
        seed - seed for the random hash parameter generator
        load_hash - if True, load previously saved hash parameters from
                        self.hash_path instead of saving freshly generated ones

        """
        self.R = R
        self.B = B
        self.num_features = num_features
        self.num_classes = num_classes
        self.save_path = save_path
        self.seed = seed
        self.tag = tag
        self.root_dir = None
        self.model_dir = None
        self.probs_dir = None
        self.hash_path = None
        self.complete_probs_path = None
        # hash parameters
        self.a = None
        self.b = None
        self.p = hash_util.PRIME
        # training variables
        self.weights = []
        self.bias = []

        if self.tag is None:
            # create a 6 characters id to save models and hash parameters
            self.tag = util.id_generator()

        if save_path is None:
            self.save_path = "./"
        elif not save_path.endswith("/"):
            self.save_path = save_path + "/"

        self.root_dir = self.save_path + self.tag + \
                                "_B" + str(self.B) + \
                                "_R" + str(self.R)
        self.model_dir = self.root_dir + "/models"
        self.probs_dir = self.root_dir + "/probs"
        self.hash_path = self.root_dir + "/hash_" + self.tag + \
                                "_B" + str(self.B) + "_R" + str(self.R) + ".npz"

        # create the directories if they do not already exist
        for d in (self.root_dir, self.model_dir, self.probs_dir):
            if not os.path.isdir(d):
                try:
                    os.makedirs(d)
                except OSError as e:
                    if e.errno != errno.EEXIST:
                        raise

        h = hash_util.HashGenerator(seed=self.seed)
        h_a, h_b = h.get_hash_params(self.num_classes, self.R, self.B)
        self.a = h_a
        self.b = h_b
        self.p = h.get_p()

        if load_hash:
            print("loading hash parameters")
            hash_params = np.load(self.hash_path)
            self.a = hash_params['a']
            self.b = hash_params['b']
        else:
            np.savez(self.hash_path, a=h_a, b=h_b)

        self.complete_probs_path = self.probs_dir + "/complete_probs_" + \
                                   self.tag + \
                                   "_B" + str(self.B) + \
                                   "_R" + str(self.R) + ".npz"
        print("current tag is", self.tag)
Example #25
def parse_args():
    p = argparse.ArgumentParser(description="pixel mangle an image")
    p.add_argument("image", help="input image file")
    p.add_argument(
        "-o",
        "--output",
        help="output image file, defaults to a randomly generated string",
        default=util.id_generator() + ".png")
    p.add_argument(
        "-i",
        "--int_function",
        help="random, threshold, edges, waves, file, file-edges, none",
        default="threshold")
    p.add_argument("-f",
                   "--int_file",
                   help="Image used for defining intervals",
                   default="in.png")
    p.add_argument(
        "-t",
        "--threshold",
        type=float,
        help="Pixels darker than this are not sorted, between 0 and 1",
        default=0.25)
    p.add_argument(
        "-u",
        "--upper_threshold",
        type=float,
        help="Pixels brighter than this are not sorted, between 0 and 1",
        default=0.8)
    p.add_argument("-c",
                   "--clength",
                   type=int,
                   help="Characteristic length of random intervals",
                   default=50)
    p.add_argument(
        "-a",
        "--angle",
        type=float,
        help="Rotate the image by an angle (in degrees) before sorting",
        default=0)
    p.add_argument("-r",
                   "--randomness",
                   type=float,
                   help="What percentage of intervals are NOT sorted",
                   default=0)
    p.add_argument("-s",
                   "--sorting_function",
                   help="lightness, intensity, hue, saturation, minimum",
                   default="lightness")
    p.add_argument("-m",
                   "--mask",
                   help="Image used for masking parts of the image")
    p.add_argument("-l",
                   "--log_level",
                   default="WARNING",
                   help="Print more or less info",
                   choices=["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"])

    __args = p.parse_args()

    logging.basicConfig(format="%(name)s: %(levelname)s - %(message)s",
                        level=logging.getLevelName(__args.log_level))

    return {
        "image_input_path": __args.image,
        "output_image_path": __args.output,
        "interval_function": __args.int_function,
        "interval_file_path": __args.int_file,
        "bottom_threshold": __args.threshold,
        "upper_threshold": __args.upper_threshold,
        "clength": __args.clength,
        "angle": __args.angle,
        "randomness": __args.randomness,
        "sorting_function": __args.sorting_function,
        "mask": __args.mask
    }