def __init__(self): self._reader, self._writer = Pipe(duplex=False) self._rlock = Lock() if sys.platform == 'win32': self._wlock = None else: self._wlock = Lock() self._make_methods()
def __init__(self, maxsize=0):
    """Initialize the queue's pipe, locks, and bounded semaphore.

    A ``maxsize`` of zero or less means "as large as the platform's
    semaphore allows" (``SEM_VALUE_MAX``).
    """
    if maxsize <= 0:
        maxsize = _multiprocessing.SemLock.SEM_VALUE_MAX
    self._maxsize = maxsize
    self._reader, self._writer = Pipe(duplex=False)
    self._rlock = Lock()
    self._opid = os.getpid()
    # No write lock on win32 — presumably pipe writes are atomic there;
    # the platform split mirrors the read/write lock setup elsewhere.
    self._wlock = None if sys.platform == 'win32' else Lock()
    self._sem = BoundedSemaphore(maxsize)
    self._after_fork()
    # POSIX forks inherit this object, so re-arm its state after fork.
    if sys.platform != 'win32':
        register_after_fork(self, Queue._after_fork)
def __init__(self, pid): Daemon.__init__(self, pid) # Reddit https://praw.readthedocs.io/en/stable/pages/comment_parsing.html self.reddit = {} self.default_subs = 'pollster_bot' self.bot_name = 'pollster_bot' self.version = '1.0' self.touched_comment_ids = [] # create logger self.logger = logging.getLogger('Pollster_Bot') self.logger.setLevel(logging.INFO) # File handler set to DEBUG fh = logging.FileHandler(filename=os.path.join( os.path.dirname(__file__), 'PollsterBotLog.txt')) fh.setLevel(logging.DEBUG) # create console handler and set level to debug ch = logging.StreamHandler() ch.setLevel(logging.DEBUG) # create formatter formatter = logging.Formatter( '%(asctime)s - %(name)s - %(levelname)s - %(message)s') # add formatter to ch ch.setFormatter(formatter) fh.setFormatter(formatter) # add ch, fh to logger self.logger.addHandler(ch) self.logger.addHandler(fh) self.lock = Lock() self.logger.info('Starting Pollster Bot ver. ' + self.version) # Huffington post http://elections.huffingtonpost.com/pollster/api self.uri = 'http://elections.huffingtonpost.com/pollster/api/charts.json' # Set states self.states = {} self.states = self.load_json_file('data/states.json') # phrases phrases = self.load_json_file('data/phrases.json') self.greetings = phrases['greeting'] self.winning = phrases['winning'] self.losing = phrases['losing'] # keywords to call the pollster bot self.keywords = self.load_json_file('data/keywords.json')['keywords'] # subs subs = self.load_json_file('data/subs.json')['subs'] for sub in subs: self.default_subs += '+' + sub self.log_in_credentials = self.load_json_file( 'data/login_credentials.json')
def test_get_instance_with_concurrent_mode(self, mr_lock: LockFactory):
    """Without a FeatureMode, get_instance() must raise; with Concurrent mode it must build a threading.Lock."""
    expected_fragment = ("FeatureMode is None. Please configure it as one of "
                         "'multirunnable.mode.FeatureMode'.")
    try:
        instance = mr_lock.get_instance()
    except ValueError as ve:
        assert expected_fragment in str(ve), "It should set the FeatureMode first."

    mr_lock.feature_mode = FeatureMode.Concurrent
    instance = mr_lock.get_instance()

    from threading import Lock
    thread_lock_type = type(Lock())
    assert instance is not None and isinstance(instance, thread_lock_type) is True, \
        "This type of Lock instance should be 'threading.Lock'."
def Lock():
    """Return a non-recursive lock object shareable between processes.

    Bug fix: on Python 3 ``multiprocessing.synchronize.Lock`` has a
    keyword-only ``ctx`` parameter, so the bare ``Lock()`` call raised
    ``TypeError``.  Passing ``ctx=None`` makes the class fall back to the
    default multiprocessing context.
    """
    from multiprocessing.synchronize import Lock
    return Lock(ctx=None)
# limitations under the License.
import logging
import uuid

from ConfigParser import NoOptionError

import nexusproto.DataTile_pb2 as nexusproto
import numpy as np
from cassandra.auth import PlainTextAuthProvider
from cassandra.cqlengine import columns, connection, CQLEngineException
from cassandra.cqlengine.models import Model
from cassandra.policies import TokenAwarePolicy, DCAwareRoundRobinPolicy, WhiteListRoundRobinPolicy
# Bug fix: the previous `from multiprocessing.synchronize import Lock` made
# `Lock()` fail on Python 3 (keyword-only `ctx` argument).  The public
# factory works on both Python 2 and 3 and returns the same lock type.
from multiprocessing import Lock

from nexusproto.serialization import from_shaped_array

# NOTE(review): presumably guards one-time connection/driver initialization
# performed elsewhere in this module — confirm against the rest of the file.
INIT_LOCK = Lock()

logger = logging.getLogger(__name__)


class NexusTileData(Model):
    """cqlengine model mapping a tile UUID to its serialized protobuf blob."""
    __table_name__ = 'sea_surface_temp'
    tile_id = columns.UUID(primary_key=True)
    tile_blob = columns.Blob()

    # Lazily-parsed nexusproto.TileData cache (name-mangled per instance).
    __nexus_tile = None

    def _get_nexus_tile(self):
        # Parse the blob once and memoize the parsed protobuf message.
        if self.__nexus_tile is None:
            self.__nexus_tile = nexusproto.TileData.FromString(self.tile_blob)
        return self.__nexus_tile
def main(argv=None):  # IGNORE:C0111
    """Compute residue/ligand atom distances for PDB structures.

    Detects which PDB entries contain a drug-like compound, maps HMM
    domain hits onto chains, and appends a table of atom-atom distances
    (below ``--dist``) to the ``--distances`` file.  Progress is
    checkpointed in ``--procesados`` so reruns skip processed entries.
    """
    if argv is None:
        argv = sys.argv
    else:
        sys.argv.extend(argv)

    parser = ArgumentParser(formatter_class=RawDescriptionHelpFormatter)
    parser.add_argument("-v", "--verbose", dest="verbose", action="count",
                        help="set verbosity level [default: %(default)s]")
    parser.add_argument("-db", "--database_name", default='pdb')
    parser.add_argument("-host", "--db_host", default='127.0.0.1')
    parser.add_argument("--procesados", default='/tmp/pdbs_dist_procesados.txt')
    parser.add_argument("--domains",
                        default='/data/databases/pdb/processed/dns_pdbs.tlb')
    parser.add_argument("--seqs",
                        default='/data/databases/pdb/processed/pdb_seq_res.fasta')
    parser.add_argument("--pdbs", default='/data/databases/pdb/')
    parser.add_argument(
        "--distances",
        default='/data/databases/pdb/processed/distances.tbl',
        help="Final output: table with atom distances between residues and ligands. Only for distances less than 'dist' parameter")
    # Bug fix: type=float so a CLI-supplied value is compared numerically,
    # not as a string, against computed distances.
    parser.add_argument("--dist", type=float, default=5)
    parser.add_argument(
        "--pdbs_with_drug",
        default='/data/databases/pdb/processed/pdbs_with_drug.txt',
        help="Output: list of PDB codes with an associated ligand")
    args = parser.parse_args()

    if not os.path.exists(args.pdbs):
        # Bug fix: was `parser.pdbs` (AttributeError on ArgumentParser).
        sys.stderr.write("%s not found. Specify where is pdbs/divided directory" % (args.pdbs,))
        sys.exit(1)

    PDB_PATH = args.pdbs
    CONTACT_DIST = args.dist

    pdbs_with_drug_path = args.pdbs_with_drug
    if not os.path.exists(os.path.dirname(args.pdbs_with_drug)):
        # Bug fix: the original format string had two %s placeholders but
        # only one argument, raising TypeError instead of the message.
        sys.stderr.write("can't create %s. Set pdbs_with_drug correctly" % (pdbs_with_drug_path,))
        sys.exit(1)
    if not os.path.exists(os.path.dirname(args.distances)):
        sys.stderr.write("can't create %s. Set distances correctly" % (args.distances,))
        sys.exit(1)

    pdbs_procesados_path = args.procesados
    print("In %s the processed pdbs are kept, if the file is deleted, the process starts from scratch " % pdbs_procesados_path)
    print("Outputs: '%s' and '%s' " % (pdbs_with_drug_path, args.distances))

    # Bug fix: keep the checkpoint collection a set throughout.  It used to
    # become a dict after loading while juan() later called .append() on it.
    pdbs_procesados = set()
    if os.path.exists(pdbs_procesados_path):
        with open(pdbs_procesados_path) as handle:
            pdbs_procesados = {x.strip() for x in handle.readlines()}

    pdbs_iterator = PDBsIterator(pdb_dir=args.pdbs)

    def not_processed_iter():
        # Yields [pdb_code, pdb_path] pairs not yet checkpointed.
        for pdb, pdb_path in pdbs_iterator:
            if pdb not in pdbs_procesados:
                yield [pdb, pdb_path]

    DNsPDBs = args.domains
    if not os.path.exists(DNsPDBs):
        seqs_from_pdb = args.seqs
        if not os.path.exists(seqs_from_pdb):
            sys.stderr.write(
                "%s does not exists and %s not found. Specify where it is."
                % (DNsPDBs, seqs_from_pdb))
            sys.exit(1)
        sys.stderr.write(
            "%s not found. You can create it with the following command: \n"
            % DNsPDBs)
        sys.stderr.write(
            "hmmscan --cut_tc --domtblout dns_pdbs.tlb --acc -o pdb_seq_res.hmm Pfam-A.hmm seqs_from_pdb.fasta")
        sys.exit(1)

    drugcompounds = [
        x for x, y in compound_type.items() if y in ["DRUG", "COFACTOR"]
    ]
    othercompounds = [
        x for x, y in compound_type.items()
        if y in ["METAL", "SUGAR", "NUCLEOTIDE", "LIPID"]
    ]
    aminoacidcompounds = [
        x for x, y in compound_type.items() if y in ["MODIFIED", "RESIDUE"]
    ]
    drugcompounds = othercompounds + drugcompounds

    # Bug fix: removed a hard-coded reassignment of pdbs_with_drug_path that
    # silently ignored the --pdbs_with_drug option (it duplicated the default).

    _log.info("proceced pdbs: %i" % len(pdbs_procesados))

    p = PDBParser(PERMISSIVE=1, QUIET=1)

    pdbs_with_drug = []
    if os.path.exists(pdbs_with_drug_path):
        _log.info("pdbs with drugs already loaded")
        with open(pdbs_with_drug_path) as handle:
            for x in handle.readlines():
                pdbs_with_drug.append(x.strip())
    else:
        with open(pdbs_with_drug_path, "a") as handle:
            _log.info("pdbs with drugs will be loaded")
            pdbs = list(pdbs_iterator)
            for pdb, file_path in tqdm(pdbs):
                try:
                    if pdb not in pdbs_with_drug:
                        structure = p.get_structure(pdb, file_path)
                        for res in structure.get_residues():
                            if res.resname in drugcompounds:
                                pdbs_with_drug.append(pdb)
                                handle.write(pdb + "\n")
                                handle.flush()
                                break
                except Exception as ex:
                    print(str(ex))

    def _derive_position_columns(df):
        # query_name looks like "<pdb>_<chain>_<start>_<end>".
        # Series.map (not builtin map) keeps this correct on Python 3,
        # where builtin map returns an iterator.
        df["pdb"] = df["query_name"].map(lambda x: x.split("_")[0].lower().strip())
        df["chain"] = df["query_name"].map(lambda x: x.split("_")[1].upper().strip())
        df["start_res"] = df["query_name"].map(lambda x: x.split("_")[2].upper().strip())
        df["end_res"] = df["query_name"].map(lambda x: x.split("_")[3].upper().strip())

    if not os.path.exists(DNsPDBs + "2"):
        cols = [
            "target_name", "accession", "tlen", "query_name", "accession2",
            "qlen", "E-value", "score1", "bias1", "#", "of", "c-Evalue",
            "i-Evalue", "score2", "bias2", "from1", "to1", "from2", "to2",
            "from3", "to3", "acc"
        ]
        _log.info("correcting hmmer-pdb output")
        regexp = re.compile(" +")
        items = []
        # Bug fix: close the domains file instead of leaking the handle.
        with open(DNsPDBs) as dns_handle:
            for x in tqdm(dns_handle.readlines()):
                if not x.startswith("#"):
                    line = regexp.split(x)
                    items.append(line[0:len(cols)])
        df_hmm = pd.DataFrame.from_records(items, columns=cols)
        df_hmm = df_hmm[["accession", "query_name", "from3", "to3"]]
        df_hmm.to_csv(DNsPDBs + "2")
        _derive_position_columns(df_hmm)
    else:
        df_hmm = pd.read_csv(DNsPDBs + "2")
        _derive_position_columns(df_hmm)

    print(len(df_hmm))

    lock = Lock()

    def centeroid(arr):
        # Arithmetic mean of the atoms' x/y/z coordinates.
        length = len(arr)
        sum_x = np.sum([x.coord[0] for x in arr])
        sum_y = np.sum([x.coord[1] for x in arr])
        sum_z = np.sum([x.coord[2] for x in arr])
        return sum_x / length, sum_y / length, sum_z / length

    def residues_near_drug(drug_centroid, aa_residues):
        # Residues with at least one atom within 10 of the drug centroid;
        # atoms beyond 20 short-circuit the residue early.
        residues_near = []
        for r in aa_residues:
            for a in list(r):
                dist = a - Struct(coord=drug_centroid)
                if dist > 20:
                    break
                if dist < 10:
                    residues_near.append(r)
                    break
        return residues_near

    def juan(pdb_raw):
        # Process one pdb code and always checkpoint it, even on failure.
        try:
            pepe(pdb_raw)
        except Exception:
            traceback.print_exc()
        finally:
            with lock:
                pdbs_procesados.add(pdb_raw)
                with open(pdbs_procesados_path, "a") as handle:
                    handle.write(pdb_raw + "\n")

    def pepe(pdb):
        # Compute and append residue/ligand contacts for a single structure.
        p = PDBParser(PERMISSIVE=1, QUIET=1)
        path_dir = PDB_PATH + "/" + pdb[1:3].lower() + "/"
        path = path_dir + "pdb" + pdb.lower() + ".ent"
        model = list(p.get_structure('X', path))[0]
        for chain_obj in list(model):
            chain = chain_obj.id
            hmm_residues = {}
            pdb_seq = list(model[chain].get_residues())
            if pdb_seq:
                hmm_contacts = {}
                hmms = df_hmm[(df_hmm["pdb"] == pdb)
                              & (df_hmm["chain"] == chain)
                              & (df_hmm["start_res"] == str(pdb_seq[0].id[1]))]
                for j, hmm in hmms.iterrows():
                    try:
                        hmm_start = int(hmm["from3"]) - 1
                        hmm_end = int(hmm["to3"]) - 1
                        hmm_chain_name = "_".join(
                            map(str, [
                                hmm["accession"].split(".")[0], hmm["chain"],
                                pdb_seq[hmm_start].id[1], pdb_seq[hmm_end].id[1]
                            ]))
                        hmm_contacts[hmm_chain_name] = []
                        hmm_residues.update({
                            res.id[1]: hmm_chain_name
                            for res in pdb_seq[hmm_start:hmm_end]
                        })
                    except IndexError:
                        print(pdb, hmm["accession"], hmm["chain"], hmm_start,
                              hmm_end, pdb_seq)
            aa_residues = []
            drug_molecules = []
            for res_obj in chain_obj.get_residues():
                if res_obj.resname in drugcompounds:
                    drug_molecules.append(res_obj)
                elif res_obj.resname in aminoacidcompounds:
                    aa_residues.append(res_obj)
            for res_drug_obj in drug_molecules:
                drug_centroid = centeroid(list(res_drug_obj))
                near_residues = residues_near_drug(drug_centroid, aa_residues)
                for drug_atom in list(res_drug_obj):
                    for near_residue in near_residues:
                        for residue_atom in list(near_residue):
                            distance = (residue_atom - drug_atom)
                            if distance > 20:
                                break
                            if distance < CONTACT_DIST:
                                with open(args.distances, "a") as handle:
                                    hmm_name = hmm_residues[
                                        near_residue.id[1]] if near_residue.id[
                                            1] in hmm_residues else "NoDn"
                                    fields = [
                                        pdb, chain, hmm_name,
                                        near_residue.id[1],
                                        near_residue.resname,
                                        residue_atom.serial_number,
                                        res_drug_obj.id[1],
                                        res_drug_obj.resname,
                                        drug_atom.serial_number, distance
                                    ]
                                    handle.write("\t".join(map(str, fields)) + "\n")

    _log.info("processing distances file")
    for x in tqdm(set(pdbs_with_drug)):
        if x not in pdbs_procesados:
            juan(x)
    # pool = ThreadPool(1)
    # pool.map(juan, set(pdbs_with_drug) - set(pdbs_procesados))
    print("Finished!!!")
#!/usr/bin/env python # coding=utf-8 # Python 3.6 from multiprocessing.synchronize import Lock from PIL import Image from utils import * pathlock = Lock() # type: Lock class TextureConvert(object): def __init__(self): self.args = get_args() self.tool = "PVRTexToolCLI" pass def execute(self): print("BuildTool excute >") pass def get_texture_format(self, option, exists_alpha=True): args = get_args() if args.convert_tool == "PVRTexToolCLI": if option == "ETC1": return "ETC1" elif option == "ETC2": if exists_alpha: return "ETC2_RGBA" else: return "ETC2_RGB" elif args.convert_tool == "etctool":
def __init__(self): self._events = [] self._lock = Lock()
#! -*- coding: utf-8 -*-
from rpccore.gen import Links
from thrift.server import TServer
from thrift.transport import TSocket
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol
# Bug fix: constructing multiprocessing.synchronize.Lock directly requires a
# `ctx` keyword on Python 3; the public factory returns the same lock type
# and works on both Python 2 and 3.
from multiprocessing import Lock

_lock = Lock()


class LinksHandler(Links.InterFace):
    """Thrift handler serving the Links interface on a fixed port."""
    DEFAULT_PORT = 9990

    def getStand(self, predicate, pendings):
        # Bug fix: the old try/finally acquired the lock *inside* the try,
        # so a failed acquire still triggered release() on an unheld lock.
        # `with` acquires before entering the block and always releases.
        with _lock:
            print("do something")
            return


handler = LinksHandler()
processor = Links.Processor(handler)
transport = TSocket.TServerSocket(port=LinksHandler.DEFAULT_PORT)
tfactory = TTransport.TBufferedTransportFactory()
pfactory = TBinaryProtocol.TBinaryProtocolFactory()
server = TServer.TThreadedServer(processor, transport, tfactory, pfactory)
def Lock():
    """Return a non-recursive lock object shareable between processes.

    Bug fix: on Python 3 ``multiprocessing.synchronize.Lock`` takes a
    keyword-only ``ctx`` parameter, so the bare ``Lock()`` call raised
    ``TypeError``.  ``ctx=None`` selects the default multiprocessing
    context.
    """
    from multiprocessing.synchronize import Lock
    return Lock(ctx=None)
from ObexSender.ObexSender import ObexSender import ProximusAgent from ProximusAgent import ProximusAgent import getopt #from encodings.punycode import adapt def DeviceFound(address, properties): if not queue.has_key(address): queue[address] = properties #print "# Queued %s Name: %s" % (address, properties["Name"]) manager = bluez.Manager('gobject') adapter = manager.DefaultAdapter() queueLock = Lock() queue = {} logger = None def MePropertyChanged(name, value): global adapter global logger if (logger != None): logger.debug("# - property changed (%s: %s)" % (name, value)) if (name == "Discovering"): if (value == 1): if (logger != None): logger.debug("# - queue sleep 10") time.sleep(10) queueLock.acquire()
# limitations under the License. import logging import uuid from configparser import NoOptionError import nexusproto.DataTile_pb2 as nexusproto import numpy as np from cassandra.auth import PlainTextAuthProvider from cassandra.cqlengine import columns, connection, CQLEngineException from cassandra.cqlengine.models import Model from cassandra.policies import TokenAwarePolicy, DCAwareRoundRobinPolicy, WhiteListRoundRobinPolicy from multiprocessing.synchronize import Lock from nexusproto.serialization import from_shaped_array INIT_LOCK = Lock(ctx=None) logger = logging.getLogger(__name__) class NexusTileData(Model): __table_name__ = 'sea_surface_temp' tile_id = columns.UUID(primary_key=True) tile_blob = columns.Blob() __nexus_tile = None def _get_nexus_tile(self): if self.__nexus_tile is None: self.__nexus_tile = nexusproto.TileData.FromString(self.tile_blob)