def check_mod_server(ip,
                     port,
                     mod_name="NoName",
                     show_send_recv_dat_=True,
                     logger=Logger.get_stdout_logger()):
    if check_port(ip, port):
        check_rst = False
        try:
            recv = JsonSocket.json_send_and_recv(
                ip,
                port, {"cmd": "check"},
                show_send_recv_dat_=show_send_recv_dat_)

            if recv:
                res = recv[0]
                if res is not None and "state" in res and res["state"] == "healthy":
                    check_rst = True
        except Exception as e:
            Logger.write_exception_log(
                logger, e,
                "check_mod_server({}, {}, {})".format(ip, port, mod_name))

        if check_rst:
            logger.info(" # {}://{}:{:d} is in healthy state.".format(
                mod_name, ip, port))
        else:
            logger.info(" # {}://{}:{:d} is NOT in healthy state.".format(
                mod_name, ip, port))
        return check_rst
    else:
        logger.info(" @ {}://{}:{:d} DOES NOT exist.".format(
            mod_name, ip, port))
        return False
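
A quick usage sketch for check_mod_server (the host, port, and module name below are hypothetical; it assumes a JsonSocket-based module server is already running there):

# Hypothetical health check against a locally running module server.
if check_mod_server("127.0.0.1", 9001, mod_name="ocr", show_send_recv_dat_=False):
    print("ocr module server is healthy")
else:
    print("ocr module server is down or unhealthy")
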
Example #2
def parseQuandl(strResponse):
    """
    Parse a quandl GET request to collect all the tickers returned in the response

    :param strResponse: a response from quandl
    :return: array containing tickers
    """
    aTickers = []
    aRows = strResponse.split('\n')

    #the first row will be a header
    #find the 'ticker' column so we can figure out what column contains the ticker

    i = 0
    iTickerCol = -1
    aHeader = aRows.pop(0).split(',')
    while i < len(aHeader):
        if aHeader[i] == 'ticker':
            iTickerCol = i
            break
        i += 1

    if iTickerCol == -1:
        Logger.logError('There were no tickers returned from quandl')
        return aTickers

    #loop through the remaining rows and collect all the tickers
    for strRow in aRows:
        aRow = strRow.split(',')
        aTickers.append(aRow[iTickerCol])

    return aTickers
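
A small sketch of how the parser above behaves on a Quandl-style CSV response (the response text is fabricated for illustration):

# Fabricated CSV payload in the shape Quandl returns: a header row with a
# 'ticker' column, then one row per instrument.
strResponse = "ticker,name,exchange\nAAPL,Apple Inc,NASDAQ\nMSFT,Microsoft Corp,NASDAQ"
print(parseQuandl(strResponse))  # -> ['AAPL', 'MSFT']
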
Example #3
    def handle_request(self, func):
        while not self._stop_event or not self._stop_event.is_set():
            if not self.listen_sock:
                self.listen_sock = self._get_listen_socket(listen_num=50)

            if not self.listen_sock:
                self.logger.error("failed to listen {}:{}".format(
                    self.ip, self.port))
                return

            conn, client_address = self.listen_sock.accept()
            self.logger.info(" # Connected with {}".format(client_address))
            try:
                connected_socket = self._create_socket_instance(conn)

                data = connected_socket.recv()
                ret_check_recv = self.bypass_check_data(data)
                if ret_check_recv:
                    connected_socket.send(ret_check_recv)
                else:
                    response = func(data)
                    if response:
                        connected_socket.send(response)

            except Exception as e:
                Logger.write_exception_log(self.logger, e,
                                           "# handle_request.exception")
                try:
                    self.listen_sock.close()
                except Exception as c_e:
                    self.logger.error(" # close.exception : {}".format(c_e))
                self.listen_sock = None

            finally:
                conn.close()
Example #4
def get(oDB, strTableTemplate, aTickers, strDate):

    aRetval = {}
    for strTicker in aTickers:

        #dont do the same work twice
        if strTicker in aRetval:
            continue

        strTable = strTableTemplate.replace(TABLE_WILDCARD, strTicker)
        strQuery = """
            SELECT *
            FROM {0}
            WHERE date >= {1}
            ORDER BY date DESC;
        """.format(strTable, quoteString(strDate))

        #go to next ticker if error selecting data
        aData = Connection.execute(oDB, strQuery)
        if not aData:
            Logger.logError("Error trying to select data")
            continue

        #add data to retval
        aRetval[strTicker] = mapSelect(aData)

    return aRetval
Example #5
    def __init__(self):

        self.aPortfolios = []

        # recursively iterate over the directory that contains algorithm classes
        for root, dirs, files in os.walk("Algorithms"):
            for file in files:
                if file.endswith(".py") and file != "__init__.py":

                    try:
                        # get an algorithm's file information
                        path = os.path.join(root, file)
                        info = os.stat(path)
                        oPortfolio = {"last_modified": info.st_mtime, "file_path": path, "file_name": file}

                        # TODO: we should also add a user identifier so we know who this algo belongs to

                        # get an algorithm's object instance
                        strAlgorithmClass = file.split(".")[0]
                        oModule = imp.load_source(strAlgorithmClass, path)
                        oAlgorithm = getattr(oModule, strAlgorithmClass)()

                        # store an algorithm's file info and obj instance
                        oPortfolio['algorithm'] = oAlgorithm
                        self.aPortfolios.append(oPortfolio)

                        del path, info, strAlgorithmClass, oModule

                    except Exception as e:
                        Logger.logError("Failed to instantiate {0}: {1}".format(str(file), str(e)))
Example #6
    def __init__(self):
        self.dataSetFile = ''
        self.successLogger = Logger(os.path.join(FACE_DETECTION_ROOT, 'faceDetection.success'))
        self.errorLogger = Logger(os.path.join(FACE_DETECTION_ROOT, 'faceDetection.error'))
        self.boundingboxFile = Logger(os.path.join(FACE_DETECTION_ROOT, 'boundingbox.list'))

        self.cc = cv2.CascadeClassifier(os.path.join(FACE_DETECTION_ROOT, 'haarcascade_frontalface_alt.xml'))
Example #7
def addToPhEDEx(WFName, fileList):
    dsName = getDatasetName(WFName)

    # no file to inject for the dataset
    if not fileList:
        return

    # create xml data for injection&subscription
    Logger.log('Creating xml file for injection')
    xmlData = createXML(WFName, fileList)

    # inject data
    Logger.log("Injecting to CASTOR: %s" % dsName)
    data = request.send(urlInject, {
        'data': xmlData,
        'node': 'T0_CH_CERN_Export'
    })
    if data:
        # subscribed it so that transfer can start from CASTOR to EOS
        Logger.log("Subscribing at EOS : %s" % dsName)
        request.send(
            urlSubscribe, {
                'data': xmlData,
                'node': 'T2_CH_CERN',
                'group': 'transferops',
                'no_mail': 'y',
                'comments': 'auto-approved log transfer from CASTOR to EOS'
            })
    else:
        Logger.log('Skipping subscription since injection failed')
Example #8
def get_server_socket(ip,
                      port,
                      logger=Logger.get_stdout_logger(),
                      proc_name='',
                      listen_num=5):
    logger.info(" # Getting server socket...")
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    server_address = (ip, port)
    # logger.info(server_address)
    # check = check_port(ip, port, logger=logger)
    # logger.info(check)
    # if check:
    if check_port(ip, port):  # , logger=logger):
        logger.info(" # Port, {:d}, was already taken. "
                    "The process using {:d} will be killed first.".format(
                        port, port))
        kill_process(port, name=proc_name)

    logger.info(" # Starting up \"{}\" SERVER on {}:{:d}...".format(
        proc_name, ip, port))
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.bind(server_address)
    sock.listen(listen_num)

    return sock
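
A usage sketch for get_server_socket (the bind address, port, and process name are placeholders; the accept loop is illustrative only):

# Bind a listening socket, then serve connections one at a time.
sock = get_server_socket("0.0.0.0", 9000, proc_name="demo_server")
while True:
    conn, addr = sock.accept()
    try:
        conn.sendall(b"hello\n")
    finally:
        conn.close()
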
Example #9
def get_images_from_video(vid_fname,
                          out_path,
                          frame_interval,
                          logger=Logger.get_stdout_logger()):
    utils.file_exists(vid_fname, exit_=True)
    utils.folder_exists(out_path, exit_=False, create_=True, print_=True)

    logger.info(" # Extract image from video, {}".format(vid_fname))

    vid = mpy.VideoFileClip(vid_fname)
    base_fname = os.path.splitext(os.path.basename(vid_fname))[0]
    i_digit = int(np.log10(vid.duration / frame_interval)) + 1
    n_digit = int(np.log10(vid.duration)) + 3

    for i, s in enumerate(itools.numeric_range(0, vid.duration,
                                               frame_interval)):
        frame = vid.get_frame(s)
        time_info = "__" + \
                    "{:0{width}d}".format(i, width=i_digit) + \
                    "__" + \
                    "{:0{width}.1f}sec".format(s, width=n_digit)
        out_fname = os.path.join(out_path,
                                 base_fname + time_info + IMAGE_FILE_EXT)
        utils.imwrite(frame, out_fname)
        logger.info(" # save image, {}".format(out_fname))
Example #10
    def _get_listen_socket(self, listen_num=5):
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        server_address = (self.ip, self.port)

        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)

        try:
            sock.bind(server_address)
        except Exception as e:
            Logger.write_exception_log(self.logger, e)
            return None

        sock.listen(listen_num)

        self.logger.info("listen : {}, {}".format(self.ip, self.port))
        return sock
Example #11
def check_module_servers(server_features,
                         exit_=False,
                         show_send_recv_dat_=False,
                         logger=Logger.get_stdout_logger()):
    fail_list = []

    for server_feat in server_features:

        if isinstance(server_features, list):
            ip = server_feat.ip
            port = server_feat.port
            # name = server_feat.name
            acronym = server_feat.acronym
        elif isinstance(server_features, dict):
            ip = server_features[server_feat].ip
            port = server_features[server_feat].port
            # name = server_features[server_feat].name
            acronym = server_features[server_feat].acronym
        else:
            ip, port, name, acronym = None, None, None, None

        if not check_mod_server(ip,
                                port,
                                acronym,
                                show_send_recv_dat_=show_send_recv_dat_,
                                logger=logger):
            fail_list.append([ip, port])

    if fail_list and exit_:
        sys.exit(1)

    return len(fail_list) == 0, fail_list
Example #12
def get(strRequest):
    """
    Performs a GET request provided by the given request

    :param strRequest: the URL of the request
    :return: response if OK, None if not
    """
    try:
        return urllib2.urlopen(strRequest)
    except urllib2.HTTPError as e:
        strError = "The server couldn't fulfill the request. Error code: " + str(e.code)
        Logger.logError(strError)
        return None
    except urllib2.URLError as e:
        strError = "We failed to reach the server. Reason: " + e.reason
        Logger.logError(strError)
        return None
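
A usage sketch for the GET helper above (the URL is a placeholder):

# Placeholder URL; get() returns a urllib2 response object, or None on failure.
response = get("https://www.quandl.com/api/v3/datasets.csv?database_code=WIKI")
if response:
    strBody = response.read()
    print("GET returned %d bytes" % len(strBody))
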
Example #13
    def __init__(self, db_file=None, log_file=None, debug=False):
        self.dbms = 'unknown'
        self.metadata = sqlalchemy.MetaData()
        self.schema = {}
        self.tables = {}
        self.types = {}
        self.sources = {}
        self.logger = Logger(log_file, debug, 'Database.logger')
Example #14
def save_txt_file(contents,
                  fname,
                  out_path=None,
                  desc='',
                  logger=Logger.get_stdout_logger()):
    txt_fname = os.path.join(out_path, fname) if out_path else fname
    logger.info(" # save {} file, {}.".format(desc, txt_fname))
    with open(txt_fname, "w") as f:
        f.write(contents)
Example #15
def save_video_file_from_images(img_arr,
                                vid_fname,
                                duration=2,
                                fps=30,
                                logger=Logger.get_stdout_logger()):
    clips = [mpy.ImageClip(m).set_duration(duration) for m in img_arr]
    concat_clip = mpy.concatenate_videoclips(clips, method='compose')
    concat_clip.write_videofile(vid_fname, fps=fps)
    logger.info(" # save video file from {:d} images, {}.".format(
        len(img_arr), vid_fname))
    return True
Example #16
def createXML(WFName, fileInfoList):
    dsName = getDatasetName(WFName)
    xml = []
    xml.append('<data version="2.0">')
    xml.append(' <dbs name="Log_Files" dls="dbs">')
    xml.append('  <dataset name="%s" is-open="y" is-transient="n">' % dsName)
    xml.append('   <block name="%s#01" is-open="y">' % dsName)

    for file in fileInfoList:
        fileLFN = getLFN(WFName,file['name'])
        # if checksum is missing do not add it to xml
        if not file['checksum']:
            Logger.log('skipping file without checksum: %s' % fileLFN)
            continue
        xml.append('    <file name="%s" bytes="%s" checksum="adler32:%s"/>' % (fileLFN,file['size'],file['checksum']))

    xml.append('   </block>')
    xml.append('  </dataset>')
    xml.append(' </dbs>')
    xml.append('</data>')
    return ''.join(xml)
Example #17
    def __init__(self,
                 ip,
                 port,
                 logger=Logger.get_stdout_logger(),
                 listen_num=5):
        self.logger = logger
        self.ip = ip
        self.port = port
        self.listen_sock = self._get_listen_socket(listen_num)
        self._stop_event = multiprocessing.Event()
        self._lock = multiprocessing.Lock()
        self._cond_lock = multiprocessing.Condition(self._lock)
Example #18
def createXML(WFName, fileInfoList):
    dsName = getDatasetName(WFName)
    xml = []
    xml.append('<data version="2.0">')
    xml.append(' <dbs name="Log_Files" dls="dbs">')
    xml.append('  <dataset name="%s" is-open="y" is-transient="n">' % dsName)
    xml.append('   <block name="%s#01" is-open="y">' % dsName)

    for file in fileInfoList:
        fileLFN = getLFN(WFName, file['name'])
        # if checksum is missing do not add it to xml
        if not file['checksum']:
            Logger.log('skipping file without checksum: %s' % fileLFN)
            continue
        xml.append('    <file name="%s" bytes="%s" checksum="adler32:%s"/>' %
                   (fileLFN, file['size'], file['checksum']))

    xml.append('   </block>')
    xml.append('  </dataset>')
    xml.append(' </dbs>')
    xml.append('</data>')
    return ''.join(xml)
Example #19
def save_video_from_image_array(img_arr,
                                base_fname,
                                out_path=None,
                                vid_duration=2,
                                logger=Logger.get_stdout_logger()):
    if vid_duration <= 0 or not img_arr:
        return

    out_fname = os.path.join(out_path, base_fname) if out_path else base_fname
    out_fname += VIDEO_FILE_EXT

    logger.info(" # save video from {:d} images, {}.".format(
        len(img_arr), out_fname))
    create_video_from_images(img_arr, out_fname, duration=vid_duration)
Example #20
    def __init__(self):
        self.dataSetFile = ''
        self.successLogger = Logger(
            os.path.join(FACE_DETECTION_ROOT, 'faceDetection.success'))
        self.errorLogger = Logger(
            os.path.join(FACE_DETECTION_ROOT, 'faceDetection.error'))
        self.boundingboxFile = Logger(
            os.path.join(FACE_DETECTION_ROOT, 'boundingbox.list'))

        self.cc = cv2.CascadeClassifier(
            os.path.join(FACE_DETECTION_ROOT,
                         'haarcascade_frontalface_alt.xml'))
Example #21
def addToPhEDEx(WFName, fileList):
    dsName = getDatasetName(WFName)

    # no file to inject for the dataset
    if not fileList:
        return

    # create xml data for injection&subscription
    Logger.log('Creating xml file for injection')
    xmlData = createXML(WFName, fileList)

    # inject data
    Logger.log("Injecting to CASTOR: %s" %dsName)
    data = request.send(urlInject, {'data':xmlData,'node':'T0_CH_CERN_Export'})
    if data:
        # subscribed it so that transfer can start from CASTOR to EOS
        Logger.log("Subscribing at EOS : %s" % dsName)
        request.send(urlSubscribe, {'data':xmlData,'node':'T2_CH_CERN','group':'transferops','no_mail':'y','comments':'auto-approved log transfer from CASTOR to EOS'})
    else:
        Logger.log('Skipping subscription since injection failed')
Example #22
def detectLandmarks(boundingboxList):
    """
        detect landmarks in `src` and store the result in `dst`
    """

    #bboxes = []
    #landmarks = []
    fl = Landmarker()
    logger = Logger(os.path.join(FACE_ALIGNMENT_ROOT, 'landmark.list'))

    # create bbox list
    fid = open(boundingboxList, 'r')
    fLines = fid.read().splitlines()
    fid.close()

    for line in fLines:
        word = line.split()
        filename = word[0]
        img = cv2.imread(filename)
        gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)

        bbox = BBox([int(word[1]), int(word[2]), int(word[3]), int(word[4])])\
                .subBBox(0.1, 0.9, 0.2, 1)

        landmark, status = fl.detectLandmark(gray, bbox)

        '''
        get real landmark position
        '''
        landmark = bbox.reprojectLandmark(landmark)

        logger.writeMsg("%s" % filename)
        for x, y in landmark:
            logger.writeMsg(" %s %s" % (str(x), str(y)))
        logger.writeMsg('\n')

        '''
        free memory: force the Garbage Collector to release 
        '''
        gc.collect()
Example #23
class FaceDetector(object):
    def __init__(self):
        self.dataSetFile = ''
        self.successLogger = Logger(
            os.path.join(FACE_DETECTION_ROOT, 'faceDetection.success'))
        self.errorLogger = Logger(
            os.path.join(FACE_DETECTION_ROOT, 'faceDetection.error'))
        self.boundingboxFile = Logger(
            os.path.join(FACE_DETECTION_ROOT, 'boundingbox.list'))

        self.cc = cv2.CascadeClassifier(
            os.path.join(FACE_DETECTION_ROOT,
                         'haarcascade_frontalface_alt.xml'))

    def setDataSetFile(self, filePath):
        self.dataSetFile = filePath

    def run(self):
        fid = open(self.dataSetFile, 'r').read().splitlines()

        for src in fid:
            self._detectFace(src)

    def _detectFace(self, imgPath):
        img = cv2.imread(imgPath)
        gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)

        rects = self.cc.detectMultiScale(gray, scaleFactor=1.2, minNeighbors=2, \
                    minSize=(30, 30), flags = cv2.CASCADE_SCALE_IMAGE)

        if not len(rects):
            self.errorLogger.writeMsg("%s\n" % imgPath)
            return

        for rect in rects:
            rect[2:] += rect[:2]

            self.successLogger.writeMsg("%s\n" % imgPath)
            '''
            boundingbox format: left right top bottom
            '''
            self.boundingboxFile.writeMsg("%s %s %s %s %s\n" % \
                (imgPath, str(rect[0]), str(rect[2]), str(rect[1]), str(rect[3])))
Example #24
class FaceDetector(object):

    def __init__(self):
        self.dataSetFile = ''
        self.successLogger = Logger(os.path.join(FACE_DETECTION_ROOT, 'faceDetection.success'))
        self.errorLogger = Logger(os.path.join(FACE_DETECTION_ROOT, 'faceDetection.error'))
        self.boundingboxFile = Logger(os.path.join(FACE_DETECTION_ROOT, 'boundingbox.list'))

        self.cc = cv2.CascadeClassifier(os.path.join(FACE_DETECTION_ROOT, 'haarcascade_frontalface_alt.xml'))

    def setDataSetFile(self, filePath):
        self.dataSetFile = filePath

    def run(self):
        fid = open(self.dataSetFile, 'r').read().splitlines()

        for src in fid:
            self._detectFace(src)

    def _detectFace(self, imgPath):
        img = cv2.imread(imgPath)
        gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)

        rects = self.cc.detectMultiScale(gray, scaleFactor=1.2, minNeighbors=2, \
                    minSize=(30, 30), flags = cv2.CASCADE_SCALE_IMAGE)

        if not len(rects):
            self.errorLogger.writeMsg("%s\n" % imgPath)
            return

        for rect in rects:
            rect[2:] += rect[:2]

            self.successLogger.writeMsg("%s\n" % imgPath)

            '''
            boundingbox format: left right top bottom
            '''
            self.boundingboxFile.writeMsg("%s %s %s %s %s\n" % \
                (imgPath, str(rect[0]), str(rect[2]), str(rect[1]), str(rect[3])))
Example #25
    def insertDailyData(self):
        """
        Routine for collecting and inserting daily data from the YahooApi. All data is for the previously closed
        trading day.
        :return: None
        """

        Logger.logApp("Collecting and inserting daily data...")

        # chunk the tickers into a manageable size, retrieve data for each chunk, and then insert each chunk
        # chunking allows us to insert periodically through the data collection process and ensures our YahooApi request
        # doesn't return a 414 response code (URI too long)
        iCurChunk = 0
        aTickers = self.getQuandlTickers(AppVars.DATA_DAILY_TICKERS)
        aTickerChunks = Utils.chunk(aTickers, AppVars.CHUNK_TICKERS)
        for iCurChunk in range(0, len(aTickerChunks)):
            oData = Api.getData(aTickerChunks[iCurChunk], AppVars.DATA_DAILY_DIMENSIONS)
            if oData:
                TradingData.insert(self.oDB, TradingData.S_DAILY_DATA, oData)
                self.oDB.commit()
                Logger.logApp("Inserting data for chunk " + str(iCurChunk + 1) + " of " + str(len(aTickerChunks)))
            else:
                Logger.logError('There was an error retrieving data for chunk ' +  str(iCurChunk + 1))
            del oData
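
The chunking in the example above relies on a Utils.chunk helper that is not shown here; a minimal splitter along those lines (an assumption, not the project's actual implementation) could look like:

def chunk(aItems, iSize):
    # Split aItems into consecutive lists of at most iSize elements.
    return [aItems[i:i + iSize] for i in range(0, len(aItems), iSize)]

# e.g. chunk(['AAPL', 'MSFT', 'GOOG'], 2) -> [['AAPL', 'MSFT'], ['GOOG']]
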
Example #26
                'data': xmlData,
                'node': 'T2_CH_CERN',
                'group': 'transferops',
                'no_mail': 'y',
                'comments': 'auto-approved log transfer from CASTOR to EOS'
            })
    else:
        Logger.log('Skipping subscription since injection failed')


if __name__ == '__main__':
    # Get request object for future PhEDEx calls
    request = Request()

    # get list of datasets injected before
    Logger.log('retrieving list of injected files in %s/%s' % (year, month))
    injectedFiles = getAlreadyInjectedFiles()
    Logger.log('number of already injected files: %s' % len(injectedFiles))

    #list log directories on CASTOR
    Logger.log('listing files under %s' % logCastorPath)
    cmd = 'nsls -lR --checksum %s' % logCastorPath

    process = subprocess.Popen(cmd.split(),
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    #process.wait()

    Logger.log('listing is completed, parsing the output')
    fileList = []
    currentWF = None
Example #27
    def __init__(self,
                 ip,
                 port,
                 logger=Logger.get_stdout_logger(),
                 listen_num=1):
        super().__init__(ip, port, logger, listen_num)
Example #28
        "transition":
        namedtuple('transition',
                   ('state', 'action', 'reward', 'next_state', 'done'))
    })

    print(f"Environment: {params['env_name']}\n"
          f"Number of actions: {params['n_actions']}")

    if params["do_intro_env"]:
        intro_env()

    env = make_atari(params["env_name"], params["seed"])

    agent = Agent(**params)
    experiment = Experiment()
    logger = Logger(agent, experiment=experiment, **params)

    if not params["train_from_scratch"]:
        checkpoint = logger.load_weights()
        agent.online_model.load_state_dict(
            checkpoint["online_model_state_dict"])
        agent.hard_update_target_network()
        params.update({"beta": checkpoint["beta"]})
        min_episode = checkpoint["episode"]

        print("Keep training from previous run.")
    else:
        min_episode = 0
        print("Train from scratch.")

    if params["do_train"]:
Example #29
    def run(self):
        """
        Main daemon process invoked by DataDaemon. This method is an infinite loop whose body contains logic to
        execute commands at specific times of day. More specifically, this process is responsible for creating,
        running, and closing each trading day. This process will get killed when the daemon stops.
        :return:
        """

        # service variables
        bTrading = False

        while True:

            # Get the current EST time and date
            oNow = datetime.datetime.now(timezone(Conf.MARKET_TIMEZONE))
            oNowDate = datetime.datetime(oNow.year, oNow.month, oNow.day)

            # Market is only open on week days from 9:30AM EST to 4:00PM EST
            bIsWeekDay = oNow.strftime('%A') not in ('Saturday', 'Sunday')
            bIsMarketHours = datetime.time(Conf.MARKET_OPEN_HOUR, Conf.MARKET_OPEN_MINUTE) <= datetime.time(oNow.hour, oNow.minute) \
                             and datetime.time(oNow.hour, oNow.minute) < datetime.time(Conf.MARKET_CLOSE_HOUR, Conf.MARKET_CLOSE_MINUTE)
            bIsOpen = bIsWeekDay and bIsMarketHours

            # it's after 5:00AM EST on a week day, let's collect the previous day's data and get everything set up
            if (bIsWeekDay and not bTrading and oNow.hour >= 5) or Conf.DAEMON_IS_DEBUG:

                # insert daily data from yesterday
                if Conf.DAEMON_INSERT_DAILY:
                    self.insertDailyData()

                # market vars, must be deleted at EOD
                aTickers = self.getQuandlTickers(AppVars.DATA_RT_TICKERS)
                aTickerChunks = Utils.chunk(aTickers, AppVars.CHUNK_TICKERS)
                del aTickers

                oPortfolioCollection = PortfolioCollection()

                # OK to start trading
                bTrading = True

            # the market is open! start collecting data and trading
            if (bTrading and bIsOpen and aTickerChunks) or Conf.DAEMON_IS_DEBUG:

                Logger.logApp("Starting a trading cycle...")

                # get current pricing data for all tickers and create a data map where keys are tickers and values are
                # the location of the ticker's value in the data list
                aDataList = []
                oDataMap = {}
                for iCurChunk in range(0, len(aTickerChunks)):
                    aChunkData = Api.getData(aTickerChunks[iCurChunk], AppVars.DATA_RT_DIMENSIONS)
                    for iDataIndex in range(len(aDataList), len(aDataList) + len(aChunkData)):
                        oDataMap[aChunkData[iDataIndex - len(aDataList)][Company.SYMBOL]] = iDataIndex
                    aDataList += aChunkData

                    del aChunkData
                    del iCurChunk
                    del iDataIndex

                # broadcast new data to all portfolios
                for oPortfolio in oPortfolioCollection.iteritems():
                    oAlgorithm = oPortfolio['algorithm']
                    oAlgorithm.run(oDataMap)

                # insert new data
                if aDataList:
                    TradingData.insert(self.oDB, TradingData.S_RT_DATA, aDataList)
                    self.oDB.commit()
                else:
                    Logger.logError('There was an error inserting real time data')
                del oDataMap

                Logger.logApp("Finished a trading cycle")

            # it's after 4:30PM EST on a week day, let's close the trading day and go to sleep
            if (bIsWeekDay and bTrading and (oNow.hour > 16 or (oNow.hour == 16 and oNow.minute > 30))) or Conf.DAEMON_IS_DEBUG:

                # insert portfolio data
                for oPortfolio in oPortfolioCollection.iteritems():
                    oAlgorithm = oPortfolio['algorithm']
                    oAlgorithm.insert()

                # clean up market vars
                del aTickerChunks
                del oPortfolioCollection

                # OK to stop trading
                bTrading = False

            time.sleep(Conf.DAEMON_SLEEP)
Example #30
def save_dict_to_json_file(dict_dat,
                           json_fname,
                           logger=Logger.get_stdout_logger()):
    with open(json_fname, "w") as f:
        json.dump(dict_dat, f)
    logger.info(" # Save dict to json file, {}".format(json_fname))
Example #31
class Database():
    def __init__(self, db_file=None, log_file=None, debug=False):
        self.dbms = 'unknown'
        self.metadata = sqlalchemy.MetaData()
        self.schema = {}
        self.tables = {}
        self.types = {}
        self.sources = {}
        self.logger = Logger(log_file, debug, 'Database.logger')
    
    def _detect_datatype(self, sample):
        """ Supported sqlalchemy types: Boolean, DateTime, Integer, Float, Text
        """
        #if not sample:
        #    #raise NotImplementedError('Expected a sample but got "%s"' % (sample))
        #    return sqlalchemy.types.String(convert_unicode=True)
        #if sample.title() in ['True', 'False']:
        #    return sqlalchemy.types.Boolean()
        #try:
        #    if int(sample):
        #        return sqlalchemy.types.Integer()
        #except ValueError:
        #    pass
        #try:
        #    if float(sample):
        #        return sqlalchemy.types.Float()
        #except ValueError:
        #    pass
        #for strf in ['%Y-%m-%dT%H:%M:%S', '%Y-%m-%d %H:%M:%S']:
        #    try:
        #        if datetime.datetime.strptime(sample, strf):
        #            return sqlalchemy.types.DateTime()
        #    except ValueError:
        #        pass
        return sqlalchemy.types.String(convert_unicode=True)
    
    def create_schema(self, table, column_names, samples):
        """ samples should be a generator yielding dicts from the csv (e.g. as 
            returned from DictReader()).
        """
        self.sources[table] = samples
        if not self.schema.has_key(table):
            self.schema[table] = []
        for column in column_names:
            index = column_names.index(column)
            sample = ''
            while True:
                try:
                    row = samples.next()
                    sample = row[column]
                    if sample != '':
                        break
                    if samples.line_num > 100:
                        break
                except StopIteration:
                    break
            if not sample:
                self.logger.log('could not find a sample for column: %s' % column, 'DEBUG')
            datatype = self._detect_datatype(sample)
            while True:
                if column.lower() in [name.lower() for name, value in self.schema[table]]:
                    self.logger.log('found duplicate column "%s"' % (column), 'INFO')
                    column += '_'
                else:
                    break
            self.schema[table].append((column, datatype))
            column_names[index] = column
        return column_names

    def create_table(self, table, primary_key):
        if not self.schema.get(table):
            raise RuntimeError('this database instance has no schema; please run create_schema first.')
        self.tables[table] = sqlalchemy.Table(table, self.metadata)
        for column, datatype in self.schema[table]:
            if column.lower() == primary_key.lower():
                self.tables[table].append_column(sqlalchemy.Column(column, datatype, primary_key=True))
            else:
                self.tables[table].append_column(sqlalchemy.Column(column, datatype))
        try:
            self.metadata.create_all(self.engine)
        except sqlalchemy.exceptions.OperationalError:
            self.logger.log('OperationalError while attempting to create database table!', 'ERROR')
            return
        class Type(object):
            def __init__(self):
                pass
        orm.mapper(Type, self.tables[table])
        self.types[table] = Type
        return self.types[table]
        
    def create_session(self):
        session = orm.sessionmaker()
        session.configure(bind=self.engine)
        self.session = session()
        return self.session
        
    def commit(self, count=False):
        self.logger.log('starting to flush cache to %s database.' % (self.dbms), 'INFO')
        self.session.commit()
        self.logger.log('finished flushing cache to %s database.' % (self.dbms), 'INFO')
        if count:
            self.logger.log('total record count is %s.' % (count), 'INFO')
        return
Example #32
def insert(oDB, strTableTemplate, aRows):
    """
    Inserts data into the tables of type strTableTemplate. Will create tables if needed. Each row must contain a
    Stock.DATE and Company.SYMBOL key. Each row represents a unique
        INSERT IGNORE INTO strTableTemplate
        (date, dim_name, value)
        VALUES (aRows[Stock.DATE], aRows[Dimension], aRows[DimensionValue])


    :param oDB: MySQLdb object
    :param strTableTemplate: Type of table that will receive inserts
    :param aRows: An array of objects where the object keys are Dimensions
    :return: boolean indicating the success of the inserts
    """

    bSuccess = True
    strColumns = '(' + ",".join(['date', 'dim_name', 'value']) + ')'
    while aRows:
        oRow = aRows.pop()
        strDateDim = ''
        strDateDimVal = ''

        if Stock.DATE in oRow and oRow[Stock.DATE] != 'N/A':
            strDateDim = Stock.DATE
            strDateDimVal = oRow[Stock.DATE]

        elif RealTime.RT_LAST_TRADE in oRow and oRow[RealTime.RT_LAST_TRADE] != 'N/A':
            strDateDim = RealTime.RT_LAST_TRADE
            strDateDimVal = oRow[RealTime.RT_LAST_TRADE]

        else:
            continue

        if Company.SYMBOL not in oRow or oRow[Company.SYMBOL] == 'N/A':
            continue

        strDate = datetime.datetime.strptime(strDateDimVal.replace('"', ''), '%m/%d/%Y').strftime('%Y-%m-%d')
        strSymbol = oRow[Company.SYMBOL]
        strTable = strTableTemplate.replace(TABLE_WILDCARD, strSymbol).replace('"', '')

        #create a table for this stock if it doesn't exist. Skip insert if there's a MySQL error
        if not createTable(oDB, strTable):
            bSuccess = False
            continue

        #insert
        for oDim, mVal in oRow.iteritems():

            #never insert the date dimension or any dimension with a 'N/A' value
            if oDim == strDateDim or mVal == 'N/A':
                continue

            #construct and execute INSERT statement
            strRow = '(' + ",".join([quoteString(strDate), quoteString(oDim), quoteString(mVal)]) + ')'
            strInsert = """
                INSERT IGNORE INTO {0}
                {1}
                VALUES
                {2};
                """.format(strTable, strColumns, strRow)


            if not Connection.insert(oDB, strInsert):
                Logger.logError("Failed to execute: " + strInsert)
                bSuccess = False

    return bSuccess
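
A hedged usage sketch for insert(); the table template, wildcard, and row values below are illustrative and assume the Stock, Company, and RealTime dimension constants defined elsewhere in this project:

# Illustrative only: one daily row keyed by the project's dimension constants.
aRows = [{
    Stock.DATE: '"01/15/2016"',
    Company.SYMBOL: 'AAPL',
    'open': '98.75',
    'close': '97.13',
}]
insert(oDB, 'daily_<TICKER>', aRows)  # the table template/wildcard is a placeholder
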
Example #33
import time
from Common import Request
from Common import Logger
try: import json
except ImportError: import simplejson as json

urlPhedex = 'https://phedex-dev.cern.ch/phedex/datasvc/json/ops/'
urlDelete = urlPhedex + 'delete'

def getOldDatasets():
    datasetList = []
    date = time.time() - 60*60*24*2
    urlSubscription = urlPhedex + 'subscriptions?node=T2_CH_CERN&dataset=/Log*/*/*&create_since=!%s' % date
    result = json.loads(request.send(urlSubscription))
    for dataset in result['phedex']['dataset']:
        datasetList.append(dataset['name'])
    return datasetList

if __name__ == '__main__':
    # Get request object for future PhEDEx calls
    request = Request()

    # get datasets with subscription to EOS older than N months
    Logger.log('retrieving list of old datasets')
    oldDatasets = getOldDatasets()
    Logger.log('number of old datasets: %s' % len(oldDatasets))

Example #34
    data = request.send(urlInject, {'data':xmlData,'node':'T0_CH_CERN_Export'})
    if data:
        # subscribed it so that transfer can start from CASTOR to EOS
        Logger.log("Subscribing at EOS : %s" % dsName)
        request.send(urlSubscribe, {'data':xmlData,'node':'T2_CH_CERN','group':'transferops','no_mail':'y','comments':'auto-approved log transfer from CASTOR to EOS'})
    else:
        Logger.log('Skipping subscription since injection failed')



if __name__ == '__main__':
    # Get request object for future PhEDEx calls
    request = Request()

    # get list of datasets injected before
    Logger.log('retrieving list of injected files in %s/%s' % (year,month))
    injectedFiles = getAlreadyInjectedFiles()
    Logger.log('number of already injected files: %s' % len(injectedFiles))

    #list log directories on CASTOR
    Logger.log('listing files under %s' % logCastorPath)
    cmd = 'nsls -lR --checksum %s' % logCastorPath

    process = subprocess.Popen(cmd.split(),
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    #process.wait()

    Logger.log('listing is completed, parsing the output')
    fileList = []
    currentWF = None
    #collect info from castor
    for line in process.stdout:
Example #35
# -*- coding:utf-8 -*-

from Common import Logger
from Common import HTMLRun

if __name__ == '__main__':
    logger = Logger.logger()
    logger.info("start")

    HTMLRun.html_run()