def __init__(self, ra, dec, depth=16, log=False, convertToArray=True):
    """*Set up the matcher from ra/dec coordinate lists*

    **Key Arguments:**
        - ``ra`` -- right ascension value(s)
        - ``dec`` -- declination value(s)
        - ``depth`` -- the HTM depth to build the matcher at (default 16)
        - ``log`` -- a logger, or False to fall back to a no-op logger
        - ``convertToArray`` -- convert the coordinates into an array? (default True)
    """
    self.convertToArray = convertToArray

    # BUG FIX: the original duplicated the `if log == False` test and
    # attached the else-branch to the inner copy, so a caller-supplied
    # logger was never assigned and `self.log` was left unset.
    if log == False:
        from fundamentals.logs import emptyLogger
        self.log = emptyLogger()
    else:
        self.log = log

    if convertToArray == True:
        from astrocalc.coords import coordinates_to_array
        ra, dec = coordinates_to_array(
            log=log,
            ra=ra,
            dec=dec
        )

    # ra/dec must expose `.size` here (numpy-like) -- if convertToArray is
    # False the caller is responsible for passing arrays already
    if ra.size != dec.size:
        raise ValueError("ra size (%d) != "
                         "dec size (%d)" % (ra.size, dec.size))

    super(Matcher, self).__init__(depth, ra, dec)
def __init__(self, ra, dec, depth=16, log=False):
    """*Set up the matcher from ra/dec coordinate lists*

    **Key Arguments:**
        - ``ra`` -- right ascension value(s)
        - ``dec`` -- declination value(s)
        - ``depth`` -- the HTM depth to build the matcher at (default 16)
        - ``log`` -- a logger, or False to fall back to a no-op logger
    """
    # BUG FIX: the original duplicated the `if log == False` test and
    # attached the else-branch to the inner copy, so a caller-supplied
    # logger was never assigned and `self.log` was left unset.
    if log == False:
        from fundamentals.logs import emptyLogger
        self.log = emptyLogger()
    else:
        self.log = log

    # always normalise the input coordinates to arrays
    from astrocalc.coords import coordinates_to_array
    ra, dec = coordinates_to_array(
        log=log,
        ra=ra,
        dec=dec
    )

    if ra.size != dec.size:
        raise ValueError("ra size (%d) != "
                         "dec size (%d)" % (ra.size, dec.size))

    super(Matcher, self).__init__(depth, ra, dec)
def __init__(self, depth=16, log=False):
    """*Set up the HTM wrapper at the requested depth*

    **Key Arguments:**
        - ``depth`` -- the HTM depth to use (default 16)
        - ``log`` -- a logger, or False to fall back to a no-op logger
    """
    # BUG FIX: the original duplicated the `if log == False` test and
    # attached the else-branch to the inner copy, so a caller-supplied
    # logger was never assigned and `self.log` was left unset.
    if log == False:
        from fundamentals.logs import emptyLogger
        self.log = emptyLogger()
    else:
        self.log = log

    # SWIG-style boilerplate: keep the underlying C object on self.this,
    # appending when a `this` list already exists.
    this = _htmc.new_HTMC(depth)
    try:
        self.this.append(this)
    except AttributeError:
        # narrowed from a bare `except:` -- only "self.this does not
        # exist yet" should fall through to plain assignment
        self.this = this
def _add_dictlist_to_database_via_load_in_file(masterListIndex, dbTablename, dbSettings, dateModified=False):
    """*load a list of dictionaries into a database table with load data infile*

    Pulls one batch of row-dictionaries from the module-level ``sharedList``,
    dumps it to a pipe-delimited CSV in ``/tmp``, bulk-loads that file into a
    temporary clone of the target table, then upserts the rows into the real
    table and cleans up.

    **Key Arguments:**
        - ``masterListIndex`` -- the index of the sharedList of dictionary lists to process
        - ``dbTablename`` -- the name of the database table to add the list to
        - ``dbSettings`` -- the dictionary of database settings
        - ``dateModified`` -- add a dateModified stamp with an updated flag to rows?

    **Return:**
        - None
    """
    # a fresh no-op logger is created per call (worker-process safe)
    from fundamentals.logs import emptyLogger
    log = emptyLogger()
    log.debug(
        'starting the ``_add_dictlist_to_database_via_load_in_file`` function')

    # `sharedList` is module-level shared state; each entry appears to be a
    # (dictList, count) pair -- TODO confirm against the dispatching caller
    global sharedList
    dictList = sharedList[masterListIndex][0]
    count = sharedList[masterListIndex][1]
    # `totalCount` is a module-level global -- presumably set before workers
    # are launched; verify against caller
    if count > totalCount:
        count = totalCount
    ltotalCount = totalCount  # NOTE(review): assigned but never used below

    # SETUP ALL DATABASE CONNECTIONS
    dbConn = database(log=log, dbSettings=dbSettings).connect()

    # unique per-call name, reused for both the temp table and the /tmp CSV
    now = datetime.now()
    tmpTable = now.strftime("tmp_%Y%m%dt%H%M%S%f")

    # CREATE A TEMPORARY TABLE TO ADD DATA TO (empty clone of the target)
    sqlQuery = """CREATE TEMPORARY TABLE %(tmpTable)s SELECT * FROM %(dbTablename)s WHERE 1=0;""" % locals(
    )
    writequery(log=log, sqlQuery=sqlQuery, dbConn=dbConn)

    # union of keys across all dictionaries -- rows may be ragged
    csvColumns = [k for d in dictList for k in list(d.keys())]
    csvColumns = list(set(csvColumns))
    csvColumnsString = (', ').join(csvColumns)
    # `dec` is a MySQL reserved word; the table column is named `decl`
    csvColumnsString = csvColumnsString.replace(u" dec,", u" decl,")

    df = pd.DataFrame(dictList)
    # \N is MySQL's LOAD DATA marker for NULL
    df.replace(['nan', 'None', '', 'NaN', np.nan], '\\N', inplace=True)
    df.to_csv('/tmp/%(tmpTable)s' % locals(), sep="|", index=False, escapechar="\\", quotechar='"', columns=csvColumns, encoding='utf-8')

    # bulk-load the CSV into the temporary table
    sqlQuery = """LOAD DATA LOCAL INFILE '/tmp/%(tmpTable)s' INTO TABLE %(tmpTable)s FIELDS TERMINATED BY '|' OPTIONALLY ENCLOSED BY '"' IGNORE 1 LINES (%(csvColumnsString)s);""" % locals()
    writequery(log=log, sqlQuery=sqlQuery, dbConn=dbConn)

    # build the ON DUPLICATE KEY UPDATE clause, one assignment per column
    updateStatement = ""
    for i in csvColumns:
        updateStatement += "`%(i)s` = VALUES(`%(i)s`), " % locals()
    if dateModified:
        updateStatement += "dateLastModified = NOW(), updated = 1"
    else:
        # strip the trailing ", "
        updateStatement = updateStatement[0:-2]

    # upsert from the temp table into the real table
    sqlQuery = """ INSERT IGNORE INTO %(dbTablename)s SELECT * FROM %(tmpTable)s ON DUPLICATE KEY UPDATE %(updateStatement)s;""" % locals()
    writequery(log=log, sqlQuery=sqlQuery, dbConn=dbConn)

    sqlQuery = """DROP TEMPORARY TABLE %(tmpTable)s;""" % locals()
    writequery(log=log, sqlQuery=sqlQuery, dbConn=dbConn)

    # best-effort cleanup of the staging CSV; failure here is non-fatal
    try:
        os.remove('/tmp/%(tmpTable)s' % locals())
    except:
        pass
    dbConn.close()

    log.debug(
        'completed the ``_add_dictlist_to_database_via_load_in_file`` function')
    return None
def test_logs_function(self):
    """*Exercise the no-op emptyLogger: constructing it and calling a log
    method must not raise.*"""
    from fundamentals.logs import emptyLogger
    log = emptyLogger()
    # BUG FIX: removed a leftover Python-2 `print` debug statement
    # (a syntax error under Python 3, and junk output in any case)
    log.error("crap")
def test_logs_function(self):
    """*Check that emptyLogger can be instantiated and that its ``error``
    method accepts a message without raising.*"""
    from fundamentals.logs import emptyLogger

    logger = emptyLogger()
    logger.error("crap")