def return_sql(file_path, insert_id=None):
    """Probe a media file with ffprobe and build an INSERT statement.

    Runs ``ffprobe`` on *file_path*, extracts container-level metadata
    from the JSON it prints, and returns a SQL ``INSERT`` string for the
    ``media_file_info`` table.

    :param file_path: path of the media file to probe
    :param insert_id: ``media_id`` value for the row (``NULL`` if None)
    :return: the SQL INSERT statement as a string
    """
    # Run ffprobe with an argument list (shell=False) so a file path
    # containing spaces or shell metacharacters cannot break the command
    # line or inject shell code.
    result = Popen(
        ["ffprobe", "-v", "quiet", "-print_format", "json",
         "-show_format", "-show_streams", "-i", file_path],
        stdout=PIPE).stdout.read()
    result = json.loads(result)

    # NOTE(review): `text` is accumulated but never used afterwards; the
    # loop is kept because analytical_dict() is defined elsewhere and may
    # have side effects — confirm it is pure, then delete this loop.
    text = ""
    for stream in result.get("streams", []):
        text += analytical_dict(stream)

    format_info = result.get("format", {})
    format_tags = format_info.get("tags", {})
    duration = format_info.get("duration", "")
    media_size = format_info.get("size", "")
    bit_rate = format_info.get("bit_rate", "")
    major_brand = format_tags.get("major_brand", "")
    creation_time = format_tags.get("creation_time", "")

    def _sql_literal(value):
        # Render a Python value as a SQL literal: NULL for None, bare
        # numbers unchanged, everything else single-quoted with embedded
        # quotes doubled.  The original interpolated values unquoted,
        # which produced invalid SQL for string fields such as
        # creation_time and emitted the literal `None` for a missing id.
        if value is None:
            return "NULL"
        if isinstance(value, (int, float)):
            return str(value)
        return "'%s'" % str(value).replace("'", "''")

    sql = (
        "insert into media_file_info "
        "(media_id,duration,media_size,bit_rate,major_brand,creation_time) "
        "VALUES (%s,%s,%s,%s,%s,%s)" % (
            _sql_literal(insert_id),
            _sql_literal(duration),
            _sql_literal(media_size),
            _sql_literal(bit_rate),
            _sql_literal(major_brand),
            _sql_literal(creation_time)))
    return sql
def _null_activity():
    # Fresh all-None result dict so each caller gets its own mutable copy.
    return {
        'windowname': None,
        'processname1': None,
        'processname2': None,
        'pid': None
    }


def _xprop(*args):
    # Run `xprop` with the given arguments and return its raw stdout bytes.
    proc = Popen(['xprop'] + list(args), stdout=PIPE)
    stdout, _ = proc.communicate()
    return stdout


def _linux_activity():
    # Query the X server for the active window and pull out its title,
    # WM_CLASS pair and owning pid.  All xprop output is bytes, hence the
    # br'' (raw bytes) patterns — the original used non-raw bytes literals
    # whose \( and \w escapes trigger invalid-escape warnings.
    stdout = _xprop('-root', '_NET_ACTIVE_WINDOW')
    m = re.search(br'^_NET_ACTIVE_WINDOW.* ([\w]+)$', stdout)
    if m is None:
        return _null_activity()
    window_id = m.group(1)

    windowname = None
    stdout = _xprop('-id', window_id, 'WM_NAME')
    wmatch = re.match(br'WM_NAME\(\w+\) = (?P<name>.+)$', stdout)
    if wmatch is not None:
        windowname = wmatch.group('name').decode('UTF-8').strip('"')

    processname1, processname2 = None, None
    stdout = _xprop('-id', window_id, 'WM_CLASS')
    pmatch = re.match(br'WM_CLASS\(\w+\) = (?P<name>.+)$', stdout)
    if pmatch is not None:
        # WM_CLASS is reported as `"instance", "class"`
        processname1, processname2 = pmatch.group('name').decode(
            'UTF-8').split(', ')
        processname1 = processname1.strip('"')
        processname2 = processname2.strip('"')

    pidnumber = None
    stdout = _xprop('-id', window_id, '_NET_WM_PID')
    pidmatch = re.match(br'_NET_WM_PID\(\w+\) = (?P<pid>[0-9]+)$', stdout)
    if pidmatch is not None:
        pidnumber = pidmatch.group('pid').decode('UTF-8')

    return {
        'windowname': windowname,
        'processname1': processname1,
        'processname2': processname2,
        'pid': pidnumber
    }


def _darwin_activity():
    # Query AppKit/Quartz for the frontmost application and scan the
    # on-screen window list for a window owned by its pid.
    curr_app = NSWorkspace.sharedWorkspace().frontmostApplication()
    curr_pid = NSWorkspace.sharedWorkspace().activeApplication()[
        'NSApplicationProcessIdentifier']
    curr_app_name = curr_app.localizedName()
    options = kCGWindowListOptionOnScreenOnly
    windowList = CGWindowListCopyWindowInfo(options, kCGNullWindowID)
    for window in windowList:
        pid = window['kCGWindowOwnerPID']
        ownerName = window['kCGWindowOwnerName']
        windowTitle = window.get('kCGWindowName', u'Unknown')
        if curr_pid == pid:
            return {
                'windowname': windowTitle,
                'processname1': ownerName,
                'processname2': None,
                'pid': pid
            }
    # BUG FIX: the original fell off the end here (implicitly returning
    # None) when no on-screen window matched the frontmost pid; return a
    # coherent dict with what we do know instead.
    result = _null_activity()
    result['processname1'] = curr_app_name
    result['pid'] = curr_pid
    return result


def get_activityname():
    """Return details of the currently focused window/application.

    :return: dict with keys ``windowname``, ``processname1``,
        ``processname2`` and ``pid``; any field that cannot be determined
        is ``None``.  Supported on Linux (via ``xprop``) and macOS (via
        AppKit/Quartz); all other platforms get an all-None dict.
    """
    if sys.platform == "linux":
        return _linux_activity()
    elif sys.platform == "darwin":
        return _darwin_activity()
    elif sys.platform == "win32":
        print("Not implemented for win32 platform!")
        return _null_activity()
    else:
        print("Unknown platform! - " + sys.platform)
        return _null_activity()
def main(arguments=None):
    """
    *The main function used when ``cl_utils.py`` is run as a single script from the cl, or when installed as a cl command*
    """
    # setup the command-line util settings
    su = tools(
        arguments=arguments,
        docString=__doc__,
        logLevel="WARNING",
        options_first=False,
        projectName="breaker"
    )
    arguments, settings, log, dbConn = su.setup()

    # unpack remaining cl arguments using `exec` to setup the variable names
    # automatically
    # NOTE(review): every docopt argument becomes a local variable here via
    # `exec` (an option like `--update` becomes `updateFlag`, a positional
    # `<gwid>` becomes `gwid`), so the bare names used throughout the rest
    # of this function (init, far, gwid, update, plot, listen, skymap, ...)
    # are all created dynamically by this loop.  Python 2 only (`iteritems`,
    # `unicode`).
    for arg, val in arguments.iteritems():
        if arg[0] == "-":
            # option flags get a `Flag` suffix, dashes stripped
            varname = arg.replace("-", "") + "Flag"
        else:
            # positional/angle-bracket arguments keep their bare name
            varname = arg.replace("<", "").replace(">", "")
        if isinstance(val, str) or isinstance(val, unicode):
            exec(varname + " = '%s'" % (val,))
        else:
            exec(varname + " = %s" % (val,))
        if arg == "--dbConn":
            dbConn = val
        log.debug('%s = %s' % (varname, val,))

    ## START LOGGING ##
    startTime = times.get_now_sql_datetime()
    log.info(
        '--- STARTING TO RUN THE cl_utils.py AT %s' % (startTime,))

    # `init` command: open the user's settings file in the default editor.
    # Both `open` (macOS) and `start` (Windows) are attempted best-effort;
    # whichever is not available simply fails silently.
    if init:
        from os.path import expanduser
        home = expanduser("~")
        filepath = home + "/.config/breaker/breaker.yaml"
        try:
            cmd = """open %(filepath)s""" % locals()
            p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
        except:
            pass
        try:
            cmd = """start %(filepath)s""" % locals()
            p = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=True)
        except:
            pass

    # default far (false-alarm-rate threshold) if not given on the cl
    if not far:
        far = 1e-5

    # translate a human-readable wave name (e.g. "GW150914") into the
    # internal gravitational-wave id used as key in the settings file
    if gwid and gwid[:2] == "GW":
        for g in settings["gravitational waves"]:
            if settings["gravitational waves"][g]["human-name"] == gwid.strip():
                gwid = g
    # same translation for the `-w` option value
    if wFlag and wFlag[:2] == "GW":
        for g in settings["gravitational waves"]:
            if settings["gravitational waves"][g]["human-name"] == wFlag.strip():
                wFlag = g

    # CALL FUNCTIONS/OBJECTS

    # `update` command: refresh the PS1/ATLAS footprint database tables
    if update:
        pointingsFlag = True
        if nopointingsFlag:
            pointingsFlag = False
        u = update_ps1_atlas_footprint_tables(
            log=log,
            settings=settings,
            updateNed=updateNedFlag,
            updateAll=allFlag,
            updatePointings=pointingsFlag
        )
        u.get()

    # `plot history` command: observation-history plots
    if plot and history:
        p = plot_wave_observational_timelines(
            log=log,
            settings=settings,
            plotType="history"
        )
        p.get()

    # `plot timeline` command: observation-timeline plots for one wave
    if plot and timeline:
        if not pFlag:
            # default map projection
            pFlag = "mercator"
        if fFlag:
            # split the filters string into a list of single-letter filters
            filters = list(fFlag)
        else:
            filters = False
        p = plot_wave_observational_timelines(
            log=log,
            settings=settings,
            gwid=wFlag,
            plotType="timeline",
            allPlots=allFlag,
            telescope=tFlag,
            projection=pFlag,
            filters=filters,
            probabilityCut=True
        )
        p.get()

    # `plot sources` command: matched-source map plots
    if plot and sources:
        p = plot_wave_matched_source_maps(
            log=log,
            settings=settings,
            gwid=gwid
        )
        p.get()

    # `faker` command: generate a fake transient catalogue for an exposure
    if faker:
        f = generate_faker_catalogue(
            log=log,
            settings=settings,
            ps1ExpId=ps1ExpId,
            gwid=False
        )
        f.get()

    # `stats` command: survey footprint statistics for a wave/telescope
    if stats:
        s = survey_footprint(
            log=log,
            settings=settings,
            gwid=gwid,
            telescope=telescope
        )
        s.get()

    # `listen` command, window mode: grab maps from the last N minutes
    if listen and inLastNMins:
        timeNowMjd = mjdNow(
            log=log
        ).get_mjd()
        # NOTE(review): inLastNMins is divided by seconds-per-day
        # (60 * 60 * 24); if the value really is minutes the divisor
        # should presumably be minutes-per-day (60 * 24) — confirm units.
        startMJD = float(timeNowMjd) - float(inLastNMins) / (60 * 60 * 24.)
        this = mlisten(
            log=log,
            settings=settings,
            #label="EM_READY | EM_Selected | ADVOK",
            label="",
            farThreshold=far,
            startMJD=float(startMJD),
            endMJD=float(timeNowMjd) + 1.
        )
        this.get_maps()

    # `listen` command, range mode: grab maps between two MJDs
    if listen and mjdStart:
        this = mlisten(
            log=log,
            settings=settings,
            # label="EM_READY | EM_Selected | ADVOK",
            label="",
            farThreshold=far,
            startMJD=float(mjdStart),
            endMJD=float(mjdEnd)
        )
        this.get_maps()

    # `listen` command, daemon mode: listen indefinitely (or for `sec`
    # seconds when given)
    if listen and daemonFlag:
        if sec:
            daemon = float(sec)
        else:
            daemon = True
        this = mlisten(
            log=log,
            settings=settings,
            # label="EM_READY | EM_Selected | ADVOK",
            label="",
            farThreshold=far,
            daemon=daemon
        )
        this.get_maps()

    # `skymap` command: all-sky probability-map plots for one wave
    if skymap:
        # a database connection is only needed when overplotting exposures
        if exposuresFlag:
            databaseConnRequired = True
        else:
            databaseConnRequired = False
        plotter = plot_wave_observational_timelines(
            log=log,
            settings=settings,
            databaseConnRequired=databaseConnRequired
        )
        if exposuresFlag:
            # pull pointings/transients for the first 31 days of the event
            plotParameters, ps1Transients, ps1Pointings, atlasPointings, atlasTransients = plotter.get_gw_parameters_from_settings(
                gwid=gwid,
                inFirstDays=(0, 31)
            )
        else:
            ps1Transients = []
            atlasTransients = []
            ps1Pointings = []
            atlasPointings = []
        # NOTE(review): these two assignments unconditionally clear the
        # transient lists again, discarding any transients fetched above —
        # presumably deliberate (plot pointings only), but confirm.
        ps1Transients = []
        atlasTransients = []
        if not cFlag:
            # default map center (degrees)
            cFlag = 0.
        else:
            cFlag = float(cFlag)
        if defaultoutputFlag:
            outputDirectory = False
        else:
            outputDirectory = "."
        # mollweide all-sky plot (vector + raster formats)
        plotter.generate_probability_plot(
            gwid=gwid,
            ps1Transients=ps1Transients,
            atlasTransients=atlasTransients,
            ps1Pointings=ps1Pointings,
            atlasPointings=atlasPointings,
            pathToProbMap=pathToLVMap,
            fileFormats=["pdf", "png"],
            outputDirectory=outputDirectory,
            projection="mollweide",
            plotType="timeline",
            folderName="all_sky_plots",
            fitsImage=False,
            allSky=True,
            center=cFlag
        )
        # cartesian all-sky plot, also exported as a FITS image
        plotter.generate_probability_plot(
            gwid=gwid,
            pathToProbMap=pathToLVMap,
            fileFormats=["pdf", "png"],
            outputDirectory=outputDirectory,
            projection="cartesian",
            plotType="timeline",
            folderName="all_sky_plots",
            fitsImage=True,
            allSky=True,
            center=cFlag
        )

    # `contour` command: report which likelihood contour of the wave's
    # skymap a given transient location (ra/dec) falls inside
    if contour:
        from breaker.transients import annotator
        an = annotator(
            log=log,
            settings=settings,
            gwid=gwid
        )
        from astrocalc.coords import unit_conversion
        # ASTROCALC UNIT CONVERTER OBJECT
        converter = unit_conversion(
            log=log
        )
        # convert sexegesimal cl input to decimal degrees
        ra = converter.ra_sexegesimal_to_decimal(
            ra=ra
        )
        dec = converter.dec_sexegesimal_to_decimal(
            dec=dec
        )
        transients = {"cl": (ra, dec)}
        transientNames, probs = an.annotate(transients)
        percentage = probs[0]
        print "The transient lies within the inner %(percentage)s%% likelihood contour of event %(gwid)s" % locals()

    # tidy up any database connection opened during setup
    if "dbConn" in locals() and dbConn:
        dbConn.commit()
        dbConn.close()

    ## FINISH LOGGING ##
    endTime = times.get_now_sql_datetime()
    runningTime = times.calculate_time_difference(startTime, endTime)
    log.info('-- FINISHED ATTEMPT TO RUN THE cl_utils.py AT %s (RUNTIME: %s) --' %
             (endTime, runningTime, ))

    return