def func11(file_name, mid):
    if imghdr.what(file_name) in [None, 'gif']:
        path = eu('~/www/media/storage/movie/') + f'{mid}.jpg'
        subprocess.run(
            ['ffmpeg'] +
            '-ss 7 -t 1 -r 1'.split(' ') +
            ['-i', file_name] +
            '-f image2 -s 320x240'.split(' ') +
            [path]
        )
        path = eu('~/www/media/storage/movie/')
        cmd = (["ffmpeg", "-i", file_name] +
               '-map 0 -f segment -vcodec libx264 -acodec aac -strict experimental'.split(' ') +
               '-vf scale=640:-1 -vb 512k'.split(' ') +
               ["-segment_list", f'{path}ts/{mid}.m3u8',
                '-segment_time', '2', f'{path}ts/{mid}-%03d.ts'])
        subprocess.run(cmd)
    else:
        path = eu('~/www/media/storage/movie/') + f'{mid}.jpg'
        subprocess.run(
            ['ffmpeg'] +
            '-ss 7 -t 1 -r 1'.split(' ') +
            ['-i', file_name] +
            '-f image2 -s 3200x2400'.split(' ') +
            [path]
        )
    os.remove(file_name)
def createproject(name):
    if not exists(dataroot):
        os.mkdir(dataroot)
    if exists(dataroot + f"Project_{name}"):
        print(f"Project {name} already exists!")
        return 1
    os.mkdir(eu(f"{dataroot}Project_{name}"))
    os.mkdir(eu(f"{progroot}{name}"))
    py = (f'from os.path import expanduser as _eu\n'
          f'projectroot = _eu("{dataroot}Project_{name}")\n')
    with open(f"{progroot}{name}/__init__.py", "w") as handle:
        handle.write(py)
    print(f"Created SciProject: {name}")
def main():
    # Pull in the plugins
    if call('git submodule update --init', shell=True) != 0:
        if 'y' != input('Error during submodule (=plugin) init or update. Continue setup? [y*/n] '):
            return 1

    global backup
    backup = input('Delete existing files? [y/n]: ') != 'y'

    here_to_home('bash')
    bashrc = eu('~/.bashrc')
    bashappend = open('_bashrc.append').read()
    try:
        if bashappend not in open(bashrc).read():
            with open(bashrc, 'a') as f:
                f.write(bashappend)
    except IOError:
        # Assume a non-existing file and create one.
        # (Or no permission; this won't change anything in that case.)
        with open(bashrc, 'w+') as f:
            f.write(bashappend)

    here_to_home('vimrc')
    here_to_home('vim')
    here_to_home('inputrc')
def updateproj(name):
    try:
        os.chdir(eu(f"~/SciProjects/{name}"))
    except FileNotFoundError:
        print("No such project:", name)
        if input("Should I create it? y/N >") == "y":
            return createproject(name)
    return subprocess.run(["git", "pull"])
def calculateOR(n11, n10, n01, n00):
    ORnum = n11*n00
    ORden = n01*n10
    if ORnum == ORden == 0:
        OR_uMLE, wald_MLE_CI95L, wald_MLE_CI95R = 'NaN', '', ''
        wald_MLE_CI90L, wald_MLE_CI90R = '', ''
    elif ORnum == 0:
        OR_uMLE, wald_MLE_CI95L, wald_MLE_CI95R = 0, '', ''
        wald_MLE_CI90L, wald_MLE_CI90R = '', ''
    elif ORden == 0:
        OR_uMLE, wald_MLE_CI95L, wald_MLE_CI95R = 'InF', '', ''
        wald_MLE_CI90L, wald_MLE_CI90R = '', ''
    else:
        OR_uMLE = ORnum/ORden
        # Wald test CI
        lnOR = numpy.log(OR_uMLE)
        SE_lnOR = numpy.sqrt(sum(map(lambda _: 1/_, [n11, n10, n01, n00])))
        wald_MLE_CI95L, wald_MLE_CI95R = numpy.exp(lnOR - 1.96*SE_lnOR), numpy.exp(lnOR + 1.96*SE_lnOR)
        wald_MLE_CI90L, wald_MLE_CI90R = numpy.exp(lnOR - 1.645*SE_lnOR), numpy.exp(lnOR + 1.645*SE_lnOR)

    if n11 + n10 + n01 + n00 >= 1000:
        # G-test
        cmd = ['Rscript', eu("~/Dropbox/age_final_submission/ageflu/scripts/ageflu_gtest.R")] + list(map(str, [n11, n10, n01, n00]))
        Routput = subprocess.check_output(cmd, universal_newlines=True, stderr=subprocess.PIPE).split('\n')
        pval = float(Routput[1])
        # small sample size CI - NaN
        ss_CI90L, ss_CI90R, ss_CI95L, ss_CI95R = 'NaN', 'NaN', 'NaN', 'NaN'
    else:
        # Barnard's test
        cmd = ['Rscript', eu("~/Dropbox/age_final_submission/ageflu/scripts/ageflu_barnard.R")] + list(map(str, [n11, n10, n01, n00]))
        Routput = subprocess.check_output(cmd, universal_newlines=True, stderr=subprocess.PIPE).split('\n')
        pval = float(Routput[-2].replace('[1]', '').split()[1])
        # Agresti and Min unconditional exact CI for small sample sizes
        cmd = ['Rscript', eu("~/Dropbox/age_final_submission/ageflu/scripts/agresti_and_min.R")] + list(map(str, [n11, n10, n01, n00, 0.9]))
        Routput = list(filter(None, subprocess.check_output(cmd, universal_newlines=True, stderr=subprocess.PIPE).split('\n')))
        ss_CI90L, ss_CI90R = map(lambda _: _ if _ == 'Inf' else float(_), re.findall(r'(\d+\.\d+|\d+|Inf)', Routput[-1]))
        cmd = ['Rscript', eu("~/Dropbox/age_final_submission/ageflu/scripts/agresti_and_min.R")] + list(map(str, [n11, n10, n01, n00, 0.95]))
        Routput = list(filter(None, subprocess.check_output(cmd, universal_newlines=True, stderr=subprocess.PIPE).split('\n')))
        ss_CI95L, ss_CI95R = map(lambda _: _ if _ == 'Inf' else float(_), re.findall(r'(\d+\.\d+|\d+|Inf)', Routput[-1]))

    return OR_uMLE, wald_MLE_CI90L, wald_MLE_CI90R, wald_MLE_CI95L, wald_MLE_CI95R, pval, ss_CI90L, ss_CI90R, ss_CI95L, ss_CI95R
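# A minimal, self-contained sketch of the Wald interval computed in calculateOR
# above, for a hypothetical 2x2 table (n11, n10, n01, n00) = (12, 30, 8, 50);
# it does not need the external R scripts.
import numpy

n11, n10, n01, n00 = 12, 30, 8, 50
OR = (n11 * n00) / (n01 * n10)                       # unconditional MLE odds ratio = 2.5
SE_lnOR = numpy.sqrt(1/n11 + 1/n10 + 1/n01 + 1/n00)  # standard error of ln(OR)
ci95 = (numpy.exp(numpy.log(OR) - 1.96 * SE_lnOR),
        numpy.exp(numpy.log(OR) + 1.96 * SE_lnOR))
print(OR, ci95)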
def __init__(self):
    try:
        with open(eu("~/.config/waybackweather/darksky.json"), "r", encoding="utf8") as conf_json:
            conf = json.load(conf_json)
            self.API_KEY = conf["key"]
    except Exception as e:
        raise e
    self.session = requests.Session()
    self.session.headers.update({"User-Agent": f"WaybackWeather/{darksky.__version__}"})
def link_with_backup(source, link_name):
    full_link_name = eu(link_name)
    print('Installing ' + source + ' -> ' + full_link_name)
    try:
        os.symlink(source, full_link_name)
    except OSError:
        if backup:
            os.rename(full_link_name,
                      full_link_name + '.' + str(int(time())) + '.dotfiles_backup')
        else:
            os.remove(full_link_name)
        os.symlink(source, full_link_name)
def get_number_of_monitors() -> int:
    """It gets the number of monitors.

    Returns
    -------
    int
        The number of active non-mirrored monitors.
    """
    try:
        output = subprocess.check_output(
            eu("~/.config/qtile/check_number_of_monitors.sh"),
            shell=True).decode()[:-1]
    except subprocess.SubprocessError:
        return 0
    return int(output)
def link_with_backup(source, link_name, symbolic=True):
    full_link_name = eu(link_name)
    print('Installing ' + source + ' -> ' + full_link_name)
    makedirs(dirname(full_link_name))
    try:
        if symbolic:
            os.symlink(source, full_link_name)
        else:
            os.link(source, full_link_name)
    except OSError:
        if backup:
            os.rename(full_link_name,
                      full_link_name + '.' + str(int(time())) + '.dotfiles_backup')
        else:
            # Try to remove this thing. Non-empty directories don't work yet.
            try:
                os.remove(full_link_name)
            except OSError:
                os.rmdir(full_link_name)
        # Retry with the requested link type now that the old target is gone.
        if symbolic:
            os.symlink(source, full_link_name)
        else:
            os.link(source, full_link_name)
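# Hedged usage sketch for link_with_backup (assumes the module-level `backup`
# flag has been set, e.g. by main(); the paths are illustrative only):
backup = True
link_with_backup('vimrc', '~/.vimrc')                            # symlink, backing up any existing file
link_with_backup('ssh_config', '~/.ssh/config', symbolic=False)  # hard link where a symlink is not allowed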
def widget_layout(self) -> list[_Widget]:
    """Module for showing layout and number of windows.

    Returns
    -------
    list[libqtile.widget.base._Widget]
        The list of widgets to add to the bar.
    """
    # Widget for layout
    widget_layout = CurrentLayoutIcon(
        custom_icon_paths=[eu("~/.config/qtile/icons")],
        scale=0.5,
        padding=-5,
        foreground=self.colors["green"],
    )
    # Widget for number of windows
    widget_nw = WindowCount(**self.fonts["Normal"],
                            foreground=self.colors["green"],
                            show_zero=True)
    widgets = [widget_layout, widget_nw]
    return widgets
def btn4push(self, event):
    # Move every listed file to ~/Downloads/変換済 ("converted") and append a
    # timestamped "移動しました" (moved) entry to the log box.
    for file in self.text_entry.GetValue().split('\n'):
        shutil.move(file, eu('~/Downloads/変換済/'))
    self.text2.SetValue(self.text2.GetValue() + '%s : 移動しました' % datetime.now())
def btn2push(self, event):
    ls = glob.glob(eu('~/Downloads/**/*.mp4'), recursive=True)
    self.text_entry.SetValue('\n'.join(ls))
#! encoding: utf8
from unittest import TestCase, main

from proto.ils import qrmcp, reduction, search, ils
from numpy.random import rand, randint
from numpy import array, dot
from numpy.testing import assert_allclose, assert_array_equal
from oct2py import octave, Oct2Py
from os.path import expanduser as eu

octave.addpath(eu('~') + '/code/pylgrim/ils')
oc = Oct2Py()

m, n = 5, 3
print("Initial size of matrix B: %d × %d" % (m, n))
# B = rand(m, n)
B = array([[-1.38483,  0.53704,  0.14925],   # ans:
           [ 1.05734,  0.61432,  0.94116],   # v1  v2
           [-0.33438, -0.13293, -0.60755],   #  1   1
           [ 0.26814,  0.41059, -0.52649],   # -2  -1
           [-0.66335, -1.42715, -0.97412]])  #  3   2
z_true = array([1, -2, 3]).reshape(3, 1)  # column vector


class TestQrmcp(TestCase):
    def test_qrmcp(self):
        y = dot(B, z_true) + 1e-3 * rand(m, 1)
        R_qrmcp, piv, y = qrmcp(B, y)
        R_true = [[-1.58217452, -1.20917892, -0.94591479],
                  [0, 1.19444506, -0.11444606],
print("------------------------") print() print("Done!") print() print("Successfully converted %d out of %d files!" % (len(all_midi_matrices), num_files)) print() print("Saving array as .npy file...") print() np.save('midimatrices.npy', np.array(all_midi_matrices)) print("All done!") if converting_errors: print("Here are the ones that didn't convert: ") for i, f in converting_errors: print(f"{i}: {f}") with open('converting_errors.txt', 'w') as fh: fh.write("%s\n" % f) if parsing_errors: print("Here are the files that couldn't be parsed: ") for i, f in parsing_errors: print(f"{i}: {f}") with open('parsing_errors.txt', 'w') as fh: fh.write("%s\n" % f) if __name__ == '__main__': MidiMatrix(join(eu('~'), 'VGmidi/Balloon_Fight_-_Main_Theme_%28Dancing_Balloon%21_remix%29.mid')).mid_to_matrix()
'''Reads in addresses from DB stored at path, or backup, to label phone numbers.'''
from helpers.utils import filter_based_on_col
from os.path import expanduser as eu
import os
import pandas as pd
import numpy as np
import sqlite3
import sys

COMP_PATH = eu(
    "~/Library/Application Support/AddressBook/AddressBook-v22.abcddb")
ENDING = "AddressBook-v22.abcddb"
SRCS = eu("~/Library/Application Support/AddressBook/Sources")
MO_PTH = '31bb7ba8914766d4ba40d6dfb6113c8b614be442'
MO_BASE = eu('~/Library/Application Support/MobileSync/Backup/')


def extract_contacts(path):
    '''Get Contact Data from PHONENUMBER, RECORD Tables. As in icloud_query.py'''
    try:
        ad_db = sqlite3.connect(path)
        jn = pd.read_sql(
            """SELECT ZFULLNUMBER, ZSORTINGFIRSTNAME FROM ZABCDPHONENUMBER
               LEFT OUTER JOIN ZABCDRECORD
               ON ZABCDPHONENUMBER.ZOWNER = ZABCDRECORD.Z_PK""", ad_db)
        clean = lambda x: filter(lambda y: '0' <= y <= '9', x)[-10:]
        jn['ZFULLNUMBER'] = jn.ZFULLNUMBER.apply(clean)
        cstart = zip(jn.ZFULLNUMBER, jn.ZSORTINGFIRSTNAME)
        clist = {x[0]: x[1][:len(x[1]) / 2] for x in cstart}
        return clist
from multiprocessing import Lock
from multiprocessing import Queue
from os.path import expanduser as eu

import cma

from gammatone import gammatone
from dnn_protos import writeDoubleMatrix
from color_log import add_coloring_to_emit_ansi

POP_SIZE = 50
SIGMA = 2
BOUNDS = (0, 10)
CMA_JOBS = 4
VERBOSE = True

RUNS_DIR = eu("~/dnn/runs/mpl")
MPL_BIN = eu("~/dnn/bin/mpl")
MPL_CONF_JSON = eu("~/dnn/mpl.json")
MPL_TEST_DATA = eu("~/dnn/ts/riken_14chan_3LRR.pb")
MPL_DIM = 0

cma_conf = {
    "log_freq": {"def": True},                       # non learn
    "a":    {"def": 10.0},                           # non learn
    "b":    {"def": 0.1, "min": 0.1, "max": 10},
    "n":    {"def": 6, "min": 0.1, "max": 10},
    "fnum": {"def": 24},     # "min": 12, "max": 24},
    "lb":   {"def": 0.0, "min": 0, "max": 5},
    "hb":   {"def": 50.0, "min": 10, "max": 300},
    "L":    {"def": 100.0},  # "min": 10, "max": 100},
    "Tmax": {"def": 1.0, "min": 0.1, "max": 10},
from multiprocessing import Lock
from multiprocessing import Queue
from os.path import expanduser as eu

import cma

from gammatone import gammatone
from dnn_protos import writeDoubleMatrix
from color_log import add_coloring_to_emit_ansi

POP_SIZE = 50
SIGMA = 2
BOUNDS = (0, 10)
CMA_JOBS = 4
VERBOSE = True

RUNS_DIR = eu("~/dnn/mpl/cma_runs")
MPL_BIN = eu("~/cpp/build/bin/mpl")
MPL_CONF_JSON = eu("~/cpp/mpl.json")
MPL_TEST_DATA = eu("~/dnn/ts/riken_14chan_3LRR.pb")
MPL_DIM = 0

cma_conf = {
    "log_freq": {"def": True},                       # non learn
    "a":    {"def": 10.0},                           # non learn
    "b":    {"def": 0.1, "min": 0.1, "max": 10},
    "n":    {"def": 6, "min": 0.1, "max": 10},
    "fnum": {"def": 24},     # "min": 12, "max": 24},
    "lb":   {"def": 0.0, "min": 0, "max": 5},
    "hb":   {"def": 50.0, "min": 10, "max": 300},
    "L":    {"def": 100.0},  # "min": 10, "max": 100},
    "Tmax": {"def": 1.0, "min": 0.1, "max": 10},
#! encoding: utf8
from unittest import TestCase, main

from proto.ils import qrmcp, reduction, search, ils
from numpy.random import rand, randint
from numpy import array, dot
from numpy.testing import assert_allclose, assert_array_equal
from oct2py import octave, Oct2Py
from os.path import expanduser as eu

octave.addpath(eu('~') + '/code/pylgrim/ils')
oc = Oct2Py()

m, n = 5, 3
print("Initial size of matrix B: %d × %d" % (m, n))
# B = rand(m, n)
B = array([
    [-1.38483,  0.53704,  0.14925],   # ans:
    [ 1.05734,  0.61432,  0.94116],   # v1  v2
    [-0.33438, -0.13293, -0.60755],   #  1   1
    [ 0.26814,  0.41059, -0.52649],   # -2  -1
    [-0.66335, -1.42715, -0.97412]
])                                    #  3   2
z_true = array([1, -2, 3]).reshape(3, 1)  # column vector


class TestQrmcp(TestCase):
    def test_qrmcp(self):
        y = dot(B, z_true) + 1e-3 * rand(m, 1)
        R_qrmcp, piv, y = qrmcp(B, y)
        R_true = [[-1.58217452, -1.20917892, -0.94591479],
import os

from os.path import expanduser as eu
from hac import GreedyAgglomerativeClusterer
import networkx as nx
from collections import defaultdict
from itertools import combinations

# # Clustering!
# In this notebook, we perform the actual clustering. Given an input scenario, we:
#  1. perform all possible clusterings, from 1 to $\min\{|\mathcal{V}|,|\mathcal{H}|\}$;
#  2. for each possible clustering, check if a _straightforward_ clustering, where bigger
#     VNF clusters are associated with bigger host clusters, is _prima facie_ feasible.
#
# The paths of the input and output files (both in JSON) are specified as environment variables.

# In[7]:

input_path = os.getenv('INPUT_PATH', eu('test/release2/test_req_sap_cp.json'))
output_path = os.getenv(
    'OUTPUT_PATH', eu('test/release2/test_req_sap_cp_cluster_decisions.json'))

# Before creating a VNF graph, we treat those **SAPs associated to a VL** as VNFs with zero requirements.

# In[15]:

def cluster(PARequest):
    """Executes the clustering of the PARequest

    :PARequest: REST API PARequest
    :returns: PARequest dictionary with the clustering decisions
    """
import os, time, sys
from win10toast import ToastNotifier
from infi.systray import SysTrayIcon
from os.path import expanduser as eu


def resource_path(relative_path):
    """Used by PyInstaller builds so the script can resolve bundled resources via a relative path."""
    if hasattr(sys, '_MEIPASS'):
        return os.path.join(sys._MEIPASS, relative_path)
    return os.path.join(os.path.abspath('.'), relative_path)


# resources
ico = resource_path("./gui/logo.ico")
toaster = ToastNotifier()
home = eu("~")
t = time.localtime()
current_time = time.strftime("%Y%m%d_%H%M%S", t)

try:
    os.mkdir(eu("~" + "\\pictures\\screenshot\\"))
    home = eu("~" + "\\pictures\\screenshot\\")
except FileExistsError:
    home = eu("~" + "\\pictures\\screenshot\\")


def notification(message):
    """Show a Windows 10 notification."""
    toaster.show_toast("Printscreen", message,
                       icon_path=ico,
"e", [ Key([], "l", lazy.shutdown(), desc="Log off"), Key([], "r", lazy.spawn("systemctl reboot"), desc="Reboot PC"), Key([], "p", lazy.spawn("systemctl poweroff"), desc="Shutdown"), ], mode="exit: [l]ogout, [r]eboot, [p]oweroff", ), # Search for app. Customized to use dracula theme. # Tested with dmenu 5.0 Key([mod], "d", lazy.spawn("dmenu_run"), desc="Spawn dmenu"), # Select monitor layout Key( [mod, "shift"], "a", lazy.spawn(eu("~/.bin/select-monitor.sh")), desc="Select monitor layout using script", ), ] # ============ Media Controls ============ playerctl = "playerctl -p spotify,%any" keys += [ # Volume control # Tested with pamixer 1.4.5 Key([], "XF86AudioRaiseVolume", lazy.spawn("pamixer -i 5"), desc="Increase volume"), Key([], "XF86AudioLowerVolume",
'''Reads in addresses from DB stored at path, or backup, to label phone numbers.'''
from helpers.utils import filter_based_on_col
from os.path import expanduser as eu
import os
import pandas as pd
import numpy as np
import sqlite3
import sys

COMP_PATH = eu("~/Library/Application Support/AddressBook/AddressBook-v22.abcddb")
ENDING = "AddressBook-v22.abcddb"
SRCS = eu("~/Library/Application Support/AddressBook/Sources")
MO_PTH = '31bb7ba8914766d4ba40d6dfb6113c8b614be442'
MO_BASE = eu('~/Library/Application Support/MobileSync/Backup/')


def extract_contacts(path):
    '''Get Contact Data from PHONENUMBER, RECORD Tables. As in icloud_query.py'''
    try:
        ad_db = sqlite3.connect(path)
        jn = pd.read_sql("""SELECT ZFULLNUMBER, ZSORTINGFIRSTNAME FROM ZABCDPHONENUMBER
                            LEFT OUTER JOIN ZABCDRECORD
                            ON ZABCDPHONENUMBER.ZOWNER = ZABCDRECORD.Z_PK""", ad_db)
        clean = lambda x: filter(lambda y: '0' <= y <= '9', x)[-10:]
        jn['ZFULLNUMBER'] = jn.ZFULLNUMBER.apply(clean)
        cstart = zip(jn.ZFULLNUMBER, jn.ZSORTINGFIRSTNAME)
        clist = {x[0]: x[1][:len(x[1])/2] for x in cstart}
        return clist
    except pd.io.sql.DatabaseError as e:
        print "Non-fatal DB error ON path: ", path
        return {}
#!/usr/bin/env python
import os
from os.path import expanduser as eu

os.system('rm ' + eu('~/.config/i3/blur.png'))
pic = eu('~/.config/i3/toBlur.png')
os.system('scrot {0}'.format(pic))
os.system('convert {0} -filter Gaussian -blur -2x10 '.format(pic) + eu('~/.config/i3/blur.png'))
os.system('rm -f {0}'.format(pic))
os.system('i3lock -e -f -i ' + eu('~/.config/i3/blur.png'))
def main():
    # Pull in the plugins
    if call(['git', 'submodule', 'update', '--init']) != 0:
        if input('Error during submodule (=plugin) init or update. Continue setup? [Y/n] ') not in ('y', 'Y', ''):
            return 1

    # Pull in Vundle (for vim, should make it a submodule at some point?)
    if not exists(here('_vim/bundle/vundle')):
        if call(['git', 'clone', 'https://github.com/gmarik/vundle.git', here('_vim/bundle/vundle')]) != 0:
            if input('Error getting Vundle. Continue setup? [y/N] ') not in ('y', 'Y'):
                return 1

    global backup
    backup = input('Delete existing files (answering no backs them up)? [y/N]: ') not in ('y', 'Y')

    here_to_home('bash')
    bashrc = eu('~/.bashrc')
    bashappend = open('_bashrc.append').read()
    try:
        if bashappend not in open(bashrc).read():
            with open(bashrc, 'a') as f:
                f.write(bashappend)
    except IOError:
        # Assume a non-existing file and create one.
        # (Or no permission; this won't change anything in that case.)
        with open(bashrc, 'w+') as f:
            f.write(bashappend)

    here_to_home('vimrc')
    here_to_home('vim')
    here_to_home('tmux.conf')
    here_to_home('inputrc')
    here_to_home('Xresources')
    here_to_home('gitconfig')
    here_to_home('gitignore')
    here_to_home('pythonrc.py')
    here_to_home('juliarc.jl')
    here_to_home('ssh_config', 'ssh/config', symbolic=False)  # Can't be a symlink due to permissions.
    here_to_home('config/awesome')
    here_to_home('config/htop')

    # Disabled ones don't seem to work.
    nb_ext(['autoscroll.js'], enable=False)
    nb_ext(['breakpoints.js'], enable=False)
    nb_ext(['init_cell.js'])
    nb_ext(['notify.js'])
    nb_ext(['main.js', 'button.png'], 'equation_numbering')
    nb_ext(['ExecuteTime.js', 'ExecuteTime.css'], 'execute_time')
    nb_ext(['main.js', 'main.css'], 'toc')

    from distutils import spawn
    if spawn.find_executable('fish'):
        here_to_home('config/fish/solarized.fish')
        here_to_home('config/fish/config.fish')
        here_to_home('config/fish/functions/fish_prompt.fish')
        here_to_home('config/fish/functions/grolschnext.fish')
        here_to_home('config/fish/functions/grolschpp.fish')
        here_to_home('config/fish/functions/grolschprev.fish')
    else:
        print("WARNING: skipped fish, it seems not to be installed.")

    # Reload some stuff
    if 'DISPLAY' in os.environ:
        call(['xrdb', '-nocpp', '-merge', eu('~/.Xresources')])

    print("Don't forget to possibly run the following: ")
    print("- Open vim and run `:BundleInstall` or `:BundleUpdate`")
    print("- `cd _vim/bundle/YouCompleteMe/` and `python install.py --clang-completer/--all`")
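# Hedged sketch of what the here_to_home helper used above might look like; the
# real repo defines its own version, this one only illustrates the underscore
# naming convention visible in here('_vim/...') and '_bashrc.append':
def here_to_home(name, target=None, symbolic=True):
    target = name if target is None else target
    link_with_backup(here('_' + name), '~/.' + target, symbolic=symbolic)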
parser.add_argument('-dg', '--drop-zero-sig-genes', action='store_true',
                    help='Drop any gene that is not expressed in any cellular signal from the fit. With a zero intercept, these genes would add an infinite contribution to the log-likelihood. So the main way this changes the fit is via the goodness-of-fit metric and possibly the size of the required intercept term if there are many such genes.')
parser.add_argument('-ie', '--init-log-exposure', type=float,
                    help='Initial value of log-exposure to use when training the model. A moderately negative value helps speed up the fit. If set too far from 0 the fit will fail to find a sensible solution.',
                    default=-10)
parser.add_argument('-r', '--learn-rate', type=float, help='The learn rate to use.', default=0.01)
parser.add_argument('-p', '--poll-interval', type=int, help='Poll fit after this many iterations.', default=100)
parser.add_argument('--max-it', type=int, help='Maximum number of iterations.', default=1e7)
parser.add_argument('-tl', '--log-likelihood-tolerance', type=float,
                    help='Termination condition. Stop when the improvement in the log-likelihood is less than this.',
                    default=1e-6)
parser.add_argument('-ts', '--sparsity-tolerance', type=float,
                    help='Termination condition. Stop when the improvement in the sparsity is less than this.',
                    default=1e-4)
args = parser.parse_args()

###################
# Save parameters #
###################
print("Launching deconvolution with arguments:")
print(args)
tmp = vars(args)
args.output = eu(args.output)
with open(args.output + '_arguements.tsv', 'w') as f:
    x = f.write('Parameter\tValue\n')
    x = f.write('Launched\t' + str(datetime.datetime.now()) + '\n')
    x = f.write('WorkDir\t' + os.getcwd() + '\n')
    for k in tmp.keys():
        x = f.write(k + '\t')
        x = f.write(str(tmp[k]) + '\n')

##################
# Load bulk data #
##################
# Determine the list of files to load.
# Go through them one at a time and either parse the file list or store the path.
bulkSrcs = []
def autostart():
    script = eu("~/.config/qtile/autostart.sh")
    subprocess.call([script])
def compressuser(path):
    from os.path import expanduser as eu
    if path.startswith(eu('~').lower()):
        return path.replace(eu('~').lower(), "~", 1)
    return path
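# Hedged usage sketch for compressuser (the output depends on the current
# user's home directory; the trailing path components are made up):
from os.path import expanduser
home = expanduser('~').lower()
print(compressuser(home + '/projects/data.csv'))  # -> ~/projects/data.csv
print(compressuser('/tmp/data.csv'))              # -> unchanged: /tmp/data.csv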
"""
If this cell works, then you have all the requirements for this tutorial installed correctly!
"""
import numpy as np
import pylab as plt
%matplotlib inline

"""
Adding the location of the library should only be necessary if you haven't already added GPy to your PYTHONPATH.
"""
from os.path import expanduser as eu
import sys
# Change this to your GPy installation location
GPy_path = '~/DPhil/Code/libraries/GPy'
sys.path.append(eu(GPy_path))
import GPy

# <codecell>

# Misc settings
fs = (9, 5)  # figure size; change this if you need to
ms = 11      # marker size in plots
sd = 6       # random seed

# <markdowncell>

# ## Kernels
# The workhorse of the Gaussian Process is the kernel. This defines the relationship between points and contains any prior knowledge of the domain:
#
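# Hedged sketch of defining and inspecting a kernel with GPy (standard GPy API;
# the variance and lengthscale values are illustrative, not from the tutorial):
k = GPy.kern.RBF(input_dim=1, variance=1.0, lengthscale=1.0)
print(k)               # parameter table for the kernel
X = np.linspace(-3, 3, 50)[:, None]
K = k.K(X, X)          # 50x50 covariance matrix implied by the kernel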
#!/usr/bin/env python
import os
from os.path import expanduser as eu

display_list = eu('~/.config/i3/displays')
last_external = eu('~/.config/i3/last_ext')
lid = eu('~/.config/i3/lid')

os.system('xrandr >{0}'.format(display_list))
os.system('cat /proc/acpi/button/lid/LID0/state >{0}'.format(lid))

displays = 0
for line in open(lid):
    line = line.rstrip().split()
    if line[1] == 'closed':
        lid_open = False
    else:
        lid_open = True
print(lid_open)

resolutions = []
IDs = []
conn = False
for line in open(display_list):
    line = line.rstrip()
    if ' connected ' in line:
        id = line.split()[0]
        if id == 'eDP1':
            continue
params = params.parse_args()

# parse fasta aln
fdat = {k: translatedDNA(v) for k, v in parsefasta(params.fa).items()}

# parse for subtype
try:
    subtype = re.search('(H3N2|pH1N1|H1N1pdm09|BVic|BYam)', params.input_file).group()
    aln_subtype = {'H3N2': 'H3', 'pH1N1': 'pH1abs', 'H1N1pdm09': 'pH1abs',
                   'BVic': 'B73', 'BYam': 'B73'}[subtype]
except:
    raise Exception('Can\'t parse for aln_subtype.')

# copy structure file into current working directory
# only H3 has chains = 2
struc_file = {'H3N2': '4we8_Vic11_Repair.pdb', 'H1N1pdm09': '4jtv_Repair.pdb',
              'BVic': '4fqm_Repair.pdb', 'BYam': '4m44_Repair.pdb'}[subtype]
struc_dir = eu('~/Dropbox/age_final_submission/ageflu/files/')
cmd = 'cp {}{} ./'.format(struc_dir, struc_file)
subprocess.call(cmd, shell=True)

# parse input file
fhandle = filter(None, open(params.input_file, 'rU').readlines())
all_mutation_list = []
pt_to_charge_change_count = {}
pt_to_mutation_count = {}
pt_to_glyco_change_count = {}
pos_to_pt_glycogainloss_count = {}
for line in fhandle:
    line = line.strip().split('\t')
import json, sys, os
from os.path import expanduser as eu
from hac import GreedyAgglomerativeClusterer
import networkx as nx
from collections import defaultdict

# # Clustering!
# In this notebook, we perform the actual clustering. Given an input scenario, we:
#  1. perform all possible clusterings, from 1 to $\min\{|\mathcal{V}|,|\mathcal{H}|\}$;
#  2. for each possible clustering, check if a _straightforward_ clustering, where bigger
#     VNF clusters are associated with bigger host clusters, is _prima facie_ feasible.
#
# The paths of the input and output files (both in JSON) are specified as environment variables.

# In[31]:

input_path = os.getenv('INPUT_PATH', eu('test/release1/test_req.json'))
output_path = os.getenv('OUTPUT_PATH', eu('test/release1/test_req_cluster_decisions.json'))

# First things first, we build two graphs, one for VNFs and one for hosts. Weights are:
#  * for the VNF graph, the traffic between VNFs;
#  * for the host graph, the capacity of links.

# In[32]:

def cluster(PARequest):
    """Executes the clustering of the PARequest

    :PARequest: REST API PARequest
    :returns: PARequest dictionary with the clustering decisions
    """
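# Hedged sketch of the two weighted graphs described above (the field names are
# hypothetical; the real PARequest schema may differ):
def build_graphs(vnf_links, host_links):
    """vnf_links: iterable of (vnf_a, vnf_b, traffic);
    host_links: iterable of (host_a, host_b, capacity)."""
    vnf_graph = nx.Graph()
    for a, b, traffic in vnf_links:
        vnf_graph.add_edge(a, b, weight=traffic)    # weight = traffic between VNFs
    host_graph = nx.Graph()
    for a, b, capacity in host_links:
        host_graph.add_edge(a, b, weight=capacity)  # weight = capacity of the link
    return vnf_graph, host_graph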
#!/usr/bin/env python
import os, sys
from os.path import expanduser as eu

sinks = eu('~/.config/i3/sinks')
os.system('pactl list sinks >{0}'.format(sinks))
action = sys.argv[1]

last_sink = '0'
for line in open(sinks):
    line = line.rstrip()
    if 'Sink ' in line:
        last_sink = line.split('Sink #')[1]
    if 'alsa_output.pci-0000_00_1f.3.analog-stereo' in line:
        if action == 'inc':
            os.system('pactl set-sink-volume {0} +5%'.format(last_sink))
        if action == 'dec':
            os.system('pactl set-sink-volume {0} -5%'.format(last_sink))
        if action == 'mute':
            os.system('pactl set-sink-mute {0} toggle'.format(last_sink))
        sys.exit()