class BaseConfig(object):
    """Shared configuration defaults for the voip-server application."""

    PROJECT = 'voip-server'
    PROJECT_ROOT = os.path.abspath(parent(parent(__file__)))
    APP_DIR = os.path.join(PROJECT_ROOT, 'core')

    DEBUG = True
    HOST = '0.0.0.0'
    PORT = 5000
    APP_DATE_FORMAT = '%Y-%m-%d %H:%M:%S'

    # Database credentials come from the environment (None when unset).
    DB_HOST = os.environ.get('DB_HOST')
    DB_NAME = os.environ.get('DB_NAME')
    DB_USER = os.environ.get('DB_USER')
    DB_PASS = os.environ.get('DB_PASS')
    DB_PORT = os.environ.get('DB_PORT')
    SERVER_KEY = os.environ.get('SERVER_KEY')

    MODEM_RECONNECT_RETRY_COUNT = 5
    MODEM_SLEEP_SECONDS = 5
    SCHEDULE_TIME_INTERVAL = 15

    # Choose the API endpoint based on the deployment environment.
    if os.environ.get('ENV') == 'production':
        API_URL = 'http://voip-api.ccprivate.me/api/'
    else:
        API_URL = 'http://localhost:1122/api/'

    # Log directory is created eagerly at class-definition time.
    LOG_FOLDER = os.path.join(INSTANCE_FOLDER_PATH, 'logs')
    make_dir(LOG_FOLDER)
def delete(self, paths, delete_marker=False):
    """Delete the given paths from the metastore.

    Completely removing files from the metastore can cause problems,
    because the S3 listing may still show the files even though the
    data is unavailable, which will cause MR jobs to fail.  Passing
    ``delete_marker=True`` instead tags the entries so they are hidden
    from the listing.

    Example: s.delete([path1, path2]) -> True
    """
    if self.disabled:
        return
    paths = self.__as_paths(paths)
    if delete_marker:
        # Soft delete: keep the rows but flag them as removed.
        for path in paths:
            item = self.db.get_item(path=path.parent().normalize(),
                                    file=path.filename())
            item['deleted'] = "true"
    else:
        # Hard delete: remove the rows in a single batched write.
        with self.db.batch_write() as batch:
            for path in paths:
                batch.delete_item(path=path.parent().normalize(),
                                  file=path.filename())
def process(obj):
    """Build a directory listing and an HTML breadcrumb for the code browser.

    :param obj: request-like object; only ``obj.path`` is read
    :return: dict with ``links`` (child entries) and ``codepath`` (breadcrumb)
    """
    allowed_roots = ['plugins', 'static', 'templates']
    path = obj.path
    links = []
    path = path or ''
    # Use the path itself when it is a directory, else its parent directory.
    rel_dir = parent(path) if not isdir(path) else path
    parts = rel_dir.split('/')
    # Breadcrumb: every ancestor component is a link; the last is plain text.
    codepath = ' / '.join(
        ['<a href="%s/">%s</a>' % (url_for('nest.code', path='/'.join(parts[:i+1])), file)
         for i, file in enumerate(parts[:-1])]) + ' / %s' % parts[-1]
    try:
        # NOTE(review): `and` binds tighter than `or`, so any path whose first
        # component is in allowed_roots passes even with a URL scheme present —
        # confirm this precedence is intended.
        if len(urlparse(path).scheme) == 0 and len(rel_dir) == 0 or rel_dir.split('/')[0] in allowed_roots:
            abs_path = lambda *rel: join(config.BASE_DIR, 'server', *rel)
            url_path = lambda *rel: join(url_for('nest.code'), *rel)
            # Link back to the parent directory when not at the top level.
            if path and len(parent(path)) > 0:
                links.append(dict(
                    href=url_path(parent(path)),
                    label='parent',
                    fa='fa-mail-reply'
                ))
            for thing in listdir(abs_path(rel_dir)):
                # Top level shows only the allowed roots; dot-prefixed
                # entries are always skipped.
                if (len(rel_dir) > 0 or thing in allowed_roots) \
                        and thing[0] != '.':
                    file = dict(
                        href=url_path(rel_dir, thing),
                        label=thing
                    )
                    # Mark the currently selected entry.
                    # NOTE(review): substring test (`in`), not equality — confirm.
                    if thing in path.split('/')[-1]:
                        file['class_'] = 's'
                    if isdir(abs_path(rel_dir, thing)):
                        file['href'] += '/'
                        file['fa'] = 'fa-folder'
                        file['label'] += '/'
                    else:
                        file['fa'] = 'fa-file-o'
                    links.append(file)
    except FileNotFoundError:
        # A nonexistent directory simply yields an empty listing.
        pass
    return dict(links=links, codepath=codepath)
def start(self):
    """Write ``self.content`` to ``self.path`` (creating parent directories
    as needed), register the configured URLs on the build step, and finish
    with SUCCESS.
    """
    path = FilePath(self.path)
    parent = path.parent()
    # Ensure the destination directory exists before writing.
    if not parent.exists():
        parent.makedirs()
    path.setContent(self.content)
    # .items() instead of the Python-2-only .iteritems(); works on 2 and 3.
    for name, url in self.urls.items():
        self.addURL(name, url)
    self.step_status.setText(self.describe(done=True))
    self.finished(SUCCESS)
def compile_message(idl_files, dest_dir, coredxdir, language):
    """Compile IDL files with the CoreDX ``coredx_ddl`` generator.

    Each IDL file's output goes into a destination subdirectory mirroring
    its three nearest ancestor directory names under ``dest_dir``.

    :param idl_files: iterable of paths to .idl files
    :param dest_dir: root directory for the generated sources
    :param coredxdir: CoreDX installation root (contains host/bin)
    :param language: target-language flag passed to coredx_ddl (e.g. "cpp")
    :raises RuntimeError: unless exactly one coredx_ddl binary is found
    """
    global cwd
    possible_coredx_ddls = glob.glob(coredxdir + "/host/bin/*coredx_ddl*")
    if len(possible_coredx_ddls) != 1:
        raise RuntimeError("Didn't find one coredx_ddl executable possibility, found", possible_coredx_ddls)
    if os.name == "posix":
        # POSIX: source the setup script once, then chain every compile
        # invocation into one shell command.
        source_command = "source " + cwd + "/install/local_setup.bash"
        commands = source_command
        for idl_file in idl_files:
            # Mirror the last three ancestor directories of the IDL file.
            this_dest_dir = dest_dir + '/' + basename(parent(parent(parent(idl_file)))) + '/' + basename(parent(parent(idl_file))) + '/' + basename(parent(idl_file))
            if not os.path.isdir(this_dest_dir):
                os.makedirs(this_dest_dir)
            commands += " && " + possible_coredx_ddls[0] + " -I " + cwd + "/install/share -d " + this_dest_dir + " -s -l " + language + " -f " + idl_file
        subprocess.run(commands, shell=True, executable='/bin/bash', check=True)
    else:
        # Windows: run the setup batch file plus compiles in chunks of `num`
        # per subprocess call — presumably to stay under the shell's
        # command-length limit (TODO confirm).
        commands = list()
        source_command = cwd + "/install/local_setup.bat"
        for idl_file in idl_files:
            this_dest_dir = dest_dir + '/' + basename(parent(parent(parent(idl_file)))) + '/' + basename(parent(parent(idl_file))) + '/' + basename(parent(idl_file))
            if not os.path.isdir(this_dest_dir):
                os.makedirs(this_dest_dir)
            commands.append(possible_coredx_ddls[0] + " -I " + cwd + "/install/share -d " + this_dest_dir + " -s -l " + language + " -f " + idl_file)
        num = 15  # commands chained per shell invocation
        for i in range(0, len(commands), num):
            my_commands = source_command
            i2 = i+num if i+num < len(commands) else len(commands)
            print(i, "to", i2, "of", len(commands))
            for c in commands[i : i2]:
                my_commands += " && " + c
            subprocess.run(my_commands, shell=True, check=True)
def delete(self, paths, delete_marker=False):
    """Remove paths from the metastore, or mark them as deleted.

    Fully deleting metastore entries is risky: the S3 listing can keep
    showing files whose data is gone, which makes MR jobs fail.  With
    ``delete_marker`` set, entries are only tagged ``deleted`` so the
    listing hides them instead.

    Example: s.delete([path1, path2]) -> True
    """
    if self.disabled:
        return
    resolved = self.__as_paths(paths)
    if not delete_marker:
        # Physically remove every entry in one batched write.
        with self.db.batch_write() as batch:
            for p in resolved:
                batch.delete_item(path=p.parent().normalize(), file=p.filename())
        return
    # Otherwise tag each entry so listings skip it.
    for p in resolved:
        record = self.db.get_item(path=p.parent().normalize(), file=p.filename())
        record["deleted"] = "true"
def add(self, paths):
    """Record a list of Paths in the file metastore; returns True on success.

    Example: s.add([path1, path2]) -> True
    """
    if self.disabled:
        return
    epoch = self.__time_now()
    # Every row shares one timestamp and is written in a single batch.
    with self.db.batch_write() as batch:
        for p in self.__as_paths(paths):
            batch.put_item(data={"path": p.parent().normalize(),
                                 "file": p.filename(),
                                 "epoch": epoch})
def add(self, paths):
    """Add Paths to the file metastore and return True on success.

    Example: s.add([path1, path2]) -> True
    """
    if self.disabled:
        return
    epoch = self.__time_now()
    paths = self.__as_paths(paths)
    with self.db.batch_write() as batch:
        for path in paths:
            # Assemble the row first, then hand it to the batch writer.
            row = {
                'path': path.parent().normalize(),
                'file': path.filename(),
                'epoch': epoch,
            }
            batch.put_item(data=row)
def process(obj):
    """Resolve ``obj.path`` to a servable source URL.

    If the string is already a URI it is passed through untouched;
    otherwise the API path prefix is added (falling back to
    ``public/index.html`` for directories with no regular file).

    :param obj: object exposing ``path`` and, optionally, ``request``
    :return: dict with ``src`` and, when resolved locally, ``path``
    """
    path = obj.path
    request = getattr(obj, 'request', None)
    API = url_for('nest.api_code')

    if request:
        # Rebuild an absolute URL on the current request's scheme and host.
        parse = urlparse(request.url)
        root = parse.scheme + '://' + parse.netloc
        return dict(src=urljoin(root, path), path=path)

    if len(urlparse(path).scheme) != 0:
        # Already a URI; nothing to resolve.
        return dict(src=path)

    rel_dir = path if isdir(path) else parent(path)

    def abs_path(*rel):
        return join(config.BASE_DIR, 'server/templates', *rel)

    def url_path(*rel):
        return join(API, *rel)

    if isdir(abs_path(path)):
        # Directory: serve its first regular file, else the default page.
        for entry in listdir(abs_path(rel_dir)):
            if not isdir(abs_path(rel_dir, entry)):
                return dict(src=url_path(rel_dir, entry), path=entry)
        return dict(src=url_path('public/index.html'), path='public/index.html')
    return dict(src=url_path(path), path=path)
def violin(m, xlabel, ylabel, isVertical=False, isLog=False, height=3,
           legend=True, legendDontOVerlap=False):
    """Draw a violin plot of the sample lists in ``m`` and save it as a PDF.

    The output file name is derived from ``xlabel`` + ``ylabel`` and written
    into the ``OutputPlots`` directory two levels above the working path.

    :param m: dict mapping category label -> list of numeric samples
    :param xlabel: x-axis label (also used for the output file name)
    :param ylabel: y-axis label (also used for the output file name)
    :param isVertical: draw violins vertically instead of horizontally
    :param isLog: use a logarithmic x scale
    :param height: figure height in inches
    :param legend: whether to draw the mean/median legend
    :param legendDontOVerlap: anchor the legend outside the axes
    """
    # rc('text', usetex=True)
    # Strip boilerplate prefixes before using the label in the file name.
    xlabel = xlabel.replace("Project-level frequency ", "")
    xlabel = xlabel.replace("Project-level proportion", "")
    outputPlot = join(parent(parent(realPath('__file__'))),
                      'OutputPlots/' + xlabel + ylabel + ".pdf").replace(' ', '')
    sns.set(font_scale=1.3)
    sns.set_style("whitegrid")
    fig, axes = plt.subplots(figsize=(9, height))
    # Order the categories by descending median so the plot reads top-down.
    m = dict(sorted(m.items(), key=lambda item: statistics.median(item[1]),
                    reverse=True))
    r = axes.violinplot(dataset=list(m.values()), showmeans=True,
                        showmedians=True, vert=isVertical)
    # Grey bodies; red mean markers, blue median markers.
    _set_box_color(r['bodies'], "#a9a9a9")
    r['cmeans'].set_color('red')
    r['cmedians'].set_color('blue')
    r['cbars'].set_color('#353839')
    r['cmaxes'].set_color('#353839')
    r['cmins'].set_color('#3b444b')
    # Clean up category labels for display.
    labels = list(m.keys())
    labels = list(map(lambda x: x.replace("P ", ""), labels))
    labels = list(map(lambda x: x.replace("F ", ""), labels))
    labels = list(
        map(lambda x: x.replace("Type Parameter", "Type Argument"), labels))
    # Proxy artists for the legend (violinplot has no labeled handles).
    red_patch = Line2D([0], [0], color='red', linewidth=2)
    blue_patch = Line2D([0], [0], color='blue', linewidth=2)
    # plt.legend([red_patch, blue_patch, extra1, extra2], ['Mean', 'Median', 'H(2)='+'{:.2e}'.format(h), 'p-value='+'{:0.2e}'.format(p)])
    # With more than one group, show the test statistic in the title.
    if len(m) > 1:
        h, p = do_stats_stuff(m)
        axes.set_title('p-value=' + ('{:0.2e}'.format(p) if p != 0.0 else '0')
                       + ' ' + ' H(2)=' + '{:.2e}'.format(h),
                       fontdict={'fontsize': 12})
    # plt.text(0.5,1, )
    # adjust_text(texts)
    if legend:
        if legendDontOVerlap:
            plt.legend([red_patch, blue_patch], ['Mean', 'Median'],
                       bbox_to_anchor=(1, 0.5), loc="center right",
                       bbox_transform=plt.gcf().transFigure)
        else:
            plt.legend([red_patch, blue_patch], ['Mean', 'Median'])
    if isLog:
        axes.set(xscale="log")
    # Tick labels go on whichever axis carries the categories.
    if isVertical:
        axes.set_xticks(np.arange(1, len(labels) + 1))
        axes.set_xticklabels(labels, linespacing=0.9)
    else:
        axes.set_yticks(np.arange(1, len(labels) + 1))
        axes.set_yticklabels(labels, linespacing=0.9)
    plt.tight_layout()
    plt.savefig(outputPlot, format="pdf", dpi=300, bbox_inches='tight')
    plt.show()
# NOTE(review): the first statements below look like the tail of a chunked
# command loop (cf. compile_message); the enclosing function is not visible
# in this chunk.
print(i, "to", i2, "of", len(commands))
for c in commands[i : i2]:
    my_commands += " && " + c
subprocess.run(my_commands, shell=True, check=True)

# --- top-level build/packaging script ---
build_path = cwd + '/build'
cpp_messages_path = build_path + '/cpp_messages'
csharp_messages_path = build_path + '/csharp_messages'

# Exactly one CoreDX installation is expected under install/.
possible_coredxdirs = glob.glob(cwd + "/install/coredx-*")
if len(possible_coredxdirs) != 1:
    raise RuntimeError("Didn't find one coredxdir possibility, found", possible_coredxdirs)
else:
    coredxdir = possible_coredxdirs[0]

# Start from clean output directories.
if os.path.isdir(cpp_messages_path):
    shutil.rmtree(cpp_messages_path)
if os.path.isdir(csharp_messages_path):
    shutil.rmtree(csharp_messages_path)

# Generate C++ and C# sources from every IDL file under build/.
idl_files = [i for i in glob.iglob(build_path + '/**/*.idl', recursive=True)]
compile_message(idl_files, cpp_messages_path, coredxdir, "cpp")
compile_message(idl_files, csharp_messages_path, coredxdir, "csharp")

# Package each generated tree as a tarball two directory levels up.
with tarfile.open(parent(parent(cpp_messages_path)) + "/cpp_messages.tar.gz", "w:gz") as tar:
    for dir in os.listdir(cpp_messages_path):
        tar.add(cpp_messages_path + '/' + dir, arcname=dir)
with tarfile.open(parent(parent(csharp_messages_path)) + "/csharp_messages.tar.gz", "w:gz") as tar:
    for dir in os.listdir(csharp_messages_path):
        tar.add(csharp_messages_path + '/' + dir, arcname=dir)
import os
import re
from os.path import dirname as parent

# Application root: three directory levels above this settings file.
APPROOT = parent(parent(parent((os.path.realpath(__file__)))))

# Basic configuration settings
DEBUG = True
HOST = '0.0.0.0'
PORT = 8152
# NOTE(review): placeholder — must be overridden with a strong unique value.
SECRET_KEY = 'a secret key --- MUST set this to something unique and strong'
FORCE_SSL = False

# DANGEROUS: Allow arbitrary Python queries from the browser
ALLOW_PYCODE = False

SYSTEM_EMAIL = '*****@*****.**'  # Emails sent from here

# DEPRECATION WARNING: Communication with backend data sources is moving into
# Metis, and will be removed in the next release of Jia
KRONOS_URL = 'http://localhost:8150'
KRONOS_NAMESPACE = 'kronos'

# Precompute settings
PRECOMPUTE = True
# The cache is written here; can be the same as KRONOS_URL
CACHE_KRONOS_URL = 'http://localhost:8150'
CACHE_KRONOS_NAMESPACE = 'default_cache'

# It is not recommended to expose the scheduler outside the LAN
SCHEDULER_HOST = '127.0.0.1'
SCHEDULER_PORT = 8157
SCHEDULER_DATABASE_URI = 'sqlite:///%s/scheduler.db' % APPROOT

# When precompute query code fails, emails will be sent to the following
SCHEDULER_FAILURE_EMAILS = [
#!/usr/bin/env python import unittest import sys import os from os.path import dirname as parent sys.path.insert(0, os.path.join(parent(parent(__file__)), 'src')) from robotremoteserver import RobotRemoteServer class NonServingRemoteServer(RobotRemoteServer): def __init__(self, library): self._library = library self._is_dynamic = self._get_routine('run_keyword') and \ self._get_routine('get_keyword_names') class StaticLibrary: def passing_keyword(self): pass def failing_keyword(self, exception, message='Hello, world!'): raise exception(message) def _not_included(self): """Starts with an underscore""" not_included = "Not a method or function" not_included_2 = NonServingRemoteServer # Callable but not method/function class HybridLibrary: def get_keyword_names(self): return [n for n in dir(StaticLibrary) if n.endswith('_keyword')]
#!/usr/bin/env python import unittest from os.path import dirname as parent import sys sys.path.insert(0, parent(parent(parent(__file__)))) from robotremoteserver import RobotRemoteServer class NonServingRemoteServer(RobotRemoteServer): def __init__(self, library): self._library = library class StaticLibrary: def passing_keyword(self): pass def failing_keyword(self, exception, message='Hello, world!'): raise exception(message) def _not_included(self): """Starts with an underscore""" not_included = "Not a method or function" not_included_2 = NonServingRemoteServer # Callable but not method/function class HybridLibrary: def get_keyword_names(self): return [n for n in dir(StaticLibrary) if n.endswith('_keyword')] def __getattr__(self, name): return getattr(StaticLibrary(), name) def not_included(self):
#!/usr/bin/env python import unittest from os.path import dirname as parent import sys sys.path.insert(0, parent(parent(parent(__file__)))) from robotremoteserver import RobotRemoteServer class NonServingRemoteServer(RobotRemoteServer): def __init__(self, library): self._library = library class StaticLibrary: def passing_keyword(self): pass def failing_keyword(self, exception, message='Hello, world!'): raise exception(message) def _not_included(self): """Starts with an underscore""" not_included = "Not a method or function" not_included_2 = NonServingRemoteServer # Callable but not method/function class HybridLibrary:
# python -u experiments/scripts/faster_rcnn_end2end.py ^ # --GPU 0 --NET VGG_CNN_M_1024 --DATASET pascal_voc ^ # EXP_DIR foobar RNG_SEED 42 TRAIN.SCALES "[400, 500, 600, 700]" # # Notes: # 1) the line-continuation symbol is ^ for cmd, use ` for powershell. # 2) "-u" flag stands for unbuffered std output import os, os.path as op from os.path import dirname as parent import argparse from datetime import datetime import sys import time FRCN_ROOT = parent(parent(parent(op.realpath(__file__)))) def at_fcnn(x): '''Convenience function to specify relative paths in code Args: x -- path relative to FRCN_ROOT''' # op.realpath will take care of the mixed Windows and Unix delimeters '/' and '\' return op.realpath(op.join(FRCN_ROOT, x)) # add 'tools' dir to the path sys.path.insert(0, at_fcnn('tools')) import train_net, test_net if __name__ == "__main__":
import numpy as np
import ctypes
import os
import time
from os.path import dirname as parent

# Native library location: the 'eeg-compute' directory next to this file.
APPROOT = os.path.join(parent(os.path.realpath(__file__)), 'eeg-compute')


class EEGSpecParams(ctypes.Structure):
    # ctypes mirror of the native spectrogram-parameters struct; field order
    # and types must match the C definition exactly.
    _fields_ = [
        ('filename', ctypes.POINTER(ctypes.c_char)),
        ('duration', ctypes.c_float),
        ('hdl', ctypes.c_int),
        ('spec_len', ctypes.c_int),
        ('fs', ctypes.c_int),
        ('nfft', ctypes.c_int),
        ('nstep', ctypes.c_int),
        ('shift', ctypes.c_int),
        ('nsamples', ctypes.c_int),
        ('nblocks', ctypes.c_int),
        ('nfreqs', ctypes.c_int),
    ]


# Pointer type used in the native function signatures below.
spec_params_p = ctypes.POINTER(EEGSpecParams)

# load shared library
_libspectrogram = np.ctypeslib.load_library('lib_eeg_spectrogram', APPROOT)

# print_spec_params_t
def test_keybase(self):
    """The /keybase.txt route serves the static keybase proof file verbatim."""
    response = self.app.get('/keybase.txt')
    self.assertEqual(response.status_code, 200)
    keybase_file = os.path.join(parent(parent(__file__)), 'static', 'keybase.txt')
    with open(keybase_file, 'rb') as fh:
        expected = fh.read()
    self.assertEqual(response.data, expected)
import os
import re
from os.path import dirname as parent

# Application root: three directory levels above this settings file.
APPROOT = parent(parent(parent((os.path.realpath(__file__)))))

# Basic configuration settings
DEBUG = True
HOST = '0.0.0.0'
PORT = 8152
# NOTE(review): placeholder — must be overridden with a strong unique value.
SECRET_KEY = 'a secret key --- MUST set this to something unique and strong'
FORCE_SSL = False

# DANGEROUS: Allow arbitrary Python queries from the browser
ALLOW_PYCODE = False

SYSTEM_EMAIL = '*****@*****.**'  # Emails sent from here

# Precompute settings
PRECOMPUTE = True

# The cache is written here; can be the same as KRONOS_URL
# NOTE(review): the next line looks redaction-mangled (a tuple of masked
# strings followed by a stray ']') and is not valid Python — restore the
# original URL and the email list it belonged to from version control.
CACHE_KRONOS_URL = 'http://*****:*****@yourcompany.com', '*****@*****.**', ]

# Kronos and metis pointers
# python -u experiments/scripts/faster_rcnn_alt_opt.py ^ # --GPU 0 --NET VGG_CNN_M_1024 --DATASET pascal_voc ^ # EXP_DIR foobar RNG_SEED 42 TRAIN.SCALES "[400, 500, 600, 700]" # # Notes: # 1) the line-continuation symbol is ^ for cmd, use ` for powershell. # 2) "-u" flag stands for unbuffered std output import os, os.path as op from os.path import dirname as parent import argparse from datetime import datetime import sys import time FRCN_ROOT = parent(parent(parent(op.realpath(__file__)))) def at_fcnn(x): '''Convenience function to specify relative paths in code Args: x -- path relative to FRCN_ROOT''' # op.realpath will take care of the mixed Windows and Unix delimeters '/' and '\' return op.realpath(op.join(FRCN_ROOT, x)) # add 'tools' dir to the path print at_fcnn('tools') sys.path.insert(0, at_fcnn('tools')) import train_faster_rcnn_alt_opt, test_net if __name__ == "__main__":
def resolve_root(filepath):
    """Walk up from *filepath* to the nearest ancestor directory containing
    a ``default`` subdirectory and return it with forward slashes.

    :param filepath: path to a file somewhere inside the tree
    :return: root directory path using '/' separators
    :raises FileNotFoundError: if no ancestor contains a ``default``
        directory (the original code looped forever in that case)
    """
    rootpath = parent(filepath)
    while not os.path.isdir(rootpath + "/default"):
        up = parent(rootpath)
        if up == rootpath:
            # dirname() is a fixed point at the filesystem root, so without
            # this guard the loop would spin forever.
            raise FileNotFoundError(
                "no 'default' directory found above %r" % filepath)
        rootpath = up
    return rootpath.replace("\\", "/")
from collections import Counter as C
from collections import namedtuple as nt
from os.path import dirname as parent
from os.path import join as join
from os.path import realpath as realpath
# import Analysis.CreatePlots as cp
import git
import time
import pandas as pd
import OldRW
# from Analysis.RW import readAll

# Project root: three levels above the realpath of the literal '__file__'
# string (resolved relative to the working directory, not this module).
fileDir = parent(parent(parent(realpath('__file__'))))
pathToTypeChanges = join(fileDir, 'TypeChangeMiner/Output/')

# Aggregated counters across all analyzed projects.
all_int_commands = C({})
projects = OldRW.readProject('Projects')
for p in projects:
    commits = OldRW.readCommit('commits_' + p.name)
    if len(commits) > 0:
        project_int_commands = C({})
        project_int_commands += C({'corpusSize': 1})
        for cmt in commits:
            project_int_commands += C({'noOfCommitsAnalyzed': 1})
            if len(cmt.refactorings) > 0:
                project_int_commands += C({'NoOfRefactoringsMined': len(cmt.refactorings)})
        # Fold this project's counters into the global totals.
        all_int_commands += project_int_commands
def parentislocked(self, path):
    """Return True when *path*'s parent directory has a lock entry in self.DB.

    :param path: path object exposing ``parent()`` and ``relativepath()``
    :return: bool
    """
    # Single membership test replaces the redundant
    # if/return True/return False chain (and drops the stray semicolons).
    return path.parent().relativepath() in self.DB