コード例 #1
0
ファイル: RoutingTable.py プロジェクト: DSG-UPC/Vivaldi-TTFB
 def store(self):
     """Atomically rewrite the routing-table file, sorted by total cost.

     Takes the shared file lock so readers (e.g. getVivaldiProxy in the
     companion script) never see a half-written table.  Header columns
     are tab-separated: ip, est_rtt, TTFB, Total, proxy, myProxy.
     """
     with simpleflock.SimpleFlock("/tmp/foolock1"):
         with open(self.outfile, 'wb') as f:
             # "ip\test_rtt" renders as "ip<TAB>est_rtt" — the \t is the separator.
             f.write("ip\test_rtt\tTTFB\tTotal\tproxy\tmyProxy\n")
             for r in sorted(self.routes.values(),
                             key=operator.attrgetter('total')):
                 # str(r) instead of the non-idiomatic r.__str__()
                 f.write(str(r))
コード例 #2
0
ファイル: exp_load_curl.py プロジェクト: DSG-UPC/Vivaldi-TTFB
def getVivaldiProxy():
    """Re-read the proxy routing table and refresh module-level EMA state.

    Scans 'proxy_route_table' (tab-separated) under the shared file lock.
    The first row whose column 5 is 'True' names the active proxy
    (column 0) and its TTFB estimate (column 2).  When the active proxy
    changes, either fresh EMA trackers are created for it or the existing
    ones are reseeded with the TTFB read from the table.
    """
    with simpleflock.SimpleFlock('/tmp/foolock1'):
        with open('proxy_route_table', 'r') as f:
            global PROXY, ema025, temp_ema025, ema005, temp_ema005, ema075, temp_ema075
            r = csv.reader(f, delimiter='\t')
            new_proxy = None
            new_ttfb = None  # fix: was left unbound when no row matched
            for row in r:
                if row[5] == 'True':
                    new_proxy = row[0]
                    new_ttfb = row[2]
                    break
            if new_proxy != PROXY:
                PROXY = new_proxy
                if PROXY not in ema025:
                    # First sighting of this proxy: start fresh EMAs at
                    # each smoothing factor (0.25 / 0.05 / 0.75).
                    ema025[PROXY] = EMA(0.25)
                    temp_ema025[PROXY] = EMA(0.25)
                    ema005[PROXY] = EMA(0.05)
                    temp_ema005[PROXY] = EMA(0.05)
                    ema075[PROXY] = EMA(0.75)
                    temp_ema075[PROXY] = EMA(0.75)
                elif new_ttfb is not None:
                    # Known proxy: reseed every EMA with the table's TTFB.
                    # The guard fixes a NameError that occurred when the
                    # table had no 'True' row (new_ttfb was never bound).
                    ema025[PROXY].last = float(new_ttfb)
                    temp_ema025[PROXY].last = float(new_ttfb)
                    ema005[PROXY].last = float(new_ttfb)
                    temp_ema005[PROXY].last = float(new_ttfb)
                    ema075[PROXY].last = float(new_ttfb)
                    temp_ema075[PROXY].last = float(new_ttfb)
コード例 #3
0
def getVivaldiDistance(proxy):
    """Return the Vivaldi-estimated distance to *proxy*, in seconds.

    Looks *proxy* up in 'proxy_route_table' (tab-separated; column 1 holds
    the estimated RTT in milliseconds) under the shared file lock.
    Returns 0.0 when the proxy is not listed.

    Fix: the original mixed tabs and spaces for indentation (a TabError
    under Python 3); indentation is now uniform.
    """
    result = 0.0
    with simpleflock.SimpleFlock('/tmp/foolock1'):
        with open('proxy_route_table', 'r') as f:
            reader = csv.reader(f, delimiter='\t')
            for row in reader:
                if row[0] == proxy:
                    result = float(row[1]) / 1000  # ms -> s; last match wins
    return result
コード例 #4
0
def add(maps):
    """Add *maps* to the mapcache, serialising concurrent runs via a file lock.

    When invoked without CLI arguments the call is guarded by a 15-second
    file lock on MAPCACHE_CONFIG; invocations with arguments go straight
    to _add().  Any failure (lock timeout included) is reported and
    swallowed.
    """
    try:
        if len(sys.argv) <= 1:
            # Only the no-argument path contends with other processes,
            # so only it takes the lock.
            with simpleflock.SimpleFlock(MAPCACHE_CONFIG, timeout=15):
                _add(maps)
        else:
            _add(maps)
    except Exception:
        # Was a bare `except:`, which also swallowed KeyboardInterrupt
        # and SystemExit.  print("...") is valid Python 2 and 3.
        print("Mapcache filelock!")
def chooseRepos(filename, count):
    """Pop the first *count* lines from *filename* and return them stripped.

    The read and the in-place truncation (via sed) happen under a file
    lock so concurrent workers never claim the same lines.

    Raises:
        EOFError: when the file had no lines left.
    """
    choices = []
    with sf.SimpleFlock("/tmp/" + filename + "_lock"):
        with open(filename) as rf:
            choices.extend(line.strip() for line in rf.readlines()[:count])
        # Argument-list form avoids shell injection through *filename*
        # (previously built a shell string with shell=True).
        call(["sed", "-i", "1,{0}d".format(count), filename])
    if not choices:
        raise EOFError("File Empty")
    return choices
コード例 #6
0
    def save_shared_values(self):
        """Snapshot every tracked environment variable into the cache file."""
        # Refresh the tracked values in place from the environment;
        # keys absent from _env are recorded as None.
        for key in list(self.shared):
            self.shared[key] = _env.get(key)

        # Serialise writers on the lock file before rewriting the cache.
        with simpleflock.SimpleFlock(self.lock_file):
            with open(self.cache_file, "wb") as fh:
                pickle.dump(self.shared, fh)
        self.timestamp = time.time()
コード例 #7
0
def getVivaldiProxy():
    """Set the module-level PROXY to the routing table's active proxy.

    Scans 'proxy_route_table' (tab-separated) under the shared lock; the
    first row whose column 5 is 'True' wins.  PROXY becomes None when no
    row matches.

    Fix: the original mixed tabs and spaces for indentation (a TabError
    under Python 3); indentation is now uniform.
    """
    with simpleflock.SimpleFlock('/tmp/foolock1'):
        with open('proxy_route_table', 'r') as f:
            global PROXY
            reader = csv.reader(f, delimiter='\t')
            new_proxy = None
            for row in reader:
                if row[5] == 'True':
                    new_proxy = row[0]
                    break
            PROXY = new_proxy
コード例 #8
0
ファイル: RoutingTable.py プロジェクト: DSG-UPC/Vivaldi-TTFB
 def readTTFB(self):
     """Read the latest 'ip,ttfb' measurement from FILE and, when it
     belongs to our current proxy, feed it to updateTTFB.

     The read happens under the shared measurement lock so the writer
     never races us mid-line.
     """
     if self.proxy:
         if os.path.isfile(FILE):
             values = []
             with simpleflock.SimpleFlock("/tmp/foolock"):
                 with open(FILE, 'rb') as infile:
                     values = infile.readline().strip().split(',')
             if len(values) < 2:
                 # Empty or truncated measurement line: the original
                 # raised IndexError here; nothing to update.
                 return
             ip = values[0]
             ttfb = values[1]
             if self.proxy == ip:
                 self.updateTTFB(self.proxy, ttfb, 0)
コード例 #9
0
ファイル: rabbitmq.py プロジェクト: smuel09/zabbix-utils
 def __enter__(self):
     """Populate self.payload from the cache, rebuilding it when stale."""
     cache = Cache(self.url, verbose=self.verbose)
     with simpleflock.SimpleFlock(cache.fileLock, timeout=10):
         if cache.is_valid():
             # Fresh cache: serve it as-is.
             self.payload = cache.read()
         else:
             # Cache missing/expired: rebuild all three sections while
             # holding the lock, then persist for the next caller.
             self.payload['overview'] = self.getOverview()
             self.payload['queues'] = self.getQueueStats()
             self.payload['healthcheck'] = self.getHealthcheck()
             cache.write(self.payload)
     if self.verbose > 1:
         print(json.dumps(self.payload))
     return self
コード例 #10
0
 def _load_shared_values(self):
     """Load shared variables from the cache file and apply them to _env.

     A value of None means the key was unset when saved, so it is
     deleted from _env; everything else is written through.
     """
     with simpleflock.SimpleFlock(self.lock_file):
         try:
             # Context manager ensures the handle is closed even when
             # unpickling fails (was pickle.load(open(...)), which
             # leaked the file object on error).
             with open(self.cache_file, "rb") as fh:
                 self.shared = pickle.load(fh)
         except IOError:
             # No cache file yet: keep current state silently.
             return
         except Exception as the_exception:
             print("Failed to reload shared values.", the_exception)
             return
     for key, value in self.shared.items():
         if value is None and key in _env:
             del _env[key]
         else:
             _env[key] = value
     self.timestamp = time.time()
コード例 #11
0
def add_record(file_name):
    """Append the request's JSON body, stamped with the server's UTC time,
    to /dev/shm/logging/<file_name>.json.

    Writes are serialised by a per-file lock (5 s timeout) and bounded by
    the timeout() guard.  Returns 201 with the stored record (when debug)
    or a confirmation message; rejects empty names and non-JSON bodies.
    """
    if len(file_name) == 0:
        abort(400)
    if not request.json:
        # Bad request body is a client error: was mistakenly returning
        # 201 Created; 400 matches the abort(400) used just above.
        return jsonify({'status': 'error: not a valid json'}), 400
    out = request.json
    file_path = "/dev/shm/logging/"
    if not os.path.exists(file_path):
        os.makedirs(file_path)
    with open(file_path + file_name + '.json', "a") as myfile:
        out['server_time'] = str(datetime.datetime.utcnow())
        # NOTE(review): the file is opened before the lock is taken; only
        # the write itself is serialised — confirm that is sufficient for
        # append-mode consumers.
        with simpleflock.SimpleFlock(file_path + file_name + '.lock',
                                     timeout=5):
            with timeout():
                myfile.write(json.dumps(out) + '\n')
    return jsonify({'status': (out if debug else 'log added')}), 201
コード例 #12
0
def set_cache(obj):
    """Write *obj* to the on-disk cache unless another writer holds the lock.

    A lock file coordinates cache updates among threads and processes.
    With timeout=0 the lock is non-blocking: when another process already
    holds it, the resulting EWOULDBLOCK is swallowed on the assumption
    that that writer is taking care of the update.

    Arguments:
        obj - object to write to the cache

    Return:
        none
    """
    lock_path = _pathname(__LOCK_FILE)
    try:
        with simpleflock.SimpleFlock(lock_path, timeout=0):
            _pickle_and_cache_models(obj)
    except IOError as err:
        if err.errno == errno.EWOULDBLOCK:
            # Lock contention is expected; somebody else is updating.
            return
        # Anything else is a genuine failure: record it and propagate.
        logging.error('creating lock file {}: {}'.format(lock_path, err))
        raise
コード例 #13
0
		ema025[PROXY].compute(final_out)
		temp_ema025[PROXY].last = ema025[PROXY].last
		ema005[PROXY].compute(final_out)
		temp_ema005[PROXY].last = ema005[PROXY].last
		ema075[PROXY].compute(final_out)
		temp_ema075[PROXY].last = ema075[PROXY].last
	else:	
		temp_time = time()
		if (temp_time-last_time)>last_value:
			last_value = temp_time-last_time 
		#out = last_value
		temp_ema025[PROXY].compute(float(last_value))
		temp_ema005[PROXY].compute(float(last_value))
		temp_ema075[PROXY].compute(float(last_value))
		type = 'est'
		
	t = time()-START
	if (t-last_vivaldi) > VIVALDI_PERIOD:
		vivaldi_time = getVivaldiDistance(PROXY)
		last_vivaldi = t
	with open(FILE,'a') as f:
		f.write("{0:.1f},{1},{2},{3},{4},{5},{6},{7},{8},{9}\n".format(t,out,ema025[PROXY].last,temp_ema025[PROXY].last,ema005[PROXY].last,temp_ema005[PROXY].last,ema075[PROXY].last,temp_ema075[PROXY].last,type,vivaldi_time))
	with open(EXP_FILE,'a') as f:
		f.write("{0:.1f},{1}\n".format(t,PROXY))
	with simpleflock.SimpleFlock('/tmp/foolock'):
		with open(VIV_FILE,'wb') as viv:
			viv.write("{},{}\n".format(PROXY,temp_ema005[PROXY].last))
	#getVivaldiProxy()
	#print("{0:.1f},{1},{2},{3},{4},{5},{6},{7},{8},{9}\n".format(t,out,ema025.last,temp_ema025.last,ema005.last,temp_ema005.last,ema075.last,temp_ema075.last,type,vivaldi_time))

コード例 #14
0
ファイル: check_art.py プロジェクト: tarsbase/numutracker_api
def check_art():
    """Run the art-checking job, guarded so only one instance executes."""
    lock_name = "check-art.lock"
    try:
        with simpleflock.SimpleFlock(lock_name, timeout=1):
            run_command()
    except BlockingIOError:
        # Another instance already holds the lock; log and bail out.
        numu_app.logger.error("Unable to achieve lock.")
def writeEmails(user_emails_list):
    """Append (username, email) pairs to the shared CSV, under a file lock."""
    with sf.SimpleFlock("/tmp/scrape_write_email"):
        with open("username_email_github.csv", "a") as f:
            # One "user,email\n" record per pair, written in one pass.
            f.writelines(user + "," + mail + "\n"
                         for user, mail in user_emails_list)
def patchUnfinished(username_list_filename, choices):
    """Push unprocessed *choices* back onto the username list file."""
    lock_path = "/tmp/" + username_list_filename + "_lock"
    with sf.SimpleFlock(lock_path):
        with open(username_list_filename, "a") as wf:
            # Re-append the lines in one write, newline-terminated.
            wf.write("\n".join(choices) + "\n")
import csv

# Collect every XMI link from the project list, rewriting GitHub
# "tree/master" URLs to "raw/master" so the raw file content downloads.
# The CSV handle is now closed deterministically (was left open).
all_xmis = []
with open("UMLFiles_List_V2.0.csv") as csvfile:
    for (name, link) in csv.reader(csvfile):
        if link.endswith("xmi"):
            all_xmis.append((name, link.replace("tree/master", "raw/master")))

import requests
import glob
import simpleflock
from os.path import exists

for project_name, xmi_link in all_xmis:
    filename = "scraped_xmis/" + project_name.replace(
        "/", "_") + xmi_link.split("/")[-1]

    # Claim the file atomically: under the selection lock, either skip it
    # (another worker already claimed it) or create an empty placeholder.
    with simpleflock.SimpleFlock("/tmp/scrape_select_file"):
        if exists(filename):
            continue
        else:
            open(filename, "w").close()

    print(xmi_link)
    page = requests.get(xmi_link)

    # Download happened outside any lock; only the write is serialised.
    with simpleflock.SimpleFlock("/tmp/scrape_file_write"):
        with open(filename, "wb") as f:
            f.write(page.content)
コード例 #18
0
def user_processing():
    """Run the user-processing job, guarded so only one instance executes."""
    lock_name = "user-processing.lock"
    try:
        with simpleflock.SimpleFlock(lock_name, timeout=1):
            run_command()
    except BlockingIOError:
        # Another instance already holds the lock; log and bail out.
        numu_app.logger.error("Unable to achieve lock.")
コード例 #19
0
def save(batches):
    """Persist per-run scheduler statistics to <filename>.pkl and .csv.

    For each strictness level, recomputes synthetic deadlines for the
    deadline-bound batches, measures their success ratio, and appends one
    summary row to the shared results table (guarded by a file lock so
    concurrent experiment runs don't clobber each other).

    NOTE(review): depends on module-level `options` and `res` dicts and
    on numpy / pandas / matplotlib / pickle globals defined elsewhere in
    the file — confirm their shapes against the caller.
    """
    # Partition batches: finite deadline -> deadline-bound,
    # infinite deadline -> best-effort.
    deadline_batches_index = numpy.where(
        numpy.isfinite([x.batch.deadline for x in batches]))
    best_effort_batches_index = numpy.where(
        numpy.isinf([x.batch.deadline for x in batches]))
    # Fraction of each best-effort batch still unprocessed (size - level) / size.
    best_effort_completion = [
        (batches[i].batch.size - batches[i].batch.level) /
        batches[i].batch.size for i in best_effort_batches_index[0]
    ]
    #    print(best_effort_batches_index)
    #    print(best_effort_completion)
    #    print(numpy.mean(best_effort_completion), numpy.max(best_effort_completion))
    # Same unprocessed fraction, for the deadline-bound batches.
    deadline_completion = [(batches[i].batch.size - batches[i].batch.level) /
                           batches[i].batch.size
                           for i in deadline_batches_index[0]]
    ratio_success_deadline = []
    # Synthetic deadline as a function of strictness x and batch size s:
    # interpolates between the relaxed per-batch service time and the
    # tightest one achievable with min(no_workers, s) parallel workers.
    dead_function = lambda x, s: s * (x * (options[
        'average_service_time'] - options['average_service_time'] / numpy.min(
            [options['no_workers'], s])) + options['average_service_time'] /
                                      numpy.min([options['no_workers'], s]))
    strict_array = [0.0, 0.2, 0.4, 0.6, 0.8, 1.0]
    fn = options['filename'] + '.pkl'
    # Serialise result-file read/append/write across concurrent runs.
    with simpleflock.SimpleFlock(options['filename'] + '.lock', timeout=60):
        try:
            results = pd.read_pickle(fn)
            print('Results already exists in this directory.. appending...')
        except:
            # NOTE(review): bare except also hides corrupt-pickle errors,
            # silently starting a fresh results table.
            print('file', fn, "doesn't exists")
            results = pd.DataFrame(columns=[
                'total_time', 'no_batches', 'no_workers', 'seed', 'scheduler',
                'deadline_success_ratio', 'mean_deadline', 'median_deadline',
                'std_deadline', 'mean_best_effort', 'median_best_effort',
                'std_best_effort', 'execution_time', 'no_best_effort',
                'no_deadline', 'strictness', 'log'
            ])
        # One summary row per strictness level.
        for s in range(0, len(strict_array)):
            for i in deadline_batches_index[0]:
                # Re-derive each deadline for this strictness and mark
                # whether the batch finished in time.
                batches[i].batch.deadline = dead_function(
                    strict_array[s],
                    batches[i].batch.size) + batches[i].batch.arrive_time
                batches[i].batch.success = batches[
                    i].batch.completion_time <= batches[i].batch.deadline
            ratio_success_deadline.append(
                numpy.sum([
                    batches[i].batch.success for i in deadline_batches_index[0]
                ]) / numpy.size(deadline_batches_index))
            results = results.append(pd.DataFrame(
                {
                    'total_time':
                    options['total_time'],
                    'no_batches':
                    options['no_best_effort_batches'] +
                    options['no_deadline_batches'],
                    'no_workers':
                    options['no_workers'],
                    'seed':
                    options['seed'],
                    'scheduler':
                    options['scheduler'],
                    'execution_time':
                    res['execution_time'],
                    'deadline_success_ratio':
                    ratio_success_deadline[s],
                    'mean_deadline':
                    numpy.mean(deadline_completion),
                    'median_deadline':
                    numpy.median(deadline_completion),
                    'std_deadline':
                    numpy.std(deadline_completion),
                    'mean_best_effort':
                    numpy.mean(best_effort_completion),
                    'median_best_effort':
                    numpy.median(best_effort_completion),
                    'std_best_effort':
                    numpy.std(best_effort_completion),
                    'no_best_effort':
                    numpy.size(best_effort_batches_index),
                    'no_deadline':
                    numpy.size(deadline_batches_index),
                    'strictness':
                    strict_array[s],
                    'log':
                    options['log']
                },
                index=[0]),
                                     ignore_index=True)
        # Persist both machine-readable and human-readable copies.
        results.to_pickle(fn)
        results.to_csv(options['filename'] + '.csv', index=False)
    # Every 25th seed also dumps a fairness-over-time plot and its data.
    if not numpy.mod(options['seed'], 25):
        plt.plot(*zip(*res['fairness_index']))
        plt.xlabel('Time [s]')
        plt.ylabel('Fairness index')
        axes = plt.gca()
        axes.set_ylim([-3010, 10])
        plt.savefig(options['filename'] + '_' + options['scheduler'] + '_' +
                    str(options['seed']) + '.eps')
        with open(
                options['filename'] + str(options['seed']) +
                options['scheduler'] + '_plot.pkl', 'w') as f:
            pickle.dump(res['fairness_index'], f)
コード例 #20
0
ファイル: plate_io.py プロジェクト: synatree/node-pi-plates
#TODO: scan for plates at startup so we can handle wrong-address
#      or plate_type mismatch exceptions

while True:
    try:
        line = sys.stdin.readline()
        # TODO: add error handling for invalid JSON
        msg = json.loads(line)
        addr = msg['addr']
        plate_type = msg['plate_type']
        cmd = msg['cmd']
        args = msg['args']
        resp = {}
        if (plate_type == "RELAY"):
            with simpleflock.SimpleFlock("/tmp/relay.lock", timeout=3):
                if (cmd == "setLED"):
                    RP.setLED(addr)
                    resp['LED'] = 1
                elif (cmd == "clrLED"):
                    RP.clrLED(addr)
                    resp['LED'] = 0
                elif (cmd == "toggleLED"):
                    RP.toggleLED(addr)
                    resp['LED'] = "UNKNOWN"
                elif (cmd == "getID"):
                    resp['ID'] = RP.getID(addr)
                elif (cmd == "getHWrev"):
                    resp['HWrev'] = RP.getHWrev(addr)
                elif (cmd == "getFWrev"):
                    resp['FWrev'] = RP.getFWrev(addr)