def p_limit(p): r"""limit : | LIMIT INTEGER_LITERAL | LIMIT INTEGER_LITERAL COMMA INTEGER_LITERAL""" if len(p) == 1: p[0] = None elif len(p) == 3: p[0] = Limit(None, p[2]) else: p[0] = Limit(p[2], p[4])
def __init__(self, recipient=None, start=None, end=None, limit=None):
    """Build an output; any missing piece falls back to an empty default."""
    self._recipient = recipient if recipient is not None else ''
    # A concrete data range requires both endpoints.
    if start is not None and end is not None:
        self._dataURL = DataURL(start, end)
    else:
        self._dataURL = DataURL()
    self._limit = Limit() if limit is None else Limit(limit)
def _load_from_file(self) -> None:
    """Load and validate the YAML config file into this object's fields.

    Raises:
        ConfigException: if the file is missing, is not valid YAML, is not
            a mapping, or fails one of the semantic checks below.
    """
    try:
        with open(self.config_file, 'r') as fh:
            conf = yaml.safe_load(fh)
    except FileNotFoundError as ex:
        # Chain the cause so the traceback keeps the OS-level error.
        raise ConfigException(
            f"Config file {self.config_file} not found: {ex}") from ex
    except yaml.YAMLError as ex:
        raise ConfigException(
            f"Failed to load config from {self.config_file}: {ex}") from ex

    if not isinstance(conf, dict):
        raise ConfigException("Config must be a YAML object")

    self.qdir_root = conf.get("qdir_root")
    self.max_workers = conf.get("max_workers", self.max_workers)
    self.hint_origin = conf.get("hint_origin")

    # `or {}` tolerates an explicit `limit:` (null) entry, which the
    # original `conf.get("limit", {})` would pass through as None and crash.
    limit_raw = conf.get("limit") or {}
    if not isinstance(limit_raw, dict):
        raise ConfigException("Field 'limit' must be a map")
    self.limit = Limit(memory=self._parse_mem(limit_raw.get("memory")),
                       swap=self._parse_mem(limit_raw.get("swap")),
                       cpu=self._parse_proc(limit_raw.get("cpu")))

    if self.qdir_root is None:
        raise ConfigException("Field 'qdir_root' must be set")

    courses0 = conf.get("courses", [])
    if not isinstance(courses0, (list, dict)):
        raise ConfigException(
            "Courses must be an array or map of course objects")
    if isinstance(courses0, dict):
        # Map form: the key doubles as the course name unless overridden.
        courses = []
        for n, c in courses0.items():
            if "name" not in c:
                c["name"] = n
            courses.append(c)
    else:
        courses = courses0

    for c in courses:
        cc = Course(c, self.qdir_root)
        self.courses[cc.name.lower()] = cc

    # At most one listening endpoint may be configured; default to port 8080.
    out = sum(x is not None
              for x in (self.socket, self.socket_fd, self.port))
    if out == 0:
        self.port = 8080
    if out > 1:
        raise ConfigException("At most one of '--socket', '--socket-fd' "
                              "or '--port' must be used")

    if len(self.courses) == 0:
        raise ConfigException("At least one course must be set")

    self.postgres = conf.get("postgres", False)
    self.postgres_host = conf.get("postgres_host", self.postgres_host)
    self.postgres_user = conf.get("postgres_user", self.postgres_user)
def from_json(self, json):
    """Populate this output from a JSON dict; return True on success.

    BUG FIX: the original assigned self._dataURL before type-checking
    recipient/limit, so a failed parse left the object half-updated.
    All validation now happens before any mutation of self.
    """
    required = ['recipient', 'data_url', 'limit']
    if not all(k in json for k in required):
        logging.warning(f"value missing in {required}")
        return False
    if not isinstance(json['recipient'], str) or not isinstance(
            json['limit'], int):
        logging.warning(
            "recipient should be all type<str> and limit should be type<int>"
        )
        return False
    data_url = DataURL()
    if not data_url.from_json(json['data_url']):
        return False
    # Everything validated — commit all fields together.
    self._dataURL = data_url
    self._recipient = json['recipient']
    self._limit = Limit(json['limit'])
    return True
def __init__(self, argv: List[str]):
    """Set every option to its default, then apply argv and the config file."""
    self.argv = argv
    self.config_file = "exprtest.yaml"
    # Listening endpoint — at most one of these ends up set.
    self.socket: Optional[str] = None
    self.socket_fd: Optional[int] = None
    self.port: Optional[int] = None
    # Course layout and execution settings.
    self.qdir_root: Optional[str] = None
    self.courses: Dict[str, Course] = {}
    self.hint_origin: Optional[str] = None
    self.max_workers = 4
    self.limit = Limit()
    # Parse argv first, then the file.
    self._load_from_argv()
    self._load_from_file()
def setUp(self):
    """Create three test resources with increasingly generous limit pairs."""
    one_second = timedelta(seconds=1)
    self._resource1 = Resource(
        'test_resource1', [Limit(2, one_second), Limit(3, one_second)])
    self._resource2 = Resource(
        'test_resource2', [Limit(6, one_second), Limit(7, one_second)])
    self._resource3 = Resource(
        'test_resource3', [Limit(10, one_second), Limit(10, one_second)])
def __init__(self, argv: List[str]) -> None:
    """Initialise all defaults, then run argv/file loading and logging setup."""
    self.argv = argv
    self.config_file = "exprtest.yaml"
    # Listening endpoint — at most one of these ends up set.
    self.socket: Optional[str] = None
    self.socket_fd: Optional[int] = None
    self.port: Optional[int] = None
    # Course layout.
    self.qdir_root: Optional[str] = None
    self.courses: Dict[str, Course] = {}
    self.hint_origin: Optional[str] = None
    # Execution limits.
    self.max_workers = 4
    self.limit = Limit()
    # Diagnostics flags.
    self.verbose: bool = False
    self.journal: bool = False
    # Postgres connection defaults.
    self.postgres: bool = True
    self.postgres_host: str = "/var/run/postgresql"
    self.postgres_user: str = getpass.getuser()
    self.exprtest_stamp = _EXPRTEST_GIT_STAMP
    # Order matters: argv, then file, then logging.
    self._load_from_argv()
    self._load_from_file()
    self._setup_logging()
def limit(config):
    """Build the full analysis task graph for every configured mass point.

    For each mass, a deep copy of the config has its "@" placeholders
    replaced, per-channel tree/hadd/datacard tasks are configured, and a
    Limit task is chained onto the datacards.  A final PlotLimit task
    consumes the original config.

    Returns:
        list: all configured task objects.
    """
    allTasks = []

    for mass in config["masses"]:
        datacardTasks = []
        haddTasks = []
        tempConf = copy.deepcopy(config)

        # All keys after the first ("masses") are channel configurations.
        for channel in list(config.keys())[1:]:
            for key in tempConf[channel].keys():
                value = tempConf[channel][key]
                # Substitute the mass placeholder "@" in string-valued
                # entries and in lists of strings.
                # BUG FIX: use isinstance instead of `type(x) == str/list`,
                # which misses subclasses.
                if isinstance(value, str):
                    tempConf[channel][key] = value.replace("@", str(mass))
                elif isinstance(value, list):
                    tempConf[channel][key] = [
                        i.replace("@", str(mass)) for i in value
                    ]

            tempConf[channel]["processes"] = (
                tempConf[channel]["backgrounds"]
                + [tempConf[channel]["signal"]]
                + [tempConf[channel]["data"]])

            treeTasks = TreeRead.configure(tempConf, channel)
            haddTasks.extend(HaddPlot.configure(tempConf, treeTasks, channel))
            datacardTasks.extend(
                Datacard.configure(tempConf, channel, mass, haddTasks))
            allTasks.extend(treeTasks)

        # NOTE(review): statement grouping reconstructed from a collapsed
        # source line — confirm the Limit tasks are chained once per mass.
        allTasks.extend(haddTasks + datacardTasks +
                        Limit.configure(tempConf, mass, datacardTasks))

    allTasks.extend(PlotLimit.configure(config))
    return allTasks
def run(self, *args):
    """Prepare the three level lists and return them with values deduplicated."""

    def dedupe_values(mapping):
        # Replace each value list with a de-duplicated copy, in place.
        for key in mapping:
            mapping[key] = list(set(mapping[key]))
        return mapping

    lvl1 = args[0]
    logger.info('Received lvl1 list: {}'.format(len(lvl1)))
    lvl2 = args[1]
    logger.info('Received lvl2 list: {}'.format(len(lvl2)))
    lvl3 = args[2]
    logger.info('Received lvl3 list: {}'.format(len(lvl3)))

    # calculate the limit in case of a lvl1 removed
    self.max_limit = Limit(self.loop_interval).current_list(lvl1)

    # add the loop value to the dict to parse it during the preparing
    # return dictionary
    lvl1 = self.preparing_lvl1(lvl1)
    lvl2, self.lvl2_loop = self.get_loop(lvl2, self.lvl2_loop)
    lvl2 = self.preparing_lvl2(lvl1, lvl2, self.time_lvl2, self.max_limit)
    lvl3 = self.preparing_lvl3(lvl1, lvl2, lvl3, self.max_limit)
    self.lvl2_loop = self.remove_loop(lvl2, self.lvl2_loop)

    return dedupe_values(lvl1), dedupe_values(lvl2), dedupe_values(lvl3)
def set_limit(self, limit_min, limit_max):
    """Install a Limit built from the given bounds on this object."""
    self.limit = Limit(limit_min, limit_max)
# NOTE(review): this block arrived with its line structure collapsed; the
# formatting below is reconstructed.  Tokens are unchanged, including the
# '******' spans left behind by a credential-redaction tool — those lines do
# not parse and must be restored from the original source.  The code is
# Python 2 (StringIO.StringIO, md5.new, data.next()).
class SourceReader:
    """Pulls rows in bulks from a paginated CSV-over-HTTP source."""

    def __init__(self, date, product, catalogue, url, index, login_data):
        self.date = date
        self.product = product
        self.catalogue = catalogue
        self.url = url
        self.index = index
        self.login_data = login_data
        self.is_last = False   # set once the source is exhausted
        self.auth = False      # NOTE(review): never set True anywhere visible
        self.beg_time = None
        self.end_time = None
        self.row_manager = SourceRowManager(self.date)

    def set_beg_time(self, time):
        self.beg_time = time

    def set_end_time(self, time):
        self.end_time = time

    def set_log(self, log):
        # log is a plain callable taking one message string.
        self.log = log

    def set_limit(self, limit_min, limit_max):
        # Adaptive page-size limiter (see _get_data's increase/decrease calls).
        self.limit = Limit(limit_min, limit_max)

    def set_iter(self, iter_type, index):
        self.iter = SourceIterator(iter_type, index)
        self.iter_type = iter_type
        if self.iter_type == 'HH:MM:SS':
            # Time-based iteration walks hour by hour.
            self.hour = 0

    def next_bulk(self):
        # Fetch one page and return its rows as JSON strings; None at end.
        if self.is_last:
            return None
        data, limit = self._get_data()
        if data == None:
            return None
        # First CSV record is the header; its cells become the row keys.
        self.first_row = data.next()
        lines = 0
        fullBulk = False
        while not fullBulk:
            fullBulk = True
            result = []
            #try:
            for cur_row in data:
                lines += 1
                row = self._get_row( cur_row )
                if row == None:
                    self.log("ERROR! Failed to get row.")
                    continue
                if row == 'ignore':
                    continue
                json_row = json.dumps(row)
                result.append(json_row)
            #except:
            #    self.log("EXCEPTION! Failed to get row. id=["+self.iter.get_str()+"]")
            #    if lines != limit:
            #        fullBulk = False
        # NOTE(review): grouping below reconstructed — confirm the hour bump
        # and end-of-data checks sit after the retry loop.
        if self.iter_type == 'HH:MM:SS':
            self.hour += 1
        if lines < 2:
            # Header-only page: finished, unless still walking the 24 hours.
            if not (self.iter_type == 'HH:MM:SS' and self.hour <= 24):
                self.log("FINISH. id=["+self.iter.get_str()+"] limit=["+str(limit)+"]")
                self.is_last = True
        else:
            self.log("Bulk got. id=["+self.iter.get_str()+"] limit=["+str(limit)+"]")
        return result

    def _get_data(self):
        # GET one page; returns (csv.reader, limit) or (None, 0) on Ctrl-C.
        while True:
            url = self._get_url()
            try:
                response = requests.get(url)
            except KeyboardInterrupt:
                self.log("FINISH")
                return None, 0
            except:
                # NOTE(review): bare except — retries forever on any error.
                self.log("GET_DATA ERROR! id=[" + self.iter.get_str() + "]")
                self.limit.decrease()
                sleep(1)
                continue
            if response.status_code != 200:
                # NOTE(review): this log literal spanned a mangled line break
                # in the source — confirm its exact text against the original.
                self.log( "GET_DATA ERROR! code=" + str(response.status_code) + \
                    " text=" + response.text)
                self.limit.decrease()
                continue
            limit = self.limit.get()
            # Successful fetch: let the page size grow again.
            self.limit.increase()
            stream = StringIO.StringIO( response.text.encode('utf-8') )
            return csv.reader( stream, delimiter=',' ), limit

    def _get_row(self, data):
        # Turn one CSV record into a dict keyed by the header row.
        if len(data) <= 0:
            self.log("ROW ERROR! empty")
            return None
        # Advance the iterator cursor from the row's id/time column.
        if self.iter_type == 'id':
            self.iter.set(data[0])
        else:
            self.iter.set(data[1])
        row = self.row_manager.init_row()
        i = 0
        while i < len(data):
            self._add_param(self.first_row[i], data[i], row)
            i += 1
        self.row_manager.apply_rules_to_row(row)
        return row

    def _add_param(self, key, value, obj):
        obj[key] = value

    def _get_url(self):
        # Assemble the request URL from date, iterator, product and limit.
        params = "date=" + self.date + \
            self.iter.get_param()+"&p="+self.product+\
            "&limit=" + str(self.limit.get())+"&pc="+self.catalogue
        if self.beg_time != None:
            params += "&beg_time=" + str(self.beg_time)
        if self.end_time != None:
            params += "&end_time=" + str(self.end_time)
        params = self._add_auth(params)
        url = self.url + "&" + params
        return url

    def _add_auth(self, params):
        # Append login and md5 signature params when credentials exist.
        if self.login_data == None:
            return params
        if self.login_data.get('login') == None:
            return params
        if self.login_data.get('password') == None:
            return params
        if self.auth == False:
            if self._auth() != True:
                return params
        # NOTE(review): '******' is redaction residue; the original
        # expression (presumably the login value) must be restored.
        params += '&login='******'login']
        data = params.split('&')
        data.sort()
        data = ''.join(data)
        data += self.login_data['password']
        sign = md5.new(data).hexdigest()
        params += '&sign=' + sign
        return params

    def _auth(self):
        # Hit the auth endpoint once; True on any non-exception response.
        url = self.url.split('?')[0]
        url += '?method=auth'
        # NOTE(review): '******' spans are redaction residue — restore from
        # the original source.
        url += '&login='******'login']
        url += '&password='******'password']
        try:
            response = requests.get(url)
        except:
            return False
        return True
def setUp(self):
    """Install a fresh Limit of 2 per 3-second window before each test."""
    super().setUp()
    window = timedelta(seconds=3)
    self._limit = Limit(2, window)
def upload_data():
    """Handle a data-upload request: validate, store, and issue an authorization.

    NOTE(review): this function references `self` without taking it as a
    parameter — presumably it is defined inside a method and captures `self`
    as a closure; confirm against the enclosing scope.

    Returns:
        (body, status): 400 on any validation failure, otherwise a 201 with
        the block/authorization/output numbers of the new authorization.
    """
    # check if got enough values
    get_json = request.get_json()
    print(get_json)
    print(type(get_json))
    required = ['public_key', 'data', 'timestamp', 'op', 'signature']
    if not all(k in get_json for k in required):
        return 'Missing values', 400
    public_key = get_json['public_key']
    data = get_json['data']
    timestamp = get_json['timestamp']
    op = get_json['op']
    # Renamed from `hash`, which shadowed the builtin.
    digest = hashlib.sha256(
        (str(data) + str(timestamp) + str(0)).encode()).hexdigest()
    signature = get_json['signature']
    # check if op is upload operation
    if op != 0:
        return 'op is not matched!', 400
    # check if there is enough room
    if len(self._database) + len(get_json['data']) >= MAX_DATA_RANGE:
        return 'no enough storage', 400
    # check if timestamp is not expired
    if timestamp + 600 < time.time():
        return 'Request expired', 400
    # check if signature is matched
    print(signature)
    print(type(signature))
    # SECURITY(review): eval() on a client-supplied string executes arbitrary
    # code — replace with an explicit deserializer for the signature format.
    if not verify_signature(public_key, digest, eval(signature)):
        return 'Signature unmatched', 400
    # everything is fine then store data into database
    # BUG FIX: the original tested `type(data) is 'list'` (a type compared to
    # a string with `is`), which is always False, so list payloads were
    # appended as one nested element instead of extended.
    if isinstance(data, list):
        self._database += data
    else:
        self._database.append(data)
    # generate a new authorization for the stored range
    auth_input = Input(0, 0, 0)
    if isinstance(data, list):
        data_url = DataURL(len(self._database) - len(data),
                           len(self._database))
    else:
        data_url = DataURL(len(self._database) - 1, len(self._database))
    output = Output(recipient=public_key, data_url=data_url, limit=Limit(7))
    authorization = Authorization(inputs=[auth_input], outputs=[output],
                                  duration=Duration(),
                                  timestamp=time.time())
    # sign this authorization
    auth_input.add_signature(
        self._account.sign_message(authorization.calc_hash()))
    # store this authorization and broadcast it
    auth_number = self.add_authorization(authorization)
    print(auth_number)
    # return the position of authorization to client
    response = {
        'block_number': self._blockchain.get_height(),
        'auth_number': auth_number,
        'output_number': 0
    }
    return jsonify(response), 201
# NOTE(review): this block arrived with its line structure collapsed; the
# formatting below is reconstructed.  Tokens are unchanged, including the
# '******' spans left behind by a credential-redaction tool — those lines do
# not parse and must be restored from the original source.  The code is
# Python 2 (StringIO.StringIO, md5.new, data.next()).
class SourceReader:
    """Pulls rows in bulks from a paginated CSV-over-HTTP source."""

    def __init__(self, date, product, catalogue, url, index, login_data):
        self.date = date
        self.product = product
        self.catalogue = catalogue
        self.url = url
        self.index = index
        self.login_data = login_data
        self.is_last = False   # set once the source is exhausted
        self.auth = False      # NOTE(review): never set True anywhere visible
        self.beg_time = None
        self.end_time = None
        self.row_manager = SourceRowManager(self.date)

    def set_beg_time(self, time):
        self.beg_time = time

    def set_end_time(self, time):
        self.end_time = time

    def set_log(self, log):
        # log is a plain callable taking one message string.
        self.log = log

    def set_limit(self, limit_min, limit_max):
        # Adaptive page-size limiter (see _get_data's increase/decrease calls).
        self.limit = Limit(limit_min, limit_max)

    def set_iter(self, iter_type, index):
        self.iter = SourceIterator(iter_type, index)
        self.iter_type = iter_type
        if self.iter_type == 'HH:MM:SS':
            # Time-based iteration walks hour by hour.
            self.hour = 0

    def next_bulk(self):
        # Fetch one page and return its rows as JSON strings; None at end.
        if self.is_last:
            return None
        data, limit = self._get_data()
        if data == None:
            return None
        # First CSV record is the header; its cells become the row keys.
        self.first_row = data.next()
        lines = 0
        fullBulk = False
        while not fullBulk:
            fullBulk = True
            result = []
            #try:
            for cur_row in data:
                lines += 1
                row = self._get_row(cur_row)
                if row == None:
                    self.log("ERROR! Failed to get row.")
                    continue
                if row == 'ignore':
                    continue
                json_row = json.dumps(row)
                result.append(json_row)
            #except:
            #    self.log("EXCEPTION! Failed to get row. id=["+self.iter.get_str()+"]")
            #    if lines != limit:
            #        fullBulk = False
        # NOTE(review): grouping below reconstructed — confirm the hour bump
        # and end-of-data checks sit after the retry loop.
        if self.iter_type == 'HH:MM:SS':
            self.hour += 1
        if lines < 2:
            # Header-only page: finished, unless still walking the 24 hours.
            if not (self.iter_type == 'HH:MM:SS' and self.hour <= 24):
                self.log("FINISH. id=[" + self.iter.get_str() +
                         "] limit=[" + str(limit) + "]")
                self.is_last = True
        else:
            self.log("Bulk got. id=[" + self.iter.get_str() +
                     "] limit=[" + str(limit) + "]")
        return result

    def _get_data(self):
        # GET one page; returns (csv.reader, limit) or (None, 0) on Ctrl-C.
        while True:
            url = self._get_url()
            try:
                response = requests.get(url)
            except KeyboardInterrupt:
                self.log("FINISH")
                return None, 0
            except:
                # NOTE(review): bare except — retries forever on any error.
                self.log("GET_DATA ERROR! id=[" + self.iter.get_str() + "]")
                self.limit.decrease()
                sleep(1)
                continue
            if response.status_code != 200:
                # NOTE(review): this log literal spanned a mangled line break
                # in the source — confirm its exact text against the original.
                self.log(
                    "GET_DATA ERROR! code=" + str(response.status_code) + \
                    " text=" + response.text)
                self.limit.decrease()
                continue
            limit = self.limit.get()
            # Successful fetch: let the page size grow again.
            self.limit.increase()
            stream = StringIO.StringIO(response.text.encode('utf-8'))
            return csv.reader(stream, delimiter=','), limit

    def _get_row(self, data):
        # Turn one CSV record into a dict keyed by the header row.
        if len(data) <= 0:
            self.log("ROW ERROR! empty")
            return None
        # Advance the iterator cursor from the row's id/time column.
        if self.iter_type == 'id':
            self.iter.set(data[0])
        else:
            self.iter.set(data[1])
        row = self.row_manager.init_row()
        i = 0
        while i < len(data):
            self._add_param(self.first_row[i], data[i], row)
            i += 1
        self.row_manager.apply_rules_to_row(row)
        return row

    def _add_param(self, key, value, obj):
        obj[key] = value

    def _get_url(self):
        # Assemble the request URL from date, iterator, product and limit.
        params = "date=" + self.date + \
            self.iter.get_param()+"&p="+self.product+\
            "&limit=" + str(self.limit.get())+"&pc="+self.catalogue
        if self.beg_time != None:
            params += "&beg_time=" + str(self.beg_time)
        if self.end_time != None:
            params += "&end_time=" + str(self.end_time)
        params = self._add_auth(params)
        url = self.url + "&" + params
        return url

    def _add_auth(self, params):
        # Append login and md5 signature params when credentials exist.
        if self.login_data == None:
            return params
        if self.login_data.get('login') == None:
            return params
        if self.login_data.get('password') == None:
            return params
        if self.auth == False:
            if self._auth() != True:
                return params
        # NOTE(review): '******' is redaction residue; the original
        # expression (presumably the login value) must be restored.
        params += '&login='******'login']
        data = params.split('&')
        data.sort()
        data = ''.join(data)
        data += self.login_data['password']
        sign = md5.new(data).hexdigest()
        params += '&sign=' + sign
        return params

    def _auth(self):
        # Hit the auth endpoint once; True on any non-exception response.
        url = self.url.split('?')[0]
        url += '?method=auth'
        # NOTE(review): '******' spans are redaction residue — restore from
        # the original source.
        url += '&login='******'login']
        url += '&password='******'password']
        try:
            response = requests.get(url)
        except:
            return False
        return True
class Output:
    """A transaction output: a recipient plus a data range and a usage limit."""

    def __init__(self, recipient=None, start=None, end=None, limit=None):
        """Create an output; any missing piece defaults to an empty value."""
        self._recipient = recipient if recipient is not None else ''
        # A concrete data range requires both endpoints.
        if start is None or end is None:
            self._dataURL = DataURL()
        else:
            self._dataURL = DataURL(start, end)
        self._limit = Limit() if limit is None else Limit(limit)

    def set_recipient(self, recipient):
        self._recipient = recipient

    def get_recipient(self):
        return self._recipient

    def set_data_url(self, data_url):
        self._dataURL = data_url

    def get_data_url(self):
        return self._dataURL

    def data_url_contains(self, start, end):
        return self._dataURL.contains(start, end)

    def data_url_belongs(self, start, end):
        return self._dataURL.belongs(start, end)

    def set_limit(self, limit):
        self._limit.set(limit)

    def get_limit(self):
        return self._limit

    def valid_limit(self, limit):
        return self._limit.valid(limit)

    def valid(self, givens):
        """Return True if any (start, end, limit) triple covers this output."""
        for given in givens:
            if self.data_url_belongs(given[0], given[1]) and self.valid_limit(
                    given[2]):
                return True
        logging.warning(
            'this output does not match any given data_url or limit')
        return False

    def to_json(self):
        """Serialize this output to a plain dict."""
        json = {
            'recipient': self._recipient,
            'data_url': self._dataURL.to_json(),
            'limit': self._limit.value()
        }
        return json

    def from_json(self, json):
        """Populate this output from a JSON dict; return True on success.

        BUG FIX: the original assigned self._dataURL before type-checking
        recipient/limit, so a failed parse left the object half-updated.
        All validation now happens before any mutation of self.
        """
        required = ['recipient', 'data_url', 'limit']
        if not all(k in json for k in required):
            logging.warning(f"value missing in {required}")
            return False
        if not isinstance(json['recipient'], str) or not isinstance(
                json['limit'], int):
            logging.warning(
                "recipient should be all type<str> and limit should be type<int>"
            )
            return False
        data_url = DataURL()
        if not data_url.from_json(json['data_url']):
            return False
        # Everything validated — commit all fields together.
        self._dataURL = data_url
        self._recipient = json['recipient']
        self._limit = Limit(json['limit'])
        return True

    def __str__(self):
        return str(self._recipient) + str(self._dataURL) + str(self._limit)