def output_networks(path, nets, opts):
    # Ensure dir exists
    path = os.path.join(path, 'input', 'linuxdvb', 'networks')
    if not os.path.exists(path):
        os.makedirs(path)

    # Write each network
    for n in nets:

        # Network config
        if n['type'] == 'A':
            c = 'linuxdvb_network_atsc'
        else:
            c = 'linuxdvb_network_dvb' + n['type'].lower()
        d = {
            'networkname': n['name'],
            'nid': 0,
            'autodiscovery': False,
            'skipinitscan': True,
            'class': c
        }

        # Write
        u = uuid()
        npath = os.path.join(path, u)
        os.mkdir(npath)
        open(os.path.join(npath, 'config'), 'w').write(json.dumps(d, indent=2))

        # Muxes
        output_muxes(os.path.join(npath, 'muxes'), n['muxs'], opts)
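# Note: the tvheadend migration snippets in this file call a bare uuid()
# helper that returns a fresh hex string used as an object id / filename.
# A minimal sketch of such a helper (an assumption; the original helper is
# not shown here):

import uuid as _uuid

def uuid():
    # 32-char lowercase hex with no dashes, the form tvheadend uses for ids
    return _uuid.uuid4().hex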
def clone(self, source):
    for column in source.__table__.c:
        if column.name != 'id':
            setattr(self, camelcase_to_underscore(column.name),
                    getattr(source, camelcase_to_underscore(column.name)))
        else:
            setattr(self, 'id', uuid())
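# clone() assumes a camelcase_to_underscore helper that is not shown. A
# plausible minimal sketch (hypothetical implementation, not the original):

import re

def camelcase_to_underscore(name):
    # e.g. 'createdAt' -> 'created_at'
    return re.sub(r'(?<!^)(?=[A-Z])', '_', name).lower()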
def __init__(self, name):
    self.name = name
    self.exchange = Exchange(self.name)
    self.routing_key = 'lock_routing_' + self.name
    self.requester = Producer(
        Connection(),
        exchange=self.exchange,
        auto_declare=True,
    )
    self.id = uuid()
    self.lock_client_q = Queue(name=self.id,
                               exchange=self.exchange,
                               routing_key=self.id)
    self.lock_client = Consumer(
        Connection(),
        on_message=self.read_response,
        queues=[self.lock_client_q],
    )
    self.red_connection = redis.StrictRedis()
    self.lock_client_listen_thread = threading.Thread(target=self.listener)
    self.lock_client_listen_thread.daemon = True
    self.hold_lock = threading.Event()
    self.lock_client_listen_thread.start()
    DistLockClient.producers[self.id] = (self.requester, self.lock_client)
def create(self, validated_data):
    self.uuid = uuid()
    self.create_time = datetime.now()
    self.last_modified = datetime.now()
    self.flag = 'Y'
    self.company = COMPANYID
    return self.objects.save(**validated_data)
def __init__(self,
             milliseconds: int,
             not_before: Optional[datetime],
             not_after: Optional[datetime],
             sticky: bool = False,
             api: bool = False,
             uuid: Union[str, Callable] = uuid.uuid4,
             status: BellResourceStatus = BellResourceStatus.UNUSED,
             failed_count: int = 0,
             created_at: Optional[datetime] = None,
             updated_at: Optional[datetime] = None) -> None:
    """Initialize with resource params."""
    if not_before and not_after and not_before > not_after:
        raise ValueError(
            "Expected not_before < not_after, but {} (not_before) > {}"
            " (not_after).".format(not_before, not_after))
    # on table
    self.uuid: str = str(uuid() if callable(uuid) else uuid)
    self.milliseconds: int = milliseconds
    self.not_before: Optional[datetime] = (pytz.timezone(
        options.timezone).localize(not_before) if not_before else None)
    self.not_after: Optional[datetime] = (pytz.timezone(
        options.timezone).localize(not_after) if not_after else None)
    self.sticky: bool = sticky
    self.api: bool = api
    self._status: BellResourceStatus = status
    self._failed_count: int = failed_count
    self.created_at: datetime = (datetime.fromisoformat(created_at)
                                 if created_at else datetime.now(pytz.utc))
    self.updated_at: datetime = (datetime.fromisoformat(updated_at)
                                 if updated_at else datetime.now(pytz.utc))
    # not on table
    self._is_before_period: Optional[bool] = None
    self._is_after_period: Optional[bool] = None
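# Usage sketch for the constructor above (assumes the surrounding
# BellResource class is importable; the uuid argument accepts either a
# callable such as uuid.uuid4, the default, or a ready-made string):
#
#   r1 = BellResource(milliseconds=500, not_before=None, not_after=None)
#   r2 = BellResource(milliseconds=500, not_before=None, not_after=None,
#                     uuid='0f1e2d3c4b5a69788796a5b4c3d2e1f0')  # hypothetical id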
def register_reference(self, data, *args, **kwargs):
    """Registers reference to the validation context."""
    uuid = getattr(self, 'ref_uuid', None)
    if uuid:
        uuid = uuid(data)
    url = self.ref_url(data)
    self.register(reference=url, reference_uuid=uuid)
    return data
def __init__(self, **kwargs):
    """Override default __init__; if the mapper has an id column and it
    isn't set, set it to a new uuid."""
    for k, v in kwargs.items():
        setattr(self, k, v)
    if hasattr(self, 'id') and not self.id \
            and isinstance(self.__table__.c.id.type, db.VARCHAR):
        self.id = uuid()
def _localoffload(offline, docs, errordocs=None, debug=False):
    """
    Setup the offloading of data into pickles.

    Prepares data to be pickled and generates a message on how to correctly
    modify target data, should a misconfiguration have occurred.

    Keyword arguments:
    offline -- dictionary of offline ES clusters
    docs -- ES documents to be sent to 'data index' for each cluster
    errordocs -- ES documents to be sent to 'error index'
    """
    basedir = './cfg/tmp/'
    checkmakedir(basedir)
    datadir = basedir + 'data/'
    checkmakedir(datadir)
    pickle = []
    err_pickle = []
    if docs:
        docid = str(uuid())
        _dumplist(docs, datadir + docid)
        pickle.append(docid)
    if errordocs:
        errid = str(uuid())
        _dumplist(errordocs, datadir + errid)
        err_pickle.append(errid)
    for cluster in offline:
        # makes a json for each cluster
        clusterfile = basedir + cluster.pop('name_ts', 0) + '.json'
        cluster['pickle'] = pickle
        cluster['err_pickle'] = err_pickle
        if os.path.exists(clusterfile):
            f = open(clusterfile, mode='r+')
            old = json.load(f)
            cluster['pickle'] += old['pickle']
            cluster['err_pickle'] = old['err_pickle']
            f.seek(0)
        else:
            f = open(clusterfile, mode='w')
        cluster['instructions'] = (
            "Keep the name the same, but change any incorrect information "
            "about the cluster if needed. Ignore the pickle fields, as they "
            "point to the data that will be sent. Do NOT touch the pickle "
            "nor err_pickle fields.")
        cluster['path_to_data'] = datadir
        json.dump(cluster, f, indent=4, sort_keys=True)
        f.close()
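# _localoffload relies on two helpers that are not shown. A hedged sketch
# of what they plausibly do (hypothetical implementations, not the
# originals):

import os
import pickle as _pickle

def checkmakedir(path):
    # create the directory if it does not exist yet
    if not os.path.exists(path):
        os.makedirs(path)

def _dumplist(docs, path):
    # serialize a list of ES documents to the given file
    with open(path, 'wb') as f:
        _pickle.dump(docs, f)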
class Metaclass(type):
    # A metaclass implementing uuid.uuid4()-style ids:
    # a unique access key for the DB
    def __new__(cls, name, parents, attrs):
        if 'class_id' not in attrs:
            attrs['class_id'] = name.lower()
        return super().__new__(cls, name, parents, attrs)


fout = open('table1.json', 'wt')
print('oops/i/did/it/again', uuid(), file=fout)
fout.close()
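# Usage sketch: any class built with this metaclass gets a class_id
# attribute derived from its name (the class name below is illustrative):

class UserTable(metaclass=Metaclass):
    pass

assert UserTable.class_id == 'usertable'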
def output_channels(path, chns, opts):
    path = os.path.join(path, 'channel')
    if not os.path.exists(path):
        os.makedirs(path)

    # Each channel
    for c in chns:
        c = chns[c]
        u = uuid()

        # Output
        open(os.path.join(path, u), 'w').write(json.dumps(c, indent=2))

        # Store
        c['uuid'] = u
def _api(self, cmd, form={}, **kwargs):
    if isinstance(form, (dict, list)):
        form = urllib.urlencode(form)
    kwargs['content-type'] = 'text/x-url-arguments'
    kwargs['accept'] = 'application/json'
    kwargs['requestId'] = uuid()
    kwargs['userLocale'] = 'en_US'
    kwargs['teamId'] = self.team_id
    query = urllib.urlencode(kwargs)
    url = "%s/%s?%s" % (self.DEVELOPER_SERVICES_URL, cmd, query)
    response = self.opener.open(url, form)
    assert response.getcode() == 200, "Error %s" % response.getcode()
    data = json.loads(response.read())
    rc = data['resultCode']
    if rc not in [0, 8500]:
        raise APIException(data)
    return data
def output_muxes(path, muxs, opts):
    ignore = ['type', 'key', 'svcs', 'network', 'quality', 'status']

    # Process each
    for m in muxs:
        m = muxs[m]

        # Copy
        d = {}
        for k in m:
            if k not in ignore:
                d[k] = m[k]

        # Output
        u = uuid()
        mpath = os.path.join(path, u)
        os.makedirs(mpath)
        open(os.path.join(mpath, 'config'), 'w').write(json.dumps(d, indent=2))

        # Services
        output_services(os.path.join(mpath, 'services'), m['svcs'], opts)
def output_services(path, svcs, opts):
    ignore = {'type', 'key', 'channelname', 'mapped'}
    if not os.path.exists(path):
        os.makedirs(path)

    # Process services
    for s in svcs:
        s = svcs[s]

        # Copy
        d = {}
        for k in s:
            if k not in ignore:
                d[k] = s[k]

        # Output
        u = uuid()
        spath = os.path.join(path, u)
        open(spath, 'w').write(json.dumps(d, indent=2))
        s['uuid'] = u
def output_iptv(path, nets, opts):
    d = None

    # Find
    for n in nets:
        if n['type'] == 'iptv':
            d = n
    if not d:
        return

    # Ensure dir exists
    path = os.path.join(path, 'input', 'iptv')
    if not os.path.exists(path):
        os.makedirs(path)

    # Write
    u = uuid()
    n = {'uuid': u, 'skipinitscan': True, 'autodiscovery': False}
    open(os.path.join(path, 'config'), 'w').write(json.dumps(n, indent=2))

    # Muxes
    output_muxes(os.path.join(path, 'muxes'), d['muxs'], opts)
def register():
    if request.method == 'POST':
        if not request.json:
            abort(405)
        data = request.get_json()
        if not data["username"] or not data["password"] \
                or not data["email"] or not data["password2"]:
            abort(
                make_response(
                    jsonify({'Error': "Please enter correct information"}),
                    402))
        elif data["password"] != data["password2"]:
            abort(
                make_response(jsonify({'Error': "Passwords need to match"}),
                              402))
        else:
            # check for duplicate user
            if get_user_id(data["username"]) is not None:
                abort(
                    make_response(jsonify({'Error': "User already exists"}),
                                  406))
            else:
                db = cluster.connect(KEYSPACE)
                password = generate_password_hash(data['password'])
                db.execute(
                    '''insert into user (username, user_id, email, pw_hash)
                       values (?, ?, ?, ?)''',
                    [data['username'], uuid(), data['email'], password])
                return jsonify({
                    'username': data['username'],
                    'email': data['email'],
                    'status': 'Successfully registered.',
                    'status code': 201
                })
class Block:
    def __init__(self, blockMap={}):
        self.update_block(blockMap)

    def update_block(self, blockMap):
        """updates the block with the given block map. this function
        overwrites the existing block -- it does not merge"""
        self.blockMap = blockMap
        if 'properties' not in self.blockMap:
            self.blockMap['properties'] = {}
        else:
            for k in self.blockMap['properties'].keys():
                self.set(k, parse_value(self.blockMap['properties'][k]))

    def uuid():
        doc = "The uuid property."

        def fget(self):
            return parse_value(self.blockMap.get('uuid', None))

        def fset(self, value):
            self.blockMap['uuid'] = value

        return locals()

    uuid = property(**uuid())

    def name():
        doc = "The name property."

        def fget(self):
            return self.blockMap.get('name', None)

        def fset(self, value):
            self.blockMap['name'] = value

        return locals()

    name = property(**name())

    def creator():
        doc = """creator user uuid"""

        def fget(self):
            return parse_value(self.blockMap.get('creator', None))

        def fset(self, value):
            self.blockMap['creator'] = value

        return locals()

    creator = property(**creator())

    def creatorUsername():
        doc = """creator username"""

        def fget(self):
            return self.blockMap.get('creatorUsername', None)

        def fset(self, value):
            self.blockMap['creatorUsername'] = value

        return locals()

    creatorUsername = property(**creatorUsername())

    def creatorPicture():
        doc = """creator picture"""

        def fget(self):
            return self.blockMap.get('creatorPicture', None)

        def fset(self, value):
            self.blockMap['creatorPicture'] = value

        return locals()

    creatorPicture = property(**creatorPicture())

    def created():
        doc = "The created property."

        def fget(self):
            return parse_value(self.blockMap.get('created', None))

        def fset(self, value):
            self.blockMap['created'] = value

        return locals()

    created = property(**created())

    def modified():
        doc = "The modified property."

        def fget(self):
            return parse_value(self.blockMap.get('modified', None))

        def fset(self, value):
            self.blockMap['modified'] = value

        return locals()

    modified = property(**modified())

    def type():
        doc = "The type property."

        def fget(self):
            return parse_value(self.blockMap.get('type', None))

        def fset(self, value):
            self.blockMap['type'] = value

        return locals()

    type = property(**type())

    def set(self, propertyName, value):
        """sets the value"""
        if propertyName == 'name':
            self.name = value
        else:
            self.blockMap.get('properties')[propertyName] = self._ref(value)

    def get(self, propertyName):
        """returns the referenced property, inflating blocks as necessary"""
        if propertyName == 'name':
            return self.name
        else:
            properties = self.blockMap.get('properties')
            return self._deref(properties.get(propertyName, None))

    def add(self, propertyName, value):
        """adds a value to the block"""
        propertyList = self.blockMap['properties'].get(propertyName, None)
        if propertyList is None:
            propertyList = []
            self.blockMap['properties'][propertyName] = propertyList
        propertyList.append(self._ref(value))

    def remove(self, propertyName, value):
        """removes the value from the list"""
        values = self.blockMap['properties'][propertyName]
        if isinstance(value, Block):
            results = filter(lambda ref: ref.uuid == value.uuid, values)
            if len(results) == 0:
                return False
            values.remove(results[0])
        else:
            values.remove(value)
        return True

    def move(self, propertyName, value, toIndex):
        """moves the value to the given index"""
        values = self.blockMap['properties'][propertyName]
        if isinstance(value, Block):
            results = filter(lambda ref: ref.uuid == value.uuid, values)
            if len(results) == 0:
                return False
            values.remove(results[0])
            values.insert(toIndex, results[0])
        else:
            values.remove(value)
            values.insert(toIndex, value)
        return True

    def _owned(self):
        """returns true if the block is owned by the current user auth'd by
        SpringRpcService"""
        return self.creator == SpringRpcService().user_uuid

    def _ref(self, value):
        """creates and returns a BlockReference if necessary"""
        if isinstance(value, Block) and self._owned():
            return BlockReference(value.uuid)
        elif isinstance(value, list) and self._owned():
            return map(self._ref, value)
        else:
            return value

    def _deref(self, value):
        """dereferences value if it's a BlockReference"""
        if isinstance(value, BlockReference):
            return value.resolve()
        elif isinstance(value, list):
            return map(self._deref, value)
        else:
            return value

    def __str__(self):
        return self.name

    def __repr__(self):
        return self.name
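# The Block class above relies on the "property factory" idiom: each
# zero-argument def builds doc/fget/fset in its local scope and returns
# locals(), which is then unpacked into property(**...). A minimal
# standalone sketch of the same idiom:

class Point:
    def x():
        doc = "The x coordinate."

        def fget(self):
            return self._x

        def fset(self, value):
            self._x = value

        return locals()

    x = property(**x())

p = Point()
p.x = 3
assert p.x == 3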
def submit_job(j):
    """Submit HySDS job."""

    # get task_id and orchestrator queue
    task_id = submit_job.request.id
    orch_queue = submit_job.request.delivery_info.get("exchange", "unknown")

    # get container image name and url
    image_name = j.get("container_image_name", None)
    image_url = j.get("container_image_url", None)
    image_mapping = j.get("container_mappings", None)

    # get container runtime options
    runtime_options = j.get("runtime_options", None)

    # get hard/soft time limits
    time_limit = j.get("time_limit", None)
    soft_time_limit = j.get("soft_time_limit", None)

    # job dedup enabled?
    dedup = j.get("enable_dedup", True)

    # get priority
    priority = j.get("priority", None)
    if priority is None:
        priority = submit_job.request.delivery_info.get("priority")
    if priority is None:
        priority = 0

    # get tag
    tag = j.get("tag", None)

    # get username
    username = j.get("username", None)

    # default job json
    job = {
        "job_id": task_id,
        "name": task_id,
        "job_info": j,
    }

    # set job type
    if "job_type" in j:
        match = JOB_TYPE_RE.search(j["job_type"])
        job["type"] = match.group(1) if match else j["job_type"]

    # default context
    context = j.get("context", {})

    # get orchestrator configuration
    orch_cfg_file = os.environ.get("HYSDS_ORCHESTRATOR_CFG", None)
    if orch_cfg_file is None:
        error = "Environment variable HYSDS_ORCHESTRATOR_CFG is not set."
        error_info = ERROR_TMPL.substitute(orch_queue=orch_queue, error=error)
        job_status_json = {
            "uuid": job["job_id"],
            "job_id": job["job_id"],
            "payload_id": task_id,
            "status": "job-failed",
            "job": job,
            "context": context,
            "error": error_info,
            "short_error": get_short_error(error_info),
            "traceback": error_info,
        }
        log_job_status(job_status_json)
        raise OrchestratorExecutionError(error, job_status_json)

    # logger.info("HYSDS_ORCHESTRATOR_CFG:%s" % orch_cfg_file)

    if not os.path.exists(orch_cfg_file):
        error = "Orchestrator configuration %s doesn't exist." % orch_cfg_file
        error_info = ERROR_TMPL.substitute(orch_queue=orch_queue, error=error)
        job_status_json = {
            "uuid": job["job_id"],
            "job_id": job["job_id"],
            "payload_id": task_id,
            "status": "job-failed",
            "job": job,
            "context": context,
            "error": error_info,
            "short_error": get_short_error(error_info),
            "traceback": error_info,
        }
        log_job_status(job_status_json)
        raise OrchestratorExecutionError(error, job_status_json)

    with open(orch_cfg_file) as f:
        orch_cfg = json.load(f)

    # get job creators directory
    job_creators_dir = os.environ.get("HYSDS_JOB_CREATORS_DIR", None)
    if job_creators_dir is None:
        error = "Environment variable HYSDS_JOB_CREATORS_DIR is not set."
        error_info = ERROR_TMPL.substitute(orch_queue=orch_queue, error=error)
        job_status_json = {
            "uuid": job["job_id"],
            "job_id": job["job_id"],
            "payload_id": task_id,
            "status": "job-failed",
            "job": job,
            "context": context,
            "error": error_info,
            "short_error": get_short_error(error_info),
            "traceback": error_info,
        }
        log_job_status(job_status_json)
        raise OrchestratorExecutionError(error, job_status_json)

    # logger.info("HYSDS_JOB_CREATORS_DIR:%s" % job_creators_dir)

    # parse job configurations
    job_cfgs = {}
    for cfg in orch_cfg["configs"]:
        job_cfgs[cfg["job_type"]] = cfg["job_creators"]

    # check that we have info to create jobs
    if "job_type" not in j:
        error = "Invalid job spec. No 'job_type' specified."
        error_info = ERROR_TMPL.substitute(orch_queue=orch_queue, error=error)
        job_status_json = {
            "uuid": job["job_id"],
            "job_id": job["job_id"],
            "payload_id": task_id,
            "status": "job-failed",
            "job": job,
            "context": context,
            "error": error_info,
            "short_error": get_short_error(error_info),
            "traceback": error_info,
        }
        log_job_status(job_status_json)
        raise OrchestratorExecutionError(error, job_status_json)
    job_type = j["job_type"]
    job_queue = j.get("job_queue", None)
    if "payload" not in j:
        error = "Invalid job spec. No 'payload' specified."
        error_info = ERROR_TMPL.substitute(orch_queue=orch_queue, error=error)
        job_status_json = {
            "uuid": job["job_id"],
            "job_id": job["job_id"],
            "payload_id": task_id,
            "status": "job-failed",
            "job": job,
            "context": context,
            "error": error_info,
            "short_error": get_short_error(error_info),
            "traceback": error_info,
        }
        log_job_status(job_status_json)
        raise OrchestratorExecutionError(error, job_status_json)
    payload = j["payload"]

    # logger.info("got job_type: %s" % job_type)
    # logger.info("payload: %s" % payload)

    # set payload hash
    if j.get("payload_hash", None) is None:
        j["payload_hash"] = get_payload_hash(payload)
    payload_hash = j["payload_hash"]

    # do dedup
    if dedup is True:
        try:
            dj = query_dedup_job(payload_hash)
        except NoDedupJobFoundException as e:
            logger.info(str(e))
            dj = None
        if isinstance(dj, dict):
            dedup_msg = "orchestrator found duplicate job %s with status %s" % (
                dj["_id"],
                dj["status"],
            )
            job_status_json = {
                "uuid": job["job_id"],
                "job_id": job["job_id"],
                "payload_id": task_id,
                "payload_hash": payload_hash,
                "dedup": dedup,
                "dedup_job": dj["_id"],
                "status": "job-deduped",
                "job": job,
                "context": context,
                "dedup_msg": dedup_msg,
            }
            log_job_status(job_status_json)
            return [task_id]

    # if no explicit job or data type defined in orchestrator, add catch-all
    if job_type not in job_cfgs:
        # first check if data product type; if not then assume job type
        match = DATA_TYPE_RE.search(job_type)
        if match:
            return queue_dataset_evaluation(payload)
        else:
            match = JOB_TYPE_RE.search(job_type)
            jt = match.group(1) if match else job_type
            job_cfgs[job_type] = [
                {
                    "job_name": j.get("job_name", jt).replace(":", "__"),
                    "function": "utils.get_job_json",
                    "job_queues": [jt if job_queue is None else job_queue],
                }
            ]

    # get job json and queue jobs
    results = []
    for jc in job_cfgs[job_type]:
        func = get_function(jc["function"], add_to_sys_path=job_creators_dir)
        argspec = getargspec(func)
        try:
            if len(argspec.args) > 1 and "job_type" in argspec.args:
                match = JOB_TYPE_RE.search(job_type)
                jt = match.group(1) if match else job_type
                job = func(payload, jt)
            else:
                job = func(payload)
        except Exception as e:
            error = (
                "Job creator function %s failed to generate job JSON."
                % jc["function"]
            )
            error_info = ERROR_TMPL.substitute(orch_queue=orch_queue, error=error)
            job_status_json = {
                "uuid": job["job_id"],
                "job_id": job["job_id"],
                "payload_id": task_id,
                "payload_hash": payload_hash,
                "dedup": dedup,
                "status": "job-failed",
                "job": {"job_id": task_id, "name": task_id, "job_info": j},
                "context": context,
                "error": error_info,
                "short_error": get_short_error(error_info),
                "traceback": traceback.format_exc(),
            }
            log_job_status(job_status_json)
            raise OrchestratorExecutionError(error, job_status_json)

        # logger.info("job: %s" % job)

        # set context
        job.setdefault("context", {}).update(context)

        # override hard/soft time limits and ensure gap
        soft_time_limit, time_limit = ensure_hard_time_limit_gap(
            jc.get("soft_time_limit", soft_time_limit),
            jc.get("time_limit", time_limit),
        )

        # queue jobs
        for queue in jc["job_queues"]:

            # copy job
            job_json = copy.deepcopy(job)

            # set job id
            if "name" in job:
                job_json["job_id"] = get_job_id(job["name"])
            else:
                job_json["job_id"] = get_job_id(jc["job_name"])
            job_json["name"] = job_json["job_id"]

            # set container image name, url, mappings, and runtime options
            if image_name is not None:
                job_json["container_image_name"] = image_name
            if image_url is not None:
                job_json["container_image_url"] = image_url
            if image_mapping is not None:
                job_json["container_mappings"] = image_mapping
            if runtime_options is not None:
                job_json["runtime_options"] = runtime_options

            # set priority
            job_json["priority"] = priority

            # set tag
            if "tag" not in job_json and tag is not None:
                job_json["tag"] = tag

            # set username
            if "username" not in job_json and username is not None:
                job_json["username"] = username

            # set job_info
            time_queued = datetime.utcnow()
            job_json["job_info"] = {
                "id": job_json["job_id"],
                "job_queue": queue,
                "time_queued": time_queued.isoformat() + "Z",
                "time_limit": time_limit,
                "soft_time_limit": soft_time_limit,
                "payload_hash": payload_hash,
                "dedup": dedup,
                "job_payload": {
                    "job_type": job_type,
                    "payload_task_id": task_id,
                },
            }

            # generate celery task id
            job_json["task_id"] = uuid()

            # log queued status
            job_status_json = {
                "uuid": job_json["task_id"],
                "job_id": job_json["job_id"],
                "payload_id": task_id,
                "payload_hash": payload_hash,
                "dedup": dedup,
                "status": "job-queued",
                "job": job_json,
            }
            log_job_status(job_status_json)

            # submit job
            res = run_job.apply_async(
                (job_json,),
                queue=queue,
                time_limit=time_limit,
                soft_time_limit=soft_time_limit,
                priority=priority,
                task_id=job_json["task_id"],
            )

            # append result
            results.append(job_json["task_id"])

    return results
def writechannels39():
    xmltvpath = "epggrab/xmltv/channels"
    if not os.path.exists(xmltvpath):
        os.makedirs(xmltvpath)
    chnpath = 'channel'
    if not os.path.exists(chnpath):
        os.makedirs(chnpath)

    #input/iptv
    path = os.path.join('input', 'iptv')
    if not os.path.exists(path):
        os.makedirs(path)

    #input/iptv/config
    writejson(os.path.join(path, 'config'), {
        'uuid': uuid(),
        'skipinitscan': 1,
        'autodiscovery': 0
    })

    #input/iptv/networks/uuid()
    path = os.path.join(path, 'networks', uuid())
    if not os.path.exists(path):
        os.makedirs(path)
    writejson(os.path.join(path, 'config'), {
        'networkname': 'IPTV network',
        'skipinitscan': 1,
        'autodiscovery': 0
    })

    #input/iptv/networks/uuid()/muxes
    path = os.path.join(path, 'muxes')
    if not os.path.exists(path):
        os.mkdir(path)

    #one mux and service for each channel
    for channel in channels.values():
        muxid = uuid()
        muxpath = os.path.join(path, muxid)
        if not os.path.exists(muxpath):
            os.mkdir(muxpath)
        jsmux = {
            'iptv_url': "udp://@%s:%s" % (channel['ip'], channel['port']),
            'iptv_interface': 'eth1',
            'iptv_atsc': 0,
            'iptv_svcname': channel['name'],
            'enabled': 1,
            'initscan': 1  # mark mux as scanned
        }
        #input/iptv/networks/uuid()/muxes/uuid()/config file
        writejson(os.path.join(muxpath, 'config'), jsmux)

        #input/iptv/networks/uuid()/muxes/uuid()/services/uuid()
        svcpath = os.path.join(muxpath, 'services')
        if not os.path.exists(svcpath):
            os.mkdir(svcpath)
        svcid = uuid()
        jssvc = {
            'sid': 1,  # guess service id
            'svcname': channel['name'],
            'name': channel['name'],
            'dvb_servicetype': 1,
            'enabled': 1
        }
        writejson(os.path.join(svcpath, svcid), jssvc)

        #channel
        chanid = uuid()
        jschan = {
            'name': channel['name'],
            'dvr_pre_time': 0,
            'dvr_pst_time': 0,
            'services': [svcid]
        }
        if channel['number'] is not None:
            jschan['number'] = int(channel['number'])
        if channel['tags'] is not None:
            jschan['tags'] = list(tags[x]['num'] for x in channel['tags'])
        if channel['icon'] is not None:
            jschan['icon'] = channel['icon']
        writejson(os.path.join(chnpath, chanid), jschan)

        #epg
        #epggrab/xmltv/channels/#
        if channel['xmltv'] is not None:
            xmlid = channel['xmltv']
        else:
            xmlid = channel['name']
        jsepg = {'name': xmlid, 'channels': [chanid]}
        writejson(os.path.join(xmltvpath, chanid), jsepg)
class TestRemoteJobs(SimpleTestCase):
    def uuid():
        return uuid.uuid4().hex

    base_dir = os.path.dirname(__file__)
    local_source_file = os.path.join(base_dir, 'files/solution.c')
    remote_source_file = '/test_worker_manager/' + uuid() + 'add_solution.c'
    local_wrong_source_file = os.path.join(base_dir, 'files/wrong_solution.c')
    remote_wrong_source_file = '/test_worker_manager/' + uuid() + \
            'wrong_add_solution.c'
    binary_file = '/test_worker_manager/' + uuid() + 'add_solution'
    local_in_file = os.path.join(base_dir, 'files/in')
    remote_in_file = '/test_worker_manager/' + uuid() + 'in'
    local_out_file = os.path.join(base_dir, 'files/out')
    remote_out_file = '/test_worker_manager/' + uuid() + 'out'

    evaluation_recipe = [
        ('upload source', 'oioioi.evalmgr.tests.upload_source'),
        ('compile source', 'oioioi.evalmgr.tests.compile_source'),
        ('upload test', 'oioioi.evalmgr.tests.upload_inout'),
        ('run', 'oioioi.evalmgr.tests.run'),
    ]
    evaluation_env = dict(
        recipe=evaluation_recipe,
        local_source_file=local_source_file,
        remote_source_file=remote_source_file,
        binary_file=binary_file,
        local_in_file=local_in_file,
        remote_in_file=remote_in_file,
        local_out_file=local_out_file,
        remote_out_file=remote_out_file,
    )

    def tearDown(self):
        fc = get_client()
        for filename in (self.remote_source_file,
                         self.remote_wrong_source_file, self.remote_in_file,
                         self.remote_out_file):
            fc.delete_file(filename)

    @override_settings(
        SIOWORKERS_BACKEND='oioioi.evalmgr.tests.SioworkersBackend')
    def _run_with_dummy_sioworkers(self, testfn):
        testfn()

    def test_full_source_file_evaluation_with_dummy_sioworkers(self):
        self._run_with_dummy_sioworkers(self.test_full_source_file_evaluation)

    def test_multiple_source_file_evaluation_with_dummy_sioworkers(self):
        self._run_with_dummy_sioworkers(
            self.test_multiple_source_file_evaluation)

    def test_full_source_file_evaluation(self):
        env = self.evaluation_env.copy()
        env = evalmgr_job.delay(env).get()
        self.assertEqual('OK', env['result_code'])

    def test_multiple_source_file_evaluation(self):
        good_env = self.evaluation_env.copy()
        wrong_env = self.evaluation_env.copy()
        wrong_env.update(local_source_file=self.local_wrong_source_file,
                         remote_source_file=self.remote_wrong_source_file)
        good_result = evalmgr_job.delay(good_env)
        wrong_result = evalmgr_job.delay(wrong_env)
        self.assertEqual('OK', good_result.get()['result_code'])
        self.assertEqual('WA', wrong_result.get()['result_code'])
def __init__(self, name, email, password):
    self.id = str(uuid.uuid4())
    self.name = name
    self.email = email
    self.password = self.set_password(password)
def writechannels(networkname, udpxy, iface, output):
    #
    # Directory structure:
    #
    # input/iptv/config
    # input/iptv/networks/UUID/config - will create "IPTV network"
    # input/iptv/networks/UUID/muxes/UUID/config - one mux per channel
    # input/iptv/networks/UUID/muxes/UUID/services/UUID - one (fake) service per channel (-o service option)
    # channel/tag/UUID - channel tags
    # channel/config/UUID - channels (linked to the services and tags, -o channel option)
    # epggrab/xmltv/channels/UUID - EPG information (linked to the channel), -o channel option
    #
    xmltvpath = "epggrab/xmltv/channels"
    if not os.path.exists(xmltvpath):
        os.makedirs(xmltvpath)
    tagpath = 'channel/tag'
    if not os.path.exists(tagpath):
        os.makedirs(tagpath)
    chnpath = 'channel/config'
    if not os.path.exists(chnpath):
        os.makedirs(chnpath)

    #channel/tag/UUID
    for tag in tags.values():
        tag['id'] = uuid()
        jstag = {
            'enabled': 1,
            'internal': 0,
            'titledIcon': 0,
            'name': tag['name'],
            'comment': '',
            'icon': ''
        }
        writejson(os.path.join(tagpath, tag['id']), jstag)

    #input/iptv
    path = os.path.join('input', 'iptv')
    if not os.path.exists(path):
        os.makedirs(path)

    #input/iptv/config
    writejson(os.path.join(path, 'config'), {
        'uuid': uuid(),
        'skipinitscan': 1,
        'autodiscovery': 0
    })

    # Network
    # input/iptv/networks/uuid()
    path = os.path.join(path, 'networks', uuid())
    if not os.path.exists(path):
        os.makedirs(path)
    writejson(
        os.path.join(path, 'config'), {
            "priority": 1,
            "spriority": 1,
            "max_streams": 2,  # Max input streams (afterwards change to 0)
            "max_bandwidth": 0,  # Max bandwidth (Kbps)
            "max_timeout": 10,  # Max timeout (seconds)
            "networkname": networkname,  # Network name
            "nid": 0,
            "autodiscovery": "false",  # Network discovery
            "skipinitscan": "true",  # Skip initial scan
            "idlescan": "false",  # Idle scan (after scanning change to 0)
            "sid_chnum": "false",
            "ignore_chnum": "false",
            "localtime": "false"
        })

    #input/iptv/networks/uuid()/muxes
    path = os.path.join(path, 'muxes')
    if not os.path.exists(path):
        os.mkdir(path)

    #one mux and service for each channel
    for channel in canales.values():
        muxid = uuid()
        muxpath = os.path.join(path, muxid)
        if not os.path.exists(muxpath):
            os.mkdir(muxpath)
        if channel['port']:
            if udpxy is not None:
                # http://192.168.1.1:4022/udp/C.C.C.C:PPPP
                url = "http://%s/udp/%s:%s" % (udpxy, channel['ip'],
                                               channel['port'])
            else:
                url = "%s://@%s:%s" % (channel['scheme'], channel['ip'],
                                       channel['port'])
        else:
            if udpxy is not None:
                # http://192.168.1.1:4022/udp/C.C.C.C
                url = "http://%s/udp/%s" % (udpxy, channel['ip'])
            else:
                url = "%s://@%s" % (channel['scheme'], channel['ip'])
        jsmux = {
            'iptv_url': url,
            'iptv_interface': iface,
            'iptv_atsc': 0,
            'iptv_svcname': channel['name'],
            'iptv_muxname': channel['name'],
            'iptv_sname': channel['name'],
            'enabled': 1,
            'scan_result': 2  # mark scan result (1 - ok, 2 - failed)
        }
        # Show what we have written
        print "Inserted mux: " + channel['name'] + " - " + url

        #input/iptv/networks/uuid()/muxes/uuid()/config file
        writejson(os.path.join(muxpath, 'config'), jsmux)

        #input/iptv/networks/uuid()/muxes/uuid()/services/uuid()
        svcpath = os.path.join(muxpath, 'services')
        if not os.path.exists(svcpath):
            os.mkdir(svcpath)

        #create empty service with id 0
        svcid = None
        if output is not None:
            if 'servicios' in output:
                svcid = uuid()
                jssvc = {
                    'sid': 0,  # guess service id
                    'svcname': channel['name'],
                    'name': channel['name'],
                    'dvb_servicetype': 1,
                    'enabled': 1
                }
                writejson(os.path.join(svcpath, svcid), jssvc)
            else:
                svcid = None

        #channel/config
        if 'canales' in output:
            chanid = uuid()
            jschan = {
                'name': channel['name'],
                'dvr_pre_time': 0,
                'dvr_pst_time': 0,
                'services': [svcid]
            }
            if channel['number'] is not None:
                jschan['number'] = int(channel['number'])
            if channel['tags'] is not None:
                jschan['tags'] = list(tags[x]['id'] for x in channel['tags'])
            if channel['icon'] is not None:
                jschan['icon'] = channel['icon']
            writejson(os.path.join(chnpath, chanid), jschan)

            #epg
            #epggrab/xmltv/channels/#
            if channel['xmltv'] is not None:
                xmlid = channel['xmltv']
            else:
                xmlid = channel['name']
            jsepg = {'name': xmlid, 'channels': [chanid]}
            writejson(os.path.join(xmltvpath, chanid), jsepg)
def parse(self, new_buffer):
    self.my_buffer += new_buffer
    while len(self.my_buffer) > 0:
        if self.stream and self.stream_remain > 0:
            if self.stream_remain >= len(self.my_buffer):
                if self.debug:
                    if self.stream_size < 256 or len(self.my_buffer) < 10:
                        print 'binary:', self.my_buffer,
                    else:
                        print 'binary:', self.my_buffer[0:10], '.-.', \
                            self.my_buffer[-10:], '!!\n',
                self.stream.write(self.my_buffer)
                self.stream_remain -= len(self.my_buffer)
                self.my_buffer = ''
            else:
                if self.debug:
                    if self.stream_size < 127 or self.stream_remain < 10:
                        print 'binary:', self.my_buffer,
                    else:
                        print 'binary:', self.my_buffer[0:10], '._.', \
                            self.my_buffer[-10:], '!!!\n',
                self.stream.write(self.my_buffer[0:self.stream_remain])
                self.my_buffer = self.my_buffer[self.stream_remain + 1:]
                self.stream_remain = 0
            if self.stream_remain <= 0:
                if self.use_file:
                    obj = self.objects[-1]
                    obj['path'] = self.stream_fname
                    obj['size'] = self.stream_size
                    self.objects.append(None)
                    self.stream_fname = None
                else:
                    self.objects.append(self.stream.getvalue())
                self.stream.close()
                self.stream = None
        else:
            pos = self.my_buffer.find('\n', self.nospace)
            if pos < 0:
                tlen = len(self.my_buffer)
                self.nospace = tlen
                break
            else:
                line = self.my_buffer[0:pos]
                self.my_buffer = self.my_buffer[pos + 1:]
                self.nospace = 0
                if self.debug:
                    print 'line:', line
                if len(line) == 0:
                    self.results.append(self.objects)
                    self.objects = []
                    if self.debug:
                        print 'end message.'
                elif line[0] == '{':
                    self.obj_buffer = '{'
                elif line[0] == '}':
                    self.obj_buffer += line
                    try:
                        obj = json.loads(self.obj_buffer)
                    except ValueError, e:
                        print e
                        print "Error reading json:"
                        print self.obj_buffer
                    if 'stream_length' in obj:
                        self.stream_size = self.stream_remain = \
                            int(obj['stream_length'])
                        del obj['stream_length']
                        if (obj['type'] == 'file' and
                                self.stream_size > self.file_threshold) or \
                                ('tree_key' in obj and not self.one_file):
                            self.use_file = True
                            if glob.store:
                                self.stream_fname = \
                                    glob.store.tmp_file_path() + uuid()
                            else:
                                self.stream_fname = 'tmp_' + uuid()
                            self.stream = open(self.stream_fname, 'w+')
                        else:
                            self.use_file = False
                            self.stream = io.BytesIO(b"")
                    self.objects.append(obj)
                    self.obj_buffer = ''
                else:
                    self.obj_buffer += line
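# The parser above consumes a simple framed protocol: a '{' line opens an
# object, body lines accumulate, a '}' line closes and parses the JSON; an
# optional stream_length field announces that raw bytes follow, and an
# empty line ends a message. A hedged sketch of an emitter for that format
# (the framing is inferred from the parser; this helper is hypothetical):

import json

def emit_message(objs):
    # objs: list of (obj, payload-or-None); obj must be a non-empty dict,
    # since an empty body line would read as end-of-message
    parts = []
    for obj, payload in objs:
        if payload is not None:
            obj = dict(obj, stream_length=len(payload))
        parts.append('{\n' + json.dumps(obj)[1:-1] + '\n}\n')
        if payload is not None:
            parts.append(payload)
    parts.append('\n')  # blank line terminates the message
    return ''.join(parts)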
from flask import Flask, jsonify, request
import uuid
import json

from blockchain import Blockchain

app = Flask(__name__)

node_identifier = str(uuid.uuid4()).replace('-', '')

blockchain = Blockchain()


@app.route('/mine', methods=['GET'])
def mine():
    last_block = blockchain.last_block()
    last_proof = last_block['proof']
    proof = blockchain.proof_of_work(last_proof)

    blockchain.new_transaction(
        sender=0,
        recipient=node_identifier,
        amount=1
    )

    block = blockchain.new_block(proof)

    response = {
        'message': 'New block forged',
        'index': block['index'],
def writechannels39():
    xmltvpath = "epggrab/xmltv/channels"
    if not os.path.exists(xmltvpath):
        os.makedirs(xmltvpath)
    tagpath = 'channel/tag'
    if not os.path.exists(tagpath):
        os.makedirs(tagpath)
    chnpath = 'channel/config'
    if not os.path.exists(chnpath):
        os.makedirs(chnpath)

    #channel/tag/UUID
    for tag in tags.values():
        tag['id'] = uuid()
        jstag = {
            'enabled': 1,
            'internal': 0,
            'titledIcon': 0,
            'name': tag['name'],
            'comment': '',
            'icon': ''
        }
        writejson(os.path.join(tagpath, tag['id']), jstag)

    #input/iptv
    path = os.path.join('input', 'iptv')
    if not os.path.exists(path):
        os.makedirs(path)

    #input/iptv/config
    writejson(os.path.join(path, 'config'), {
        'uuid': uuid(),
        'skipinitscan': 1,
        'autodiscovery': 0
    })

    #input/iptv/networks/uuid()
    path = os.path.join(path, 'networks', uuid())
    if not os.path.exists(path):
        os.makedirs(path)
    writejson(os.path.join(path, 'config'), {
        'networkname': 'IPTV network',  # Network name
        'skipinitscan': 1,  # Skip initial scan
        'autodiscovery': 0,  # Network discovery
        'idlescan': 0,  # Idle scan
        'max_streams': 2,  # Max input streams
        'max_bandwidth': 0,  # Max bandwidth (Kbps)
        'max_timeout': 10  # Max timeout (seconds)
    })

    #input/iptv/networks/uuid()/muxes
    path = os.path.join(path, 'muxes')
    if not os.path.exists(path):
        os.mkdir(path)

    #one mux and service for each channel
    for channel in channels.values():
        muxid = uuid()
        muxpath = os.path.join(path, muxid)
        if not os.path.exists(muxpath):
            os.mkdir(muxpath)
        jsmux = {
            'iptv_url': "udp://@%s:%s" % (channel['ip'], channel['port']),
            'iptv_interface': 'eth1',
            'iptv_atsc': 0,
            'iptv_svcname': channel['name'],
            'iptv_muxname': channel['name'],
            'iptv_sname': channel['name'],
            'enabled': 1,
            'scan_result': 2  # mark scan result (1 - ok, 2 - failed)
        }
        #input/iptv/networks/uuid()/muxes/uuid()/config file
        writejson(os.path.join(muxpath, 'config'), jsmux)

        #input/iptv/networks/uuid()/muxes/uuid()/services/uuid()
        svcpath = os.path.join(muxpath, 'services')
        if not os.path.exists(svcpath):
            os.mkdir(svcpath)

        #TODO: create empty service with id 1 or don't
        if False:
            svcid = uuid()
            jssvc = {
                'sid': 1,  # guess service id
                'svcname': channel['name'],
                'name': channel['name'],
                'dvb_servicetype': 1,
                'enabled': 1
            }
            writejson(os.path.join(svcpath, svcid), jssvc)
        else:
            svcid = None

        #channel/config
        chanid = uuid()
        jschan = {
            'name': channel['name'],
            'dvr_pre_time': 0,
            'dvr_pst_time': 0,
            'services': [svcid]
        }
        if channel['number'] is not None:
            jschan['number'] = int(channel['number'])
        if channel['tags'] is not None:
            jschan['tags'] = list(tags[x]['id'] for x in channel['tags'])
        if channel['icon'] is not None:
            jschan['icon'] = channel['icon']
        writejson(os.path.join(chnpath, chanid), jschan)

        #epg
        #epggrab/xmltv/channels/#
        if channel['xmltv'] is not None:
            xmlid = channel['xmltv']
        else:
            xmlid = channel['name']
        jsepg = {'name': xmlid, 'channels': [chanid]}
        writejson(os.path.join(xmltvpath, chanid), jsepg)
def writechannels39(iface, output):
    xmltvpath = "epggrab/xmltv/channels"
    if not os.path.exists(xmltvpath):
        os.makedirs(xmltvpath)
    tagpath = 'channel/tag'
    if not os.path.exists(tagpath):
        os.makedirs(tagpath)
    chnpath = 'channel/config'
    if not os.path.exists(chnpath):
        os.makedirs(chnpath)

    #channel/tag/UUID
    for tag in tags.values():
        tag['id'] = uuid()
        jstag = {
            'enabled': 1,
            'internal': 0,
            'titledIcon': 0,
            'name': tag['name'],
            'comment': '',
            'icon': ''
        }
        writejson(os.path.join(tagpath, tag['id']), jstag)

    #input/iptv
    path = os.path.join('input', 'iptv')
    if not os.path.exists(path):
        os.makedirs(path)

    #input/iptv/config
    writejson(os.path.join(path, 'config'), {
        'uuid': uuid(),
        'skipinitscan': 1,
        'autodiscovery': 0
    })

    #input/iptv/networks/uuid()
    path = os.path.join(path, 'networks', uuid())
    if not os.path.exists(path):
        os.makedirs(path)
    writejson(os.path.join(path, 'config'), {
        'networkname': 'IPTV network',  # Network name
        'skipinitscan': 1,  # Skip initial scan
        'autodiscovery': 0,  # Network discovery
        'idlescan': 0,  # Idle scan
        'max_streams': 2,  # Max input streams
        'max_bandwidth': 0,  # Max bandwidth (Kbps)
        'max_timeout': 10  # Max timeout (seconds)
    })

    #input/iptv/networks/uuid()/muxes
    path = os.path.join(path, 'muxes')
    if not os.path.exists(path):
        os.mkdir(path)

    #one mux and service for each channel
    for channel in channels.values():
        muxid = uuid()
        muxpath = os.path.join(path, muxid)
        if not os.path.exists(muxpath):
            os.mkdir(muxpath)
        if channel['port']:
            url = "%s://@%s:%s" % (channel['scheme'], channel['ip'],
                                   channel['port'])
        else:
            url = "%s://@%s" % (channel['scheme'], channel['ip'])
        jsmux = {
            'iptv_url': url,
            'iptv_interface': iface,
            'iptv_atsc': 0,
            'iptv_svcname': channel['name'],
            'iptv_muxname': channel['name'],
            'iptv_sname': channel['name'],
            'enabled': 1,
            'scan_result': 2  # mark scan result (1 - ok, 2 - failed)
        }
        #input/iptv/networks/uuid()/muxes/uuid()/config file
        writejson(os.path.join(muxpath, 'config'), jsmux)

        #input/iptv/networks/uuid()/muxes/uuid()/services/uuid()
        svcpath = os.path.join(muxpath, 'services')
        if not os.path.exists(svcpath):
            os.mkdir(svcpath)

        #create empty service with id 0
        if 'service' in output:
            svcid = uuid()
            jssvc = {
                'sid': 0,  # guess service id
                'svcname': channel['name'],
                'name': channel['name'],
                'dvb_servicetype': 1,
                'enabled': 1
            }
            writejson(os.path.join(svcpath, svcid), jssvc)
        else:
            svcid = None

        #channel/config
        if 'channel' in output:
            chanid = uuid()
            jschan = {
                'name': channel['name'],
                'dvr_pre_time': 0,
                'dvr_pst_time': 0,
                'services': [svcid]
            }
            if channel['number'] is not None:
                jschan['number'] = int(channel['number'])
            if channel['tags'] is not None:
                jschan['tags'] = list(tags[x]['id'] for x in channel['tags'])
            if channel['icon'] is not None:
                jschan['icon'] = channel['icon']
            writejson(os.path.join(chnpath, chanid), jschan)

            #epg
            #epggrab/xmltv/channels/#
            if channel['xmltv'] is not None:
                xmlid = channel['xmltv']
            else:
                xmlid = channel['name']
            jsepg = {'name': xmlid, 'channels': [chanid]}
            writejson(os.path.join(xmltvpath, chanid), jsepg)
def get_absolute_url(self):
    return reverse('library-update_bookInstance', args=[str(self.id)])
import { v4 as uuid } from 'uuid'

const subscriptionKey = process.env.MICROSOFT_TRANSLATE_KEY_1
if (!subscriptionKey) {
  throw new Error('Environment variable for your subscription key is not set.')
}

// endpoints: dictionary/examples dictionary/lookup translate
const options = (from, to, text, endpoint, translation) => {
  const json = {
    method: 'POST',
    baseUrl: 'https://api.cognitive.microsofttranslator.com/',
    url: endpoint === 'translate' ? 'translate' : `dictionary/${endpoint}`,
    qs: {
      'api-version': '3.0',
      from: from,
      to: to
    },
    headers: {
      'Ocp-Apim-Subscription-Key': subscriptionKey,
      'Content-type': 'application/json',
      'X-ClientTraceId': uuid().toString()
    },
    body: [{
      Text: text,
      Translation: translation || ''
    }],
    json: true
  }
  return json
}

export default options
if cmd == "get": synid = sys.argv[3] outpath = sys.argv[4] dataset_id = int(sys.argv[5]) ent = syn.downloadEntity(synid) src_path = os.path.join(ent['cacheDir'], ent['files'][0]) shutil.copy(src_path, outpath) import uuid import binascii new_uuid = uuid.uuid4() if 'md5' in ent: new_uuid = uuid.UUID(bytes=binascii.unhexlify(ent['md5'])[:16], version=3) print "Setting uuid", new_uuid elif 'etag' in ent: new_uuid = uuid(ent['etag']) handle = open("galaxy.json", "w") handle.write(json.dumps({ 'type' : 'dataset', 'dataset_id' : dataset_id, 'ext' : 'auto', 'stdout' : '', 'name' : ent.name, 'uuid' : str(new_uuid) }) + "\n") handle.close() if cmd == "upload": etype = sys.argv[3]