def get_qr_queue(host=celeryconfig.SERVER_HOST):
    """Poll the zebra print-queue API and dispatch any pending label jobs.

    Fetches unfulfilled jobs from ``/api/zebra_queue/``, stamps each one
    with a ``fulfilled_date`` via PUT, then sends its ZPL code to the
    destination printer with ``do_send``.

    :param host: API server host; defaults to ``celeryconfig.SERVER_HOST``.
    """
    if not is_bootstrapped:
        bootstrap()
    res = Resource(host, manager=manager)
    auth_params = {'username': celeryconfig.ZPRINTER_USERNAME,
                   'api_key': celeryconfig.ZPRINTER_API_KEY}
    r = res.get('/api/zebra_queue/', params_dict=auth_params)
    # Renamed from `json`: that name shadowed the (simple)json module and
    # invited confusion with the stdlib module name.
    queue = simplejson.loads(r.body_string())
    if not printer_dict:
        # Printer lookup table is populated lazily on first use.
        get_printers()
    if queue['objects']:
        for instance in queue['objects']:
            uri = instance['resource_uri']
            zpl_code = instance['zpl_code']
            printer_uri = instance['destination_printer']
            printer_ip = printer_dict[printer_uri]['ip_address']
            printer_port = printer_dict[printer_uri]['port']
            # Mark the job fulfilled before printing so a printer failure
            # does not leave the job eligible for endless re-sending.
            instance['fulfilled_date'] = datetime.utcnow().strftime(
                "%Y-%m-%dT%H:%M:%S.000")
            res.put(uri, simplejson.dumps(instance),
                    headers={'Content-Type': 'application/json'},
                    params_dict=auth_params)
            do_send(printer_ip, printer_port, zpl_code, recv=False)
    else:
        logging.debug("no jobs")
def publish(namespace, name, version, description_file, tar_file, readme_file,
            readme_file_ext):
    ''' Publish a tarblob to the registry, if the request fails, an
        exception is raised, which either triggers re-authentication, or is
        turned into a return value by the decorators. (If successful, the
        decorated function returns None)
    '''
    url = '%s/%s/%s/versions/%s' % (
        Registry_Base_URL,
        namespace,
        name,
        version
    )

    if readme_file_ext == '.md':
        readme_section_name = 'readme.md'
    elif readme_file_ext == '':
        readme_section_name = 'readme'
    else:
        # BUG FIX: previously referenced the misspelled name
        # `readne_file_ext`, which raised NameError here instead of the
        # intended ValueError for unsupported readme types.
        raise ValueError('unsupported readme type: "%s"' % readme_file_ext)

    # description file is in place as text (so read it), tar file is a file
    body = OrderedDict([('metadata', description_file.read()),
                        ('tarball', tar_file),
                        (readme_section_name, readme_file)])
    headers = {}
    body, headers = multipart_form_encode(body, headers, uuid.uuid4().hex)

    auth = _registryAuthFilter()
    resource = Resource(url, pool=connection_pool.getPool(), filters=[auth])
    response = resource.put(
        headers=headers,
        payload=body
    )
    return None
def addOwner(namespace, name, owner):
    ''' Add an owner for a module or target (owners are the people with
        permission to publish versions and add/remove the owners).
    '''
    endpoint = '%s/%s/%s/owners/%s' % (
        Registry_Base_URL, namespace, name, owner
    )
    registry = Resource(endpoint,
                        pool=connection_pool.getPool(),
                        filters=[_registryAuthFilter()])
    try:
        # The server response body is not needed; a 404 means the
        # module/target does not exist.
        registry.put()
    except restkit_errors.ResourceNotFound:
        logger.error('no such %s, "%s"' % (namespace, name))
def save_base(self, raw=False, cls=None, origin=None, force_insert=False,
              force_update=False, using=None, update_fields=None):
    """
    Does the heavy-lifting involved in saving. Subclasses shouldn't need to
    override this method. It's separate from save() in order to hide the
    need for overrides of save() to pass around internal-only parameters
    ('raw', 'cls', and 'origin').

    ROA variant: persists the instance through the remote REST resource
    (PUT for updates, POST for creates) instead of the database, then
    re-parses the server response to refresh the primary key.
    """
    assert not (force_insert and force_update)
    record_exists = False
    if cls is None:
        cls = self.__class__
        meta = cls._meta
        if not meta.proxy:
            origin = cls
    else:
        meta = cls._meta

    if origin and not getattr(meta, "auto_created", False):
        signals.pre_save.send(sender=origin, instance=self, raw=raw)

    # Unused here; kept for parity with Django's save_base signature flow.
    model_name = str(meta)

    # If we are in a raw save, save the object exactly as presented.
    # That means that we don't try to be smart about saving attributes
    # that might have come from the parent class - we just save the
    # attributes we have been given to the class we have been given.
    # We also go through this process to defer the save of proxy objects
    # to their actual underlying model.
    if not raw or meta.proxy:
        if meta.proxy:
            org = cls
        else:
            org = None
        for parent, field in meta.parents.items():
            # At this point, parent's primary key field may be unknown
            # (for example, from administration form which doesn't fill
            # this field). If so, fill it.
            if (field and getattr(self, parent._meta.pk.attname) is None
                    and getattr(self, field.attname) is not None):
                setattr(self, parent._meta.pk.attname,
                        getattr(self, field.attname))

            self.save_base(cls=parent, origin=org, using=using)

            if field:
                setattr(self, field.attname, self._get_pk_val(parent._meta))
        if meta.proxy:
            return

    if not meta.proxy:
        pk_val = self._get_pk_val(meta)
        pk_is_set = pk_val is not None

        get_args = {'format': ROA_FORMAT}
        get_args.update(ROA_CUSTOM_ARGS)

        # Construct Json payload
        serializer = self.get_serializer(self)
        payload = self.get_renderer().render(serializer.data)

        # Add serializer content_type
        headers = get_roa_headers()
        headers.update(self.get_serializer_content_type())

        # check if resource uses a custom primary key
        if meta.pk.attname not in ('pk', 'id'):
            # consider it might be inserting so check it first
            # @todo: try to improve this block to check if custom primary
            # key is not None first
            resource = Resource(self.get_resource_url_detail(),
                                headers=headers,
                                filters=ROA_FILTERS)
            try:
                response = resource.get(payload=None, **get_args)
            except ResourceNotFound:
                # since such resource does not exist, it's actually creating
                pk_is_set = False
            except RequestFailed:
                pk_is_set = False

        # NOTE: `or` binds looser than `and`, so this reads as
        # force_update or (pk_is_set and self.pk is not None).
        if force_update or pk_is_set and self.pk is not None:
            record_exists = True
            resource = Resource(self.get_resource_url_detail(),
                                filters=ROA_FILTERS)
            try:
                logger.debug(u"""Modifying : "%s" through %s with payload "%s" and GET args "%s" """ % (
                    force_unicode(self),
                    force_unicode(resource.uri),
                    force_unicode(payload),
                    force_unicode(get_args)))
                response = resource.put(payload=payload, headers=headers,
                                        **get_args)
            except RequestFailed as e:
                raise ROAException(e)
        else:
            record_exists = False
            resource = Resource(self.get_resource_url_list(),
                                filters=ROA_FILTERS)
            try:
                logger.debug(u"""Creating : "%s" through %s with payload "%s" and GET args "%s" """ % (
                    force_unicode(self),
                    force_unicode(resource.uri),
                    force_unicode(payload),
                    force_unicode(get_args)))
                response = resource.post(payload=payload, headers=headers,
                                         **get_args)
            except RequestFailed as e:
                raise ROAException(e)

        response = force_unicode(response.body_string()).encode(
            DEFAULT_CHARSET)

        # Re-parse the server's representation so the local instance picks
        # up any server-assigned fields (most importantly the pk).
        data = self.get_parser().parse(StringIO(response))
        serializer = self.get_serializer(data=data)
        if not serializer.is_valid():
            raise ROAException(
                u'Invalid deserialization for {} model: {}'.format(
                    self, serializer.errors))

        try:
            self.pk = int(serializer.object.pk)
        except ValueError:
            self.pk = serializer.object.pk
        # NOTE(review): rebinding `self` only affects the local name; the
        # caller's instance keeps the pk assigned above.
        self = serializer.object

    if origin:
        signals.post_save.send(sender=origin, instance=self,
                               created=(not record_exists), raw=raw)
def save_base(self, raw=False, cls=None, origin=None, force_insert=False,
              force_update=False, using=None, update_fields=None):
    """
    Does the heavy-lifting involved in saving. Subclasses shouldn't need to
    override this method. It's separate from save() in order to hide the
    need for overrides of save() to pass around internal-only parameters
    ('raw', 'cls', and 'origin').

    ROA variant: persists the instance through the remote REST resource
    (PUT for updates, POST for creates, with SSL options from
    ``ROA_SSL_ARGS``), then rebuilds the instance from the response.
    """
    assert not (force_insert and force_update)
    record_exists = False
    if cls is None:
        cls = self.__class__
        meta = cls._meta
        if not meta.proxy:
            origin = cls
    else:
        meta = cls._meta

    if origin and not getattr(meta, "auto_created", False):
        signals.pre_save.send(sender=origin, instance=self, raw=raw)

    # Unused here; kept for parity with Django's save_base signature flow.
    model_name = str(meta)

    # If we are in a raw save, save the object exactly as presented.
    # That means that we don't try to be smart about saving attributes
    # that might have come from the parent class - we just save the
    # attributes we have been given to the class we have been given.
    # We also go through this process to defer the save of proxy objects
    # to their actual underlying model.
    if not raw or meta.proxy:
        if meta.proxy:
            org = cls
        else:
            org = None
        for parent, field in meta.parents.items():
            # At this point, parent's primary key field may be unknown
            # (for example, from administration form which doesn't fill
            # this field). If so, fill it.
            if (field and getattr(self, parent._meta.pk.attname) is None
                    and getattr(self, field.attname) is not None):
                setattr(self, parent._meta.pk.attname,
                        getattr(self, field.attname))

            self.save_base(cls=parent, origin=org, using=using)

            if field:
                setattr(self, field.attname, self._get_pk_val(parent._meta))
        if meta.proxy:
            return

    if not meta.proxy:
        pk_val = self._get_pk_val(meta)
        pk_is_set = pk_val is not None

        get_args = {}
        get_args[ROA_ARGS_NAMES_MAPPING.get('FORMAT', 'format')] = ROA_FORMAT
        get_args.update(ROA_CUSTOM_ARGS)

        # Construct Json payload
        serializer = self.get_serializer(self)
        payload = self.get_renderer().render(serializer.data)

        # Add serializer content_type
        headers = get_roa_headers()
        headers.update(self.get_serializer_content_type())

        # check if resource uses a custom primary key
        if meta.pk.attname not in ('pk', 'id'):
            # consider it might be inserting so check it first
            # @todo: try to improve this block to check if custom primary
            # key is not None first
            resource = Resource(self.get_resource_url_detail(),
                                filters=ROA_FILTERS, **ROA_SSL_ARGS)
            try:
                response = resource.get(payload=None, headers=headers,
                                        **get_args)
            except ResourceNotFound:
                # since such resource does not exist, it's actually creating
                pk_is_set = False
            except RequestFailed:
                pk_is_set = False

        # NOTE: `or` binds looser than `and`, so this reads as
        # force_update or (pk_is_set and self.pk is not None).
        if force_update or pk_is_set and self.pk is not None:
            record_exists = True
            resource = Resource(self.get_resource_url_detail(),
                                filters=ROA_FILTERS, **ROA_SSL_ARGS)
            try:
                logger.debug(
                    u"""Modifying : "%s" through %s with payload "%s" and GET args "%s" """
                    % (force_unicode(self),
                       force_unicode(resource.uri),
                       force_unicode(payload),
                       force_unicode(get_args)))
                response = resource.put(payload=payload, headers=headers,
                                        **get_args)
            except RequestFailed as e:
                raise ROAException(e)
        else:
            record_exists = False
            resource = Resource(self.get_resource_url_list(),
                                filters=ROA_FILTERS, **ROA_SSL_ARGS)
            try:
                logger.debug(
                    u"""Creating : "%s" through %s with payload "%s" and GET args "%s" """
                    % (force_unicode(self),
                       force_unicode(resource.uri),
                       force_unicode(payload),
                       force_unicode(get_args)))
                response = resource.post(payload=payload, headers=headers,
                                         **get_args)
            except RequestFailed as e:
                raise ROAException(e)

        response = force_unicode(
            response.body_string()).encode(DEFAULT_CHARSET)

        # Re-parse the server's representation so the local instance picks
        # up any server-assigned fields (most importantly the pk).
        data = self.get_parser().parse(StringIO(response))
        serializer = self.get_serializer(data=data)
        if not serializer.is_valid():
            raise ROAException(
                u'Invalid deserialization for %s model: %s' % (
                    self, serializer.errors))

        obj = self.__class__(**serializer.initial_data)
        try:
            self.pk = int(obj.pk)
        except ValueError:
            self.pk = obj.pk
        # NOTE(review): rebinding `self` only affects the local name; the
        # caller's instance keeps the pk assigned above.
        self = obj

    if origin:
        signals.post_save.send(sender=origin, instance=self,
                               created=(not record_exists), raw=raw)
class StoreClient(object):
    """REST client for a versioned key/value store service.

    Wraps a restkit ``Resource`` and exposes one method per server
    endpoint (stores, branches, merge, get/put/delete, keys, entries,
    trees).  Every endpoint shares the same response contract: a JSON
    body on HTTP 200 (decoded with ``self.json_object_hook``), otherwise
    ``None`` — that shared handling now lives in ``_decode`` instead of
    being copy-pasted in every method.
    """

    def __init__(self, endpoint, name, **kwargs):
        # rstrip('/') already leaves slash-free strings untouched, so the
        # original endswith('/') guard was redundant.
        endpoint = endpoint.rstrip('/')
        if 'pool' not in kwargs:
            kwargs['pool'] = ConnectionPool(factory=Connection)
        # NOTE(review): .get() (not .pop()) intentionally keeps these keys
        # in kwargs, so they are also forwarded to Resource below —
        # preserved as-is in case Resource relies on receiving them.
        self.json_default = kwargs.get('json_default', json_util.default)
        self.json_object_hook = kwargs.get('json_object_hook',
                                           json_util.object_hook)
        self.resource = Resource(endpoint, **kwargs)
        self.name = name

    def _decode(self, response):
        # Shared response handling: JSON body on 200, otherwise None.
        if response.status_int == 200:
            return json.loads(response.body_string(),
                              object_hook=self.json_object_hook)
        return None

    def create_store(self, store):
        return self._decode(self.resource.post("/stores/%s" % store))

    def get_stores(self):
        return self._decode(self.resource.get("/stores"))

    def create_branch(self, store, branch, parent=None):
        path = _build_path(store, "branch", branch)
        params = _build_params(parent=parent)
        return self._decode(self.resource.post(path, params_dict=params))

    def get_branch(self, store, branch):
        path = _build_path(store, "branch", branch)
        return self._decode(self.resource.get(path))

    def merge(self, store, source, target='master', author=None,
              committer=None):
        path = _build_path(store, "merge", source)
        params = _build_params(target=target, author=author,
                               committer=committer)
        return self._decode(self.resource.post(path, params_dict=params))

    def get(self, store, key=None, shallow=False, branch='master',
            commit_sha=None):
        path = _entry_path(store, key)
        params = _build_params(shallow=shallow, branch=branch,
                               commit_sha=commit_sha)
        return self._decode(self.resource.get(path, params_dict=params))

    def put(self, store, key, value, flatten_keys=True, branch='master',
            author=None, committer=None):
        path = _entry_path(store, key)
        payload = json.dumps(value, default=self.json_default)
        # Server expects 0/1 rather than a JSON boolean.
        flatten_keys = 1 if flatten_keys else 0
        params = _build_params(flatten_keys=flatten_keys, branch=branch,
                               author=author, committer=committer)
        headers = {'Content-Type': 'application/json'}
        return self._decode(self.resource.put(path, headers=headers,
                                              payload=payload,
                                              params_dict=params))

    def delete(self, store, key, branch='master', author=None,
               committer=None):
        path = _entry_path(store, key)
        params = _build_params(branch=branch, author=author,
                               committer=committer)
        return self._decode(self.resource.delete(path, params_dict=params))

    def keys(self, store, key=None, pattern=None, depth=None, filter_by=None,
             branch='master', commit_sha=None):
        path = _build_path(store, "keys", key)
        params = _build_params(pattern=pattern, depth=depth,
                               filter_by=filter_by, branch=branch,
                               commit_sha=commit_sha)
        return self._decode(self.resource.get(path, params_dict=params))

    def entries(self, store, key=None, pattern=None, depth=None,
                branch='master', commit_sha=None):
        path = _build_path(store, "entries", key)
        params = _build_params(pattern=pattern, depth=depth, branch=branch,
                               commit_sha=commit_sha)
        return self._decode(self.resource.get(path, params_dict=params))

    def trees(self, store, key=None, pattern=None, depth=None,
              object_depth=None, branch='master', commit_sha=None):
        path = _build_path(store, "trees", key)
        params = _build_params(pattern=pattern, depth=depth,
                               object_depth=object_depth, branch=branch,
                               commit_sha=commit_sha)
        return self._decode(self.resource.get(path, params_dict=params))
class WWWcomm:
    """HTTP client for the Isadore WWW server.

    Handles configuration download, remote-control (RC) command status
    reporting, and sensor-data upload, authenticated with the MID
    password.
    """

    def __init__(self, MIDpass, configPath, baseURL, uploadPath,
                 RCstatusPath, MIDname=None):
        # obtain base configuration
        self.MIDpass = MIDpass
        self.baseURL = baseURL
        self.configPath = configPath
        self.resource = Resource(baseURL, timeout=60)
        self.uploadPath = uploadPath
        self.RCstatusPath = RCstatusPath
        self.MIDname = MIDname

    def getConfig(self):
        """Download the MID configuration as a dict.

        Raises MIDconfigDownloadError when the server replies non-200.
        """
        params_dict = {"mid_pass": self.MIDpass, "ts": "blah"}
        if self.MIDname:
            params_dict["mid_name"] = self.MIDname
        output = self.resource.get(path=self.configPath,
                                   params_dict=params_dict, verify=False)
        logging.debug("WWW reply status code: " + str(output.status_int) +
                      ".")
        if output.status_int != 200:
            raise MIDconfigDownloadError(output)
        return json.loads(output.body_string())

    def RC_cmd_status(self, control_id, fetched_p):
        """Report whether RC command `control_id` was fetched (PUT 1/0).

        Raises MID_RCstatusError on transport failure or a non-204 reply.
        """
        # BUG FIX: the two branches previously duplicated the entire PUT
        # call, differing only in the "fetched" value; additionally, the
        # deliberate non-204 raise sat inside the blanket except, so it was
        # caught and re-raised with a misleading "unsuccessful" message.
        # The status check now happens after the try block.
        try:
            output = self.resource.put(
                path=self.RCstatusPath + str(control_id),
                params_dict={"fetched": 1 if fetched_p else 0,
                             "MID_pass": self.MIDpass},
                verify=False)
        except Exception as e:
            raise MID_RCstatusError(
                "Failed to inform WWW of unsuccessful RC command: " +
                str(control_id) + ". This is the result of exception: " +
                str(e))
        if output.status_int != 204:
            raise MID_RCstatusError(
                "Failed to inform WWW of successful RC command: " +
                str(control_id) + ". Received response status int: " +
                str(output.status_int))

    def uploadData(self, paramString):
        """POST a pre-encoded data string to the upload endpoint.

        Returns the raw response; the body is skipped to release the
        pooled connection.
        """
        # NOTE: can remove verify param once we upgrade Python to 2.7.9+
        output = self.resource.post(path=self.uploadPath,
                                    params_dict={"data": paramString},
                                    verify=False)
        output.skip_body()  # to release the connection
        return output

    def uploadReading(self, passwd, readingTime, cmds, errors):
        """Upload one sensor reading (flattened from `cmds`) via requests.

        NOTE(review): the `errors` parameter is currently ignored — the
        payload always sends an empty list; confirm against the server API.
        """
        payload = {
            "mid_pass": passwd,
            "datetime": readingTime.isoformat(),
            "data": json.dumps(
                reduce(lambda x, y: x + y.to_JSON_WWW_data(readingTime),
                       cmds, [])),
            "errors": []
        }
        # TODO: deal with requests-specific exceptions. in particular, the
        # timeout exception. for now, letting MID.py deal with it.
        # NOTE: can remove verify param once we upgrade Python to 2.7.9+
        logging.debug(payload["data"])
        r = requests.post(self.baseURL + self.uploadPath, data=payload,
                          timeout=60.0, verify=False)
def save_base(self, raw=False, cls=None, origin=None, force_insert=False,
              force_update=False, using=None):
    """
    Does the heavy-lifting involved in saving. Subclasses shouldn't need to
    override this method. It's separate from save() in order to hide the
    need for overrides of save() to pass around internal-only parameters
    ('raw', 'cls', and 'origin').

    Older ROA variant: builds the payload either via a serializer's
    serialize_object() or field-by-field, then PUTs (update) or POSTs
    (create) it to the remote resource.
    """
    assert not (force_insert and force_update)
    if cls is None:
        cls = self.__class__
        meta = cls._meta
        if not meta.proxy:
            origin = cls
    else:
        meta = cls._meta

    if origin and not getattr(meta, "auto_created", False):
        signals.pre_save.send(sender=origin, instance=self, raw=raw)

    # If we are in a raw save, save the object exactly as presented.
    # That means that we don't try to be smart about saving attributes
    # that might have come from the parent class - we just save the
    # attributes we have been given to the class we have been given.
    # We also go through this process to defer the save of proxy objects
    # to their actual underlying model.
    if not raw or meta.proxy:
        if meta.proxy:
            org = cls
        else:
            org = None
        for parent, field in meta.parents.items():
            # At this point, parent's primary key field may be unknown
            # (for example, from administration form which doesn't fill
            # this field). If so, fill it.
            if field and getattr(self, parent._meta.pk.attname) is None \
                    and getattr(self, field.attname) is not None:
                setattr(self, parent._meta.pk.attname,
                        getattr(self, field.attname))

            self.save_base(cls=parent, origin=org)

            if field:
                setattr(self, field.attname, self._get_pk_val(parent._meta))
        if meta.proxy:
            return

    if not meta.proxy:
        pk_val = self._get_pk_val(meta)
        pk_set = pk_val is not None
        get_args = {'format': ROA_FORMAT}
        get_args.update(getattr(settings, "ROA_CUSTOM_ARGS", {}))

        serializer = serializers.get_serializer(ROA_FORMAT)
        if hasattr(serializer, 'serialize_object'):
            payload = serializer().serialize_object(self)
        else:
            # Build the payload field-by-field when the serializer cannot
            # serialize whole objects.
            payload = {}
            for field in meta.local_fields:
                # Handle FK fields
                if isinstance(field, models.ForeignKey):
                    field_attr = getattr(self, field.name)
                    if field_attr is None:
                        payload[field.attname] = None
                    else:
                        payload[field.attname] = field_attr.pk
                # Handle all other fields
                else:
                    payload[field.name] = field.value_to_string(self)

            # Handle M2M relations in case of update.  NOTE: `or` binds
            # looser than `and`, so this reads as
            # force_update or (pk_set and self.pk is not None).
            if force_update or pk_set and self.pk is not None:
                for field in meta.many_to_many:
                    # First try to get ids from var set in query's
                    # add/remove/clear
                    if hasattr(self, '%s_updated_ids' % field.attname):
                        field_pks = getattr(
                            self, '%s_updated_ids' % field.attname)
                    else:
                        field_pks = [obj.pk for obj
                                     in field.value_from_object(self)]
                    payload[field.attname] = ','.join(
                        smart_unicode(pk) for pk in field_pks)

        if force_update or pk_set and self.pk is not None:
            record_exists = True
            resource = Resource(self.get_resource_url_detail(),
                                headers=ROA_HEADERS,
                                filters=ROA_FILTERS)
            try:
                logger.debug(u"""Modifying : "%s" through %s with payload "%s" and GET args "%s" """ % (
                    force_unicode(self),
                    force_unicode(resource.uri),
                    force_unicode(payload),
                    force_unicode(get_args)))
                # `except X, e` modernised to `except X as e` for
                # consistency with the other save_base variants (valid on
                # Python 2.6+).
                response = resource.put(payload=payload, **get_args)
            except RequestFailed as e:
                raise ROAException(e)
        else:
            record_exists = False
            resource = Resource(self.get_resource_url_list(),
                                headers=ROA_HEADERS,
                                filters=ROA_FILTERS)
            try:
                logger.debug(u"""Creating : "%s" through %s with payload "%s" and GET args "%s" """ % (
                    force_unicode(self),
                    force_unicode(resource.uri),
                    force_unicode(payload),
                    force_unicode(get_args)))
                response = resource.post(payload=payload, **get_args)
            except RequestFailed as e:
                raise ROAException(e)
# NOTE(review): the first statements below are the tail of an
# option-parsing loop whose header is outside this chunk; `o`, `a`, and
# `d` are presumably bound there (getopt-style) — confirm against the
# full script.  Indentation reconstructed from collapsed source.
s = datetime.datetime.strptime(a, "%Y-%m-%d").date()
if o in ("-n", "--days"):
    n = int(a)

print "Database: %s" % d
print " Start: %s" % s
print " Days: %s" % n

t1 = time.time()
founds = 0

# Ensure the target database exists before importing.
db = Resource(d)
try:
    db.get()
except ResourceNotFound:
    db.put()  # create a database

# NOTE(review): `list` shadows the builtin of the same name.
list = url_list(days = n)
print "Collecting iEPG data from %s Urls" % len(list)
for url in list:
    print "Fetching epg list menu from %s ..." % url
    content = fetch_content(url)
    links = extract_epg_link(content)
    print "%s iEPGs found." % len(links)
    founds += len(links)
    for link in links:
        import_iepg(db, link)

t2 = time.time()
print "%s iEPGs imported in %.2fs sec." % (founds, (t2 - t1))