def __init__(self, updater):
    """Set up download bookkeeping for *updater*'s pending file set.

    Derives the remote/backup roots from wherever the manifest was
    fetched, so per-file URLs can be joined onto them later.
    """
    self.updater = updater
    self.unchecked_files = list(updater.update_files)
    self.errored_files = []
    self.downloaded_files = []
    self.num_files = len(self.unchecked_files)

    # The manifest may have come from the primary remote root or from
    # its equivalent mirror; compute the relative path against whichever
    # one actually served it.
    manifest_dir = path.path(updater.manifest_path).parent
    if updater.manifest_path.startswith(self.remote_root_base):
        relpath = self.remote_root_base.relpathto(manifest_dir)
    else:
        relpath = self.remote_root_base_equivalent.relpathto(manifest_dir)

    self.local_root = path.path(updater.temp_dir)
    self.remote_root = net.httpjoin(self.remote_root_base, relpath) + "/"
    self.backup_root = net.httpjoin(self.backup_root_base, relpath) + "/"
    self.callback = None
    self.cancelling = self.updater.cancelling
def __init__(self, *location_parts, **k):
    """Resolve *location_parts* into one local or remote content location.

    Parts are folded left to right: an absolute "http:" part restarts the
    resolution, otherwise each part is joined onto the current URL or
    filesystem path.

    Keyword 'httpopen' overrides the HTTP opener (defaults to
    asynchttp.httpopen).
    """
    self.httpopen = k.pop('httpopen', asynchttp.httpopen)

    remaining = list(location_parts)
    current = remaining.pop(0)
    for part in remaining:
        if part.startswith("http:"):
            # An absolute URL discards everything accumulated so far.
            current = part
        elif current.startswith("http:"):
            current = net.httpjoin(current, part)
        else:
            # path will handle relative vs. absolute; a part with an
            # extension is treated as a file, so join next to it.
            base = path.path(current)
            if base.ext:
                base = base.parent
            current = (base / part).normpath()

    self.web = current.startswith("http:")
    self.content_location = path.path(current)
    self.content = None
    self._load_callback = None
    self.meta = None
def on_page(self, data = None):
    """Handle one page of a paged API response.

    Accumulates page data and either requests the next page or fires the
    stored callback with everything collected so far.
    """
    if data is None:
        # Take the callback atomically so it can only fire once.
        callback, self.callback = self.callback, None
        if callback is not None:
            return callback.error(ValueError("no data was received"))
        else:
            log.info("no data was received and no callback present. (%r)", self)
            return
    # collect_data is expected to stash this page's items; a falsy return
    # aborts pagination.
    if not self.collect_data(data):
        return False
    numOmittedEntries = int(data.get('numOmittedEntries', 0))
    self.count = int(data['itemsPerPage'])
    self.startIndex = int(data['startIndex'])
    totalResults = int(data['totalResults'])
    log.info("got a page of data: %r/%r/%r", self.startIndex, self.count, totalResults)
    log.info_s('\tdata=%r', data)
    # self.max, when set, caps how many results we will page through.
    if self.max is not None:
        _max = min(self.max, totalResults)
    else:
        _max = totalResults
    if (self.startIndex + self.count) < _max:
        # More pages remain: advance the window and re-issue the request.
        self.startIndex += self.count
        # Prefer the OpenSocial API base when the API object has one.
        self.conn.api.clear_waiting(net.httpjoin(getattr(self.conn.api, 'OS_API_BASE', self.conn.api.API_BASE), self.uri))
        self._next_page()
    else:
        # Done: clear the callback before invoking it, and hand over the
        # accumulated data while resetting our buffer.
        callback, self.callback = self.callback, None
        if callback is not None:
            data, self.data = self.data, []
            callback.success(data)
def refresh_comments(self, post_id, callback=None):
    """Re-fetch the comment list for the network update *post_id*.

    Clears any pending request for the same endpoint first, then issues a
    fresh GET whose result is routed to process_comments (or to
    _post_comment_error on failure).
    """
    endpoint = 'people/~/network/updates/key=%s/update-comments' % post_id.encode(
        'url')
    self.api.clear_waiting(net.httpjoin(self.api.API_BASE, endpoint))

    def on_success(resp):
        return self.process_comments(post_id, resp, callback=callback)

    def on_error(err):
        return self._post_comment_error(err, callback=callback)

    self.api.call(endpoint).GET(success=on_success, error=on_error)
def get_endpoint(self, base=None, name=None):
    """Return the URL this method should be invoked at.

    Resolution order: explicit *base* + *name* arguments, then a
    pre-computed self.endpoint, then self.api_base / self.name (falling
    back to the arguments).  Raises ValueError if nothing resolves.
    """
    if base and name:
        # Both pieces supplied by the caller: join and we are done.
        return net.httpjoin(base, name)

    explicit = getattr(self, 'endpoint', None)
    if explicit is not None:
        return explicit

    base = getattr(self, 'api_base', base)
    name = getattr(self, 'name', name)
    if base is None or name is None:
        raise ValueError("No known endpoint")
    return net.httpjoin(base, name)
def get_endpoint(self, base = None, name = None):
    """Determine the endpoint (URL) for this method.

    Explicit arguments win; otherwise a stored self.endpoint is used;
    otherwise the URL is assembled from self.api_base and self.name,
    with the arguments as last-resort fallbacks.

    Raises ValueError when no combination yields an endpoint.
    """
    # Since the method can be created fairly loosely, the endpoint may
    # live in several places; check them from most to least specific.
    if base and name:
        return net.httpjoin(base, name)

    endpoint = getattr(self, 'endpoint', None)
    if endpoint is None:
        resolved_base = getattr(self, 'api_base', base)
        resolved_name = getattr(self, 'name', name)
        if None in (resolved_base, resolved_name):
            raise ValueError("No known endpoint")
        endpoint = net.httpjoin(resolved_base, resolved_name)
    return endpoint
def do(self):
    """Fetch the update yaml from self.server/self.source, prompting the
    operator to retry on failure (Python 2 build-tool code)."""
    import util.net as net
    from buildutil.promptlib import prompt

    updateyaml_data = None
    abs_source = net.httpjoin(self.server, self.source)
    # Keep trying until we get data or the operator declines to retry.
    while updateyaml_data is None:
        try:
            print 'Fetching', abs_source
            res = urllib2.urlopen(abs_source)
        except Exception, e:
            tryagain = prompt("Error opening %r (e = %r). Try again?" % (abs_source, e), bool, True)
            if not tryagain:
                # Give up: updateyaml_data stays None.
                break
        else:
            updateyaml_data = res.read()
def _normalize_url(self, client, **kw):
    """Build the request URL for this endpoint and pick a response format.

    OpenSocial calls use the client's OS_API_BASE (when present) and
    disable default parameters.  A format extension baked into the URL
    overrides any 'format' keyword.

    Returns (url, format); also stores the format back into kw.
    """
    if kw.get('OpenSocial', False):
        api_base = getattr(client, 'OS_API_BASE', client.API_BASE)
        kw['use_default_params'] = False
    else:
        api_base = client.API_BASE
    url = net.httpjoin(api_base, self.endpoint, keepquery=True)

    format = kw.pop('format', client.DEFAULT_FORMAT)
    # The URL's own extension trumps the requested format.
    for ext in ('json', 'xml', 'atom'):
        if url.endswith('.' + ext):
            format = ext
            break

    kw['format'] = format
    return url, format
def _normalize_url(self, client, **kw):
    """Compute the full URL and response format for this endpoint call.

    When kw requests OpenSocial, the OS_API_BASE attribute is preferred
    (falling back to API_BASE) and default parameters are turned off.
    The format is taken from kw (default client.DEFAULT_FORMAT) unless
    the URL itself ends in a recognized extension.

    Returns (url, format) and records the format in kw['format'].
    """
    opensocial = kw.get('OpenSocial', False)
    if opensocial:
        api_base = getattr(client, 'OS_API_BASE', client.API_BASE)
        kw['use_default_params'] = False
    else:
        api_base = client.API_BASE

    url = net.httpjoin(api_base, self.endpoint, keepquery = True)
    format = kw.pop('format', client.DEFAULT_FORMAT)

    # An explicit extension on the URL decides the format outright.
    # (At most one suffix can match, so iteration order is irrelevant.)
    suffix_to_format = {'.json': 'json', '.xml': 'xml', '.atom': 'atom'}
    for suffix, fmt in suffix_to_format.items():
        if url.endswith(suffix):
            format = fmt
            break

    kw['format'] = format
    return url, format
def on_page(self, data=None):
    """Consume a single page of a paginated API response.

    Stores the page via collect_data, then either advances to the next
    page or completes by invoking the saved callback with all collected
    data.  A None *data* argument signals an empty/failed response.
    """
    if data is None:
        # Swap the callback out first so it cannot fire twice.
        callback, self.callback = self.callback, None
        if callback is not None:
            return callback.error(ValueError("no data was received"))
        else:
            log.info("no data was received and no callback present. (%r)", self)
            return
    # collect_data returning falsy cancels further paging.
    if not self.collect_data(data):
        return False
    numOmittedEntries = int(data.get('numOmittedEntries', 0))
    self.count = int(data['itemsPerPage'])
    self.startIndex = int(data['startIndex'])
    totalResults = int(data['totalResults'])
    log.info("got a page of data: %r/%r/%r", self.startIndex, self.count, totalResults)
    log.info_s('\tdata=%r', data)
    # Honor an optional cap (self.max) on total results to fetch.
    if self.max is not None:
        _max = min(self.max, totalResults)
    else:
        _max = totalResults
    if (self.startIndex + self.count) < _max:
        # More data remains: bump the window, clear any stale pending
        # request for this URI, and fetch the next page.
        self.startIndex += self.count
        self.conn.api.clear_waiting(
            net.httpjoin(
                getattr(self.conn.api, 'OS_API_BASE', self.conn.api.API_BASE),
                self.uri))
        self._next_page()
    else:
        # Finished: detach the callback, then hand it the accumulated
        # data while resetting the buffer for reuse.
        callback, self.callback = self.callback, None
        if callback is not None:
            data, self.data = self.data, []
            callback.success(data)
def refresh_comments(self, post_id, callback = None):
    """Reload the comments attached to network update *post_id*.

    Any in-flight request for the same URL is cleared before the GET is
    issued; responses flow to process_comments, errors to
    _post_comment_error, each forwarding *callback*.
    """
    key = post_id.encode('url')
    uri = 'people/~/network/updates/key=%s/update-comments' % key

    full_url = net.httpjoin(self.api.API_BASE, uri)
    self.api.clear_waiting(full_url)

    request = self.api.call(uri)
    request.GET(success = lambda resp: self.process_comments(post_id, resp, callback = callback),
                error = lambda err: self._post_comment_error(err, callback = callback))
self._failed_network_updates += 1 else: updates.append(u) except Exception, e: self._failed_network_updates += 1 traceback.print_exc() if (len(updates) + self._failed_network_updates) >= expected_items: self._got_network_updates() else: params = dict(start = start + count, count = count) if self.last_update_time is not None: params['after'] = self.last_update_time log.info("need to request more updates! (%d/%d)", len(updates), expected_items) self.api.clear_waiting(net.httpjoin(self.api.API_BASE, 'people/~/network')) self.api.call('people/~/network').GET(parameters = params, success = self._check_network_updates, error = self._network_updates_error) def _got_network_updates(self): log.info('got all network updates') updates = filter(None, self.pending.get('updates', [])) if updates: to_notify = [x for x in updates if x.timestamp > self.last_update_time] self.last_update_time = max(int(x.timestamp) for x in updates) self.newsfeed_updates(to_notify) self._cache('last_update_time', self.last_update_time) self.update_post()
def API_BASE(self):
    """Root URL for API requests, rooted under self.BASE."""
    api_root = net.httpjoin(self.BASE, 'api/')
    return api_root
def weblink(self, resource=""):
    """Return the full web URL for *resource* under self.webroot."""
    target = net.httpjoin(self.webroot, resource)
    return target
def remote_path_for_file(self, file, backup = False):
    """Build the remote URL for *file*.

    Joins the file's path onto self.remote_root, or self.backup_root
    when *backup* is true.
    """
    root = self.backup_root if backup else self.remote_root
    # Remote paths are URL-style: forward slashes, percent-encoded.
    return net.httpjoin(root, file.path.replace("\\", "/").encode('url'))
def weblink(resource = ''):
    """Join *resource* onto the configured site webroot URL."""
    root = WEBROOT()
    return net.httpjoin(root, resource)
def weblink(self, resource=''):
    """Return the web address for *resource* relative to this object's webroot."""
    webroot = self.webroot
    return net.httpjoin(webroot, resource)
updates.append(u) except Exception, e: self._failed_network_updates += 1 traceback.print_exc() if (len(updates) + self._failed_network_updates) >= expected_items: self._got_network_updates() else: params = dict(start=start + count, count=count) if self.last_update_time is not None: params['after'] = self.last_update_time log.info("need to request more updates! (%d/%d)", len(updates), expected_items) self.api.clear_waiting( net.httpjoin(self.api.API_BASE, 'people/~/network')) self.api.call('people/~/network').GET( parameters=params, success=self._check_network_updates, error=self._network_updates_error) def _got_network_updates(self): log.info('got all network updates') updates = filter(None, self.pending.get('updates', [])) if updates: to_notify = [ x for x in updates if x.timestamp > self.last_update_time ] self.last_update_time = max(int(x.timestamp) for x in updates) self.newsfeed_updates(to_notify)