def upload_photo(cookiefile, full_filename, qqid):
    """Upload one photo for the given QQ id and return the raw server reply.

    The upload endpoint is chosen from the QQ id's numeric range; the
    multipart body is generated by ``generate_multipart_photo`` and sent
    via ``post`` with the streaming handlers installed.
    """
    skey = get_cookie_value(cookiefile, "skey")
    boundary = "----" + gen_boundary()
    qqid = int(qqid)

    # Pick the endpoint template by QQ-id range, then bind the boundary.
    if qqid > 2000000000:
        template = UPLOAD_URL
    elif qqid > 1100000000:
        template = UPLOAD_URL2
    elif qqid > 1000000000:
        template = UPLOAD_URL3
    elif qqid < 200000000:
        template = UPLOAD_URL2
    else:
        template = UPLOAD_URL4
    url = template % boundary

    datagen, headers = generate_multipart_photo(qqid, skey, full_filename,
                                                boundary)
    headers['User-Agent'] = DEFAULT_UA
    headers['Accept'] = 'text/x-json,application/json;q=0.9,*/*;q=0.8'
    headers['Accept-Language'] = 'en-US,en;q=0.5'

    logger.info("Uploading photo %s %s -> %s", qqid, full_filename, url)
    result = post(url, datagen, headers=headers, cookiefile=cookiefile,
                  is_accept_ending=True, ext_handlers=get_handlers())
    logger.debug("Uploaded %s %s -> %s : result %s",
                 qqid, full_filename, url, result)
    return result
def __init__(self, server_url, user_id, device_id, client_version,
             proxies=None, proxy_exceptions=None,
             password=None, token=None, repository="default",
             ignored_prefixes=None, ignored_suffixes=None,
             timeout=20, blob_timeout=None, cookie_jar=None,
             upload_tmp_dir=None):
    """Initialize the remote client.

    Stores timeouts and ignore lists, normalizes the server URL, records
    identity/auth info, builds the urllib2 openers (regular + streaming),
    detects whether a proxy is in effect, and finally calls
    ``fetch_api()`` (which contacts the server).
    """
    self.timeout = timeout
    self.blob_timeout = blob_timeout
    # Filename prefixes/suffixes to skip during sync; use project
    # defaults when the caller does not override them.
    if ignored_prefixes is not None:
        self.ignored_prefixes = ignored_prefixes
    else:
        self.ignored_prefixes = DEFAULT_IGNORED_PREFIXES
    if ignored_suffixes is not None:
        self.ignored_suffixes = ignored_suffixes
    else:
        self.ignored_suffixes = DEFAULT_IGNORED_SUFFIXES
    # Scratch directory used while assembling uploads.
    self.upload_tmp_dir = (upload_tmp_dir if upload_tmp_dir is not None
                           else tempfile.gettempdir())
    # Normalize so URL joins below never produce a double or missing '/'.
    if not server_url.endswith('/'):
        server_url += '/'
    self.server_url = server_url

    # TODO: actually use the repository info in the requests
    self.repository = repository

    self.user_id = user_id
    self.device_id = device_id
    self.client_version = client_version
    self._update_auth(password=password, token=token)

    self.cookie_jar = cookie_jar
    cookie_processor = urllib2.HTTPCookieProcessor(
        cookiejar=cookie_jar)

    # Get proxy handler
    proxy_handler = get_proxy_handler(proxies,
                                      proxy_exceptions=proxy_exceptions,
                                      url=self.server_url)

    # Build URL openers: the streaming one adds the poster handlers so
    # large uploads are not buffered in memory.
    self.opener = urllib2.build_opener(cookie_processor, proxy_handler)
    self.streaming_opener = urllib2.build_opener(cookie_processor,
                                                 proxy_handler,
                                                 *get_handlers())

    # Set Proxy flag: true when any ProxyHandler carries a proxy mapping.
    self.is_proxy = False
    for handler in self.opener.handlers:
        if isinstance(handler, ProxyHandler):
            if handler.proxies:
                self.is_proxy = True

    self.automation_url = server_url + 'site/automation/'
    self.batch_upload_url = 'batch/upload'
    self.batch_execute_url = 'batch/execute'
    self.fetch_api()
def upload_photo2(cookiefile, full_filename, qqid, sid):
    """Upload a photo, trying a prioritized list of endpoints.

    The endpoint order depends on the QQ id's numeric range. Each
    endpoint is tried in turn until one succeeds; a ``-503`` error means
    the session needs a fresh login, so retrying other endpoints is
    pointless and the loop stops immediately.

    Returns the parsed ``data`` dict from the JSONP response (empty dict
    if nothing was attempted).
    """
    skey = get_cookie_value(cookiefile, "skey")
    boundary = "----" + gen_boundary()
    qqid = int(qqid)

    # Endpoint priority by QQ-id range; bind the boundary once.
    if qqid > 2000000000:
        templates = [UPLOAD_URL, UPLOAD_URL2, UPLOAD_URL3, UPLOAD_URL4]
    elif qqid > 1100000000:
        templates = [UPLOAD_URL2, UPLOAD_URL, UPLOAD_URL3, UPLOAD_URL4]
    elif qqid > 1000000000:
        templates = [UPLOAD_URL3, UPLOAD_URL2, UPLOAD_URL, UPLOAD_URL4]
    elif qqid < 200000000:
        templates = [UPLOAD_URL2, UPLOAD_URL, UPLOAD_URL3, UPLOAD_URL4]
    else:
        templates = [UPLOAD_URL4, UPLOAD_URL2, UPLOAD_URL3, UPLOAD_URL]
    urls = [template % boundary for template in templates]

    photo_json = {}
    for url in urls:
        datagen, headers = generate_multipart_photo(qqid, skey,
                                                    full_filename, boundary)
        headers['User-Agent'] = DEFAULT_UA
        headers['Accept'] = 'text/x-json,application/json;q=0.9,*/*;q=0.8'
        headers['Accept-Language'] = 'en-US,en;q=0.5'
        logger.info("Uploading photo %s %s -> %s", qqid, full_filename, url)
        result = post(url, datagen, headers=headers, cookiefile=cookiefile,
                      is_accept_ending=True, ext_handlers=get_handlers())
        logger.debug("Uploaded %s %s -> %s : result %s",
                     qqid, full_filename, url, result)
        # The server answers with JSONP: strip the "_Callback(...);"
        # wrapper before parsing.
        photo_json = simplejson.loads(
            result.replace("_Callback(", "").replace(");", ""))['data']
        # Fix: use "in" instead of dict.has_key() (deprecated in 2.x,
        # removed in Python 3), and logger.warning instead of the
        # deprecated logger.warn alias.
        if 'error' in photo_json:
            logger.warning("Post failed qq %s -> %s %s %s", qqid, url,
                           photo_json['error'],
                           photo_json['msg'].encode('utf8'))
            if photo_json['error'] == -503:
                # Session expired: no endpoint will accept the upload.
                log_paperboy("Need login(photo) xks %s" % sid)
                break
        else:
            break
    return photo_json
def post_upload(self):
    """Upload every file in ``self.tmp_folder_path`` to the configured URL.

    Uses poster's multipart encoder (optionally behind HTTP basic auth)
    and expects a JSON reply with ``code`` == 1 on success; any other
    code raises with the server-provided message.

    Raises:
        Exception: when the server reports a non-success code.
    """
    import urllib2
    import json
    from poster.encode import multipart_encode
    from poster.streaminghttp import get_handlers

    username = self.get_username()
    password = self.get_password()
    url = self.get_url()
    path = self.get_path()
    handlers = get_handlers()
    if (username and password):
        # Need identification
        password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
        password_mgr.add_password(
            None, 'http://localhost/~marcu/WombatGIS/admin-secure',
            username, password)
        handlers.append(urllib2.HTTPBasicAuthHandler(password_mgr))
    opener = urllib2.build_opener(*handlers)
    urllib2.install_opener(opener)
    for file_name in os.listdir(self.tmp_folder_path):
        self.display_message('Sending ' + file_name + ' :')
        absolute_file_name = os.path.join(self.tmp_folder_path, file_name)
        # Fix: open in binary mode -- uploads are arbitrary payloads and
        # text mode corrupts binary data on Windows.
        upload_file = open(absolute_file_name, 'rb')
        try:
            fields = [('uploaded_file', upload_file)]
            if (path):
                fields.append(('destination_path', path))
            data, headers = multipart_encode(fields)
            request = urllib2.Request(url, data=data, headers=headers)
            response = urllib2.urlopen(request)
            try:
                json_data = json.loads(response.read())
            finally:
                response.close()
        finally:
            # Fix: the original rebound `f` to the HTTP response before
            # closing, so the uploaded file handle leaked every iteration.
            upload_file.close()
        if json_data['code'] == 1:
            self.display_success_message('uploaded')
        else:
            raise Exception(json_data['message'])
def post_upload(self):
    """Send each file from ``self.tmp_folder_path`` to the upload URL.

    Builds a poster multipart request (adding HTTP basic auth handlers
    when credentials are configured) and checks the JSON response:
    ``code`` == 1 means success, anything else raises with the server
    message.

    Raises:
        Exception: when the server reply signals failure.
    """
    import urllib2
    import json
    from poster.encode import multipart_encode
    from poster.streaminghttp import get_handlers

    username = self.get_username()
    password = self.get_password()
    url = self.get_url()
    path = self.get_path()
    handlers = get_handlers()
    if username and password:
        # Need identification
        password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
        password_mgr.add_password(None,
                                  "http://localhost/~marcu/WombatGIS/admin-secure",
                                  username, password)
        handlers.append(urllib2.HTTPBasicAuthHandler(password_mgr))
    opener = urllib2.build_opener(*handlers)
    urllib2.install_opener(opener)
    for file_name in os.listdir(self.tmp_folder_path):
        self.display_message("Sending " + file_name + " :")
        absolute_file_name = os.path.join(self.tmp_folder_path, file_name)
        # Fix: binary mode -- text mode ("r") corrupts binary uploads on
        # Windows.
        source = open(absolute_file_name, "rb")
        try:
            fields = [("uploaded_file", source)]
            if path:
                fields.append(("destination_path", path))
            data, headers = multipart_encode(fields)
            request = urllib2.Request(url, data=data, headers=headers)
            response = urllib2.urlopen(request)
            try:
                json_data = json.loads(response.read())
            finally:
                response.close()
        finally:
            # Fix: the original reused `f` for the HTTP response, so the
            # upload file handle was never closed (descriptor leak).
            source.close()
        if json_data["code"] == 1:
            self.display_success_message("uploaded")
        else:
            raise Exception(json_data["message"])
def __init__(self, server_url, user_id, device_id, client_version,
             proxies=None, proxy_exceptions=None,
             password=None, token=None,
             repository=DEFAULT_REPOSITORY_NAME,
             ignored_prefixes=None, ignored_suffixes=None,
             timeout=20, blob_timeout=60, cookie_jar=None,
             upload_tmp_dir=None, check_suspended=None):
    """Initialize the remote client.

    Clamps timeouts to sane defaults, stores ignore lists and identity
    info, builds the urllib2 openers (regular + streaming), records the
    proxy state, sets up the automation/batch URLs including the new
    batch upload API, and finally calls ``fetch_api()`` (server call).
    """
    # Function to check during long-running processing like upload /
    # download if the synchronization thread needs to be suspended
    self.check_suspended = check_suspended
    # Don't allow null or negative timeouts: fall back to the defaults.
    if timeout is None or timeout < 0:
        timeout = 20
    self.timeout = timeout
    if blob_timeout is None or blob_timeout < 0:
        blob_timeout = 60
    self.blob_timeout = blob_timeout
    # Filename prefixes/suffixes to skip; project defaults when unset.
    if ignored_prefixes is not None:
        self.ignored_prefixes = ignored_prefixes
    else:
        self.ignored_prefixes = DEFAULT_IGNORED_PREFIXES
    if ignored_suffixes is not None:
        self.ignored_suffixes = ignored_suffixes
    else:
        self.ignored_suffixes = DEFAULT_IGNORED_SUFFIXES
    # Scratch directory used while assembling uploads.
    self.upload_tmp_dir = (upload_tmp_dir if upload_tmp_dir is not None
                           else tempfile.gettempdir())
    # Normalize so URL joins below are always well-formed.
    if not server_url.endswith('/'):
        server_url += '/'
    self.server_url = server_url
    self.repository = repository
    self.user_id = user_id
    self.device_id = device_id
    self.client_version = client_version
    self._update_auth(password=password, token=token)
    self.cookie_jar = cookie_jar
    cookie_processor = urllib2.HTTPCookieProcessor(cookiejar=cookie_jar)

    # Get proxy handler
    proxy_handler = get_proxy_handler(proxies,
                                      proxy_exceptions=proxy_exceptions,
                                      url=self.server_url)

    # Build URL openers: the streaming one adds the poster handlers so
    # large uploads are not buffered in memory.
    self.opener = urllib2.build_opener(cookie_processor, proxy_handler)
    self.streaming_opener = urllib2.build_opener(cookie_processor,
                                                 proxy_handler,
                                                 *get_handlers())

    # Set Proxy flag from the effective proxy list of the opener.
    self.is_proxy = False
    opener_proxies = get_opener_proxies(self.opener)
    log.trace('Proxy configuration: %s, effective proxy list: %r',
              get_proxy_config(proxies), opener_proxies)
    if opener_proxies:
        self.is_proxy = True

    self.automation_url = server_url + 'site/automation/'
    self.batch_upload_url = 'batch/upload'
    self.batch_execute_url = 'batch/execute'

    # New batch upload API
    self.new_upload_api_available = True
    self.rest_api_url = server_url + 'api/v1/'
    self.batch_upload_path = 'upload'
    self.fetch_api()
def __init__(self, server_url, user_id, device_id, client_version,
             proxies=None, proxy_exceptions=None,
             password=None, token=None,
             repository=DEFAULT_REPOSITORY_NAME,
             ignored_prefixes=None, ignored_suffixes=None,
             timeout=20, blob_timeout=60, cookie_jar=None,
             upload_tmp_dir=None, check_suspended=None):
    """Initialize the remote client.

    Clamps timeouts to sane defaults, stores ignore lists and identity
    info, builds the urllib2 openers (regular + streaming), records the
    proxy state, sets the automation/batch URLs, and finally calls
    ``fetch_api()`` (which contacts the server).
    """
    # Function to check during long-running processing like upload /
    # download if the synchronization thread needs to be suspended
    self.check_suspended = check_suspended
    # Don't allow null or negative timeouts: fall back to the defaults.
    if timeout is None or timeout < 0:
        timeout = 20
    self.timeout = timeout
    if blob_timeout is None or blob_timeout < 0:
        blob_timeout = 60
    self.blob_timeout = blob_timeout
    # Filename prefixes/suffixes to skip; project defaults when unset.
    if ignored_prefixes is not None:
        self.ignored_prefixes = ignored_prefixes
    else:
        self.ignored_prefixes = DEFAULT_IGNORED_PREFIXES
    if ignored_suffixes is not None:
        self.ignored_suffixes = ignored_suffixes
    else:
        self.ignored_suffixes = DEFAULT_IGNORED_SUFFIXES
    # Scratch directory used while assembling uploads.
    self.upload_tmp_dir = (upload_tmp_dir if upload_tmp_dir is not None
                           else tempfile.gettempdir())
    # Normalize so URL joins below are always well-formed.
    if not server_url.endswith('/'):
        server_url += '/'
    self.server_url = server_url
    self.repository = repository
    self.user_id = user_id
    self.device_id = device_id
    self.client_version = client_version
    self._update_auth(password=password, token=token)
    self.cookie_jar = cookie_jar
    cookie_processor = urllib2.HTTPCookieProcessor(
        cookiejar=cookie_jar)

    # Get proxy handler
    proxy_handler = get_proxy_handler(proxies,
                                      proxy_exceptions=proxy_exceptions,
                                      url=self.server_url)

    # Build URL openers: the streaming one adds the poster handlers so
    # large uploads are not buffered in memory.
    self.opener = urllib2.build_opener(cookie_processor, proxy_handler)
    self.streaming_opener = urllib2.build_opener(cookie_processor,
                                                 proxy_handler,
                                                 *get_handlers())

    # Set Proxy flag from the effective proxy list of the opener.
    self.is_proxy = False
    opener_proxies = get_opener_proxies(self.opener)
    log.debug('Proxy configuration: %s, effective proxy list: %r',
              get_proxy_config(proxies), opener_proxies)
    if opener_proxies:
        self.is_proxy = True

    self.automation_url = server_url + 'site/automation/'
    self.batch_upload_url = 'batch/upload'
    self.batch_execute_url = 'batch/execute'
    self.fetch_api()
def upload_photo2(cookiefile, full_filename, qqid, sid):
    """Upload a photo, retrying across a prioritized endpoint list.

    Endpoint priority is derived from the QQ id's numeric range. The
    loop stops on the first success; a ``-503`` error means the session
    needs a new login, so further retries are skipped.

    Returns the parsed ``data`` dict from the JSONP response (empty dict
    if no attempt was made).
    """
    skey = get_cookie_value(cookiefile, "skey")
    boundary = "----" + gen_boundary()
    qqid = int(qqid)

    # Endpoint priority by QQ-id range; bind the boundary once.
    if qqid > 2000000000:
        templates = [UPLOAD_URL, UPLOAD_URL2, UPLOAD_URL3, UPLOAD_URL4]
    elif qqid > 1100000000:
        templates = [UPLOAD_URL2, UPLOAD_URL, UPLOAD_URL3, UPLOAD_URL4]
    elif qqid > 1000000000:
        templates = [UPLOAD_URL3, UPLOAD_URL2, UPLOAD_URL, UPLOAD_URL4]
    elif qqid < 200000000:
        templates = [UPLOAD_URL2, UPLOAD_URL, UPLOAD_URL3, UPLOAD_URL4]
    else:
        templates = [UPLOAD_URL4, UPLOAD_URL2, UPLOAD_URL3, UPLOAD_URL]
    urls = [template % boundary for template in templates]

    photo_json = {}
    for url in urls:
        datagen, headers = generate_multipart_photo(qqid, skey,
                                                    full_filename, boundary)
        headers['User-Agent'] = DEFAULT_UA
        headers['Accept'] = 'text/x-json,application/json;q=0.9,*/*;q=0.8'
        headers['Accept-Language'] = 'en-US,en;q=0.5'
        logger.info("Uploading photo %s %s -> %s", qqid, full_filename, url)
        result = post(url, datagen, headers=headers, cookiefile=cookiefile,
                      is_accept_ending=True, ext_handlers=get_handlers())
        logger.debug("Uploaded %s %s -> %s : result %s",
                     qqid, full_filename, url, result)
        # JSONP response: strip the "_Callback(...);" wrapper first.
        photo_json = simplejson.loads(
            result.replace("_Callback(", "").replace(");", ""))['data']
        # Fix: "in" instead of dict.has_key() (deprecated in 2.x, removed
        # in Python 3); logger.warning instead of deprecated logger.warn.
        if 'error' in photo_json:
            logger.warning("Post failed qq %s -> %s %s %s", qqid, url,
                           photo_json['error'],
                           photo_json['msg'].encode('utf8'))
            if photo_json['error'] == -503:
                # Session expired: no endpoint will accept the upload.
                log_paperboy("Need login(photo) xks %s" % sid)
                break
        else:
            break
    return photo_json
def __init__(self, server_url, user_id, device_id, client_version,
             proxies=None, proxy_exceptions=None,
             password=None, token=None, repository="default",
             ignored_prefixes=None, ignored_suffixes=None,
             timeout=20, blob_timeout=None, cookie_jar=None,
             upload_tmp_dir=None, check_suspended=None):
    """Initialize the remote client.

    Stores timeouts and ignore lists, normalizes the server URL, records
    identity/auth info, builds the urllib2 openers (regular + streaming),
    detects the proxy state, initializes ``current_action`` tracking and
    finally calls ``fetch_api()`` (which contacts the server).
    """
    # Function to check during long-running processing like upload /
    # download if the synchronization thread needs to be suspended
    self.check_suspended = check_suspended
    self.timeout = timeout
    self.blob_timeout = blob_timeout
    # Filename prefixes/suffixes to skip; project defaults when unset.
    if ignored_prefixes is not None:
        self.ignored_prefixes = ignored_prefixes
    else:
        self.ignored_prefixes = DEFAULT_IGNORED_PREFIXES
    if ignored_suffixes is not None:
        self.ignored_suffixes = ignored_suffixes
    else:
        self.ignored_suffixes = DEFAULT_IGNORED_SUFFIXES
    # Scratch directory used while assembling uploads.
    self.upload_tmp_dir = (upload_tmp_dir if upload_tmp_dir is not None
                           else tempfile.gettempdir())
    # Normalize so URL joins below are always well-formed.
    if not server_url.endswith('/'):
        server_url += '/'
    self.server_url = server_url

    # TODO: actually use the repository info in the requests
    self.repository = repository

    self.user_id = user_id
    self.device_id = device_id
    self.client_version = client_version
    self._update_auth(password=password, token=token)

    self.cookie_jar = cookie_jar
    cookie_processor = urllib2.HTTPCookieProcessor(cookiejar=cookie_jar)

    # Get proxy handler
    proxy_handler = get_proxy_handler(proxies,
                                      proxy_exceptions=proxy_exceptions,
                                      url=self.server_url)

    # Build URL openers: the streaming one adds the poster handlers so
    # large uploads are not buffered in memory.
    self.opener = urllib2.build_opener(cookie_processor, proxy_handler)
    self.streaming_opener = urllib2.build_opener(cookie_processor,
                                                 proxy_handler,
                                                 *get_handlers())

    # Set Proxy flag: true when any ProxyHandler carries a proxy mapping.
    self.is_proxy = False
    for handler in self.opener.handlers:
        if isinstance(handler, ProxyHandler):
            if handler.proxies:
                self.is_proxy = True

    self.automation_url = server_url + 'site/automation/'
    self.batch_upload_url = 'batch/upload'
    self.batch_execute_url = 'batch/execute'
    # Tracks the action currently in progress (None when idle).
    self.current_action = None
    self.fetch_api()
def __init__(self, server_url, user_id, device_id, client_version,
             proxies=None, proxy_exceptions=None,
             password=None, token=None, repository=Options.remote_repo,
             timeout=20, blob_timeout=60, cookie_jar=None,
             upload_tmp_dir=None, check_suspended=None):
    """Initialize the remote client.

    Clamps timeouts to sane defaults, records identity/auth info, builds
    the urllib2 openers (regular + streaming), logs the effective proxy
    configuration, sets the automation/batch URLs including the new
    batch upload API, and finally calls ``check_access()`` (server call).
    """
    # Rebind the module-level logger lazily at construction time.
    global log
    log = get_logger(__name__)

    # Function to check during long-running processing like upload /
    # download if the synchronization thread needs to be suspended
    self.check_suspended = check_suspended
    # Don't allow null or negative timeouts: fall back to the defaults.
    if timeout is None or timeout < 0:
        timeout = 20
    self.timeout = timeout
    if blob_timeout is None or blob_timeout < 0:
        blob_timeout = 60
    self.blob_timeout = blob_timeout
    # Scratch directory used while assembling uploads.
    self.upload_tmp_dir = (upload_tmp_dir if upload_tmp_dir is not None
                           else tempfile.gettempdir())
    # Normalize so URL joins below are always well-formed.
    if not server_url.endswith('/'):
        server_url += '/'
    self.server_url = server_url
    self.repository = repository
    self.user_id = user_id
    self.device_id = device_id
    self.client_version = client_version
    self._update_auth(password=password, token=token)
    self.cookie_jar = cookie_jar
    cookie_processor = urllib2.HTTPCookieProcessor(
        cookiejar=cookie_jar)

    # Get proxy handler
    proxy_handler = get_proxy_handler(proxies,
                                      proxy_exceptions=proxy_exceptions,
                                      url=self.server_url)

    # Build URL openers: the streaming one adds the poster handlers so
    # large uploads are not buffered in memory.
    self.opener = urllib2.build_opener(cookie_processor, proxy_handler)
    self.streaming_opener = urllib2.build_opener(cookie_processor,
                                                 proxy_handler,
                                                 *get_handlers())

    # Set Proxy flag from the effective proxy list of the opener.
    self.is_proxy = False
    opener_proxies = get_opener_proxies(self.opener)
    log.trace('Proxy configuration: %s, effective proxy list: %r',
              get_proxy_config(proxies), opener_proxies)
    if opener_proxies:
        self.is_proxy = True

    self.automation_url = server_url + 'site/automation/'
    self.batch_upload_url = 'batch/upload'
    self.batch_execute_url = 'batch/execute'

    # New batch upload API
    self.new_upload_api_available = True
    self.rest_api_url = server_url + 'api/v1/'
    self.batch_upload_path = 'upload'
    self.is_event_log_id = True
    self.check_access()