def test_redirect(self):
    """Ensures that redirect following resolves every redirecting
    endpoint to a final 200 status, never surfacing the 302."""

    def assert_redirected(url, **kwargs):
        # runs the handled GET request following redirects and then
        # verifies that the final response code is a plain success
        _data, response = appier.get(url, handle = True, redirect = True, **kwargs)
        status = response.getcode()
        self.assertNotEqual(status, 302)
        self.assertEqual(status, 200)

    base = "https://%s/" % self.httpbin

    # redirect target passed as a (to be encoded) query parameter
    assert_redirected(
        "https://%s/redirect-to" % self.httpbin,
        params = dict(url = base)
    )

    # redirect target pre-quoted and embedded directly in the URL
    assert_redirected(
        "https://%s/redirect-to?url=%s" % (self.httpbin, appier.legacy.quote(base))
    )

    # chained relative redirects (two hops) must also resolve
    assert_redirected("https://%s/relative-redirect/2" % self.httpbin)
def test_redirect(self):
    """Verifies that the several httpbin redirect endpoints are
    transparently followed, yielding a final 200 response."""

    def assert_final_ok(response):
        status = response.getcode()
        self.assertNotEqual(status, 302)
        self.assertEqual(status, 200)

    # redirect target provided as a query parameter to be encoded
    _data, response = appier.get(
        "https://httpbin.org/redirect-to",
        params = dict(url = "https://httpbin.org"),
        handle = True,
        redirect = True
    )
    assert_final_ok(response)

    # redirect target already percent-encoded inside the URL itself
    _data, response = appier.get(
        "http://httpbin.org/redirect-to?url=https%3a%2f%2Fhttpbin.org%2f",
        handle = True,
        redirect = True
    )
    assert_final_ok(response)

    # chained relative redirects (two hops) must also resolve
    _data, response = appier.get(
        "https://httpbin.org/relative-redirect/2",
        handle = True,
        redirect = True
    )
    assert_final_ok(response)
def test_timeout(self):
    """Confirms that a too-short timeout aborts the request while a
    generous one allows the delayed response to be retrieved."""
    # a one second timeout against a three second delay must raise
    self.assertRaises(
        BaseException,
        lambda: appier.get(
            "https://%s/delay/3" % self.httpbin,
            handle=True,
            redirect=True,
            timeout=1
        )
    )
    # with a comfortable timeout the delayed payload is received
    data, response = appier.get(
        "https://%s/delay/1" % self.httpbin,
        handle=True,
        redirect=True,
        timeout=30
    )
    self.assertEqual(response.getcode(), 200)
    self.assertNotEqual(len(data), 0)
    self.assertNotEqual(data, None)
def get_price_ripe(
    self,
    currency = None,
    country = None,
    attributes = None
):
    """Resolves the final price of the product by querying the remote
    (ripe) price endpoint, falling back to the locally stored price
    when no price URL is configured.

    The attributes payload is a JSON string carrying the customization
    parts plus optional embossing/letters personalization values.
    """
    if not self.price_url: return self.price

    attributes_m = json.loads(attributes)
    parts = attributes_m.get("parts", {})
    embossing = attributes_m.get("embossing", None)
    letters = attributes_m.get("letters", None)

    # encodes each customization part as a "name:material:color"
    # triplet, the wire format expected by the price endpoint
    p = [
        "%s:%s:%s" % (key, value["material"], value["color"])
        for key, value in appier.legacy.iteritems(parts)
    ]

    # assembles the query parameters, adding the optional values
    # only when they have been effectively provided
    params = dict(product_id = self.product_id, p = p)
    if currency: params["currency"] = currency
    if country: params["country"] = country
    if embossing: params["embossing"] = embossing
    if letters: params["letters"] = letters

    result = appier.get(self.price_url, params = params)
    return result["total"]["price_final"]
def process(self, id):
    """Retrieves the JSON payload available at the (mandatory) "url"
    field and returns it as a dictionary.

    The remote contents may come either already deserialized (dict)
    or as raw bytes, in which case they are decoded and parsed.
    """
    url = self.field("url", mandatory = True)
    info = appier.get(url)
    # isinstance is the proper way of testing the payload type, the
    # previous type(...) == dict comparison was non-idiomatic and
    # would reject dict subclasses
    if not isinstance(info, dict):
        info = info.decode("utf-8")
        info = json.loads(info)
    return info
def loop(self):
    """Main (endless) node loop: publishes this node's information to
    the remote master and then polls it for pending jobs, printing
    each retrieved job.

    Any exception raised while interacting with the master is logged
    and the loop resumes after the configured sleep period.
    """
    logging.basicConfig(level = logging.DEBUG)

    # resolves the complete set of configuration values that drive
    # the node registration and the job polling process
    base_url = appier.conf("BASE_URL", BASE_URL)
    secret_key = appier.conf("SECRET_KEY", None)
    node_id = appier.conf("NODE_ID", "node")
    node_name = appier.conf("NODE_NAME", "node")
    node_location = appier.conf("NODE_LOCATION", "undefined")

    # the secret key header is only sent when one is configured
    headers = dict()
    if secret_key: headers["X-Secret-Key"] = secret_key

    while True:
        try:
            logging.info("Submitting node information")
            appier.post(
                base_url + "nodes/%s" % node_id,
                data_j = dict(name = node_name, location = node_location),
                headers = headers
            )
            logging.info("Retrieving jobs for node '%s'" % node_id)
            jobs = appier.get(
                base_url + "nodes/%s/jobs" % node_id,
                headers = headers,
                timeout = 600
            )
            logging.info("Retrieved %d jobs for node '%s'" % (len(jobs), node_id))
            for job in jobs: self.print_job(job)
        except BaseException as exception:
            logging.info("Exception while looping '%s'" % str(exception))
        logging.info("Sleeping for %.2f seconds" % self.sleep_time)
        time.sleep(self.sleep_time)
def caller():
    # performs the (handled) retrieval of the remote IP endpoint and
    # stores both the payload and the response object in the slot of
    # the results sequence reserved for this particular caller
    data, response = appier.get(
        "https://%s/ip" % self.httpbin,
        handle = True
    )
    slot = results[index]
    slot["data"] = data
    slot["response"] = response
def test_redirect(self):
    """Checks that both absolute and relative redirects are followed
    until a final (non 302) success response is reached."""

    def assert_ok(response):
        status = response.getcode()
        self.assertNotEqual(status, 302)
        self.assertEqual(status, 200)

    # absolute redirect to an external domain via query parameter
    _data, response = appier.get(
        "https://httpbin.org/redirect-to",
        params=dict(url="http://hive.pt"),
        handle=True,
        redirect=True
    )
    assert_ok(response)

    # chained relative redirects (two hops) must also resolve
    _data, response = appier.get(
        "https://httpbin.org/relative-redirect/2",
        handle=True,
        redirect=True
    )
    assert_ok(response)
def test_timeout(self):
    """Validates timeout semantics: a short timeout must abort a slow
    endpoint while a generous one lets the request complete."""
    slow_url = "https://%s/delay/3" % self.httpbin
    delayed_url = "https://%s/delay/1" % self.httpbin

    # the three second delay cannot complete within a single second
    self.assertRaises(
        BaseException,
        lambda: appier.get(slow_url, handle=True, redirect=True, timeout=1)
    )

    # the one second delay completes comfortably within thirty seconds
    data, response = appier.get(delayed_url, handle=True, redirect=True, timeout=30)
    self.assertEqual(response.getcode(), 200)
    self.assertNotEqual(len(data), 0)
    self.assertNotEqual(data, None)
def deploy_url(self, url, force = False):
    """Retrieves the deployment descriptor from the given URL and
    runs the torus deployment process for it.

    :param url: the location from which the descriptor is fetched.
    :param force: if the deployment should be forced downstream.
    """
    data = appier.get(url)
    # isinstance replaces the previous type(...) == dict comparison,
    # which was non-idiomatic and rejected dict subclasses; a non
    # dict payload is assumed to be raw (UTF-8) JSON bytes
    if not isinstance(data, dict):
        data = data.decode("utf-8")
        data = json.loads(data)
    self.deploy_torus(url, data, force = force)
def tick(self):
    """Runs a single scheduler tick, retrieving the remote asset a
    fixed number of times and accounting the retrieved byte count
    (used to observe memory/leak behavior)."""
    appier.Scheduler.tick(self)
    if not self.enabled: return

    self._init_leak()
    self.logger.info("Running remote retrieval process ...")

    # fetches the asset the configured number of times, adding each
    # payload's size to the counter and discarding it immediately
    for _iteration in range(self.requests):
        payload = appier.get(self.asset_url)
        self.bytes += len(payload)
        del payload

    self.logger.info("Current byte count is %d bytes" % self.bytes)
    self._run_gc()
    self._status_leak()
def test_error(self):
    """Requests a known 404 endpoint and expects the proper HTTP
    error exception to be raised by the client."""
    missing_url = "https://%s/status/404" % self.httpbin
    self.assertRaises(appier.HTTPError, lambda: appier.get(missing_url))
def _get(cls, url, cache=True):
    """Performs a (memoized) GET on the given URL, serving repeated
    requests from the class-level cache when allowed.

    A cache miss — or an explicit cache bypass — always refreshes the
    stored value before returning it.
    """
    if cache and url in cls._CACHE:
        return cls._CACHE[url]
    result = appier.get(url)
    cls._CACHE[url] = result
    return result
import threading

import appier

# condition used to block the main thread until the asynchronous
# request's callback signals completion (the original snippet used
# threading.Condition without importing threading — fixed here)
condition = threading.Condition()

def callback(result, response):
    # a valid response means the connection succeeded, so its status
    # code and payload are printed; otherwise a problem is reported
    if response:
        print(response.getcode())
        print(response.read())
    else:
        print("Problem in connection")
    # wakes the main thread regardless of the request outcome
    condition.acquire()
    try:
        condition.notify()
    finally:
        condition.release()

appier.get(
    "https://www.flickr.com/",
    handle=True,
    asynchronous=True,
    callback=callback
)

# blocks until the callback runs and releases the main thread
condition.acquire()
try:
    condition.wait()
finally:
    condition.release()
import threading

import appier

# condition used to block the main thread until the asynchronous
# request's callback signals completion (the original snippet used
# threading.Condition without importing threading — fixed here)
condition = threading.Condition()

def callback(result, response):
    # a valid response means the connection succeeded, so its status
    # code and payload are printed; otherwise a problem is reported
    if response:
        print(response.getcode())
        print(response.read())
    else:
        print("Problem in connection")
    # wakes the main thread regardless of the request outcome
    condition.acquire()
    try:
        condition.notify()
    finally:
        condition.release()

# NOTE: the original example passed `async=True`, which is a syntax
# error on Python >= 3.7 where `async` is a reserved keyword; the
# `asynchronous` keyword is the modern appier equivalent
appier.get(
    "https://www.flickr.com/",
    handle=True,
    asynchronous=True,
    callback=callback
)

# blocks until the callback runs and releases the main thread
condition.acquire()
try:
    condition.wait()
finally:
    condition.release()
def copy(input, name, buffer_size=16384):
    """Streams the complete contents of the input file-like object
    into a newly created binary file with the given name, reading the
    data in chunks of the provided buffer size."""
    output = open(name, "wb")
    try:
        # keeps transferring chunks until the source is exhausted
        while True:
            chunk = input.read(buffer_size)
            if not chunk:
                break
            output.write(chunk)
    finally:
        output.close()

def is_tty():
    """Tells whether standard output is attached to an interactive
    terminal (safe for stream objects lacking isatty)."""
    return hasattr(sys.stdout, "isatty") and sys.stdout.isatty()

# retrieves the remote resource as a file-like object (streamed to a
# file by use_file) and then copies it into its final destination,
# making sure the source contents are always closed
contents, _response = appier.get(
    url,
    handle=True,
    redirect=True,
    retry=0,
    use_file=True,
    callback_headers=callback_headers,
    callback_data=callback_data,
    callback_result=callback_result
)
try:
    copy(contents, name)
finally:
    contents.close()
# but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # Apache License for more details. # # You should have received a copy of the Apache License along with # Hive Appier Framework. If not, see <http://www.apache.org/licenses/>. __author__ = "João Magalhães <*****@*****.**>" """ The author(s) of the module """ __version__ = "1.0.0" """ The version of the module """ __revision__ = "$LastChangedRevision$" """ The revision number of the module """ __date__ = "$LastChangedDate$" """ The last change date of the module """ __copyright__ = "Copyright (c) 2008-2018 Hive Solutions Lda." """ The copyright for the module """ __license__ = "Apache License, Version 2.0" """ The license for the module """ import appier _contents, response = appier.get("https://www.flickr.com/", handle = True) print(response.getcode()) print(response.read())
__license__ = "Apache License, Version 2.0"
""" The license for the module """

import threading

import appier

# condition used to block the main thread until the asynchronous
# request's callback signals completion
condition = threading.Condition()

def callback(result, response):
    # a valid response means the connection succeeded, so its status
    # code and payload are printed; otherwise a problem is reported
    if response:
        print(response.getcode())
        print(response.read())
    else:
        print("Problem in connection")
    # notifies the main thread regardless of the request outcome
    condition.acquire()
    try:
        condition.notify()
    finally:
        condition.release()

# NOTE: the original example passed `async = True`, which is a syntax
# error on Python >= 3.7 where `async` is a reserved keyword; the
# `asynchronous` keyword is the modern appier equivalent
appier.get(
    "https://www.flickr.com/",
    handle = True,
    asynchronous = True,
    callback = callback
)

# blocks until the callback runs and releases the main thread
condition.acquire()
try:
    condition.wait()
finally:
    condition.release()
def test_invalid(self):
    """Requests a domain that should not resolve, expecting the
    underlying connection failure to surface as an exception."""
    invalid_url = "https://invalidlargedomain.org/"
    self.assertRaises(BaseException, lambda: appier.get(invalid_url))
__date__ = "$LastChangedDate$"
""" The last change date of the module """

__copyright__ = "Copyright (c) 2008-2019 Hive Solutions Lda."
""" The copyright for the module """

__license__ = "Apache License, Version 2.0"
""" The license for the module """

import appier

def callback(result, response):
    # a valid response means the connection succeeded, so its status
    # code and payload are printed; otherwise a problem is reported
    if response:
        print(response.getcode())
        print(response.read())
    else:
        print("Problem in connection")
    # stops the event loop so that the run_forever() call below returns
    loop.stop()

# starts the asynchronous retrieval, which yields an event loop and
# protocol pair, then runs the loop until the callback stops it
loop, protocol = appier.get(
    "https://www.flickr.com/",
    handle = True,
    asynchronous = True,
    callback = callback
)
loop.run_forever()
loop.close()
def _install(name=None, id=None, version=None, upgrade=False):
    """Installs the package identified by the provided name or id
    (optionally pinned to a version), resolving it against the set
    of configured repositories, installing its dependencies first
    and finally deploying the downloaded bundle.

    :param name: the (human readable) name of the package.
    :param id: the unique identifier of the package (note: the
    parameter name shadows the builtin, kept for compatibility).
    :param version: the version to install, "x.x.x" meaning any.
    :param upgrade: if an already installed package should be
    re-installed/upgraded.
    :raises RuntimeError: when no repository contains the package.
    """
    import appier

    # verifies if the provided version string is wildcard based and
    # for such situations invalidated the version value (sets to invalid)
    if version == "x.x.x":
        version = None

    # constructs the proper description string taking into account
    # if the name or the id has been provided and then prints a
    # message about the installation operation that is going to start
    description = name or id
    output("Installing package %s" % description)

    # creates the map containing the various parameters that are
    # going to be sent as part of the filtering process for the
    # remote request of package retrieval
    params = dict()
    if name:
        params["name"] = name
    if id:
        params["identifier"] = id

    # retrieves the proper repository url that is currently defined
    # then enforces the value to be a valid sequence, so that the
    # logic is defined as cycle of url based package detection
    repo_url = colony.conf("REPO_URL", REPO_URL)
    if not isinstance(repo_url, (list, tuple)):
        repo_url = (("colony", repo_url),)

    # starts the variable that will hold the found package at invalid
    # so that the value is set only when a repo contains a package
    # matching the defined criteria
    package = None

    # iterates over the complete set of repositories defined in the
    # repository url value trying to find the proper package, note
    # that the package is found when at least one result is returned
    # matching the provided criteria (as defined in specification);
    # the loop variable is named _repo_name to avoid shadowing the
    # `name` parameter (the original code re-used `name` here)
    for _repo_name, _repo_url in repo_url:
        url = _repo_url + "packages"
        result = appier.get(url, params=params)
        package = result[0] if result else dict()
        if not package:
            continue
        repo_url = _repo_url
        break

    # in case no package has been found for any of the defined repos
    # an exception must be raised indicating the problem to the user
    if not package:
        raise RuntimeError("Package not found")

    # constructs the proper url for package information retrieval and
    # runs it so that the complete set of information (including
    # dependencies) is gathered providing the system with the
    # complete set of options
    url = repo_url + "packages/%s/info" % package["name"]
    info = appier.get(url, params=dict(version=version))

    # verifies if the package is already installed under the current
    # system and if that's the case returns immediately as there's
    # nothing remaining to be done for such situation
    if _exists(info, upgrade=upgrade):
        output("Package %s is already installed, skipping" % description)
        return

    # runs the dependencies operation for the current package information
    # this operation should be able to install all the requirements for
    # the current package in transit (avoid package corruption)
    try:
        indent()
        _dependencies(info, upgrade=upgrade)
    finally:
        unindent()

    # prints information about the starting of the package download, this
    # is required for the user to be notified about such action
    output("Downloading %s" % description)

    # creates the proper package retrieval url and runs the remote get request
    # to try to retrieve the package contents of so that they are installed
    url = repo_url + "packages/%s" % info["short_name"]
    data = appier.get(url, params=dict(version=info["version"]))

    # creates a new temporary directory for the new bundle file that is going
    # to be created and stores it under such directory (for deployment);
    # the with statement guarantees the file handle is always closed
    temp_path = tempfile.mkdtemp()
    target_path = os.path.join(temp_path, "%s.cbx" % info["short_name"])
    with open(target_path, "wb") as stream:
        stream.write(data)

    # runs the deployment process for the package bundle that has been retrieved
    # and then removes the temporary directory path, as it's no longer required
    _deploy(target_path, timestamp=info["timestamp"])
    shutil.rmtree(temp_path)

    # prints a message about the end of the installation process for the current
    # package, this will allow the user to be aware of the end of operation
    output("Finished installing %s" % description)
# but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # Apache License for more details. # # You should have received a copy of the Apache License along with # Hive Appier Framework. If not, see <http://www.apache.org/licenses/>. __author__ = "João Magalhães <*****@*****.**>" """ The author(s) of the module """ __version__ = "1.0.0" """ The version of the module """ __revision__ = "$LastChangedRevision$" """ The revision number of the module """ __date__ = "$LastChangedDate$" """ The last change date of the module """ __copyright__ = "Copyright (c) 2008-2019 Hive Solutions Lda." """ The copyright for the module """ __license__ = "Apache License, Version 2.0" """ The license for the module """ import appier _contents, response = appier.get("https://www.flickr.com/", handle = True) print(response.getcode()) print(response.read())
def sync_product(self, merchandise, inventory_line = None, force = False):
    """Synchronizes the provided (omni) merchandise entity into a
    local (budy) product, including the complete set of associated
    media and their resized variants.

    :param merchandise: the source merchandise map, expected to
    contain at least "_class" and "object_id" keys.
    :param inventory_line: optional inventory line forwarded to the
    product building process.
    :param force: if the product building should be forced,
    forwarded to the product building process.
    """
    # retrieves the reference to the api object that is
    # going to be used for api based operations
    api = self.get_api()

    # retrieves some of the most general attributes of the
    # merchandise that is going to be integrated as a product
    _class = merchandise["_class"]
    object_id = merchandise["object_id"]

    # builds a new product instance from the merchandise
    # information that has just been retrieved
    product = budy.Product.from_omni(
        merchandise,
        inventory_line = inventory_line,
        force = force
    )
    product.save()
    # resets the images association, it is fully rebuilt below
    product.images = []

    # retrieves the media information associated with the
    # current merchandise to be able to sync it by either
    # creating new local medias or re-using existing ones
    media = api.info_media_entity(
        object_id,
        dimensions = "original"
    )

    # iterates over the complete set of media associated with
    # the current product to try to create/update its media
    for item in media:
        # creates the unique value for the media from its object
        # identifier and its last modification data, using this
        # value tries to retrieve a possible already existing
        # and equivalent media (avoids duplication)
        unique = "%d-%d" % (item["object_id"], item["modify_date"])
        _media = budy.Media.get(unique = unique, raise_e = False)

        # in case the media does not exist, tries to retrieve the
        # new remote data from the source and create a new media
        if not _media:
            media_url = api.get_media_url(item["secret"])
            data = appier.get(media_url)
            _media = budy.Media(
                description = item["dimensions"],
                label = item["label"],
                order = item["position"] or 1,
                size = item["dimensions"],
                unique = unique,
                file = appier.File((item["label"], None, data))
            )
            _media.save()

        # iterates over the complete set of resized images to
        # be created and for each of them verifies it has to
        # be generated or if one already exists
        for suffix, size in (
            ("thumbnail", 260),
            ("thumbnail_2x", 540),
            ("large", 540),
            ("large_2x", 1080)
        ):
            resized_unique = "%s-%s" % (unique, suffix)
            resized = budy.Media.get(unique = resized_unique, raise_e = False)
            if not resized:
                resized = _media.thumbnail_s(width = size, suffix = suffix)
                resized.save()
            product.images.append(resized)
        product.images.append(_media)
    product.save()
# resolves the target URL (first command line argument, defaulting to
# the big buck bunny sample) and the local file name to write to
url = sys.argv[1] if len(sys.argv) > 1 else BIG_BUCK_URL
name = os.path.basename(appier.legacy.urlparse(url).path)

def copy(input, name, buffer_size = 16384):
    """Copies the complete contents of the input file-like object to
    a newly created binary file with the given name, streaming the
    data in chunks of buffer_size bytes."""
    output = open(name, "wb")
    try:
        # keeps transferring chunks until the source is exhausted
        while True:
            chunk = input.read(buffer_size)
            if not chunk: break
            output.write(chunk)
    finally:
        output.close()

# runs the download with the console progress callbacks installed and
# then streams the retrieved (temporary) file into the target file,
# making sure the source contents are always closed
with appier_console.ctx_http_callbacks(name) as callbacks:
    contents, _response = appier.get(
        url,
        handle = True,
        silent = True,
        redirect = True,
        retry = 0,
        use_file = True,
        callback_init = callbacks["callback_init"],
        callback_open = callbacks["callback_open"],
        callback_headers = callbacks["callback_headers"],
        callback_data = callbacks["callback_data"],
        callback_result = callbacks["callback_result"]
    )
    try:
        copy(contents, name)
    finally:
        contents.close()