def __init__(self, keyspace_name, table_name, record_schema, cassandra_session, replication_strategy=None):

    ''' initialize the table client with a validated session and inputs

    :param keyspace_name: string with name of keyspace
    :param table_name: string with name of table
    :param record_schema: dictionary with record schema
    :param cassandra_session: authenticated cassandra.cluster.Session object
    :param replication_strategy: [optional] dictionary with replication settings
    '''

    method_title = '%s.__init__' % self.__class__.__name__

    # build the input validation model from the class field declarations
    from jsonmodel.validators import jsonModel
    self.fields = jsonModel(self._class_fields)

    # validate each truthy argument against the model
    arguments = {
        'keyspace_name': keyspace_name,
        'table_name': table_name,
        'record_schema': record_schema,
        'replication_strategy': replication_strategy
    }
    for argument_name, argument_value in arguments.items():
        if not argument_value:
            continue
        record_title = '%s(%s=%s)' % (method_title, argument_name, str(argument_value))
        self.fields.validate(argument_value, '.%s' % argument_name, record_title)

    # rotate the first sys.path entry to the back while importing Session,
    # then restore it — works around a module namespace conflict
    from sys import path as sys_path
    sys_path.append(sys_path.pop(0))
    from cassandra.cluster import Session
    sys_path.insert(0, sys_path.pop())

    # verify the session argument is a real cassandra driver session
    if not isinstance(cassandra_session, Session):
        raise ValueError('%s(cassandra_session) must be a cassandra.cluster.Session datatype.' % method_title)
    self.session = cassandra_session
def _config_check():
    """Verify that config.py is importable from the project root.

    Temporarily rewrites sys.path to contain only the project root, then
    attempts to import config, reporting a helpful message via Messager
    and raising ConfigurationError on failure.
    """
    from message import Messager
    from sys import path
    from copy import deepcopy
    from os.path import dirname
    # Reset the path to force config.py to be in the root (could be hacked
    # using __init__.py, but we can be monkey-patched anyway)
    orig_path = deepcopy(path)
    # Can't you empty in O(1) instead of O(N)?
    while path:
        path.pop()
    # NOTE(review): path_join and abspath are assumed to come from a
    # module-level import outside this view — verify.
    path.append(path_join(abspath(dirname(__file__)), '../..'))
    # Check if we have a config, otherwise whine
    try:
        import config
        del config
    except ImportError as e:  # FIX: `except X, e` is a syntax error on Python 3
        path.extend(orig_path)
        # "Prettiest" way to check specific failure
        # FIX: ImportError.message exists only on Python 2; fall back to str(e),
        # and accept both the Py2 and Py3 spellings of the message.
        message = getattr(e, 'message', str(e))
        if message in ("No module named config", "No module named 'config'"):
            Messager.error(_miss_config_msg(), duration=-1)
        else:
            Messager.error(_get_stack_trace(), duration=-1)
        raise ConfigurationError
def script_path(script_dir: str):
    """
    Context manager for adding a dir to the sys path
    and restoring it afterwards. This trick allows
    relative imports to work on the target script.
    if script_dir is empty function will do nothing
    Slightly modified from wolf's script_path (see https://github.com/Duroktar/Wolf)
    Exception-safe (os.error will not be raised)

    NOTE(review): this is a generator — it presumably carries a
    @contextmanager decorator above this view; confirm.
    """
    if script_dir is None or script_dir == "":
        yield
    else:
        arepl_dir = path[0]
        path[0] = script_dir
        path.append(arepl_dir)
        # FIX: pre-bind original_cwd so the restore step below cannot raise
        # NameError if os.getcwd() itself failed (the original left it unbound).
        original_cwd = None
        try:
            original_cwd = os.getcwd()
            os.chdir(script_dir)
        except os.error:
            # no idea why this would happen but a user got this error once
            # this func is not critical to arepl so we dont want error to bubble up
            pass
        try:
            yield
        finally:
            # undo the sys.path edits; some user code may already have
            # removed our appended entry, so only pop when it is ours
            if path[-1] == arepl_dir:
                path.pop()
            path[0] = arepl_dir
            if original_cwd is not None:
                try:
                    os.chdir(original_cwd)
                except os.error:
                    pass
def __compute_tracks_tr(self):
    # Build the list of translated track names and return it wrapped in a
    # zero-argument callable.
    translated = []
    for track in self.__compute_tracks():
        # make this track's asset directory the first import location so
        # that its own 'track_tr' module is found
        path.insert(0, self.eng.curr_path + 'assets/tracks/' + track)
        mod = __import__('track_tr')
        # every track reuses the module name 'track_tr', so force a
        # re-execution to pick up this track's file instead of the cached one
        # NOTE(review): bare reload() is a Python 2 builtin — on Python 3 this
        # needs importlib.reload; verify which interpreter runs this.
        reload(mod)
        translated += [mod.translated]
        # remove the entry added above to leave sys.path unchanged
        path.pop(0)
    return lambda: translated
def validate_family(value):
    """Validate that a family module named '<value>_family' exists.

    :param value: string with the family name to check
    :raises ValidationError: if no matching family module can be imported
    """
    # FIX: append outside the try block — in the original, a failure inside
    # config.datafilepath() would reach the finally clause and pop a sys.path
    # entry that was never pushed, silently corrupting sys.path.
    path.append(config.datafilepath('families'))
    try:
        __import__('%s_family' % value)
    except ImportError:
        raise ValidationError(_("Family %(family)s doesn't exist.") %
                              {'family': value})
    finally:
        # always remove the entry we added above
        path.pop()
def __exit__(self, *args):
    ''' Undo the sys.path entry added on enter.

    Some code may already have removed the thing we added, so only
    discard the tail entry when it is actually ours; otherwise put the
    foreign entry straight back.
    '''
    tail = sys_path.pop()
    if tail != self.path:
        sys_path.append(tail)
def _config_check():
    """Verify config.py exists in the project root and defines the
    required entries (DEBUG, ADMIN_CONTACT_EMAIL).

    Temporarily rewrites sys.path to contain only the project root while
    probing the imports; sys.path is always restored via the finally
    clause. Raises ConfigurationError after reporting via Messager.
    """
    from message import Messager
    from sys import path
    from copy import deepcopy
    from os.path import dirname
    # Reset the path to force config.py to be in the root (could be hacked
    # using __init__.py, but we can be monkey-patched anyway)
    orig_path = deepcopy(path)

    try:
        # Can't you empty in O(1) instead of O(N)?
        while path:
            path.pop()
        path.append(path_join(abspath(dirname(__file__)), '../..'))
        # Check if we have a config, otherwise whine
        try:
            import config
            del config
        except ImportError as e:
            path.extend(orig_path)
            # "Prettiest" way to check specific failure
            # FIX: ImportError.message is Python 2 only and raises
            # AttributeError on Python 3 — use str(e) as the fallback and
            # accept both message spellings.
            message = getattr(e, 'message', str(e))
            if message in ("No module named config", "No module named 'config'"):
                Messager.error(_miss_config_msg(), duration=-1)
            else:
                Messager.error(_get_stack_trace(), duration=-1)
            raise ConfigurationError

        # Try importing the config entries we need
        try:
            from config import DEBUG
        except ImportError:
            path.extend(orig_path)
            Messager.error(_miss_var_msg('DEBUG'), duration=-1)
            raise ConfigurationError

        try:
            from config import ADMIN_CONTACT_EMAIL
        except ImportError:
            path.extend(orig_path)
            Messager.error(_miss_var_msg('ADMIN_CONTACT_EMAIL'), duration=-1)
            raise ConfigurationError
    finally:
        # Remove our entry to the path
        while path:
            path.pop()
        # Then restore it
        path.extend(orig_path)
def __init__(self, access_token, collection_name=''):

    ''' a method to initialize the dropboxClient class

    :param access_token: string with oauth2 access token for users account
    :param collection_name: [optional] string with name for the collection
    '''

    method_title = '%s.__init__' % self.__class__.__name__

    # construct input validation model
    self.fields = jsonModel(self._class_fields)

    # validate every argument against the model (empty strings included)
    arguments = {
        'access_token': access_token,
        'collection_name': collection_name
    }
    for argument_name, argument_value in arguments.items():
        record_title = '%s(%s=%s)' % (method_title, argument_name, str(argument_value))
        self.fields.validate(argument_value, '.%s' % argument_name, record_title)

    # workaround for module namespace conflict: rotate the first sys.path
    # entry to the back while importing the dropbox sdk, then restore it
    from sys import path as sys_path
    sys_path.append(sys_path.pop(0))
    from dropbox import Dropbox
    from dropbox.files import FileMetadata, WriteMode, DeleteArg
    from dropbox.exceptions import ApiError
    sys_path.insert(0, sys_path.pop())

    # construct dropbox client
    from labpack.compilers.objects import _method_constructor
    self.dropbox = Dropbox(oauth2_access_token=access_token)

    # expose the sdk classes through a single helper object
    self.objects = _method_constructor({
        'FileMetadata': FileMetadata,
        'ApiError': ApiError,
        'WriteMode': WriteMode,
        'DeleteArg': DeleteArg
    })

    # construct collection name
    self.collection_name = collection_name
# NOTE(review): the block below is the tail of a config-checking routine
# that begins outside this view; in the fuller variant of this function the
# trailing finally pairs with an enclosing try — here it is attached to the
# nearest try so the fragment parses standalone. Verify against the source.
try:
    from config import DEBUG
except ImportError:
    path.extend(orig_path)
    Messager.error(_miss_var_msg('DEBUG'), duration=-1)
    raise ConfigurationError
try:
    from config import ADMIN_CONTACT_EMAIL
except ImportError:
    path.extend(orig_path)
    Messager.error(_miss_var_msg('ADMIN_CONTACT_EMAIL'), duration=-1)
    raise ConfigurationError
finally:
    # Remove our entry to the path
    while path:
        path.pop()
    # Then restore it
    path.extend(orig_path)


# Convert internal log level to `logging` log level
def _convert_log_level(log_level):
    # Maps the project's config.LL_* constants onto the stdlib logging levels.
    import config
    import logging
    if log_level == config.LL_DEBUG:
        return logging.DEBUG
    elif log_level == config.LL_INFO:
        return logging.INFO
    elif log_level == config.LL_WARNING:
        return logging.WARNING
    elif log_level == config.LL_ERROR:
        return logging.ERROR
    # NOTE(review): implicitly returns None for any unrecognised level —
    # confirm callers tolerate a None log level.
from os.path import abspath, dirname, join
from sys import argv, path  # To temporarily modify sys.path

SETUP_DIR = abspath(dirname(__file__))

# Prefer an installed setuptools; otherwise bootstrap it via the bundled
# ez_setup helper (briefly putting the setup dir on sys.path to find it).
try:
    from setuptools import setup, find_packages
except ImportError:
    path.insert(0, SETUP_DIR)
    import ez_setup
    path.pop(0)
    ez_setup.use_setuptools()
    from setuptools import setup, find_packages

# Import version to get the version string
# (the package dir is added and removed from sys.path around the import)
path.insert(0, join(SETUP_DIR, 'mminte'))
from version import get_version, update_release_version
path.pop(0)
version = get_version(pep440=True)

# If building something for distribution, ensure the VERSION
# file is up to date
if 'sdist' in argv or 'bdist_wheel' in argv:
    update_release_version()

# @todo extra with jupyter dependency?
# NOTE(review): this requirements list is cut off at the edge of the
# visible chunk and continues beyond this view.
requirements = [
    'six',
    'pandas>=0.18.0',
import asyncio
import discord
from discord.ext import commands
from sys import path

# temporarily expose the project root so the utils package resolves,
# then remove the entry again
path.insert(1, '../../')
from utils import randomorg
from utils import variables
from utils.variables import db
path.pop(1)

import requests as rq
import shutil


class DogmasCog(commands.Cog):
    # Discord cog that serves clan "dogma" phrases/images stored in db.

    def __init__(self, bot):
        # bot: the discord.ext.commands.Bot instance this cog is attached to
        self.bot = bot

    @commands.command(name = 'догма', aliases = ['dogma', 'dogme'])
    async def dogme(self, ctx, clan: str, key: str):
        # Look up the stored record for `key` and reply according to `clan`.
        channel = ctx.message.channel
        key = key  # no-op; presumably left over from an earlier refactor
        dictionary = db.search(variables.SUI.key == key)
        # clan 'а'/'a' with the motto key gets a fixed canned reply
        if (clan in ['а', 'a']):
            if (key in ['девиз', 'motto']):
                await ctx.send('**Прах ты, и в прах возвратишься!**')
                return
        # NOTE(review): this chunk is truncated mid-statement — the except
        # handler body continues beyond this view.
        try:
            img = dictionary[0]['image']
        except:
def __init__(self, magic_file=''):

    '''
        initialization method for labMagic class

    :param magic_file: [optional] string with local path to magic.mgc file
    '''

    title = '%s.__init__' % self.__class__.__name__

    # construct class field model
    from jsonmodel.validators import jsonModel
    self.fields = jsonModel(self._class_fields)

    # validate inputs
    input_fields = {
        'magic_file': magic_file
    }
    for key, value in input_fields.items():
        if value:
            object_title = '%s(%s=%s)' % (title, key, str(value))
            self.fields.validate(value, '.%s' % key, object_title)

    # construct magic method
    magic_kwargs = {
        'mime': True,
        'uncompress': True
    }
    from labpack.platforms.localhost import localhostClient
    sys_name = localhostClient().os.sysname
    # on Windows, libmagic cannot locate its database automatically,
    # so an explicit magic_file path is mandatory
    if sys_name == 'Windows':
        if not magic_file:
            raise IndexError('%s(magic_file="...") is required on Windows systems.')
    import os
    if magic_file:
        if not os.path.exists(magic_file):
            raise ValueError('%s(magic_file=%s) is not a valid file path.' % (title, magic_file))
        magic_kwargs['magic_file'] = magic_file
    try:
        # workaround for module namespace conflict: rotate the first
        # sys.path entry to the back while importing magic, then restore it
        from sys import path as sys_path
        sys_path.append(sys_path.pop(0))
        import magic
        sys_path.insert(0, sys_path.pop())
        self.magic = magic.Magic(**magic_kwargs)
    except:
        raise Exception('\nmagiclab requires the python-magic module. try: pip install python-magic\npython-magic requires the C library libmagic. See documentation in labpack.parsing.magic.')

    # construct mimetypes method
    import mimetypes
    self.mimetypes = mimetypes.MimeTypes()

    # retrieve updates to mimetypes
    # NOTE(review): the nesting below is reconstructed from a
    # whitespace-mangled source — verify against the original file.
    mimetype_urls = self.fields.schema['mimetype_urls']
    from labpack.storage.appdata import appdataClient
    mime_collection = appdataClient('Mime Types')
    mime_filter = mime_collection.conditional_filter([{-1:{'must_contain': ['mime.types']}}])
    mime_list = mime_collection.list(mime_filter)
    for key in mimetype_urls.keys():
        file_path = os.path.join(mime_collection.collection_folder, key)
        if key not in mime_list:
            file_dir = os.path.split(file_path)[0]
            if not os.path.exists(file_dir):
                os.makedirs(file_dir)
            import requests
            try:
                response = requests.get(mimetype_urls[key])
            except Exception:
                # best-effort download: report the failure and stop trying
                # further registry urls rather than raising
                from labpack.handlers.requests import handle_requests
                request_kwargs = {'url': mimetype_urls[key]}
                response_details = handle_requests(requests.Request(**request_kwargs))
                print('magiclab attempted to retrieve latest mimetype registry resource at %s but ran into this non-fatal error: %s' % (mimetype_urls[key], response_details['error']))
                break
            with open(file_path, 'wb') as f:
                f.write(response.content)
                f.close()  # redundant inside `with`, kept as-is
        # merge the downloaded registry into the mimetypes map
        # (inner `key` shadows the loop variable above — kept as-is)
        ext_map = mimetypes.read_mime_types(file_path)
        for key, value in ext_map.items():
            self.mimetypes.add_type(value, key)
from os.path import isfile, abspath, dirname, join
from sys import argv, path  # To temporarily modify sys.path

SETUP_DIR = abspath(dirname(__file__))

# Prefer an installed setuptools; otherwise bootstrap it via the bundled
# ez_setup helper (briefly putting the setup dir on sys.path to find it).
try:
    from setuptools import setup, find_packages
except ImportError:
    path.insert(0, SETUP_DIR)
    import ez_setup
    path.pop(0)
    ez_setup.use_setuptools()
    from setuptools import setup, find_packages

# for running parallel tests due to a bug in python 2.7.3
# http://bugs.python.org/issue15881#msg170215
# NOTE(review): bare `except: None` silently swallows every error here —
# deliberate best-effort import, kept as-is.
try:
    import multiprocessing
except:
    None

# import version to get the version string
# (the package dir is added and removed from sys.path around the import)
path.insert(0, join(SETUP_DIR, "cobra"))
from version import get_version, update_release_version
path.pop(0)
version = get_version(pep440=True)

# If building something for distribution, ensure the VERSION
# file is up to date
__author__ = 'rcj1492'
__created__ = '2016.12'
__license__ = 'MIT'

'''
PLEASE NOTE:    dropbox package requires the dropbox module.

(all platforms) pip3 install dropbox
'''

# import the dropbox sdk, rotating the first sys.path entry to the back
# so a local 'dropbox' package directory cannot shadow the installed module;
# abort with an install hint if the module is missing
try:
    from sys import path as sys_path
    sys_path.append(sys_path.pop(0))
    from dropbox import Dropbox
    sys_path.insert(0, sys_path.pop())
except:
    import sys
    print('dropbox package requires the dropbox module. try: pip3 install dropbox')
    sys.exit(1)

# TODO: incorporate rate limiting logic
# TODO: add method to retrieve metadata from record
# TODO: add method to use Dropbox search functionality

import os
from jsonmodel.validators import jsonModel

# NOTE(review): this chunk is truncated — the body of __init__ continues
# beyond this view.
class DropboxConnectionError(Exception):

    def __init__(self, request='', message='', errors=None, captured_error=None):
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
"""
from sys import argv, exit, path as spath
from os.path import dirname

# make the in-tree src directory importable just long enough to pull in
# the fieldforce_tcm package, then remove the entry again
_us = dirname(__file__)
spath.insert(0, "{0}/../src".format(_us))
from fieldforce_tcm import *
spath.pop(0)

import termios
import threading
from event import *
from time import time as _time

# NOTE(review): 'sys' itself is never imported in this view — sys.stdin
# below only resolves if 'from fieldforce_tcm import *' (or code above this
# view) brings 'sys' into scope; verify.
_stdin = sys.stdin.fileno()
# saved terminal attributes captured by _term_save (None until saved)
_termios_settings = None


def _term_save():
    # snapshot the current terminal attributes of stdin so they can be
    # restored later by _term_restore
    global _termios_settings
    _termios_settings = termios.tcgetattr(_stdin)


def _term_restore():
    # restore the stdin terminal attributes captured by _term_save
    termios.tcsetattr(_stdin, termios.TCSANOW, _termios_settings)
def __init__(self, hostname, port=9042, username='', password='', cert_path=''):

    ''' initialize a cluster client and open a session

    :param hostname: string (or list of strings) with cassandra contact point(s)
    :param port: [optional] integer with cassandra connection port
    :param username: [optional] string with authentication username
    :param password: [optional] string with authentication password
    :param cert_path: [optional] string with path to ssl certificate file
    '''

    method_title = '%s.__init__' % self.__class__.__name__

    # build the input validation model from the class field declarations
    from jsonmodel.validators import jsonModel
    self.fields = jsonModel(self._class_fields)

    # normalize a non-empty hostname string into a one-element list
    contact_points = hostname
    if isinstance(hostname, str):
        if hostname:
            contact_points = [ hostname ]

    # validate each truthy argument against the model
    arguments = {
        'hostname': contact_points,
        'port': port,
        'username': username,
        'password': password,
        'cert_path': cert_path
    }
    for argument_name, argument_value in arguments.items():
        if not argument_value:
            continue
        record_title = '%s(%s=%s)' % (method_title, argument_name, str(argument_value))
        self.fields.validate(argument_value, '.%s' % argument_name, record_title)

    # record endpoint settings on the instance
    self.hostname = contact_points
    self.port = port
    self.username = username
    self.password = password
    self.cert_path = cert_path

    # assemble cluster constructor arguments
    cluster_kwargs = {
        'contact_points': self.hostname,
        'port': self.port
    }

    # add plain-text authentication when credentials are supplied;
    # the sys.path rotation works around a module namespace conflict
    if self.username and self.password:
        from sys import path as sys_path
        sys_path.append(sys_path.pop(0))
        from cassandra.auth import PlainTextAuthProvider
        sys_path.insert(0, sys_path.pop())
        cluster_kwargs['auth_provider'] = PlainTextAuthProvider(
            username=username,
            password=password
        )

    # add ssl options when a certificate file is supplied
    if cert_path:
        from os import path
        import ssl
        if not path.exists(cert_path):
            raise ValueError('%s(cert_path="%s") is not a valid file path.' % (method_title, cert_path))
        cluster_kwargs['ssl_options'] = {
            'ca_certs': cert_path,
            'cert_reqs': ssl.CERT_REQUIRED,
            'ssl_version': ssl.PROTOCOL_TLSv1
        }
    self.cluster = Cluster(**cluster_kwargs)

    # open a session against the cluster
    self.session = self.cluster.connect()