Exemplo n.º 1
 def query(self, proxyType):
     if 'simple' == proxyType:
         link = 'http://www.baidu.com'
     else:
         link = 'http://www.google.com'

     pymysql.install_as_MySQLdb()
     output = open('misc/proxy.py', 'w')
     conn = pymysql.connect(host='192.168.0.27', port=3306, user='******', passwd='Admin1234#', db='quartz')
     cur = conn.cursor()

     output.write('HTTPPROXIES = [' + '\n')
     cur.execute("SELECT IPADDRESS, PORT FROM quartz.data_proxy where upper(type) = 'HTTP' order by dealdate desc ")
     for r in cur:
         # PORT may come back as an integer, so cast both columns before joining
         if self.test(str(r[0]) + ':' + str(r[1]), link, 'http'):
             output.write('{"ip_port": "' + str(r[0]) + ':' + str(r[1]) + '"},' + '\n')
     output.write(']' + '\n\n')

     output.write('HTTPSPROXIES = [' + '\n')
     cur.execute("SELECT IPADDRESS, PORT FROM quartz.data_proxy where upper(type) = 'HTTPS' order by dealdate desc ")
     for r in cur:
         if self.test(str(r[0]) + ':' + str(r[1]), link, 'https'):
             output.write('{"ip_port": "' + str(r[0]) + ':' + str(r[1]) + '"},' + '\n')
     output.write(']' + '\n\n')

     cur.close()
     conn.close()
     output.close()
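For reference, a hedged sketch of the misc/proxy.py file this method generates (the addresses below are placeholders, not real output):

# misc/proxy.py -- generated by query(); entries are whatever proxies passed self.test()
HTTPPROXIES = [
    {"ip_port": "10.0.0.1:8080"},
]

HTTPSPROXIES = [
    {"ip_port": "10.0.0.2:3128"},
]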
Exemplo n.º 2
    def connect(self):
        try:
            import pymysql
            pymysql.install_as_MySQLdb()
            info("Using pure python SQL client")
        except ImportError:
            info("Using other SQL client")

        try:
            import MySQLdb
        except ImportError:
            critical("ERROR: missing a mysql python module")
            critical("Install either 'PyMySQL' or 'mysqlclient' from your OS software repository or from PyPI")
            raise

        try:
            args = {
                'host': self.config.db_host,
                'port': self.config.db_port,
                'user': self.config.db_user,
                'passwd': self.config.db_pass,
                'db': self.config.db_name
            }
            if self.config.db_socket:
                args['unix_socket'] = self.config.db_socket

            conn = MySQLdb.connect(**args)
            conn.autocommit(True)
            conn.ping(True)
            self._db[threading.get_ident()] = conn
        except Exception as e:
            critical("ERROR: Could not connect to MySQL database! {}".format(e))
            raise
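A hedged follow-up sketch (only self._db comes from the original; the accessor name is an assumption): since connections are stored per thread id, a matching getter usually looks like this.

    def connection(self):
        # sketch only: return this thread's connection, creating one on first use
        ident = threading.get_ident()
        if ident not in self._db:
            self.connect()
        return self._db[ident]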
Exemplo n.º 3
def use_pymysql():
    try:
        from pymysql import install_as_MySQLdb
    except ImportError:
        pass
    else:
        install_as_MySQLdb()
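A minimal usage sketch (nothing below is from the original): the shim only takes effect if it runs before anything imports MySQLdb, so helpers like use_pymysql() are normally called at the very top of an entry point.

use_pymysql()

import MySQLdb  # resolves to PyMySQL's shim if the call above succeeded
print(MySQLdb.__name__)  # typically prints 'pymysql' when the shim is active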
Exemplo n.º 4
 def __init__(self, host, username, password, dbname):
     pymysql.install_as_MySQLdb()
     db = 'mysql+mysqldb://' + username + ':' + password + '@' + host + '/' + dbname + '?charset=utf8'
     self.engine = create_engine(db)
     self.engine.connect()
     self.s = Session(self.engine)
     self.Base = automap_base()
     self.Base.prepare(self.engine, reflect=True)
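A hedged follow-up (the wrapper class name and the users table are assumptions, not from the original): once automap has reflected the schema, the mapped classes are available under Base.classes and can be queried through the session.

# illustrative only
db = DatabaseWrapper('localhost', 'myuser', 'secret', 'mydb')  # hypothetical name for the class above
User = db.Base.classes.users        # automap exposes one mapped class per reflected table
first_ten = db.s.query(User).limit(10).all()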
Exemplo n.º 5
def readUserDB_list():
    pymysql.install_as_MySQLdb()
    conn = pymysql.connect(host=datahost, database=database, user=datauser, password=userpass)
    mycursor = conn.cursor()
    mycursor.execute('SELECT * FROM Test.Users')
    result = mycursor.fetchall()
    conn.close()
    return result
Exemplo n.º 6
    def __init__(self, host, username, password, database):
        # use PyMySQL as a MySQLdb alternative
        pymysql.install_as_MySQLdb()

        self.host = host
        self.username = username
        self.password = password
        self.database = database

        super().__init__()
Exemplo n.º 7
def readUserDB():
    pymysql.install_as_MySQLdb()
    conn = pymysql.connect(host=datahost, database=database, user=datauser, password=userpass)
    mycursor = conn.cursor()
    mycursor.execute('SELECT * FROM Test.Users')
    result = mycursor.fetchall()

    outp=''
    for (Id, Name, EMail) in result:
        outp+= "Name={1}, Id={0}, EMail={2}<BR/>".format(Id, Name, EMail) 
    
    conn.close()
    return outp
Exemplo n.º 8
def test_suite():
    try:
        # attempt to import MySQLdb
        try:
            import MySQLdb
        except ImportError:
            # fall back to pymysql as an alternative
            import pymysql
            pymysql.install_as_MySQLdb()
        import MySQLdb
    except ImportError:
        import warnings
        warnings.warn("MySQLdb is not importable, so MySQL tests disabled")
        return unittest.TestSuite()
Exemplo n.º 9
def runserver(*args, **kwargs):
    len_args = len(args)
    certdir = os.path.join(CONF._storage_dir, 'cert')
    certfile = os.path.join(certdir, 'server.crt')
    csrfile = os.path.join(certdir, 'server.csr')
    keyfile_tmp = os.path.join(certdir, 'server.key.tmp')
    keyfile = os.path.join(certdir, 'server.key')

    if len_args > 0 and 'create_cert' in args:
        if not os.path.exists(certdir):
            os.mkdir(certdir)

        print('\nCreate private key: {0}'.format(keyfile_tmp))
        if not os.path.exists(keyfile_tmp):
            local('openssl genrsa -aes128 -out {0} 4096'.format(keyfile_tmp))
        else:
            print('{0} already exists.'.format(keyfile_tmp))

        print('\nCreate CSR: {0}'.format(csrfile))
        if not os.path.exists(csrfile):
            local('openssl req -new -key {0} -sha256 -out {1}'.format(keyfile_tmp, csrfile))
        else:
            print('{0} already exists.'.format(csrfile))

        print('\nCreate SSL certificate (public key): {0}'.format(certfile))
        if not os.path.exists(certfile):
            local('openssl x509 -in {0} -days 365 -req -signkey {1} -sha256 -out {2}'.format(
                csrfile, keyfile_tmp, certfile))
        else:
            print('{0} already exists.'.format(certfile))

        print('\nCreate Decryption private key: {0}'.format(keyfile))
        if not os.path.exists(keyfile):
            local('openssl rsa -in {0} -out {1}'.format(keyfile_tmp, keyfile))
        else:
            print('{0} already exists.'.format(keyfile))

        return

    from django.core.wsgi import get_wsgi_application
    import pymysql
    pymysql.install_as_MySQLdb()

    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "web_conf.settings")
    application = get_wsgi_application()

    wsgi.server(eventlet.listen(('', 8080)), application)
Exemplo n.º 10
def patch_all():
    monkey.patch_all()

    if module_exists('psycogreen'):
        from psycogreen.gevent.psyco_gevent import make_psycopg_green
        make_psycopg_green()

    if module_exists('pymysql'):
        import pymysql
        pymysql.install_as_MySQLdb()

    if module_exists('gevent_zeromq'):
        from gevent_zeromq import zmq
        sys.modules["zmq"] = zmq

    if module_exists('pylibmc'):
        import memcache
        sys.modules["pylibmc"] = memcache
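A hedged ordering note with a sketch (the module path is a placeholder): patch_all() has to run before the application and its drivers are imported, otherwise the un-patched modules are already bound.

if __name__ == '__main__':
    patch_all()
    # import the application only after patching ('myapp.wsgi' is a placeholder)
    from myapp.wsgi import application  # noqa: E402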
Exemplo n.º 11
def main(argv=['', 'runserver', '0.0.0.0:8000']):
    pymysql.install_as_MySQLdb()
    match_file = re.compile(r"initial\.py$")
    argv = argv if len(sys.argv) == 1 else sys.argv
    base_dir = os.path.dirname(os.path.abspath(__file__))
    logs_dir = os.path.join(base_dir, 'logs')
    if not all([os.path.isdir(logs_dir), not os.path.isfile(logs_dir)]):
        os.makedirs(logs_dir)
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "afcat.settings")
    try:
        from django.core.management import execute_from_command_line, call_command
    except ImportError:
        # The above import may fail for some other reason. Ensure that the
        # issue is really that Django is missing to avoid masking other
        # exceptions on Python 2.
        try:
            import django
        except ImportError:
            raise ImportError(
                "Couldn't import Django. Are you sure it's installed and "
                "available on your PYTHONPATH environment variable? Did you "
                "forget to activate a virtual environment?"
            )
        raise
    has_migrate = False
    root_dir = settings.BASE_DIR
    for app in settings.INSTALLED_APPS:
        app = app.split('.')[-1]
        app_dir = os.path.join(root_dir, app)
        if os.path.isdir(app_dir):
            migrations_dir = "%s/%s" % (app_dir, "migrations")
            if os.path.isdir(migrations_dir):
                match = any(map(lambda initial_file: match_file.search(initial_file), os.listdir(migrations_dir)))
                if not match:
                    if not has_migrate:
                        has_migrate = True
                        execute_from_command_line(['afcat_client.py', 'migrate','--database', 'cmdb'])
                    execute_from_command_line(['afcat_client.py', 'makemigrations', app])
    if has_migrate:
        execute_from_command_line(['afcat_client.py', 'migrate'])
    execute_from_command_line(argv)
Exemplo n.º 12
def main(max_id):
    # MySQL Connector using pymysql
    pymysql.install_as_MySQLdb()
    # import MySQLdb
    db_data = 'mysql+mysqldb://' + 'ssafy' + ':' + 'ssafy' + '@' + 'localhost' + ':3306/' \
              + 'findme' + '?charset=utf8mb4'
    engine = create_engine(db_data, encoding='utf-8')
    conn = pymysql.connect(host='localhost',
                           user='******',
                           password='******',
                           db='findme',
                           connect_timeout=1)
    # https://oapi.saramin.co.kr/job-search/?access-key=0Q5ESrsPZNoxQPN98JpXKSFYmIHImsAyLfHbS2hUMGQUlxZ5O&start=0&count=110&job_category=4&sort=pd
    access_key = "0Q5ESrsPZNoxQPN98JpXKSFYmIHImsAyLfHbS2hUMGQUlxZ5O"
    start = 0
    isEnd = False
    while True:
        url = "https://oapi.saramin.co.kr/job-search/?access-key=" + access_key + "&start=" + str(
            start) + "&count=110&job_category=4&sort=pd"
        request = urllib.request.Request(url)
        response = urllib.request.urlopen(request)
        rescode = response.getcode()
        if (rescode == 200):
            response_body = response.read()
            # print(response_body.decode('utf-8'))

            language = [
                'Java', 'C', 'Python', 'C++', 'C#', 'Visual Basic .NET',
                'JavaScript', 'PHP', 'SQL', 'Go', 'R', 'Assembly', 'Swift',
                'Ruby', 'MATLAB', 'PL/SQL', 'Perl', 'Visual Basic',
                'Objective-C', 'Delphi', '자바스크립트'
            ]
            dict = json.loads(response_body.decode('utf-8'))
            count = dict['jobs']['count']
            start = dict['jobs']['start']
            total = dict['jobs']['total']
            if count == 0:
                break
            if isEnd:
                break
            recruit_ids = []
            comp_names = []
            recruit_utilitys = []
            post_dates = []
            start_dates = []
            due_dates = []
            recruit_titles = []
            job_categorys = []
            tech_stacks = []
            urls = []
            img_urls = []
            salarys = []
            javas = []
            cs = []
            pythons = []
            cpps = []
            csharps = []
            vbns = []
            jss = []
            phps = []
            sqls = []
            gos = []
            rs = []
            assemblys = []
            swifts = []
            rubys = []
            matlabs = []
            pls = []
            perls = []
            vbs = []
            ocs = []
            delphis = []
            # print(re.split('[,·]','java,c·c++'))
            for i in range(0, count):
                recruit_id = dict['jobs']['job'][i]['id']
                if recruit_id <= max_id:
                    isEnd = True
                    break
                try:
                    comp_name = dict['jobs']['job'][i]['company']['detail'][
                        'name']
                except KeyError:
                    comp_name = ''
                try:
                    tmp_url = dict['jobs']['job'][i]['company']['detail'][
                        'href']
                    data = urllib.request.urlopen(tmp_url)
                    # build a soup object to make searching easier
                    soup = BeautifulSoup(data, 'html.parser')
                    findheader = soup.find('div',
                                           attrs={'class': 'header_info'})
                    try:
                        img_url = findheader.find('img')['src']
                    except:
                        img_url = ''
                except KeyError:
                    img_url = ''
                try:
                    tech_stack = dict['jobs']['job'][i]['keyword']
                except KeyError:
                    tech_stack = ''
                try:
                    recruit_utility = dict['jobs']['job'][i]['active']
                except KeyError:
                    recruit_utility = ''
                try:
                    salary = dict['jobs']['job'][i]['salary']['code']
                except KeyError:
                    salary = ''
                try:
                    post_date = dict['jobs']['job'][i]['posting-timestamp']
                except KeyError:
                    post_date = ''
                try:
                    start_date = dict['jobs']['job'][i]['opening-timestamp']
                except KeyError:
                    start_date = ''
                try:
                    due_date = dict['jobs']['job'][i]['expiration-timestamp']
                except KeyError:
                    due_date = ''
                try:
                    recruit_title = dict['jobs']['job'][i]['position']['title']
                except KeyError:
                    recruit_title = ''
                try:
                    job_category = dict['jobs']['job'][i]['position'][
                        'job-category']['name']
                except KeyError:
                    job_category = ''
                try:
                    url = dict['jobs']['job'][i]['url']
                except KeyError:
                    url = ''
                keyword = re.split('[,·]', tech_stack)
                lang_list = list(
                    set(language) - set(set(language) - set(keyword)))
                mylist = [False] * 20
                tech_stack = ''
                for l in lang_list:
                    if (l == '자바스크립트'):
                        mylist[6] = True
                        tech_stack += "JavaScript, "
                    elif (mylist[language.index(l)] == False):
                        mylist[language.index(l)] = True
                        tech_stack += l + ", "
                tech_stack = tech_stack[:-2]

                recruit_ids.append(recruit_id)
                comp_names.append(comp_name)
                img_urls.append(img_url)
                recruit_utilitys.append(recruit_utility)
                salarys.append(salary)
                post_dates.append(post_date)
                start_dates.append(start_date)
                due_dates.append(due_date)
                recruit_titles.append(recruit_title)
                job_categorys.append(job_category)
                tech_stacks.append(tech_stack)
                urls.append(url)
                javas.append(mylist[0])
                cs.append(mylist[1])
                pythons.append(mylist[2])
                cpps.append(mylist[3])
                csharps.append(mylist[4])
                vbns.append(mylist[5])
                jss.append(mylist[6])
                phps.append(mylist[7])
                sqls.append(mylist[8])
                gos.append(mylist[9])
                rs.append(mylist[10])
                assemblys.append(mylist[11])
                swifts.append(mylist[12])
                rubys.append(mylist[13])
                matlabs.append(mylist[14])
                pls.append(mylist[15])
                perls.append(mylist[16])
                vbs.append(mylist[17])
                ocs.append(mylist[18])
                delphis.append(mylist[19])
                # print(comp_name)
                # print()
            my_dict = {
                'id': recruit_ids,
                'comp_name': comp_names,
                'img_url': img_urls,
                'utility': recruit_utilitys,
                'post_date': post_dates,
                'start_date': start_dates,
                'due_date': due_dates,
                'title': recruit_titles,
                'job_category': job_categorys,
                'tech_stack': tech_stacks,
                'url': urls,
                'salary': salarys,
                'Java': javas,
                'C': cs,
                'Python': pythons,
                'C++': cpps,
                'C#': csharps,
                'VB.NET': vbns,
                'Java_Script': jss,
                'PHP': phps,
                'SQL': sqls,
                'Go': gos,
                'R': rs,
                'Assembly': assemblys,
                'Swift': swifts,
                'Ruby': rubys,
                'MATLAB': matlabs,
                'PL/SQL': pls,
                'Perl': perls,
                'Visual Basic': vbs,
                'Objective-C': ocs,
                'Delphi': delphis
            }
            my_df = pd.DataFrame(my_dict)
            # print(my_df)
            # Execute to_sql to write the DataFrame into SQL
            my_df.to_sql('temp', engine, if_exists='replace', index=False)
            # create cursor
            cursor = conn.cursor()
            # Execute query
            sql1 = "INSERT IGNORE INTO `recruit` (`COMP_NAME`, `ID`, `TITLE`, `JOB_CATEGORY`, `TECH_STACK`, `URL`, `UTILITY`, `POST_DATE`, `START_DATE`, `DUE_DATE`, `IMG_URL`) SELECT `COMP_NAME`, `ID`, `TITLE`, `JOB_CATEGORY`, `TECH_STACK`, `URL`, `UTILITY`, `POST_DATE`, `START_DATE`, `DUE_DATE`, `IMG_URL` FROM `temp`"
            cursor.execute(sql1)
            conn.commit()
            sql2 = "INSERT IGNORE INTO `saramin_data` (`ID`, `COMP_NAME`, `UTILITY`, `POST_DATE`, `DUE_DATE`, `SALARY`, `JAVA`, `C`, `PYTHON`, `C++`, `C#`, `VB_NET`, `JAVA_SCRIPT`, `PHP`, `SQL`, `GO`, `R`, `ASSEMBLY`, `SWIFT`, `RUBY`, `MATLAB`, `PL/SQL`, `PERL`, `VISUAL BASIC`, `OBJECTIVE-C`, `DELPHI`) SELECT `ID`, `COMP_NAME`, `UTILITY`, `POST_DATE`, `DUE_DATE`, `SALARY`, `JAVA`, `C`, `PYTHON`, `C++`, `C#`, `VB.NET`, `JAVA_SCRIPT`, `PHP`, `SQL`, `GO`, `R`, `ASSEMBLY`, `SWIFT`, `RUBY`, `MATLAB`, `PL/SQL`, `PERL`, `VISUAL BASIC`, `OBJECTIVE-C`, `DELPHI` FROM `temp`"
            cursor.execute(sql2)
            conn.commit()
            start += 1
        else:
            print("Error Code: " + str(rescode))
            break
    engine.dispose()
    conn.close()
Exemplo n.º 13
import pymysql

pymysql.version_info = (1, 4, 13, "final", 0)  # pin the version pymysql reports
pymysql.install_as_MySQLdb()  # use pymysql in place of MySQLdb for database connections
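For context, a hedged sketch (values are placeholders, not from the original): a shim like the one above usually lives in the project package's __init__.py so it runs before Django loads its MySQL backend, and the settings keep the stock engine.

# settings.py fragment that pairs with the shim above
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',  # the backend still believes it talks to MySQLdb
        'NAME': 'mydb',
        'USER': 'myuser',
        'PASSWORD': 'change-me',
        'HOST': '127.0.0.1',
        'PORT': '3306',
    }
}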
Exemplo n.º 14
def run(base_dir, start_gunicorn_app=True):

    # Store a pidfile before doing anything else
    store_pidfile(base_dir)

    # For dumping stacktraces
    register_diag_handlers()

    # Capture warnings to log files
    logging.captureWarnings(True)

    # Start initializing the server now
    os.chdir(base_dir)

    try:
        import pymysql
        pymysql.install_as_MySQLdb()
    except ImportError:
        pass

    # We're doing it here even if someone doesn't use PostgreSQL at all
    # so we're not surprised when someone suddenly starts using PG.
    # TODO: Make sure it's registered for each of the subprocesses
    psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
    psycopg2.extensions.register_type(psycopg2.extensions.UNICODEARRAY)

    # Users may explicitly configure no certificate validation,
    # and we don't want urllib3 to warn us about it.
    import requests as _r
    _r.packages.urllib3.disable_warnings()

    repo_location = os.path.join(base_dir, 'config', 'repo')

    # Configure the logging first, before configuring the actual server.
    logging.addLevelName('TRACE1', TRACE1)

    with open(os.path.join(repo_location, 'logging.conf')) as f:
        dictConfig(yaml.load(f))

    logger = logging.getLogger(__name__)
    kvdb_logger = logging.getLogger('zato_kvdb')

    config = get_config(repo_location, 'server.conf')

    # New in 2.0 - Start monitoring as soon as possible
    if config.get('newrelic', {}).get('config'):
        import newrelic.agent
        newrelic.agent.initialize(
            config.newrelic.config, config.newrelic.environment or None, config.newrelic.ignore_errors or None,
            config.newrelic.log_file or None, config.newrelic.log_level or None)

    # New in 2.0 - override gunicorn-set Server HTTP header
    gunicorn.SERVER_SOFTWARE = config.misc.get('http_server_header', 'Zato')

    # Store KVDB config in logs, possibly replacing its password if told to
    kvdb_config = get_kvdb_config_for_log(config.kvdb)
    kvdb_logger.info('Master process config `%s`', kvdb_config)

    # New in 2.0 hence optional
    user_locale = config.misc.get('locale', None)
    if user_locale:
        locale.setlocale(locale.LC_ALL, user_locale)
        value = 12345
        logger.info('Locale is `%s`, amount of %s -> `%s`', user_locale, value,
                    locale.currency(value, grouping=True).decode('utf-8'))

    # Spring Python
    app_context = get_app_context(config)

    # Makes queries against Postgres asynchronous
    if asbool(config.odb.use_async_driver) and config.odb.engine == 'postgresql':
        make_psycopg_green()

    # New in 2.0 - Put HTTP_PROXY in os.environ.
    http_proxy = config.misc.get('http_proxy', False)
    if http_proxy:
        os.environ['http_proxy'] = http_proxy

    crypto_manager = get_crypto_manager(repo_location, app_context, config)
    parallel_server = app_context.get_object('parallel_server')

    zato_gunicorn_app = ZatoGunicornApplication(parallel_server, repo_location, config.main, config.crypto)

    parallel_server.crypto_manager = crypto_manager
    parallel_server.odb_data = config.odb
    parallel_server.host = zato_gunicorn_app.zato_host
    parallel_server.port = zato_gunicorn_app.zato_port
    parallel_server.repo_location = repo_location
    parallel_server.base_dir = base_dir
    parallel_server.tls_dir = os.path.join(parallel_server.base_dir, 'config', 'repo', 'tls')
    parallel_server.fs_server_config = config
    parallel_server.user_config.update(config.user_config_items)
    parallel_server.startup_jobs = app_context.get_object('startup_jobs')
    parallel_server.app_context = app_context

    # Remove all locks possibly left over by previous server instances
    kvdb = app_context.get_object('kvdb')
    kvdb.component = 'master-proc'
    clear_locks(kvdb, config.main.token, config.kvdb, crypto_manager.decrypt)

    # Turn the repo dir into an actual repository and commit any new/modified files
    RepoManager(repo_location).ensure_repo_consistency()

    # New in 2.0 so it's optional.
    profiler_enabled = config.get('profiler', {}).get('enabled', False)

    # New in 2.0 so it's optional.
    sentry_config = config.get('sentry')

    dsn = sentry_config.pop('dsn', None)
    if dsn:

        from raven import Client
        from raven.handlers.logging import SentryHandler

        handler_level = sentry_config.pop('level')
        client = Client(dsn, **sentry_config)

        handler = SentryHandler(client=client)
        handler.setLevel(getattr(logging, handler_level))

        logger = logging.getLogger('')
        logger.addHandler(handler)

        for name in logging.Logger.manager.loggerDict:
            if name.startswith('zato'):
                logger = logging.getLogger(name)
                logger.addHandler(handler)

    if asbool(profiler_enabled):
        profiler_dir = os.path.abspath(os.path.join(base_dir, config.profiler.profiler_dir))
        parallel_server.on_wsgi_request = ProfileMiddleware(
            parallel_server.on_wsgi_request,
            log_filename = os.path.join(profiler_dir, config.profiler.log_filename),
            cachegrind_filename = os.path.join(profiler_dir, config.profiler.cachegrind_filename),
            discard_first_request = config.profiler.discard_first_request,
            flush_at_shutdown = config.profiler.flush_at_shutdown,
            path = config.profiler.url_path,
            unwind = config.profiler.unwind)

    # New in 2.0 - set environment variables for servers to inherit
    os_environ = config.get('os_environ', {})
    for key, value in os_environ.items():
        os.environ[key] = value

    # Run the app at last
    if start_gunicorn_app:
        zato_gunicorn_app.run()
    else:
        return zato_gunicorn_app.zato_wsgi_app
Exemplo n.º 15
Arquivo: prod.py Projeto: chan48/ot
from .common import *
import os

ALLOWED_HOSTS = ['*']
# DEBUG = False  # error logging via Sentry

import pymysql
pymysql.install_as_MySQLdb()  # make pymysql behave like MySQLdb

DATABASES = {
    'default': {
        'ENGINE': os.environ.get('DB_ENGINE', 'django.db.backends.mysql'),
        'HOST': os.environ['DB_HOST'],
        'USER': os.environ['DB_USER'],
        'PASSWORD': os.environ['DB_PASSWORD'],
        'NAME': os.environ['DB_NAME'],
        'PORT': os.environ['DB_PORT'],
    },
}

INSTALLED_APPS += ['storages']

# add the django-storages app dependency
# switch the default static/media storage to django-storages
STATICFILES_STORAGE = 'ot.storages.StaticS3Boto3Storage'
DEFAULT_FILE_STORAGE = 'ot.storages.MediaS3Boto3Storage'

# minimal settings needed for S3 file management
# do not keep credentials in source code; configuring them via environment variables is recommended
AWS_ACCESS_KEY_ID = os.environ['AWS_ACCESS_KEY_ID']
AWS_SECRET_ACCESS_KEY = os.environ['AWS_SECRET_ACCESS_KEY']
Exemplo n.º 16
import pymysql
pymysql.install_as_MySQLdb()

# from .celery import app as celery_app
#
# __all__ = ['celery_app']
#
Exemplo n.º 17
#encoding=utf-8
import data
import chardet
import pymysql as MySQLdb
from matplotlib import pylab as plt
import pickle
import numpy as np
import datetime

MySQLdb.install_as_MySQLdb()
conn = MySQLdb.connect(host='localhost', user='******',passwd='1', db = 'ped_new', charset='utf8')   
cursor = conn.cursor()


def weibo_number_analysis():
    initTime = datetime.datetime(2013, 4, 1)
    timeinfo = []
    count = []

    for r in range(0, 31):
        preTime = initTime + datetime.timedelta(days = r)
        nextTime = initTime + datetime.timedelta(days = r+1)
        c = cursor.execute('select * from weibo_new where publish_time > "%s" and publish_time < "%s"' %(str(preTime), str(nextTime)))
        print("Between %s and %s there are %s weibos" % (preTime, nextTime, c))
        timeinfo.append(str(datetime.date.isoformat(preTime)))
        count.append(c)

    plt.xticks(range(len(timeinfo)), timeinfo, size='small', rotation='vertical')
    plt.ylabel('Number of Tweet')
    plt.xlabel('date')
    # plt.hist(count, 50, normed=1, facecolor='g', alpha=0.75)
Exemplo n.º 18
def install_project_hook():
    import pymysql
    pymysql.install_as_MySQLdb()
Exemplo n.º 19
from pymysql import install_as_MySQLdb

install_as_MySQLdb()
Exemplo n.º 20
 def __init__(self,name="CR"):
     self.app = Flask(name)
     self.db = SQLAlchemy(self.app)
     pm.install_as_MySQLdb()
     self.app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql://*****:*****@localhost/shuracr?charset=utf8'
     self.db.init_app(self.app)
Exemplo n.º 21
# Requires PyMySQL as first choice or
# MySQLdb <https://sourceforge.net/projects/mysql-python/>
try:
    import pymysql as mysqldb
except ImportError:
    try:
        import MySQLdb as mysqldb
    except ImportError:
        raise ImportError('No supported MySQL library installed. '
                          'Please install PyMySQL.')
    else:
        pywikibot.warning("PyMySQL not found. It'll fallback "
                          'on the deprecated library MySQLdb.')
else:
    mysqldb.install_as_MySQLdb()

from pywikibot import config2 as config
from pywikibot.tools import deprecated_args, UnicodeType


@deprecated_args(encoding=None)
def mysql_query(query, params=None, dbname=None, verbose=None):
    """Yield rows from a MySQL query.

    An example query that yields all ns0 pages might look like::

        SELECT
         page_namespace,
         page_title
        FROM page
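A hedged consumption sketch (the query and dbname are illustrative): mysql_query yields rows lazily, so it is normally iterated rather than materialized.

# illustrative only; assumes a reachable replica for 'enwiki'
for page_namespace, page_title in mysql_query(
        'SELECT page_namespace, page_title FROM page WHERE page_namespace = %(ns)s',
        params={'ns': 0}, dbname='enwiki'):
    print(page_namespace, page_title)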
Exemplo n.º 22
import pymysql

pymysql.install_as_MySQLdb()  # tell Django to use pymysql in place of the default MySQLdb
Exemplo n.º 23
from pymysql import install_as_MySQLdb

install_as_MySQLdb()
# import pymysql
# pymysql.install_as_MySQLdb()
Exemplo n.º 24
# Device data for Alexa discovery - MariaDB version; duplicate records are avoided by using upsert
# Uses DataSet - a simple data abstraction layer over SQLAlchemy
# https://dataset.readthedocs.io/en/latest/
# MySQL/MariaDB driver work around:
# https://stackoverflow.com/questions/53024891/modulenotfounderror-no-module-named-mysqldb

# pip install dataset

import pymysql
pymysql.install_as_MySQLdb()  # workaround so code that expects MySQLdb runs on PyMySQL under Python 3.x
import dataset

#-----------------------------------------------------------------------------------------
# Globals
#-----------------------------------------------------------------------------------------

# format of database url is "dialect://*****:*****@host:port/dbname"
#db = dataset.connect ('sqlite:///temp.db')  # triple slash to denote it is on the local folder
#db = dataset.connect ('sqlite:///:memory:') # ephemeral; useful for testing
db = dataset.connect()  # set the DATABASE_URL environment variable; without it, connect() defaults to :memory:
# 'customers' database should already exist; DataSet library cannot create it programmatically
#db = dataset.connect ("mysql://*****:*****@100.101.102.103:3306/customers")
print(type(db))

device_table = None
#-----------------------------------------------------------------------------------------
# Functions
#-----------------------------------------------------------------------------------------
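A hedged sketch of the upsert pattern described in the header comment (the table and column names are assumptions, not from the original):

device_table = db['devices']          # DataSet creates the table on first use
device_table.upsert(
    {'endpoint_id': 'light-01', 'friendly_name': 'Kitchen light', 'state': 'OFF'},
    ['endpoint_id'])                  # match on endpoint_id so re-runs update instead of duplicating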
Exemplo n.º 25
 def __init__(self, username, password, database):
     pymysql.install_as_MySQLdb()
     connection_string = "mysql+mysqldb://" + username + ":" + password + "@127.0.0.1/" + database + "?charset=utf8"
     super().__init__(connection_string)
Exemplo n.º 26
"""
Generated by 'django-admin startproject' using Django 1.8.

For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/

For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
import pymysql
from django.core.urlresolvers import reverse_lazy

pymysql.install_as_MySQLdb()  # force Django to use PyMySQL instead of MySQLdb

BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))


# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'lthoamx630l=zyl1d9lnl!rplokutufilbo&kzsf7+vuqr=qh+'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False

ALLOWED_HOSTS = ['*']
Exemplo n.º 27
import pymysql as ps
from sqlalchemy import create_engine
# from threading import Thread
from datapool.E_okex.E_okex_ws.E_okex_api import OkexApi
from utilPool.generalUtil import myThread

ps.install_as_MySQLdb()

proxy = {'http_proxy_host':'118.114.77.47',
         'http_proxy_port':'8080'}

tableName = 'okex'
conn = create_engine( 'mysql://*****:*****@localhost/datapool?charset=utf8',echo = False)

threadList = []
test = OkexApi()
param = {'depth':{'event':'addChannel',
                  'channel':'ok_sub_spot_btc_usdt_depth_5'}
         }

threadList.append(myThread(name='monitor', target=test.isDisconn))
threadList.append(myThread(name='keepConnect', target=test.keepconnect, kwargs=proxy))
threadList.append(myThread(name='refreshCommand', target=test.refreshCommand, args=(param,)))
threadList.append(myThread(name='savedata',target=test.saveData, args=(conn, tableName)))
for i in threadList:
    i.start()
# test.sendMarketDataRequest(param)
# t1 = test.saveData(conn,tablename)

Exemplo n.º 28
#encoding=utf-8
import marshal
import pymysql as MySQLdb
import datetime
import pickle

STORE_DIR = 'tmp_data/hours/'
T = 10
t = 2
DAYS = False
initTime = datetime.datetime(2013, 5, 1, 0, 0, 0)

# initialization of the MySQL connection
MySQLdb.install_as_MySQLdb()
conn = MySQLdb.connect(host='localhost', charset='utf8', user='******',passwd='1', db = 'ped_new', use_unicode=False )   
# conn = MySQLdb.connect(host='localhost', user='******',passwd='1', db = 'ped', charset='utf8')   
cursor = conn.cursor()

corpus = []  # the ith set in the list represents TWt (the corpus in interval i)
weiboVector = []  # the ith list in the list represents the vectors of the tweets in interval i
nutrition = []  # the ith dict in the list represents the nutrition of each term at interval i
energy = []  # the ith dict in the list represents the energy of each term at interval i
# topics = []  # the ith set in the list represents the topics at interval i
contentEnergy = []

# get the user authority
users_ptr = open('../data/backup/new_user', 'rb')
users = pickle.load(users_ptr)
users_ptr.close()
authority = {user['idstr']:user['authority'] for user in users}
Exemplo n.º 29
    def __init__(self, db, user, password='', host='localhost', port=0, **kw):
        drivers = kw.pop('driver', None) or 'mysqldb'
        for driver in drivers.split(','):
            driver = driver.strip()
            if not driver:
                continue
            try:
                if driver.lower() in ('mysqldb', 'pymysql'):
                    if driver.lower() == 'pymysql':
                        import pymysql
                        pymysql.install_as_MySQLdb()
                    import MySQLdb
                    if driver.lower() == 'mysqldb':
                        if MySQLdb.version_info[:3] < (1, 2, 2):
                            raise ValueError(
                                'SQLObject requires MySQLdb 1.2.2 or later')
                    import MySQLdb.constants.CR
                    import MySQLdb.constants.ER
                    self.module = MySQLdb
                    if driver.lower() == 'mysqldb':
                        self.CR_SERVER_GONE_ERROR = \
                            MySQLdb.constants.CR.SERVER_GONE_ERROR
                        self.CR_SERVER_LOST = \
                            MySQLdb.constants.CR.SERVER_LOST
                    else:
                        self.CR_SERVER_GONE_ERROR = \
                            MySQLdb.constants.CR.CR_SERVER_GONE_ERROR
                        self.CR_SERVER_LOST = \
                            MySQLdb.constants.CR.CR_SERVER_LOST
                    self.ER_DUP_ENTRY = MySQLdb.constants.ER.DUP_ENTRY
                elif driver == 'connector':
                    import mysql.connector
                    self.module = mysql.connector
                    self.CR_SERVER_GONE_ERROR = \
                        mysql.connector.errorcode.CR_SERVER_GONE_ERROR
                    self.CR_SERVER_LOST = \
                        mysql.connector.errorcode.CR_SERVER_LOST
                    self.ER_DUP_ENTRY = mysql.connector.errorcode.ER_DUP_ENTRY
                elif driver == 'oursql':
                    import oursql
                    self.module = oursql
                    self.CR_SERVER_GONE_ERROR = \
                        oursql.errnos['CR_SERVER_GONE_ERROR']
                    self.CR_SERVER_LOST = oursql.errnos['CR_SERVER_LOST']
                    self.ER_DUP_ENTRY = oursql.errnos['ER_DUP_ENTRY']
                else:
                    raise ValueError(
                        'Unknown MySQL driver "%s", '
                        'expected mysqldb, connector, '
                        'oursql or pymysql' % driver)
            except ImportError:
                pass
            else:
                break
        else:
            raise ImportError(
                'Cannot find a MySQL driver, tried %s' % drivers)
        self.host = host
        self.port = port or 3306
        self.db = db
        self.user = user
        self.password = password
        self.kw = {}
        for key in ("unix_socket", "init_command",
                    "read_default_file", "read_default_group", "conv"):
            if key in kw:
                self.kw[key] = kw.pop(key)
        for key in ("connect_timeout", "compress", "named_pipe", "use_unicode",
                    "client_flag", "local_infile"):
            if key in kw:
                self.kw[key] = int(kw.pop(key))
        for key in ("ssl_key", "ssl_cert", "ssl_ca", "ssl_capath"):
            if key in kw:
                if "ssl" not in self.kw:
                    self.kw["ssl"] = {}
                self.kw["ssl"][key[4:]] = kw.pop(key)
        if "charset" in kw:
            self.dbEncoding = self.kw["charset"] = kw.pop("charset")
        else:
            self.dbEncoding = None

        global mysql_Bin
        if not PY2 and mysql_Bin is None:
            mysql_Bin = self.module.Binary
            self.module.Binary = lambda x: mysql_Bin(x).decode(
                'ascii', errors='surrogateescape')

        self._server_version = None
        self._can_use_microseconds = None
        DBAPI.__init__(self, **kw)
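A hedged usage sketch (in SQLObject this __init__ belongs to MySQLConnection; treat that class name as an assumption here): the driver keyword is how PyMySQL gets selected explicitly.

# illustrative only
conn = MySQLConnection(db='mydb', user='myuser', password='secret',
                       host='127.0.0.1', driver='pymysql', charset='utf8')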
Exemplo n.º 30
"""
Django settings for config project.

Generated by 'django-admin startproject' using Django 3.0.6.

For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/

For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""

import os
import pymysql				        # !!!
pymysql.version_info = (1, 3, 13, "final", 0)   # !!!
pymysql.install_as_MySQLdb()                    # !!!

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))


# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '5iu)6=6itgw#%hg&)2w6p7y7h97atbv7uo#vq0eepy$j^k44zg'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = []
Exemplo n.º 31
Arquivo: main.py Projeto: SciF0r/zato
def run(base_dir, start_gunicorn_app=True):

    # Store a pidfile before doing anything else
    store_pidfile(base_dir)

    # For dumping stacktraces
    register_diag_handlers()

    # Start initializing the server now
    os.chdir(base_dir)

    try:
        import pymysql
        pymysql.install_as_MySQLdb()
    except ImportError:
        pass

    # We're doing it here even if someone doesn't use PostgreSQL at all
    # so we're not surprised when someone suddenly starts using PG.
    # TODO: Make sure it's registered for each of the subprocesses
    psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
    psycopg2.extensions.register_type(psycopg2.extensions.UNICODEARRAY)

    repo_location = os.path.join(base_dir, 'config', 'repo')

    # Configure the logging first, before configuring the actual server.
    logging.addLevelName('TRACE1', TRACE1)
    logging.config.fileConfig(os.path.join(repo_location, 'logging.conf'))

    logger = logging.getLogger(__name__)
    kvdb_logger = logging.getLogger('zato_kvdb')

    config = get_config(repo_location, 'server.conf')

    # New in 2.0 - Start monitoring as soon as possible
    if config.get('newrelic', {}).get('config'):
        import newrelic.agent
        newrelic.agent.initialize(
            config.newrelic.config, config.newrelic.environment or None, config.newrelic.ignore_errors or None,
            config.newrelic.log_file or None, config.newrelic.log_level or None)

    # Store KVDB config in logs, possibly replacing its password if told to
    kvdb_config = get_kvdb_config_for_log(config.kvdb)
    kvdb_logger.info('Master process config `%s`', kvdb_config)

    # New in 2.0 hence optional
    user_locale = config.misc.get('locale', None)
    if user_locale:
        locale.setlocale(locale.LC_ALL, user_locale)
        value = 12345
        logger.info('Locale is `%s`, amount of %s -> `%s`', user_locale, value,
                    locale.currency(value, grouping=True).decode('utf-8'))

    # Spring Python
    app_context = get_app_context(config)

    # Makes queries against Postgres asynchronous
    if asbool(config.odb.use_async_driver) and config.odb.engine == 'postgresql':
        make_psycopg_green()

    # New in 2.0 - Put HTTP_PROXY in os.environ.
    http_proxy = config.misc.get('http_proxy', False)
    if http_proxy:
        os.environ['http_proxy'] = http_proxy

    crypto_manager = get_crypto_manager(repo_location, app_context, config)
    parallel_server = app_context.get_object('parallel_server')

    zato_gunicorn_app = ZatoGunicornApplication(parallel_server, repo_location, config.main, config.crypto)

    parallel_server.crypto_manager = crypto_manager
    parallel_server.odb_data = config.odb
    parallel_server.host = zato_gunicorn_app.zato_host
    parallel_server.port = zato_gunicorn_app.zato_port
    parallel_server.repo_location = repo_location
    parallel_server.base_dir = base_dir
    parallel_server.fs_server_config = config
    parallel_server.user_config.update(config.user_config_items)
    parallel_server.startup_jobs = app_context.get_object('startup_jobs')
    parallel_server.app_context = app_context

    # Remove all locks possibly left over by previous server instances
    kvdb = app_context.get_object('kvdb')
    kvdb.component = 'master-proc'
    clear_locks(kvdb, config.main.token, config.kvdb, crypto_manager.decrypt)

    # Turn the repo dir into an actual repository and commit any new/modified files
    RepoManager(repo_location).ensure_repo_consistency()

    # New in 2.0 so it's optional.
    profiler_enabled = config.get('profiler', {}).get('enabled', False)

    if asbool(profiler_enabled):
        profiler_dir = os.path.abspath(os.path.join(base_dir, config.profiler.profiler_dir))
        parallel_server.on_wsgi_request = ProfileMiddleware(
            parallel_server.on_wsgi_request,
            log_filename = os.path.join(profiler_dir, config.profiler.log_filename),
            cachegrind_filename = os.path.join(profiler_dir, config.profiler.cachegrind_filename),
            discard_first_request = config.profiler.discard_first_request,
            flush_at_shutdown = config.profiler.flush_at_shutdown,
            path = config.profiler.url_path,
            unwind = config.profiler.unwind)

    # Run the app at last
    if start_gunicorn_app:
        zato_gunicorn_app.run()
    else:
        return zato_gunicorn_app.zato_wsgi_app
Exemplo n.º 32
#!/usr/bin/python3 -u
import sys
import os
#import sqlite3
import pymysql
pymysql.install_as_MySQLdb() #makes pymysql function as MySQLdb
#
# General database access
#
db = None
cursor = None
rpm2htmlVerbose = 10

def init_sql():
    global db
    global cursor

    try:
        myhost = os.environ["MySQL_HOST"]
    except KeyError:
        myhost = '192.168.1.4'

    try:
        mybase = os.environ["MySQL_BASE"]
    except KeyError:
        mybase = 'python_rpm2html'

    try:
        user = os.environ["MySQL_USER"]
    except KeyError:
        user = '******'
Exemplo n.º 33
import os
from fnmatch import fnmatch
from django.conf import global_settings
from varlet import variable
import pymysql
pymysql.install_as_MySQLdb()


PROJECT_DIR = os.path.dirname(__file__)
HOME_DIR = os.path.normpath(os.path.join(PROJECT_DIR, '../'))

DEBUG = variable("DEBUG", default=False)
TEMPLATE_DEBUG = DEBUG

# if you're having trouble connecting to LDAP set this to True so you can login
# to track, bypassing LDAP group checks
LDAP_DISABLED = variable("LDAP_DISABLED", default=False)

LDAP = {
    'default': {
        'host': "ldap://ldap-login.oit.pdx.edu",
        'username': '******',
        'password': '',
        'search_dn': 'dc=pdx,dc=edu'
    }
}

# ('Your Name', '*****@*****.**'),
ADMINS = variable("ADMINS", [])
MANAGERS = ADMINS
Exemplo n.º 34
#Football Analytics

#Scraper to obtain multiple user-agents and insert them into the user_agents table

#Author: Liam Culligan
#Date: January 2017

#Import required packages and functions
import pymysql
pymysql.install_as_MySQLdb()  # register PyMySQL as the MySQLdb driver
import MySQLdb as my
import requests
from bs4 import BeautifulSoup

#Connect to the MySQL database
db = my.connect(host='localhost',
                user='******',
                passwd='',
                db='football_analytics')

cursor = db.cursor()

#Get user-agents
#Get url
url = "http://techpatterns.com/downloads/firefox/useragentswitcher.xml"
headers = {
    'User-Agent':
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36'
}
r = requests.get(url, headers=headers)
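A hedged continuation sketch (the useragent element name, its useragent attribute, and the user_agents schema are assumptions, not shown above):

soup = BeautifulSoup(r.content, 'html.parser')
for tag in soup.find_all('useragent'):
    ua = tag.get('useragent')
    if ua:
        # INSERT IGNORE relies on a unique key over user_agent to skip duplicates
        cursor.execute("INSERT IGNORE INTO user_agents (user_agent) VALUES (%s)", (ua,))
db.commit()
db.close()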