Example #1
def step_impl(context,post_count):
    cnt = int(post_count)
    for it in range(cnt):
        pdf_id              =   str(get_guid())
        context.pdf_id_grp.append(pdf_id)
        context.data['json'][0]['pdf_id']=pdf_id
        context.execute_steps(unicode("when the data is posted"))
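
Note: get_guid is not imported in this step; as Examples #3 and #7 show, it is the project's alias for uuid.uuid4. A minimal, standard-library sketch of the two usages that recur throughout these examples:

from uuid import uuid4 as get_guid

pdf_id = str(get_guid())               # full random UUID as text, used for the pdf_id fields
short_guid = str(get_guid().hex)[:7]   # 7-character hex prefix, used for the 'guid' keys
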
Example #2
def step_impl(context):
    headers             =   {'Content-type' :   'application/json'}
    context.data        =   {'json':[{
                                  'pdf_id'              : str(get_guid()),
                                  'printer_id'          : 'printer_id1',
                                  'machine_id'          : 'vendor1',
                                  'application_name'    : 'application_name1',
                                  'doc_name'            : 'test_unit_1'
                                    }],
                             'headers'      :   headers}
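
For context, one hypothetical way to send the payload this step assembles, using requests; the helper name and the URL are assumptions, not part of the project code (the /api/check/ path only appears in Example #5):

import requests

def post_payload(data, url='http://localhost/api/check/'):
    # data['json'] holds the JSON body, data['headers'] the Content-type header
    return requests.post(url, json=data['json'], headers=data['headers'])
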
Example #3
    def __init__(self):
        from py_classes                         import To_Class_Dict,To_Sub_Classes
        from os                                 import environ          as os_environ
        from uuid                               import uuid4            as get_guid
        D                                   =   {'guid'                 :   str(get_guid().hex)[:7],
                                                 'user'                 :   os_environ['USER'],
                                                }

        self.T                              =   To_Class_Dict(  self,
                                                                dict_list=[ D, locals() ],
                                                                update_globals=True
                                                              )
        self.One                            =   One(self)
        self.Two                            =   Two(self)
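
To_Class_Dict comes from the project's py_classes module, whose source is not included in these examples. An illustrative stand-in that mimics what the call sites above appear to rely on (attach each dict's keys as attributes, optionally publishing them as globals):

class To_Class_Dict(object):
    # Stand-in only; the real implementation lives in py_classes.
    def __init__(self, owner, dict_list=(), update_globals=False):
        for d in dict_list:
            self.__dict__.update(d)              # expose every key as an attribute
        if update_globals:
            globals().update(self.__dict__)      # mirror the attributes into module globals
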
Example #4
def step_impl(context,order_post_url):
    context.post_url        =   order_post_url
    pdf_id                  =   str(get_guid())
    if hasattr(context,'pdf_id_grp'):
        context.pdf_id_grp.append(pdf_id)
    else:
        context.pdf_id_grp  =   [pdf_id]
    headers                 =   {'Content-type' :   'application/json'}
    context.data            =   {'json'         :[{
                                        'pdf_id'            :   pdf_id,
                                        'printer_id'        :   'printer_id1',
                                        'application_name'  :   'application_name1',
                                        'doc_name'          :   'test_unit_1'
                                    }],
                                'headers'       :   headers}
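
The hasattr/else bookkeeping above can be collapsed without changing behaviour; a two-line sketch using the same context and pdf_id names as the step:

context.pdf_id_grp = getattr(context, 'pdf_id_grp', [])
context.pdf_id_grp.append(pdf_id)
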
Example #5
File: tests.py  Project: sethc23/aprinto
def printer_driver_check_in(base_url,post_action='',show_info='',show_post=False,show_resp=False):
    p_url   = base_url+'/api/check/'
    if show_info == 'show_info': print '\n\t\tTEST #1: printer driver check-in',post_action,'\n\t\t\tURL:',p_url
    guid    = str(get_guid())
    data = [{
              'pdf2_id'                  : guid,
              'printer_id'              : 'printer_id1',
              'machine_id'              : '12c61d88-5d0c-44af-a5fc-734f6327e1ec',        # authenticated client
               #'machine_id'            : 'admin1',                                    # authenticated admin
              # 'machine_id'              : 'vendor1',
              'application_name'        : 'application_name1',
              'doc_name'                : 'test_%s'%(DT.strftime(DT.now(),'%Y_%m_%d at %H:%M:%S'))
            }]

    resp = post_json(data,p_url,show_post,show_resp)
    if show_info == 'show_info': print '\n\t\t\t--> #1 SUCCESS\n'
    return True,resp,guid
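
post_json is another project helper whose body is not shown here; judging from the call site, a rough requests-based equivalent could look like this (an assumption, not the project's implementation):

import requests

def post_json(data, url, show_post=False, show_resp=False):
    if show_post:
        print(data)                            # echo the outgoing payload
    resp = requests.post(url, json=data)       # data is the list of dicts built above
    if show_resp:
        print(resp.text)
    return resp
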
Example #6
def step_impl(context,doc_type,upload_url,paired):
    context.post_url            =   upload_url

    if doc_type=='Client Contract':
        f_name                  =   'test_contract.pdf'
    elif doc_type=='pdf':
        f_name                  =   'test_page.pdf'
    elif doc_type=='txt':
        f_name                  =   'test_doc.txt'
    f_path                      =   F_DIR + f_name

    if paired=='the same':
        if hasattr(context,'order_tag'):
            new_name            =   context.order_tag + '_' + context.pdf_id_grp[-1] + f_path[f_path.rfind('.'):]
        else: new_name          =   context.pdf_id_grp[-1] + f_path[f_path.rfind('.'):]
    elif paired=='a different':
        new_uuid                =   str(get_guid())
        if hasattr(context,'order_tag'):
            new_name            =   context.order_tag + '_' + new_uuid + f_path[f_path.rfind('.'):]
        else: new_name          =   new_uuid + f_path[f_path.rfind('.'):]

    context.upload_file         =   new_name
    new_path                    =   '/tmp/' + new_name
    context.upload_fpath        =   new_path
    if hasattr(context,'files_created'):
        context.files_created.append(new_path)
    else:
        context.files_created   =   [new_path]

    os_cmd('cp %s %s'%(f_path,new_path))
    h                           =   'form-data; '
    h                          +=   'name="local_document"; '
    h                          +=   'filename="%s"'%new_path
    headers                     =   {'Content-Disposition' :   h}
    context.data                =   {'headers'      :   headers,
                                    'files': {'local_document': open(new_path,'rb')},
                                    }
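
The shell copy via os_cmd (the project's own wrapper, not shown here) could also be done with the standard library; a minimal sketch:

import shutil

def copy_fixture(src_path, dst_path):
    # same effect as os_cmd('cp %s %s' % (src_path, dst_path)), without spawning a shell
    shutil.copy(src_path, dst_path)
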
Example #7
    def __init__(self):

        from py_classes import To_Class
        from uuid import uuid4 as get_guid
        from types import NoneType
        from re import sub as re_sub  # re_sub('patt','repl','str','cnt')
        from re import search as re_search  # re_search('patt','str')
        import pandas as pd
        pd.set_option('expand_frame_repr', False)
        pd.set_option('display.max_columns', None)
        pd.set_option('display.max_colwidth', 250)
        pd.set_option('display.max_rows', 1000)
        pd.set_option('display.width', 1500)
        pd.set_option('display.colheader_justify', 'left')
        np = pd.np
        np.set_printoptions(linewidth=1500, threshold=np.nan)
        from db_settings import DB_NAME, DB_HOST, DB_PORT, DB_USER, DB_PW
        from sqlalchemy import create_engine
        from logging import getLogger
        from logging import INFO as logging_info
        getLogger('sqlalchemy.dialects.postgresql').setLevel(logging_info)
        eng = create_engine(r'postgresql://%s:%s@%s:%s/%s' %   # user:password@host:port/dbname
                            (DB_USER, DB_PW, DB_HOST, DB_PORT, DB_NAME),
                            encoding='utf-8',
                            echo=False)
        D = {'guid': str(get_guid().hex)[:7]}
        self.T = To_Class(D)
        all_imports = locals().keys()  #+ globals().keys()
        for k in all_imports:
            if not k == 'D' and not k == 'self':
                self.T.update({k: eval(k)})
        globals().update(self.T.__dict__)

        self.ST_Parts = ST_Parts()
        self.Addr_Parsing = Addr_Parsing(self.T)
        self.GeoCoding = Geocoding(self.T)
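
Note that pd.np and threshold=np.nan only work on older pandas/NumPy releases; on current versions the same display setup would be roughly:

import sys
import numpy as np

np.set_printoptions(linewidth=1500, threshold=sys.maxsize)   # disable array truncation
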
Example #8
def main(args,kwargs):
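    # NOTE: this excerpt references self (e.g. self.T.pgsql, To_Class_Dict(self, ...)) even
    # though it is written as a module-level function; in the source project it presumably
    # runs as a method of an object that supplies those attributes.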
    if args.count('requests'):              import requests
    if args.count('urllib'):                from urllib import quote_plus,unquote

    import                                  datetime                as DT
    import                                  time
    delay                                   =   time.sleep
    from dateutil                           import parser           as DU               # e.g., DU.parse('some date as str') --> obj(datetime.datetime)
    from re                                 import findall          as re_findall
    from re                                 import sub              as re_sub           # re_sub('patt','repl','str','cnt')
    from re                                 import search           as re_search        # re_search('patt','str')
    import json
    from subprocess                         import Popen            as sub_popen
    from subprocess                         import PIPE             as sub_PIPE
    from traceback                          import format_exc       as tb_format_exc
    from sys                                import exc_info         as sys_exc_info
    from types                              import NoneType
    from uuid                               import uuid4            as get_guid
    from py_classes                         import To_Class,To_Class_Dict,To_Sub_Classes
    import                                  pandas                  as pd
    pd.set_option(                          'expand_frame_repr', False)
    pd.set_option(                          'display.max_columns', None)
    pd.set_option(                          'display.max_colwidth', 250)
    pd.set_option(                          'display.max_rows', 1000)
    pd.set_option(                          'display.width', 1500)
    pd.set_option(                          'display.colheader_justify','left')
    np                                      =   pd.np
    np.set_printoptions(                    linewidth=1500,threshold=np.nan)
    import logging
    logger = logging.getLogger(             'sqlalchemy.dialects.postgresql')
    logger.setLevel(logging.INFO)

    if args.count('pgsql'):              
        from sqlalchemy                     import create_engine
        from psycopg2                       import connect          as pg_connect
        try:
            eng                             =   create_engine(r'postgresql://%(DB_USER)s:%(DB_PW)s@%(DB_HOST)s:%(DB_PORT)s/%(DB_NAME)s'
                                                              % self.T.pgsql,
                                                              encoding='utf-8',
                                                              echo=False)
            conn                            =   pg_connect("dbname='%(DB_NAME)s' host='%(DB_HOST)s' port=%(DB_PORT)s \
                                                           user='******' password='******' "
                                                           % self.T.pgsql);
            cur                             =   conn.cursor()

        except:
            from getpass import getpass
            pw = getpass('Root password (to create DB:"%(DB_NAME)s" via CL): ' % self.T.pgsql)
            p = sub_popen(" ".join(["echo '%s' | sudo -S prompt='' " % pw,
                                    'su postgres -c "psql --cluster 9.4/main -c ',
                                    "'create database %(DB_NAME)s;'" % self.T.pgsql,
                                    '"']),
                          stdout=sub_PIPE,
                          shell=True)
            (_out, _err) = p.communicate()
            assert _err is None

    import inspect, os
    D                                       =   {'guid'                 :   str(get_guid().hex)[:7],
                                                 'pg_classes_pwd'       :   os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))),
                                                }
    D.update(                                   {'tmp_tbl'              :   'tmp_'+D['guid'],
                                                'current_filepath'     :   inspect.getfile(inspect.currentframe())})



    T                                       =   To_Class_Dict(  self,
                                                            dict_list=[D,locals()],
                                                            update_globals=True)    
Example #9
    def __init__(self,**kwargs):
        """

            pgSQL(db_settings=[DB_NAME, DB_USER, DB_PW, DB_HOST, DB_PORT])

        """

        def run_cmd(cmd):
            p = sub_popen(cmd,stdout=sub_PIPE,
                          shell=True,
                          executable='/bin/bash')
            (_out,_err) = p.communicate()
            assert _err is None
            return _out.rstrip('\n')

        def download_file(url,save_path):
            import os
            _dir = save_path[:save_path.rfind('/')]
            if not os.path.exists(_dir):
                os.makedirs(_dir)

            with open(save_path, 'wb') as handle:
                response = self.T.requests.get( url, stream=True)

                if not response.ok:
                    # Something went wrong
                    print 'error'

                for block in response.iter_content(1024):
                    if not block:
                        break

                    handle.write(block)
                    handle.flush()
            return True

        def read_json_from_url_response(url):
            r = self.T.requests.get(url)
            assert r.status_code == 200          # requests exposes the status as an int
            # print r.text
            g = r.text
            g = g.replace('true',"'true'")
            a = eval(g)
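            # A safer alternative would be to return r.json(), which keeps real booleans
            # instead of the 'true' strings produced by the replace/eval above.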
            return a

        def to_sql(cmd):
            self.T.conn.set_isolation_level(    0)
            self.T.cur.execute(                 cmd)

        def redirect_logs_to_file(file_desc='/dev/pts/0',msg_form="%(asctime)s - %(levelname)s - %(message)s"):
            # print T.logger.__dict__
            # print T.logger.manager.__dict__

            # for it in dir(logger):
            #     print it,getattr(logger,it)

            for it in self.T.logger.handlers:
                self.T.logger.removeHandler(it)

            for it in self.T.logger.parent.handlers:
                self.T.logger.parent.removeHandler(it)

            for it in self.T.logger.root.handlers:
                self.T.logger.root.removeHandler(it)

            # print logger.manager.__dict__
            del_these                       =   ['IPKernelApp','basic_logger']
            for it in del_these:
                if self.T.logger.manager.__dict__['loggerDict'].has_key(it):
                    del self.T.logger.manager.__dict__['loggerDict'][it]

            for k in self.T.logger.manager.__dict__['loggerDict'].keys():
                if k.count('sqlalchemy') or k.count('pandas'):
                    del self.T.logger.manager.__dict__['loggerDict'][k]

            self.T.logging.basicConfig(filename=file_desc, level=self.T.logging.DEBUG, format=msg_form)
            return

        def custom_geoseries_plot(s,figsize=(8,8)):
            # s=T.gd.GeoSeries(A)
            colormap='Set1'
            axes=None
            linewidth=1.0

            import matplotlib.pyplot as plt
            if axes is None:
                fig, ax = plt.subplots(figsize=figsize)
                ax.set_aspect('equal')
            else:
                ax = axes
            ax.get_xaxis().get_major_formatter().set_scientific(False)
            ax.get_xaxis().get_major_formatter().set_useOffset(False)
            plt.xticks(rotation='vertical')
            ax.get_yaxis().get_major_formatter().set_scientific(False)
            ax.get_yaxis().get_major_formatter().set_useOffset(False)
            color = T.gd.plotting.gencolor(len(s), colormap=colormap)
            for geom in s:
                if geom.type == 'Polygon' or geom.type == 'MultiPolygon':
                    T.gd.plotting.plot_multipolygon(ax, geom, facecolor=next(color), linewidth=linewidth)
                elif geom.type == 'LineString' or geom.type == 'MultiLineString':
                    T.gd.plotting.plot_multilinestring(ax, geom, color=next(color), linewidth=linewidth)
                elif geom.type == 'Point':
                    T.gd.plotting.plot_point(ax, geom)
            plt.ticklabel_format(style='plain')
            plt.grid()
            plt.draw()

        def _load_connectors():
            eng                             =   create_engine(r'postgresql://%(DB_USER)s:%(DB_PW)s@%(DB_HOST)s:%(DB_PORT)s/%(DB_NAME)s'
                                                              % T,
                                                              encoding='utf-8',
                                                              echo=False)
            conn                            =   pg_connect("dbname='%(DB_NAME)s' host='%(DB_HOST)s' port=%(DB_PORT)s \
                                                           user='******' password='******' "
                                                           % T);
            cur                             =   conn.cursor()
            return eng,conn,cur

        import                                  datetime                as DT
        dt = DT
        from dateutil                           import parser           as DU               # e.g., DU.parse('some date as str') --> obj(datetime.datetime)
        import                                  time
        delay                               =   time.sleep
        from urllib                             import quote_plus,unquote
        import re
        from re                                 import findall          as re_findall
        from re                                 import sub              as re_sub           # re_sub('patt','repl','str','cnt')
        from re                                 import search           as re_search        # re_search('patt','str')
        import json
        from subprocess                         import Popen            as sub_popen
        from subprocess                         import PIPE             as sub_PIPE
        from traceback                          import format_exc       as tb_format_exc
        from sys                                import exc_info         as sys_exc_info
        from types                              import NoneType
        from time                               import sleep            as delay
        from uuid                               import uuid4            as get_guid
        import                                  requests

        from py_classes.py_classes              import To_Sub_Classes,To_Class,To_Class_Dict
        T                                   =   To_Class()
        T.config                            =   To_Class(kwargs,recursive=True)
        if hasattr(T,'config') and hasattr(T.config,'pgsql'): 
            T.update(                           T.config.pgsql.__dict__)
        else:
            T.update(                           T.config.__dict__)
        
        db_vars = ['DB_NAME','DB_HOST','DB_PORT','DB_USER','DB_PW']
        db_vars = [it for it in db_vars if not T._has_key(it)]
        
        if not db_vars:
            pass

        elif locals().keys().count('db_settings'):
            DB_NAME,DB_USER,DB_PW,DB_HOST,DB_PORT = db_settings
            for it in db_vars:
                T[it] = eval(it)            # pick up the local of the same name unpacked above
            
        else:
            z = eval("__import__('db_settings')")
            for it in db_vars:
                T[it] = getattr(z,it)
        
        import                                  pandas                  as pd
        pd.set_option(                          'expand_frame_repr', False)
        pd.set_option(                          'display.max_columns', None)
        pd.set_option(                          'display.max_colwidth', 250)
        pd.set_option(                          'display.max_rows', 1000)
        pd.set_option(                          'display.width', 1500)
        pd.set_option(                          'display.colheader_justify','left')
        np                                  =   pd.np
        np.set_printoptions(                    linewidth=1500,threshold=np.nan)
        # import                                  geopandas               as gd
        import logging
        logger = logging.getLogger(             'sqlalchemy.dialects.postgresql')
        logger.setLevel(logging.INFO)
        from sqlalchemy                         import create_engine
        from psycopg2                           import connect          as pg_connect
        try:
            eng,conn,cur                    =   _load_connectors()

        except:
            from getpass import getpass
            pw = getpass('Root password (to create DB:"%(DB_NAME)s" via CL): ' % T)
            p = sub_popen(" ".join(["echo '%s' | sudo -S prompt='' " % pw,
                                    'su postgres -c "psql --cluster 9.4/main -c ',
                                    "'create database %(DB_NAME)s;'" % T,
                                    '"']),
                          stdout=sub_PIPE,
                          shell=True)
            (_out, _err) = p.communicate()
            assert _err is None
            eng,conn,cur                    =   _load_connectors()


        import inspect, os
        D                                   =   {'guid'                 :   str(get_guid().hex)[:7],
                                                 'pg_classes_pwd'       :   os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))),
                                                }
        D.update(                               {'tmp_tbl'              :   'tmp_'+D['guid'],
                                                 'current_filepath'     :   inspect.getfile(inspect.currentframe())})

        self.T                              =   To_Class_Dict(  self,
                                                                dict_list=[T.__dict__,D,locals()],
                                                                update_globals=True)

        self.Functions                      =   pgSQL_Functions(self)
        self.Triggers                       =   pgSQL_Triggers(self)
        self.Tables                         =   pgSQL_Tables(self)
        self.Databases                      =   pgSQL_Databases(self)
        self.Types                          =   pgSQL_Types(self)

        # if hasattr(T,'project_sql_files') and T.project_sql_files:
        #     self.F.functions_create_from_command_line(one_directory=T.project_sql_files)
        # if hasattr(T,'base_sql_files') and T.base_sql_files:
        #     self.F.functions_create_from_command_line(one_directory=T.base_sql_files)  
        if hasattr(T,'initial_check') and T.initial_check:
            self.__initial_check__()
        if hasattr(T,'temp_options') and T.temp_options:
            self.__temp_options__()
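
Hypothetical instantiation, following the docstring and the kwargs handling above; the class name pgSQL is taken from the docstring and every connection value is a placeholder:

db = pgSQL(pgsql={'DB_NAME': 'mydb', 'DB_USER': 'postgres', 'DB_PW': 'secret',
                  'DB_HOST': 'localhost', 'DB_PORT': 5432})
db.Tables     # pgSQL_Tables helper bound in __init__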