Example #1
# Imports required by this excerpt.
from io import StringIO

import numpy as np
import pandas as pd


def inDb(conn, curs, csvFile, tableName, dbType):
    # Bulk-load a CSV into PostgreSQL; the actual insert relies on PostgreSQL's
    # fast built-in COPY via copy_from(), wrapped by the insert_date() helper.
    print(tableName, csvFile)

    try:
        fileReader = pd.read_csv(
            csvFile,          # file to read
            header=None,      # row holding the column names; None generates default names
            # index_col='MROID',  # use a specific column as the index
            index_col=None,   # do not use any column as the index (auto-generate one)

            # columns to read, e.g.
            # usecols=['CITY', 'SDATE', 'GRIDX', 'GRIDY', 'S_RSRP', 'MROID'],

            # read a fixed number of rows per chunk and return an iterable reader
            chunksize=10000,

            iterator=True,

            # print statistics about the parsed data
            verbose=True,

            # if True and parse_dates is set, pandas tries to infer the datetime
            # format; when it succeeds, parsing can be 5-10x faster
            # infer_datetime_format=True,
            # parse_dates=['SDATE'],

            # parser engine: 'c' is faster, 'python' is more feature-complete
            engine='c',
            # engine: {'c', 'python'}, optional

            # quote character; delimiters inside quotes are ignored
            quotechar="'",

            # function used to parse the date columns
            # date_parser=parse_dates,

            # skip rows whose column count does not match instead of raising
            # (newer pandas spells this on_bad_lines='skip')
            error_bad_lines=False,

            # file encoding
            encoding='utf-8',

            # extra values to treat as NA/NaN; the defaults already include
            # '1.#IND', '1.#QNAN', 'N/A', 'NA', 'NULL', 'NaN', 'nan'
            na_values='NULL',

            # force all columns to a single dtype (or set them per column)
            dtype=object
            # dtype={'CITY': object,
            #        'GRIDX': np.float16,
            #        'GRIDY': np.float16,
            #        'S_RSRP': np.int16,
            #        }

            )

        for subDf in fileReader:
            print("current row: " + str(fileReader._currow))

            # drop invalid rows: keep only rows whose column 4 equals '15'
            subDf.drop(subDf[subDf[4] != '15'].index, inplace=True)

            # replace NaN with the literal string 'NULL'
            subDf.replace(np.NaN, 'NULL', inplace=True)

            # serialize the DataFrame chunk into a fresh in-memory buffer
            # (a new buffer per chunk, so earlier chunks are not re-inserted)
            output = StringIO()
            subDf.to_csv(output, sep='\t', index=False, header=False)
            output1 = output.getvalue()

            # retry until the insert succeeds
            result = 1
            while result:
                result = not insert_date(conn, curs, tableName, dbType, StringIO(output1))

    except Exception as e:
        print(str(e))
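
The insert_date() helper used above is not shown in the excerpt; the comments only say that the PostgreSQL path relies on the built-in copy_from(). A minimal, hypothetical sketch of such a helper, assuming a psycopg2 connection/cursor and the tab-separated buffer produced by to_csv():

def insert_date(conn, curs, tableName, dbType, buffer):
    """Hypothetical sketch: bulk-insert one chunk; return True on success."""
    try:
        if dbType == 'postgresql':
            # copy_from() streams the tab-separated buffer straight into the table
            curs.copy_from(buffer, tableName, sep='\t', null='NULL')
        conn.commit()
        return True
    except Exception as exc:
        conn.rollback()
        print(str(exc))
        return False
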
Example #2
import sys
from io import StringIO

import pytest
from docutils import nodes

from sphinx import addnodes
from sphinx.ext.autosummary import (autosummary_table, autosummary_toc,
                                    extract_summary, import_by_name,
                                    mangle_signature)
from sphinx.ext.autosummary.generate import (AutosummaryEntry,
                                             generate_autosummary_content,
                                             generate_autosummary_docs)
from sphinx.ext.autosummary.generate import main as autogen_main
from sphinx.testing.util import assert_node, etree_parse
from sphinx.util.docutils import new_document
from sphinx.util.osutil import cd

html_warnfile = StringIO()

default_kw = {
    'testroot': 'autosummary',
    'confoverrides': {
        'extensions': ['sphinx.ext.autosummary'],
        'autosummary_generate': True,
        'autosummary_generate_overwrite': False,
        'source_suffix': '.rst'
    }
}


@pytest.fixture(scope='function', autouse=True)
def unload_target_module():
    sys.modules.pop('target', None)
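
For context (not part of the excerpt): in the sphinx test suite a default_kw dict like the one above is typically splatted into the sphinx test marker. A hedged sketch of that pattern; the test name is illustrative:

@pytest.mark.sphinx('dummy', **default_kw)
def test_autosummary_generate_runs(app, status, warning):
    # app/status/warning are sphinx.testing fixtures; building exercises autosummary_generate
    app.builder.build_all()
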
Example #3
# Excerpt from the pandas parser test suite; it relies on the suite's fixtures
# (all_parsers, keep_date_col) and on imports such as StringIO, datetime,
# numpy as np, DataFrame, pandas._testing as tm, and pandas' internal
# parsing/parsers date helpers.
def test_multiple_date_col_custom(all_parsers, keep_date_col):
    data = """\
KORD,19990127, 19:00:00, 18:56:00, 0.8100, 2.8100, 7.2000, 0.0000, 280.0000
KORD,19990127, 20:00:00, 19:56:00, 0.0100, 2.2100, 7.2000, 0.0000, 260.0000
KORD,19990127, 21:00:00, 20:56:00, -0.5900, 2.2100, 5.7000, 0.0000, 280.0000
KORD,19990127, 21:00:00, 21:18:00, -0.9900, 2.0100, 3.6000, 0.0000, 270.0000
KORD,19990127, 22:00:00, 21:56:00, -0.5900, 1.7100, 5.1000, 0.0000, 290.0000
KORD,19990127, 23:00:00, 22:56:00, -0.5900, 1.7100, 4.6000, 0.0000, 280.0000
"""
    parser = all_parsers

    def date_parser(*date_cols):
        """
        Test date parser.

        Parameters
        ----------
        date_cols : args
            The list of data columns to parse.

        Returns
        -------
        parsed : Series
        """
        return parsing.try_parse_dates(parsers._concat_date_cols(date_cols))

    result = parser.read_csv(StringIO(data),
                             header=None,
                             date_parser=date_parser,
                             prefix="X",
                             parse_dates={
                                 "actual": [1, 2],
                                 "nominal": [1, 3]
                             },
                             keep_date_col=keep_date_col)
    expected = DataFrame([
        [
            datetime(1999, 1, 27, 19, 0),
            datetime(1999, 1, 27, 18, 56), "KORD", "19990127", " 19:00:00",
            " 18:56:00", 0.81, 2.81, 7.2, 0.0, 280.0
        ],
        [
            datetime(1999, 1, 27, 20, 0),
            datetime(1999, 1, 27, 19, 56), "KORD", "19990127", " 20:00:00",
            " 19:56:00", 0.01, 2.21, 7.2, 0.0, 260.0
        ],
        [
            datetime(1999, 1, 27, 21, 0),
            datetime(1999, 1, 27, 20, 56), "KORD", "19990127", " 21:00:00",
            " 20:56:00", -0.59, 2.21, 5.7, 0.0, 280.0
        ],
        [
            datetime(1999, 1, 27, 21, 0),
            datetime(1999, 1, 27, 21, 18), "KORD", "19990127", " 21:00:00",
            " 21:18:00", -0.99, 2.01, 3.6, 0.0, 270.0
        ],
        [
            datetime(1999, 1, 27, 22, 0),
            datetime(1999, 1, 27, 21, 56), "KORD", "19990127", " 22:00:00",
            " 21:56:00", -0.59, 1.71, 5.1, 0.0, 290.0
        ],
        [
            datetime(1999, 1, 27, 23, 0),
            datetime(1999, 1, 27, 22, 56), "KORD", "19990127", " 23:00:00",
            " 22:56:00", -0.59, 1.71, 4.6, 0.0, 280.0
        ],
    ],
                         columns=[
                             "actual", "nominal", "X0", "X1", "X2", "X3", "X4",
                             "X5", "X6", "X7", "X8"
                         ])

    if not keep_date_col:
        expected = expected.drop(["X1", "X2", "X3"], axis=1)
    elif parser.engine == "python":
        expected["X1"] = expected["X1"].astype(np.int64)

    # Python can sometimes be flaky about how
    # the aggregated columns are entered, so
    # this standardizes the order.
    result = result[expected.columns]
    tm.assert_frame_equal(result, expected)
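
Outside the test harness, the same column-combining behaviour can be seen directly. A small hedged sketch, assuming a pandas version that still supports prefix and the dict form of parse_dates (both were deprecated in later releases):

from io import StringIO
import pandas as pd

sample = "KORD,19990127, 19:00:00, 18:56:00, 0.8100\n"
df = pd.read_csv(
    StringIO(sample),
    header=None,
    prefix="X",                        # name the unnamed columns X0, X1, ...
    parse_dates={"actual": [1, 2],     # combine the date and time columns
                 "nominal": [1, 3]},
    keep_date_col=True,                # keep the original string columns too
)
print(df.loc[0, "actual"])   # 1999-01-27 19:00:00
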
Example #4
def save_ihelp_to_file(function,save_help=False,save_code=True, 
                        as_md=False,as_txt=True,
                        folder='readme_resources/ihelp_outputs/',
                        filename=None,file_mode='w'):
    """Saves the string representation of the ihelp source code as markdown. 
    Filename should NOT have an extension. .txt or .md will be added based on
    as_md/as_txt.
    If filename is None, function name is used."""

    if as_md and as_txt:
        raise Exception('Only one of as_md / as_txt may be true.')

    import sys
    from io import StringIO
    ## save original output to restore
    orig_output = sys.stdout
    ## instantiate io stream to capture output
    io_out = StringIO()
    ## Redirect output to output stream
    sys.stdout = io_out
    
    if save_code:
        print('### SOURCE:')
        help_md = get_source_code_markdown(function)
        ## print output to io_stream
        print(help_md)
        
    if save_help:
        print('### HELP:')
        help(function)
        
    ## Get printed text from io stream
    text_to_save = io_out.getvalue()
    

    ## MAKE FULL FILENAME
    if filename is None:

        ## Find the name of the function
        import re
        func_names_exp = re.compile(r'def (\w*)\(')
        func_name = func_names_exp.findall(text_to_save)[0]    
        print(f'Found code for {func_name}')

        save_filename = folder+func_name#+'.txt'
    else:
        save_filename = folder+filename

    if as_md:
        ext = '.md'
    elif as_txt:
        ext = '.txt'
    else:
        raise Exception('One of as_md / as_txt must be true.')

    full_filename = save_filename + ext
    
    with open(full_filename,file_mode) as f:
        f.write(text_to_save)
        
    print(f'Output saved as {full_filename}')
    
    sys.stdout = orig_output
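
A hypothetical usage sketch; it assumes the get_source_code_markdown() helper used above is importable and that the target folder already exists:

# Save this helper's own source to readme_resources/ihelp_outputs/<func_name>.txt
save_ihelp_to_file(save_ihelp_to_file, save_help=False, save_code=True,
                   as_md=False, as_txt=True)
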
Example #5
 def __init__(self, prefix=''):
     self.oStringIO = StringIO()
     self.prefix = prefix
Example #6
 def on_pretrain_routine_start(self):
     self._resout = StringIO()
     self.ctx = redirect_stderr(self._resout)
     self.ctx.__enter__()
Example #7
from io import StringIO

from configobj import ConfigObj
from validate import Validator

import pybridge.environment as env

# Config spec
spec = StringIO("""# PyBridge configuration file

[Connection]
    HostAddress = string
    PortNumber = integer(0, 65535)
    Username = string
    Password = string

[Appearance]
    CardStyle = string
    Background = string
    SuitSymbols = boolean(default=True)

    [[Colours]]
        Club = string(min=12, max=12)
        Diamond = string(min=12, max=12)
        Heart = string(min=12, max=12)
        Spade = string(min=12, max=12)

""")


config = None
val = Validator()

def load():
    ...  # body not included in this excerpt
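
A hedged sketch of what such a loader typically looks like with configobj; the config file path and the copy=True default-filling are illustrative choices, not pybridge's actual code:

def load_sketch(config_path='pybridge.cfg'):
    """Hedged sketch: read config_path, validate it against spec, fill in defaults."""
    global config
    spec.seek(0)                            # allow the StringIO spec to be re-read
    cfg = ConfigObj(config_path, configspec=spec)
    results = cfg.validate(val, copy=True)  # copy=True copies defaults from the spec
    if results is not True:
        print('Configuration failed validation:', results)
    config = cfg
    return cfg
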
Example #8
 def test_call_command_with_required_parameters_in_mixed_options(self):
     out = StringIO()
     management.call_command('required_option', '--need-me=foo', needme2='bar', stdout=out)
     self.assertIn('need_me', out.getvalue())
     self.assertIn('needme2', out.getvalue())
Example #9
 def test_command_add_arguments_after_common_arguments(self):
     out = StringIO()
     management.call_command('common_args', stdout=out)
     self.assertIn('Detected that --version already exists', out.getvalue())
Example #10
 def test_calling_a_command_with_no_app_labels_and_parameters_should_raise_a_command_error(self):
     with self.assertRaises(CommandError):
         management.call_command('hal', stdout=StringIO())
Example #11
 def test_output_transaction(self):
     output = management.call_command('transaction', stdout=StringIO(), no_color=True)
     self.assertTrue(output.strip().startswith(connection.ops.start_transaction_sql()))
     self.assertTrue(output.strip().endswith(connection.ops.end_transaction_sql()))
Example #12
 def test_calling_command_with_parameters_and_app_labels_at_the_end_should_be_ok(self):
     out = StringIO()
     management.call_command('hal', "--verbosity", "3", "myapp", stdout=out)
     self.assertIn("Dave, my mind is going. I can feel it. I can feel it.\n", out.getvalue())
Example #13
 def test_calling_a_command_with_only_empty_parameter_should_ends_gracefully(self):
     out = StringIO()
     management.call_command('hal', "--empty", stdout=out)
     self.assertIn("Dave, I can't do that.\n", out.getvalue())
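
Examples #8 through #13 (and several later ones) come from Django's test suite and exercise custom management commands such as 'dance' and 'hal'. For orientation, a minimal hedged sketch of what such a command looks like and why call_command(..., stdout=StringIO()) can capture its output; the module path and option are illustrative:

# e.g. myapp/management/commands/dance.py (hypothetical path)
from django.core.management.base import BaseCommand

class Command(BaseCommand):
    help = "Minimal command whose output goes to self.stdout"

    def add_arguments(self, parser):
        parser.add_argument('--style', default="Rock'n'Roll")

    def handle(self, *args, **options):
        # call_command(..., stdout=out) swaps self.stdout for the given stream
        self.stdout.write("I don't feel like dancing %s." % options['style'])
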
Example #14
    def _optimize_recursive(self, sdfg: SDFG, depth: int):
        if depth == self.depth:
            return

        matches = list(self.get_pattern_matches(sdfg=sdfg))

        # Apply each transformation
        for match in matches:
            # Copy the SDFG
            new_sdfg: SDFG = copy.deepcopy(sdfg)

            # Try to apply, handle any exception
            try:
                # Redirect outputs
                output = StringIO()
                sys.stdout = output
                sys.stderr = output

                print('    ' * depth, type(match).__name__, '- ', end='', file=self.stdout)

                tsdfg: SDFG = new_sdfg.sdfg_list[match.sdfg_id]
                tgraph = tsdfg.node(match.state_id) if match.state_id >= 0 else tsdfg
                match._sdfg = tsdfg
                match.apply(tgraph, tsdfg)

                sdfg.save(os.path.join('_dacegraphs', 'program.sdfg'))

                # Validate
                if self.validate:
                    new_sdfg.validate()

                # Expand library nodes
                new_sdfg.expand_library_nodes()

                # Generate code
                if self.generate_code:
                    new_sdfg.generate_code()

                if self.compile:
                    compiled = new_sdfg.compile()
                    del compiled

                print('PASS', file=self.stdout)
                self.passed_tests += 1

                # Recursively optimize as necessary
                self._optimize_recursive(sdfg, depth + 1)

            except:  # Literally anything can happen here
                print('FAIL', file=self.stdout)
                self.failed_tests += 1
                if self.halt_on_exception:
                    print(output.getvalue(), file=self.stderr)
                    raise
                if self.print_exception:
                    print(output.getvalue(), file=self.stderr)
                    traceback.print_exc(file=self.stderr)
                continue
            finally:
                # Restore redirected outputs
                sys.stdout = self.stdout
                sys.stderr = self.stderr
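
The manual swap-and-restore of sys.stdout/sys.stderr above can also be expressed with contextlib. A hedged alternative sketch (not the library's own code) that captures both streams for one call:

from contextlib import redirect_stdout, redirect_stderr
from io import StringIO

def run_captured(fn, *args, **kwargs):
    """Run fn, returning (result, captured_text); the streams are restored automatically."""
    output = StringIO()
    with redirect_stdout(output), redirect_stderr(output):
        result = fn(*args, **kwargs)
    return result, output.getvalue()
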
Example #15
# Excerpt; assumes `import pandas as pd`, `from io import StringIO`, and the
# CommonFields constants from the surrounding codebase.
def read_csv_and_index_fips(csv_str: str) -> pd.DataFrame:
    """Read a CSV in a str to a DataFrame and set the FIPS column as an index."""
    return pd.read_csv(
        StringIO(csv_str), dtype={CommonFields.FIPS: str}, low_memory=False,
    ).set_index(CommonFields.FIPS)
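
A hypothetical usage sketch; it assumes CommonFields.FIPS resolves to the column name 'fips' in the calling codebase:

csv_str = "fips,county,cases\n06075,San Francisco,100\n36061,New York,250\n"
df = read_csv_and_index_fips(csv_str)
print(df.loc["06075", "cases"])   # 100; FIPS stays a zero-padded string thanks to dtype=str
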
Example #16
 def test_subparser(self):
     out = StringIO()
     management.call_command('subparser', 'foo', 12, stdout=out)
     self.assertIn('bar', out.getvalue())
Example #17
from io import StringIO

from keras.models import load_model
import numpy as np

data = ""
data += input('Number of times pregnant: ') + ","
data += input('Plasma glucose concentration: ') + ","
data += input('blood pressure in mm Hg: ') + ","
data += input('Tricep skinfold thickness: ') + ","
data += input('2-hour serum insulin: ') + ","
data += input('body mass index: ') + ","
data += input('diabetes pedigree function: ') + ","
data += input('age: ') + " "

data = StringIO(data)

# load pima indians dataset
dataset = np.loadtxt(data, delimiter=",")
dataset = np.reshape(dataset, (-1, 8))

X = dataset[:, 0:8]
print(X)

# returns a compiled model
# identical to the previous one
model = load_model('diabetes_model.h5')

# calculate predictions
prediction = model.predict(dataset)
# round predictions
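
The snippet above stops right after the '# round predictions' comment. A hedged completion that rounds the sigmoid output to a 0/1 class label:

rounded = [round(float(p[0])) for p in prediction]
print(rounded)   # e.g. [1] -> likely diabetic, [0] -> likely not
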
Example #18
 def test_command(self):
     out = StringIO()
     management.call_command('dance', stdout=out)
     self.assertIn("I don't feel like dancing Rock'n'Roll.\n", out.getvalue())
Example #19
# Django view excerpt; it relies on imports from the surrounding module
# (os, io, pythoncom, comtypes.client, pyAesCrypt, docxtpl.DocxTemplate,
# docx.Document, django.http.HttpResponse, django.shortcuts.render, etc.).
def data_sheet(request, id):

    idprefix = request.POST['idprefix']
    print(idprefix, 'jjjjjjjjjjjj')

    doc_final_path = 'E:/certa-drdo/certa/TA_Datasheet.docx'
    final_path = 'E:/certa-drdo/certa/'
    # finalpath=final_path.replace('/','\\')
    pdf_final_path = 'E:/certa-drdo/certa/TA_Datasheet.pdf'
    if os.path.isfile(pdf_final_path):
        with open(pdf_final_path, 'rb') as pdf:
            response = HttpResponse(pdf.read(), content_type='application/pdf')
            response['Content-Disposition'] = 'filename=some_file.pdf'
        return response
    elif os.path.isfile(doc_final_path):
        print('mmmmmmmmmmmmmm')
        pythoncom.CoInitialize()
        wdFormatPDF = 17
        # print(tempfile.gettempdir(),'temp')

        in_file = os.path.abspath(doc_final_path)
        # out_file = os.path.abspath('D:/cemilac/certa/defence/media/org1.pdf')

        word = comtypes.client.CreateObject('Word.Application')
        doc = word.Documents.Open(in_file)
        doc.SaveAs('E:/certa-drdo/certa/TA_Datasheet.pdf',
                   FileFormat=wdFormatPDF)
        print('nnnnnnnnnnn')
        doc.Close()
        word.Quit()
        with open(final_path + 'TA_Datasheet.pdf', 'rb') as pdf:
            response = HttpResponse(pdf.read(), content_type='application/pdf')
            response['Content-Disposition'] = 'filename=some_file.pdf'
        return response
    else:
        curr_path = "/" + str(id) + "/" + idprefix + "Annexure 6/"
        curr_path = curr_path.replace('/', '\\')
        new_path = os.path.join(settings.MEDIA_ROOT + curr_path)

        # if os.path.isdir(new_path):
        #     with open(new_path+'TA Datasheet.docx', 'rb') as pdf:
        #         response = HttpResponse(pdf.read(),content_type='application/pdf')
        #         response['Content-Disposition'] = 'filename=some_file.pdf'
        #     return response
        # else:
        taa = TAapplicationmodel.objects.filter(user_id=id).first()

        # template = get_template('dealing officer/Draft TA pdf.html')
        target_file = StringIO()
        template = DocxTemplate(
            "E:/certa-drdo/certa/dashboard/templates/dealing officer/DS template.docx"
        )
        context = {
            'firmname': taa.firmname,
            'addr1': taa.addr1,
            'item_name': taa.item_name,
            'part_no': taa.part_no
        }
        template.render(context)  # render() fills the template in place and returns None
        doc_io = io.BytesIO()  # create a file-like object (not used below; the file is written to disk)
        template.save("TA_Datasheet.docx")  # save the rendered document to disk

        new_path1 = r'E:\certa-drdo\certa\TA_Datasheet.docx'
        # output_path = os.path.join(settings.MEDIA_ROOT) + '/89/result.pdf'
        # new_path=new_path.replace('\','//')

        taa = TAapplicationfiles.objects.filter(user_id=id,
                                                refid=idprefix,
                                                refpath='Annexure 6').first()
        aesurl = taa.filepath
        docurl = aesurl[:-4]
        print('aesview', aesurl)
        print('docurl', docurl)

        bufferSize = 64 * 1024
        passw = "#EX\xc8\xd5\xbfI{\xa2$\x05(\xd5\x18\xbf\xc0\x85)\x10nc\x94\x02)j\xdf\xcb\xc4\x94\x9d(\x9e"
        encFileSize = stat(aesurl).st_size
        with open(aesurl, "rb") as fIn:
            with open(docurl, "wb") as fOut:
                pyAesCrypt.decryptStream(fIn, fOut, passw, bufferSize,
                                         encFileSize)

        templateDoc1 = Document(new_path1)
        templateDoc = Document(docurl)

        # templateDoc1.add_page_break()

        for element in templateDoc.element.body:
            templateDoc1.element.body.append(element)

        templateDoc1.save(new_path1)
        messages.success(
            request,
            'Data_sheet Successfully Prepared, Click again to view the file !')

        reg = TAapplicationmodel.objects.filter(
            file_in_id=str(request.user.id))
        return render(request, 'tcs do/receivedtyperecord.html', {
            'details': reg,
            'status': True
        })
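
In the branch above a BytesIO is created but the rendered template is still written to disk. For reference, a hedged sketch of rendering a DocxTemplate straight into an in-memory buffer and returning it as a download; the path and context keys are illustrative:

import io
from django.http import HttpResponse
from docxtpl import DocxTemplate

def render_datasheet_sketch(template_path, context):
    template = DocxTemplate(template_path)
    template.render(context)      # render() fills the template in place
    doc_io = io.BytesIO()
    template.save(doc_io)         # save() accepts a file-like object
    doc_io.seek(0)
    response = HttpResponse(
        doc_io.read(),
        content_type='application/vnd.openxmlformats-officedocument.wordprocessingml.document')
    response['Content-Disposition'] = 'attachment; filename="TA_Datasheet.docx"'
    return response
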
Example #20
 def test_language_preserved(self):
     out = StringIO()
     with translation.override('fr'):
         management.call_command('dance', stdout=out)
         self.assertEqual(translation.get_language(), 'fr')
Example #21
def ihelp_menu(function_list,box_style='warning', to_embed=False):#, to_file=False):#, json_file='ihelp_output.txt' ):
    """
    Creates a widget menu of the source code and help documentation of the functions in function_list.
    
    Args:
        function_list (list): list of function object or string names of loaded function. 
        to_embed (bool, optional): Returns interface (layout,output) if True. Defaults to False.
        to_file (bool, optional): Save . Defaults to False.
        json_file (str, optional): [description]. Defaults to 'ihelp_output.txt'.
        
    Returns:
        full_layout (ipywidgets GridBox): Layout of interface.
        output ()
    """
    
    # Accepts a list of string names for loaded modules/functions to save the `help` output and 
    # inspect.getsource() outputs to dictionary for later reference and display
    ## One way using sys to write txt file
    import pandas as pd
    import sys
    import inspect
    from io import StringIO
    from IPython.display import display,Markdown
    notebook_output = sys.stdout
    result = StringIO()
    sys.stdout=result
    
    ## Turn single input into a list
    if not isinstance(function_list, list):
        function_list = [function_list]
    
    ## Make a dictionary of{function_name : function_object}
    functions_dict = dict()
    for fun in function_list:
        
        ## if input is a string, save string as key, and eval(function) as value
        if isinstance(fun, str):
            functions_dict[fun] = eval(fun)

        ## if input is a function, get the name of function using inspect and make key, function as value
        elif inspect.isfunction(fun):

            members= inspect.getmembers(fun)
            member_df = pd.DataFrame(members,columns=['param','values']).set_index('param')

            fun_name = member_df.loc['__name__'].values[0]
            functions_dict[fun_name] = fun
            
            
    ## Create an output dict to store results for functions
    output_dict = {}

    for fun_name, real_func in functions_dict.items():
        
        output_dict[fun_name] = {}
                
        ## First save help
        help(real_func)
        output_dict[fun_name]['help'] = result.getvalue()
        
        ## Clear contents of io stream
        result.seek(0)
        result.truncate(0)
                
        try:
            ## Next save source
            source_DF = inspect.getsource(real_func)
            # # if markdown == True:
                
            #     output = "```python" +'\n'+source_DF+'\n'+"```"
            #     display(Markdown(output))
            # else:
            #     output=source_DF
            print(source_DF)
            # output_dict[fun_name]['source'] = source_DF

            # print(inspect.getsource(real_func)) #eval(fun)))###f"{eval(fun)}"))
        except:
            # print("Source code for object was not found")
            print("Source code for object was not found")


        # finally:
        output_dict[fun_name]['source'] = result.getvalue()
        ## clear contents of io stream
        result.seek(0)
        result.truncate(0)
    
        
        ## Get file location
        try:
            file_loc = inspect.getfile(real_func)
            print(file_loc)
        except:
            print("File location not found")
            
        output_dict[fun_name]['file_location'] =result.getvalue()
        
        
        ## clear contents of io stream
        result.seek(0)
        result.truncate(0)
        
    ## Reset display back to notebook
    sys.stdout = notebook_output    

    # if to_file==True:    
    #     with open(json_file,'w') as f:
    #         import json
    #         json.dump(output_dict,f)

    ## CREATE INTERACTIVE MENU
    from ipywidgets import interact, interactive, interactive_output
    import ipywidgets as widgets
    from IPython.display import display
    # from functions_combined_BEST import ihelp
    # import functions_combined_BEST as ji

    ## Check boxes
    check_help = widgets.Checkbox(description="Show 'help(func)'",value=True)
    check_source = widgets.Checkbox(description="Show source code",value=True)
    check_fileloc=widgets.Checkbox(description="Show source filepath",value=False)
    check_boxes = widgets.HBox(children=[check_help,check_source,check_fileloc])

    ## dropdown menu (dropdown, label, button)
    dropdown = widgets.Dropdown(options=list(output_dict.keys()))
    label = widgets.Label('Function Menu')
    button = widgets.ToggleButton(description='Show/hide',value=False)
    
    ## Putting it all together
    title = widgets.Label('iHelp Menu: View Help and/or Source Code')
    menu = widgets.HBox(children=[label,dropdown,button])
    titled_menu = widgets.VBox(children=[title,menu])
    full_layout = widgets.GridBox(children=[titled_menu,check_boxes],box_style=box_style)
    

    ## Define output manager
    # show_output = widgets.Output()

    def dropdown_event(change): 
        new_key = change.new
        output_display = output_dict[new_key]
    dropdown.observe(dropdown_event, names='value')

    
    def show_ihelp(display_help=button.value,function=dropdown.value,
                   show_help=check_help.value,show_code=check_source.value, 
                   show_file=check_fileloc.value, output_dict=output_dict):

        from IPython.display import Markdown
        # import functions_combined_BEST as ji
        from IPython.display import display        
        page_header = '---'*28
        # import json
        # with open(json_file,'r') as f:
        #     output_dict = json.load(f)
        func_dict = output_dict[function]
        source_code=None

        if display_help:
            if show_help:
#                 display(print(func_dict['help']))
                print(page_header)
                banner = ''.join(["---"*2,' HELP ',"---"*24,'\n'])
                print(banner)
                print(func_dict['help'])

            if show_code:
                print(page_header)

                banner = ''.join(["---"*2,' SOURCE -',"---"*23])
                print(banner)

                source_code = func_dict['source']
                if source_code.startswith('`'):
                    source_code = source_code.replace('`', "")

                if 'google.colab' in sys.modules:
                    print(source_code)
                else:
                    md_source = "```python\n" + source_code
                    md_source += "\n```"
                    display(Markdown(md_source))
            
            
            if show_file:
                print(page_header)
                banner = ''.join(["---"*2,' FILE LOCATION ',"---"*21])
                print(banner)
                
                file_loc = func_dict['file_location']
                print(file_loc)
                
            if not (show_help or show_code or show_file):
                display('Check at least one "Show" checkbox for output.')
                
        else:
            display('Press "Show/hide" for display')
            
    ## Fully integrated output
    output = widgets.interactive_output(show_ihelp,{'display_help':button,
                                                   'function':dropdown,
                                                   'show_help':check_help,
                                                   'show_code':check_source,
                                                   'show_file':check_fileloc})
    if to_embed:
        return full_layout, output
    else:
        display(full_layout, output)
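
A hypothetical usage sketch inside a Jupyter notebook; any loaded function, or its name as a string, can be passed:

def greet(name):
    """Say hello to name."""
    return f"Hello, {name}!"

# Build the widget menu for one local function and one built-in passed by name.
ihelp_menu([greet, 'print'])
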
Example #22
 def reset(self):
     self._buffer = StringIO()
Example #23
def download(modeladmin, request, selected):
    buf = StringIO('This is the content of the file')
    return StreamingHttpResponse(FileWrapper(buf))
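
The admin action above streams a StringIO through FileWrapper. A hedged alternative sketch using BytesIO and an explicit Content-Disposition header, the more common pattern for file downloads:

from io import BytesIO
from wsgiref.util import FileWrapper
from django.http import StreamingHttpResponse

def download_bytes(modeladmin, request, selected):
    buf = BytesIO(b'This is the content of the file')
    response = StreamingHttpResponse(FileWrapper(buf), content_type='text/plain')
    response['Content-Disposition'] = 'attachment; filename="selected.txt"'
    return response
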
Example #24
import requests
from lxml.html import parse
from io import StringIO

text = requests.get('http://finance.yahoo.com/quote/AAPL/options?ltr=1').text
parsed = parse(StringIO(text))

doc = parsed.getroot()

print(doc)
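
A short follow-up sketch showing how the parsed document is usually queried, for example to pull the tables and links out of the page:

tables = doc.findall('.//table')                      # all <table> elements
links = [a.get('href') for a in doc.findall('.//a')]  # href of every <a> tag
print(len(tables), len(links))
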
Example #25
    def run(  # pylint: disable=unused-argument,too-many-arguments,too-many-locals
            self,
            command: str,
            warn: bool = False,
            encoding: str = 'utf-8',  # pylint: disable=redefined-outer-name
            hide=True,
            watchers=None,
            env=None,
            replace_env=False,
            in_stream=False,
            timeout=None) -> Result:
        """Run command, wait till it ends and return result in Result class.
        If `watchers` are defined, an `SSHReaderThread` is started that reads data from the socket and forwards it to a Queue.
        If `hide` is True, stdout and stderr are not collected.
        If `env` is set, its variables are loaded into the session environment.
        Returns: an instance of `Result`.
        """
        if timeout is None:
            timeout = self.timings.read_command_output_timeout
        exception = None
        timeout_reached = False
        stdout = StringIO()
        stderr = StringIO()
        # TODO: Implement replace_env
        if env is None:
            shell = '/bin/bash'
        else:
            shell = env.get('SHELL', '/bin/bash')
        result = Result(command=command,
                        encoding=encoding,
                        env=env,
                        hide=('stderr', 'stdout') if hide else (),
                        pty=False,
                        exited=None,
                        shell=shell,
                        stdout='',
                        stderr='')
        channel: Optional[Channel] = None
        try:
            if self.session is None:
                self.connect()
            channel = self.open_channel()
        except Exception as exc:  # pylint: disable=broad-except
            return self._complete_run(channel, FailedToRunCommand(result, exc),
                                      timeout_reached, timeout, result, warn,
                                      stdout, stderr)
        try:
            self._apply_env(channel, env)
        except Exception as exc:  # pylint: disable=broad-except
            return self._complete_run(channel, FailedToRunCommand(result, exc),
                                      timeout_reached, timeout, result, warn,
                                      stdout, stderr)
        if watchers:
            reader = SSHReaderThread(
                self.session, channel, timeout,
                self.timings.interactive_read_data_chunk_timeout)
            try:

                self.execute(command, channel=channel, use_pty=False)
                self._process_output(
                    watchers, encoding, stdout, stderr, reader, timeout,
                    self.timings.interactive_read_data_chunk_timeout)
            except Exception as exc:  # pylint: disable=broad-except
                exception = FailedToReadCommandOutput(result, exc)
            if reader.is_alive():
                reader.stop()
            if exception is None and reader.raised:
                exception = reader.raised
            timeout_reached = reader.timeout_reached
        else:
            try:
                self.execute(command, channel=channel, use_pty=False)
                timeout_reached = not self._process_output_no_watchers(
                    self.session, channel, encoding, stdout, stderr, timeout,
                    self.timings.read_data_chunk_timeout)
            except Exception as exc:  # pylint: disable=broad-except
                exception = FailedToReadCommandOutput(result, exc)
        return self._complete_run(channel, exception, timeout_reached, timeout,
                                  result, warn, stdout, stderr)
Example #26
    def print_report_transfer_order(self):
        if self.print_format == 'pdf':
            datas = {'model': 'optesis.transfer.order', 'form': self.read()[0]}
            return self.env.ref('optipay.transfer_order').report_action(
                [], data=datas)
        else:
            number_month_to_word = {
                "1": "janvier",
                "2": "février",
                "3": "mars",
                "4": "avril",
                "5": "mai",
                "6": "juin",
                "7": "juillet",
                "8": "août",
                "9": "septembre",
                "10": "octobre",
                "11": "novembre",
                "12": "décembre"
            }
            now = datetime.now()
            server_dt = DEFAULT_SERVER_DATE_FORMAT
            month = datetime.strptime(str(self.date_from), server_dt).month
            year = datetime.strptime(str(self.date_from), server_dt).year
            self.env.cr.execute(
                "SELECT hr_payslip_line.total,hr_employee.name,res_partner_bank.acc_number,"
                "hr_employee.bank_account_id AS hr_employee_bank_account_id,"
                "res_bank.id AS res_bank_id,res_bank.name AS res_bank_name,"
                "res_partner_bank.bank_id AS res_partner_bank_bank_id FROM "
                "hr_payslip_line hr_payslip_line INNER JOIN hr_payslip hr_payslip ON "
                "hr_payslip_line.slip_id = hr_payslip.id "
                "INNER JOIN hr_employee hr_employee ON hr_payslip_line.employee_id = hr_employee.id "
                "INNER JOIN res_partner_bank res_partner_bank ON "
                "hr_employee.bank_account_id = res_partner_bank.id "
                "INNER JOIN public.res_bank res_bank ON res_partner_bank.bank_id = res_bank.id WHERE "
                "date_part('month',hr_payslip.date_from) = %s "
                "AND hr_payslip_line.code = %s "
                "AND hr_payslip_line.year = %s "
                "AND hr_employee.company_id = %s ",
                (month, 'C5000', str(year), self.env.user.company_id.id))
            line_ids = [x for x in self.env.cr.fetchall()]
            if len(line_ids) > 0:
                file = StringIO()
                workbook = xlwt.Workbook()
                format0 = xlwt.easyxf(
                    'font:height 300,bold True;pattern: pattern solid, fore_colour pale_blue;align: horiz center'
                )
                format1 = xlwt.easyxf(
                    'font:bold True;pattern: pattern solid, fore_colour pale_blue;align: '
                    'vert center, horiz center')
                format2 = xlwt.easyxf(
                    'font:bold True;pattern: pattern solid, fore_colour pale_blue;align: horiz left'
                )
                format3 = xlwt.easyxf('align: vert center, horiz center')

                sheet = workbook.add_sheet('Ordre de virement')
                sheet.col(0).width = int(15 * 260)
                sheet.col(1).width = int(15 * 260)
                sheet.col(2).width = int(15 * 260)
                sheet.col(3).width = int(18 * 260)
                sheet.col(4).width = int(18 * 260)
                sheet.write_merge(0, 2, 0, 4, 'Ordre de virement ', format0)

                sheet.write(5, 3, 'Banque:', format2)
                sheet.write(
                    5, 4,
                    self.env.user.company_id.bank_journal_ids[0].bank_id.name)
                sheet.write(6, 3, 'Rue:', format2)
                sheet.write(
                    6, 4, self.env.user.company_id.bank_journal_ids[0].bank_id.
                    street)
                sheet.write(7, 3, 'Code Postal:', format2)
                sheet.write(
                    7, 4,
                    self.env.user.company_id.bank_journal_ids[0].bank_id.zip)
                sheet.write(8, 3, 'Ville:', format2)
                sheet.write(
                    8, 4,
                    self.env.user.company_id.bank_journal_ids[0].bank_id.city)
                sheet.write(9, 3, 'Date:', format2)
                sheet.write(9, 4, now.strftime("%d/%m/%Y"))

                sheet.write(11, 0, 'Objet:')
                sheet.write(11, 1, 'Ordre de Virement')

                account_number = ''
                if self.env.user.company_id.bank_journal_ids:
                    account_number = self.env.user.company_id.bank_journal_ids[
                        0].bank_acc_number

                sheet.write_merge(
                    13, 13, 0, 4,
                    'Par le débit de notre compte n° ' + str(account_number) +
                    ' ouvert dans vos livres, nous vous prions de vouloir ')
                sheet.write_merge(
                    14, 14, 0, 4,
                    'effectuer les virements pour les titulaires de compte ci-dessous'
                    ' en règlement de leur ')
                sheet.write_merge(15, 15, 0, 4,
                                  'rémunérations du mois de mai.')

                sheet.write(17, 0, 'N°', format1)
                sheet.write(17, 1, 'Prénom-Nom', format1)
                sheet.write(17, 2, 'Domiciliation', format1)
                sheet.write(17, 3, 'N° Compte', format1)
                sheet.write(17, 4, 'MontantFCFA', format1)
                row = 18
                index = 0
                total = 0
                for line in line_ids:
                    index += 1
                    sheet.write(row, 0, index, format3)
                    sheet.write(row, 1, line[1], format3)
                    sheet.write(row, 2, line[5], format3)
                    sheet.write(row, 3, line[2], format3)
                    sheet.write(row, 4, line[0], format3)
                    total += line[0]
                    row += 1
                sheet.write_merge(row, row, 0, 3, 'Total ' + str(index),
                                  format1)
                sheet.write(row, 4, total, format3)
                sheet.write_merge(
                    row + 2, row + 2, 0, 7,
                    'Veuillez agréer, Monsieur, l\'expression'
                    ' de notre parfaite considération.')
                filename = ('/tmp/Ordre de virement Report' + '.xls')
                workbook.save(filename)
                with open(filename, "rb") as report_file:
                    file_data = report_file.read()
                out = base64.encodebytes(file_data)  # encodestring() was removed in Python 3.9
                self.write({
                    'state': 'get',
                    'file_name': out,
                    'transfer_data': 'Ordre de virement Report.xls'
                })
                return {
                    'type': 'ir.actions.act_window',
                    'res_model': 'optesis.transfer.order',
                    'view_mode': 'form',
                    'view_type': 'form',
                    'res_id': self.id,
                    'target': 'new',
                }
            else:
                raise Warning("Pas de données pour cette période")
Example #27
def test_multiple_date_col_name_collision(all_parsers, data, parse_dates, msg):
    parser = all_parsers

    with pytest.raises(ValueError, match=msg):
        parser.read_csv(StringIO(data), parse_dates=parse_dates)
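
A concrete hedged instance of the collision this test parametrizes over: the dict key given to parse_dates already names an existing column, so read_csv raises ValueError:

from io import StringIO
import pandas as pd
import pytest

data = "ID,date,nominalTime\nKORD,19990127, 19:00:00\n"
with pytest.raises(ValueError):
    pd.read_csv(StringIO(data), parse_dates={"ID": [1, 2]})
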
Example #28
 def test_invalid_command_args(self):
     output = StringIO()
     cmd = CmdFactory(Strace, outstream=output)
     # fails if number of args != 1
     pytest.raises(InvalidCommand, cmd.execute, {}, [])
     pytest.raises(InvalidCommand, cmd.execute, {}, ['t1', 't2'])
Example #29
def test_parse_dates_no_convert_thousands(all_parsers, data, kwargs, expected):
    # see gh-14066
    parser = all_parsers

    result = parser.read_csv(StringIO(data), thousands=".", **kwargs)
    tm.assert_frame_equal(result, expected)
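
A hedged illustration of the gh-14066 behaviour being tested: the thousands separator is applied to numeric fields but must not be applied while parsing date columns:

from io import StringIO
import pandas as pd

data = "a;b\n16.05.2019;1.000\n17.05.2019;3.000\n"
df = pd.read_csv(StringIO(data), sep=";", thousands=".",
                 parse_dates=["a"], dayfirst=True)
print(df["b"].tolist())   # [1000, 3000] -- '.' stripped as a thousands separator
print(df["a"].dtype)      # datetime64[ns] -- the dotted dates were parsed, not mangled
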
Example #30
    def format_args(self, show_annotation: bool = True) -> str:
        def get_annotation(param: Parameter) -> Any:
            if isinstance(param.annotation,
                          str) and param.name in self.annotations:
                return self.annotations[param.name]
            else:
                return param.annotation

        args = []
        last_kind = None
        for i, param in enumerate(self.parameters.values()):
            # skip first argument if subject is bound method
            if self.skip_first_argument and i == 0:
                continue

            arg = StringIO()

            # insert '*' between POSITIONAL args and KEYWORD_ONLY args::
            #     func(a, b, *, c, d):
            if param.kind == param.KEYWORD_ONLY and last_kind in (
                    param.POSITIONAL_OR_KEYWORD, param.POSITIONAL_ONLY, None):
                args.append('*')

            if param.kind in (param.POSITIONAL_ONLY,
                              param.POSITIONAL_OR_KEYWORD, param.KEYWORD_ONLY):
                arg.write(param.name)
                if show_annotation and param.annotation is not param.empty:
                    arg.write(': ')
                    arg.write(stringify_annotation(get_annotation(param)))
                if param.default is not param.empty:
                    if param.annotation is param.empty or show_annotation is False:
                        arg.write('=')
                        arg.write(object_description(param.default))
                    else:
                        arg.write(' = ')
                        arg.write(object_description(param.default))
            elif param.kind == param.VAR_POSITIONAL:
                arg.write('*')
                arg.write(param.name)
                if show_annotation and param.annotation is not param.empty:
                    arg.write(': ')
                    arg.write(stringify_annotation(get_annotation(param)))
            elif param.kind == param.VAR_KEYWORD:
                arg.write('**')
                arg.write(param.name)
                if show_annotation and param.annotation is not param.empty:
                    arg.write(': ')
                    arg.write(stringify_annotation(get_annotation(param)))

            args.append(arg.getvalue())
            last_kind = param.kind

        if self.return_annotation is Parameter.empty or show_annotation is False:
            return '(%s)' % ', '.join(args)
        else:
            if 'return' in self.annotations:
                annotation = stringify_annotation(self.annotations['return'])
            else:
                annotation = stringify_annotation(self.return_annotation)

            return '(%s) -> %s' % (', '.join(args), annotation)