Example #1
def test_iterator():

    s = u"1234567890\n"
    sio = StringIO(s * 10)

    assert iter(sio) is sio
    assert hasattr(sio, "__iter__")
    assert hasattr(sio, "next")

    i = 0
    for line in sio:
        assert line == s
        i += 1
    assert i == 10
    sio.seek(0)
    i = 0
    for line in sio:
        assert line == s
        i += 1
    assert i == 10
    sio.seek(len(s) * 10 + 1)
    assert list(sio) == []
    sio = StringIO(s * 2)
    sio.close()
    raises(ValueError, next, sio)
class TestBase(unittest.TestCase):
    __configFilePath = os.getcwd() + '/unit/testutilities/config.py'

    def setUp(self):
        self.aminer_config = AMinerConfig.load_config(self.__configFilePath)
        self.analysis_context = AnalysisContext(self.aminer_config)
        self.output_stream = StringIO()
        self.stream_printer_event_handler = StreamPrinterEventHandler(
            self.analysis_context, self.output_stream)
        persistence_file_name = AMinerConfig.build_persistence_file_name(
            self.aminer_config)
        if os.path.exists(persistence_file_name):
            shutil.rmtree(persistence_file_name)
        if not os.path.exists(persistence_file_name):
            os.makedirs(persistence_file_name)

    def tearDown(self):
        self.aminer_config = AMinerConfig.load_config(self.__configFilePath)
        persistence_file_name = AMinerConfig.build_persistence_file_name(
            self.aminer_config)
        if os.path.exists(persistence_file_name):
            shutil.rmtree(persistence_file_name)
        if not os.path.exists(persistence_file_name):
            os.makedirs(persistence_file_name)

    def reset_output_stream(self):
        self.output_stream.seek(0)
        self.output_stream.truncate(0)
Example #3
def test_newline_empty():

    sio = StringIO(u"a\nb\r\nc\rd", newline="")
    res = list(sio)
    assert res == [u"a\n", u"b\r\n", u"c\r", u"d"]
    sio.seek(0)
    res = sio.read(4)
    assert res == u"a\nb\r"
    res = sio.read(2)
    assert res == u"\nc"
    res = sio.read(1)
    assert res == u"\r"

    sio = StringIO(newline="")
    res = sio.write(u"a\n")
    assert res == 2
    res = sio.write(u"b\r")
    assert res == 2
    res = sio.write(u"\nc")
    assert res == 2
    res = sio.write(u"\rd")
    assert res == 2
    sio.seek(0)
    res = list(sio)
    assert res == [u"a\n", u"b\r\n", u"c\r", u"d"]
Example #4
def test_truncate():

    s = u"1234567890"
    sio = StringIO(s)

    raises(ValueError, sio.truncate, -1)
    sio.seek(6)
    res = sio.truncate()
    assert res == 6
    assert sio.getvalue() == s[:6]
    res = sio.truncate(4)
    assert res == 4
    assert sio.getvalue() == s[:4]
    # truncate() accepts long objects (Python 2 `long` literal; this snippet is Python 2 code)
    res = sio.truncate(4L)
    assert res == 4
    assert sio.getvalue() == s[:4]
    assert sio.tell() == 6
    sio.seek(0, 2)
    sio.write(s)
    assert sio.getvalue() == s[:4] + s
    pos = sio.tell()
    res = sio.truncate(None)
    assert res == pos
    assert sio.tell() == pos
    raises(TypeError, sio.truncate, '0')
    sio.close()
    raises(ValueError, sio.truncate, 0)
Example #5
def test_newline_none():

    sio = StringIO(u"a\nb\r\nc\rd", newline=None)
    res = list(sio)
    assert res == [u"a\n", u"b\n", u"c\n", u"d"]
    sio.seek(0)
    res = sio.read(1)
    assert res == u"a"
    res = sio.read(2)
    assert res == u"\nb"
    res = sio.read(2)
    assert res == u"\nc"
    res = sio.read(1)
    assert res == u"\n"

    sio = StringIO(newline=None)
    res = sio.write(u"a\n")
    assert res == 2
    res = sio.write(u"b\r\n")
    assert res == 3
    res = sio.write(u"c\rd")
    assert res == 3
    sio.seek(0)
    res = sio.read()
    assert res == u"a\nb\nc\nd"
    sio = StringIO(u"a\r\nb", newline=None)
    res = sio.read(3)
    assert res == u"a\nb"
    def loadFile(self):
        #         path = os.path.join('/docs/github/Opal/src/ui/view/opalview', 'bookInfo.html')

        out = StringIO()
        htmlhandler = rt.RichTextHTMLHandler()
        buffer = self.rtc.GetBuffer()
        #         htmlhandler.SetFlags(rt.RICHTEXT_HANDLER_SAVE_IMAGES_TO_MEMORY)
        htmlhandler.SetFontSizeMapping([7, 9, 11, 12, 14, 22, 100])
        logger.debug('canload: %s', htmlhandler.CanLoad())
        logger.debug('cansave: %s', htmlhandler.CanSave())
        logger.debug('CanHandle: %s', htmlhandler.CanHandle('bookInfo.html'))
        rt.RichTextBuffer.AddHandler(htmlhandler)
        #         buffer.AddHandler(htmlhandler)
        try:
            if self.book is not None:
                out.write(self.book.bookDescription)
            out.seek(0)
        except Exception as e:
            logger.error(e)
#         htmlhandler.LoadStream(buffer, out)
#         htmlhandler.LoadFile(path,'text')
        if self.book is not None and self.book.bookDescription is not None:
            self.rtc.AppendText(self.book.bookDescription)
#         htmlhandler.LoadStream(buffer, out.getvalue())
        self.rtc.Refresh()
Example #7
    def test3get_match_element_valid_match(self):
        """Parse data and check if the MatchContext was not changed."""
        old_stderr = sys.stderr
        output = StringIO()
        sys.stderr = output
        debug_model_element = DebugModelElement(self.id_)
        self.assertEqual(output.getvalue(), "DebugModelElement %s added\n" % self.id_)

        output.seek(0)
        output.truncate(0)

        data = b"some data"
        match_context = DummyMatchContext(data)
        match_element = debug_model_element.get_match_element(self.path, match_context)
        self.assertEqual(
            output.getvalue(), 'DebugModelElement path = "%s", unmatched = "%s"\n' % (match_element.get_path(), repr(
                match_context.match_data)))
        self.compare_match_results(data, match_element, match_context, self.id_, self.path, b"", b"", None)

        output.seek(0)
        output.truncate(0)

        data = b"123 0x2a. [\"abc\"]:"
        match_context = DummyMatchContext(data)
        match_element = debug_model_element.get_match_element(self.path, match_context)
        self.assertEqual(
            output.getvalue(), 'DebugModelElement path = "%s", unmatched = "%s"\n' % (match_element.get_path(), repr(
                match_context.match_data)))
        self.compare_match_results(data, match_element, match_context, self.id_, self.path, b"", b"", None)

        sys.stderr = old_stderr
Example #8
def test_newline_crlf():

    sio = StringIO(u"a\nb\r\nc\rd", newline="\r\n")
    res = sio.read()
    assert res == u"a\r\nb\r\r\nc\rd"
    sio.seek(0)
    res = list(sio)
    assert res == [u"a\r\n", u"b\r\r\n", u"c\rd"]
Example #9
def test_roundtrip_state():
    s = u'12345678'
    sio1 = StringIO(s)
    sio1.foo = 42
    sio1.seek(2)
    assert sio1.getvalue() == s
    state = sio1.__getstate__()
    sio2 = StringIO()
    sio2.__setstate__(state)
    assert sio2.getvalue() == s
    assert sio2.foo == 42
    assert sio2.tell() == 2
def downloadprepros(masukan):
    f = StringIO()
    for a in masukan:
        print('manja', a)
        f.write(a)
        f.write('\n')
    f.flush()
    f.seek(0)

    response = HttpResponse(FileWrapper(f), content_type='text/csv')
    response['Content-Disposition'] = 'attachment; filename=test.csv'
    return response
Example #11
def test_tell():

    s = u"1234567890"
    sio = StringIO(s)

    assert sio.tell() == 0
    sio.seek(5)
    assert sio.tell() == 5
    sio.seek(10000)
    assert sio.tell() == 10000

    sio.close()
    raises(ValueError, sio.tell)
Example #12
    def submitWait(self,jcl,wait=30):
        """
        :param jcl: dataset or pds(member) containting JCL to submit
        :param wait: wait time in seconds until function is to return
        :return Job: Job object containing information on Job submitted
                     or None

        >>> fz=ftptoolz.Ftpzos('zibm','mm','pw',verbose=2)
        >>> j=fz.submitWait("'mm.jobs(copyy)'")
        >>> j.cc
        'AE37'

        """
        j=None
        f=StringIO()   # py2/3
        for line in self.getlines(jcl):
            f.write(line+'\n')
        f.seek(0)
        self.ftp.sendcmd('SITE file=jes') # switch to Spool mode
        try:
            if PY3:
                # convert to latin1 (iso-8859-1) byte string
                f = BytesIO(f.read().encode('latin1'))
            fresp = self.ftp.storlines('STOR myjob.seq', f)
            if fresp.startswith('250-'):
                jobid = fresp.split()[6]
                if jobid.startswith('JOB') and len(jobid)==8:
                    j=Job(jobid,jcl)

        finally:
            self.ftp.sendcmd('SITE file=seq') # switch to File mode

        if not j:
            return j

        for i in range(wait):
            js,sp = self.listjob(j.jobid)
            j.status=js.status
            j.jobstatus=js
            if js.status=='OUTPUT':
                j.cc=js.cc
                j.spoolfiles=sp
                break
            elif js.status=='ACTIVE':
                j.cputime=js.cputime
                j.elapsed=js.elapsed
            time.sleep(1.)
        return j
Example #13
    def touchmembers(self, pds, membertimes, touchtemplate):
        """Submit TOUCH job to set modification times in members of a
           partitioned dataset.

        :param pds: partitioned dataset name
        :param membertimes: list of (membername, modtime, uid, size) tuples
                            modtime is of datetime type or
                            of string 'yyyymmdd.HHMMSS'
        :param touchtemplate: Touch template job skeleton
                              (see touchtemplate_sample for further
                               details)
        """
        if len(membertimes)==0:
            return
        f=StringIO()   # py2/3
        f.write(touchtemplate % pds.upper().strip())

        for m, t, u, s in membertimes:
            # if touchuid/touchdate given as parameter asmdate will only count lines
            if self.verbose:
                print(m, t, u)
            if t:
                if isinstance(t,datetime.datetime):
                    ttime = t.strftime('%Y%m%d.%H%M%S')
                else:
                    ttime = t   # 'yyyymmdd.HHMMSS'
                f.write('SET DATE=%s\n' % ttime)
            if u:
                f.write('SET USER=%s\n' % u.upper())
            if s:
                f.write('SET LINES=%d\n' % s)
            f.write(m.upper()+'\n')

        f.write('//\n')  # end of job
        f.seek(0)   # rewind
        if self.test:
            print('\nThe following generated TOUCH job is not submitted in test mode:')
            for line in f:
                print(line[:-1])
        else:
            if PY3:
                # convert to latin1 (iso-8859-1) byte string
                f = BytesIO(f.read().encode('latin1'))
            self.ftp.sendcmd('SITE file=jes') # switch to Spool mode
            self.ftp.storlines('STOR touch.seq', f)
            self.ftp.sendcmd('SITE file=seq') # switch to File mode
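A minimal calling sketch for touchmembers(), assuming an already connected Ftpzos-style object named ftpz; the member tuples follow the (membername, modtime, uid, size) layout described in the docstring, and touchtemplate_sample is assumed to be a job skeleton with a single '%s' placeholder for the dataset name.

# Hypothetical usage; member names, times, and the dataset name are examples only.
import datetime

membertimes = [
    ('MEMBER1', datetime.datetime(2021, 5, 4, 12, 30, 0), 'usr1', 120),  # datetime modtime
    ('MEMBER2', '20210504.123000', 'usr2', 45),                          # string modtime
]
ftpz.touchmembers('MY.PDS.SOURCE', membertimes, touchtemplate_sample)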
Example #14
def hasilCSV(request):
    if request.method == 'POST':
        name = request.FILES['fileInput'].name
        typeFile = name.split('.')[-1]
        if(typeFile == 'txt'):
            reader = TextIOWrapper(request.FILES['fileInput'].file, encoding='utf-8')
        elif(typeFile == 'csv'):
            try:
                text = TextIOWrapper(request.FILES['fileInput'].file, encoding='utf-8')
                reader = csv.reader(text)
            except:
                text = StringIO(request.FILES['fileInput'].file.read().decode())
                reader = csv.reader(text)
        
        arrData = []
        for line in reader:
            line = ''.join(line)
            arrData.append(line)
        
        myfile = StringIO()
        
        metode = request.POST['metode']
        statusFormalisasi = request.POST.get('formalisasi', False)
        if(metode == 'EDR'):
            for line in arrData:
                hasil = F_EDR(line)
                myfile.write(hasil + os.linesep)
        elif(metode == 'ED'):
            for line in arrData:
                hasil = F_ED(line)
                myfile.write(hasil + os.linesep)
        elif(metode == 'BG'):
            for line in arrData:
                hasil = F_BG(line)
                myfile.write(hasil + os.linesep)
        
        myfile.flush()
        myfile.seek(0)
        
        response = HttpResponse(FileWrapper(myfile), content_type='text/csv')
        response['Content-Disposition'] = 'attachment; filename=hasil.csv'
        return response
    else:
        return render(request, 'index_preprocess.html', {})
        
        
Example #15
def excepthook(excType, excValue, tracebackobj):
    """
    Global function to catch unhandled exceptions.
    @param excType exception type
    @param excValue exception value
    @param tracebackobj traceback object
    """
    separator = '-' * 80

    tbinfofile = StringIO()
    traceback.print_tb(tracebackobj, None, tbinfofile)
    tbinfofile.seek(0)
    tbinfo = tbinfofile.read()
    errmsg = '%s: \n%s' % (str(excType), str(excValue))
    sections = [separator, errmsg, separator, tbinfo]
    msg = '\n'.join(sections)
    sys.exit(str(msg))
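A minimal sketch of how a hook like this is typically installed; sys.excepthook is the standard interpreter-level entry point for uncaught exceptions.

# Route uncaught exceptions through excepthook() above.
import sys

sys.excepthook = excepthook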
Example #16
def test_roundtrip_translation():
    sio1 = StringIO(u'a\nb', newline='\r\n')
    pos = sio1.seek(1)
    assert sio1.getvalue() == u'a\r\nb'
    state = sio1.__getstate__()
    sio2 = StringIO()
    sio2.__setstate__(state)
    assert sio2.getvalue() == u'a\r\nb'
    assert sio2.tell() == pos
Example #17
    def test1start_debugging(self):
        """This test case checks if the DebugModelElement was initiated and the output was correct."""
        old_stderr = sys.stderr
        output = StringIO()
        sys.stderr = output
        debug_model_element = DebugModelElement('debug')
        self.assertEqual(
            output.getvalue(),
            'DebugModelElement %s added\n' % debug_model_element.element_id)

        output.seek(0)
        output.truncate(0)

        matchContext = MatchContext(b'some data')
        matchElement = debug_model_element.get_match_element(
            'debugMatch', matchContext)
        self.assertEqual(
            output.getvalue(),
            'DebugModelElement path = "%s", unmatched = "%s"\n' %
            (matchElement.get_path(), matchContext.match_data))

        sys.stderr = old_stderr
class TestBase(unittest.TestCase):
    """This is the base class for all unittests."""

    __configFilePath = os.getcwd() + '/unit/config/config.py'

    def setUp(self):
        """Set up all needed variables and remove persisted data."""
        PersistenceUtil.persistable_components = []
        self.aminer_config = AminerConfig.load_config(self.__configFilePath)
        self.analysis_context = AnalysisContext(self.aminer_config)
        self.output_stream = StringIO()
        self.stream_printer_event_handler = StreamPrinterEventHandler(
            self.analysis_context, self.output_stream)
        persistence_dir_name = AminerConfig.build_persistence_file_name(
            self.aminer_config)
        if os.path.exists(persistence_dir_name):
            shutil.rmtree(persistence_dir_name)
        if not os.path.exists(persistence_dir_name):
            os.makedirs(persistence_dir_name)
        initialize_loggers(self.aminer_config,
                           getpwnam('aminer').pw_uid,
                           getgrnam('aminer').gr_gid)
        if isinstance(persistence_dir_name, str):
            persistence_dir_name = persistence_dir_name.encode()
        SecureOSFunctions.secure_open_base_directory(
            persistence_dir_name, os.O_RDONLY | os.O_DIRECTORY | os.O_PATH)
        PersistenceUtil.SKIP_PERSISTENCE_ID_WARNING = True

    def tearDown(self):
        """Delete all persisted data after the tests."""
        self.aminer_config = AminerConfig.load_config(self.__configFilePath)
        persistence_file_name = AminerConfig.build_persistence_file_name(
            self.aminer_config)
        if os.path.exists(persistence_file_name):
            shutil.rmtree(persistence_file_name)
        if not os.path.exists(persistence_file_name):
            os.makedirs(persistence_file_name)
        SecureOSFunctions.close_base_directory()

    def reset_output_stream(self):
        """Reset the output stream."""
        self.output_stream.seek(0)
        self.output_stream.truncate(0)
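A minimal sketch of a test built on this base class; the scenario is hypothetical, and only attributes created in setUp() above (output_stream, reset_output_stream) are used.

class OutputCaptureTest(TestBase):
    """Hypothetical test relying on the fixtures prepared by TestBase.setUp()."""

    def test_output_is_captured(self):
        # setUp() registers self.stream_printer_event_handler with
        # self.output_stream (a StringIO); here we write directly to the
        # stream for a minimal round-trip check.
        self.output_stream.write('demo event\n')
        self.assertIn('demo event', self.output_stream.getvalue())
        self.reset_output_stream()
        self.assertEqual(self.output_stream.getvalue(), '')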
Example #19
def test_seek():

    s = u"1234567890"
    sio = StringIO(s)

    sio.read(5)
    sio.seek(0)
    r = sio.read()
    assert r == s

    sio.seek(3)
    r = sio.read()
    assert r == s[3:]
    raises(TypeError, sio.seek, 0.0)

    exc_info = raises(ValueError, sio.seek, -3)
    assert exc_info.value.args[0] == "Negative seek position -3"

    raises(ValueError, sio.seek, 3, -1)
    raises(ValueError, sio.seek, 3, -3)

    sio.close()
    raises(ValueError, sio.seek, 0)
Example #20
    def create_thumbnail(self):
        if not self.M.image:
            THUMBNAIL_SIZE = (200, 200)
            DJANGO_TYPE = self.M_image.file.content_type
            if DJANGO_TYPE == 'image/jpeg':
                PIL_TYPE = 'jpeg'
                FILE_EXTENSION = 'jpg'
            elif DJANGO_TYPE == 'image/png':
                PIL_TYPE = 'png'
                FILE_EXTENSION = 'png'
            image = Image.open(StringIO(self.M_image.read()))
            image.thumbnail(THUMBNAIL_SIZE, Image.ANTIALIAS)
            temp_handle = StringIO()
            image.save(temp_handle, PIL_TYPE)
            temp_handle.seek(0)
            suf = SimpleUploadedFile(os.path.split(self.M_image.name)[-1],
                                     temp_handle.read(),
                                     content_type=DJANGO_TYPE)
            self.thumb_pic.save(
                '%s.%s' % (os.path.splitext(suf.name)[0], FILE_EXTENSION),
                suf,
                save=False)

        return
Example #21
def test_overseek():

    s = u"1234567890"
    sio = StringIO(s)

    res = sio.seek(11)
    assert res == 11
    res = sio.read()
    assert res == u""
    assert sio.tell() == 11
    assert sio.getvalue() == s
    sio.write(u"")
    assert sio.getvalue() == s
    sio.write(s)
    assert sio.getvalue() == s + u"\0" + s
class TestBase(unittest.TestCase):
    """This is the base class for all unittests."""

    __configFilePath = os.getcwd() + '/unit/data/config.py'

    def setUp(self):
        """Set up all needed variables and remove persisted data."""
        PersistenceUtil.persistable_components = []
        self.aminer_config = load_config(self.__configFilePath)
        self.analysis_context = AnalysisContext(self.aminer_config)
        self.output_stream = StringIO()
        self.stream_printer_event_handler = StreamPrinterEventHandler(
            self.analysis_context, self.output_stream)
        persistence_dir_name = build_persistence_file_name(self.aminer_config)
        if os.path.exists(persistence_dir_name):
            shutil.rmtree(persistence_dir_name)
        if not os.path.exists(persistence_dir_name):
            os.makedirs(persistence_dir_name)
        initialize_loggers(self.aminer_config, os.getuid(), os.getgid())
        if isinstance(persistence_dir_name, str):
            persistence_dir_name = persistence_dir_name.encode()
        SecureOSFunctions.secure_open_base_directory(
            persistence_dir_name, os.O_RDONLY | os.O_DIRECTORY | os.O_PATH)
        PersistenceUtil.SKIP_PERSISTENCE_ID_WARNING = True

    def tearDown(self):
        """Delete all persisted data after the tests."""
        self.aminer_config = load_config(self.__configFilePath)
        persistence_file_name = build_persistence_file_name(self.aminer_config)
        if os.path.exists(persistence_file_name):
            shutil.rmtree(persistence_file_name)
        if not os.path.exists(persistence_file_name):
            os.makedirs(persistence_file_name)
        SecureOSFunctions.close_base_directory()

    def reset_output_stream(self):
        """Reset the output stream."""
        self.output_stream.seek(0)
        self.output_stream.truncate(0)

    def compare_match_results(self, data, match_element, match_context, id_,
                              path, match_string, match_object, children):
        """Compare the results of get_match_element() if match_element is not None."""
        self.assertEqual(match_element.path, "%s/%s" % (path, id_))
        self.assertEqual(match_element.match_string, match_string)
        self.assertEqual(match_element.match_object, match_object)
        if children is None:
            self.assertIsNone(match_element.children, children)
        else:
            self.assertEqual(len(children), len(match_element.children))
            for i, child in enumerate(children):
                self.assertEqual(match_element.children[i].path, child.path)
                self.assertEqual(match_element.children[i].match_string,
                                 child.match_string)
                self.assertEqual(match_element.children[i].match_object,
                                 child.match_object)
                self.assertIsNone(match_element.children[i].children, children)
        self.assertEqual(match_context.match_string, match_string)
        self.assertEqual(match_context.match_data, data[len(match_string):])

    def compare_no_match_results(self, data, match_element, match_context):
        """Compare the results of get_match_element() if match_element is not None."""
        self.assertIsNone(match_element, None)
        self.assertEqual(match_context.match_data, data)
def downloadfile1(allfile, facebookdata):
    panjang = len(allfile)

    if panjang == 3:
        ##do something
        f = StringIO()
        wb = Workbook()
        ws = wb.active

        for row in facebookdata:
            f.write(row.name + ',' + row.status + ',' + str(row.like) + ',' +
                    str(row.comment) + ',' + str(row.share))
            f.write('\n')
            ws.append([row.name, row.status, row.like, row.comment, row.share])

        f.flush()
        f.seek(0)

        ##make json
        g = StringIO()
        data = []
        for row in facebookdata:
            data1 = {}
            data1["NAME"] = row.name
            data1["STATUS"] = row.status
            data1["LIKE"] = row.like
            data1["COMMENT"] = row.comment
            data1["SHARE"] = row.share
            data.append(data1)
        json.dump(data, g, indent=3)
        g.flush()
        g.seek(0)

        output = BytesIO()
        zip = ZipFile(output, 'w')
        zip.writestr("Data_facebook.csv", f.getvalue())
        zip.writestr("Data_facebook.JSON", g.getvalue())
        zip.writestr("Data_facebook.xlsx", save_virtual_workbook(wb))
        zip.close()
        response = HttpResponse(output.getvalue(),
                                content_type='application/octet-stream')
        response[
            'Content-Disposition'] = 'attachment; filename=Data_facebook.zip'

        return response

    elif panjang == 2:

        ##read list
        if '0' in allfile and '1' in allfile:
            ##return csv and json
            print('csv and jsn')
            #make csv
            f = StringIO()

            for row in facebookdata:
                f.write(row.name + ',' + row.status + ',' + str(row.like) +
                        ',' + str(row.comment) + ',' + str(row.share))
                f.write('\n')

            f.flush()
            f.seek(0)

            ##make json
            g = StringIO()
            data = []
            for row in facebookdata:
                data1 = {}
                data1["NAME"] = row.name
                data1["STATUS"] = row.status
                data1["LIKE"] = row.like
                data1["COMMENT"] = row.comment
                data1["SHARE"] = row.share
                data.append(data1)
            json.dump(data, g, indent=3)
            g.flush()
            g.seek(0)

            output = BytesIO()
            zip = ZipFile(output, 'w')
            zip.writestr("Data_facebook.csv", f.getvalue())
            zip.writestr("Data_facebook.JSON", g.getvalue())
            zip.close()
            response = HttpResponse(output.getvalue(),
                                    content_type='application/octet-stream')
            response[
                'Content-Disposition'] = 'attachment; filename=Data_facebook.zip'

            return response

        elif '0' in allfile and '2' in allfile:
            #return csv and xls
            print('csv and xls')
            f = StringIO()

            for row in facebookdata:
                f.write(row.name + ',' + row.status + ',' + str(row.like) +
                        ',' + str(row.comment) + ',' + str(row.share))
                f.write('\n')

            f.flush()
            f.seek(0)

            g = StringIO()
            wb = Workbook()
            ws = wb.active

            for row in facebookdata:
                ws.append(
                    [row.name, row.status, row.like, row.comment, row.share])

            output = BytesIO()
            zip = ZipFile(output, 'w')
            zip.writestr("Data_facebook.csv", f.getvalue())
            zip.writestr("Data_facebook.xlsx", save_virtual_workbook(wb))
            zip.close()
            response = HttpResponse(output.getvalue(),
                                    content_type='application/octet-stream')
            response[
                'Content-Disposition'] = 'attachment; filename=Data_facebook.zip'

            return response
        else:
            #return json xls

            #make json file
            f = StringIO()
            data = []
            for row in facebookdata:
                data1 = {}
                data1["NAME"] = row.name
                data1["TWEET"] = row.tweet
                data1["RETWEET_USER"] = row.Retweet_user
                data1["HASHTAGS"] = row.hashtag
                data1["DATE"] = str(row.date)
                data.append(data1)
            json.dump(data, f, indent=3)
            f.flush()
            f.seek(0)

            #make xls file
            g = StringIO()
            wb = Workbook()
            ws = wb.active

            for row in facebookdata:
                ws.append(
                    [row.name, row.status, row.like, row.comment, row.share])

            output = BytesIO()
            zip = ZipFile(output, 'w')
            zip.writestr("Data_facebook.csv", f.getvalue())
            zip.writestr("Data_facebook.xlsx", save_virtual_workbook(wb))
            zip.close()
            response = HttpResponse(output.getvalue(),
                                    content_type='application/octet-stream')
            response[
                'Content-Disposition'] = 'attachment; filename=Data_facebook.zip'

            return response

    elif panjang == 0:

        ##asli
        print('do nothing')
        return HttpResponseRedirect('../search2')

    else:
        if '0' in allfile:
            f = StringIO()
            writer = csv.writer(f)

            for row in facebookdata:
                writer.writerow(
                    [row.name, row.status, row.like, row.comment, row.share])
            f.flush()
            f.seek(0)
            response = HttpResponse(FileWrapper(f), content_type='text/csv')
            response[
                'Content-Disposition'] = 'attachment; filename=Data_facebook.csv'
            return response
        elif '1' in allfile:
            f = StringIO()
            data = []
            for row in facebookdata:
                data1 = {}
                data1["NAME"] = row.name
                data1["STATUS"] = row.status
                data1["LIKE"] = row.like
                data1["COMMENT"] = row.comment
                data1["SHARE"] = row.share
                data.append(data1)
            json.dump(data, f, indent=3)
            f.flush()
            f.seek(0)
            response = HttpResponse(FileWrapper(f), content_type='application/json')
            response[
                'Content-Disposition'] = 'attachment; filename=Data_facebook.json'
            return response
        else:
            f = StringIO()
            wb = Workbook()
            ws = wb.active

            for row in facebookdata:
                ws.append(
                    [row.name, row.status, row.like, row.comment, row.share])

            response = HttpResponse(save_virtual_workbook(wb),
                                    content_type='application/vnd.ms-excel')
            response[
                'Content-Disposition'] = 'attachment; filename=Data_facebook.xlsx'

            return response
Example #24
    pyfile.write('# -*- coding: utf-8 -*-\n'.encode(encoding='utf_8',
                                                    errors='strict'))
    pyfile.write('#@PydevCodeAnalysisIgnore\n'.encode(encoding='utf_8',
                                                      errors='strict'))
    pyfile.write('#pylint: disable-all\n'.encode(encoding='utf_8',
                                                 errors='strict'))

    xfile = StringIO()
    compileUi(uifile,
              xfile,
              execute=True,
              indent=4,
              from_imports=True,
              resource_suffix='_rc')

    xfile.seek(0)
    for line in xfile.readlines():

        # --- patch to qt4 /qt5 compatibility
        line = line.replace('QtGui.', '')
        line = line.replace('QtCore.', '')
        line = line.replace('QtWidgets.', '')
        line = line.replace('from PyQt5 import QtCore, QtGui, QtWidgets',
                            UNI_QT_IMPORT)
        # ^^^ patch to qt4 /qt5 compatibility

        if line == 'from PyQt5 import Qsci\n':
            pyfile.write(QSCSIPATCH.encode(encoding='utf_8', errors='strict'))
        else:
            pyfile.write(line.encode(encoding='utf_8', errors='strict'))