Example #1
0
def rebuildIntervall(configfile, dbStorage, dateIntervall):
    """
    Fetch the missing daily link lists for every date in *dateIntervall*.

    Shows a cancellable wx progress dialog while each daily date page is
    read from the configured server and filtered down to its '.zip' links.

    :param configfile: path to the configuration file holding the server credentials
    :param dbStorage: data base storage target (not used by this function)
    :param dateIntervall: sequence of daily date strings to fetch
    :return: list of '.zip' link lists, one entry per processed date
             (iterated newest-first; may be shorter if the user cancelled)
    """
    import wx
    from httpmanager import readURL
    from searchrevdialog import getConfigData
    from searchrevdialog import eraseifnot

    pulse_dlg = wx.ProgressDialog(title="Completing data base",
                                  message="Receiving missing time intervall ... ",
                                  maximum=101,
                                  style=wx.PD_CAN_ABORT | wx.PD_ELAPSED_TIME | wx.PD_REMAINING_TIME)

    ficon = 'dat/images/search.ico'
    icon = wx.Icon(ficon, wx.BITMAP_TYPE_ICO)
    pulse_dlg.SetIcon(icon)

    userData = getConfigData(configfile)
    serverdata = userData[0]

    # authorize against the daily server
    URL = readURL.URLmanager('read')
    URL.authorize(serverdata.url, serverdata.user, serverdata.passw)

    # get all daily names and file versions, newest date first
    sublinks = []
    sublinksdb = []
    canceledProcess = -1
    # BUGFIX: the progress denominator was len(dateIntervall)-1, which raised
    # ZeroDivisionError for a single-date intervall; clamp it to at least 1.
    progress_steps = max(len(dateIntervall) - 1, 1)
    ii = 0
    for i in reversed(range(len(dateIntervall))):
        # advance the progress bar; Update() returns False when the user aborts
        updmessage = ("Receiving missing time intervall ... " + " - "
                      + str(round((float(ii) / float(len(dateIntervall))) * 100)) + ' %')
        (keepGoin, skip) = pulse_dlg.Update(round((float(ii) / float(progress_steps)) * 100), updmessage)
        if not keepGoin:
            canceledProcess = 1
            break

        # read the page of this daily date and keep only the '.zip' links
        linkstr = str(dateIntervall[i])
        sublinks.append(serverdata.url + linkstr)
        subcontent = URL.readURL(sublinks[ii])
        sublinksdbtmp = URL.linkfilter(subcontent)
        sublinksdb.append(eraseifnot(sublinksdbtmp, '.zip'))
        ii += 1

    pulse_dlg.Destroy()

    return sublinksdb
Example #2
0
def rebuild(configfile, dbStorage):
    """
    Build up the data base from the daily server listing.

    Shows a cancellable wx progress dialog while every daily page is
    fetched and its '.zip' links are collected, then hands the result
    to writeDB() for storage.

    :param configfile: path to the configuration file holding the server credentials
    :param dbStorage: data base storage target passed on to writeDB()
    :return: status from writeDB(), or -1 when the user cancelled
    """
    import wx
    from httpmanager import readURL
    from searchrevdialog import getConfigData
    from searchrevdialog import httpReader
    from searchrevdialog import eraseifnot

    pulse_dlg = wx.ProgressDialog(title="Building up data base",
                                  message="Receiving daily information ... ",
                                  maximum=int(101),
                                  style=wx.PD_CAN_ABORT | wx.PD_ELAPSED_TIME | wx.PD_REMAINING_TIME)

    userData = getConfigData(configfile)
    serverdata = userData[0]

    # read the daily listing from the server and keep only the 'num' links
    dailycontent = httpReader(serverdata)
    URL = readURL.URLmanager('read')
    linkdb = eraseifnot(URL.linkfilter(dailycontent), 'num')

    # fetch every daily page and extract the revision archive links
    sublinks = []
    sublinksdb = []
    canceledProcess = -1
    total = len(linkdb)
    for idx, rawlink in enumerate(linkdb):
        # advance the progress bar; Update() returns False on user abort
        percent = round((float(idx) / float(total)) * 100)
        updmessage = "Receiving information ... " + " - " + str(percent) + ' %'
        keepGoin, skip = pulse_dlg.Update(percent, updmessage)
        if not keepGoin:
            canceledProcess = 1
            break

        # strip the surrounding quote characters from the raw link entry
        sublinks.append(serverdata.url + str(rawlink)[2:-2])
        subcontent = URL.readURL(sublinks[idx])
        sublinksdb.append(eraseifnot(URL.linkfilter(subcontent), '.zip'))

    # persist the collected links unless the user aborted the run
    stat = writeDB(sublinksdb, dbStorage, strRange=(2, -2)) if canceledProcess != 1 else -1

    pulse_dlg.Destroy()

    return stat