Example #1
def publicsearch(request):

    if request.method == 'GET':
        requestObject = request.GET
    elif request.method == 'POST':
        requestObject = request.POST
    else:
        # unsupported HTTP method: fall back to an empty query so requestObject is always defined
        requestObject = {}

    context = {'items': [], 'searchValues': requestObject, 'displayType': setDisplayType(requestObject)}
    if requestObject != {}:
        form = forms.Form(requestObject)

        if form.is_valid() or request.method == 'GET':

            context = doSearch(SOLRSERVER, SOLRCORE, context)

            if 'csv' in requestObject:
                # Create the HttpResponse object with the appropriate CSV header.
                response = HttpResponse(content_type='text/csv')
                response['Content-Disposition'] = 'attachment; filename="ucjeps.csv"'
                #response.write(u'\ufeff'.encode('utf8'))
                writeCsv(response,context['items'],writeheader=True)
                return response
            elif 'map-bmapper' in requestObject:
                context = setupBMapper(requestObject, context)
            elif 'map-google' in requestObject:
                context = setupGoogleMap(requestObject, context)
            elif 'email' in requestObject:
                pass

    context = setConstants(requestObject, context)

    return render(request, 'publicsearch.html', context)
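The writeCsv helper called above is not defined in this example. Below is a minimal sketch of a compatible helper, assuming items is a list of dicts; it relies only on the fact that csv.writer accepts any object with a write() method, which a Django HttpResponse provides. The alphabetical field ordering is an illustrative assumption, not taken from the original code.

import csv

def writeCsv(fileobj, items, writeheader=False):
    # csv.writer only needs a write() method, so a Django HttpResponse
    # (or any open file) can be passed in as fileobj.
    fieldnames = sorted(items[0].keys()) if items else []
    writer = csv.writer(fileobj)
    if writeheader:
        writer.writerow(fieldnames)
    for item in items:
        writer.writerow([item.get(field, '') for field in fieldnames])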
Example #2
def getFold(fold=0,
            fname_in='trainFolds.csv',
            fnames=['CTs.csv', 'Fleischner.csv', 'Nodules.csv'],
            prefix_in='train',
            prefix_out='',
            excludeFold=True):

    if not prefix_out:
        prefix_out = 'fold{}'.format(fold)  # e.g. fold0

    # Get the LNDbIDs for this fold
    nodules = readCsv(fname_in)
    header = nodules[0]
    lines = nodules[1:]

    foldind = header.index('Fold{}'.format(fold))  # get fold idx from file
    foldlnd = [l[foldind] for l in lines if len(l) > foldind]  # LNDbIDs in this fold, skipping rows with missing data

    for fname in fnames:  # loop thru filetypes
        lines = readCsv(prefix_in + fname)
        header = lines[0]
        lines = lines[1:]

        lndind = header.index('LNDbID')
        if not excludeFold:
            lines = [l for l in lines if l[lndind] in foldlnd]
        else:
            lines = [l for l in lines if not l[lndind] in foldlnd]

        #Save to csv
        writeCsv(prefix_out + fname, [header] + lines)
Example #3
def getFold(fold=0, fname_in='trainFolds.csv',
            fnames=['CTs.csv', 'Fleischner.csv', 'Nodules.csv'],
            prefix_in='train', prefix_out='',
            excludeFold=False):
    if not prefix_out:
        prefix_out = 'fold{}'.format(fold)

    # Get fold lnds
    nodules = readCsv(fname_in)
    header = nodules[0]
    lines = nodules[1:]

    foldind = header.index('Fold{}'.format(fold))
    foldlnd = [l[foldind] for l in lines if len(l) > foldind]

    for fname in fnames:
        lines = readCsv(prefix_in + fname)
        header = lines[0]
        lines = lines[1:]

        lndind = header.index('LNDbID')
        if not excludeFold:
            lines = [l for l in lines if l[lndind] in foldlnd]
        else:
            lines = [l for l in lines if not l[lndind] in foldlnd]

        # Save to csv
        writeCsv(prefix_out + fname, [header] + lines)
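A hypothetical invocation of the getFold function defined above, assuming trainFolds.csv, trainCTs.csv, trainFleischner.csv and trainNodules.csv are present in the working directory:

# Build the training split for fold 0: keep only rows whose LNDbID is NOT
# listed under Fold0, writing fold0CTs.csv, fold0Fleischner.csv and fold0Nodules.csv.
getFold(fold=0, excludeFold=True)

# The complementary call keeps only fold 0's LNDbIDs (e.g. a validation split);
# prefix_out is set explicitly so the files from the call above are not overwritten.
getFold(fold=0, excludeFold=False, prefix_out='val0')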
Example #4
def csv(request):
    if request.method == 'POST' and request.POST != {}:
        requestObject = dict(request.POST.iteritems())
        form = forms.Form(requestObject)

        if form.is_valid():
            try:
                context = {'searchValues': requestObject}
                csvformat, fieldset, csvitems = setupCSV(
                    requestObject, context)
                loginfo('csv', context, request)

                # create the HttpResponse object with the appropriate CSV header.
                response = HttpResponse(content_type='text/csv')
                response['Content-Disposition'] = 'attachment; filename="%s-%s.%s"' % (
                    CSVPREFIX, datetime.datetime.utcnow().strftime("%Y%m%d%H%M%S"), CSVEXTENSION)
                return writeCsv(response,
                                fieldset,
                                csvitems,
                                writeheader=True,
                                csvFormat=csvformat)
            except:
                messages.error(request, 'Problem creating .csv file. Sorry!')
                context['messages'] = messages
                return search(request)
Example #5
def csv(request):
    if request.method == 'POST' and request.POST != {}:
        requestObject = request.POST
        form = forms.Form(requestObject)

        if form.is_valid():
            # context = {'searchValues': requestObject}
            # context = doSearch(SOLRSERVER, SOLRCORE, context)
            context = SEARCHRESULTS

            # Create the HttpResponse object with the appropriate CSV header.
            response = HttpResponse(content_type='text/csv')
            response['Content-Disposition'] = 'attachment; filename="%s-%s.%s"' % (CSVPREFIX,datetime.datetime.utcnow().strftime("%Y%m%d%H%M%S"),CSVEXTENSION)
            #response.write(u'\ufeff'.encode('utf8'))
            writeCsv(response, context['items'], writeheader=True)

            loginfo('csv', context, request)
            return response
Example #6
def csv(request):
    if request.method == 'POST' and request.POST != {}:
        requestObject = request.POST
        form = forms.Form(requestObject)

        if form.is_valid():
            # context = {'searchValues': requestObject}
            # context = doSearch(SOLRSERVER, SOLRCORE, context)
            context = SEARCHRESULTS

            # Create the HttpResponse object with the appropriate CSV header.
            response = HttpResponse(content_type='text/csv')
            response['Content-Disposition'] = 'attachment; filename="ucjeps.csv"'
            #response.write(u'\ufeff'.encode('utf8'))
            writeCsv(response, context['items'], writeheader=True)

            loginfo('csv', context, request)
            return response
Example #7
def csv(request):
    if request.method == 'POST' and request.POST != {}:
        requestObject = request.POST
        form = forms.Form(requestObject)

        if form.is_valid():
            # context = {'searchValues': requestObject}
            # context = doSearch(SOLRSERVER, SOLRCORE, context)
            context = SEARCHRESULTS

            # Create the HttpResponse object with the appropriate CSV header.
            response = HttpResponse(content_type='text/tsv')
            response['Content-Disposition'] = 'attachment; filename="ucjeps.tsv"'
            #response.write(u'\ufeff'.encode('utf8'))
            writeCsv(response, context['items'], writeheader=True)

            loginfo('csv', context, request)
            return response
Example #8
def csv(request):
    if request.method == 'POST' and request.POST != {}:
        requestObject = dict(request.POST.iteritems())
        form = forms.Form(requestObject)

        if form.is_valid():
            try:
                context = {'searchValues': requestObject}
                csvformat, fieldset, csvitems = setupCSV(requestObject, context)
                loginfo('csv', context, request)

                # create the HttpResponse object with the appropriate CSV header.
                response = HttpResponse(content_type='text/csv')
                response['Content-Disposition'] = 'attachment; filename="%s-%s.%s"' % (
                    CSVPREFIX, datetime.datetime.utcnow().strftime("%Y%m%d%H%M%S"), CSVEXTENSION)
                return writeCsv(response, fieldset, csvitems, writeheader=True, csvFormat=csvformat)
            except:
                messages.error(request, 'Problem creating .csv file. Sorry!')
                context['messages'] = messages
                return search(request)
Example #9
        for i, td in enumerate(tr.find_all('td')):
            if i == 0:
                if td.get_text() == '':
                    coach_name = 'NA'
                else:
                    coach = td.get_text()
                    coach = re.sub("[^a-z A-Z]+", "", coach)
                    coach_name = coach
            if i == 1:
                if td.get_text() == '':
                    from_date = 'NA'
                else:
                    from_date = td.get_text()
            if i == 2:
                if td.get_text() == '':
                    to_date = 'NA'
                else:
                    to_date = td.get_text()

        coachBasicDictTemp = {
            "Coach Name": coach_name,
            "Active From": from_date,
            "Active To": to_date
        }
        coachBasicDict.append(coachBasicDictTemp)

writeCsv(coachBasicDict, csvFolder + "Coach_Basic_Data.csv")

#Status message

print 'The file Coach_Basic_Data.csv has been written successfully.'
Example #10
import numpy as np
import utils
import random
if __name__ == "__main__":
    src = "./train_label.csv"
    csvdata = utils.readCsv(src)
    random.shuffle(csvdata)

    val = csvdata[:100]
    train = csvdata[100:]
    utils.writeCsv("./train.csv", train)
    utils.writeCsv("./val.csv", val)
Example #11
def uploadfiles(request):

    jobinfo = {}
    constants = {}
    images = []
    dropdowns = getDropdowns()
    elapsedtime = time.time()

    form = forms.Form(request)
    if request.POST:

        contributor = request.POST['contributor']
        overrideContributor = request.POST['overridecreator']

        creatorDisplayname = request.POST['creator']
        overrideCreator = request.POST['overridecreator']

        rightsholderDisplayname = request.POST['rightsholder']
        overrideRightsholder = request.POST['overriderightsholder']

        constants = {'creator': creatorDisplayname, 'contributor': contributor, 'rightsholder': rightsholderDisplayname}

        for lineno,afile in enumerate(request.FILES.getlist('imagefiles')):
            #print afile
            try:
                print "%s %s: %s %s (%s %s)" % ('id', lineno, 'name', afile.name, 'size', afile.size)
                im = get_exif(afile)
                objectnumber = getNumber(afile.name)
                #objectCSID = getCSID(objectnumber)
                creator, creatorRefname = assignValue(creatorDisplayname,overrideCreator,im,'Artist',dropdowns['creators'])
                contributor, dummy = assignValue(contributor,overrideContributor,im,'ImageDescription',{})
                rightsholder, rightsholderRefname = assignValue(rightsholderDisplayname,overrideRightsholder,im,'RightsHolder',dropdowns['rightsholders'])
                datetimedigitized, dummy = assignValue('','ifblank',im,'DateTimeDigitized',{})
                imageinfo = {'id': lineno, 'name': afile.name, 'size': afile.size,
                             'objectnumber': objectnumber,
                             #'objectCSID': objectCSID,
                             'date': datetimedigitized,
                             'creator': creatorRefname,
                             'contributor': contributor,
                             'rightsholder': rightsholderRefname,
                             'creatorDisplayname': creator,
                             'rightsholderDisplayname': rightsholder,
                             'contributorDisplayname': contributor
                }
                handle_uploaded_file(afile, imageinfo)
                images.append(imageinfo)
            except:
                #raise
                images.append({'name': afile.name, 'size': afile.size, 'error': 'problem extracting image metadata, not processed'})

        if len(images) > 0:
            jobnumber = time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime())
            jobinfo['jobnumber'] = jobnumber
            writeCsv(getJobfile(jobnumber)+'.step1.csv', images,
                     ['name', 'size', 'objectnumber', 'date', 'creator', 'contributor', 'rightsholder'])
            jobinfo['estimatedtime'] = '%8.1f' % (len(images) * 10 / 60.0)

            if 'createmedia' in request.POST:
                jobinfo['status'] = 'createmedia'
                loginfo('start', getJobfile(jobnumber), request)
                try:
                    retcode = subprocess.call(["/usr/local/share/django/pahma_project/uploadmedia/postblob.sh", getJobfile(jobnumber)])
                    if retcode < 0:
                        loginfo('process', jobnumber+" Child was terminated by signal %s" %  -retcode, request)
                    else:
                        loginfo('process', jobnumber+": Child returned %s" %  retcode, request)
                except OSError as e:
                    loginfo('error', "Execution failed: %s" % e, request)
                loginfo('finish', getJobfile(jobnumber), request)

            elif 'uploadmedia' in request.POST:
                jobinfo['status'] = 'uploadmedia'
            else:
                jobinfo['status'] = 'No status possible'

    status = 'up'
    timestamp = time.strftime("%b %d %Y %H:%M:%S", time.localtime())
    elapsedtime = time.time() - elapsedtime

    return render(request, 'uploadmedia.html',
                  {'title': TITLE, 'images': images, 'count': len(images), 'constants': constants, 'jobinfo': jobinfo,
                   'dropdowns': dropdowns, 'overrides': overrides, 'status': status, 'timestamp': timestamp, 'elapsedtime': '%8.2f' % elapsedtime})
Example #12
def prepareFiles(request, validateonly, BMUoptions, constants):
    jobnumber = constants['jobnumber']
    jobinfo = {}
    images = []
    for lineno, afile in enumerate(request.FILES.getlist('imagefiles')):
        # print afile
        try:
            print "%s %s: %s %s (%s %s)" % ('id', lineno + 1, 'name', afile.name, 'size', afile.size)
            image = get_exif(afile)
            filename, objectnumber, imagenumber = getNumber(afile.name, INSTITUTION)
            datetimedigitized, dummy = assignValue('', 'ifblank', image, 'DateTimeDigitized', {})
            imageinfo = {'id': lineno, 'name': afile.name, 'size': afile.size,
                         'objectnumber': objectnumber,
                         'imagenumber': imagenumber,
                         # 'objectCSID': objectCSID,
                         'date': datetimedigitized}
            for override in BMUoptions['overrides']:
                dname,refname = assignValue(constants[override[2]][0], constants[override[2]][1], image, override[3], override[4])
                imageinfo[override[2]] = refname
                # add the Displayname just in case...
                imageinfo['%sDisplayname' % override[2]] = dname

            if not validateonly:
                handle_uploaded_file(afile)

            for option in ['handling', 'approvedforweb']:
                if option in request.POST:
                    imageinfo[option] = request.POST[option]
                else:
                    imageinfo[option] = ''

            if 'handling' in request.POST:
                handling = request.POST['handling']
                for parms in BMUoptions['bmuconstants'][handling]:
                    imageinfo[parms] = BMUoptions['bmuconstants'][handling][parms]

                # special case:
                # borndigital media have their mh id numbers unconditionally replaced with a sequence number
                if imageinfo['handling'] == 'borndigital':
                    # for these, we create a media handling number...
                    # options considered were:
                    # DP-2015-10-08-12-16-43-0001 length: 27
                    # DP-201510081216430001 length: 21
                    # DP-2CBE859E990BFB1 length: 18
                    # DP-2015-10-08-12-16-43-0001 length: 27 the winner! (most legible)
                    mhnumber = jobnumber + ("-%0.4d" % (lineno + 1))
                    #mhnumber = hex(int(mhnumber.replace('-','')))[2:]
                    imageinfo['objectnumber'] = 'DP-' + mhnumber
            images.append(imageinfo)

        except:
            # raise
            if not validateonly:
                # we still upload the file, anyway...
                handle_uploaded_file(afile)
            images.append({'name': afile.name, 'size': afile.size,
                           'error': 'problem extracting image metadata, not processed'})

    if len(images) > 0:
        jobinfo['jobnumber'] = jobnumber

        if not validateonly:
            writeCsv(getJobfile(jobnumber) + '.step1.csv', images, FIELDS2WRITE)
        jobinfo['estimatedtime'] = '%8.1f' % (len(images) * 10 / 60.0)

        if 'createmedia' in request.POST:
            jobinfo['status'] = 'createmedia'
            if not validateonly:
                loginfo('start', getJobfile(jobnumber), request)
                try:
                    retcode = subprocess.call([path.join(POSTBLOBPATH, 'postblob.sh'), INSTITUTION, getJobfile(jobnumber), BATCHPARAMETERS])
                    if retcode < 0:
                        loginfo('process', jobnumber + " Child was terminated by signal %s" % -retcode, request)
                    else:
                        loginfo('process', jobnumber + ": Child returned %s" % retcode, request)
                except OSError as e:
                    loginfo('error', "Execution failed: %s" % e, request)
                loginfo('finish', getJobfile(jobnumber), request)

        elif 'uploadmedia' in request.POST:
            jobinfo['status'] = 'uploadmedia'
        else:
            jobinfo['status'] = 'No status possible'

    return jobinfo, images
Example #13
def prepareFiles(request, validateonly, BMUoptions, constants):
    jobnumber = constants['jobnumber']
    jobinfo = {}
    images = []
    for lineno, afile in enumerate(request.FILES.getlist('imagefiles')):
        # print afile
        try:
            print "%s %s: %s %s (%s %s)" % ('id', lineno + 1, 'name', afile.name, 'size', afile.size)
            image = get_exif(afile)
            filename, objectnumber, imagenumber = getNumber(afile.name, INSTITUTION)
            datetimedigitized, dummy = assignValue('', 'ifblank', image, 'DateTimeDigitized', {})
            imageinfo = {'id': lineno, 'name': afile.name, 'size': afile.size,
                         'objectnumber': objectnumber,
                         'imagenumber': imagenumber,
                         # 'objectCSID': objectCSID,
                         'date': datetimedigitized}
            for override in BMUoptions['overrides']:
                dname,refname = assignValue(constants[override[2]][0], constants[override[2]][1], image, override[3], override[4])
                imageinfo[override[2]] = refname
                # add the Displayname just in case...
                imageinfo['%sDisplayname' % override[2]] = dname

            if not validateonly:
                handle_uploaded_file(afile)

            for option in ['handling', 'approvedforweb']:
                if option in request.POST:
                    imageinfo[option] = request.POST[option]
                else:
                    imageinfo[option] = ''

            if 'handling' in request.POST:
                handling = request.POST['handling']
                for parms in BMUoptions['bmuconstants'][handling]:
                    imageinfo[parms] = BMUoptions['bmuconstants'][handling][parms]

                # special case:
                # borndigital media have their mh id numbers unconditionally replaced with a sequence number
                if imageinfo['handling'] == 'borndigital':
                    # for these, we create a media handling number...
                    # options considered were:
                    # DP-2015-10-08-12-16-43-0001 length: 27
                    # DP-201510081216430001 length: 21
                    # DP-2CBE859E990BFB1 length: 18
                    # DP-2cbe859e990bfb1 length: 18 the winner!
                    mhnumber = jobnumber + ("-%0.4d" % (lineno + 1))
                    #mhnumber = hex(int(mhnumber.replace('-','')))[2:]
                    imageinfo['objectnumber'] = 'DP-' + mhnumber
            images.append(imageinfo)

        except:
            # raise
            if not validateonly:
                # we still upload the file, anyway...
                handle_uploaded_file(afile)
            images.append({'name': afile.name, 'size': afile.size,
                           'error': 'problem extracting image metadata, not processed'})

    if len(images) > 0:
        jobinfo['jobnumber'] = jobnumber

        if not validateonly:
            writeCsv(getJobfile(jobnumber) + '.step1.csv', images, FIELDS2WRITE)
        jobinfo['estimatedtime'] = '%8.1f' % (len(images) * 10 / 60.0)

        if 'createmedia' in request.POST:
            jobinfo['status'] = 'createmedia'
            if not validateonly:
                loginfo('start', getJobfile(jobnumber), request)
                try:
                    retcode = subprocess.call([path.join(POSTBLOBPATH, 'postblob.sh'), INSTITUTION, getJobfile(jobnumber), BATCHPARAMETERS])
                    if retcode < 0:
                        loginfo('process', jobnumber + " Child was terminated by signal %s" % -retcode, request)
                    else:
                        loginfo('process', jobnumber + ": Child returned %s" % retcode, request)
                except OSError as e:
                    loginfo('error', "Execution failed: %s" % e, request)
                loginfo('finish', getJobfile(jobnumber), request)

        elif 'uploadmedia' in request.POST:
            jobinfo['status'] = 'uploadmedia'
        else:
            jobinfo['status'] = 'No status possible'

    return jobinfo, images
Example #14
def uploadfiles(request):

    jobinfo = {}
    constants = {}
    images = []
    dropdowns = getDropdowns()
    elapsedtime = time.time()

    form = forms.Form(request)
    if request.POST:

        contributor = request.POST['contributor']
        overrideContributor = request.POST['overridecreator']

        creatorDisplayname = request.POST['creator']
        overrideCreator = request.POST['overridecreator']

        rightsholderDisplayname = request.POST['rightsholder']
        overrideRightsholder = request.POST['overriderightsholder']

        constants = {
            'creator': creatorDisplayname,
            'contributor': contributor,
            'rightsholder': rightsholderDisplayname
        }

        for lineno, afile in enumerate(request.FILES.getlist('imagefiles')):
            #print afile
            try:
                print "%s %s: %s %s (%s %s)" % ('id', lineno, 'name',
                                                afile.name, 'size', afile.size)
                im = get_exif(afile)
                objectnumber = getNumber(afile.name)
                #objectCSID = getCSID(objectnumber)
                creator, creatorRefname = assignValue(creatorDisplayname,
                                                      overrideCreator, im,
                                                      'Artist',
                                                      dropdowns['creators'])
                contributor, dummy = assignValue(contributor,
                                                 overrideContributor, im,
                                                 'ImageDescription', {})
                rightsholder, rightsholderRefname = assignValue(
                    rightsholderDisplayname, overrideRightsholder, im,
                    'RightsHolder', dropdowns['rightsholders'])
                datetimedigitized, dummy = assignValue('', 'ifblank', im,
                                                       'DateTimeDigitized', {})
                imageinfo = {
                    'id': lineno,
                    'name': afile.name,
                    'size': afile.size,
                    'objectnumber': objectnumber,
                    #'objectCSID': objectCSID,
                    'date': datetimedigitized,
                    'creator': creatorRefname,
                    'contributor': contributor,
                    'rightsholder': rightsholderRefname,
                    'creatorDisplayname': creator,
                    'rightsholderDisplayname': rightsholder,
                    'contributorDisplayname': contributor
                }
                handle_uploaded_file(afile, imageinfo)
                images.append(imageinfo)
            except:
                #raise
                images.append({
                    'name': afile.name,
                    'size': afile.size,
                    'error': 'problem extracting image metadata, not processed'
                })

        if len(images) > 0:
            jobnumber = time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime())
            jobinfo['jobnumber'] = jobnumber
            writeCsv(
                getJobfile(jobnumber) + '.step1.csv', images, [
                    'name', 'size', 'objectnumber', 'date', 'creator',
                    'contributor', 'rightsholder'
                ])
            jobinfo['estimatedtime'] = '%8.1f' % (len(images) * 10 / 60.0)

            if 'createmedia' in request.POST:
                jobinfo['status'] = 'createmedia'
                loginfo('start', getJobfile(jobnumber), request)
                try:
                    retcode = subprocess.call([
                        "/usr/local/share/django/pahma_project/uploadmedia/postblob.sh",
                        getJobfile(jobnumber)
                    ])
                    if retcode < 0:
                        loginfo(
                            'process', jobnumber +
                            " Child was terminated by signal %s" % -retcode,
                            request)
                    else:
                        loginfo('process',
                                jobnumber + ": Child returned %s" % retcode,
                                request)
                except OSError as e:
                    loginfo('error', "Execution failed: %s" % e, request)
                loginfo('finish', getJobfile(jobnumber), request)

            elif 'uploadmedia' in request.POST:
                jobinfo['status'] = 'uploadmedia'
            else:
                jobinfo['status'] = 'No status possible'

    status = 'up'
    timestamp = time.strftime("%b %d %Y %H:%M:%S", time.localtime())
    elapsedtime = time.time() - elapsedtime

    return render(
        request, 'uploadmedia.html', {
            'title': TITLE,
            'images': images,
            'count': len(images),
            'constants': constants,
            'jobinfo': jobinfo,
            'dropdowns': dropdowns,
            'overrides': overrides,
            'status': status,
            'timestamp': timestamp,
            'elapsedtime': '%8.2f' % elapsedtime
        })
Example #15
    def createOffers(self):
        offerId = startOfferId
        currentDate = date.today()

        for child in self.childRows:
            offerId += 1

            hasDiscount = random.choice([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) == 1
            discountStartDate = None
            discountEndDate = None
            randomDays = random.randint(1, 60)
            discountPrice = 0
            discountRanges = None
            if hasDiscount:
                discountStartDate = currentDate - timedelta(days=randomDays)
                discountEndDate = currentDate + timedelta(days=randomDays)

                discountStartDate = datetime.strftime(
                    discountStartDate, '%y-%m-%d') + 'T05:00:00Z'
                discountEndDate = datetime.strftime(discountEndDate,
                                                    '%y-%m-%d') + 'T05:00:00Z'
                discountPrice = child['PRICE']
                discountRanges = f'1|{discountPrice}.00'

            hasAvailableDate = random.choice([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) > 1
            availableStartDate = None
            availableEndDate = None
            leadTimeToShip = None
            if hasAvailableDate:
                availableStartDate = currentDate - timedelta(days=randomDays)
                availableEndDate = currentDate + timedelta(days=randomDays)

                availableStartDate = datetime.strftime(
                    availableStartDate, '%y-%m-%d') + 'T05:00:00Z'
                availableEndDate = datetime.strftime(availableEndDate,
                                                     '%y-%m-%d') + 'T05:00:00Z'
                leadTimeToShip = True

            shopId = child['SELLER_ID']
            row = {
                'offer-id': offerId,
                'product-sku': child['PRODUCT_SKU'],
                'min-shipping-price': minShippinPrice,
                'min-shipping-price-additional': minShippinPriceAdd,
                'min-shipping-zone': minShippinZone,
                'min-shipping-type': minShippinType,
                'price': child['PRICE'],
                'total-price': child['PRICE'],
                'price-additional-info': priceAditionalInfo,
                'quantity': random.randint(1, 50),
                'description': description,
                'state-code': stateCode,
                'shop-id': shopId,
                'shop-name': f'Tienda TEST {shopId}',
                'professional': professional,
                'premium': premium,
                'logistic-class': random.choice(logisticClasses),
                'active': random.choice(active),
                'favorite-rank': favoriteRank,
                'channels': channels,
                'deleted': deleted,
                'origin-price': child['ORIGIN_PRICE'],
                'discount-start-date': discountStartDate,
                'discount-end-date': discountEndDate,
                'available-start-date': availableStartDate,
                'available-end-date': availableEndDate,
                'discount-price': discountPrice,
                'currency-iso-code': currencyIsoCode,
                'discount-ranges': discountRanges,
                'leadtime-to-ship': leadTimeToShip,
                'allow-quote-requests': allowQuoteRequests,
                'price-ranges': priceRanges
            }
            self.offerRows.append(row)
        utils.writeCsv('data.csv', self.offerRows, self.fieldnames)
Example #16
def prepareFiles(request, validateonly, dropdowns):
    jobinfo = {}
    images = []
    for lineno, afile in enumerate(request.FILES.getlist('imagefiles')):
        # print afile
        try:
            print "%s %s: %s %s (%s %s)" % ('id', lineno, 'name', afile.name,
                                            'size', afile.size)
            image = get_exif(afile)
            filename, objectnumber, imagenumber = getNumber(afile.name)
            # objectCSID = getCSID(objectnumber)
            im.creator, im.creatorRefname = assignValue(
                im.creatorDisplayname, im.overrideCreator, image, 'Artist',
                dropdowns['creators'])
            im.contributor, dummy = assignValue(im.contributor,
                                                im.overrideContributor, image,
                                                'ImageDescription', {})
            im.rightsholder, im.rightsholderRefname = assignValue(
                im.rightsholderDisplayname, im.overrideRightsholder, image,
                'RightsHolder', dropdowns['rightsholders'])
            datetimedigitized, dummy = assignValue('', 'ifblank', image,
                                                   'DateTimeDigitized', {})
            imageinfo = {
                'id': lineno,
                'name': afile.name,
                'size': afile.size,
                'objectnumber': objectnumber,
                'imagenumber': imagenumber,
                # 'objectCSID': objectCSID,
                'date': datetimedigitized,
                'creator': im.creatorRefname,
                'contributor': im.contributor,
                'rightsholder': im.rightsholderRefname,
                'creatorDisplayname': im.creator,
                'rightsholderDisplayname': im.rightsholder,
                'contributorDisplayname': im.contributor
            }
            if not validateonly:
                handle_uploaded_file(afile)
            images.append(imageinfo)
        except:
            if not validateonly:
                # we still upload the file, anyway...
                handle_uploaded_file(afile)
            images.append({
                'name': afile.name,
                'size': afile.size,
                'error': 'problem extracting image metadata, not processed'
            })

    if len(images) > 0:
        jobnumber = time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime())
        jobinfo['jobnumber'] = jobnumber

        if not validateonly:
            writeCsv(
                getJobfile(jobnumber) + '.step1.csv', images, [
                    'name', 'size', 'objectnumber', 'date', 'creator',
                    'contributor', 'rightsholder', 'imagenumber'
                ])
        jobinfo['estimatedtime'] = '%8.1f' % (len(images) * 10 / 60.0)

        if 'createmedia' in request.POST:
            jobinfo['status'] = 'createmedia'
            if not validateonly:
                loginfo('start', getJobfile(jobnumber), request)
                try:
                    retcode = subprocess.call([
                        path.join(POSTBLOBPATH, 'postblob.sh'),
                        getJobfile(jobnumber)
                    ])
                    if retcode < 0:
                        loginfo(
                            'process', jobnumber +
                            " Child was terminated by signal %s" % -retcode,
                            request)
                    else:
                        loginfo('process',
                                jobnumber + ": Child returned %s" % retcode,
                                request)
                except OSError as e:
                    loginfo('error', "Execution failed: %s" % e, request)
                loginfo('finish', getJobfile(jobnumber), request)

        elif 'uploadmedia' in request.POST:
            jobinfo['status'] = 'uploadmedia'
        else:
            jobinfo['status'] = 'No status possible'

    return jobinfo, images
Example #17
                            'From' : from_date,
                            'To' : to,
                            'G' : g,
                            'Yrs' : yrs,
                            'W':w,
                            'L':l,
                            'W/L%' : wl,
                            'Plyfs' : plyfs,
                            'Div' : div,
                            'Conf' : conf,
                            'Champ' : champ
                           }

franchise_dict.append(franchise_dict_temp)

writeCsv(franchise_dict, csvFolder + "Team_Franchise_Info.csv")

# Status message
print 'Built file Team_Franchise_Info.csv successfully.'

#Retrieve active links data
activeTableDiv = soup.findAll('div', {'class': 'stw'})[0]
fullTableTrs = activeTableDiv.findAll('tr', {'class': "full_table"})

seasons_dict = []
names_dict = []
teamSalDict = []

#Identifying and following URLs
for t in fullTableTrs:
Example #18
                               'Lg' : lg_text,
                               'From' : from_date,
                               'To' : to,
                               'G' : g,
                               'Yrs' : yrs,
                               'W':w,
                               'L':l,
                               'W/L%' : wl,
                               'Plyfs' : plyfs,
                               'Div' : div,
                               'Conf' : conf,
                               'Champ' : champ}

    franchise_dict.append(franchise_dict_temp)

writeCsv(franchise_dict, csvFolder + "Basic_Team_Franchise_Info.csv")

# Status message
print 'Built file Basic_Team_Franchise_Info.csv successfully.'

#Retrieve active links data
activeTableDiv = soup.findAll('div', {'class': 'stw'})[0]
fullTableTrs = activeTableDiv.findAll('tr', {'class': "full_table"})

seasons_dict = []
names_dict = []

#Identifying and following URLs
for t in fullTableTrs:

     for target in t.find_all('a'):
Example #19
            nodules.append([
                int(n[lndind][0]),
                ','.join([str(int(r)) for r in n[radind]]),  # list of radiologist IDs
                ','.join([str(int(f)) for f in n[fndind]]),  # list of radiologists' finding IDs
                ind + 1,  # new finding ID
                np.mean(n[xind]),  #centroid is the average of centroids
                np.mean(n[yind]),
                np.mean(n[zind]),
                agrlvl,  # number of radiologists that annotated the finding (0 if non-nodule)
                nod,
                vol,
                tex
            ])
    if verb:
        for n in nodules:
            print(n)
    return nodules


if __name__ == "__main__":
    # Merge nodules from train set
    prefix = 'train'
    fname_gtNodulesFleischner = '{}Nodules.csv'.format(prefix)
    gtNodules = readCsv(fname_gtNodulesFleischner)
    for line in gtNodules:
        print(line)
    gtNodules = joinNodules(gtNodules)
    writeCsv('{}Nodules_gt.csv'.format(prefix), gtNodules)  #write to csv
Example #20
def prepareFiles(request, validateonly, dropdowns):
    jobinfo = {}
    images = []
    for lineno, afile in enumerate(request.FILES.getlist('imagefiles')):
        # print afile
        try:
            print "%s %s: %s %s (%s %s)" % ('id', lineno, 'name', afile.name, 'size', afile.size)
            image = get_exif(afile)
            filename, objectnumber, imagenumber = getNumber(afile.name)
            # objectCSID = getCSID(objectnumber)
            im.creator, im.creatorRefname = assignValue(im.creatorDisplayname, im.overrideCreator, image, 'Artist',
                                                        dropdowns['creators'])
            im.contributor, dummy = assignValue(im.contributor, im.overrideContributor, image, 'ImageDescription', {})
            im.rightsholder, im.rightsholderRefname = assignValue(im.rightsholderDisplayname, im.overrideRightsholder,
                                                                  image, 'RightsHolder', dropdowns['rightsholders'])
            datetimedigitized, dummy = assignValue('', 'ifblank', image, 'DateTimeDigitized', {})
            imageinfo = {'id': lineno, 'name': afile.name, 'size': afile.size,
                         'objectnumber': objectnumber,
                         'imagenumber': imagenumber,
                         # 'objectCSID': objectCSID,
                         'date': datetimedigitized,
                         'creator': im.creatorRefname,
                         'contributor': im.contributor,
                         'rightsholder': im.rightsholderRefname,
                         'creatorDisplayname': im.creator,
                         'rightsholderDisplayname': im.rightsholder,
                         'contributorDisplayname': im.contributor
            }
            if not validateonly:
                handle_uploaded_file(afile)
            images.append(imageinfo)
        except:
            if not validateonly:
                # we still upload the file, anyway...
                handle_uploaded_file(afile)
            images.append({'name': afile.name, 'size': afile.size,
                           'error': 'problem extracting image metadata, not processed'})

    if len(images) > 0:
        jobnumber = time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime())
        jobinfo['jobnumber'] = jobnumber

        if not validateonly:
            writeCsv(getJobfile(jobnumber) + '.step1.csv', images,
                     ['name', 'size', 'objectnumber', 'date', 'creator', 'contributor', 'rightsholder', 'imagenumber'])
        jobinfo['estimatedtime'] = '%8.1f' % (len(images) * 10 / 60.0)

        if 'createmedia' in request.POST:
            jobinfo['status'] = 'createmedia'
            if not validateonly:
                loginfo('start', getJobfile(jobnumber), request)
                try:
                    retcode = subprocess.call([path.join(POSTBLOBPATH, 'postblob.sh'), getJobfile(jobnumber)])
                    if retcode < 0:
                        loginfo('process', jobnumber + " Child was terminated by signal %s" % -retcode, request)
                    else:
                        loginfo('process', jobnumber + ": Child returned %s" % retcode, request)
                except OSError as e:
                    loginfo('error', "Execution failed: %s" % e, request)
                loginfo('finish', getJobfile(jobnumber), request)

        elif 'uploadmedia' in request.POST:
            jobinfo['status'] = 'uploadmedia'
        else:
            jobinfo['status'] = 'No status possible'

    return jobinfo, images
Example #21
from collections import namedtuple

from models import *
import utils

# read from database and put together data for training
apts = Sold\
        .select()\
        .join(SoldAreas)\
        .join(Area)\
        .where((Area.priceIndex != None) &
            (Sold.livingArea != None) &
            (Sold.rooms != None) &
            (Sold.floor != None) &
            (Sold.rent != None) &
            (Sold.livingArea > 0) &
            (Sold.rooms > 0) &
            (Sold.floor > 0) &
            (Sold.rent > 0) &
            (Sold.soldDate > "2015-07-01"))

# store data as csv
utils.writeCsv(apts)
Example #22
                        if i == 12:
                            if j.get_text() == "":
                                plyOffL = "NA"
                            else:
                                plyOffL = j.get_text().encode("utf-8")
                        if i == 13:
                            if j.get_text() == "":
                                plyOffWLP = "NA"
                            else:
                                plyOffWLP = j.get_text().encode("utf-8")

                    coaches_dict_temp = {
                        "Coach Name": coach_name,
                        "Lg": Lg,
                        "G": G,
                        "W": W,
                        "L": L,
                        "WLP": WLP,
                        "W_500": W_500,
                        "PlayOff-G": plyOffG,
                        "PlayOff-W": plyOffW,
                        "PlayOff-L": plyOffL,
                        "Playoff-WLP": plyOffWLP,
                    }
                    coaches_dict.append(coaches_dict_temp)

writeCsv(coaches_dict, csvFolder + "Coach_Statistics.csv")

# Status message
print "The file Coach_Statistics.csv has been built successfully"