Example No. 1
def draw_predictions(ax, outputs):
    """
    Draws predicted bounding boxes.
    Params:
        ax: axis where boxes will be drawn.
        outputs (dict): model outputs when example has been fed in.
    """
    for output in outputs:
        boxes = output['box3d_lidar'].cpu().detach().numpy()
        confidences = output['scores'].cpu().detach().numpy()
        classes = output['label_preds'].cpu().detach().numpy()
        class_txts = at(class_to_name, *classes)
        for k, box3d in enumerate(boxes):
            x, y, z, w, l, h, r = box3d
            drawBoundingBoxes(ax,
                              x,
                              y,
                              z,
                              w,
                              l,
                              h,
                              r,
                              col='green',
                              linewidth=0.8)
            ax.text(x + (w / 2.0) + 1,
                    y + (l / 2.0) + 2,
                    z + h,
                    f"{class_txts[k]}<{confidences[k]:.2f}>",
                    color=(0.4, 0.95, 0.3),
                    fontsize=8.0,
                    rotation=math.degrees(r))
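In the snippet above, at(class_to_name, *classes) turns the numeric class predictions into readable labels in a single call. A minimal sketch of that lookup, assuming class_to_name is simply a sequence indexed by class id (the real mapping is not shown in this excerpt):

from pydash import at

class_to_name = ['car', 'pedestrian', 'cyclist']   # assumed mapping, for illustration only
classes = [2, 0, 0]                                # assumed label_preds
print(at(class_to_name, *classes))                 # ['cyclist', 'car', 'car']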
Example No. 2
 def test_export_computers_place(self):
     computers_id = self.get_fixtures_computers()
     place = self.get_fixture(self.PLACES, 'place')
     place['devices'] = [computers_id[0]]
     self.post_and_check(self.PLACES, place)
     db, *_ = self.app.config['DATABASES']
     url = '/{}/export/devices?{}'.format(
         db,
         urlencode({
             'ids': computers_id,
             'groupBy': 'Actual place'
         }, True))
     headers = Headers()
     headers.add('Authorization', 'Basic ' + self.token)
     headers.add('Accept',
                 ','.join([_XLSX_MIME, FILE_TYPE_MIME_TABLE['ods']]))
     response = self.test_client.get(url, headers=headers)
     self.assert200(response.status_code)
     assert_that(response.content_type).is_equal_to(_XLSX_MIME)
     book = pyexcel.get_book(file_type='xlsx', file_content=response.data)
     book_dict = book.to_dict()
     assert_that(book_dict).contains(place['label'])
     first_computer, _ = self.get('devices', '', computers_id[0])
     assert_that(book_dict[place['label']][1]).contains(
         *at(first_computer, 'serialNumber', 'model', 'manufacturer'))
Example No. 3
def prepareArticles(documentsIds=reuters.fileids()):
    """
    Builds a dict per article and returns [id, title, text] triples for the
    medium-sized ones (1000-2000 characters and more than 3 sentences).
    @param documentsIds: a list of document ids - defaults to the Reuters corpus ids
    @rtype: list
    """
    allDocs = []
    for docId in documentsIds:
        raw = reuters.raw(fileids=docId)
        title = getDocTitle(raw)
        doc = {
            'title': title,
            'size': len(raw),
            'text': removeTitleFromText(raw.replace('\n', ''), title),
            'id': docId
        }
        allDocs.append(doc)
    # keep only medium-sized articles, ordered by size
    sortedDocs = sorted(allDocs, key=lambda x: x['size'])
    suitableDocs = [doc for doc in sortedDocs if 1000 <= doc['size'] <= 2000]
    # return [id, title, text] for articles with more than 3 sentences
    return [
        at(doc, 'id', 'title', 'text') for doc in suitableDocs
        if len(nltk.sent_tokenize(doc['text'])) > 3
    ]
Example No. 4
 def validate(self, data):
     errors = dict()
     url, file = at(data, 'url', 'file')
     if not url and not file:
         errors.update({'common': _('Please provide url or HTML file!')})
     elif url and file:
         errors.update({'common': _('Please choose only one!')})
     if errors:
         raise serializers.ValidationError(errors)
     return data
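The check above works because at() returns None for keys that are missing from data, so both names always unpack cleanly. A minimal sketch of that behaviour with a made-up payload:

from pydash import at

payload = {'url': 'https://example.com/page.html'}   # hypothetical validated data
url, file = at(payload, 'url', 'file')               # absent keys come back as None
print(url, file)                                     # https://example.com/page.html None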
Example No. 5
    def post(self, request):
        try:
            body = json.loads(request.body)
        except json.decoder.JSONDecodeError:
            return HttpResponse("Bad request", status=400)
        username, password = _.at(body, "username", "password")

        user = authenticate(request, username=username, password=password)
        if user is not None:
            response = HttpResponse("OK!", status=200)
            auth_token = auth_manager.add_token(user)
            response.set_cookie("auth_token", auth_token)
            return response
        else:
            return HttpResponse("Bad credentials", status=401)
Example No. 6
    def post(self, request):
        form_serializer = PdfFormSerializer(data=request.data)
        form_serializer.is_valid(raise_exception=True)

        file, url = at(form_serializer.validated_data, 'file', 'url')
        file_name = generate_filename(file or url,
                                      TYPE_FILE if file else TYPE_URL)
        file_path = None

        if file:
            file_path = UPLOAD_HTML_DIR + file_name
            default_storage.save(file_path, file)

        task = generate_pdf.delay(file_path or url,
                                  TYPE_FILE if file else TYPE_URL, file_name)
        return Response({'id': task.task_id})
Example No. 7
def back_most_probable_word(k, org_k, curr_state):
    # first step: the word can only start from 'b', so only transitions from 'b' to any letter are considered
    if k == 1:
        ps_op_prob = []
        for ns_idx in range(len(curr_state['prob'])):
            ps_op_prob.append(prob_tbl[let_to_idx['b'], ns_idx] *
                              curr_state['prob'][ns_idx])
        max_idx = np.argmax(ps_op_prob)
        calc_str = 'b' + curr_state['str'][max_idx]
        calc_prob = ps_op_prob[max_idx]

        final_ans = {'str': calc_str, 'prob': calc_prob}
        return final_ans

    # last step: any letter can transition to the end-of-word marker ('-')
    if k == org_k:
        curr_prob = prob_tbl[:-1, let_to_idx['-']]

        curr_str = at(idx_to_let, 0, 1, 2)  # TODO
        curr_state = {'str': curr_str, 'prob': curr_prob}

        return back_most_probable_word(k - 1, org_k, curr_state)

    # regular flow: not the first/last letter
    calc_str = []
    calc_prob = []

    for ps_idx in range(len(prob_tbl) - 1):
        ps_op_prob = []
        for ns_idx in range(len(curr_state['prob'])):
            ps_op_prob.append(prob_tbl[ps_idx, ns_idx] *
                              curr_state['prob'][ns_idx])
        max_idx = np.argmax(ps_op_prob)
        calc_str.append(idx_to_let[ps_idx] + curr_state['str'][max_idx])
        calc_prob.append(ps_op_prob[max_idx])

    curr_state = {'str': calc_str, 'prob': calc_prob}

    return back_most_probable_word(k - 1, org_k, curr_state)
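The # TODO line above selects entries from idx_to_let by position. at() accepts integer positions as well as string keys; a minimal sketch with a made-up three-symbol alphabet, assuming idx_to_let is a list:

from pydash import at

idx_to_let = ['a', 'b', '-']        # hypothetical index-to-letter table
print(at(idx_to_let, 0, 1, 2))      # ['a', 'b', '-']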
Example No. 8
from prometheus_client.parser import text_string_to_metric_families
from pydash import at
import requests

metrics = requests.get(
    "https://api.usa-cluster.cto.logi.com/metrics",
    auth=('admin', 'HbpsXEiyoE0GHbQMk3ZcXoXHjWMRhEeM'),
    verify=False).text  # the parser expects text, not bytes

for family in text_string_to_metric_families(metrics):
    for sample in family.samples:
        if sample[0] == 'apiserver_request_latencies_summary':
            # sample[1] is the label dict; pick out the labels of interest
            labels = at(sample[1], 'resource', 'scope', 'subresource', 'verb', 'quantile')
            print(labels)

Example No. 9
# loop.close()

# Doing the below throws an error currently
# RecursionError: maximum recursion depth exceeded

funcs = {
    'create_customer': lambda: do_async([ create_customer() ]),
    'update_customer': lambda: update_customer(),
    'delete_customer': lambda: delete_customer(),
    'read_customer': lambda: read_customer()
}

# After this rebinding, each lambda looks its target up by name at call time
# and ends up calling itself, which is the RecursionError mentioned above.
create_customer, update_customer, read_customer, delete_customer = at(
    funcs, 'create_customer', 'update_customer', 'read_customer',
    'delete_customer')

# create_customer()



# -----------------------------------------------

# Asyncio basic example

def run_until_complete():

    async def say(what, when):
Example No. 10
current_total_size = 0
total_size = []
block_info = []

# parameters for iteration printing
digits = len(str(blockchain_length - 1))
delete = "\b" * digits

start_time = time.time()

for i in range(blockchain_length + 1):
    print("{0}{1:{2}}".format(delete, i, digits), end="")
    sys.stdout.flush()
    if i == 0:
        current_block_hash = genesis_block_hash
    else:
        current_block_hash = block_info[0]
    # block_info[0]: hash of the next block, block_info[1]: size of the current block
    block_info = at(rpc_connection.getblock(current_block_hash), 'nextblockhash',
                    'size')
    current_total_size += block_info[1]
    total_size.append(current_total_size)

x = np.arange(0, blockchain_length + 1, 1)
plt.plot(x, total_size)
plt.xlabel('block_number')
plt.ylabel('blockchain_size')
print("\nThe program took", time.time() - start_time, "seconds to run")
plt.show()
Example No. 11
f = []          # flattened output values
b = []          # request targets derived from the csv file names
c = 1           # page counter
s = glob.glob('*.csv')
for z in s:
    b.append(z.replace('_', '').replace('.csv', ''))

for y in b:
    while True:

        url = 'http://{}&page={}'.format(y, c)
        print(url)
        response = requests.get(url)
        if response.status_code == 200:
            c += 1
            tenants = at(response.json(), 'page')[0]['tenants']

            for u in tenants:

                for i in u.values():
                    if isinstance(i, list):
                        for j in i:
                            if isinstance(j, dict):
                                for t in j.values():
                                    if isinstance(t, list):
                                        f.append('\n')
                                    else:
                                        f.append(t)
                            else:
                                f.append(j)
                    else:
Example No. 12
def test_at(case, expected):
    assert _.at(*case) == expected
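case and expected are supplied by fixtures that are not part of this excerpt. A hypothetical pytest parametrization that would drive the assertion looks like this (the concrete cases below are made up):

import pytest
import pydash as _


@pytest.mark.parametrize('case,expected', [
    (({'a': 1, 'b': 2}, 'a', 'b'), [1, 2]),   # dict keys
    (([10, 20, 30], 0, 2), [10, 30]),         # list indices
])
def test_at(case, expected):
    assert _.at(*case) == expected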
Example No. 13
        except Exception as e:
            print(e)
            return
        else:
            return alls_dic

def complete_updt(obj):
    try:
        db = psycopg2.connect(host="10.0.10.236", port="5432", database="gwacyw",
                              user="******", password="******")
    except psycopg2.Error:
        return
    else:
        try:
            cur = db.cursor()
            time_now = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
            sql = ("update pd_log_current set obs_stag='complete', obj_comp_time='%s' "
                   "where obj_id='%s' and obs_stag='sent'" % (time_now, obj))
            cur.execute(sql)
            db.commit()  # persist the update; psycopg2 does not autocommit
            cur.close()
            db.close()
        except Exception as e:
            print(e)
            return

if __name__ == "__main__":
    obj = sys.argv[1]
    obj_infs = get_obj_infs(obj)
    obj_name, ra, dec, filters, expdurs, frmcnts, runs, run_delay, filter_delay, priority = at(
        obj_infs, 'obj_name', 'objra', 'objdec', 'filter', 'expdur', 'frmcnt',
        'run_name', 'delay', 'note', 'priority')
    check_status(obj_name, ra, dec, filters, expdurs, frmcnts, runs, run_delay, filter_delay, priority, last_run=0)
    complete_updt(obj)

Example No. 14
for p in dp:
    if p in pdict:
        value = pdict[p]
t1 = time.time()
print("conventional: ", t1-t0)

t0 = time.time()
to_look_up = []
attrs = {}
for p in dp:
    if p in pdict:
        to_look_up.append(p)
values = itemgetter(*to_look_up)(pdict)

t1 = time.time()
print("itemgetter: ", t1-t0)
t0 = time.time()
values = at(pdict, *dp)
t1 = time.time()
print("pydash: ", t1-t0)









# end
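pdict, dp and the initial t0 are defined outside this excerpt. A self-contained sketch of the same comparison with made-up data; note that at(pdict, *dp) returns None for keys missing from pdict, while the itemgetter variant only looks up keys that are actually present:

import time
from operator import itemgetter
from pydash import at

pdict = {f"key{i}": i for i in range(100_000)}          # made-up lookup table
dp = [f"key{i}" for i in range(0, 200_000, 2)]          # half of these keys are missing

t0 = time.time()
present = [p for p in dp if p in pdict]
values_ig = itemgetter(*present)(pdict)
print("itemgetter:", time.time() - t0)

t0 = time.time()
values_at = at(pdict, *dp)                              # missing keys come back as None
print("pydash:    ", time.time() - t0)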
Example No. 15
### METHOD 1: DICT WITH THE MAP() FUNCTION
dict(map(lambda k: (k, d[k]), selected))

### METHOD 2: DICT WITH THE FILTER() FUNCTION
dict(filter(lambda i: i[0] in selected, d.items()))

### METHOD 3: DEFINE WITH THE ZIP() FUNCTION

dict(zip(selected, [d[k] for k in selected]))

# ITEMGETTER() FUNCTION WITH THE UNPACK OPERATOR (*)
from operator import itemgetter
dict(zip(selected, itemgetter(*selected)(d)))

# AT() FUNCTION WITH THE UNPACK OPERATOR (*)
from pydash import at
dict(zip(selected, at(d, *selected)))

### APPLY ABOVE LOGIC TO THE WHOLE LIST OF DICTIONARIES
### WITH THE MAP FUNCTION
map(lambda d: {k: d[k] for k in selected}, l)

### ALTERNATIVELY, WITH THE LIST COMPREHENSION
[(lambda x: {k: x[k] for k in selected})(d) for d in l]


### OR THE PARALLEL POOL.MAP() FUNCTION
# ALWAYS DEFINE THE FUNCTION FIRST
def sel(d):
    return ({k: d[k] for k in selected})
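The excerpt breaks off right after defining sel() for the parallel variant. A minimal sketch of feeding that function to multiprocessing.Pool.map(), with made-up selected and l (the original data is not shown):

from multiprocessing import Pool

selected = ['id', 'name']
l = [{'id': 1, 'name': 'a', 'extra': 'x'},
     {'id': 2, 'name': 'b', 'extra': 'y'}]

def sel(d):
    return {k: d[k] for k in selected}

if __name__ == '__main__':
    with Pool() as pool:
        print(pool.map(sel, l))   # [{'id': 1, 'name': 'a'}, {'id': 2, 'name': 'b'}]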