Example #1
 def gibbs_sampling(self, predict=dict(), given=dict(), n=10000, skip=50):
     """Estimate P(predict | given) by Gibbs sampling, treating the first `skip` samples as burn-in."""
     bar = Bar('Sampling', max=n)
     nodes = list(self.node.keys())
     sample = self.random_sample(preset=given)
     count = 0
     sum = 0
     last = None
     for i in range(n):
         bar.next()
         node = None
         # Pick a non-evidence node, avoiding resampling the same node twice in a row.
         while node is None or node in given.keys() or node == last:
             node = nodes[randint(0, len(nodes) - 1)]
         last = node
         parents = self.node[node]['parents']
         if parents[0] is None:
             sample[node] = self.sample(node)
         else:
             # Resample the node conditioned on the current values of its parents
             # (kept in a separate dict so the evidence in `given` is not overwritten).
             parent_values = {key: sample[key] for key in parents}
             sample[node] = self.sample(node, given=parent_values)
         if count == skip:
             # Burn-in is over: count how often the current state agrees with `predict`.
             evidence = {key: sample[key] for key in predict.keys()}
             if not predict == evidence:
                 continue
             sum += 1
         else:
             count += 1
     bar.finish()
     return sum / (n - count)
Example #2
    def crawl(self,current=False):
        """
        The main function - crawl the league and mine some data.
        """
        logging.info('Starting crawl')
        self.driver.get(self.league_link)
        self.team_names = set([unidecode(thr.text) for thr in \
                               self.driver.find_element_by_class_name("stat-table").find_elements_by_class_name("team-link")])
        self.driver.find_element(By.XPATH, '//*[@id="sub-navigation"]').find_element(By.PARTIAL_LINK_TEXT, 'Fixtures').click()
        self.played_months = self.get_played_months()    
        self.load_previous_data(current)
        prog_bar = ChargingBar(
            'Progress of %s crawling:' % ' '.join([self.league, str(self.year)]),
            max=sum(len(self.fixtures[month])
                    for month in self.played_months[self.played_months.index(self.start_month)::-1]))
        for month in self.played_months[self.played_months.index(self.start_month)::-1]:
            for game in self.fixtures[month]:
                logging.info('Starting to parse game')
                if game: 
                    self.parse_game(game)
                prog_bar.next()
                logging.info('Finished game, moving to the next one')
            else:
                # for/else: runs once every game of the month has been parsed (the loop never breaks)
                logging.info('Finished month, saving to disk')
                self.save_month(month)
                if current:
                    pass
#                     DBHandler(args_parser.LEAGUE_NAME).update_db(self.all_teams_dict,str(self.year))
        else: #we're done - we can save to the DB now
#             if not current:
            DBHandler(args_parser.LEAGUE_NAME).insert_to_db(self.all_teams_dict,str(self.year))
        self.driver.quit()
        prog_bar.finish()
Example #3
 def rejection_sample(self, predict=dict(), given=dict(), n=10000):
     """Estimate P(predict | given) by rejection sampling: keep only samples consistent with the evidence."""
     sum = 0
     accepted = 0
     bar = Bar('Sampling', max=n)
     for i in range(n):
         bar.next()
         sample = self.compute_sample()
         # Reject the sample if it disagrees with the evidence.
         evidence = {key: sample[key] for key in given.keys()}
         if not given == evidence:
             continue
         accepted += 1
         evidence = {key: sample[key] for key in predict.keys()}
         if not predict == evidence:
             continue
         sum += 1
     bar.finish()
     # Divide by the number of accepted (evidence-consistent) samples rather than n,
     # so the result estimates the conditional rather than the joint probability.
     return sum / accepted if accepted else 0.0
Example #4
 def likelihood_weighting(self, predict=dict(), given=dict(), n=10000):
     """Weighted sampling: clamp the `predict` nodes, weight each sample by the probability of the clamped values given their parents, and return the weighted fraction of samples consistent with `given`."""
     num = den = 0
     bar = Bar('Sampling', max=n)
     for i in range(n):
         bar.next()
         sample = self.compute_sample(preset=predict)
         # The sample weight is the product of the clamped nodes' conditional probabilities.
         weight = 1.0
         for node in predict.keys():
             parents = self.node[node]['parents']
             given_pa = {key: sample[key] for key in parents}
             weight *= float(self.get_probability(node, evidence=given_pa, value=predict[node]))
         evidence = {key: sample[key] for key in given.keys()}
         if given == evidence:
             num += weight
         den += weight
     bar.finish()
     return num / den
Example #5
	# compute how many bytes of real file data to expect in the last allocated block
	nblocks = int(math.ceil(int(f['size']) / float(bsize)))
	slack = nblocks * int(bsize) - int(f['size'])
	actualbytes = int(bsize) - slack
	# read the slack space: every non-zero byte found past the real data is kept
	tmp = open(tmpfile, "rb")
	bcount = 0
	hidden = b""
	try:
		byte = tmp.read(1)
		while byte != b"":
			bcount += 1
			if bcount > actualbytes and ord(byte) != 0x0:
				hidden += byte
			byte = tmp.read(1)
	finally:
		tmp.close()
	os.remove(tmpfile)
	# use subdir to store result files
	# create a file named after the inode and write the hidden bytes
	if hidden != b"":
		if not os.path.exists(directory):
			os.makedirs(directory)
		f_out = open(directory + "/" + f["inode"], 'wb')
		try:
			f_out.write(hidden)
		finally:
			f_out.close()
	bar.next()
bar.finish()
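A minimal sketch of the slack-size arithmetic used above, with hypothetical values for the block size and file size (the real script takes both from the file-system metadata in f and bsize):

import math

# Hypothetical numbers: 4096-byte blocks and a 10000-byte file.
bsize = 4096
size = 10000

nblocks = math.ceil(size / bsize)    # 3 blocks are allocated for the file
slack = nblocks * bsize - size       # 2288 unused bytes in the last block
actualbytes = bsize - slack          # 1808 bytes of real data in the last block
print(nblocks, slack, actualbytes)   # -> 3 2288 1808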
Example #6
 def create_examples(self,year,lookback=15,current=False):
     """
     This function creates all the examples for self.league, year.
     
     The examples are created using the given lookback.
     """
     def update_all_teams_dict(res,all_teams_dict,team,first):
         for fix in sorted(res):
             if fix == 1 and res[fix] == {}:
                 all_teams_dict[team][fix] = []
                 continue
             if first:
                 all_teams_dict[team][fix] = [res[fix][k] for k in sorted(res[fix])]
             else:
                 all_teams_dict[team][fix] += [res[fix][k] for k in sorted(res[fix])]
     
     def relative_features(arr1, arr2, fn):
         # Group each feature vector by feature-name substring ("all_pos", "att_pos", "def_pos").
         combined_list_all_1 = [value for (value, key) in zip(arr1, fn) if "all_pos" in key]
         combined_list_att_1 = [value for (value, key) in zip(arr1, fn) if "att_pos" in key]
         combined_list_def_1 = [value for (value, key) in zip(arr1, fn) if "def_pos" in key]

         combined_list_all_2 = [value for (value, key) in zip(arr2, fn) if "all_pos" in key]
         combined_list_att_2 = [value for (value, key) in zip(arr2, fn) if "att_pos" in key]
         combined_list_def_2 = [value for (value, key) in zip(arr2, fn) if "def_pos" in key]

         # Fraction of positions where the first team's value beats the second's, per group.
         all_rel = [1 for (val1, val2) in zip(combined_list_all_1, combined_list_all_2) if val1 > val2]
         att_rel = [1 for (val1, val2) in zip(combined_list_att_1, combined_list_att_2) if val1 > val2]
         def_rel = [1 for (val1, val2) in zip(combined_list_def_1, combined_list_def_2) if val1 > val2]

         return float(len(all_rel))/len(combined_list_all_1), float(len(att_rel))/len(combined_list_att_1), float(len(def_rel))/len(combined_list_def_1)
     
     from features.features import Features
     temp_DB = self.temp_DB
     all_teams_names = [g['_id'] for g in temp_DB[self.league].aggregate([{"$match":{"Year":int(year)}},{"$group":{"_id":"$GName"}}])]
     all_teams_dict = {name:{} for name in all_teams_names}
     features = Features(temp_DB[self.league],year,self.league)
     features_names = []
     prog_bar = ChargingBar('Creating examples for %s-%s'%(self.league,year),max=len(all_teams_dict))
     for team in all_teams_dict:
         res_by_all, res_by_non_avg = features.create_features(team,lookback)
         if not features_names: features_names = features.features_names
         update_all_teams_dict(res_by_all, all_teams_dict, team, True)
         update_all_teams_dict(res_by_non_avg, all_teams_dict, team, False)
         prog_bar.next()
     examples = []
     tags = []
     curr_examples = []
     prog_bar.finish()
     for team in all_teams_names:
         for fix in sorted(all_teams_dict[team]):
             if fix == 1 and all_teams_dict[team][fix]==[]:
                 continue
             curr_game = temp_DB[self.league].find_one({"GName":team,"Fix":fix,"Year":int(year)})
             if curr_game is None:
                 continue
             if curr_game["HA"]=="home":
                 vs_curr_game = temp_DB[self.league].find_one({"GName":curr_game["VS"],"VS":team,"HA":"away","Year":int(year)})
                 try:
                     vs_curr_fix = vs_curr_game["Fix"]
                 except TypeError as e:
                     vs_curr_fix = fix+1
                     all_teams_dict[curr_game["VS"]][vs_curr_fix] = []
                 if all_teams_dict[curr_game["VS"]][vs_curr_fix] == []:
                     continue
                 rel_all, rel_att, rel_def = relative_features(all_teams_dict[team][fix], all_teams_dict[curr_game["VS"]][vs_curr_fix], features_names)
                 examples += [np.array(all_teams_dict[team][fix])-np.array(all_teams_dict[curr_game["VS"]][vs_curr_fix])]
                 examples[-1] = np.concatenate((examples[-1],[rel_all, rel_att, rel_def]))
                 temp_dict = {"Ex":examples[-1],"Fix":curr_game["Fix"],"Res":curr_game["Result"],"Home":team,"Away":curr_game["VS"],"League":self.league}
                 curr_examples += [temp_dict]
                 tags += [curr_game["Tag"]]
     if not current:
         return examples,tags
     else:
         return curr_examples,tags
Example #7
while i < repeat:

	cl = 0
	sT = int(time.time())
	eT = sT + testing

	while int(time.time()) < eT:
		vspBenchMakeHash()
		operations += 10
		cl += 1

	for _ in range(i-len(cc)+1):
		cc.append(None)

	cc[i] = math.ceil(cl/division)
	progressBar.next()
	i += 1

progressBar.finish()


print(RSTF + '  > Benchmark completed! Calculating results ...\n')

cc.sort()
seconds = int(time.time()) - startTime
operations = int(operations / seconds)
proceed = ' (~' + str(operations) + 'h/sec) '
bResult = INVF + ' ' + str( cc[repeat-1] ) + ' BQ ' + RSTF + proceed + '\n'

print('  > Benchmark result: ' + bResult )
input('  Press Enter to exit ... ')
Example #8
def get_transactions(session, from_date, to_date):
    """
    This method extracts the list of transactions from the given time-frame by calling the PayPal API.

    The single query cannot span across 31 days. In case the request spawns across more than 31 days, we need to query the data month-by-month and aggregate the results.

    :param session: Session to be used for the HTTP calls
    :type session: OAuth2Session object
    :return: list of transaction IDs
    :rtype: list
    """

    result = {}
    # Row 0 is the header; transaction rows start at index 1 so they do not overwrite it.
    result[0] = [
        " Date", "Time", "Name", "Status Code", "Currency", "Value",
        "To Email Address", "Transaction ID", "Custom Number", "Quantity",
        "Item Title", "Country Code"
    ]
    i = 1

    query_start = datetime.strptime(from_date, "%Y-%m-%dT%H:%M:%S")
    request_end = datetime.strptime(to_date, "%Y-%m-%dT%H:%M:%S")

    if (request_end - query_start).days > (3 * 365):
        complain("The query can span across 3 years maximum")
    if (query_start - datetime(2016, 7, 1)).days < 0:
        complain("The historical data is available only from July 2016")

    # Number of <=31-day windows the request will be split into (drives the progress bar length).
    query_count = max(1, ((request_end - query_start).days + 30) // 31)
    bar = ChargingBar('1/3 Listing trx   ',
                      max=query_count,
                      suffix='%(percent).1f%% - %(eta)ds remaining')

    while (query_start < request_end):
        # The query needs to be split into multiple-ones if it spans across more than 31 days
        if (request_end - query_start).days > 31:
            query_end = query_start + timedelta(days=31)
        else:
            query_end = request_end

        page = 1
        total_pages = 1
        # The response might come in multiple pages and each needs to be queried separately
        while (page <= total_pages):
            params = (
                ("start_date",
                 query_start.strftime("%Y-%m-%dT%H:%M:%S") + TIMEZONE_OFFSET),
                ("end_date",
                 query_end.strftime("%Y-%m-%dT%H:%M:%S") + TIMEZONE_OFFSET),
                ("fields", "all"),
                ("page_size", "500"
                 ),  # The maximum amount of transaction IDs per page is 500
                ("page", str(page)))

            response = session.get(TRX_URL, params=params)
            check_status_code(response.status_code)

            reply = json.loads(response.content)
            total_pages = int(reply['total_pages'])

            # Append the data to the resulting list
            for trx in reply['transaction_details']:
                #                print('TRX DETAILS: '+ str(trx) + '\n')
                trx_data = []

                trx_data.append(
                    extract_value(
                        trx,
                        ['transaction_info', 'transaction_initiation_date'
                         ])[:10])  # Date
                trx_data.append(
                    extract_value(
                        trx,
                        ['transaction_info', 'transaction_initiation_date'
                         ])[11:19])  # Time
                trx_data.append(
                    extract_value(
                        trx,
                        ['payer_info', 'payer_name', 'alternate_full_name'
                         ]))  # Name
                trx_data.append(
                    extract_value(trx,
                                  ['transaction_info', 'transaction_status'
                                   ]))  # Status Code
                trx_data.append(
                    extract_value(trx, [
                        'transaction_info', 'transaction_amount',
                        'currency_code'
                    ]))  # Currency
                trx_data.append(
                    extract_value(
                        trx,
                        ['transaction_info', 'transaction_amount', 'value'
                         ]))  # Value
                trx_data.append(
                    extract_value(
                        trx,
                        ['payer_info', 'email_address']))  # To Email Address
                trx_data.append(
                    extract_value(trx, ['transaction_info', 'transaction_id'
                                        ]))  # Transaction ID
                trx_data.append(
                    extract_value(
                        trx,
                        ['transaction_info', 'custom_field']))  # Custom Number
                count = 0.0
                title = ""
                for item in extract_value(trx, ['cart_info', 'item_details']):
                    count += float(extract_value(item, ['item_quantity']))
                    title += extract_value(item, ['item_name']) + "; "
                trx_data.append(int(count))  # Quantity
                trx_data.append(title)  # Item Title
                trx_data.append(
                    extract_value(trx,
                                  ['shipping_info', 'address', 'country_code'
                                   ]))  # Country code

                result[i] = trx_data
                i += 1

            page += 1

        # In case another query is required, the start_time should be 1 second after the end_time of the previous query parameter
        query_start = query_end + timedelta(seconds=1)
        bar.next()

    bar.finish()
    return result
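Since the Transaction Search query above cannot span more than 31 days, the while loop walks the requested range window by window, restarting each window one second after the previous one ended. A minimal, standard-library-only sketch of just that windowing step (date_windows is a hypothetical helper, not part of the original module):

from datetime import datetime, timedelta

def date_windows(from_date, to_date, max_days=31):
    """Yield (start, end) pairs covering [from_date, to_date] in chunks of at most max_days."""
    start = datetime.strptime(from_date, "%Y-%m-%dT%H:%M:%S")
    end = datetime.strptime(to_date, "%Y-%m-%dT%H:%M:%S")
    while start < end:
        chunk_end = min(start + timedelta(days=max_days), end)
        yield start, chunk_end
        # The next window starts one second after the previous one ended,
        # mirroring how query_start is advanced in get_transactions above.
        start = chunk_end + timedelta(seconds=1)

# Example: a 70-day range is covered by three windows.
for s, e in date_windows("2021-01-01T00:00:00", "2021-03-12T00:00:00"):
    print(s, "->", e)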
Example #9
"""

from console import clear

# ————————————
# Progress bar
# ____________

from progress.bar import ChargingBar as Bar
from time import sleep

bar = Bar("Progress Bar", max=20)
for i in range(20):
    sleep(0.1)
    bar.next()
bar.finish()

clear()

# ———————————————
# Carriage return
# _______________

for i in range(20):
    print(f"\r{i+1}", end="")
    sleep(0.1)

clear()

# ——————
# Images
Example #10
def slika(width, height, max_depth, camera, light, objects):
    #width = 300*2
    #height = 200*2

    #max_depth = 3
    '''
    camera = np.array([0, 0, 1])
    '''
    ratio = float(width) / height
    screen = (-1, 1 / ratio, 1, -1 / ratio)  # left, top, right, bottom
    '''
    light = { 'position': np.array([5, 5, 5]), 'ambient': np.array([1, 1, 1]), 'diffuse': np.array([1, 1, 1]), 'specular': np.array([1, 1, 1]) }

    objects = [
        #{ 'type': 'ball', 'center': np.array([-0.2, 0, -1]), 'radius': 0.7, 'ambient': np.array([0.1, 0, 0]), 'diffuse': np.array([0.7, 0, 0]), 'specular': np.array([1, 1, 1]), 'shininess': 100, 'reflection': 0.5, 'n2': 1.52 },
        #{ 'type': 'ball', 'center': np.array([0.1, -0.3, 0]), 'radius': 0.1, 'ambient': np.array([0.1, 0, 0.1]), 'diffuse': np.array([0.7, 0, 0.7]), 'specular': np.array([1, 1, 1]), 'shininess': 100, 'reflection': 0.5, 'n2': 1.52},
        {'type': 'cylinder', 'center': np.array([-0.2, 0.2, -0.4]), 'radius': 0.5, 'height': 0.6, 'direction': np.array([0.2, 1, 0.2]), 'ambient': np.array([0.1, 0.1, 0.1]), 'diffuse': np.array([0.7, 0, 0.7]), 'specular': np.array([1, 1, 1]), 'shininess': 100, 'reflection': 0.5, 'n2': 1.52},
        #{ 'type': 'ball', 'center': np.array([-0.3, 0, 0]), 'radius': 0.15, 'ambient': np.array([0, 0.1, 0]), 'diffuse': np.array([0, 0.6, 0]), 'specular': np.array([1, 1, 1]), 'shininess': 100, 'reflection': 0.5, 'n2': 1.52 },
        { 'type': 'plane', 'normal': np.array([0, 1, 0]), 'point': np.array([0, -1, 0]), 'ambient': np.array([0.1, 0.1, 0.1]), 'diffuse': np.array([0.6, 0.6, 0.6]), 'specular': np.array([1, 1, 1]), 'shininess': 100, 'reflection': 0.5, 'n2': 1},
        { 'type': 'cone', 'center': np.array([-0.3, 0.2, -0.8]), 'height': 0.5, 'direction': np.array([0,-2,1]), 'ambient': np.array([0.1, 0, 0]), 'diffuse': np.array([0.7, 0, 0]), 'specular': np.array([1, 1, 1]), 'shininess': 100, 'reflection': 0.5, 'n2': 1.52 }
    ]
    '''

    image = np.zeros((height, width, 3))

    bar = ChargingBar('Processing', max=height)
    for i, y in enumerate(np.linspace(screen[1], screen[3], height)):
        for j, x in enumerate(np.linspace(screen[0], screen[2], width)):
            # screen is on origin
            pixel = np.array([x, y, 0])
            origin = camera
            direction = normalize(pixel - origin)

            color = np.zeros((3))
            reflection = 1

            for k in range(max_depth):
                # check for intersections
                nearest_object, min_distance = nearest_intersected_object(
                    objects, origin, direction)
                if nearest_object is None:
                    break

                intersection = origin + min_distance * direction
                if nearest_object['type'] == 'ball':
                    normal_to_surface = normalize(intersection -
                                                  nearest_object['center'])
                elif nearest_object['type'] == 'cylinder':
                    #normal_to_surface = normalize(intersection - nearest_object['center'])
                    v1 = intersection - nearest_object['center']
                    normal_to_surface = normalize(
                        v1 -
                        (np.dot(v1, normalize(nearest_object['direction']))) *
                        normalize(nearest_object['direction']))
                elif nearest_object['type'] == 'plane':
                    normal_to_surface = normalize(nearest_object['normal'])
                elif nearest_object['type'] == 'cone':
                    # since the cone's height equals its maximum radius, things simplify:
                    # the surface normal sits at a 45-degree angle to the axis
                    h1 = normalize(nearest_object['direction'])
                    v1 = normalize(intersection - nearest_object['center'])
                    x1 = normalize(
                        np.sqrt(2) * nearest_object['height'] * v1 -
                        nearest_object['height'] * h1)
                    normal_to_surface = (np.sqrt(2) / 2) * x1 - (np.sqrt(2) /
                                                                 2) * h1

                shifted_point = intersection + 1e-5 * normal_to_surface
                intersection_to_light = normalize(light['position'] -
                                                  shifted_point)

                _, min_distance = nearest_intersected_object(
                    objects, shifted_point, intersection_to_light)
                intersection_to_light_distance = np.linalg.norm(
                    light['position'] - intersection)
                is_shadowed = min_distance < intersection_to_light_distance

                if is_shadowed:
                    break

                illumination = np.zeros((3))

                # ambient
                illumination += nearest_object['ambient'] * light['ambient']

                # diffuse
                illumination += nearest_object[
                    'diffuse'] * light['diffuse'] * np.dot(
                        intersection_to_light, normal_to_surface)

                # specular
                intersection_to_camera = normalize(camera - intersection)
                H = normalize(intersection_to_light + intersection_to_camera)
                illumination += nearest_object['specular'] * light[
                    'specular'] * np.dot(normal_to_surface,
                                         H)**(nearest_object['shininess'] / 4)

                # reflection
                color += reflection * illumination
                reflection *= nearest_object['reflection']

                origin = shifted_point
                direction = reflected(direction, normal_to_surface, 1.,
                                      nearest_object['n2'])

            image[i, j] = np.clip(color, 0, 1)
        yield image
        # in the calling program: current_image = next(slika(...))
        bar.next()
    bar.finish()
    #plt.imshow(image)
    #plt.show()
    plt.imsave('imageOriginal.png', image)