Example No. 1
    def process_and_save(self, news):
        news_object = build_news_object(news, SOURCE_NAME)
        if is_valid(news_object):
            news_object.link = BASE_URL + str(news_object.link)
            self.do_save(news_object)
Example No. 2
    def __init__(self, from_addr, outputs, privKey, fee=None,
                 change_addr=None):
        if not change_addr:
            change_addr = from_addr
        if fee is None:
            fee = 100
        for address, value in outputs:
            assert is_valid(address)

        amount = sum(map(lambda x: x[1], outputs))
        inputs = self.getInputs(from_addr)
        total = sum(map(lambda x: x[1], inputs))
        change_amount = total - (amount + fee)

        print('Change:', change_amount)
        print('Total:', total)
        print('Amount:', amount)

        if change_amount < 0:
            raise TransactionError('Not enough funds, or the transaction'
                                   ' uses inputs that need to be confirmed')

        if amount < 1000000 and fee < 50000:
            raise TransactionError("Not enought fee :"
                                   " a fee of 0.0005 is required for"
                                   " small transactions")

        outputs.append((change_addr, change_amount))

        self.tx = self.signed_tx(inputs, outputs, privKey)

        if not self.pushTx():
            raise TransactionError("An error occur while sending transaction")
        else:
            raise TransactionSubmitted("Transaction successfully transmitted")
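Both this constructor and the near-identical one in Example No. 16 assert `is_valid(address)` on every output. The helper isn't shown; for legacy Bitcoin addresses this is typically a Base58Check test. A self-contained sketch under that assumption (the fixed 25-byte layout covers P2PKH/P2SH only; this is illustrative, not the wallet's actual code):

import hashlib

B58_ALPHABET = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'

def is_valid(address):
    # Decode Base58 into a big-endian integer, rejecting foreign characters
    n = 0
    for ch in address:
        if ch not in B58_ALPHABET:
            return False
        n = n * 58 + B58_ALPHABET.index(ch)
    try:
        raw = n.to_bytes(25, 'big')  # version byte + 20-byte hash + 4-byte checksum
    except OverflowError:
        return False
    payload, checksum = raw[:-4], raw[-4:]
    # Base58Check: checksum is the first 4 bytes of a double SHA-256 of the payload
    return hashlib.sha256(hashlib.sha256(payload).digest()).digest()[:4] == checksum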
Example No. 3
def validate():
    """ Uploads a file to a draft area then tests if it is valid.
        Expects:
            index_view_slug:   unique name of the tif file to look up (string)
            file:              in request.files for upload
    """
    index_view_slug = request.form.get('index_view_slug')

    if not index_view_slug or 'file' not in request.files:
        abort(400)

    if index_view_slug.endswith('_raster'):
        index_view_slug = index_view_slug[:-7]

    draft_file = os.path.join(conf.DRAFT_DIR,
                              '{}.tif'.format(index_view_slug))
    request.files['file'].save(draft_file)
    result = {'errors': []}
    try:
        valid = is_valid(
            index_view_slug, base_path=conf.DRAFT_DIR
        )
        result = {
            'md5': md5sum(draft_file),
            'valid': valid
        }
        os.remove(draft_file)
    except NotImplementedError as e:
        result['errors'].append(str(e))

    return jsonify(result)
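The `md5sum` helper used for the response isn't part of the excerpt; a minimal sketch, assuming it just returns the hex MD5 digest of the file read in chunks:

import hashlib

def md5sum(path, chunk_size=8192):
    # Hash the file incrementally so large tifs don't load into memory at once
    digest = hashlib.md5()
    with open(path, 'rb') as fh:
        for chunk in iter(lambda: fh.read(chunk_size), b''):
            digest.update(chunk)
    return digest.hexdigest()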
Example No. 4
def publish():
    """ Uploads a file to a draft area then moves it to data directory if valid.
        Expects:
            index_view_slug:   unique name of the tif file to look up (string)
            file:              in request.files for upload
    """

    index_view_slug = request.form.get('index_view_slug')

    if not index_view_slug or 'file' not in request.files:
        abort(400)

    if index_view_slug.endswith('_raster'):
        index_view_slug = index_view_slug[:-7]

    draft_file = os.path.join(conf.DRAFT_DIR,
                              '{}.tif'.format(index_view_slug))
    request.files['file'].save(draft_file)
    result = {'errors': []}
    try:
        result['valid'] = is_valid(index_view_slug, base_path=conf.DRAFT_DIR)
        if result['valid']:
            live_file = make_live(draft_file)
            if os.path.exists(live_file):
                result = {
                    'md5': md5sum(live_file),
                }

    except NotImplementedError as e:
        result['errors'].append(str(e))

    return jsonify(result)
Example No. 5
def bands_files_are_valid(img, bands, api, directory):
    """
    Check if all bands images files are valid.
    """
    filenames = ['{}_band_{}.tif'.format(img['filename'], b) for b in bands]
    paths = [os.path.join(directory, f) for f in filenames]
    return all(utils.is_valid(p) for p in paths)
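Here `utils.is_valid(p)` takes a single path and decides whether a band's GeoTIFF downloaded correctly. The implementation isn't shown; one plausible sketch uses GDAL, whose `Open` returns `None` for unreadable rasters (the exact criteria are an assumption):

import os

from osgeo import gdal

def is_valid(path):
    # Hypothetical check: the file exists and GDAL can open it as a raster
    if not os.path.isfile(path):
        return False
    dataset = gdal.Open(path)  # None when the file is missing, truncated, or not a raster
    return dataset is not None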
Example No. 6
def log_prob_to_change(log_prob_1_given_0,
                       log_prob_0_given_0,
                       multiple,
                       hard_cutoff=None):
    '''NLL to change, scaled from 0 to 1'''
    # Clamp rare -infs to the minimum non-inf value
    log_prob_1_given_0 = clamp_infs(log_prob_1_given_0)
    log_prob_0_given_0 = clamp_infs(log_prob_0_given_0)
    if hard_cutoff is None:
        # Get statistics of 0 given 0 for comparison
        base_mean = log_prob_0_given_0.mean(dim=-1).unsqueeze(-1)
        base_std = log_prob_0_given_0.std(dim=-1).unsqueeze(-1)
        # Minimum change criterion (all values smaller than base_mean by more than multiple*base_std)
        changed_mask = log_prob_1_given_0 < base_mean - multiple * base_std

    else:
        changed_mask = log_prob_1_given_0 < hard_cutoff
    max_change = log_prob_1_given_0.max(dim=-1)[0].unsqueeze(-1)
    min_change = log_prob_1_given_0.min(dim=-1)[0].unsqueeze(-1)
    log_prob_1_given_0 = 1 - (log_prob_1_given_0 - min_change) / (max_change -
                                                                  min_change)

    log_prob_1_given_0[~changed_mask] = 0.0

    assert is_valid(log_prob_1_given_0)
    return log_prob_1_given_0
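Neither `clamp_infs` nor this tensor-flavoured `is_valid` is included in the excerpt. A minimal sketch of both, assuming `is_valid` simply means "every entry is finite" and that clamping replaces infinities with the most extreme finite values present:

import torch

def clamp_infs(t):
    # Replace rare +/-inf entries with the extreme finite values in the tensor
    if not torch.isfinite(t).all():
        finite = t[torch.isfinite(t)]
        t = t.clone()
        t[t == float('-inf')] = finite.min()
        t[t == float('inf')] = finite.max()
    return t

def is_valid(t):
    # Hypothetical check: no NaNs or infs anywhere in the tensor
    return bool(torch.isfinite(t).all())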
Example No. 7
def bands_files_are_valid(img, bands, search_api, directory):
    """
    Check if all bands images files are valid.
    """
    name = filename_from_metadata_dict(img, search_api)
    filenames = ['{}_band_{}.tif'.format(name, b) for b in bands]
    paths = [os.path.join(directory, f) for f in filenames]
    return all(utils.is_valid(p) for p in paths)
Example No. 8
async def oauth_redirect(request: Request, path: str, state: str = ""):
    target_domain, _, _ = state.partition("|")
    path = f"/{path}"
    if not is_valid(target_domain, path):
        log.info("unauthorized domain=%s path=%s", target_domain, path)
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST)

    full_url = f"https://{target_domain}{path}"
    return responses.RedirectResponse(full_url, status_code=302)
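The `is_valid(target_domain, path)` guard is what keeps this handler from being an open redirect, but its body isn't shown. A plausible allowlist sketch (the domain set and path rules are assumptions):

ALLOWED_DOMAINS = {"app.example.com", "api.example.com"}  # hypothetical allowlist

def is_valid(domain: str, path: str) -> bool:
    # Hypothetical check: redirect only to known hosts, with a rooted, non-escaping path
    return domain in ALLOWED_DOMAINS and path.startswith("/") and ".." not in path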
Example No. 9
def main():
    '''
    Launch method
    '''
    parse_ssv()
    while True:
        number = get_input('Please enter a credit card number:')
        number = regex_check_for_errors(number)
        if utils.is_valid(number):
            print_credit_card_results(number)
Example No. 10
def fetch(args):
    print('Welcome to Twitter Spell Checking: Fetching!')
    CONFIG = ConfigParser.ConfigParser()
    CONFIG.read(args.config)

    settings = items_to_dict(CONFIG.items('twitter'))
    config = items_to_dict(CONFIG.items('namespace:%s' % args.namespace))
    api = twitter.Api(consumer_key=settings['consumer_key'], consumer_secret=settings['consumer_secret'], access_token_key=settings['access_token'], access_token_secret=settings['access_token_secret'])

    accounts = [account.replace(' ', '') for account in config['accounts'].split(',')]
    max_tweets_file = os.path.join(os.path.dirname(config['files']), 'max_tweets_%s.txt' % args.namespace)

    def save_max_tweets():
        with open(max_tweets_file, 'w') as fh:
            fh.write(json.dumps(max_tweets))

    if os.path.exists(max_tweets_file):
        with open(max_tweets_file) as fh:
            max_tweets = json.loads(fh.read())
    else:
        max_tweets = dict()

    print(max_tweets_file)
    f = open(os.path.join(config['files'], 'tweets_%s.txt' % args.namespace), 'a')
    for account in accounts:
        if account in max_tweets and max_tweets[account] > 0:
            retrieving = "new"
        else:
            retrieving = "old"
            page = 0
        while True:
            if retrieving == "new":
                print('process %s since id %s' % (account, max_tweets[account]))
                try:
                    tweets = api.GetUserTimeline(account, count=200, include_rts=False, since_id=max_tweets[account])
                except twitter.TwitterError as e:
                    print('error: %s' % str(e))
                    tweets = []
            else:
                print('process %s from zero, page %s' % (account, page))
                try:
                    tweets = api.GetUserTimeline(account, count=200, include_rts=False, page=page)
                except twitter.TwitterError as e:
                    print('error: %s' % str(e))
                    tweets = []
            if tweets:
                for s in tweets:
                    if is_valid(s, account):
                        f.write(clean(s.text).lower().encode('UTF-8') + '\n')
                        if account not in max_tweets or s.id > max_tweets[account]:
                            max_tweets[account] = s.id
                if retrieving == "old":
                    page += 1
                save_max_tweets()
            else:
                print('no more tweets for %s' % account)
                break
Example No. 11
    def change_program_properties(self):
        def is_valid(mystring, mybeg, myend):
            if not utils.is_int(mystring):
                return False
            myint = int(mystring)
            return mybeg <= myint <= myend

        # -----------------------------------------
        print("Entering procedure change_program_properties")
        voice_properties = [
            "change rate", "change volume", "change voice", "quit"
        ]
        for count, elem in enumerate(voice_properties):
            print("{}) {}".format(count + 1, elem))
        user_input = input("> ").lower().strip()
        while not utils.is_valid(user_input, voice_properties):
            user_input = input("> ").lower().strip()
            if user_input == "quit": sys.exit()
        print("You chose: {}".format(user_input))
        user_input = int(user_input)
        user_choice = voice_properties[user_input - 1]
        print(user_choice)
        # -----------------------------------------
        if user_choice == "change rate":
            print(
                "The default for 'rate' is 200. What would you like to change it to? (1 to 400)"
            )
            user_input = input("> ").lower().strip()
            while not is_valid(user_input, mybeg=1, myend=400):
                user_input = input("> ").lower().strip()
            self.engine.setProperty('rate', int(user_input))
            self.speak()
        elif user_choice == "change volume":
            print(
                "The default for 'volume' is 1. What would you like to change it to? (0 to 3)"
            )
            user_input = input("> ").lower().strip()
            while not is_valid(user_input, 0, 3):
                user_input = input("> ").lower().strip()
            self.engine.setProperty('volume', int(user_input))
            self.speak()
        elif user_choice == "change voice":
            print("You are about to change the voice.")
            # print("Would you like more information about the available voices? (y/n)")
            # user_input = ""
            # while not user_input in ["y", "yes", "n", "no"]:
            #     user_input = input("> ").lower().strip()
            # if user_input in ["y", "yes"]:
            self.select_voice()
        elif user_choice == "quit":
            raise NotImplementedError
        else:
            raise ValueError("Error!")
Example No. 12
 def test_is_valid(self):
     self.assertTrue(utils.is_valid('5558397375275489'))
     self.assertTrue(utils.is_valid('4573055613536303098'))
     self.assertFalse(utils.is_valid('abc'))
     self.assertFalse(utils.is_valid(''))
     self.assertFalse(utils.is_valid('!'))
     self.assertFalse(utils.is_valid('abc123'))
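These assertions describe a credit-card checksum; the `utils.is_valid` under test isn't included. A sketch of the standard Luhn check that would satisfy this example's cases (other projects in this collection clearly use different rules, e.g. Example No. 26 accepts dash-separated input, so treat this as illustrative):

def is_valid(number):
    # Luhn checksum: double every second digit from the right, then sum mod 10
    digits = str(number)
    if not digits.isdigit():
        return False
    total = 0
    for i, ch in enumerate(reversed(digits)):
        d = int(ch)
        if i % 2 == 1:
            d *= 2
            if d > 9:
                d -= 9
        total += d
    return total % 10 == 0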
Example No. 13
def new_url():
    """
    Create short url.
    """
    if request.method == 'POST':
        original_url = request.data.get('url')
        if not original_url:
            return Response('No url to shorten was provided', 401)
        if not is_valid(original_url):
            return Response('No valid url was provided', 401)

        short_url = shortify(original_url)
        db.store(short_url, original_url)
        base = request.url

        return {'shortened_url': base + short_url}, status.HTTP_201_CREATED
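Here `is_valid` screens the submitted URL before it is shortened and stored; the helper isn't shown. A minimal standard-library sketch (the accepted schemes are an assumption):

from urllib.parse import urlparse

def is_valid(url):
    # Hypothetical check: parseable URL with an http(s) scheme and a host
    try:
        parts = urlparse(url)
    except ValueError:
        return False
    return parts.scheme in ('http', 'https') and bool(parts.netloc)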
Example No. 14
 def do_save(self, news_object, tags=None, category=None):
     if tags is not None:
         news_object.tags = tags
     if category is not None:
         news_object.category = category
     try:
         if is_valid(news_object):
             news_object.save()
             return True
         else:
             logging.warning('Already exists!')
             return False
     except Exception as e:
         logging.error("Error Occurred! %s" % e)
         print("Error Occurred! %s" % e)
Example No. 15
def main(FLAGS):
    print("#" * 50)
    print("# Parameters:", FLAGS)
    print("#" * 50)

    # read a set of configuration parameters
    config_params = get_config(FLAGS.config_file)
    print(config_params)
    assert is_valid(FLAGS, config_params), \
            "Please check out parameters again."

    if FLAGS.mode == "single":
        print("Single-Field Mode")
        single(FLAGS, config_params)
    elif FLAGS.mode == "multi":
        print("Multi-Field Mode")
        multi(FLAGS, config_params)
Example No. 16
    def __init__(self,
                 from_addr,
                 outputs,
                 privKey,
                 fee=None,
                 change_addr=None):
        if not change_addr:
            change_addr = from_addr
        if fee is None:
            fee = 100
        for address, value in outputs:
            assert is_valid(address)

        amount = sum(map(lambda x: x[1], outputs))
        inputs = self.getInputs(from_addr)
        total = sum(map(lambda x: x[1], inputs))
        change_amount = total - (amount + fee)

        print('Change:', change_amount)
        print('Total:', total)
        print('Amount:', amount)

        if change_amount < 0:
            raise TransactionError('Not enough funds, or the transaction'
                                   ' uses inputs that need to be confirmed')

        if amount < 1000000 and fee < 10000:
            raise TransactionError("Not enought fee :"
                                   " a fee of 0.0001 is required for"
                                   " small transactions")

        if change_amount != 0:
            outputs.append((change_addr, change_amount))

        self.tx = self.signed_tx(inputs, outputs, privKey)

        if not self.pushTx():
            raise TransactionError("An error occur while sending transaction")
        else:
            raise TransactionSubmitted("Transaction successfully transmitted")
Example No. 17
def simulated_annealing(graph,
                        num_buses,
                        size_bus,
                        constraints,
                        test_mode=False):
    all_names = list(graph.nodes())
    CD = constraint_dictionary(all_names, constraints)

    #assignment = random_assignment(all_names, num_buses, size_bus)
    assignment = greedy(graph, num_buses, size_bus, constraints)
    groups = assignment_to_groups(assignment)

    assert is_valid(graph, num_buses, size_bus, assignment)
    T0 = 1.0
    iterations = 5000

    student_status = calculate_student_status(graph, constraints, assignment)
    starting_score = evaluate_assignment(graph, constraints, assignment)
    current_score = starting_score
    for i in range(iterations):
        T = T0 * (1 - i / iterations)
        swap = propose_swap(assignment, num_buses, size_bus, groups=groups)
        if swap is None:
            continue
        val, student_status_swap = swap_value(graph, constraints, CD,
                                              student_status, assignment, swap)

        if val >= 0 or random.random() < np.exp(val / T):
            group_A, A, group_B, B = swap
            if A is not None:
                assignment[A] = group_B
                groups[group_B].append(A)
                groups[group_A].remove(A)
            assignment[B] = group_A
            groups[group_A].append(B)
            groups[group_B].remove(B)

            student_status = student_status_swap
            if test_mode:
                new_score = evaluate_assignment(graph, constraints, assignment)
                student_status_check = calculate_student_status(
                    graph, constraints, assignment)
                for s in student_status_check:
                    assert student_status[s] == student_status_check[s]
                if new_score != current_score + val:
                    print(new_score, current_score, val)
                    print(swap)
                    print(student_status)
                    print(assignment_to_groups(assignment))
                    print(constraints)

                    nodes = []
                    for x in assignment:
                        if assignment[x] == group_A or assignment[x] == group_B:
                            nodes.append(x)
                    import matplotlib.pyplot as plt
                    subgraph = nx.subgraph(graph, nodes)
                    nx.draw(subgraph, with_labels=True, font_weight='bold')
                    plt.show()
                    assert False
                current_score = new_score

    assert is_valid(graph, num_buses, size_bus, assignment)
    #print('Score:', evaluate_assignment(graph, constraints, assignment))

    return assignment
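Both the annealer above and the ILP solver in Example No. 18 bracket their work with `utils.is_valid(graph, num_buses, size_bus, assignment)`, which the excerpts don't define. A plausible sketch of what such a check verifies (the exact rules are an assumption):

def is_valid(graph, num_buses, size_bus, assignment):
    # Hypothetical check: every student sits in exactly one of num_buses groups,
    # and no group exceeds the bus capacity
    counts = [0] * num_buses
    for student in graph.nodes():
        group = assignment.get(student)
        if group is None or not 0 <= group < num_buses:
            return False
        counts[group] += 1
    return all(c <= size_bus for c in counts)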
Example No. 18
def ilp(graph, num_buses, size_bus, constraints):
    edges = graph.edges()
    buses = range(0, num_buses)
    num_students = graph.nodes()

    cxt = Context.make_default_context()
    cxt.timeout = 1
    with Model(name="solver", context=cxt) as mdl:
        X_dict = {}
        es = []
        for s in graph.nodes():
            name = "x" + str(s)
            xslist = mdl.binary_var_list(keys=num_buses, name=name)
            X_dict[s] = xslist
        for s in graph.nodes():
            cons = find_student_in_constraints(s, constraints)
            xslist = X_dict[s]
            constraining_values = []
            for b in buses:
                for C in cons:
                    relevant_dvs_names = ["x" + s + "_" + str(b) for s in C]
                    relevant_dvs = [
                        mdl.get_var_by_name(name)
                        for name in relevant_dvs_names
                    ]
                    constraining_values.append(len(C) - mdl.sum(relevant_dvs))
            rs = mdl.integer_var(name="r" + s)
            expr = (rs <= mdl.min(constraining_values))
            mdl.add_constraint(expr)
        for e in edges:
            name = "e" + e[0] + e[1]
            edge_indic = mdl.binary_var_list(keys=num_buses, name=name)
            es.extend(edge_indic)
        obj = mdl.sum(es)
        mdl.set_objective("max", obj)
        for e in edges:
            for b in buses:
                u = e[0]
                v = e[1]
                name = "e" + u + v + "_" + str(b)
                uexists = "x" + u + "_" + str(b)
                vexists = "x" + v + "_" + str(b)
                rsnameu = "r" + u
                rsnamev = "r" + v

                euvi = mdl.get_var_by_name(name)
                xui = mdl.get_var_by_name(uexists)
                xvi = mdl.get_var_by_name(vexists)
                rsu = mdl.get_var_by_name(rsnameu)
                rsv = mdl.get_var_by_name(rsnamev)
                mdl.add_constraint((euvi <= xui))
                mdl.add_constraint((euvi <= xvi))
                mdl.add_constraint((euvi <= rsu))
                mdl.add_constraint((euvi <= rsv))

        for xslist in X_dict.values():
            expr = (mdl.sum(xslist) == 1)
            mdl.add_constraint(expr)

        for b in buses:
            relevant_list = [xslist[b] for xslist in X_dict.values()]
            expr = (mdl.sum(relevant_list) <= size_bus)
            expr2 = (mdl.sum(relevant_list) >= 1)
            mdl.add_constraint(expr)
            mdl.add_constraint(expr2)

        mdl.set_time_limit(90)
        solution = mdl.solve(log_output=True)
        solved = []
        for i in range(0, num_buses):
            solved.append([])
        iterv = solution.iter_var_values()
        v = next(iterv)
        i = 0
        while i < (len(graph.nodes())):
            name = str(v[0])
            index = name.find('_')

            student = name[1:index]
            bus = int(name[index + 1:])
            solved[bus].append(student)

            i += 1
            v = next(iterv)
        print(solved)

        assignment = utils.groups_to_assignment(solved)
        if not (utils.is_valid(graph, num_buses, size_bus, assignment)):
            print('\n')
            print('NEET')
            print('\n')
            return None
        print('yeet')
        return assignment
Example No. 19
def _validate(index_view_slug):
    return is_valid(index_view_slug, base_path=conf.DRAFT_DIR)
Example No. 20
def update_product(id):
    """
    Update a Product
    This endpoint will update a Product based on the data that is posted
    ---
    tags:
      - Products
    consumes:
      - application/json
    produces:
      - application/json
    parameters:
      - name: id
        in: path
        description: ID of product to retrieve
        type: integer
        required: true
      - in: body
        name: body
        schema:
          id: data
          required:
            - type
            - quantity
          properties:
            type:
              type: string
              description: type for the Product
            quantity:
              type: integer
              description: the quantity type you specified
    responses:
      200:
        description: Product returned
        schema:
          id: Product
          properties:
            product_id:
              type: integer
              description: unique id assigned internally by service
            location_id:
              type: integer
              description: unique location id assigned internally by service
            restock_level:
              type: integer
              description: max space allocated for the product
            new:
              type: integer
              description: quantity of new products
            used:
              type: integer
              description: quantity of used products
            open_box:
              type: integer
              description: quantity of open_box products
      400:
        description: Bad Request (the posted data was not valid)
      404:
        description: Product not found
    """
    data = inventory.get_product(id)
    info = request.get_json()

    if data is None:
        return make_response("Product not found", HTTP_404_NOT_FOUND)

    if not utils.is_valid(info):
        return make_response("Product data is not valid", HTTP_400_BAD_REQUEST)

    total = int(data[USED]) + int(data[NEW]) + int(data[OPEN_BOX])
    prod_type = info[TYPE]

    if total - int(data[prod_type]) + int(info[QUANTITY]) > int(
            data[RESTOCK_LEVEL]):
        return make_response("Product amount exceed restock level",
                             HTTP_400_BAD_REQUEST)

    if int(info[QUANTITY]) < 0:
        return make_response("Product amount below zero", HTTP_400_BAD_REQUEST)

    data[prod_type] = int(info[QUANTITY])
    inventory.put_product(id, data)
    return make_response(jsonify(data), HTTP_200_OK)
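`utils.is_valid(info)` gates the posted JSON before the handler indexes into it; its body isn't part of the excerpt. Given the fields read above, a plausible sketch (the key constants mirror the handler's and are otherwise assumptions):

TYPE, QUANTITY = 'type', 'quantity'            # assumed key constants
NEW, USED, OPEN_BOX = 'new', 'used', 'open_box'

def is_valid(info):
    # Hypothetical check: payload is a dict with a known product type
    # and a quantity that parses as an integer
    if not isinstance(info, dict):
        return False
    if TYPE not in info or QUANTITY not in info:
        return False
    if info[TYPE] not in (NEW, USED, OPEN_BOX):
        return False
    try:
        int(info[QUANTITY])
    except (TypeError, ValueError):
        return False
    return True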
Example No. 21
    def view_index(self,
                   index,
                   multiple=3.,
                   gen_std=0.6,
                   hard_cutoff=None,
                   point_size=5):
        sample_distrib = models.Normal(torch.zeros(1),
                                       torch.ones(1) * gen_std,
                                       shape=(2000,
                                              config['latent_dim'])).to(device)
        dataset_out = self.dataset[index]
        dataset_out = [x.to(device).unsqueeze(0) for x in dataset_out]
        voxel_0_large, voxel_1_small, extra_context, voxel_1_large_self, voxel_1_small_self, voxel_opposite_small, voxel_opposite_large, voxel_0_small_self, voxel_0_large_self, voxel_0_small_original, voxel_1_small_original = dataset_out
        if not self.config['using_extra_context']:
            extra_context = None
        batch_1_0 = [voxel_0_large, voxel_1_small, extra_context]
        batch_0_1 = [voxel_opposite_large, voxel_opposite_small, extra_context]

        batch_0_0 = [voxel_0_large_self, voxel_0_small_self, extra_context]
        batch_1_1 = [voxel_1_large_self, voxel_1_small_self, extra_context]

        loss, log_prob_1_0, nats = inner_loop(batch_1_0, self.model_dict,
                                              config)

        _, log_prob_0_0, _ = inner_loop(batch_0_0, self.model_dict, config)
        assert is_valid(log_prob_1_0)
        change_1_0 = log_prob_to_change(log_prob_1_0,
                                        log_prob_0_0,
                                        multiple=multiple,
                                        hard_cutoff=hard_cutoff)

        assert is_valid(change_1_0)

        is_valid(loss)

        loss, log_prob_0_1, nats = inner_loop(batch_0_1, self.model_dict,
                                              config)

        _, log_prob_1_1, _ = inner_loop(batch_1_1, self.model_dict, config)
        change_0_1 = log_prob_to_change(log_prob_0_1,
                                        log_prob_1_1,
                                        multiple=multiple,
                                        hard_cutoff=hard_cutoff)

        assert is_valid(log_prob_0_1)

        sample_points_given_0 = make_sample(
            n_points=4000,
            extract_0=voxel_0_large[0].unsqueeze(0),
            models_dict=self.model_dict,
            config=config,
            sample_distrib=sample_distrib,
            extra_context=extra_context)
        cond_nump = voxel_0_large[0].cpu().numpy()
        cond_nump[:, 3:6] = np.clip(cond_nump[:, 3:6] * 255, 0, 255)
        sample_points_given_0 = sample_points_given_0.cpu().numpy().squeeze()
        sample_points_given_0[:, 3:6] = np.clip(
            sample_points_given_0[:, 3:6] * 255, 0, 255)

        fig_gen_given_0 = view_cloud_plotly(sample_points_given_0[:, :3],
                                            sample_points_given_0[:, 3:],
                                            show=False,
                                            point_size=point_size)

        fig_0 = view_cloud_plotly(voxel_0_small_original[0][:, :3],
                                  voxel_0_small_original[0][:, 3:],
                                  show=False,
                                  point_size=point_size)

        fig_1 = view_cloud_plotly(voxel_1_small_original[0][:, :3],
                                  voxel_1_small_original[0][:, 3:],
                                  show=False,
                                  point_size=point_size)

        sample_points_given_1 = make_sample(
            n_points=4000,
            extract_0=voxel_opposite_large[0].unsqueeze(0),
            models_dict=self.model_dict,
            config=config,
            sample_distrib=sample_distrib,
            extra_context=extra_context)
        sample_points_given_1 = sample_points_given_1.cpu().numpy().squeeze()
        sample_points_given_1[:, 3:6] = np.clip(
            sample_points_given_1[:, 3:6] * 255, 0, 255)
        fig_gen_given_1 = view_cloud_plotly(sample_points_given_1[:, :3],
                                            sample_points_given_1[:, 3:],
                                            show=False,
                                            point_size=point_size)

        combined_points = torch.cat((voxel_0_small_original[0][:, :3],
                                     voxel_1_small_original[0][:, :3]),
                                    dim=0)

        change_0_1[change_0_1 > 0] = 1.0
        changes_0_1_count = change_0_1.sum()
        change_1_0[change_1_0 > 0] = 1.0
        changes_1_0_count = change_1_0.sum()
        combined_change = torch.cat((change_0_1, change_1_0), dim=-1).squeeze()

        if changes_0_1_count > 0:
            fig_0_given_1 = view_cloud_plotly(voxel_0_small_original[0][:, :3],
                                              change_0_1.squeeze(),
                                              show=False,
                                              colorscale='Bluered',
                                              point_size=point_size)
        else:
            fig_0_given_1 = view_cloud_plotly(
                voxel_0_small_original[0][:, :3],
                torch.zeros_like(voxel_0_small_original[0][:, :3]) +
                torch.tensor([0, 0, 1]).to(device),
                show=False,
                point_size=point_size)
        if changes_1_0_count > 0:
            fig_1_given_0 = view_cloud_plotly(voxel_1_small_original[0][:, :3],
                                              change_1_0.squeeze(),
                                              show=False,
                                              colorscale='Bluered',
                                              point_size=point_size)
        else:
            fig_1_given_0 = view_cloud_plotly(
                voxel_1_small_original[0][:, :3],
                torch.zeros_like(voxel_1_small_original[0][:, :3]) +
                torch.tensor([0, 0, 1]).to(device),
                show=False,
                point_size=point_size)

        changed_percentage = (combined_change.sum() /
                              combined_change.numel()).item()
        print(f'Changed percentage: {changed_percentage:.2f}')
        combined_fig = view_cloud_plotly(combined_points,
                                         combined_change,
                                         show=False,
                                         colorscale='Bluered',
                                         point_size=point_size)

        return fig_0, fig_1, fig_gen_given_0, fig_gen_given_1, combined_fig, fig_0_given_1, fig_1_given_0, changed_percentage
Example No. 22
 def testValidCardNumber(self):
     card_number = 30536723505217
     self.assertTrue(utils.is_valid(card_number))
Example No. 23
def evaluate_on_test(model_dict,
                     config,
                     batch_size=None,
                     generate_samples=False):
    with torch.no_grad():
        device = 'cuda'

        batch_size = config['batch_size'] if batch_size is None else batch_size

        dataset = AmsVoxelLoader(
            config['directory_path_train'],
            config['directory_path_test'],
            out_path='save/processed_dataset',
            preload=True,
            n_samples=config['sample_size'],
            final_voxel_size=config['final_voxel_size'],
            device=device,
            n_samples_context=config['n_samples_context'],
            context_voxel_size=config['context_voxel_size'],
            mode='test',
            include_all=True)
        dataloader = DataLoader(dataset,
                                batch_size=batch_size,
                                num_workers=config['num_workers'],
                                pin_memory=True,
                                prefetch_factor=2,
                                drop_last=True,
                                shuffle=False)
        change_mean_list = []
        print('Evaluating on test')
        nats_avg = 0

        for batch_ind, batch in enumerate(tqdm(dataloader)):

            voxel_0_large, voxel_1_small, extra_context, voxel_1_large_self, voxel_1_small_self, voxel_opposite_small, voxel_opposite_large, voxel_0_small_self, voxel_0_large_self, voxel_0_small_original, voxel_1_small_original = [
                x.to(device) for x in batch
            ]
            if not config['using_extra_context']:
                extra_context = None
            batch_1_0 = [voxel_0_large, voxel_1_small, extra_context]
            batch_0_1 = [
                voxel_opposite_large, voxel_opposite_small, extra_context
            ]

            batch_0_0 = [voxel_0_large_self, voxel_0_small_self, extra_context]
            batch_1_1 = [voxel_1_large_self, voxel_1_small_self, extra_context]

            loss, log_prob_1_0, nats = inner_loop(batch_1_0, model_dict,
                                                  config)

            _, log_prob_0_0, _ = inner_loop(batch_0_0, model_dict, config)
            assert is_valid(log_prob_1_0)
            change_1_0 = log_prob_to_change(log_prob_1_0,
                                            log_prob_0_0,
                                            multiple=5.4)

            assert is_valid(change_1_0)

            is_valid(loss)
            change_means = (change_1_0 > 0).float().mean(dim=-1).tolist()
            change_mean_list.extend(change_means)

            if generate_samples:
                change_val = change_means[0]
                loss, log_prob_0_1, nats = inner_loop(batch_0_1, model_dict,
                                                      config)

                _, log_prob_1_1, _ = inner_loop(batch_1_1, model_dict, config)
                change_0_1 = log_prob_to_change(log_prob_0_1,
                                                log_prob_1_1,
                                                multiple=5.4)

                assert is_valid(log_prob_0_1)

                sample_points_given_0 = make_sample(
                    n_points=4000,
                    extract_0=voxel_0_large[0].unsqueeze(0),
                    models_dict=model_dict,
                    config=config,
                    sample_distrib=None,
                    extra_context=extra_context[0].unsqueeze(0))
                cond_nump = voxel_0_large[0].cpu().numpy()
                cond_nump[:, 3:6] = np.clip(cond_nump[:, 3:6] * 255, 0, 255)
                sample_points_given_0 = sample_points_given_0.cpu().numpy(
                ).squeeze()
                sample_points_given_0[:, 3:6] = np.clip(
                    sample_points_given_0[:, 3:6] * 255, 0, 255)

                fig_gen_given_0 = view_cloud_plotly(
                    sample_points_given_0[:, :3],
                    sample_points_given_0[:, 3:],
                    show=False)
                fig_gen_given_0.write_html(
                    f'save/examples/test_set_changes/{change_val:.2f}_{batch_ind}_gen_given_0.html'
                )
                fig_0 = view_cloud_plotly(voxel_0_small_original[0][:, :3],
                                          voxel_0_small_original[0][:, 3:],
                                          show=False)
                fig_0.write_html(
                    f'save/examples/test_set_changes/{change_val:.2f}_{batch_ind}_0_small.html'
                )
                fig_1 = view_cloud_plotly(voxel_1_small_original[0][:, :3],
                                          voxel_1_small_original[0][:, 3:],
                                          show=False)
                fig_1.write_html(
                    f'save/examples/test_set_changes/{change_val:.2f}_{batch_ind}_1_small.html'
                )

                sample_points_given_1 = make_sample(
                    n_points=4000,
                    extract_0=voxel_opposite_large[0].unsqueeze(0),
                    models_dict=model_dict,
                    config=config,
                    sample_distrib=None,
                    extra_context=extra_context[0].unsqueeze(0))
                sample_points_given_1 = sample_points_given_1.cpu().numpy(
                ).squeeze()
                sample_points_given_1[:, 3:6] = np.clip(
                    sample_points_given_1[:, 3:6] * 255, 0, 255)
                fig_gen_given_1 = view_cloud_plotly(
                    sample_points_given_1[:, :3],
                    sample_points_given_1[:, 3:],
                    show=False)
                fig_gen_given_1.write_html(
                    f'save/examples/test_set_changes/{change_val:.2f}_{batch_ind}_gen_given_1.html'
                )

                combined_points = torch.cat((voxel_0_small_original[0][:, :3],
                                             voxel_1_small_original[0][:, :3]),
                                            dim=0)

                change_0_1[log_prob_0_1[0] < 0] = 1.0
                change_1_0 = torch.zeros(voxel_1_small_original.shape[1])
                change_1_0[log_prob_1_0[0] < 0] = 1.0
                combined_change = torch.cat((change_0_1, change_1_0), dim=-1)
                combined_change[combined_change > 0] = 1.0
                combined_fig = view_cloud_plotly(combined_points,
                                                 combined_change,
                                                 show=False,
                                                 colorscale='Bluered')
                combined_fig.write_html(
                    f'save/examples/test_set_changes/{change_val:.2f}_{batch_ind}_change.html'
                )

            nats = nats.item()
            nats_avg = (nats_avg * (batch_ind) + nats) / (batch_ind + 1)
        print(f'Nats: {nats_avg}')
        return nats_avg, change_mean_list
Example No. 24
 def testInvalidCardNumber(self):
     card_number = '305367235--05217'
     self.assertFalse(utils.is_valid(card_number))
Example No. 25
def train(config_path):

    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    print(f'Using device: {device}')
    wandb.init(project="flow_change", config=config_path)
    config = wandb.config

    models_dict = initialize_flow(config, device, mode='train')

    if config['data_loader'] == 'AmsVoxelLoader':
        dataset = AmsVoxelLoader(
            config['directory_path_train'],
            config['directory_path_test'],
            out_path='save/processed_dataset',
            preload=config['preload'],
            n_samples=config['sample_size'],
            final_voxel_size=config['final_voxel_size'],
            device=device,
            n_samples_context=config['n_samples_context'],
            context_voxel_size=config['context_voxel_size'],
            mode='train',
            self_pairs_train=config['self_pairs_train']
            if 'self_pairs_train' in config else None)

    else:
        raise Exception('Invalid dataset type!')

    dataloader = DataLoader(dataset,
                            shuffle=True,
                            batch_size=config['batch_size'],
                            num_workers=config["num_workers"],
                            collate_fn=None,
                            pin_memory=True,
                            prefetch_factor=2,
                            drop_last=True)

    if config["optimizer_type"] == 'Adam':
        optimizer = torch.optim.Adam(models_dict['parameters'],
                                     lr=config["lr"],
                                     weight_decay=config["weight_decay"])
    elif config["optimizer_type"] == 'Adamax':
        optimizer = torch.optim.Adamax(models_dict['parameters'],
                                       lr=config["lr"],
                                       weight_decay=config["weight_decay"],
                                       polyak=0.999)
    elif config["optimizer_type"] == 'AdamW':
        optimizer = torch.optim.AdamW(models_dict['parameters'],
                                      lr=config["lr"],
                                      weight_decay=config["weight_decay"])
    elif config['optimizer_type'] == 'SGD':
        optimizer = torch.optim.SGD(models_dict['parameters'],
                                    lr=config["lr"],
                                    momentum=0,
                                    dampening=0,
                                    weight_decay=config["weight_decay"],
                                    nesterov=False)
    else:
        raise Exception('Invalid optimizer type!')

    scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(
        optimizer,
        patience=config['patience'],
        factor=config['lr_factor'],
        threshold=config['threshold_scheduler'],
        min_lr=config["min_lr"],
        verbose=True)

    save_model_path = r'save/conditional_flow_compare'
    loss_running_avg = 0
    # Load checkpoint params if specified path
    if config['load_checkpoint']:
        print(f"Loading from checkpoint: {config['load_checkpoint']}")
        checkpoint_dict = torch.load(
            config['load_checkpoint'], map_location='cpu'
        )  #Map to cpu to avoid weird pytorch extra gpu mem usage
        models_dict = load_flow(checkpoint_dict, models_dict)
        models_dict['flow'].train()
        #optimizer.load_state_dict(checkpoint_dict['optimizer'])
        scheduler.load_state_dict(checkpoint_dict['scheduler'])
        for g in optimizer.param_groups:
            g['lr'] = checkpoint_dict['optimizer']['param_groups'][0]['lr']

    else:
        print("Starting training from scratch!")

    # Watch models:
    detect_anomaly = False
    if detect_anomaly:
        print('DETECT ANOMALY ON')
        torch.autograd.set_detect_anomaly(True)
    torch.backends.cudnn.deterministic = False
    torch.backends.cudnn.benchmark = True

    scaler = torch.cuda.amp.GradScaler(enabled=config['amp'])
    best_so_far = math.inf
    last_save_path = None
    for epoch in range(config["n_epochs"]):
        print(f"Starting epoch: {epoch}")
        for batch_ind, batch in enumerate(tqdm(dataloader)):
            with torch.cuda.amp.autocast(enabled=config['amp']):
                if config['time_stats']:
                    torch.cuda.synchronize()
                    t0 = perf_counter()
                batch = [x.to(device) for x in batch]

                # Set to None if not using
                if not config['using_extra_context']:
                    batch[-1] = None
                extract_0 = batch[0]
                extra_context = batch[-1]

                loss, _, nats = inner_loop(batch, models_dict, config)
                is_valid(loss)

            scaler.scale(loss).backward()

            torch.nn.utils.clip_grad_norm_(models_dict['parameters'],
                                           max_norm=config['grad_clip_val'])
            scaler.step(optimizer)
            scaler.update()
            scheduler.step(loss)

            optimizer.zero_grad(set_to_none=True)
            current_lr = optimizer.param_groups[0]['lr']
            if config['time_stats']:
                torch.cuda.synchronize()
                time_batch = perf_counter() - t0
            else:
                time_batch = np.NaN
            loss_item = loss.item()
            loss_running_avg = (loss_running_avg *
                                (batch_ind) + loss_item) / (batch_ind + 1)

            if (batch_ind % config['batches_per_save']) == 0 and batch_ind > 0:
                if loss_running_avg < best_so_far:
                    if last_save_path is not None:
                        os.remove(last_save_path)
                    print('Saving!')
                    savepath = os.path.join(
                        save_model_path,
                        f"{wandb.run.name}_e{epoch}_b{batch_ind}_model_dict.pt"
                    )
                    print(f'Loss epoch: {loss_running_avg}')
                    save_flow(models_dict, config, optimizer, scheduler,
                              savepath)
                    last_save_path = savepath
                    best_so_far = min(loss_running_avg, best_so_far)
                    loss_running_avg = 0

            if ((batch_ind + 1) % config['batches_per_sample']
                    == 0) and config['make_samples']:
                with torch.no_grad():

                    cond_nump = extract_0[0].cpu().numpy()
                    if config['using_extra_context']:
                        sample_extra_context = extra_context[0].unsqueeze(0)
                    else:
                        sample_extra_context = None
                    sample_points = make_sample(
                        n_points=4000,
                        extract_0=extract_0[0].unsqueeze(0),
                        models_dict=models_dict,
                        config=config,
                        sample_distrib=None,
                        extra_context=sample_extra_context)
                    cond_nump[:, 3:6] = np.clip(cond_nump[:, 3:6] * 255, 0,
                                                255)
                    sample_points = sample_points.cpu().numpy().squeeze()
                    sample_points[:,
                                  3:6] = np.clip(sample_points[:, 3:6] * 255,
                                                 0, 255)
                    wandb.log({
                        "Cond_cloud": wandb.Object3D(cond_nump[:, :6]),
                        "Gen_cloud": wandb.Object3D(sample_points[:, :6]),
                        'loss': loss_item,
                        'nats': nats.item(),
                        'lr': current_lr,
                        'time_batch': time_batch
                    })
            else:
                wandb.log({
                    'loss': loss_item,
                    'nats': nats.item(),
                    'lr': current_lr,
                    'time_batch': time_batch
                })

        wandb.log({'epoch': epoch, "loss_epoch": loss_running_avg})
Example No. 26
 def testValidSeparatedNumber(self):
     card_number = '1234-1234-1234-1234'
     self.assertTrue(utils.is_valid(card_number))
Example No. 27
def greedy(graph, num_buses, size_bus, constraints):
    G = graph.copy()
    groups = [[] for i in range(num_buses)]
    constraint_status = {
        i: (-1, len(constraints[i]))
        for i in range(len(constraints))
    }
    CD = constraint_dictionary(G.nodes(), constraints)

    def greedy_value(student, group, group_num, num_neighbors):
        rowdy = set()
        group_set = set(group + [student])
        for ci in CD[student]:
            (g, p) = constraint_status[ci]
            if g != group_num or p > 1:
                continue
            for s in constraints[ci]:
                rowdy.add(s)

        if len(rowdy) > 0:
            return -len(rowdy)
        return num_neighbors[student]

    fill_the_rest = False
    partial_assignment = {n: None for n in G.nodes()}
    for i in range(num_buses):
        # Pick the person with the most friends
        names = G.nodes()
        x = max(names, key=lambda n: G.degree(n))
        group = []
        num_neighbors = {n: 0 for n in names}
        for j in range(size_bus):
            if j == 0 and (num_buses - i == len(num_neighbors)) or (
                    num_buses - i - 1 == len(num_neighbors)):
                groups[i] = group
                fill_remainder(groups, num_buses, list(num_neighbors.keys()))
                fill_the_rest = True
                break

            group_set = set(group)

            def value(student):
                return greedy_value(student, group, i, num_neighbors)

            # Add people to a bus until we can't add any more people
            y = max(num_neighbors, key=value)
            for n in G[y]:
                if n not in group:
                    num_neighbors[n] += 1
            group.append(y)
            partial_assignment[y] = i

            neutralized_constraints = []
            for ci in CD[y]:
                (g, p) = constraint_status[ci]
                if g == -1:
                    constraint_status[ci] = (i, p - 1)
                elif g != i:
                    neutralized_constraints.append(ci)
                else:
                    constraint_status[ci] = (g, p - 1)

            for ci in neutralized_constraints:
                for s in constraints[ci]:
                    CD[s].remove(ci)

            del num_neighbors[y]
            if len(num_neighbors) == 0:
                break

        if fill_the_rest:
            break

        groups[i] = group
        for n in group:
            G.remove_node(n)

    assignment = utils.groups_to_assignment(groups)
    assert utils.is_valid(graph, num_buses, size_bus, assignment)
    return assignment