def parse_cache():
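    """Parse every cached KSL HTML file and return a flat list of listings."""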
    shared.log_message('Begin KSL parse cache')
    parsed_files = list()
    for filename in sorted(os.listdir(shared.CACHE_CURRENT_KSL_DIR)):
        with open(os.path.join(shared.CACHE_CURRENT_KSL_DIR, filename),
                  'r') as file:
            parsed_files.append(parse_html(file.read()))
    listings = [item for sublist in parsed_files for item in sublist]
    shared.log_message(f'KSL parse cache returned {len(listings)} listings')
    return listings
Example #2
def send_email(current, current_dict, previous, previous_dict):
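    """Diff current listings against the previous snapshot and email alerts.

    Alerts cover new active listings, listings that became more available,
    price drops, and newly announced open houses. If nothing changed, no
    email is sent.
    """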
    # New listings includes all current listings that are not previous listings
    all_current = get_all_set(
        current_dict,
        [shared.ACTIVE, shared.BACKUP_OFFER, shared.UNDER_CONTRACT])
    all_previous = get_all_set(previous_dict, [
        shared.ACTIVE, shared.BACKUP_OFFER, shared.UNDER_CONTRACT,
        shared.OFF_MARKET
    ])
    new_listing_ids = [
        mls for mls in all_current - all_previous
        if current[1][mls].status in [shared.ACTIVE]
    ]
    if new_listing_ids:
        shared.log_message('New listings: {}'.format(', '.join(
            map(str, new_listing_ids))))
    existing_ids = all_current.intersection(all_previous)
    more_available_ids = dict()
    price_drop_ids = dict()
    open_house_ids = dict()
    for existing_id in existing_ids:
        prepend = 'MLS #{}: '.format(existing_id)
        # If the listing has become more available than previously
        if current[1][existing_id].status not in [shared.UNDER_CONTRACT, shared.OFF_MARKET] \
                and previous[1][existing_id].status not in [shared.OFF_MARKET] \
                and LISTING_STATE_DICT[current[1][existing_id].status] < LISTING_STATE_DICT[previous[1][existing_id].status]:
            message = 'Availability Change: {} -> {}'.format(
                previous[1][existing_id].status,
                current[1][existing_id].status)
            shared.log_message(prepend + message)
            more_available_ids[existing_id] = message
        # If the listing has had a price drop
        elif current[1][existing_id].price < previous[1][existing_id].price \
                and current[1][existing_id].status not in [shared.UNDER_CONTRACT, shared.OFF_MARKET]:
            message = 'Price Drop: ${:,} -> ${:,}'.format(
                previous[1][existing_id].price, current[1][existing_id].price)
            shared.log_message(prepend + message)
            price_drop_ids[existing_id] = message
        # If the listing has a new open house
        elif current[1][existing_id].open_house \
                and not previous[1][existing_id].open_house \
                and current[1][existing_id].status not in [shared.UNDER_CONTRACT, shared.OFF_MARKET]:
            message = 'New open house: {}'.format(
                current[1][existing_id].open_house)
            shared.log_message(prepend + message)
            open_house_ids[existing_id] = message
    if new_listing_ids or more_available_ids or price_drop_ids or open_house_ids:
        email_manager.generate_and_send_email(current[1], new_listing_ids,
                                              more_available_ids,
                                              price_drop_ids, open_house_ids)
    else:
        shared.log_message('No email sent')
Example #3
def test():
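    """Run the URE scraper end to end, logging timings and parsed listings."""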
    shared.log_message('Begin URE_scraper')
    start = time.time()
    update_cache()
    end = time.time()
    shared.log_message(f'Update cache: {end - start:.2f} seconds')
    start = time.time()
    listings = parse_cache()
    end = time.time()
    shared.log_message(f'Parse cache: {end - start:.2f} seconds')
    for listing in listings:
        shared.log_message(shared.prettify_mls_str(listing))
    shared.log_message('End URE_scraper')
Example #4
def parse_cache():
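    """Parse every cached URE HTML file and return a flat list of listings.

    Raises ValueError if fewer listings were parsed than the result count
    reported in the cached pages.
    """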
    shared.log_message('Begin URE parse cache')
    parsed_files = list()
    expected_count = 0
    count = 0
    for filename in sorted(os.listdir(shared.CACHE_CURRENT_URE_DIR)):
        with open(os.path.join(shared.CACHE_CURRENT_URE_DIR, filename), 'r') as file:
            expected_count, mls_listings = parse_html(file.read())
            count += len(mls_listings)
            parsed_files.append(mls_listings)
    # The "count == 500" check appears to guard against a 500-results-per-query cap.
    if count < expected_count or (count == 500 and expected_count > 500):
        raise ValueError(
            f'Results count ({count}) is less than expected count ({expected_count})')
    listings = [item for sublist in parsed_files for item in sublist]
    shared.log_message(f'URE parse cache returned {len(listings)} listings')
    return listings
Example #5
def send_email(message):
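    """Send the given message over Gmail SMTP_SSL, or log a dummy copy.

    Sender, recipients, and credentials come from shared.CONFIG; the real
    send only happens when shared.SEND_MESSAGE is true.
    """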
    port = 465  # For SSL
    smtp_server = "smtp.gmail.com"
    message['From'] = shared.CONFIG['email']['from']
    recipients = shared.CONFIG['email']['to'].split(' ')
    message["To"] = ', '.join(recipients)
    if shared.SEND_MESSAGE:
        context = ssl.create_default_context()
        with smtplib.SMTP_SSL(smtp_server, port, context=context) as server:
            server.login(shared.CONFIG['email']['from'], shared.CONFIG['email']['password'])
            server.sendmail(message['From'], recipients, message.as_string())
        shared.log_message('Email sent')
    else:
        log = '***** DUMMY EMAIL *****\n'
        log += '********* TO: *********\n'
        log += message['To'] + '\n'
        log += '******** FROM: ********\n'
        log += message['From'] + '\n'
        log += '****** MESSAGE: *******\n'
        log += message.as_string() + '\n'
        log += '***********************'
        shared.log_message(log)
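
# Minimal usage sketch (assumption: callers hand send_email a standard-library
# MIME message; the subject and body text here are hypothetical).
if __name__ == '__main__':
    from email.mime.text import MIMEText

    msg = MIMEText('2 listings changed since the last run', 'plain')
    msg['Subject'] = 'Listing alert'
    send_email(msg)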
Example #6
def update_cache():
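    """Scrape URE result pages and rewrite the current cache directory.

    Skipped entirely when shared.UPDATE_CACHE is false.
    """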
    shared.log_message('Begin URE update cache')
    if shared.UPDATE_CACHE:
        start = time.time()
        sources = scrape()
        end = time.time()
        for file in os.listdir(shared.CACHE_CURRENT_URE_DIR):
            os.remove(os.path.join(shared.CACHE_CURRENT_URE_DIR, file))
        for i, source in enumerate(sources):
            path = os.path.join(shared.CACHE_CURRENT_URE_DIR,
                                f'{shared.g_timestamp}_{i}.html')
            with open(path, 'w') as file:
                file.write(source)
        shared.log_message(f'URE cache updated ({len(sources)} pages in {end - start:.2f} seconds) '
                           f'under name {shared.g_timestamp}')
    else:
        shared.log_message('UPDATE_CACHE set to false')
Example #7
def update():
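    """Run one timestamped update cycle, retrying up to five times on error.

    On success, checkpoints the cache and database and returns True.
    """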
    passed = False
    for i in range(5):
        try:
            shared.update_timestamp()
            shared.log_message(f' BEGIN {shared.g_timestamp} '.center(40, '*'))
            update_and_alert()
            shared.log_message(f' END {shared.g_timestamp} '.center(40, '*'))
            shared.make_checkpoint(shared.CACHE_CURRENT_DIR,
                                   shared.CACHE_CHECKPOINT_DIR)
            shared.make_checkpoint(shared.DB, shared.DB_CHECKPOINT_DIR)
            passed = True
            break
        except Exception:  # log and retry; let KeyboardInterrupt/SystemExit propagate
            shared.log_message(traceback.format_exc())
            short_loop_sleep()
    return passed
Example #8
def update_and_alert():
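    """Scrape KSL and URE, reconcile the results with the DB, and send alerts.

    Creates the database on first run; otherwise updates new and off-market
    listings, refreshes existing rows, and emails any changes.
    """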
    ksl_results = ksl_scraper.get_mls_listings()
    ure_results = ure_scraper.get_mls_listings()
    results = ure_results + ksl_results
    results_dict = {
        status: [result for result in results if result.status == status]
        for status in shared.STATUSES
    }
    current = shared.format_listings(results)
    current_dict = {
        key: shared.format_listings(value)
        for key, value in results_dict.items()
    }
    if db.db_exists():
        shared.log_message('Begin update DB')
        previous, previous_dict = db.get_db_listings()
        new_listings = update_db_new_listings(current, current_dict, previous,
                                              previous_dict)
        if new_listings:
            shared.log_message(
                f'New listings added to DB: {", ".join([str(listing) for listing in new_listings])}'
            )
        else:
            shared.log_message('No new listings added to DB')
        off_market_ids = update_db_off_market(current, current_dict, previous,
                                              previous_dict)
        if off_market_ids:
            shared.log_message(
                f'Off market listings updated in DB: {", ".join([str(listing) for listing in off_market_ids])}'
            )
        else:
            shared.log_message('No off market listings updated in DB')
        update_db(current, current_dict, previous, previous_dict)
        shared.log_message('Updated DB')
        send_email(current, current_dict, previous, previous_dict)
    else:
        shared.log_message(f'Begin create DB, no file at "{shared.DB}"')
        db.create_db()
        db.insert_rows([value for key, value in current[1].items()])
        shared.log_message('Created DB')