def write_to_csv(data_in_dictionary, csvfile):
    """Write data to a csv file"""
    with open(csvfile, 'wb') as f:
        fieldnames = data_in_dictionary.keys()
        writer = csv.DictWriter(f, fieldnames)
        writer.writeheader()
        writer.writerow(data_in_dictionary)
    logger.info("Data written to file: " + csvfile)
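# A minimal usage sketch for write_to_csv. The field names, values and filename
# below are illustrative assumptions, not taken from the project; `csv`,
# `OrderedDict` and a configured module-level `logger` are assumed to be
# imported as elsewhere in this module.
def _write_to_csv_example():
    sample = OrderedDict([('OPEN', '1,234.00'), ('CLOSE', '1,240.50')])
    write_to_csv(sample, 'quotes_sample.csv')  # header row from the keys, one data row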
def insert(self, query):
    self.query = query
    try:
        logger.debug('Running query: ' + self.query)
        DBHandler.cursor.execute(self.query)
        DBHandler.db.commit()
        logger.info('Query executed')
    except Exception:
        logger.exception('Query Failed')
        DBHandler.db.rollback()
def send_observation(api_token, observation):
    obs_endpoint = SendObservationsEndpoint(api_token, [observation])
    obs_res = obs_endpoint.post()
    if not obs_res.is_error():
        logger.info("[linkwatch] Observation POST result for %s: %s -- with HTTP STATUS: %s"
                    % (observation, obs_res.get_text(), obs_res.get_status()))
    else:
        try:
            obs_res.response.raise_for_status()
        except Exception as e:
            logger.error("[linkwatch] Failed to POST new weight observation %s: %s"
                         % (observation, e))
def __init__(self, query='use python;'):
    try:
        DBHandler.db = MySQLdb.connect(env.DB_IP, env.DB_USER,
                                       env.DB_PASSWORD, env.DB_DATABASE)
        logger.info('Connection created !!')
        DBHandler.cursor = DBHandler.db.cursor()
        self.query = query
        logger.debug('Running query: ' + self.query)
        DBHandler.cursor.execute(self.query)
    except Exception:
        logger.exception('Connection Failed')
def general_data(self, start_word, end_word_not_including):
    self.start_word = start_word
    self.end_word_not_including = end_word_not_including
    data = scraping.driver.find_element_by_id('content').text
    # logger.info(data)
    value = data[data.find(self.start_word):data.find(self.end_word_not_including)]
    logger.info(value)
    logger.debug(type(value))
    value = value.encode('ascii', 'ignore')
    logger.info(value)
    logger.debug(type(value))
    # Hand the raw "key:value" text block to dataProcessing, which builds the
    # OrderedDict and writes the CSV.
    dataProcessing.create_dict(value, env.CSV_FILE)
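# Illustrative call for general_data: the marker strings below are assumptions
# about the scraped page text, used only to make the start/end slicing concrete.
def _general_data_example(sc):
    sc.general_data(start_word='OPEN', end_word_not_including='52 WEEK')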
def process_measurement(measurement_json):
    login_res = perform_login()
    token = login_res.get_token()
    if not token:
        logger.error("[linkwatch] Auth token unavailable due to error in login_endpoint call. Reason: %s"
                     % (login_res.get_error_reason()))
    else:
        logger.info("[linkwatch] Auth token value is: %s" % (token))
        obs = None
        try:
            obs = get_observation_from_measurement_json(measurement_json)
        except Exception as e:
            logger.error("[linkwatch] Could not convert measurement -- %s -- to linkwatch observation: %s"
                         % (measurement_json, e))
            return
        send_observation(token, obs)
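# Illustrative input for process_measurement. The real schema is whatever
# get_observation_from_measurement_json (not shown here) expects, so the
# 'value' and 'timestamp' keys below are assumptions; the 'type' key and the
# sample weight/timestamp are taken from elsewhere in this file.
_EXAMPLE_MEASUREMENT_JSON = {
    'type': 'weight',         # checked by the "Unsupported measurement type" branch
    'value': 70.2,            # same sample weight as the SendWeight call below (assumed key)
    'timestamp': 1485703734,  # Unix epoch seconds, as in the SendWeight call (assumed key)
}
# e.g. process_measurement(_EXAMPLE_MEASUREMENT_JSON)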
def create_dict(data, csvfile):
    raw_data = data.splitlines()
    # Split only on the first ':' so values that themselves contain ':' stay intact.
    temp = map(lambda x: x.split(":", 1), raw_data)
    logger.info(temp)
    processed_data = OrderedDict(temp)
    logger.info(processed_data)
    logger.info("Writing to CSV")
    write_to_csv(processed_data, csvfile)
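# Sketch of the colon-separated text block create_dict expects. The sample lines
# and filename are illustrative only; in this project the text comes from
# general_data's scrape of the page.
def _create_dict_example():
    raw = "OPEN:1,234.00\nHIGH:1,250.00\nLOW:1,230.00"
    create_dict(raw, 'quotes_sample.csv')  # becomes an OrderedDict, then one CSV row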
def main():
    logger.info("Execution Begins")
    logger.info("******************************")
    with scraping() as sc:
        sc.export_to_excel(email='*****@*****.**', password='******')
        if res.status_code < 400:
            logger.debug("[opentele] The result of posting %s to OpenTele: %s -- with HTTP STATUS: %s"
                         % (measurement_json, str(res.text), str(res.status_code)))
        else:
            logger.error("[opentele] The result of posting %s to OpenTele: %s -- with HTTP STATUS: %s"
                         % (measurement_json, str(res.text), str(res.status_code)))
        return

    # Reached only when none of the handlers above matched the measurement type.
    raise Exception("Unsupported measurement type: %s" % (measurement_json['type']))


if __name__ == "__main__":
    # Basic HTTP AUTH data
    credentials = get_credentials()

    # ==== Send a weight measurement ====
    logger.info("Sending a weight measurement ...")
    send_weight = SendWeight(credentials, 70.2, 1485703734)
    res = send_weight.post()
    logger.info("Status code: " + str(res))

    send_weight = SendWeight(credentials, 70.2, 1485703734)
    res = send_weight.post()
    logger.info("Status code: " + str(res))

    # Check that it has been registered
    get_weight = GetWeightObservations(credentials, params={"filter": "week"})
    status, json_str = get_weight.get()
    if status == 200:
        print json_str

    # ==== Send a BP measurement ====
def export_to_excel(self, email, password, security='INFY'):
    self.email = email
    self.password = password
    self.security = security
    logger.info("******************************************************")
    logger.info('Clicking excel: ' + scraping.driver.current_url)
    try:
        scraping.driver.find_element_by_link_text('Export to Excel').click()
        time.sleep(3)
        logger.info('Registration Page: ' + scraping.driver.current_url)
        scraping.driver.find_element_by_link_text('Login here').click()
        time.sleep(2)
        logger.info('Entering login credentials: ' + scraping.driver.current_url)
        uname = scraping.driver.find_element_by_id('id_username')
        uname.send_keys(self.email)
        pwd = scraping.driver.find_element_by_id('id_password')
        pwd.send_keys(self.password)
        logger.info('Logging in')
        scraping.driver.find_element_by_tag_name('button').submit()
        time.sleep(2)
        logger.info(scraping.driver.current_url)
        logger.info('Clicking for excel')
        company = scraping.driver.find_element_by_tag_name('input')
        company.send_keys(self.security)
        time.sleep(2)
        company_list = scraping.driver.find_element_by_class_name('dropdown-menu')
        company_list.click()
        scraping.driver.find_element_by_tag_name('button').click()
        time.sleep(2)
        logger.info(scraping.driver.current_url)
        script_list = scraping.driver.find_elements_by_tag_name('h4')
        script_data = map(lambda x: x.text, script_list)
        logger.info(script_data)
        scraping.driver.find_element_by_link_text('Export to Excel').click()
        time.sleep(5)
        logger.info(scraping.driver.current_url)
        logger.info('Clicked...Check')
    except Exception as e:
        logger.exception(e)
    finally:
        logger.info("******************************************************")
def __exit__(self, exc_type, exc_val, exc_tb):
    scraping.driver.quit()
    logger.info('Webdriver handler closed')
def __exit__(self, exc_type, exc_value, traceback):
    DBHandler.db.close()
    logger.info('******************************************')
    logger.info('Destroying DB object')
    logger.info('******************************************')
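# A minimal usage sketch for DBHandler as a context manager. It assumes the class
# also defines __enter__ returning self (not shown in this section); the table
# name and values are illustrative only.
def _dbhandler_example():
    with DBHandler() as db:
        db.insert("INSERT INTO quotes (security, close_price) VALUES ('INFY', 1240.50)")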