# NOTE(review): this chunk begins mid-method -- the `self.*` assignments below
# belong to a connection-options initializer whose `def` line is outside this
# view; indentation has been reconstructed from context.
self.log = False          # pysftp session logging off by default
self.compression = False  # no transport compression
self.ciphers = None       # let paramiko choose its default cipher list
if knownhosts is None:
    # fall back to the default known_hosts location provided by known_hosts()
    knownhosts = known_hosts()
self.hostkeys = paramiko.hostkeys.HostKeys()
try:
    self.hostkeys.load(knownhosts)
except IOError:
    # A missing/unreadable known_hosts file is tolerated; hostkeys stays empty.
    pass
else:
    pass

## https://stackoverflow.com/questions/56521549/failed-to-load-hostkeys-warning-while-connecting-to-sftp-server-with-pysftp
# Module-level connection options. Setting hostkeys to None disables host-key
# verification entirely (insecure -- exposes the connection to MITM; see link).
cnopts = pysftp.CnOpts()
cnopts.hostkeys = None


def beep(time, msg=None):
    # Sound a 1500 Hz tone for `time` seconds (winsound.Beep takes
    # milliseconds), then log the optional message. Windows-only (winsound).
    # NOTE: the parameter name `time` shadows the time module inside this body.
    winsound.Beep(1500, int(time * 1000))
    print('---- msg', msg)


def elapsed_time():
    # Return wall-clock time since the module-global `start_time`, formatted
    # as zero-padded "HH:MM:SS". `time()` here is the module-level time
    # function (e.g. `from time import time` -- TODO confirm against imports).
    global start_time
    elapsed = time() - start_time
    hours, rem = divmod(elapsed, 3600)
    minutes, seconds = divmod(rem, 60)
    return "{:0>2}:{:0>2}:{:0>2.0f}".format(int(hours), int(minutes), seconds)
def put_rows(auth, destination, rows, variant=''):
  """Processes standard write JSON block for dynamic export of data.

  Allows us to quickly write the results of a script to a destination. For
  example write the results of a DCM report into BigQuery.

  - Will write to multiple destinations if specified.
  - Extensible, add a handler to define a new destination ( be kind update the documentation json ).

  Include the following JSON in a recipe, then in the run.py handler when
  encountering that block pass it to this function and use the returned results.

    from utils.data import put_rows

    var_json = {
      "out":{
        "bigquery":{
          "dataset": [ string ],
          "table": [ string ],
          "schema": [ json - standard bigquery schema json ],
          "header": [ boolean - true if header exists in rows ],
          "disposition": [ string - same as BigQuery documentation ]
        },
        "sheets":{
          "sheet":[ string - full URL, suggest using share link ],
          "tab":[ string ],
          "range":[ string - A1:A notation ],
          "append": [ boolean - if sheet range should be appended to ],
          "delete": [ boolean - if sheet range should be cleared before writing ]
        },
        "storage":{
          "bucket": [ string ],
          "path": [ string ]
        },
        "file":[ string - full path to place to write file ]
      }
    }

    values = put_rows('user', var_json)

  Or you can use it directly with project singleton.

    from util.project import project
    from utils.data import put_rows

    @project.from_parameters
    def something():
      values = get_rows(project.task['auth'], project.task['out'])

    if __name__ == "__main__":
      something()

  Args:
    auth: (string) The type of authentication to use, user or service.
    destination: (json) A json block resembling var_json described above.
    rows: (list) The data being written as a list object.
    variant: (string) Appended to destination to differentiate multiple objects.

  Returns:
    If single_cell is False: Returns a list of row values [[v1], [v2], ... ]
    If single_cell is True: Returns a list of values [v1, v2, ...]
  """

  if not rows:
    if project.verbose:
      print('No rows provided to put_rows')
    return

  if 'bigquery' in destination:
    # Skip the header row only when a header is present AND a schema is
    # supplied (otherwise the header is needed to infer the schema).
    skip_rows = 1 if destination['bigquery'].get('header') and destination['bigquery'].get('schema') else 0

    if destination['bigquery'].get('format', 'CSV') == 'JSON':
      json_to_table(
        destination['bigquery'].get('auth', auth),
        destination['bigquery'].get('project_id', project.id),
        destination['bigquery']['dataset'],
        destination['bigquery']['table'] + variant,
        rows,
        destination['bigquery'].get('schema', []),
        destination['bigquery'].get('disposition', 'WRITE_TRUNCATE'),
      )

    elif destination['bigquery'].get('is_incremental_load', False) == True:
      incremental_rows_to_table(
        destination['bigquery'].get('auth', auth),
        destination['bigquery'].get('project_id', project.id),
        destination['bigquery']['dataset'],
        destination['bigquery']['table'] + variant,
        rows,
        destination['bigquery'].get('schema', []),
        destination['bigquery'].get('skip_rows', skip_rows),
        destination['bigquery'].get('disposition', 'WRITE_APPEND'),
        billing_project_id=project.id)

    else:
      rows_to_table(
        destination['bigquery'].get('auth', auth),
        destination['bigquery'].get('project_id', project.id),
        destination['bigquery']['dataset'],
        destination['bigquery']['table'] + variant,
        rows,
        destination['bigquery'].get('schema', []),
        destination['bigquery'].get('skip_rows', skip_rows),
        destination['bigquery'].get('disposition', 'WRITE_TRUNCATE'),
      )

  if 'sheets' in destination:
    if destination['sheets'].get('delete', False):
      sheets_clear(
        auth,
        destination['sheets']['sheet'],
        destination['sheets']['tab'] + variant,
        destination['sheets']['range'],
      )
    sheets_write(
      auth,
      destination['sheets']['sheet'],
      destination['sheets']['tab'] + variant,
      destination['sheets']['range'],
      rows,
      destination['sheets'].get('append', False),
    )

  if 'file' in destination:
    path_out, file_ext = destination['file'].rsplit('.', 1)
    file_out = path_out + variant + '.' + file_ext
    if project.verbose:
      print('SAVING', file_out)
    makedirs_safe(parse_path(file_out))
    with open(file_out, 'w') as save_file:
      save_file.write(rows_to_csv(rows).read())

  if 'storage' in destination and destination['storage'].get(
      'bucket') and destination['storage'].get('path'):
    # create the bucket
    bucket_create(auth, project.id, destination['storage']['bucket'])
    # put the file
    file_out = destination['storage']['bucket'] + ':' + destination['storage'][
        'path'] + variant
    if project.verbose:
      print('SAVING', file_out)
    object_put(auth, file_out, rows_to_csv(rows))

  if 'sftp' in destination:
    try:
      cnopts = pysftp.CnOpts()
      cnopts.hostkeys = None  # host-key verification disabled (insecure)

      path_out, file_ext = destination['sftp']['file'].rsplit('.', 1)
      # FIX: re-insert the '.' removed by rsplit -- previously this produced
      # e.g. "reportcsv" instead of "report.csv".
      file_out = path_out + variant + '.' + file_ext

      sftp = pysftp.Connection(
        host=destination['sftp']['host'],
        username=destination['sftp']['username'],
        password=destination['sftp']['password'],
        port=destination['sftp']['port'],
        cnopts=cnopts)

      # Change into the target directory if the path contains one.
      if '/' in file_out:
        dir_out, file_out = file_out.rsplit('/', 1)
        sftp.cwd(dir_out)

      sftp.putfo(rows_to_csv(rows), file_out)

    # FIX: `except e:` evaluated an undefined name at handler time (NameError);
    # bind the caught exception properly instead.
    except Exception as e:
      print(str(e))
      traceback.print_exc()
def copy_to_target(self, config, target, local_filepath, target_filename):
    """Upload `local_filepath` to the SFTP host described by `target` as
    `target_filename`, creating the remote directory tree if needed, then
    rotate old backups and close the connection.

    `target` keys read here: user, host, port (default 22),
    disable_hostkey_checking, dir. CBOLD/LGREEN/CDIM/CRESET are terminal
    color constants defined elsewhere in this file.
    """
    import pysftp

    user = target.get('user')
    host = target.get('host')
    port = target.get('port', 22)
    print('')
    print(CBOLD + LGREEN, "Connecting to {}@{}:{}...".format(user, host, port), CRESET)

    # Init SFTP connection
    try:
        cnopts = pysftp.CnOpts()
        if target.get('disable_hostkey_checking', False):
            cnopts.hostkeys = None  # disables host-key verification (insecure)
        conn = pysftp.Connection(host=host,
                                 username=target.get('user'),
                                 port=port,
                                 cnopts=cnopts)
        # Reaches into a private attribute to enable a 30 s TCP keepalive.
        conn._transport.set_keepalive(30)
    except (pysftp.ConnectionException, pysftp.SSHException):
        print(CBOLD, "Unknown exception while connecting to host:", CRESET)
        print(traceback.format_exc())
        # NOTE(review): returns the traceback *module* (truthy) rather than
        # None/False -- callers relying on a boolean would be misled; left
        # unchanged to preserve behavior.
        return traceback
    except (pysftp.CredentialException, pysftp.AuthenticationException):
        print(
            CBOLD,
            "Credentials or authentication exception while connecting to host:",
            CRESET)
        print(traceback.format_exc())
        return traceback  # same module-return caveat as above

    target_dir = target.get('dir')

    # Create destination directory if necessary
    try:
        # Try...
        conn.chdir(target_dir)
    except IOError:
        # Create directories one path segment at a time, chdir-ing into each;
        # mkdir only the segments that do not already exist.
        current_dir = ''
        for directory in target_dir.split('/'):
            current_dir = os.path.join(current_dir, directory)
            try:
                conn.chdir(current_dir)
            except IOError:
                print(CDIM,
                      'Creating missing directory {}'.format(current_dir),
                      CRESET)
                conn.mkdir(current_dir)
                conn.chdir(current_dir)
            pass

    print(
        CBOLD + LGREEN,
        "Starting transfer: {} => {}".format(local_filepath, target_filename),
        CRESET)

    # Upload file
    conn.put(local_filepath, target_filename, callback=print_progress)

    print('')
    print(CBOLD + LGREEN, "Transfer finished.", CRESET)

    self.rotate_backups(config, target, conn)

    conn.close()
def uploadFiles():
    """Wait for the upload server to become reachable, then push recorded
    video files (listed in the dataLog JSON file) and any leftover .h264
    files to the SFTP media storage location.

    Returns:
        True when the server was reached and the upload pass completed,
        False when the server never became reachable.

    Relies on module-level globals: Finder, dataLogFile, myHostname,
    myUsername, myPassword, mediaStorageLocation, uploadCallback.
    """
    print("u have 5 seconds to turn on the ignition")
    time.sleep(5)
    server_name = "OakOne"
    password = "******"
    interface_name = "wlan0"  # i. e wlp2s0
    F = Finder(server_name=server_name,
               password=password,
               interface=interface_name)

    # Poll for the server: up to 60 retries, 2 s apart (~2 minutes).
    counter = 0
    response = F.run()
    while (response == False):
        counter += 1
        if (counter < 60):
            time.sleep(2)
            print('waiting for a second to try again')
            response = F.run()
        else:
            break

    print("**************************** did come here")
    if (response == True):
        print("Starting Upload")
        a_file = open(dataLogFile, "r")  # read dataLog File
        json_object = json.load(a_file)
        a_file.close()
        videoFiles = json_object['videoFiles']
        # NOTE: alias, not a copy -- mutating one list mutates both (the
        # per-file `del` below is commented out, so this is currently benign).
        _videoFiles = videoFiles
        cnopts = pysftp.CnOpts()
        cnopts.hostkeys = None  # host-key verification disabled
        print("**************************** hostkeys none")
        with pysftp.Connection(host=myHostname,
                               username=myUsername,
                               password=myPassword,
                               cnopts=cnopts) as sftp:
            cnt = 0
            # Upload (then delete locally) every file recorded in the log.
            for _file in videoFiles:
                if os.path.exists(_file):
                    print("starting upload -" + _file)
                    remoteFilepath = mediaStorageLocation + _file
                    localFilepath = _file
                    #del _videoFiles[cnt]
                    sftp.put(localFilepath, remoteFilepath, uploadCallback)
                    os.remove(_file)
                    print("Uploaded File " + _file)
            # Sweep the working directory for stray .h264 files.
            myfiles = os.listdir("./")
            print(myfiles)
            for __file in myfiles:
                if (".h264" in __file):
                    print(__file)
                    remoteFilepath = mediaStorageLocation + __file
                    localFilepath = __file
                    sftp.put(localFilepath, remoteFilepath)
                    # FIX: log the file just uploaded (__file); previously this
                    # printed the stale `_file` left over from the first loop.
                    print("uploaded file -" + __file)
            # FIX: removed redundant sftp.close() -- the `with` block already
            # closes the connection on exit.
        json_object['videoFiles'] = _videoFiles
        a_file = open(dataLogFile, "w")
        json.dump(json_object, a_file, indent=4)
        a_file.close()
        return True
    else:
        return False
import pysftp as sftp

# I'm using a file to store the password, but a hardcoded string works as well
# if needed.
with open('pass', 'r') as file:
    # FIX: was .replace('/n', '') -- '/n' is not the newline escape, so the
    # trailing newline was never stripped from the password.
    myPassword = file.read().replace('\n', '')

myHostname = 'sftp-server'
myUsername = '******'

# known_hosts is a file, usually stored in the ftp /ect/ssh/ -- host keys are
# verified against it (safer than setting hostkeys = None).
cnopts = sftp.CnOpts(knownhosts='known_hosts')

# FIX: the connection is now bound to `conn` instead of `sftp`, which used to
# shadow the module alias and would break any later sftp.* call.
with sftp.Connection(host=myHostname,
                     username=myUsername,
                     password=myPassword,
                     cnopts=cnopts) as conn:
    print("Connection successful..")
    conn.cwd("path/to/find")
    # List every entry of the remote directory with its attributes.
    directory_structure = conn.listdir_attr()
    for attr in directory_structure:
        print(attr.filename, attr)
#----End | https://github.com/eabdiel
def run(self):
    """Geo-rectify this task's staged GeoTIFF and upload the result via SFTP.

    Reads credentials from the QGIS authentication manager (authid in
    self.options["remote_authid"]), uploads <name>.tif into
    options["remote_folder"] (default "public"), optionally deletes the
    intermediate file, and always fires the on_completed handler.

    Returns:
        bool: True when status ended as 'completed', False otherwise.
    """
    # NOTE(review): the f-prefixed strings below contain no {} placeholders;
    # they are %-formatted, so the f-prefix is inert (left untouched here).
    QgsMessageLog.logMessage(f"GeoRectifyTask.run, process: %s" % self.name,
                             tag="OAW", level=Qgis.Info)
    try:
        self.set_status('running', 'started')
        input_tif = os.path.join(self.options["staging_folder"],
                                 self.name + ".tif")
        # Windows-specific layout: scripts live next to the QGIS prefix.
        scripts_folder = os.path.join(QgsApplication.prefixPath(), "..",
                                      "Python37/Scripts")
        geo_rectify = GeoRectifyFactory.create(
            input=input_tif,
            qgis_scripts=scripts_folder,
            min_points=self.options["min_points"],
            gdal_threads=self.options["gdal_threads"])
        geo_rectify.on_progress += self.on_progress
        geo_rectify.process()

        # Pull SFTP credentials out of the QGIS authentication manager.
        auth_id = self.options["remote_authid"]
        auth_manager = QgsApplication.authManager()
        auth_cfg = QgsAuthMethodConfig()
        auth_manager.loadAuthenticationConfig(auth_id, auth_cfg, True)
        if auth_cfg.id():
            username = auth_cfg.config('username', '')
            password = auth_cfg.config('password', '')
            uri = auth_cfg.uri()
            # call FTP task
            QgsMessageLog.logMessage(f"GeoRectifyTask.run, URI: %s" % str(uri),
                                     tag="OAW", level=Qgis.Info)
            QgsMessageLog.logMessage(f"GeoRectifyTask.run, username: %s" % str(username),
                                     tag="OAW", level=Qgis.Info)
            # Password deliberately masked in the log.
            QgsMessageLog.logMessage(f"GeoRectifyTask.run, password: %s" % "***********",
                                     tag="OAW", level=Qgis.Info)
            # upload file via SFTP
            output_tif = input_tif.replace(".tif", "_grf_fin.tif")
            remote_folder = self.options[
                "remote_folder"] if "remote_folder" in self.options else "public"
            cnopts = pysftp.CnOpts()
            cnopts.hostkeys = None  # host-key verification disabled
            with pysftp.Connection(uri, username=username, password=password,
                                   cnopts=cnopts) as sftp:
                with sftp.cd(remote_folder):
                    sftp.put(output_tif, remotepath=self.name + ".tif")
            # Remove intermediate file (if requested)
            if self.options["remove_file_after"] == Qt.Checked:
                os.remove(output_tif)
                QgsMessageLog.logMessage(
                    f"GeoRectifyTask.run, removing intermediate file: %s" %
                    output_tif, tag="OAW", level=Qgis.Info)
        else:
            raise Exception(
                "Failed to extract information from the QGIS authentication manager using authid: %s"
                % auth_id)
        self.set_status('completed', 'done')
    except Exception as e:
        # Record the failure; the on_completed handler still runs below.
        self.exception = e
        self.set_status('failed', str(e))
        QgsMessageLog.logMessage(f"GeoRectifyTask.run, exception: %s" % str(e),
                                 tag="OAW", level=Qgis.Warning)
    self.handlers["on_completed"](self)
    QgsMessageLog.logMessage(f"GeoRectifyTask.run, result: %s" % self.status,
                             tag="OAW", level=Qgis.Info)
    return self.status == 'completed'
# NOTE(review): this fragment continues a script whose earlier part (imports,
# `map`, `reg`, `lat`, `lon`, `City`, `REType`, Color_Marker, MyIcon) is
# outside this view.
map.add_child(reg)
map.save("index.html")  # Save whole map with all markers

# Definition of Smaller maps (Single entity map)
for lt, ln, ct, ty in zip(lat, lon, City, REType):
    map2 = folium.Map(location=[lt, ln], zoom_start=15)  # Small map for each property
    # New file name based on city and type of property (ASCII-folded, lowercase).
    newName = unidecode((ct + "-" + ty[:4])).lower()
    reg2 = folium.FeatureGroup(name=("Mapa" + ct))
    reg2.add_child(
        folium.Marker(location=[lt, ln],
                      popup=(ct + ", " + ty),
                      icon=folium.Icon(color=Color_Marker(ty), icon=MyIcon(ty))))
    map2.add_child(reg2)
    map2.save((newName + ".html"))  # creates new file with required name

# Check what files do we have in pwd
List_of_Files = glob.glob("*.html")

# Send all files to Server / SFTP
cnopts = pysftp.CnOpts()  # avoiding host key ... if none is provided
cnopts.hostkeys = None    # insecure: disables host-key verification

# connect to server via SFTP and save files there ...
# sys.argv[1] = host, sys.argv[2] = username, sys.argv[3] = password
try:
    Secure_Con = pysftp.Connection(sys.argv[1],
                                   username=sys.argv[2],
                                   password=sys.argv[3],
                                   cnopts=cnopts)
    for file1 in List_of_Files:
        Secure_Con.put(file1)
    Secure_Con.close()
except OSError as err:
    # FIX: previously printed the undefined name `arg`, which raised a
    # NameError inside the handler; report the actual error instead.
    print('cannot open', err)
def main(argv):
    """Python 2 entry point: parse CLI options, run speed tests over each
    configured VPN provider from a Vultr server, and ship logs/results to an
    optional SFTP host.

    NOTE(review): several string literals in the getopt call below were
    destroyed by a credential scrubber ('******'); the original presumably
    listed "auth-username=", "auth-password=", "sftp-username=",
    "sftp-password=" followed by an `except getopt.GetoptError:` handler
    printing "Options error!". Reproduced verbatim -- do not ship as-is.
    """
    vpn_services_map = {}

    #load info on VPN providers configs
    if os.path.isfile(CONFIG_FILE_VPNS):
        try:
            with open(CONFIG_FILE_VPNS) as config_file:
                vpn_services_map = json.load(config_file)
            #print "Number of VPN providers configured in file " + CONFIG_FILE_VPNS + " is: ", len(vpn_services_map)
        except ValueError as e:
            print "Error with the VPN config file: " + CONFIG_FILE_VPNS
            print str(e)
            exit()
    else:
        print "Could not find the VPN config file: " + CONFIG_FILE_VPNS
        #exit()

    helptext_vpns = ','.join(vpn_services_map.keys())
    # NOTE(review): the original line breaks inside this help string were lost
    # when the file was collapsed; layout below is reconstructed.
    helptext = """vpnspeedtest.py
    --config         Name (and path) of the OpenVPN config file to connect with.
    --vpn            Name of the VPN service to test.
    --vpn-list       Name(s) of the VPN service(s) to test (comma separated list).
    --auth-username  VPN username (only if one VPN service is being tested) [optional]
    --auth-password  VPN password (only if one VPN service is being tested) [optional]
    --sftp-host      Domain or IP address of the SFTP server to send logs and results to [optional]
    --sftp-username  SFTP username [optional]
    --sftp-password  SFTP password [optional]

    Examples:
    python vpnspeedtest.py --config vpn_configs/privateinternetaccess/region.ovpn
    python vpnspeedtest.py --vpn=privateinternetaccess --auth-username=p1234567 --auth-password=password
    python vpnspeedtest.py --vpn-list=privateinternetaccess,alternative1,alternative2

    An installation and user guide is available at *LINK*
    """
    helptext += "Available VPNs for --vpn= and --vpn-list=" + helptext_vpns + "\n"

    dns_lookup_list = []
    vpn_config_file = ''
    vpn_providers = ''
    test_location = ''
    test_id = ''
    torrent_url = ''
    https_url = ''
    auth_username = ''
    auth_password = ''
    #auth_zip_url = ''
    sftp_host = ''
    sftp_username = ''
    sftp_password = ''

    #load list of DNS entries to check
    if os.path.isfile(CONFIG_FILE_DNS):
        try:
            with open(CONFIG_FILE_DNS) as config_file:
                dns_lookup_list = json.load(config_file)
            #print "Number of DNS entries to test:", len(dns_lookup_list)
        except ValueError as e:
            print "Error with the DNS config file: " + CONFIG_FILE_DNS
            print str(e)
            exit()
    else:
        print "Could not find the DNS config file: " + CONFIG_FILE_DNS
        exit()

    #download the Vultr metadata to determine server location
    try:
        vultr_metadata_req = ['curl', '-s', VULTR_METADATA_URL]
        vultr_metadata_str = subprocess.check_output(vultr_metadata_req)
        vultr_metadata = json.loads(vultr_metadata_str)
    except subprocess.CalledProcessError as e:
        print "curl error:", str(e)
        print "Are you running this on a Vultr server?"
        exit()
    else:
        test_id = vultr_metadata['instanceid']
        if not test_location:
            vultr_region_code = vultr_metadata['region']['regioncode']
            test_location = VULTR_REGIONS[vultr_region_code]

    print "Testing from " + test_location + " with testID:", test_id

    try:
        opts, args = getopt.getopt(argv, "", [
            "help", "config=", "vpn=", "vpn-list=",
            # NOTE(review): scrubbed/garbled literals begin here (see docstring).
            "auth-username="******"auth-password="******"torrent-url=",
            "https-url=", "sftp-host=",
            "sftp-username="******"sftp-password="******"Options error!"
        print(helptext)
        sys.exit(2)

    for opt, arg in opts:
        if opt in ("--help"):
            print(helptext)
            sys.exit(2)
        elif opt in ("--config"):
            vpn_config_file = arg
        elif opt in ("--vpn-list") or opt in ("--vpn"):
            vpn_providers = arg.lower()
        elif opt in ("--auth-username"):
            auth_username = arg
        elif opt in ("--auth-password"):
            auth_password = arg
        elif opt in ("--torrent-url"):
            torrent_url = arg
        elif opt in ("--https-url"):
            https_url = arg
        elif opt in ("--sftp-host"):
            sftp_host = arg
        elif opt in ("--sftp-username"):
            sftp_username = arg
        elif opt in ("--sftp-password"):
            sftp_password = arg

    if not vpn_providers and not vpn_config_file:
        print(
            "You need to specify at least one VPN service name in --vpn-list=myvpn or one OpenVPN config file with --config=MyFile.ovpn"
        )
        print(helptext)
        exit()

    #if user has specified an openvpn config file with --config
    if vpn_config_file:
        if not os.path.isfile(vpn_config_file):
            print "OpenVPN config file specified with --config not found: " + vpn_config_file
            exit()
        else:
            # NOTE(review): bare string statement -- has no effect (probably a
            # forgotten `print`).
            "Setting up unknown VPN"
            unknown_vpn = UNKNOWN_VPN
            vpn_providers = unknown_vpn
            vpn_test_locations = {
                test_location: [vpn_config_file]
            }  #later on we expect a list of filenames for this location
            vpn_services_map[
                unknown_vpn] = vpn_test_locations  #store the openvpn filename for this unknown vpn

    if not torrent_url:
        print "--torrent-url was not specified. Using the default speedtest torrent URL: " + DEFAULT_TORRENT_URL
        torrent_url = DEFAULT_TORRENT_URL

    if not https_url:
        print "--https-url was not specified. Using the default speedtest HTTPS URL: " + DEFAULT_HTTPS_URL
        https_url = DEFAULT_HTTPS_URL

    if auth_username and not auth_password:
        print "You supplied auth-username but you need to specify auth_password too."
        print(helptext)
        sys.exit(2)

    if not auth_username and auth_password:
        print "You supplied auth-password but you need to specify auth-username too."
        print(helptext)
        sys.exit(2)

    if sftp_host:
        if not sftp_username:
            print "You supplied sftp-host but you need to specify sftp-username and sftp-password too."
            print(helptext)
            sys.exit(2)
        if not sftp_password:
            print "You supplied sftp-host and sftp-username but you need to specify sftp-password too."
            print(helptext)
            sys.exit(2)

    vpn_providers_list = vpn_providers.split(',')

    if vpn_providers_list and not vpn_config_file:
        #create auth file if only one vpn is being tested
        if len(vpn_providers_list) > 1 and auth_username:
            print "You can only use auth-username and auth-password if you are testing one VPN service."
            print(helptext)
            sys.exit(2)

        #create the auth file
        if len(vpn_providers_list) == 1 and auth_username and auth_password:
            vpn_provider = vpn_providers_list[0]
            auth_file = open('vpn_auth/' + vpn_provider + '.txt', 'w')
            auth_file.write(auth_username + '\n')
            auth_file.write(auth_password + '\n')
            auth_file.close()

        #check that the vpn service has configs and an auth file for this location
        for vpn_provider in vpn_providers_list:
            if vpn_provider not in vpn_services_map:
                print vpn_provider + " is not supported (yet)"
                continue
                #exit()
            vpn_test_locations = vpn_services_map[vpn_provider]
            if test_location not in vpn_test_locations:
                print "OpenVPN config files not defined for VPN service " + vpn_provider + " and location " + test_location
                print "Supported locations for " + vpn_provider + " are:"
                for location in vpn_test_locations:
                    print location
                #exit()
            vpn_auth_file = "vpn_auth/" + vpn_provider + ".txt"
            if not os.path.isfile(vpn_auth_file):
                print "No username/password file found for " + vpn_provider
                print "Please create vpn_auth/" + vpn_provider + ".txt with the vpn username on line #1 and the vpn password on line #2"
                #exit()

    #download the torrent file we'll use for testing
    try:
        print "Downloading the torrent file from: " + torrent_url
        #download_torrent_info = torrent_url.split('/')
        #torrent_filename = download_torrent_info[-1]
        torrent_filename = 'vpnspeedtest.torrent'
        download_torrent = [
            'curl', '-s', '-o', 'torrents/' + torrent_filename, torrent_url
        ]
        download_torrent_result = subprocess.check_output(download_torrent)
    except subprocess.CalledProcessError as e:
        print "curl error:", str(e)
        exit()
    else:
        print "Downloaded torrent file OK"

    log_to_terminal = True
    if (len(vpn_providers_list) > 1):
        log_to_terminal = False
        print "Since more than 1 VPN is being tested the logging will be redirected to log files instead of the terminal."

    speed_test_results = []
    try:
        #for vpn_provider in vpn_providers_list:
        # Providers are popped in random order to avoid systematic bias.
        while vpn_providers_list:
            vpn_provider = vpn_providers_list.pop(
                random.randrange(len(vpn_providers_list)))
            print 'Testing VPN service:', vpn_provider
            vpn_test_locations = vpn_services_map[vpn_provider]
            openvpn_filenames = vpn_test_locations[test_location]
            if not openvpn_filenames:
                print "ERROR: No OpenVPN config files configured for " + vpn_provider
                continue
            try:
                speedtest_metadata = testVPN(vpn_provider, test_location,
                                             test_id, openvpn_filenames,
                                             dns_lookup_list, https_url,
                                             torrent_filename, torrent_url,
                                             log_to_terminal)
                speedtest_metadata[
                    "timestamp-end-iso"] = datetime.datetime.utcnow(
                    ).isoformat("T") + "Z"  #RFC3339 UTC
                speed_test_results.append(speedtest_metadata)
                sys.stdout = sys.__stdout__  #restore stdout - since testVPN() redirects them to log files
                #write all speedtest metadata to json file
                with open(LOG_DIR + speedtest_metadata["log-results"],
                          'w') as json_results:
                    json.dump(speedtest_metadata,
                              json_results,
                              indent=4,
                              sort_keys=True)
            except KeyboardInterrupt:
                raise
            except:
                # NOTE(review): bare except + exc_info()[0] logs only the
                # exception *class*, not its message.
                e = sys.exc_info()[0]
                print "Fatal error while testing " + vpn_provider + " was: " + str(
                    e)
            try:
                if sftp_host and sftp_username and sftp_password:
                    print "sending test result via SFTP"
                    cnopts = pysftp.CnOpts()
                    cnopts.hostkeys = None  # host-key verification disabled
                    with pysftp.Connection(sftp_host,
                                           username=sftp_username,
                                           password=sftp_password,
                                           cnopts=cnopts) as sftp:
                        if os.path.isfile(
                                LOG_DIR + speedtest_metadata['log-bittorrent']):
                            sftp.put(LOG_DIR +
                                     speedtest_metadata['log-bittorrent'],
                                     preserve_mtime=True)
                        if os.path.isfile(LOG_DIR +
                                          speedtest_metadata['log-openvpn']):
                            sftp.put(LOG_DIR + speedtest_metadata['log-openvpn'],
                                     preserve_mtime=True)
                        if os.path.isfile(LOG_DIR +
                                          speedtest_metadata['log-results']):
                            sftp.put(LOG_DIR + speedtest_metadata['log-results'],
                                     preserve_mtime=True)
                        if os.path.isfile(LOG_DIR +
                                          speedtest_metadata['log-speedtest']):
                            sftp.put(LOG_DIR +
                                     speedtest_metadata['log-speedtest'],
                                     preserve_mtime=True)
                        #send OK file to indicate we're done sending log files for this result
                        OK_filename = LOG_DIR + speedtest_metadata[
                            'log-results'] + '.OK'
                        f = open(OK_filename, 'w')
                        f.write('OK')
                        f.close()
                        sftp.put(OK_filename, preserve_mtime=True)
                        sftp.close()
            except KeyboardInterrupt:
                raise
            except IOError as e:
                print "Error while sending logs for " + vpn_provider + " was: " + str(
                    e)
            except:
                e = sys.exc_info()[0]
                print "Error while sending logs for " + vpn_provider + " was: " + str(
                    e)
    except KeyboardInterrupt:
        print 'KeyboardInterrupt caught'
        disconnectVPN()

    results_json = json.dumps(speed_test_results, indent=4, sort_keys=True)
    print "Speed test results are:\n", results_json
    results_filename = LOG_DIR + test_id + '.json'
    results_file = open(results_filename, 'w')
    results_file.write(results_json)
    results_file.close()

    if sftp_host and sftp_username and sftp_password:
        print "sending server done message via SFTP"
        cnopts = pysftp.CnOpts(knownhosts=None)
        cnopts.hostkeys = None
        with pysftp.Connection(sftp_host,
                               username=sftp_username,
                               password=sftp_password,
                               cnopts=cnopts) as sftp:
            #send the .json results which indicates test is finished
            sftp.put(results_filename, preserve_mtime=True)
            if os.path.isfile(VULTR_STARTUP_LOG):
                sftp.put(VULTR_STARTUP_LOG,
                         remotepath=test_id + '.log',
                         preserve_mtime=True)
            sftp.close()

    print "Done!"
    exit()
def put_rows(auth, destination, filename, rows, variant=''): """Processes standard write JSON block for dynamic export of data. Allows us to quickly write the results of a script to a destination. For example write the results of a DCM report into BigQuery. - Will write to multiple destinations if specified. - Extensible, add a handler to define a new destination ( be kind update the documentation json ). Include the following JSON in a recipe, then in the run.py handler when encountering that block pass it to this function and use the returned results. from utils.data import put_rows var_json = { "out":{ "bigquery":{ "dataset": [ string ], "table": [ string ] "schema": [ json - standard bigquery schema json ], "skip_rows": [ integer - for removing header ] "disposition": [ string - same as BigQuery documentation ] }, "sheets":{ "url":[ string - full URL, suggest using share link ], "tab":[ string ], "range":[ string - A1:A notation ] "delete": [ boolean - if sheet range should be cleared before writing ] }, "storage":{ "bucket": [ string ], "path": [ string ] }, "directory":[ string - full path to place to write file ] } } values = put_rows('user', var_json) Or you can use it directly with project singleton. from util.project import project from utils.data import put_rows @project.from_parameters def something(): values = get_rows(project.task['auth'], project.task['out']) if __name__ == "__main__": something() Args: auth: (string) The type of authentication to use, user or service. destination: (json) A json block resembling var_json described above. filename: (string) A unique filename if writing to medium requiring one, Usually gnerated by script. rows ( list ) The data being written as a list object. variant ( string ) Appends this to the destination name to create a variant ( for example when downloading multiple tabs in a sheet ). Returns: If single_cell is False: Returns a list of row values [[v1], [v2], ... 
] If single_cell is True: Returns a list of values [v1, v2, ...] """ if 'bigquery' in destination: if destination['bigquery'].get('format' , 'CSV') == 'JSON': json_to_table( destination['bigquery'].get('auth', auth), destination['bigquery'].get('project_id', project.id), destination['bigquery']['dataset'], destination['bigquery']['table'] + variant, rows, destination['bigquery'].get('schema', []), destination['bigquery'].get('disposition', 'WRITE_TRUNCATE'), ) elif destination['bigquery'].get('is_incremental_load', False) == True: incremental_rows_to_table( destination['bigquery'].get('auth', auth), destination['bigquery'].get('project_id', project.id), destination['bigquery']['dataset'], destination['bigquery']['table'] + variant, rows, destination['bigquery'].get('schema', []), destination['bigquery'].get('skip_rows', 1), #0 if 'schema' in destination['bigquery'] else 1), destination['bigquery'].get('disposition', 'WRITE_APPEND'), billing_project_id=project.id ) else: rows_to_table( destination['bigquery'].get('auth', auth), destination['bigquery'].get('project_id', project.id), destination['bigquery']['dataset'], destination['bigquery']['table'] + variant, rows, destination['bigquery'].get('schema', []), destination['bigquery'].get('skip_rows', 1), #0 if 'schema' in destination['bigquery'] else 1), destination['bigquery'].get('disposition', 'WRITE_TRUNCATE'), ) if 'sheets' in destination: if destination['sheets'].get('delete', False): sheets_clear(auth, destination['sheets']['tab'] + variant, destination['sheets']['range'], sheet_url=destination['sheets'].get('sheet', None), sheet_name=destination['sheets'].get('sheet_name', None)) sheets_write(auth, destination['sheets']['tab'] + variant, destination['sheets']['range'], rows, sheet_url=destination['sheets'].get('sheet', None), sheet_name=destination['sheets'].get('sheet_name', None)) if 'directory' in destination: file_out = destination['directory'] + variant + filename if project.verbose: print 'SAVING', 
file_out makedirs_safe(parse_path(file_out)) with open(file_out, 'wb') as save_file: save_file.write(rows_to_csv(rows).read()) if 'storage' in destination and destination['storage'].get('bucket') and destination['storage'].get('path'): # create the bucket bucket_create(auth, project.id, destination['storage']['bucket']) # put the file file_out = destination['storage']['bucket'] + ':' + destination['storage']['path'] + variant + filename if project.verbose: print 'SAVING', file_out object_put(auth, file_out, rows_to_csv(rows)) # deprecated do not use if 'trix' in destination: trix_update(auth, destination['trix']['sheet_id'], destination['trix']['sheet_range'], rows_to_csv(rows), destination['trix']['clear']) if 'email' in destination: pass if 'sftp' in destination: try: sys.stderr = StringIO(); cnopts = pysftp.CnOpts() cnopts.hostkeys = None file_prefix = 'report' if 'file_prefix' in destination['sftp']: file_prefix = destination['sftp'].get('file_prefix') del destination['sftp']['file_prefix'] #sftp_configs = destination['sftp'] #sftp_configs['cnopts'] = cnopts #sftp = pysftp.Connection(**sftp_configs) sftp = pysftp.Connection(host=destination['sftp']['host'], username=destination['sftp']['username'], password=destination['sftp']['password'], port=destination['sftp']['port'], cnopts=cnopts) if 'directory' in destination['sftp']: sftp.cwd(destination['sftp']['directory']) tmp_file_name = '/tmp/%s_%s.csv' % (file_prefix, datetime.datetime.now().strftime('%Y-%m-%dT%H-%M-%S')) tmp_file = open(tmp_file_name, 'wb') tmp_file.write(rows_to_csv(rows).read()) tmp_file.close() sftp.put(tmp_file_name) os.remove(tmp_file_name) sys.stderr = sys.__stderr__; except e: print e traceback.print_exc()
import pysftp
import keyring
import os
import io
import glob
import sys
from itertools import islice
from datetime import datetime, timedelta

username = "******"  # username for Windows credential (value scrubbed)
cred_name = "EXAMPLE_CRED"  # Windows credential name
password = keyring.get_password(cred_name, username)  # get Windows credential password
hostname = "www.example.com"  # host to connect to
cnopts = pysftp.CnOpts(knownhosts=None)  # bypass 'hostkeys' error
cnopts.hostkeys = None  # bypass 'hostkeys' error (disables host-key verification)


def process_files(directory_structure):
    # Pick the newest file from the local staging folder for SFTP upload.
    # NOTE(review): this definition appears truncated at the end of this
    # chunk -- the upload/archive steps presumably follow; `archive_folder`
    # and `remote_dir` are unused within the visible portion.
    files_in_folder = glob.glob(
        "Y:\\FOLDER_NAME\\*")  # get files to upload from local folder
    archive_folder = "Y:\\FOLDER_NAME\\SUB_FOLDER_NAME\\"  # get archive folder
    if len(files_in_folder) == 0:  # if folder contains no files, then pass
        pass
    else:
        local_file_path = max(files_in_folder,
                              key=os.path.getctime)  # file to upload to sftp
        filename = os.path.basename(
            local_file_path
        )  # extract just the filename for the last uploaded file
        remote_dir = "dir_name_you_need_to_upload_to"  # sftp directory to upload to
import pysftp as sftp

# Connection options: host-key checking disabled entirely (insecure; only
# acceptable for a trusted host on a trusted network).
cnopts = sftp.CnOpts()
cnopts.hostkeys = None

myHostname = '107.180.28.75'
myUsername = '******'  # scrubbed credential
myPassword = '******'  # scrubbed credential
path_file = 'updatedlibrarycatalog.csv'
remote_path = 'booklibrary'


def download_file_to_local():
    """List the remote home directory and the 'booklibrary' folder, then
    download `path_file` into the local working directory."""
    with sftp.Connection(host=myHostname,
                         username=myUsername,
                         password=myPassword,
                         cnopts=cnopts) as s:
        print("Connection successfully established...")
        print("Obtaining strcture of remote directory...")
        directory_structure = s.listdir_attr()
        print("Printing data")
        for attr in directory_structure:
            print(attr)
        print("Chdir to Rugutt Library Folder...")
        with s.cd(remote_path):
            booklibrary_structure = s.listdir_attr()
            print("Printing data")
            for attr in booklibrary_structure:
                print(attr)
            # NOTE(review): original indentation was lost; the download is
            # assumed to happen inside the 'booklibrary' folder -- confirm.
            try:
                s.get(path_file)
                print("File Successfully Downloaded")
            # FIX: was a bare `except:` which also swallowed SystemExit and
            # KeyboardInterrupt; narrowed to Exception. (Message kept as-is,
            # though the failure here is the download, not the connection.)
            except Exception:
                print("Could not connect to ftp")
def open(self):
    """Open the SFTP connection and change into the configured root directory.

    Reads behavior from target options (prompting, password storage,
    verbosity, host-key verification), retries the login interactively on
    authentication failure when prompting is allowed, then navigates to
    ``self.root_dir`` and verifies the working directory.

    Raises:
        RuntimeError: if 'ftp_active' is set, or navigation to root_dir fails.
        paramiko.ssh_exception.AuthenticationException: login rejected and
            no interactive retry is possible.
        paramiko.ssh_exception.SSHException: e.g. unknown host key.
    """
    assert not self.ftp_socket_connected
    super(SFTPTarget, self).open()
    options = self.get_options_dict()
    no_prompt = self.get_option("no_prompt", True)
    store_password = self.get_option("store_password", False)
    verbose = self.get_option("verbose", 3)
    verify_host_keys = not self.get_option("no_verify_host_keys", False)
    # SFTP runs over SSH; FTP-style active/passive mode does not apply.
    if self.get_option("ftp_active", False):
        raise RuntimeError("SFTP does not have active/passive mode.")
    if verbose <= 3:
        # Keep paramiko's transport chatter out of normal output.
        logging.getLogger("paramiko.transport").setLevel(logging.WARNING)
    write("Connecting {}:*** to sftp://{}".format(self.username, self.host))

    cnopts = pysftp.CnOpts()
    cnopts.log = self.get_option("ftp_debug", False)
    if not verify_host_keys:
        # Caller explicitly opted out of host-key checking.
        cnopts.hostkeys = None
    if self.username is None or self.password is None:
        # Fall back to credentials stored for this host.
        creds = get_credentials_for_url(
            self.host, options, force_user=self.username
        )
        if creds:
            self.username, self.password = creds

    assert self.sftp is None
    # Retry loop: on auth failure, optionally prompt and try again.
    while True:
        try:
            self.sftp = pysftp.Connection(
                self.host,
                username=self.username,
                password=self.password,
                port=self.port,
                cnopts=cnopts,
            )
            break
        except paramiko.ssh_exception.AuthenticationException as e:
            write_error(
                "Could not login to {}@{}: {}".format(self.username, self.host, e)
            )
            if no_prompt or not self.username:
                raise
            creds = prompt_for_password(self.host, self.username)
            self.username, self.password = creds
            # Continue while-loop
        except paramiko.ssh_exception.SSHException as e:
            write_error(
                "{exc}: Try `ssh-keyscan HOST` to add it "
                "(or pass `--no-verify-host-keys` if you don't care about security).".format(
                    exc=e
                )
            )
            raise

    if verbose >= 4:
        write(
            "Login as '{}'.".format(self.username if self.username else "anonymous")
        )
    if self.sftp.logfile:
        write("Logging to {}".format(self.sftp.logfile))
    self.sftp.timeout = self.timeout
    self.ftp_socket_connected = True
    try:
        self.sftp.cwd(self.root_dir)
    except IOError as e:
        # if not e.args[0].startswith("550"):
        #     raise  # error other then 550 No such directory'
        write_error(
            "Could not change directory to {} ({}): missing permissions?".format(
                self.root_dir, e
            )
        )
    pwd = self.pwd()
    # pwd = self.to_unicode(pwd)
    if pwd != self.root_dir:
        raise RuntimeError(
            "Unable to navigate to working directory {!r} (now at {!r})".format(
                self.root_dir, pwd
            )
        )
    self.cur_dir = pwd
    # self.ftp_initialized = True
    # Successfully authenticated: store password
    if store_password:
        save_password(self.host, self.username, self.password)
    self._lock()
    return
def startUploading(self):
    """Wait for the media-server wifi, then push every recorded .h264 clip to it.

    Probes for the "OakOne" network up to ten times (aborting early if the
    ignition comes on), then uploads each local .h264 file over SFTP and
    deletes it locally. Returns True after a completed upload pass, False
    when wifi never appeared or the ignition interrupted the wait.
    """
    print("u have 5 seconds to turn on the ignition")
    time.sleep(5)

    finder = Finder(server_name="OakOne", password="******", interface="wlan0")
    response = finder.run()

    attempts = 0
    while not response:
        attempts += 1
        self.ignitionStatus = GPIO.input(17)
        # Stop waiting as soon as the ignition comes on.
        if self.ignitionStatus:
            break
        if attempts >= 10:
            print("breaking here")
            break
        time.sleep(2)
        print('waiting for a second to try again - ' + str(attempts))
        response = finder.run()

    # Upload only when the network was found and the ignition stayed off.
    if not (response == True and self.ignitionStatus == False):
        return False

    time.sleep(10)
    cnopts = pysftp.CnOpts()
    cnopts.hostkeys = None  # skip host-key verification
    print("**************************** hostkeys none")
    with pysftp.Connection(host=myHostname, username=myUsername,
                           password=myPassword, cnopts=cnopts) as sftp:
        print("=========================> pysftp connection successfull")
        for clip in os.listdir(self.home):
            if ".h264" not in clip:
                continue
            print(clip)
            # Upload, then remove the local copy once it is on the server.
            sftp.put(self.home + clip, mediaStorageLocation + clip,
                     self.uploadCallback)
            os.remove(self.home + clip)
            print("\nuploaded file -" + clip)
        print("file upload successfull")
        sftp.close()
    self.processStarted = False
    print("this is a good time to turn on the ignition")
    time.sleep(10)
    return True
def initiate_connection(self):
    """Open a pysftp connection, trusting unknown hosts on first use.

    A host already present in known_hosts is verified normally; an unknown
    host is connected to without verification and its key is appended to
    known_hosts afterwards. Authentication prefers a plaintext password,
    then falls back to ~/.ssh/id_rsa (optionally password-protected).

    Returns:
        The open pysftp.Connection.

    Raises:
        ssh_exception.BadAuthenticationType: no usable credentials exist.
        paramiko.SSHException: re-raised (after logging) on connect failure.
    """
    # Approach adapted from:
    # https://stackoverflow.com/questions/53666106/use-paramiko-autoaddpolicy-with-pysftp
    pending_hostkeys = None
    cnopts = pysftp.CnOpts()  # loads hostkeys from known_hosts.ssh
    if cnopts.hostkeys.lookup(self.hostname) is None:
        logging.debug('Key for host: ' + self.hostname +
                      ' was not found in known_hosts')
        # First contact: keep the key store around and disable verification.
        pending_hostkeys = cnopts.hostkeys
        cnopts.hostkeys = None

    conn_kwargs = {
        'host': self.hostname,
        'username': self.username,
        'cnopts': cnopts
    }

    # Choose an authentication method from the credentials we were given.
    default_key = os.path.expanduser('~') + '/.ssh/id_rsa'
    if self.password is not None:
        logging.debug('Using plaintext authentication')
        conn_kwargs['password'] = self.password
    elif os.path.isfile(default_key):
        # ~/.ssh/id_rsa exists - use it as the (default) private key.
        logging.debug('Got SSH key: ' + default_key)
        conn_kwargs['private_key'] = default_key
        if self.private_key_password is not None:
            logging.debug(
                'Using public key authentication with DER-encoded private key'
            )
            conn_kwargs['private_key_pass'] = self.private_key_password
        else:
            logging.debug(
                'Using public key authentication with plaintext private key'
            )
    else:
        raise ssh_exception.BadAuthenticationType(
            'No supported authentication methods available',
            ['password', 'public_key'])

    logging.debug('Connecting using arguments: ' + str(conn_kwargs))
    try:
        connection = pysftp.Connection(**conn_kwargs)
    except paramiko.SSHException as e:
        logging.critical(e)
        raise

    # First contact succeeded: persist the server's key for next time.
    if pending_hostkeys is not None:
        logging.debug('Appending new hostkey for ' + self.hostname +
                      ' to known_hosts, and writing to disk...')
        pending_hostkeys.add(self.hostname,
                             connection.remote_server_key.get_name(),
                             connection.remote_server_key)
        pending_hostkeys.save(pysftp.helpers.known_hosts())

    return connection