def setUpClass(self):
    """Load the locations fixture through the Uploader, keep its messages,
    then run a second, un-buffered pass over the study-less variant."""
    super(TestLocation, self).setUpClass()
    uploader = Uploader(self._config_file)
    uploader.use_message_buffer = True
    column_spec = json.loads('''{ "values": { "sample_oxford_id": { "column": 0, "type": "string" }, "study_id": { "column": 1, "type": "string" }, "latitude": { "column": 2, "type": "float" }, "longitude": { "column": 3, "type": "float" }, "location_name": { "column": 4, "type": "string" }, "country": { "column": 5, "type": "string" }, "proxy_latitude": { "column": 6, "type": "float" }, "proxy_longitude": { "column": 7, "type": "float" }, "proxy_location_name": { "column": 8, "type": "string" } } }''')
    uploader.load_data_file(column_spec, 'locations.tsv')
    self._messages = uploader.message_buffer
    self.setUpSSR()
    # Second load: fresh Uploader, message buffering deliberately left off.
    uploader = Uploader(self._config_file)
    # uploader.use_message_buffer = True
    column_spec = json.loads(self._ag_json)
    uploader.load_data_file(column_spec, 'loc_no_study.tsv')
def setUpClass(self):
    """Load the multiple-study fixture and capture the Uploader's messages."""
    super(TestSampling_Event, self).setUpClass()
    uploader = Uploader(self._config_file)
    uploader.use_message_buffer = True
    column_spec = json.loads('''{ "values": { "sample_oxford_id": { "column": 0, "type": "string" }, "study_id": { "column": 1, "type": "string" }, "latitude": { "column": 2, "type": "string" }, "longitude": { "column": 3, "type": "string" }, "location_name": { "column": 4, "type": "string" } } }''')
    uploader.load_data_file(column_spec, 'multiple_study.tsv')
    self._messages = uploader.message_buffer
def upload_crawls(db_session):
    """Upload the latest crawl of every enabled spider that needs it.

    Spiders whose last crawl finished processing (or previously hit upload
    errors) are (re)uploaded; the crawl's status/timestamp is written back and
    committed per spider, then an e-mail notification is attempted.
    NOTE: Python 2 module (print statements, `except X, e` syntax).
    """
    notifier = EmailNotifier(config.SMTP_USER, config.SMTP_PASS, config.SMTP_FROM,
                             config.SMTP_HOST, config.SMTP_PORT)
    uploader = Uploader()
    # `== True` is intentional: SQLAlchemy column expressions need the
    # comparison operator to build SQL; `is True` would not work here.
    crawls = db_session.query(Spider).join(Crawl).\
        filter(Spider.enabled == True,
               Crawl.status.in_(['processing_finished', 'upload_errors']))
    for spider in crawls.all():
        if upload_required(spider):
            # Nothing to ship for an empty crawl.
            if spider.crawls[-1].products_count < 1:
                print 'Not uploading crawl with 0 products'
                continue
            print 'Uploading for', spider.name
            try:
                upload_changes(uploader, spider)
                spider.crawls[-1].status = 'upload_finished'
                spider.crawls[-1].uploaded_time = datetime.now()
            except Exception:
                # Mark failed so the filter above retries it on the next run.
                spider.crawls[-1].status = 'upload_errors'
            # Commit per spider so one failure doesn't roll back the rest.
            db_session.add(spider.crawls[-1])
            db_session.commit()
            try:
                _send_notification(notifier, spider.crawls[-1], spider)
            except EmailNotifierException, e:
                print "Failed to send notifications: %s" % e
def _init_uploader(self):
    """Build an Uploader wired to this object's hosting credentials."""
    hosting_settings = {
        'region_name': self.hosting_region,
        'endpoint_url': self.hosting_endpoint_url,
        'access_key': self.hosting_access_key,
        'secret': self.hosting_secret,
    }
    return Uploader(**hosting_settings)
def setUpClass(self):
    """Load the dates fixture (doc parsed as %Y-%m-%d) and keep messages."""
    super(TestDate, self).setUpClass()
    uploader = Uploader(self._config_file)
    uploader.use_message_buffer = True
    column_spec = json.loads('''{ "values": { "sample_oxford_id": { "column": 0, "type": "string" }, "study_id": { "column": 1, "type": "string" }, "doc": { "column": 2, "type": "datetime", "date_format": "%Y-%m-%d" }, "doc_accuracy": { "column": 3, "type": "string" } } }''')
    uploader.load_data_file(column_spec, 'dates.tsv')
    self._messages = uploader.message_buffer
def test_uploader(self):
    """Drain the progress queue until the upload pool goes idle."""
    worker = Uploader(self.files_list, 12, self.q)
    worker.start()
    while worker.is_active():
        report = self.q.get()
        print(report.done, report.error, report.total)
    self.print_result(worker.result)
def __init__(self, port):
    """Bind a REP socket on this machine and wire up the DB-backed services."""
    self.username = None
    self.password = None
    self.socket = zhelper.newServerSocket(zmq.REP, getCurrMachineIp(), port)
    self.db = Db("TrackerDB")
    # All three helpers share the same socket and database.
    self.uploader = Uploader(self.socket, self.db)
    self.downloader = Downloader(self.socket, self.db)
    self.fileExplorer = FileExplorer(self.socket, self.db)
def __switch(self, xiinArgDict):
    """
    Traffic director.

    Dispatches on the parsed arguments: write info to a file, display it,
    grep it, or FTP-upload it. Exits with code 7 on an unrecognized mode.
    """
    from reader import Reader
    reader = Reader()

    # Write output
    if xiinArgDict.filename is not None:
        print('Starting xiin...')
        print('')
        with open(xiinArgDict.filename, 'w') as xiinArgDict.outputFile:
            reader.info(xiinArgDict)
    # Displays output.
    elif xiinArgDict.display:
        print('Starting xiin...')
        print('')
        reader.info(xiinArgDict)
    elif xiinArgDict.grep is not None:
        print('Starting xiin...')
        print('')
        print('Searching files...')
        print('')
        self.grepXiinInfo(xiinArgDict.grep)
    elif xiinArgDict.upload is not None:
        # xiin.ftp = {'source': '', 'destination': '', 'uname': '', 'password': ''}
        from uploader import Uploader
        xiinArgDict.ftpSource = None
        xiinArgDict.ftpDestination = None
        xiinArgDict.ftpUname = None
        xiinArgDict.ftpPwd = None
        if len(xiinArgDict.upload) > 0:
            xiinArgDict.ftpSource = xiinArgDict.upload[0]
            xiinArgDict.ftpDestination = xiinArgDict.upload[1]
            if len(xiinArgDict.upload) > 2:
                # Legacy support: anonymous logins carry no credentials.
                # BUG FIX: the original tested `xiinArgDict.ftpUname is 'anon'`
                # — an identity comparison against a str literal, and ftpUname
                # was just set to None above, so the test was always False.
                # The supplied value, upload[2], is what must be inspected.
                if xiinArgDict.upload[2] in ('anon', 'anonymous'):
                    pass
                else:
                    xiinArgDict.ftpUname = xiinArgDict.upload[2]
                    xiinArgDict.ftpPwd = xiinArgDict.upload[3]
        print('Starting xiin uploader...')
        print('')
        print('Uploading debugging information...')
        print('')
        uploader = Uploader()
        uploader.upload(xiinArgDict.ftpSource, xiinArgDict.ftpDestination,
                        xiinArgDict.ftpUname, xiinArgDict.ftpPwd)
    else:
        print('ERROR: Unknown')
        exit(7)
def start(self):
    """Construct and launch the download -> tile -> upload pipeline stages."""
    reader = ProductReader(self.data_dir)
    stages = (
        Downloader(reader, self.data_dir),
        ProductTiler(reader, self.data_dir),
        Uploader(reader, self.data_dir),
    )
    # Start the stages in pipeline order, exactly as they were built.
    for stage in stages:
        stage.start()
def upload_resource_group_settings(context, deployment_name):
    """Upload the aggregated resource-group settings for *deployment_name*.

    Serializes ``context.config.aggregate_settings`` as pretty-printed JSON
    and uploads it under the deployment's resource-settings key.
    """
    settings_uploader = Uploader(context, key='{}/{}'.format(
        constant.RESOURCE_SETTINGS_FOLDER, deployment_name))
    # The return value was previously bound to an unused local; drop it.
    settings_uploader.upload_content(
        constant.DEPLOYMENT_RESOURCE_GROUP_SETTINGS,
        json.dumps(context.config.aggregate_settings, indent=4, sort_keys=True),
        'Aggregate settings file from resource group settings files')
def run():
    """Poll SRC_DIR forever, pushing non-underscore-prefixed files upstream."""
    uploader = Uploader(config.TARGET_HOST, 22, config.TARGET_USERNAME,
                        config.TARGET_PASSWORD, config.TARGET_DIR,
                        config.NUM_UPLOAD_THREADS)
    logging.debug('started')
    while True:
        # Files starting with '_' are treated as in-progress and skipped.
        candidates = []
        for name in os.listdir(config.SRC_DIR):
            if name.startswith('_'):
                continue
            candidates.append(os.path.join(config.SRC_DIR, name))
        uploaded = uploader.upload(candidates)
        logger.info('found {} new files'.format(uploaded))
        time.sleep(config.LISTDIR_INTERVAL)
def test_setup_two(self):
    """Every file in files_2 must be reported done with no errors."""
    worker = Uploader(self.files_2, self.number_of_processes_2, self.q_2)
    worker.start()
    expected_done = ['Done: ' + str(name) for name in self.files_2]
    for _ in self.files_2:
        report = self.q_2.get()
        self.assertIn(report.done, expected_done)
        self.assertEqual(report.error, 'Errors: None')
def test_setup_one(self):
    """Every file in files_1 must be reported done with no errors."""
    worker = Uploader(self.files_1, self.number_of_processes_1, self.q_1)
    worker.start()
    expected_done = ['Done: ' + str(name) for name in self.files_1]
    for _ in self.files_1:
        report = self.q_1.get()
        self.assertIn(report.done, expected_done)
        self.assertEqual(report.error, 'Errors: None')
def __init__(self):
    """Create the speech client, the uploader, and the base recognition config."""
    self.client = speech_v1.SpeechClient()
    self.uploader = Uploader()
    # Shared settings; callers presumably extend this per request — TODO confirm.
    self.base_config = dict(
        encoding=speech_v1.enums.RecognitionConfig.AudioEncoding.LINEAR16,
        enable_word_time_offsets=True,
        profanity_filter=False,
        model="default",
        enable_automatic_punctuation=True,
    )
def main():
    """Run a main program of the KSU F****r.

    Command-line dispatcher: the first argv token selects one of
    download_all / upload_to_s3 / analyze_HTMLs / create_index_DB;
    anything else (or no argument) prints the help text.
    """
    if len(sys.argv) < 2:
        help()
        return Constants.EXIT_SUCCESS
    cmd = sys.argv[1]
    #downloader = PageDownloader()
    #
    # Use it if you want to create an estimated student DB automatically.
    #
    #downloader.determine_studentID()
    #
    # Use it if you want to create an estimated student DB using your hand.
    #
    #estimated_students_db_manager = downloader.get_db_manager()
    #estimated_students_db_manager.register_studentIDs_ranging("g0846002", "g0847498")  #entrance_year=2008
    #estimated_students_db_manager.register_studentIDs_ranging("g0946010", "g0947622")  #entrance_year=2009
    #estimated_students_db_manager.register_studentIDs_ranging("g1044011", "g1045344")  #entrance_year=2010
    #estimated_students_db_manager.register_studentIDs_ranging("g1144010", "g1145505")  #entrance_year=2011
    #estimated_students_db_manager.label_traced_students_ranging("g1144010", "g1145505", datetime.date(2015,07,14))
    #estimated_students_db_manager.register_studentIDs_ranging("g1244028", "g1245397")  #entrance_year=2012
    #estimated_students_db_manager.register_studentIDs_ranging("g1344018", "g1349031")  #entrance_year=2013
    #estimated_students_db_manager.register_studentIDs_ranging("g1444026", "g1445539")  #entrance_year=2014
    #estimated_students_db_manager.register_studentIDs_ranging("g1540074", "g1547932")  #entrance_year=2015
    #
    # Download all student data using an estimated student DB above.
    #
    if cmd == "download_all":
        downloader = PageDownloader()
        downloader.download_all()
    elif cmd == "upload_to_s3":
        # Uploads the local "tmp" tree; domain-wide runs are kept for reference.
        u = Uploader()
        u.run("tmp")
        #u.run(Constants.CC_DOMAIN)
        #u.run(Constants.CSE_DOMAIN)
    #
    # Analyze and save downloaded HTMLs into "cse_student_DB.db".
    #
    elif cmd == "analyze_HTMLs":
        analyzer = StudentAnalyzer(Constants.STUDENT_TABLE_NAME)
        analyzer.analyze_HTMLs()
        #analyzer.analyze_images()
    elif cmd == "create_index_DB":
        analyzer = StudentAnalyzer(Constants.STUDENT_TABLE_NAME)
        analyzer.create_index_DB()
    else:
        help()
    return Constants.EXIT_SUCCESS
def test_uploader_stop(self):
    """Interrupt the upload pool via stop() after ~4 seconds of progress."""
    worker = Uploader(self.files_list, 12, self.q)
    worker.start()
    started_at = datetime.now()
    while worker.is_active():
        report = self.q.get()
        print(report.done, report.error, report.total)
        # check method to stop uploading and interrupt all uploading process.
        if (datetime.now() - started_at).seconds >= 4:
            worker.stop()
    self.print_result(worker.result)
def initiate_uploader(self, file_path, desc=None):
    """Initiate a multipart upload for *file_path* and return its Uploader.

    The part size is derived from the file's total length; the vault/API
    calls happen while the file handle is still open.
    """
    handle = open_file(file_path=file_path)
    with handle:
        part_size = Uploader.calc_part_size(content_length(handle))
        init_response = self.api.initiate_multipart_upload(
            self.id, part_size, desc=desc)
        upload_id = init_response['x-oas-multipart-upload-id']
        describe_response = self.api.describe_multipart(self.id, upload_id)
        return Uploader(self, describe_response, file_path=file_path)
def upload(self):
    """Upload the selected file, named after its basename without extension."""
    sink = Uploader(log=self.console_out,
                    progress=progress(self.progressText, self.progressbar))
    path = self.fileText.get()
    display_name = os.path.splitext(os.path.basename(path))[0]
    sink.upload(self.tokenText.get(), path, display_name,
                self.uploadFinished, endpoint=self._endpoint)
def test_multiproc(self):
    """The pool must actually spread work across all requested processes."""
    n_procs = 12
    here = os.path.dirname(os.path.abspath(__file__))
    files_list = []
    for dirname, dirnames, filenames in os.walk(here + '/files'):
        files_list = [os.path.join(dirname, name) for name in filenames]
    # Keep the Manager bound so its server process stays alive for the queue.
    manager = Manager()
    queue = manager.Queue()
    worker = Uploader(files_list, n_procs, queue)
    worker.start()
    worker.is_active()
    distinct_pids = {entry['pid'] for entry in worker._result_list}
    assert len(distinct_pids) == n_procs
def main():
    """Grab an image from the OS clipboard and hand it to the Uploader."""
    # Issue: if your screen is extended, make sure the `Screen show profile`
    # is LCD or normal RGB.
    try:
        clip = ImageGrab.grabclipboard()
    except BaseException as e:
        helper.notify('Error', str(e))
    else:
        if clip is None:
            helper.notify('Empty', 'The clipboard is empty')
        else:
            # Move and upload
            Uploader(clip)
def stop_rec():
    """Stop recording; when cloud saving is on, upload and e-mail the clip."""
    global rec
    rec = 0
    rec_dot.value = ""
    timer.value = 0
    camera.stop_recording()
    # Upload only when a save path was chosen AND cloud saving is enabled.
    if save_cloud == 1 and file_path != "":
        gmail_recipients = ['REDACTED']
        cloud = Uploader(output, print_emails=True)
        cloud.upload()
        cloud.email(gmail_recipients)
        camera.stop_preview()
        app.info("Cloud Status", "Successfully uploaded to cloud!")
        camera.start_preview(fullscreen=False, window=(10, -40, 400, 400))
def upload_photo(photo, auth):
    """Upload *photo* and return the new photo id, retrying on HTTP errors.

    NOTE(review): the retry loop is unbounded and has no backoff — a
    persistent server error spins forever, exactly as the original did.
    """
    while True:
        try:
            # Re-read the file each attempt. `with` guarantees the handle is
            # closed — the original leaked one file descriptor per retry.
            with open(photo, "rb") as f:
                data = f.read()
            u = Uploader(photo, data, auth)
            u.setPublic()
            req = u.getRequest()
            res = execute(req)
            return u.getPhotoIdFromResponse(res)
        except urllib2.HTTPError:
            # Transient HTTP failure: loop and try again.
            pass
def recognize_image():
    """Save the posted image, run recognition, and return the matched exhibit.

    Returns an HTTP 500 response when the upload fails or nothing is
    recognized; the temporary upload is deleted afterwards.
    """
    upl = Uploader(request.files, "file")
    upload_result = upl.upload()
    # upload() returns True on success, otherwise an error message string.
    if upload_result is not True:
        return Response(upload_result, status=500)
    # Image recognition
    objectId = trainAndTest.trainOrTest(upl.uploaded_file)
    # BUG FIX: was `objectId is ""` — an identity comparison against a str
    # literal, which is not guaranteed to match an equal empty string.
    if objectId == "":
        return Response("Bild nicht erkannt", status=500)
    jp = JsonParser(os.path.join(os.getcwd(), "flaskapp", "data.JSON"))
    jp.parse()
    exh = jp.get_item_by_id(objectId)
    upl.delete_file()
    return exh
def run_tests(key, secret):
    """End-to-end smoke test of the photo API wrappers.

    Authenticates, uploads a fixed local screenshot, creates a photoset,
    adds the photo to it, then deletes both the set and the photo.
    NOTE: Python 2 module (print statements, urllib2). The hard-coded
    filename and set id tie this to one developer's machine/account.
    """
    try:
        x = Auth(key, secret)
        x.authenticate()
    except urllib2.HTTPError as e:
        print e.read()
        raise
    filename = "/Users/riyer/Desktop/Screen Shot 2013-06-28 at 7.36.02 PM.png"
    f = open(filename, "rb")
    pic = f.read()
    u = Uploader("test_pic", pic, x)
    u.addTitle("test pic")
    u.setPublic()
    req = u.getRequest()
    try:
        handle = urllib2.urlopen(req)
        res = handle.read()
    except urllib2.HTTPError as e:
        print e.read()
        raise
    photo_id = u.getPhotoIdFromResponse(res)
    # Photoset round-trip: list, create, add the fresh photo, delete the set.
    p = Photosets(x)
    r = p.createGetListRequest()
    res = execute(r, "createGetListRequest")
    names = p.getPhotosetList(res)
    r = p.createNewSetRequest("test set", "test desc", '9404583236')
    res = execute(r, "createNewSetRequest")
    set_id = p.getPhotosetIdFromResult(res)
    r = p.createAddPhotoRequest(photo_id, set_id)
    execute(r, "createAddPhotoRequest")
    r = p.createPhotosetDeleteRequest(set_id)
    execute(r, "createPhotosetDeleteRequest")
    # Clean up the uploaded photo itself.
    photos = Photos(x)
    r = photos.createDeletePhotoRequest(photo_id)
    execute(r, "createDeletePhotoRequest")
def __init__(self, system_settings, websocket, snmp_websocket, **kwargs):
    """Wire up the mesh base: state flags, server/uploader/node instances,
    and (conditionally) the Modbus and SNMP servers.

    The construction order matters: several instances receive ``self`` and
    the servers are only started after everything is attached.
    """
    super(SleepyMeshBase, self).__init__(**kwargs)
    if 'last_syncs' not in self._defaults:
        self._defaults.update({'last_syncs': list()})
    # Internal Members #
    self._mesh_awake = True
    self._sync_type = 'timeout'
    self._save_in_progress = False
    self._sync_average = None
    self._delay_average = None
    # Instances #
    # TODO: Eliminate as many dependencies as possible
    self.system_settings = system_settings
    self.websocket = websocket
    self.snmp_websocket = snmp_websocket
    self.modbus_server = ModbusServer()
    self.snmp_server = SNMPTrapServer(self)
    self.update_interfaces = UpdateInterfaces(self)
    self.update_in_progress = self.update_interfaces.update_in_progress
    self.bridge = Bridge(self.system_settings)
    self.uploader = Uploader(self)
    self.nodes = Nodes(self.system_settings)
    self.platforms = Platforms(self.nodes)
    self.networks = Networks(self)
    self.error = BaseError(self.system_settings)
    # Servers are opt-in via system settings.
    if self.system_settings.modbus_enable:
        system_settings_dict = self.system_settings.attr_dict()
        # LOGGER.debug('Modbus Attribute Dictionary: ' + str(system_settings_dict))
        self.modbus_server.start(system_settings_dict)
    if self.system_settings.snmp_enable:
        self.snmp_server.start()
    # Overload Node Error Methods (SNMP Error Methods)#
    NodeError.send_snmp = self.snmp_server.send_snmp
    NodeError.clear_snmp = self.snmp_server.clear_snmp
def take_picture():
    """Capture a timestamped still; upload and e-mail it when cloud saving is on."""
    global output, file_path
    # Guard clause: a save path must be selected first.
    if file_path == "":
        camera.stop_preview()
        app.warn("Error", "Select File Path")
        camera.start_preview(fullscreen=False, window=(10, -40, 400, 400))
        return
    output = strftime(file_path + "/image%d%m%H%M.png", gmtime())
    camera.capture(output)
    if save_cloud == 1:  # file_path is known non-empty past the guard above
        gmail_recipients = ['REDACTED']
        cloud = Uploader(output, print_emails=True)
        cloud.upload()
        cloud.email(gmail_recipients)
        camera.stop_preview()
        app.info("Cloud Status", "Successfully uploaded to cloud!")
        camera.start_preview(fullscreen=False, window=(10, -40, 400, 400))
def test_drive():
    """Offline driving test over a saved image set.

    Runs the detector and driver on each stored frame, logs everything into
    a new per-run data-set folder, dumps the per-frame object info to CSV,
    and uploads it to the server. Network streaming paths are commented out.
    """
    images = glob.glob("bgr_data/2019-05-09_04-48-50/" + "*.jpg")
    image_num = len(images)
    print(image_num)
    # One output folder per run: set_<host>_<timestamp>.
    hostname = socket.gethostname()
    run_time = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
    folder_name = "set_" + hostname + "_" + run_time
    set_path = Constant.DATA_SET_PATH + folder_name
    os.makedirs(set_path)
    # Mirror stdout into a log file inside the run folder.
    sys.stdout = Logger(set_path + "/log.txt", sys.stdout)
    de = Detector()
    d = Driver()
    # server = Server()
    # client = Client()
    # d.client = client
    is_upload = True
    # video_stream_thread = threading.Thread(target=server.get_video_stream)
    # video_stream_thread.setDaemon(True)
    # video_stream_thread.start()
    # tl_state_thread = threading.Thread(target=client.get_tl_state)
    # tl_state_thread.setDaemon(True)
    # tl_state_thread.start()
    objects_info_dict = {}
    start = time.time()
    # i = 0
    for image in images:
        print(image.split('/')[-1])
        # Detector returns per-frame detections plus the decoded frame.
        objects_info, objects_num, image_array = de.detect(cv2.imread(image))
        d.objects_info = objects_info
        d.objects_num = objects_num
        d.image_array = image_array
        cmd = d.drive()
        # server.send_msg(cmd)
        print("commond sent to pi: ", cmd)
        # server.send_msg(cmd.encode(encoding="utf-8"))
        objects_info_dict[ObjInfoKey(image_array)] = objects_info
        print("* " * 50)
        cv2.waitKey(1)
    end = time.time()
    local_path = object_dict_to_csv(objects_info_dict, folder_name)
    print("local_path: ", local_path)
    print(end - start)
    Uploader("server_conf.conf", local_path, Constant.SERVER_DATA_PATH).upload()
    cv2.destroyAllWindows()
def OnUpload(self, event=None, path=None):
    """Upload the compiled .hex for the current file to the selected board.

    8-bit boards go through the Uploader class; 32-bit boards shell out to
    the external p32 flashing tool. Warns (dialog) when the file has not
    been saved or not been compiled yet.
    """
    if path == None:
        path = self.GetPath()
    #else: path = filename
    # GetPath() returns -1 when the buffer was never saved — presumably;
    # TODO confirm against GetPath()'s contract.
    if path != -1:
        filename = path
        filename, extension = os.path.splitext(filename)
        if os.path.exists(filename + '.hex'):
            #u = Uploader(self.displaymsg, filename, self.curBoard)
            #"""
            if self.curBoard.arch == 8:
                #try:
                u = Uploader(self.displaymsg, filename, self.curBoard)
                #except usb.USBError:  #No device
                #self.displaymsg("No device",0)
                #return
            else:
                # 32-bit path: run the external uploader, capture its stdout
                # into a scratch file, and echo it back to the UI.
                fichier = open(os.path.join(SOURCE_DIR, 'stdout'), 'w+')
                sortie = Popen([
                    os.path.join(HOME_DIR, self.osdir, 'p32', 'bin', self.u32),
                    "-w", filename + ".hex", "-r", "-n"
                ], stdout=fichier, stderr=STDOUT)
                sortie.communicate()
                fichier.seek(0)
                self.displaymsg(fichier.read(), 0)
                fichier.close()
            #"""
        else:
            # no file
            dlg = wx.MessageDialog(
                self, _('File must be verified/compiled before upload'),
                _('Warning!'), wx.OK | wx.ICON_WARNING)
            result = dlg.ShowModal()
            dlg.Destroy()
    else:
        # not saved
        dlg = wx.MessageDialog(self, _('File must be saved before upload'),
                               _('Warning!'), wx.OK | wx.ICON_WARNING)
        result = dlg.ShowModal()
        dlg.Destroy()
    if event != None:
        event.Skip()
def setUpClass(self):
    """Load the individual fixture and capture the Uploader's messages."""
    super(TestIndividual, self).setUpClass()
    uploader = Uploader(self._config_file)
    uploader.use_message_buffer = True
    column_spec = json.loads('''{ "values": { "unique_id": { "column": 2, "type": "string" }, "unique_os_id": { "column": 2, "type": "string" }, "sample_oxford_id": { "column": 3, "type": "string" }, "sample_alternate_oxford_id": { "column": 4, "type": "string" }, "sample_source_id": { "column": 6, "type": "string" }, "donor_source_code": { "column": 7, "type": "string" }, "sample_source_type": { "column": 8, "type": "string" }, "species": { "column": 11, "type": "string" } } }''')
    uploader.load_data_file(column_spec, 'individual.tsv')
    self._messages = uploader.message_buffer
def uploadfile(data, result):
    """Upload worker, executed asynchronously.

    :param data: parameter carried by the GET request (a client-side path)
    :param result: rows looked up from the database (md5 and url), or falsy
    """
    if not result:
        # No DB record: treat it as a stray test file and delete the local copy.
        filename = data.split('\\')[-1]
        path = os.path.join(WaitingUploadPath, filename)
        try:
            os.remove(path)
            print("已删除测试文件%s" % filename)
        except Exception as ex:
            print(ex)
        return
    connector_reday_for_upload(result)
    Uploader(result).run()