def do_process(self, filename, original_name, name_conversions):
    """Move an uploaded file into ASSET_PATH, unzipping it when needed.

    Returns the list of final (renamed) file paths under ASSET_PATH.
    """
    work_dir = tempfile.mkdtemp(dir=UPLOAD_PATH)
    uploaded = os.path.join(UPLOAD_PATH, filename)
    staged = os.path.join(work_dir, original_name)
    os.rename(uploaded, staged)

    # Expand archives in place; the archive file itself is then discarded.
    if UNZIP_COLLECTIONS and self.extract(staged):
        os.unlink(staged)

    processed = []
    for subdir, _dirs, names in os.walk(os.path.dirname(staged)):
        for candidate in names:
            if candidate not in name_conversions:
                continue  # only files with a registered target name survive
            source = os.path.join(subdir, candidate)
            destination = os.path.join(ASSET_PATH, name_conversions[candidate])
            os.rename(source, destination)
            Upload.get_thumbnail(destination)
            processed.append(destination)

    shutil.rmtree(work_dir)
    return processed
def do_process(self, filename, original_name, name_conversions):
    """Process an uploaded file and move it into ASSET_PATH (unzip if needed).

    Returns: list of processed file paths.
    """
    scratch = tempfile.mkdtemp(dir=UPLOAD_PATH)
    archive = os.path.join(scratch, original_name)
    os.rename(os.path.join(UPLOAD_PATH, filename), archive)

    # If the upload is an archive and extraction succeeds, drop the archive.
    if UNZIP_COLLECTIONS and self.extract(archive):
        os.unlink(archive)

    results = []
    root = os.path.dirname(archive)
    for folder, _unused, entries in os.walk(root):
        for entry in entries:
            try:
                target = name_conversions[entry]
            except KeyError:
                # No registered destination name for this file: skip it.
                continue
            final_path = os.path.join(ASSET_PATH, target)
            os.rename(os.path.join(folder, entry), final_path)
            Upload.get_thumbnail(final_path)
            results.append(final_path)

    shutil.rmtree(scratch)
    return results
def daemonUpload(conn, name, addr, listPartOwned):
    """Handle one incoming connection to the upload daemon.

    Reads a single packet from *conn*: bytes [0:4] are an ASCII opcode,
    [4:36] a file identifier and [36:] a part number.  Serves the part via
    upl.upload() when it is owned locally.  The socket is always closed.
    """
    try:
        ricevutoByte = conn.recv(const.LENGTH_PACK)
        if not ricevutoByte:
            # Empty read: peer closed or sent a malformed packet.
            tfunc.write_daemon_error(name, addr[0], "Pacchetto errato")
        elif (str(ricevutoByte[0:4], "ascii") == pack.CODE_CLOSE):
            # Shutdown request received.
            tfunc.write_daemon_success(
                "Mi è arrivata una richiesta di chiusura, saluti.")
        else:
            if str(ricevutoByte[0:4], "ascii") == pack.CODE_DOWNLOAD:  # peer wants to download -> we UPLOAD
                if pfunc.check_presence(int(ricevutoByte[36:]), ricevutoByte[4:36], listPartOwned):
                    upl.upload(ricevutoByte[4:36], ricevutoByte[36:], conn, listPartOwned, name, addr)
                else:
                    # Requested part not present locally; silently ignored.
                    #tfunc.write_daemon_error(name, addr[0], "Errore, la parte " + str(int(ricevutoByte[36:])) + " non è presente.")
                    pass
            else:
                # Unknown opcode.
                tfunc.write_daemon_error(
                    name, addr[0], "Ricevuto pacchetto sbagliato: " + str(ricevutoByte, "ascii"))
    except:
        # NOTE(review): bare except hides decoding/protocol errors — consider logging.
        pass
    finally:
        conn.close()
def upload_file():
    """Flask view: accept a file via POST, store it and register an Upload row.

    On success returns a plain-text message containing the generated link
    string and its expiry.  NOTE(review): the trailing ``return '''`` is cut
    off in this excerpt — the GET branch presumably returns an inline HTML
    form that continues past the visible text.
    """
    if request.method == 'POST':
        # check if the post request has the file part
        if 'file' not in request.files:
            flash('No file part')
            return redirect(request.url)
        file = request.files['file']
        # if user does not select file, browser also
        # submit an empty part without filename
        if file.filename == '':
            flash('No selected file')
            return redirect(request.url)
        if file and allowed_file(file.filename):
            # secure_filename strips path components / unsafe characters
            filename = secure_filename(file.filename)
            file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
            random_string = utilities.generate_random_string(30)
            # create upload object and save to db
            upload_obj = Upload(utilities.get_user_ip(), filename, random_string,
                                datetime.now().strftime('%Y-%m-%d %H:%M'),
                                utilities.get_date_time_now())
            upload_obj.save_upload_to_db()
            print("saved")
            return "Your link is: " + random_string + "\n\n" + "Expires: " + utilities.get_date_time_now()
    return '''
def file_upload_postReq_json(self, **kargs):
    """Upload every whitespace-separated file named in kargs['fname'].

    Files are resolved under <cwd>/files and posted to the JSON upload
    endpoint with a timestamp-derived name plus the given fileId/crc.
    """
    # Path separator depends on the host OS ('indows' matches 'Windows').
    separator = "\\files\\" if "indows" in platform.system() else "/files/"
    for attachment in kargs["fname"].split():
        self.uploadURL = kargs["url"]
        self.filePath = os.getcwd() + separator + attachment
        stamp = datetime.datetime.now().strftime("%d%H%M%S")
        Upload.uploadBigResource(
            kargs["url"] + "?r=fileUpload.upload",
            self.filePath,
            name=stamp,
            fileId=kargs["fileId"],
            crc=kargs["crc"],
        )
def file_upload_postReq_json(self, **kargs):
    """Post each file listed (space separated) in kargs['fname'] to the upload endpoint."""
    for current in kargs['fname'].split():
        self.uploadURL = kargs['url']
        # Resolve the attachment path under <cwd>/files, OS-appropriately.
        if 'indows' in platform.system():
            base = os.getcwd() + '\\files\\'
        else:
            base = os.getcwd() + '/files/'
        self.filePath = base + current
        # Timestamp doubles as a unique-enough upload name.
        file_id = datetime.datetime.now().strftime('%d%H%M%S')
        endpoint = kargs['url'] + '?r=fileUpload.upload'
        Upload.uploadBigResource(endpoint, self.filePath, name=file_id,
                                 fileId=kargs['fileId'], crc=kargs['crc'])
def uploadOntologies(infile, outfile):
    """Read an ontology JSON file and upload each ontology via P.upload_data.

    Parameters:
        infile: path to a JSON file mapping ontology_string -> ontology dict.
        outfile: unused here; kept for pipeline-signature compatibility.
    """
    # json.load parses straight from the file object (no intermediate string).
    with open(infile) as openfile:
        ontologies = json.load(openfile)
    # Loop through ontologies
    for ontology_string, ontology in ontologies.items():
        # Record the mapping key inside the payload so the uploader stores it.
        ontology.update({'ontology_string': ontology_string})
        P.upload_data(ontology, list(ontology.keys()), 'ontology', reset_counter=True)
def generate(self, input, target): target_base, target_ext = os.path.splitext(target) mtype = Upload.get_type(target_base) if mtype not in ['audio', 'image', 'text', 'video']: mtype = None icons = { 'audio': THUMB_ABS_ASSET_AUDIO, 'image': THUMB_ABS_ASSET_IMAGE, 'text': THUMB_ABS_ASSET_TEXT, 'video': THUMB_ABS_ASSET_VIDEO, None: THUMB_ABS_ASSET, } if mtype not in ['video', 'image']: shutil.copyfile(icons[mtype], target) return try: exe("LD_LIBRARY_PATH=%s/lib %s/bin/ffmpeg -y -i '%s' -itsoffset -%d -vcodec mjpeg -vframes 1 -an -f rawvideo -s %s '%s'" % (BASE_DIR, BASE_DIR, input, THUMB_VIDEO_OFFSET, THUMB_SIZE, target)) except: print 'Could not convert: %s -> %s' % (input, target)
def do_live(self, data):
    """Run all 'live' upload tasks for one calibrated data sample.

    Fans out to yowindow, twitter, rapid-fire services, and plot/text
    templates; files generated by templates are uploaded once each and
    deleted afterwards.  Returns True when every step succeeded.
    """
    data = self.calibrator.calib(data)
    OK = True
    yowindow_file = self.params.get('live', 'yowindow', '')
    if yowindow_file:
        self.yowindow.write_file(yowindow_file, data)
    # NOTE(review): eval() on config values executes arbitrary code from the
    # ini file — safe only while the config is trusted; ast.literal_eval
    # would parse these list literals just as well.
    for template in eval(self.params.get('live', 'twitter', '[]')):
        if not self.do_twitter(template, data):
            OK = False
    for service in eval(self.params.get('live', 'services', '[]')):
        self.services[service].RapidFire(data, True)
    uploads = []
    for template in eval(self.params.get('live', 'plot', '[]')):
        upload = self.do_plot(template)
        if upload and upload not in uploads:
            uploads.append(upload)
    for template in eval(self.params.get('live', 'text', '[]')):
        upload = self.do_template(template, data)
        if upload not in uploads:
            uploads.append(upload)
    if uploads:
        if not Upload.Upload(self.params, uploads):
            OK = False
        # Generated files are temporary: always removed after the upload.
        for file in uploads:
            os.unlink(file)
    return OK
def upload_file_old(self, **kargs):
    '''Upload one or more attachments (legacy variant).

    Input: kargs['url'] (upload endpoint) and kargs['fname'] (file names,
    space separated, resolved under <cwd>/files).
    Save: self.fidL / self.fids (comma-joined attachment ids).
    Output: the comma-joined attachment id string.
    '''
    self.fidL = []
    for file in kargs['fname'].split():
        self.uploadURL = kargs['url']
        # if 'Sanity' in os.getcwd():
        #     cwd = 'Testsuite\Sanity'
        # else:
        #     cwd = 'Testsuite'
        # self.filePath = os.getcwd().replace(cwd,'') + 'Libs\\files\\' + file
        # Windows vs POSIX path separator ('indows' matches 'Windows').
        if 'indows' in platform.system():
            self.filePath = os.getcwd() + '\\files\\' + file
        else:
            self.filePath = os.getcwd() + '/files/' + file
        fileid = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
        print '**********************************'
        attach = Upload.uploadResource(self.uploadURL, self.filePath, fileid, fileName=kargs['fname'])
        self.fidL.append(attach['id'])
    self.fids = ','.join(self.fidL)
    print '[files]: ' + kargs['fname'] + '|[file id]: ' + self.fids
    print 'Successfully upload attachments!'
    return self.fids
def __call__(self):
    """Post-processing queue callback: merge Upload.get_info() metadata into
    this task's flagged file records, then clear their queue flag.

    Polls up to RETRIES times (10 s apart) for flagged files to appear.
    """
    for i in range(RETRIES):
        files = File.get_files_by_flag(self.task_id)
        if files:
            break
        else:
            time.sleep(10)
    files = self._cleanup(files)
    if not status.status[self.task_id] == True:
        self._execution_failure(files)
    try:
        for x in range(len(files)):
            filename = os.path.join(ASSET_PATH, files[x]['filename'])
            try:
                f = Upload.get_info(filename)
            except:
                continue  # unreadable file: leave its record untouched
            for key, value in f.items():
                if key == 'filename':
                    # Store only the basename, never the absolute path.
                    value = os.path.basename(value)
                files[x][key] = value
            files[x]['queue_flag'] = None
        File.update_files(files)
    except:
        # The queue would freeze with an exception
        pass
    File.unset_flag(self.task_id)
def __call__ (self):
    """Queue callback run after task execution: enrich each flagged file
    record with Upload.get_info() data and reset its queue flag.

    Waits (up to RETRIES polls, 10 s apart) for the flagged files.
    """
    for i in range(RETRIES):
        files = File.get_files_by_flag (self.task_id)
        if files:
            break
        else:
            time.sleep (10)
    files = self._cleanup (files)
    if not status.status[self.task_id] == True:
        self._execution_failure (files)
    try:
        for x in range(len(files)):
            filename = os.path.join (ASSET_PATH, files[x]['filename'])
            try:
                f = Upload.get_info (filename)
            except:
                continue  # skip files whose metadata cannot be read
            for key,value in f.items():
                if key == 'filename':
                    # keep only the basename in the record
                    value = os.path.basename(value)
                files[x][key] = value
            files[x]['queue_flag'] = None
        File.update_files (files)
    except:
        # The queue would freeze with an exception
        pass
    File.unset_flag (self.task_id)
def clone_file(file_info):
    """Duplicate a file on disk, returning a file_info dict or raising an IOError"""
    assert type(file_info) in (dict, type(None))
    if not file_info:
        return None

    # Basic info
    new_file = file_info.copy()
    old_name = new_file.get('filename')
    full_name = os.path.join(ASSET_PATH, old_name)
    thumbnail = os.path.join(THUMB_PATH, old_name) + '.%s' % (THUMB_EXT)

    # Check free space before copying
    stat = os.statvfs(ASSET_PATH)
    free_space = stat.f_bavail * stat.f_bsize
    file_size = os.path.getsize(full_name)
    if file_size > free_space:
        # Parenthesised form: valid on both Python 2 and Python 3
        # (the original `raise IOError, "..."` is Python-2-only syntax).
        raise IOError("Not enough free space to duplicate file.")

    # Duplicate file & thumbnail on a subshell (background copy)
    all_files = os.listdir(ASSET_PATH)
    new_name = Upload.get_unused_name(full_name, all_files)
    new_fullname = os.path.join(ASSET_PATH, new_name)
    new_thumbnail = os.path.join(THUMB_PATH, new_name) + '.%s' % (THUMB_EXT)
    # Quote the paths so file names containing spaces survive the shell.
    # NOTE(review): names containing quotes would still break; a subprocess
    # list-argument call would be fully safe if callers allow such names.
    os.system('cp "%s" "%s" &' % (full_name, new_fullname))
    os.system('cp "%s" "%s" &' % (thumbnail, new_thumbnail))

    # Return
    file_info['filename'] = new_name
    return file_info
def clone_file (file_info):
    """Duplicate a file on disk, returning a file_info dict or raising an IOError"""
    assert type(file_info) in (dict, type(None))
    if not file_info:
        return None
    # Basic info
    new_file = file_info.copy()
    old_name = new_file.get('filename')
    full_name = os.path.join (ASSET_PATH, old_name)
    thumbnail = os.path.join (THUMB_PATH, old_name) + '.%s'%(THUMB_EXT)
    # Check file sizes: refuse if the copy would not fit on the volume
    stat = os.statvfs(ASSET_PATH)
    free_space = stat.f_bavail * stat.f_bsize
    file_size = os.path.getsize (full_name)
    if file_size > free_space:
        # NOTE(review): `raise E, msg` is Python-2-only syntax.
        raise IOError, "Not enough free space to duplicate file."
    # Duplicate file & thumbnail on a subshell (the trailing '&' makes the
    # copy asynchronous; callers cannot rely on it having finished).
    all_files = os.listdir (ASSET_PATH)
    new_name = Upload.get_unused_name (full_name, all_files)
    new_fullname = os.path.join (ASSET_PATH, new_name)
    new_thumbnail= os.path.join (THUMB_PATH, new_name) + '.%s'%(THUMB_EXT)
    # NOTE(review): unquoted paths — file names with spaces or shell
    # metacharacters would break these commands; verify upstream sanitizing.
    os.system ("cp %s %s &" %(full_name, new_fullname))
    os.system ("cp %s %s &" %(thumbnail, new_thumbnail))
    # Return the caller's dict, mutated to point at the new copy
    file_info['filename'] = new_name
    return file_info
def preview_attachments(self, **kargs): """url=${mail_get_attach_url} folder=INBOX""" url = kargs["url"] kargs.pop("url") ids = self.list_all_mail_id(**kargs) print len(ids) if len(ids) != 1: raise Exception("too many mail found!") kargs["messageUid"] = ids[0] fetch_response = self.msg_fetch(**kargs) attachN = len(re.findall('"part": ', fetch_response)) print "----------------------------------------" print attachN failedResult = "" for partN in range(attachN): print "+++++++++++++++++++++" print partN print url url = url % (kargs["folderPath"], kargs["messageUid"], str(partN + 1)) print "^^^^^^^^^^^^^^^^^^^" print url sock = Upload.getResource(url) print "previewing..." + str(kargs["messageUid"]) if str(sock.code) == "200": print "read>>" + str(sock.read(100)) else: failedResult = failedResult + "%s found %s" % (str(partN + 1), str(sock.code)) + "\n" if failedResult: raise Exception("Resource not get %s found!" % str(kargs["messageUid"])) else: return "All esource succesfully get with HTTP code " + str(sock.code)
def upload():
    """Flask view: accept an image upload, OCR + summarize it, render results."""
    if request.method == 'GET':
        return render_template('upload.html', lngs=Defines.lngs)
    if request.method == 'POST':
        if 'file' not in request.files:
            return render_template('upload.html', lngs=Defines.lngs, msg='No file selected')
        filename = Upload.Upload(request.files['file'])
        if filename == '':
            return render_template('upload.html', lngs=Defines.lngs, msg='No file selected')
        extracted_text, summary, threshold = ProcessImage.ProcessImage(
            filename, request.form['lng'])
        # Build the js-array literal the template embeds client-side.
        render_text = '\n'
        for sentence, value in summary:
            render_text += '[`' + sentence + '`,' + str(value) + '],\n'
        return render_template('upload.html', lngs=Defines.lngs,
                               msg='Successfully processed',
                               threshold=threshold,
                               extracted_text=render_text,
                               img_src=filename)
def add_asset_apply ():
    """CTK handler: create asset(s) from the submitted form and upload.

    Requires the uploader role; collects metadata from CTK.post, processes
    the uploaded file (an archive expanding to several files becomes a
    collection), then redirects on success.
    """
    # Authentication
    fail = Auth.assert_is_role (Role.ROLE_UPLOADER)
    if fail:
        return fail
    asset = Asset()
    asset['creator_id'] = Auth.get_user_id()
    asset['version'] = 1
    if 'parts' in CTK.post:
        parts = CTK.post['parts']
        if parts:
            parts = Validations.split_list (CTK.post['parts'])
            asset._parts['has_parts_of'] = [int(p) for p in parts]
    if 'parent_id' in CTK.post:
        asset._parent_id = int(CTK.post['parent_id'])
    # Plain metadata fields copied straight from the POST when present.
    for key in ['asset_types_id', 'licenses_id', 'title', 'description', 'version', 'language', 'subject']:
        if key in CTK.post:
            asset[key] = CTK.post[key]
    filenames = []
    if 'name' in CTK.post and 'ref' in CTK.post:
        tmp_name = CTK.post['ref']
        src_name = CTK.post['name']
        filenames = Upload.process_file (tmp_name, src_name)
    #Collection: several files means the upload was an archive
    if len(filenames) > 1:
        col_id = create_collection (src_name)
        asset['collections_id'] = col_id
    elif len(filenames) == 1:
        info = Upload.get_info (filenames[0])
        #If unique file is broken
        if not info['filename']:
            return {'ret': "ok", 'redirect': '%s/broken' %(LOCATION)}
    ret = create_assets (asset, filenames)
    if False in ret:
        return {'ret': "error"}
    return {'ret': "ok", 'redirect': LOCATION}
def add_asset_apply():
    """Create one or more assets from the POSTed upload (uploader role only)."""
    denied = Auth.assert_is_role(Role.ROLE_UPLOADER)
    if denied:
        return denied

    asset = Asset()
    asset['creator_id'] = Auth.get_user_id()
    asset['version'] = 1

    # Optional relations supplied by the form.
    if 'parts' in CTK.post:
        raw_parts = CTK.post['parts']
        if raw_parts:
            split = Validations.split_list(CTK.post['parts'])
            asset._parts['has_parts_of'] = [int(part) for part in split]
    if 'parent_id' in CTK.post:
        asset._parent_id = int(CTK.post['parent_id'])

    # Plain metadata fields copied verbatim from the form.
    for field in ('asset_types_id', 'licenses_id', 'title', 'description',
                  'version', 'language', 'subject'):
        if field in CTK.post:
            asset[field] = CTK.post[field]

    filenames = []
    if 'name' in CTK.post and 'ref' in CTK.post:
        tmp_name = CTK.post['ref']
        src_name = CTK.post['name']
        filenames = Upload.process_file(tmp_name, src_name)

    if len(filenames) > 1:
        # Archive upload: group the resulting files into a collection.
        col_id = create_collection(src_name)
        asset['collections_id'] = col_id
    elif len(filenames) == 1:
        info = Upload.get_info(filenames[0])
        if not info['filename']:
            # The single uploaded file turned out broken.
            return {'ret': "ok", 'redirect': '%s/broken' % (LOCATION)}

    results = create_assets(asset, filenames)
    if False in results:
        return {'ret': "error"}
    return {'ret': "ok", 'redirect': LOCATION}
def set_photo(self, uploadURL, filePath):
    """Upload *filePath* as a photo resource and print its id and filename.

    The date string and display name passed to uploadResource are fixed
    test fixtures.
    """
    photo = Upload.uploadResource(uploadURL, filePath, "2014-06-05", "myFileName")
    print photo["id"] + " : " + photo["filename"]
    # newfile = open('E:\\Repositaries\\robot\\workspace\\Libs\\tmp.txt','w')
    # newfile.write(res.read())
    # str(re.findall('.*(<todo summary.*/todo>).*',TaskResp))
    # newfile.close()
def __init__(self, target_file=None):
    """Snapshot quota limits and current usage for the logged-in user.

    When *target_file* is given, its on-disk size is recorded as the
    pending upload size.
    """
    uid = Auth.get_user_id()
    per_user = Upload.get_usage_user(uid)
    overall = Upload.get_usage_system()
    self.limits = {
        'size': LIMIT_ASSET_SIZE,
        'files': LIMIT_ASSET_FILES,
        'total': LIMIT_ASSET_TOTAL,
    }
    pending = os.path.getsize(target_file) if target_file else 0
    self.usage = {
        'size': pending,
        'files': per_user['files'],
        'total': overall['size'],
    }
def __init__(self, target_file=None):
    """Record the quota limits and the caller's current usage figures.

    The optional *target_file* is sized up-front so quota checks can
    account for the upload about to happen.
    """
    user_usage = Upload.get_usage_user(Auth.get_user_id())
    system_usage = Upload.get_usage_system()
    self.limits = dict(size=LIMIT_ASSET_SIZE,
                       files=LIMIT_ASSET_FILES,
                       total=LIMIT_ASSET_TOTAL)
    self.usage = dict(size=0,
                      files=user_usage['files'],
                      total=system_usage['size'])
    if target_file:
        self.usage['size'] = os.path.getsize(target_file)
def __main__():
    """Wire every server-side handler into a Server and start serving."""
    check_files_and_folders()
    server_socket = Socket.ServerSocket()
    connection = server_socket.Socket()
    # One handler instance per supported operation, in the order the
    # Server constructor expects them.
    handlers = (
        connection,
        Cryptography.session_crypto(None),
        Registery.Registery(),
        Login.serverLogin(),
        Download.Download(),
        Upload.Upload(),
        List.List(),
        Read.Read(),
        Write.Write(),
        SessionKeyExchange.ServerSession(None),
        DACCommands.DACCommands(),
        Auditor.Auditor(),
    )
    sr = Server(*handlers)
    sr.Handler()
def upload(app, user, outcome, accept):
    '''Upload the results of a single run.

    Forks: the parent returns immediately while the child posts the
    compressed reports to the user's reporting URL, applies any
    server-requested reporting-URL or sparsity changes, optionally shows a
    server message dialog, then _exit()s.  (Python 2 code: urllib2,
    dict.has_key.)
    '''
    reporting_url = user.reporting_url()
    if reporting_url and outcome.reports:
        # upload in the background, in case the network is slow
        import os
        if os.fork() > 0:
            return  # parent: nothing more to do
        # compress reports in preparation for upload
        import Upload
        upload = Upload.Upload(outcome.reports)
        # collect headers from various contributors
        import SamplerConfig
        upload.headers['sampler-version'] = SamplerConfig.version
        upload.headers['accept'] = accept
        __add_headers(upload, app)
        __add_headers(upload, outcome)
        # install our special redirect handler so permanent redirects are recorded
        import urllib2
        import RedirectHandler
        redirect = RedirectHandler.RedirectHandler()
        urllib2.install_opener(urllib2.build_opener(redirect))
        # post the upload and read server's response
        request = urllib2.Request(reporting_url, upload.body(), upload.headers)
        reply = urllib2.urlopen(request)
        # server may have requested a permanent URL change
        if redirect.permanent:
            # !!!: sanity check this before applying it
            # !!!: don't apply change if it is the same as the old value
            user.change_reporting_url(redirect.permanent)
        # server may have requested a sparsity change
        if reply.info().has_key('sampler-change-sparsity'):
            # !!!: sanity check this before applying it
            # !!!: don't apply change if it is the same as the old value
            user.change_sparsity(int(reply.info()['sampler-change-sparsity']))
        # server may have posted a message for the user
        message = reply.read()
        if message and 'DISPLAY' in os.environ:
            base = reply.geturl()
            content_type = reply.info()['content-type']
            del reply  # release the connection before opening the dialog
            import ServerMessage
            dialog = ServerMessage.ServerMessage(base, content_type, message)
            dialog.run()
        # child is done; exit without fanfare
        os._exit(0)
def _padded_axis(label, value):
    """One 'X: <value>' display line, space-padded to a fixed 10-char value field."""
    return label + str(value) + " " * (10 - len(str(value)))

def refresh_coordinates_thread():
    """Background loop: OCR the ship coordinates ~5x/s and push them online.

    Registers the ship once, then every 0.2 s triggers an OCR pass on a
    worker thread; whenever a pass succeeds the coordinates are uploaded
    and mirrored in the GUI.  Never returns.
    """
    time.sleep(2)
    Upload.new_ship(0, 0, 0, settings.Variables.layerName, settings.Variables.layerID)
    next_get_time = 0
    while True:
        if time.time() > next_get_time:
            next_get_time = time.time() + 0.2
            # BUG FIX: pass the callable itself. The original used
            # target=OCR.refresh_coordinates() which ran the OCR *inline*
            # (blocking this loop) and handed its return value to Thread.
            threading.Thread(target=OCR.refresh_coordinates).start()
        if OCR.Coordinates.success:
            OCR.Coordinates.success = False
            # noinspection SpellCheckingInspection
            threading.Thread(target=Upload.move_ship,
                             args=(OCR.Coordinates.x, OCR.Coordinates.y,
                                   OCR.Coordinates.z,
                                   settings.Variables.layerName,
                                   settings.Variables.layerID)).start()
            coordinates_display = "\n".join([
                _padded_axis("X: ", Upload.Vars.latestOnlineX),
                _padded_axis("Y: ", Upload.Vars.latestOnlineY),
                _padded_axis("Z: ", Upload.Vars.latestOnlineZ),
            ])
            Graphics.GUI.set_online_coordinates(coordinates_display)
def daemonUpload(conn, name, addr, listPartOwned):
    """Serve a single upload-daemon request on an accepted socket.

    Packet layout: [0:4] ASCII opcode, [4:36] file id, [36:] part number.
    A CODE_DOWNLOAD request makes this peer upload the part (if owned);
    CODE_CLOSE logs a shutdown notice.  The connection is always closed.
    """
    try:
        ricevutoByte = conn.recv(const.LENGTH_PACK)
        if not ricevutoByte:
            # Nothing received: treat as a bad packet.
            tfunc.write_daemon_error(name, addr[0], "Pacchetto errato")
        elif (str(ricevutoByte[0:4], "ascii") == pack.CODE_CLOSE):
            tfunc.write_daemon_success("Mi è arrivata una richiesta di chiusura, saluti.")
        else:
            if str(ricevutoByte[0:4], "ascii") == pack.CODE_DOWNLOAD:  # the peer downloads, so we UPLOAD
                if pfunc.check_presence(int(ricevutoByte[36:]), ricevutoByte[4:36], listPartOwned):
                    upl.upload(ricevutoByte[4:36], ricevutoByte[36:], conn, listPartOwned, name, addr)
                else:
                    # Part not owned locally: request ignored on purpose.
                    #tfunc.write_daemon_error(name, addr[0], "Errore, la parte " + str(int(ricevutoByte[36:])) + " non è presente.")
                    pass
            else:
                # Unrecognised opcode.
                tfunc.write_daemon_error(name, addr[0], "Ricevuto pacchetto sbagliato: " + str(ricevutoByte, "ascii"))
    except:
        # NOTE(review): bare except silently swallows all errors here.
        pass
    finally:
        conn.close()
def upload_file(self, **kargs): self.fidL = [] for file in kargs["fname"].split(): self.uploadURL = kargs["url"] if "indows" in platform.system(): self.filePath = os.getcwd() + "\\files\\" + file else: self.filePath = os.getcwd() + "/files/" + file fileid = datetime.datetime.now().strftime("%y%m%d%S") attach = Upload.uploadResource(self.uploadURL, self.filePath, fileid, fileName=kargs["fname"]) self.fidL.append(attach["id"]) self.fids = ",".join(self.fidL) print "[files]: " + kargs["fname"] + " | [file id]: " + self.fids print "Successfully upload attachments!" return self.fids
def create_assets (asset, filenames):
    """Create the assets immediately. Final info is added via callback
    once the task exits the processing queue.

    Returns a list with one OpAsset.add() result per (possibly None)
    filename; a falsy filenames argument still creates one file-less asset.
    """
    ret = []
    if not filenames:
        # No upload at all: still create a single asset without a file.
        filenames = [None]
    for filename in filenames:
        if filename:
            # NOTE(review): queue_flag is assigned but never read — possibly
            # meant to be stored on the asset; confirm before removing.
            queue_flag = id(asset)
            asset._file = Upload.get_info (filename)
        op = OpAsset (asset)
        rc = op.add()
        ret.append(rc)
    return ret
def create_assets(asset, filenames):
    """Create the assets immediately. Final info is added via callback once
    the task exits the processing queue.

    Parameters:
        asset: the Asset template shared by every created record.
        filenames: processed file names; falsy means one file-less asset.
    Returns:
        list of OpAsset.add() results, one per (possibly None) filename.
    """
    ret = []
    if not filenames:
        filenames = [None]  # still create a single asset with no file
    for filename in filenames:
        if filename:
            # (removed dead local `queue_flag = id(asset)` — it was never read)
            asset._file = Upload.get_info(filename)
        op = OpAsset(asset)
        ret.append(op.add())
    return ret
def download_message(self, **kargs): # for calendar event mail download. ids = self.list_all_mail_id(**kargs) if len(ids) == 0: raise Exception("No mail to download!") # uid = self.check_response(getID='msg uid') envurl = self.url.replace("/dd", "") # valid enum values are [none, embeddedOnly, embeddedAndRemote] url = envurl + "/http/viewattachment?&accountId&folder=%s&uid=%s&part=1" % (kargs["folder"], ids[0]) print "url is " + url sock = Upload.getResource(url) if str(sock.code) == "200": return "download mail read 100>>" + str(sock.read(100)) else: raise Exception("%s found %s" % (str(partN + 1), str(sock.code)))
def print_preview_message(self, **kargs): ids = self.list_all_mail_id(**kargs) if len(ids) == 0: raise Exception('No mail to print priview!') #uid = self.check_response(getID='msg uid') envurl = self.url.replace('/dd', '') #valid enum values are [none, embeddedOnly, embeddedAndRemote] url = envurl + '/bin?r=mail.message.print{accountId:"",folderPath:"%s",messageUid:%s,images:"none"}' % ( kargs['folder'], ids[0]) print 'url is ' + url sock = Upload.getResource(url) if str(sock.code) == '200': return 'print preview read 100>>' + str(sock.read(100)) else: raise Exception('%s found %s' % (str(partN + 1), str(sock.code)))
def download_message(self, **kargs): #for calendar event mail download. ids = self.list_all_mail_id(**kargs) if len(ids) == 0: raise Exception('No mail to download!') #uid = self.check_response(getID='msg uid') envurl = self.url.replace('/dd', '') #valid enum values are [none, embeddedOnly, embeddedAndRemote] url = envurl + '/http/viewattachment?&accountId&folder=%s&uid=%s&part=1' % ( kargs['folder'], ids[0]) print 'url is ' + url sock = Upload.getResource(url) if str(sock.code) == '200': return 'download mail read 100>>' + str(sock.read(100)) else: raise Exception('%s found %s' % (str(partN + 1), str(sock.code)))
def upload_file(self, **kargs): self.fidL = [] for file in kargs['fname'].split(): self.uploadURL = kargs['url'] if 'indows' in platform.system(): self.filePath = os.getcwd() + '\\files\\' + file else: self.filePath = os.getcwd() + '/files/' + file fileid = datetime.datetime.now().strftime('%y%m%d%S') attach = Upload.uploadResource(self.uploadURL, self.filePath, fileid, fileName=kargs['fname']) self.fidL.append(attach['id']) self.fids = ','.join(self.fidL) print '[files]: ' + kargs['fname'] + ' | [file id]: ' + self.fids print 'Successfully upload attachments!' return self.fids
def uploadJson(infile, outfile):
    """Upload one dataset JSON file (dataset, platform, samples, metadata).

    The dataset accession is taken from the file name (text before the
    first '-'); nothing is uploaded when that accession already exists.

    Parameters:
        infile: path to '<accession>-....json'.
        outfile: unused here; kept for pipeline-signature compatibility.
    """
    # json.load parses directly from the file object (no intermediate string).
    with open(infile) as openfile:
        dataset = json.load(openfile)
    print('Doing {dataset_accession}'.format(**dataset))
    # Get dataset accession from the file name
    dataset_accession = os.path.basename(infile).split('-')[0]
    # Skip datasets that were already uploaded
    if not P.exists(dataset_accession):
        P.upload_dataset(dataset)
        P.upload_platform(dataset)
        P.upload_samples(dataset)
        P.upload_sample_metadata(dataset)
def print_preview_message(self, **kargs): ids = self.list_all_mail_id(**kargs) if len(ids) == 0: raise Exception("No mail to print priview!") # uid = self.check_response(getID='msg uid') envurl = self.url.replace("/dd", "") # valid enum values are [none, embeddedOnly, embeddedAndRemote] url = envurl + '/bin?r=mail.message.print{accountId:"",folderPath:"%s",messageUid:%s,images:"none"}' % ( kargs["folder"], ids[0], ) print "url is " + url sock = Upload.getResource(url) if str(sock.code) == "200": return "print preview read 100>>" + str(sock.read(100)) else: raise Exception("%s found %s" % (str(partN + 1), str(sock.code)))
def __call__ (self):
    """Queue callback: merge Upload.get_info() metadata into this task's
    flagged file records and persist them.

    Polls up to RETRIES times (1 s apart) until the flagged files appear.
    """
    for i in range(RETRIES):
        files = File.get_files_by_flag (self.task_id)
        if files:
            break
        else:
            time.sleep (1)
    files = self._cleanup (files)
    for x in range(len(files)):
        filename = os.path.join (ASSET_PATH, files[x]['filename'])
        try:
            f = Upload.get_info (filename)
        except:
            continue  # unreadable file: keep the record as-is
        for key,value in f.items():
            if key == 'filename':
                # store only the basename, not the absolute path
                value = os.path.basename(value)
            files[x][key] = value
    File.update_files (files)
def __call__(self):
    """Look up this task's flagged files and merge in Upload.get_info data."""
    for _attempt in range(RETRIES):
        files = File.get_files_by_flag(self.task_id)
        if files:
            break
        time.sleep(1)
    files = self._cleanup(files)
    for record in files:
        path = os.path.join(ASSET_PATH, record['filename'])
        try:
            info = Upload.get_info(path)
        except:
            continue  # skip entries whose metadata cannot be read
        for key, value in info.items():
            if key == 'filename':
                # persist only the basename
                value = os.path.basename(value)
            record[key] = value
    File.update_files(files)
def transcode(self, target_id, programatic=False):
    """Dispatch asset to transcoding queue, and reingest result as new asset

    NOTE(review): this excerpt appears truncated — `thumb`, `name`/`ext`,
    `abs_target` and `task_id` are computed but never used before the final
    `return False`, suggesting the method continues in the full file.
    """
    try:
        filename = self._source_filename
    except AttributeError:
        # First call: remember the on-disk source of the wrapped asset.
        filename = self._asset._file.get('filename')
        self._source_filename = filename
    if not filename:
        return False
    abs_source = os.path.join(ASSET_PATH, filename)
    src_type = Upload.get_type(abs_source)
    if not src_type in Format.TRANSCODE_TYPES:
        return False
    # Dispatch to the XML-RPC transcoding queue
    http = "http://%s:%s/" % (QUEUE_SERVER, QUEUE_PORT)
    client = ServerProxy(http)
    if src_type in ['audio', 'video']:
        convert = client.ConvertMedia
        thumb = client.BuildThumbnailMedia
    else:
        convert = client.ConvertImage
        thumb = client.BuildThumbnailImage
    format = Format.get_format(target_id)['name']
    name, ext = os.path.splitext(abs_source)
    abs_target = '%s.%s' % (abs_source, format)
    try:
        task_id = convert(abs_source, abs_target, format)
    except Exception, e:
        print str(e)
        return False
def transcode (self, target_id, programatic=False):
    """Dispatch asset to transcoding queue, and reingest result as new asset

    NOTE(review): several locals (`thumb`, `name`/`ext`, `abs_target`,
    `task_id`) are unused before the trailing `return False`; this excerpt
    is most likely cut short of the method's real end.
    """
    try:
        filename = self._source_filename
    except AttributeError:
        # Cache the asset's on-disk file name on first use.
        filename = self._asset._file.get('filename')
        self._source_filename = filename
    if not filename:
        return False
    abs_source = os.path.join(ASSET_PATH, filename)
    src_type = Upload.get_type (abs_source)
    if not src_type in Format.TRANSCODE_TYPES:
        return False
    # Dispatch via XML-RPC to the transcoding queue server
    http = "http://%s:%s/" % (QUEUE_SERVER, QUEUE_PORT)
    client = ServerProxy (http)
    if src_type in ['audio','video']:
        convert = client.ConvertMedia
        thumb = client.BuildThumbnailMedia
    else:
        convert = client.ConvertImage
        thumb = client.BuildThumbnailImage
    format = Format.get_format (target_id)['name']
    name,ext = os.path.splitext (abs_source)
    abs_target = '%s.%s' %(abs_source, format)
    try:
        task_id = convert (abs_source, abs_target, format)
    except Exception,e:
        print str(e)
        return False
def generate (self, input, target):
    """Create a thumbnail image for *target*.

    Non-visual media (audio/text/unknown) get a stock per-type icon;
    video and image sources get a frame extracted with the bundled ffmpeg.
    """
    target_base, target_ext = os.path.splitext (target)
    mtype = Upload.get_type(target_base)
    if mtype not in ['audio', 'image', 'text', 'video']:
        mtype = None
    icons = {'audio': THUMB_ABS_ASSET_AUDIO,
             'image': THUMB_ABS_ASSET_IMAGE,
             'text': THUMB_ABS_ASSET_TEXT,
             'video': THUMB_ABS_ASSET_VIDEO,
             None : THUMB_ABS_ASSET,}
    if mtype not in ['video','image']:
        # nothing to render: fall back to the stock icon
        shutil.copyfile (icons[mtype], target)
        return
    try:
        # NOTE(review): *input*/*target* are interpolated into a shell
        # command; names containing quotes would break it — verify callers
        # sanitize file names.
        exe ("LD_LIBRARY_PATH=%s/lib %s/bin/ffmpeg -y -i '%s' -itsoffset -%d -vcodec mjpeg -vframes 1 -an -f rawvideo -s %s '%s'" % ( BASE_DIR, BASE_DIR, input, THUMB_VIDEO_OFFSET, THUMB_SIZE, target))
    except:
        print 'Could not convert: %s -> %s'%(input, target)
def upload(app, reporting_url, outcome, accept):
    '''Upload the results of a single run.

    Forks: the parent returns immediately; the child posts the compressed
    reports to *reporting_url*, optionally shows a server-sent message on
    the user's display, then _exit()s.  (Python 2 code: urllib2.)
    '''
    if outcome.reports:
        # upload in the background, in case the network is slow
        import os
        if os.fork() > 0:
            return  # parent: back to the caller at once
        # compress reports in preparation for upload
        import Upload
        upload = Upload.Upload(outcome.reports)
        # collect headers from various contributors
        import SamplerConfig
        upload.headers['sampler-version'] = SamplerConfig.version
        upload.headers['accept'] = accept
        __add_headers(upload, app)
        __add_headers(upload, outcome)
        # post the upload and read server's response
        request = urllib2.Request(reporting_url, upload.body(), upload.headers)
        reply = urllib2.urlopen(request)
        # server may have posted a message for the user
        message = reply.read()
        if message and 'DISPLAY' in os.environ:
            base = reply.geturl()
            content_type = reply.info()['content-type']
            del reply  # drop the connection before launching the dialog
            import ServerMessage
            dialog = ServerMessage.ServerMessage(base, content_type, message)
            dialog.run()
        # child is done; exit without fanfare
        os._exit(0)
def preview_attachments(self, **kargs): '''url=${mail_get_attach_url} folder=INBOX''' url = kargs['url'] kargs.pop('url') ids = self.list_all_mail_id(**kargs) print len(ids) if len(ids) != 1: raise Exception('too many mail found!') kargs['messageUid'] = ids[0] fetch_response = self.msg_fetch(**kargs) attachN = len(re.findall('"part": ', fetch_response)) print '----------------------------------------' print attachN failedResult = '' for partN in range(attachN): print '+++++++++++++++++++++' print partN print url url = url % (kargs['folderPath'], kargs['messageUid'], str(partN + 1)) print '^^^^^^^^^^^^^^^^^^^' print url sock = Upload.getResource(url) print 'previewing...' + str(kargs['messageUid']) if str(sock.code) == '200': print 'read>>' + str(sock.read(100)) else: failedResult = failedResult + '%s found %s' % ( str(partN + 1), str(sock.code)) + '\n' if failedResult: raise Exception('Resource not get %s found!' % str(kargs['messageUid'])) else: return 'All esource succesfully get with HTTP code ' + str( sock.code)
def upload_file_old(self, **kargs):
    """Upload one or more attachments (legacy variant).

    Input: kargs['url'] (upload endpoint) and kargs['fname'] (file names,
    space separated, resolved under <cwd>/files).
    Save: self.fidL / self.fids (comma-joined attachment ids).
    Output: the comma-joined attachment id string.
    """
    self.fidL = []
    for file in kargs["fname"].split():
        self.uploadURL = kargs["url"]
        # if 'Sanity' in os.getcwd():
        #     cwd = 'Testsuite\Sanity'
        # else:
        #     cwd = 'Testsuite'
        # self.filePath = os.getcwd().replace(cwd,'') + 'Libs\\files\\' + file
        # Windows vs POSIX path separator ('indows' matches 'Windows').
        if "indows" in platform.system():
            self.filePath = os.getcwd() + "\\files\\" + file
        else:
            self.filePath = os.getcwd() + "/files/" + file
        fileid = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
        print "**********************************"
        attach = Upload.uploadResource(self.uploadURL, self.filePath, fileid, fileName=kargs["fname"])
        self.fidL.append(attach["id"])
    self.fids = ",".join(self.fidL)
    print "[files]: " + kargs["fname"] + "|[file id]: " + self.fids
    print "Successfully upload attachments!"
    return self.fids
b = tf.Variable(tf.zeros([10])) # Create a model to predict by using softmax function prediction = tf.nn.softmax(tf.matmul(x, w) + b) # Use loss function to calculator loss loss = tf.reduce_mean(tf.square(y - prediction)) # Gradient Descent function to optimizer the loss value for taining train_step = tf.train.GradientDescentOptimizer(0.2).minimize(loss) # Initializer init = tf.global_variables_initializer() # Function tf.argmax will return the largest value from two values and tf.equal if true then will return 1 as result ————https://www.tensorflow.org/api_docs/python/tf/argmax correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(prediction, 1)) # Function tf.cast will transfer dtype of correct_prediction to float32,tf.reduce_mean computers the average of the tensor————https://www.tensorflow.org/api_docs/python/tf/reduce_mean accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32)) # Test model with tf.Session() as sess: sess.run(init) for epoch in range(20): for batch in range(n_batch): batch_xs, batch_ys = mnist.train.next_batch(batch_size) sess.run(train_step, feed_dict={x: batch_xs, y: batch_ys}) #acc=sess.run(accuracy,feed_dict={x:mnist.test.images,y:mnist.test.labels}) sess.run(prediction, feed_dict={x: Upload.load_image(new_array)}) #print("Item"+str(epoch)+",Testing Accuracy"+str(acc)) # def compare(): # result= tf.arg_max(prediction,1)
def do_tasks(self):
    """Run the periodic ('logged'/'hourly'/'12 hourly'/'daily') upload tasks.

    Decides which sections are due from their stored 'last update'
    timestamps, fires twitter/yowindow/services/plot/text tasks for each,
    uploads the generated files, and finally stamps the sections only when
    everything succeeded.  Returns True on full success.
    """
    sections = ['logged']
    now = self.calib_data.before(datetime.max)
    if not now:
        now = datetime.now()
    # Round down to the nearest 15 minutes (integer division: Python 2).
    threshold = now.replace(minute=(now.minute / 15) * 15, second=0, microsecond=0)
    last_update = self.params.get_datetime('hourly', 'last update')
    if (not last_update) or (last_update < threshold):
        # time to do hourly tasks
        sections.append('hourly')
    # set 12 hourly threshold
    threshold -= timedelta(hours=(threshold.hour - self.day_end_hour) % 12)
    last_update = self.params.get_datetime('12 hourly', 'last update')
    if (not last_update) or (last_update < threshold):
        # time to do 12 hourly tasks
        sections.append('12 hourly')
    # set daily threshold
    threshold -= timedelta(hours=(threshold.hour - self.day_end_hour) % 24)
    last_update = self.params.get_datetime('daily', 'last update')
    if (not last_update) or (last_update < threshold):
        # time to do daily tasks
        sections.append('daily')
    OK = True
    # NOTE(review): eval() of config values executes arbitrary code from the
    # ini file; trusted-config assumption — ast.literal_eval would be safer.
    for section in sections:
        for template in eval(self.params.get(section, 'twitter', '[]')):
            if not self.do_twitter(template):
                OK = False
    # Only the first section providing a yowindow file is honoured.
    for section in sections:
        yowindow_file = self.params.get(section, 'yowindow', '')
        if yowindow_file:
            self.yowindow.write_file(yowindow_file)
            break
    # De-duplicate services across all due sections before uploading.
    all_services = list()
    for section in sections:
        for service in eval(self.params.get(section, 'services', '[]')):
            if service not in all_services:
                all_services.append(service)
    for service in all_services:
        self.services[service].Upload(True)
    # Collect every unique plot/text output file to upload in one batch.
    uploads = []
    for section in sections:
        for template in eval(self.params.get(section, 'plot', '[]')):
            upload = self.do_plot(template)
            if upload and upload not in uploads:
                uploads.append(upload)
        for template in eval(self.params.get(section, 'text', '[]')):
            upload = self.do_template(template)
            if upload not in uploads:
                uploads.append(upload)
    if uploads:
        if not Upload.Upload(self.params, uploads):
            OK = False
        for file in uploads:
            os.unlink(file)  # generated files are temporary
    if OK:
        # Stamp sections only on success so failed work is retried next run.
        for section in sections:
            self.params.set(section, 'last update', now.isoformat(' '))
    return OK
def still_online_thread():
    """Daemon loop that tells the server we are alive roughly once a minute."""
    keepalive_seconds = 64
    while True:
        time.sleep(keepalive_seconds)
        Upload.still_online()
        print("Still online...")
# 回傳“子動漫標題(List)”、“子動漫URL(List)” URL, ttitle = Anime_Groups(url_Anime) # Loop 子動漫 for i in range(len(ttitle)): print("%8s" % " " + ttitle[i], end=" ", flush=True) URL[i] = Anime_Unit(URL[i]) if (URL[i].find(".m3u8") == -1): print("\033[1;33mDownloading\033[0m", end="", flush=True) # 黃色下載中 Download_mp4(URL[i], download_path, ttitle[i]) else: print("\033[1;33mDownloading\033[0m", end="", flush=True) # 黃色下載中 Download_m3u8(URL[i], download_path, ttitle[i]) # 回傳“下頁狀態”、“下頁URL” NextPage, url_Anime = Next_Page(url_Anime) if (NextPage == str(False)): break print("\n%8s\033[0;30;42m[檢查階段]\033[0m" % " ") # Mod2:Upload.py #================================================================================================# Upload.main(is_update_file_function=bool(True), update_drive_service_folder_name='Anime1', update_drive_service_name=None, update_file_path=download_path)