def test__upload__CallbackNotGiven_ShouldOnlyCallElfCloudAdapter(
        self, mock_upload, getsize_mock):
    """When no callbacks are supplied, upload() should still delegate to the
    mocked elfcloud adapter, passing None in the trailing (callback) slot."""
    uploader.upload("localPath1", "remoteParentId1", "remoteName1", "key1")
    # Wait for queued upload work to finish before asserting on the mock.
    uploader.wait()
    mock_upload.assert_called_with("remoteParentId1", "remoteName1",
                                   "localPath1", unittest.mock.ANY,
                                   unittest.mock.ANY, None)
def main(event, context):
    """Gather URLScan.io results for scans against s3-website domains and
    publish each discovered s3 site to the s3eker SNS topic.

    Args:
        event, context: Lambda invocation arguments (unused directly).
    """
    # Gather URLScan.io results for scans with a request to s3-website domains.
    req_results = requests.get(
        "https://urlscan.io/api/v1/search/?q=domain:s3-website-us-east-1.amazonaws.com&size=20"
    )
    if req_results.status_code != 200:
        logging.error("Bad request.")
        # BUG FIX: previously fell through and tried to parse the failed
        # response body; abort instead.
        return
    client = boto3.client('sns')
    sns_arn = "arn:aws:sns:us-east-1:358663747217:s3eker-upload"
    results = req_results.json()
    logging.info("Running through search results.")
    for result in results["results"]:
        logging.info(f"Getting info from scan {result['result']}")
        req_scan = requests.get(result["result"])
        if req_scan.status_code != 200:
            logging.error("Bad request.")
            # BUG FIX: skip this scan rather than parsing a failed response.
            continue
        scan = req_scan.json()
        domains = scan["lists"]["domains"]
        logging.debug(f"Got list of domains. {domains}")
        # Keep only domains that look like S3 static-website endpoints.
        s3_sites = [domain for domain in domains if "s3-website" in domain]
        logging.info(f"Made list of s3 domains. {s3_sites}")
        for site in s3_sites:
            upload(client, sns_arn, site)
def storeDataItem(cbObj, parentId, remotename, filename):
    """Kick off an asynchronous upload of *filename* as *remotename* under
    *parentId*, wiring the module-level progress callbacks (no encryption key).
    """
    def started(*args):
        return _uploadStartedCb(parentId, remotename, filename, *args)

    def completed(*args):
        return _uploadCompletedCb(cbObj, parentId, remotename, filename, *args)

    def chunk(totalSize, totalSizeStored):
        return _uploadChunkCb(parentId, remotename, filename, totalSize,
                              totalSizeStored)

    def failed(exception):
        return _uploadFailedCb(cbObj, parentId, remotename, filename,
                               exception)

    uploader.upload(filename, parentId, remotename, None,
                    started, completed, chunk, failed)
def test__upload__ShouldCallElfCloudAdapterAndCallbacks(
        self, mock_upload, getsize_mock):
    """Two consecutive uploads should each reach the adapter and fire the
    start/completed callbacks (callbacks invoked with no arguments)."""
    # The mocked adapter releases the latch; presumably a synchronization
    # primitive shared via the test class -- confirm against TestUploader.
    mock_upload.side_effect = TestUploader._sideEffectRelease
    startCb = unittest.mock.Mock()
    completedCb = unittest.mock.Mock()
    uploader.upload("localPath1", "remoteParentId1", "remoteName1", "key1",
                    startCb, completedCb)
    TestUploader._sideEffectAcquire()
    uploader.wait()
    mock_upload.assert_called_with("remoteParentId1", "remoteName1",
                                   "localPath1", unittest.mock.ANY,
                                   unittest.mock.ANY, None)
    startCb.assert_called_with()
    completedCb.assert_called_with()
    # Second upload reuses the same callbacks and must behave identically.
    uploader.upload("localPath2", "remoteParentId2", "remoteName2", "key2",
                    startCb, completedCb)
    TestUploader._sideEffectAcquire()
    uploader.wait()
    mock_upload.assert_called_with("remoteParentId2", "remoteName2",
                                   "localPath2", unittest.mock.ANY,
                                   unittest.mock.ANY, None)
    startCb.assert_called_with()
    completedCb.assert_called_with()
def upload(targets):
    """Upload every existing file in *targets*, recursing into directories."""
    for entry in targets:
        if os.path.isdir(entry):
            # Recurse with the directory's children as a fresh target list.
            children = [os.path.join(entry, child) for child in os.listdir(entry)]
            upload(children)
        elif os.path.exists(entry):
            uploader.upload(entry)
def upload(widget, serial, data=file):
    """Compile the sketch in *widget* and upload it to the board on *serial*,
    showing the spinner while the transfer runs."""
    compiled = compile(widget, data)
    if compiled == -1:
        # Compilation failed; nothing to upload.
        return
    # Drain pending GTK events so the UI repaints before the blocking upload.
    while gtk.events_pending():
        gtk.main_iteration()
    startSpinner()
    uploader.upload(compiled, serial, tw, sb)
    stopSpinner()
def sync_upload(event): try: if not event.is_directory: path = event.src_path dropbox_path = path.replace(PATH_TO_WATCH, DROPBOX_ROOT_FOLDER) print 'file %s changed, updating...' % dropbox_path upload(path, dropbox_path) except: pass
def storeDataItem(cbObj, parentId, remotename, filename):
    """Begin an asynchronous upload of *filename* (stored as *remotename*
    under *parentId*), forwarding progress to the module-level callbacks."""
    onStart = lambda *args: _uploadStartedCb(parentId, remotename, filename,
                                             *args)
    onDone = lambda *args: _uploadCompletedCb(cbObj, parentId, remotename,
                                              filename, *args)
    onChunk = lambda totalSize, totalSizeStored: _uploadChunkCb(
        parentId, remotename, filename, totalSize, totalSizeStored)
    onFail = lambda exception: _uploadFailedCb(cbObj, parentId, remotename,
                                               filename, exception)
    # No encryption key (None) is passed for this data item.
    uploader.upload(filename, parentId, remotename, None,
                    onStart, onDone, onChunk, onFail)
def upload(widget, serial, data=file):
    """Compile and upload *widget* to *serial*, pinning the protocol choice.

    The value of ``config.force_protocol`` is captured at call time so a
    config change mid-upload cannot alter the transfer.
    """
    force_protocol = config.force_protocol
    result = compile(widget, data)
    if result == -1:
        return
    # Flush the GTK event queue so the UI stays responsive first.
    while gtk.events_pending():
        gtk.main_iteration()
    startSpinner()
    uploader.upload(result, serial, tw, sb, force_protocol)
    stopSpinner()
def test_upload(self):
    """Round-trip test: upload the result set, download it back, and check
    the expected number of files arrived."""
    # BUG FIX: removed the unused ``upload_dir`` local.
    download_dir = os.path.abspath("../data")
    upload()
    # Test by downloading the files
    download()
    # BUG FIX: the message claimed "at least 4" but assertEqual checks an
    # exact count; the message now matches the assertion actually made.
    self.assertEqual(
        len(os.listdir(download_dir)),
        4,
        msg="Expected the downloaded bucket to have exactly 4 files")
def sync_upload_create(event): try: path = event.src_path dropbox_path = path.replace(PATH_TO_WATCH, DROPBOX_ROOT_FOLDER) print 'file %s created, updating...' % dropbox_path if event.is_directory: create_folder(dropbox_path) else: upload(path, dropbox_path) except: pass
def run():
    """Import rows from the cp1250-encoded CSV data file and upload each
    record (plus optional photo/map images) through the uploader session.

    On any per-row failure the HTTP session is torn down, re-logged-in, and
    processing continues with the next row.
    """
    s = requests.Session()
    cookies = uploader.login(s)
    with codecs.open(DATA_FILE, mode='r') as ff:
        csvreader = csv.reader(ff, delimiter=';')
        for row in csvreader:
            # Rows shorter than 9 columns are incomplete and skipped.
            if len(row) > 8:
                try:
                    # Build the free-text description from several columns.
                    desc = add_desc('', row[4])
                    desc = add_desc(desc, row[6])
                    desc = add_desc(desc, row[8])
                    desc = add_desc(desc, row[9])
                    native_id_tmp = row[0]
                    # multipart/form-data fields: (filename, value) tuples;
                    # text columns are transcoded cp1250 -> utf-8 (Python 2).
                    files = {
                        'company_id': (None, '5'),
                        'native_id': (None, native_id_tmp),
                        'type': (None, row[7]),
                        'region': (None, row[1].decode('cp1250').encode('utf-8')),
                        'district': (None, row[2].decode('cp1250').encode('utf-8')),
                        'city': (None, row[3].decode('cp1250').encode('utf-8')),
                        'street': (None, row[5].decode('cp1250').encode('utf-8')),
                        'description': (None, desc.decode('cp1250').encode('utf-8')),
                        # Coordinates use decimal commas in the source data.
                        'lat': (None, row[12].replace(',', '.')),
                        'lng': (None, row[13].replace(',', '.')),
                    }
                    #files = dict()
                    # Image paths are derived from the native id minus its
                    # 3-character prefix -- TODO confirm the prefix meaning.
                    photo = '{0}/photos/{1}.jpg'.format(
                        DATA_DIR, native_id_tmp[3:])
                    mapa = '{0}/maps/{1}m.jpg'.format(DATA_DIR,
                                                      native_id_tmp[3:])
                    if os.path.isfile(photo) == True:
                        files['picture_file'] = open(photo, 'rb')
                    if os.path.isfile(mapa) == True:
                        files['map_picture_file'] = open(mapa, 'rb')
                    uploader.upload(s, cookies, files)
                except Exception as e:
                    # Recover by restarting the whole session, then go on.
                    logging.exception('Chyba')
                    s.close()
                    time.sleep(3)
                    logging.info('======= Restarting sesssion')
                    s = requests.Session()
                    cookies = uploader.login(s)
                # Throttle between rows (rate limiting, presumably).
                time.sleep(100)
def run():
    """Import rows from the cp1250-encoded CSV data file (company 6 variant)
    and upload each record with optional image/map attachments.

    Uses the 'prod' uploader configuration; per-row failures restart the
    HTTP session and processing continues.
    """
    uploader.set_cfg('prod')
    print uploader.cfg
    s = requests.Session()
    cookies = uploader.login(s)
    with codecs.open(DATA_FILE, mode='r') as ff:
        csvreader = csv.reader(ff, delimiter=';')
        for row in csvreader:
            # Rows shorter than 9 columns are incomplete and skipped.
            if len(row) > 8:
                try:
                    native_id_tmp = row[0]
                    # multipart/form-data fields; text columns transcoded
                    # cp1250 -> utf-8 (Python 2 bytes handling).
                    files = {
                        'company_id': (None, '6'),
                        'native_id': (None, native_id_tmp),
                        'type': (None, ''),
                        'region': (None, row[3].decode('cp1250').encode('utf-8')),
                        'district': (None, row[1].decode('cp1250').encode('utf-8')),
                        'city': (None, row[4].decode('cp1250').encode('utf-8')),
                        'street': (None, row[5].decode('cp1250').encode('utf-8')),
                        'description': (None, row[6].decode('cp1250').encode('utf-8')),
                        'lat': (None, row[9]),
                        'lng': (None, row[10]),
                    }
                    #files = dict()
                    photo = '{0}/Images/{1}.jpg'.format(DATA_DIR, row[0])
                    mapa = '{0}/Maps/{1}.jpg'.format(DATA_DIR, row[0])
                    if os.path.isfile(photo) == True:
                        files['picture_file'] = open(photo, 'rb')
                    else:
                        # Fall back to the alternate "_1" image name.
                        photo = '{0}/Images/{1}_1.jpg'.format(DATA_DIR,
                                                              row[0])
                        if os.path.isfile(photo) == True:
                            files['picture_file'] = open(photo, 'rb')
                    if os.path.isfile(mapa) == True:
                        files['map_picture_file'] = open(mapa, 'rb')
                    uploader.upload(s, cookies, files)
                except Exception as e:
                    # Recover by restarting the whole session, then go on.
                    logging.exception('Chyba')
                    s.close()
                    time.sleep(3)
                    logging.info('======= Restarting sesssion')
                    s = requests.Session()
                    cookies = uploader.login(s)
                # Throttle between rows.
                time.sleep(100)
def Upload(self, filepath):
    """
    Takes a file path as a string and returns a url
    example:
        import dbus
        filepath = "/home/ken/Documents/awesomeimage.jpg"
        obj = dbus.SessionBus().get_object("com.Gwibber.Uploader",
                                           "/com/gwibber/Uploader")
        uploader = dbus.Interface(obj, "com.Gwibber.Uploader")
        def done(path, url):
            if path == filepath:
                sig_complete.remove()
                sig_failed.remove()
                print "Uploaded to", url
        def failed(path, message):
            if path == filepath:
                sig_complete.remove()
                sig_failed.remove()
                print "Upload failed with message", message
        sig_complete = uploader.connect_to_signal("UploadComplete", done)
        sig_failed = uploader.connect_to_signal("UploadFailed", failed)
        uploader.Upload(filepath)
    """
    logger.info("Uploading image %s", filepath)
    # NOTE(review): ``url`` is assigned but never returned here, despite the
    # docstring saying a url is returned -- results appear to be delivered
    # via the UploadComplete/UploadFailed signal callbacks instead. Confirm.
    url = uploader.upload(filepath, self.UploadComplete, self.UploadFailed)
def test__upload__WhenFails_ShouldCallFailedCbWithException_ShouldPauseFailed(self, mock_upload, getsize_mock):
    """A failing upload must invoke failedCb with the originating exception
    and leave the task paused; resuming it afterwards must complete it."""
    EXPECTED_EXCEPTION = uploader.elfcloudclient.ClientException(msg="test originated exception")
    # Make the mocked adapter raise on every call.
    mock_upload.side_effect = lambda *args : self._raise(EXPECTED_EXCEPTION)
    startCb = unittest.mock.Mock()
    completedCb = unittest.mock.Mock()
    chunkCb = unittest.mock.Mock() # no calls expected: chunkCb is driven by elfcloudclient.upload(), which is mocked here
    failedCb = unittest.mock.Mock()
    uid = uploader.upload("localPath1", "remoteParentId1", "remoteName1", "key1", startCb, completedCb, chunkCb, failedCb)
    uploader.wait()
    mock_upload.assert_called_with("remoteParentId1", "remoteName1", "localPath1", unittest.mock.ANY, unittest.mock.ANY, None)
    startCb.assert_called_once_with()
    completedCb.assert_not_called()
    chunkCb.assert_not_called()
    failedCb.assert_called_once_with(EXPECTED_EXCEPTION)
    # Second phase: clear the failure and all recorded calls, then resume.
    mock_upload.side_effect = None
    startCb.reset_mock()
    completedCb.reset_mock()
    failedCb.reset_mock()
    chunkCb.reset_mock()
    uploader.resume(uid)
    uploader.wait()
    startCb.assert_called_once_with()
    completedCb.assert_called_once_with()
    failedCb.assert_not_called()
    chunkCb.assert_not_called()
def test__upload__WhenFails_ShouldCallFailedCbWithException_ShouldPauseFailed(
        self, mock_upload, getsize_mock):
    """A failing upload must report the exception via failedCb and stay
    paused; resuming the same uid afterwards must complete normally."""
    EXPECTED_EXCEPTION = uploader.elfcloudclient.ClientException(
        msg="test originated exception")
    # Force the mocked adapter to raise on every call.
    mock_upload.side_effect = lambda *args: self._raise(EXPECTED_EXCEPTION)
    startCb = unittest.mock.Mock()
    completedCb = unittest.mock.Mock()
    chunkCb = unittest.mock.Mock(
    )  # no calls expected: chunkCb is driven by elfcloudclient.upload(), which is mocked here
    failedCb = unittest.mock.Mock()
    uid = uploader.upload("localPath1", "remoteParentId1", "remoteName1",
                          "key1", startCb, completedCb, chunkCb, failedCb)
    uploader.wait()
    mock_upload.assert_called_with("remoteParentId1", "remoteName1",
                                   "localPath1", unittest.mock.ANY,
                                   unittest.mock.ANY, None)
    startCb.assert_called_once_with()
    completedCb.assert_not_called()
    chunkCb.assert_not_called()
    failedCb.assert_called_once_with(EXPECTED_EXCEPTION)
    # Second phase: clear the failure and recorded calls, then resume.
    mock_upload.side_effect = None
    startCb.reset_mock()
    completedCb.reset_mock()
    failedCb.reset_mock()
    chunkCb.reset_mock()
    uploader.resume(uid)
    uploader.wait()
    startCb.assert_called_once_with()
    completedCb.assert_called_once_with()
    failedCb.assert_not_called()
    chunkCb.assert_not_called()
def monitor_folder(device_type): folder_name = "%s/%s" % (datafolder , device_type) #folder_name = datafolder filenames = os.listdir(folder_name) for filename in filenames: print filenames file_infos=filename.split("_") timestamp = int(file_infos[0]) #duration = int(file_infos[1]) duration = 20 #hardcoded for 20 seconds for now relative_filepath = "%s/%s" % (folder_name,filename) print "Going to send %s for uplaod" % relative_filepath if (uploader.upload(relative_filepath, device_type) == 0): #allow it to execute for specfied time print "Hex File %s uploaded. Now allowing to executer for %d seconds" % (relative_filepath,duration) time.sleep(duration) #remove the file print "Removing the file %s" % relative_filepath os.unlink(relative_filepath) print "-------------------------------------------------------" return
def sendTweets(tweets_for_submission):
    """Reshape raw tweets into the API payload format and POST them to the
    raw-tweet creation endpoint; logs progress to stdout."""
    if tweets_for_submission:
        tweet_lst = []
        for tweet in tweets_for_submission:
            t = {
                "id": str(tweet['tweet id']),
                "text": tweet["text"],
                "location": tweet["user location"],
                "hashtags": tweet["hashtags"],
                "date_created": tweet['created at']
            }
            print(str(tweet['tweet id']))
            # NOTE(review): "geo" is only set (to "") when the source geo is
            # missing/empty; tweets WITH geo data get no "geo" key at all in
            # the payload. Confirm this asymmetry is intentional.
            if not tweet["geo"] or len(tweet["geo"]) == 0:
                t["geo"] = ""
            tweet_lst.append(t)
        print("number of tweets: " + str(len(tweet_lst)))
        response = upload({"data": tweet_lst}, '/api/tweet/raw/create')
        print(response.text)
        now = datetime.now()
        print("Australia tweets appended at: " + str(now))
    else:
        print('no aus tweets appended')
def main(sleep_time=constants.UPLOAD_DAEMON_SLEEP_TIME):
    """Daemon loop: scan for ready files, upload them to GCS in groups, heal
    any upload errors, and sleep between passes.

    Args:
        sleep_time: seconds to wait between scan passes.
    """
    logging.basicConfig(level=logging.INFO,
                        format=constants.LOG_FMT_S_THREADED)
    logging.info("Upload daemon copying files to gs://" + config.GCS_BUCKET)
    while True:
        try:
            logging.debug("Scanning for files to upload")
            # Scan for files to upload
            fpaths = uploader.scan(constants.UPLOAD_DIR,
                                   file_ready=file_ready)
            if len(fpaths) > 0:
                # Upload files
                logging.debug("Uploading files: " + str(fpaths))
                # Never upload more than 1000 files in a group, as the
                # code that updates datastore can't handle more than
                # 1000 entries in a single call.
                for group in list(chunks(fpaths, 1000)):
                    logging.debug("Uploading group of files: " + str(group))
                    errors = uploader.upload(group)
                    # Attempt to heal any errors that may have occurred
                    logging.info("Healing errors: " + str(errors))
                    uploader.heal(errors)
        except Exception as e:
            # Keep the daemon alive on any failure; log the traceback.
            logging.error(
                "Unexpected exception caught at uploader outer loop:" +
                str(e))
            ex_type, ex, tb = sys.exc_info()
            traceback.print_tb(tb)
        # Allow some files to accumulate before taking our next pass
        time.sleep(sleep_time)
def test_upload(setup_and_teardown, bucket: str):
    """Happy path: the lambda should return 204, echo the bucket in the
    presigned URL, and actually place the object under uploads/."""
    key = "test/mock_data/data.xml"
    event = {"queryStringParameters": {"file": key, "bucket": bucket}}
    result = uploader.upload(event, {})
    body_url = json.loads(result["body"])["url"]
    assert result["statusCode"] == 204
    assert body_url.split("/")[-1] == bucket
    assert s3.get_object_as_string(bucket, "uploads/" + key) != ""
def test_upload_s3_error():
    """A nonexistent bucket should surface as a 500 with the S3 error
    message in the response body."""
    bad_bucket = "invalid-xml-bucket"
    event = {
        "queryStringParameters": {
            "file": "test/mock_data/data.xml",
            "bucket": bad_bucket,
        }
    }
    result = uploader.upload(event, {})
    assert result["statusCode"] == 500
    assert json.loads(result["body"])["message"] == "S3 file upload error"
def test__upload__pause__resume__ShouldNotUploadPaused_ShouldContinueOnResume(
        self, mock_upload, getsize_mock):
    """A paused task must be skipped while the other three upload; resuming
    it must then upload exactly that task."""
    # Adapter calls block on the acquire latch until released below.
    mock_upload.side_effect = TestUploader._sideEffectAcquire
    uploader.upload("localPath0", "remoteParentId0", "remoteName0", "key0")
    uploader.upload("localPath1", "remoteParentId1", "remoteName1", "key1")
    uidOfTaskToPause = uploader.upload("localPath2_to_be_paused",
                                       "remoteParentId2", "remoteName2",
                                       "key2")
    uploader.upload("localPath3", "remoteParentId3", "remoteName3", "key3")
    uploader.pause(uidOfTaskToPause)
    # One release per queued task (four uploads were submitted).
    TestUploader._sideEffectRelease()
    TestUploader._sideEffectRelease()
    TestUploader._sideEffectRelease()
    TestUploader._sideEffectRelease()
    uploader.wait()
    # Task 2 is paused, so only 0, 1 and 3 reach the adapter.
    mock_upload.assert_has_calls([
        unittest.mock.call("remoteParentId0", "remoteName0", "localPath0",
                           unittest.mock.ANY, unittest.mock.ANY, None),
        unittest.mock.call("remoteParentId1", "remoteName1", "localPath1",
                           unittest.mock.ANY, unittest.mock.ANY, None),
        unittest.mock.call("remoteParentId3", "remoteName3", "localPath3",
                           unittest.mock.ANY, unittest.mock.ANY, None)
    ])
    mock_upload.reset_mock()
    uploader.resume(uidOfTaskToPause)
    uploader.wait()
    # After resume, exactly the previously-paused task is uploaded.
    mock_upload.assert_called_once_with("remoteParentId2", "remoteName2",
                                        "localPath2_to_be_paused",
                                        unittest.mock.ANY, unittest.mock.ANY,
                                        None)
def test__upload__ShouldCallElfCloudAdapterAndCallbacks(self, mock_upload, getsize_mock):
    """Consecutive uploads should each reach the adapter and invoke the
    start/completed callbacks with no arguments."""
    # The mocked adapter releases the latch; presumably a synchronization
    # primitive shared via the test class -- confirm against TestUploader.
    mock_upload.side_effect = TestUploader._sideEffectRelease
    startCb = unittest.mock.Mock()
    completedCb = unittest.mock.Mock()
    uploader.upload("localPath1", "remoteParentId1", "remoteName1", "key1", startCb, completedCb)
    TestUploader._sideEffectAcquire()
    uploader.wait()
    mock_upload.assert_called_with("remoteParentId1", "remoteName1", "localPath1", unittest.mock.ANY, unittest.mock.ANY, None)
    startCb.assert_called_with()
    completedCb.assert_called_with()
    # Second upload reuses the same callbacks and must behave identically.
    uploader.upload("localPath2", "remoteParentId2", "remoteName2", "key2", startCb, completedCb)
    TestUploader._sideEffectAcquire()
    uploader.wait()
    mock_upload.assert_called_with("remoteParentId2", "remoteName2", "localPath2", unittest.mock.ANY, unittest.mock.ANY, None)
    startCb.assert_called_with()
    completedCb.assert_called_with()
def process(): """主流程""" # 检查七牛相关配置是否已配置 if not all((URI_PREFIX, ACCESS_KEY, SECRET_KEY, BUCKET_NAME)): notice('请先设置七牛相关配置!') open_with_editor('config.py') return try: img_path = clipboard.get_pasteboard_img_path() except CLIPBOARD_EXCEPTIONS as error: notice(str(error)) return file_name = os.path.split(img_path)[-1] file_type = file_name.split('.')[-1] if file_type == 'tiff': new_img_path = '/tmp/{}.png'.format(int(time.time())) # tiff --> png _convert_to_png(img_path, new_img_path) img_path = new_img_path else: new_img_path = '/tmp/{}-{}.{}'.format(file_name, int(time.time()), file_type) os.system('cp {} {}'.format(img_path, new_img_path)) img_path = new_img_path # 获取图片尺寸 # width, height = _get_img_size(img_path) try: # 上传到七牛 upload_result = uploader.upload(img_path, ACCESS_KEY, SECRET_KEY, BUCKET_NAME) if not upload_result: notice('上传图片到七牛失败,请检查七牛相关配置是否正确!') return # 完整的七牛图片URI img_file_name = os.path.split(img_path)[-1] img_uri = '{}/{}'.format(URI_PREFIX, img_file_name) notice('上传成功!') except Exception as error: notice('上传图片到七牛异常!{}'.format(str(error))) return # markdown使用html格式,保证图片大小 markdown_img = IMG_TPL.format(img_uri, SCALE_RATE) # 写入剪贴板 write_to_pasteboard(markdown_img) # 打印出markdown格式的图片地址 print_pasteboard_content()
def test_up(self):
    """Uploading the sample CSV should succeed and report its stored
    location under the 'breakfast' prefix."""
    # BUG FIX: the file handle was opened and never closed (leak under
    # repeated test runs); a context manager guarantees closure.
    with open('./Tests/test.csv', 'rb') as f:
        result = up.upload(f, 'test')
    self.assertEqual(result, {
        'upload': True,
        'location': 'breakfast/test'
    })
def tgmupload(filePath):
    """Upload one image file to Telegram unless it is already recorded in the
    uploaded-documents ledger.

    Args:
        filePath: path to the candidate file; only jpg/png/jpeg/bmp accepted.
    """
    if not filePath.endswith((".jpg", ".png", ".jpeg", ".bmp")):
        # BUG FIX: output string was misspelled "Unsported File".
        print("Unsupported File")
        return
    # Windows paths use backslashes; everything else uses forward slashes.
    if platform.system() == "Windows":
        fileName = filePath.split("\\")[-1]
    else:
        fileName = filePath.split('/')[-1]
    # BUG FIX: the ledger file handle was never closed; read it under a
    # context manager. Ledger lines are "name,..." -- first field is the name.
    with open(dir_path + folders[0] + files[0]) as uploadedDocuments:
        udls = [line.split(",")[0] for line in uploadedDocuments]
    if fileName not in udls:
        print(f"Uploading {filePath}")
        uploader.upload(filePath, fileName)
def test__upload__list__ShouldGiveListOfUploadsTodo(
        self, mock_upload, getsize_mock):
    """listAll() must report the first queued task as "ongoing" and the rest
    as "todo", with uids continuing from the generator's current value."""
    # Adapter calls block on the acquire latch until released below.
    mock_upload.side_effect = TestUploader._sideEffectAcquire
    getsize_mock.return_value = 100
    cb = unittest.mock.Mock()
    # Peek the next uid so expected uids can be computed relative to it.
    currentUid = uidgenerator.peekUid()
    uploader.upload("localPath0", "remoteParentId0", "remoteName0", "key0")
    uploader.upload("localPath1", "remoteParentId1", "remoteName1", "key1")
    uploader.upload("localPath3", "remoteParentId3", "remoteName3", "key3")
    uploader.listAll(cb)
    TestUploader._sideEffectRelease()
    TestUploader._sideEffectRelease()
    TestUploader._sideEffectRelease()
    uploader.wait()
    cb.assert_called_once_with([{
        "uid": currentUid + 1,
        "size": getsize_mock.return_value,
        "remoteName": 'remoteName0',
        "state": "ongoing",
        "parentId": "remoteParentId0"
    }, {
        "uid": currentUid + 2,
        "size": getsize_mock.return_value,
        "remoteName": "remoteName1",
        "state": "todo",
        "parentId": "remoteParentId1"
    }, {
        "uid": currentUid + 3,
        "size": getsize_mock.return_value,
        "remoteName": "remoteName3",
        "state": "todo",
        "parentId": "remoteParentId3"
    }])
def uploadTestFile(data):
    """Fixture-style generator: upload *data* from a temp file, yield to the
    test body, then remove the remote data item and verify the callbacks.

    chunkCb is expected once per full request-size block plus a final call
    reporting len(data).
    """
    EXPECTED_CHUNKS = [i_ for i_ in range(elfcloudclient.DEFAULT_REQUEST_SIZE_BYTES, len(data), \
                                          elfcloudclient.DEFAULT_REQUEST_SIZE_BYTES)] + [len(data)]
    startCb = unittest.mock.Mock()
    completedCb = unittest.mock.Mock()
    chunkCb = unittest.mock.Mock()
    failedCb = unittest.mock.Mock()
    with tempfile.NamedTemporaryFile('wb') as tf:
        tf.write(data)
        tf.flush()  # ensure the bytes are on disk before uploading
        remoteName = basename(tf.name)
        uploader.upload(tf.name, VALID_PARENTID, remoteName, key=None,
                        startCb=startCb, completedCb=completedCb,
                        chunkCb=chunkCb, failedCb=failedCb)
        yield  # test body runs here
        elfcloudclient.removeDataItem(VALID_PARENTID, remoteName)
        startCb.assert_called_once_with()
        completedCb.assert_called_once_with()
        chunkCb.assert_has_calls([call(len(data), i_) for i_ in EXPECTED_CHUNKS])
        failedCb.assert_not_called()
def run():
    """Import records from a cp852-encoded DBF file (company 4 variant) and
    upload each with optional photo/map attachments.

    Per-record failures restart the HTTP session and processing continues.
    """
    uploader.set_cfg('prod')
    print uploader.cfg
    s = requests.Session()
    cookies = uploader.login(s)
    for row in DBF(DATA_FILE, encoding='cp852'):
        try:
            # Build the free-text description from two DBF columns.
            desc = add_desc('', row['USEK_SMER'])
            desc = add_desc(desc, row['OSADENIE'])
            native_id_tmp = str(row['CISLO'])
            # GPS column holds "lat,lng" as one comma-separated string.
            (lat, lng) = row['GPS'].split(',')
            # multipart/form-data fields: (filename, value) tuples (Python 2
            # bytes -- text encoded to utf-8).
            files = {
                'company_id': (None, '4'),
                'native_id': (None, native_id_tmp),
                'type': (None, row['TYP'].encode('utf-8')),
                'region': (None, ''),
                'district': (None, ''),
                'city': (None, row['LOKALITA'].encode('utf-8')),
                'street': (None, row['ULICA_CEST'].encode('utf-8')),
                'description': (None, desc.encode('utf-8')),
                'lat': (None, lat.strip()),
                'lng': (None, lng.strip()),
            }
            #files = dict()
            photo = '{0}{1}'.format(DATA_DIR, row['OBRAZOK'])
            mapa = '{0}{1}'.format(DATA_DIR, row['MAPA'])
            if os.path.isfile(photo) == True:
                files['picture_file'] = open(photo, 'rb')
            if os.path.isfile(mapa) == True:
                files['map_picture_file'] = open(mapa, 'rb')
            uploader.upload(s, cookies, files)
        except Exception as e:
            # Recover by restarting the whole session, then continue.
            logging.exception('Chyba')
            s.close()
            time.sleep(3)
            logging.info('======= Restarting sesssion')
            s = requests.Session()
            cookies = uploader.login(s)
        # Throttle between records.
        time.sleep(100)
def run():
    """Import records from a DBF file (company 3 variant) and upload each
    with optional photo/map attachments.

    Per-record failures restart the HTTP session and processing continues.
    """
    uploader.set_cfg('prod')
    print uploader.cfg
    s = requests.Session()
    cookies = uploader.login(s)
    for row in DBF(DATA_FILE):
        try:
            # Build the free-text description from two DBF columns.
            desc = add_desc('', row['CTVRT'])
            desc = add_desc(desc, row['UL_CIS1'])
            native_id_tmp = str(row['E_CIS'])
            # multipart/form-data fields: (filename, value) tuples (Python 2
            # bytes -- text encoded to utf-8).
            files = {
                'company_id': (None, '3'),
                'native_id': (None, native_id_tmp),
                'type': (None, row['VELIKOST'].encode('utf-8')),
                'region': (None, row['KRAJ'].encode('utf-8')),
                'district': (None, row['OKRES'].encode('utf-8')),
                'city': (None, row['MESTO'].encode('utf-8')),
                'street': (None, row['UL_CIS'].encode('utf-8')),
                'description': (None, desc.encode('utf-8')),
                'lat': (None, str(row['VGS84_N'])),
                'lng': (None, str(row['VGS84_EO'])),
            }
            #files = dict()
            # Image names drop the id's 2-character prefix -- TODO confirm.
            photo = '{0}/photos/{1}.jpg'.format(DATA_DIR, native_id_tmp[2:])
            mapa = '{0}/maps/{1}m.jpg'.format(DATA_DIR, native_id_tmp[2:])
            if os.path.isfile(photo) == True:
                files['picture_file'] = open(photo, 'rb')
            if os.path.isfile(mapa) == True:
                files['map_picture_file'] = open(mapa, 'rb')
            uploader.upload(s, cookies, files)
        except Exception as e:
            # Recover by restarting the whole session, then continue.
            logging.exception('Chyba')
            s.close()
            time.sleep(3)
            logging.info('======= Restarting sesssion')
            s = requests.Session()
            cookies = uploader.login(s)
        # Throttle between records.
        time.sleep(100)
def tgfupload():
    """Upload every not-yet-uploaded image found in the directories listed in
    the watched-folders config file.

    Each line of the config file is a directory path; the uploaded-documents
    ledger ("name,..." per line) is consulted before each upload.
    """
    # BUG FIX: both file handles were opened and never closed; use context
    # managers throughout.
    with open(dir_path + folders[1] + files[2], "r") as f:
        for i in f:
            i = i.rstrip('\n')
            lof = os.listdir(i)
            for fi in lof:
                if not fi.endswith((".jpg", ".png", ".jpeg", ".bmp")):
                    continue
                if platform.system() == "Windows":
                    fileName = fi.split("\\")[-1]
                else:
                    fileName = fi.split('/')[-1]
                # BUG FIX: the original built the path as ``i + x + fi``
                # where ``x`` is undefined (NameError at runtime); join the
                # directory and entry name portably instead.
                filePath = os.path.join(i, fi)
                # Re-read the ledger for each candidate, matching the
                # original per-file behavior.
                with open(dir_path + folders[0] + files[0]) as uploadedDocuments:
                    udls = [line.split(",")[0] for line in uploadedDocuments]
                if fileName in udls:
                    continue
                print(f"Uploading {filePath}")
                uploader.upload(filePath, fileName)
def stop_capture():
    """Stop the running ffmpeg capture, encode the recording, upload it, and
    open the resulting URL in the browser."""
    global PID
    global name
    # luckily, ffmpeg will accept a 'q' on stdin as an exit command --
    # lucky, because it's very hard to send a Ctrl+C to a process on Windows.
    PID.stdin.write(b'q')
    PID.stdin.flush()
    PID.wait()  # block until ffmpeg has fully exited
    PID = None
    full_name = '{}.webm'.format(name)
    url = make_url(full_name)
    print('Your url will be: {}'.format(url))
    encode_video(name)
    print('Uploading...')
    # Remote name matches the local name.
    uploader.upload(full_name, full_name)
    cleanup(name)
    print('\n\n{}\n\n'.format(url))
    webbrowser.open(url)
def download(update, context):
    """Telegram handler: download the URL(s) in the message via SmartDL,
    upload the result, and reply with the share link.

    Progress and errors are reported by editing the status message.
    """
    url = update.message.text
    # SmartDL accepts a list of (mirror) URLs.
    url = url.split()
    sent_message = context.bot.send_message(chat_id=update.message.chat_id,
                                            text="Trying To download ....")
    # dest = "C:\\Downloads\\"  # or '~/Downloads/' on linux
    dest = "Downloads/"
    # dest = "Downloads\\"  # For windows
    filename = None
    try:
        obj = SmartDL(url, dest)
        obj.start()
        sent_message.edit_text("Downloading complete")
        DownloadStatus = True
        # BUG FIX: ``obj.get_dest()`` previously ran unconditionally after
        # the try/except, raising NameError whenever the SmartDL constructor
        # itself failed; only resolve the destination on success.
        filename = obj.get_dest()
        print(filename)
    except Exception as e:
        print(e)
        sent_message.edit_text("Downloading error :{}".format(e))
        DownloadStatus = False
    try:
        if DownloadStatus:
            sent_message.edit_text("Uploading Your file")
            wurl = uploader.upload([filename])
            sent_message.edit_text(
                " Full Link : <a href='{}'>Download</a>".format(wurl),
                parse_mode=ParseMode.HTML)
            try:
                os.remove(filename)
                print("file Removed")
            except Exception as e:
                print(e)
    except Exception as e:
        print(e)
        if DownloadStatus:
            sent_message.edit_text("Uploading fail :".format(e))
            try:
                os.remove(filename)
                print("file Removed")
            except Exception as e:
                print(e)
def test__upload__cancel__ShouldNotUploadCancelled(self, mock_upload, getsize_mock):
    """A cancelled task must never reach the adapter; the other three must."""
    # Adapter calls block on the acquire latch until released below.
    mock_upload.side_effect = TestUploader._sideEffectAcquire
    uploader.upload("localPath0", "remoteParentId0", "remoteName0", "key0")
    uploader.upload("localPath1", "remoteParentId1", "remoteName1", "key1")
    uidOfTaskToCancel = uploader.upload("localPath2_to_be_cancelled", "remoteParentId2", "remoteName2", "key2")
    uploader.cancel(uidOfTaskToCancel)
    uploader.upload("localPath3", "remoteParentId3", "remoteName3", "key3")
    # Three releases: only the three non-cancelled tasks will run.
    TestUploader._sideEffectRelease()
    TestUploader._sideEffectRelease()
    TestUploader._sideEffectRelease()
    uploader.wait()
    mock_upload.assert_has_calls([unittest.mock.call("remoteParentId0", "remoteName0", "localPath0", unittest.mock.ANY, unittest.mock.ANY, None),
                                  unittest.mock.call("remoteParentId1", "remoteName1", "localPath1", unittest.mock.ANY, unittest.mock.ANY, None),
                                  unittest.mock.call("remoteParentId3", "remoteName3", "localPath3", unittest.mock.ANY, unittest.mock.ANY, None)])
def main(sleep_time=constants.UPLOAD_DAEMON_SLEEP_TIME):
    """Daemon loop: scan for ready files, upload them, heal upload errors,
    and sleep between passes.

    Args:
        sleep_time: seconds to wait between scan passes.
    """
    logging.basicConfig(level=logging.INFO,
                        format=constants.LOG_FMT_S_THREADED)
    while True:
        try:
            # Scan for files to upload
            fpaths = uploader.scan(constants.UPLOAD_DIR,
                                   file_ready=file_ready)
            if len(fpaths) > 0:
                # Upload files
                errors = uploader.upload(fpaths)
                # Attempt to heal any errors that may have occurred
                uploader.heal(errors)
        except Exception as e:
            # ROBUSTNESS FIX: previously any exception killed the daemon;
            # log and keep looping, matching the GCS daemon variant.
            logging.error(
                "Unexpected exception caught at uploader outer loop:" +
                str(e))
        # Allow some files to accumulate before taking our next pass
        time.sleep(sleep_time)
def setUpClass(clazz):
    """One-time setup: upload the Unity test patch through the service and
    record the resulting job files URL for the tests to download from."""
    # make a temporary directory for downloads
    TestUnityPlugins.__OUT_DIR = tempfile.mkdtemp(prefix="TestUnity-")
    exit_code, reply_json = uploader.upload(
        input_dir=os.path.join(UNITY_TEST_DIR, "Assets", "Patch"),
        output_dirs=[TestUnityPlugins.__OUT_DIR],
        name="heavy",
        generators="c",
        release="dev",
        token=TestUnityPlugins.__TEST_TOKEN,
        x=False)
    # unittest asserts can only be called on instances, so use plain asserts
    assert exit_code == 0, f"Uploader returned with non-zero exit code: {exit_code}"
    assert len(reply_json.get("errors",
                              [])) == 0, reply_json["errors"][0]["detail"]
    # URL of the generated files for this job, read from the reply payload.
    TestUnityPlugins.__JOB_URL = reply_json["data"]["links"]["files"][
        "self"]
def createWidgets(self):
    """Build the editor UI: Quit/Publish/Set Title buttons and the text area,
    all packed into self.FRAME."""
    self.QUIT = tk.Button(self.FRAME)
    self.QUIT["text"] = "Quit"
    self.QUIT["fg"] = "red"
    self.QUIT["command"] = self.QUIT.quit
    self.QUIT.pack({"side": "left"})
    self.PUBLISH = tk.Button(self.FRAME)
    self.PUBLISH["text"] = "Publish"
    # The lambda defers the lookup of self.TEXT (created below) until the
    # button is actually clicked, so the ordering here is safe.
    self.PUBLISH["command"] = lambda: upload(username, password, host,
                                             self.documentTitle, self.TEXT,
                                             tk.END)
    self.PUBLISH.pack({"side": "left"})
    self.TEXT = tk.Text(self.FRAME)
    self.TEXT.pack()
    self.SETTITLE = tk.Button(self.FRAME)
    self.SETTITLE["text"] = "Set Title"
    self.SETTITLE["command"] = lambda: self.setTitle()
    self.SETTITLE.pack({"side": "left"})
def test__upload__pause__resume__ShouldNotUploadPaused_ShouldContinueOnResume(self, mock_upload, getsize_mock):
    """Pausing one of four queued tasks must keep it from the adapter while
    the other three upload; resuming must upload exactly that task."""
    # Adapter calls block on the acquire latch until released below.
    mock_upload.side_effect = TestUploader._sideEffectAcquire
    uploader.upload("localPath0", "remoteParentId0", "remoteName0", "key0")
    uploader.upload("localPath1", "remoteParentId1", "remoteName1", "key1")
    uidOfTaskToPause = uploader.upload("localPath2_to_be_paused", "remoteParentId2", "remoteName2", "key2")
    uploader.upload("localPath3", "remoteParentId3", "remoteName3", "key3")
    uploader.pause(uidOfTaskToPause)
    # One release per submitted task.
    TestUploader._sideEffectRelease()
    TestUploader._sideEffectRelease()
    TestUploader._sideEffectRelease()
    TestUploader._sideEffectRelease()
    uploader.wait()
    # Task 2 is paused, so only 0, 1 and 3 reach the adapter.
    mock_upload.assert_has_calls([unittest.mock.call("remoteParentId0", "remoteName0", "localPath0", unittest.mock.ANY, unittest.mock.ANY, None),
                                  unittest.mock.call("remoteParentId1", "remoteName1", "localPath1", unittest.mock.ANY, unittest.mock.ANY, None),
                                  unittest.mock.call("remoteParentId3", "remoteName3", "localPath3", unittest.mock.ANY, unittest.mock.ANY, None)])
    mock_upload.reset_mock()
    uploader.resume(uidOfTaskToPause)
    uploader.wait()
    # After resume, exactly the previously-paused task is uploaded.
    mock_upload.assert_called_once_with("remoteParentId2", "remoteName2", "localPath2_to_be_paused", unittest.mock.ANY, unittest.mock.ANY, None)
def process(img_path): """主流程""" # 检查七牛相关配置是否已配置 if not all((URI_PREFIX, ACCESS_KEY, SECRET_KEY, BUCKET_NAME)): notice('请先设置七牛相关配置!') open_with_editor('config.py') return file_name = os.path.split(img_path)[-1] file_type = file_name.split('.')[-1] if file_type == 'tiff': new_img_path = '/tmp/{}.png'.format(int(time.time())) # tiff --> png _convert_to_png(img_path, new_img_path) img_path = new_img_path # 获取图片尺寸 # width, height = _get_img_size(img_path) try: # 上传到七牛 upload_result = uploader.upload( img_path, ACCESS_KEY, SECRET_KEY, BUCKET_NAME) if not upload_result: notice('上传图片到七牛失败,请检查七牛相关配置是否正确!') return # 完整的七牛图片URI img_file_name = os.path.split(img_path)[-1] img_uri = '{}/{}'.format(URI_PREFIX, img_file_name) notice('上传成功!') except Exception as error: notice('上传图片到七牛异常!{}'.format(str(error))) return print('上传完成,地址为:' + img_uri)
def iterate():
    """One pass over new reddit links: mirror each live 4chan image and post
    a comment with the mirrored link, recording handled thread ids."""
    checker.setup()
    links = getter.get_reddit_data()
    token = commenter.reddit_authorize()
    for link in links:
        try:
            url, tid = link["url"], link["id"]
            print url, tid
            # HEAD request: skip links whose image is no longer available.
            fourchan_res = requests.head(url)
            if fourchan_res.status_code != 200:
                continue
            # Skip threads that were already handled.
            if checker.check(tid):
                continue
            image = uploader.upload(url)
            # Retry commenting until it succeeds.
            result = False
            while not result:
                result = commenter.comment(token, tid, image)
            if result:
                checker.add(tid)
                checker.commit()
        except Exception as e:
            print sys.exc_info()
def iterate():
    """One pass over new reddit links: mirror each live 4chan image, comment
    with the mirrored link, and record handled thread ids."""
    checker.setup()
    links = getter.get_reddit_data()
    token = commenter.reddit_authorize()
    for link in links:
        try:
            url, tid = link["url"], link["id"]
            print url, tid
            # HEAD request: skip links whose image is no longer available.
            fourchan_res = requests.head(url)
            if fourchan_res.status_code != 200:
                continue
            # Skip threads that were already handled.
            if checker.check(tid):
                continue
            image = uploader.upload(url)
            # Retry commenting until it succeeds.
            result = False
            while not result:
                result = commenter.comment(token,tid,image)
            if result:
                checker.add(tid)
                checker.commit()
        except Exception as e:
            print sys.exc_info()
def test__upload__list__ShouldGiveListOfUploadsTodo(self, mock_upload, getsize_mock):
    """listAll() must report the first queued task as "ongoing" and the rest
    as "todo", with uids continuing from the generator's current value."""
    # Adapter calls block on the acquire latch until released below.
    mock_upload.side_effect = TestUploader._sideEffectAcquire
    getsize_mock.return_value = 100
    cb = unittest.mock.Mock()
    # Peek the next uid so expected uids can be computed relative to it.
    currentUid = uidgenerator.peekUid()
    uploader.upload("localPath0", "remoteParentId0", "remoteName0", "key0")
    uploader.upload("localPath1", "remoteParentId1", "remoteName1", "key1")
    uploader.upload("localPath3", "remoteParentId3", "remoteName3", "key3")
    uploader.listAll(cb)
    TestUploader._sideEffectRelease()
    TestUploader._sideEffectRelease()
    TestUploader._sideEffectRelease()
    uploader.wait()
    cb.assert_called_once_with([{"uid":currentUid+1, "size":getsize_mock.return_value, "remoteName":'remoteName0', "state":"ongoing", "parentId":"remoteParentId0"},
                                {"uid":currentUid+2, "size":getsize_mock.return_value, "remoteName":"remoteName1", "state":"todo", "parentId":"remoteParentId1"},
                                {"uid":currentUid+3, "size":getsize_mock.return_value, "remoteName":"remoteName3", "state":"todo", "parentId":"remoteParentId3"}])
def test__upload__cancel__ShouldNotUploadCancelled(self, mock_upload,
                                                   getsize_mock):
    """A cancelled task must never reach the adapter; the remaining three
    queued tasks must all be uploaded."""
    # Adapter calls block on the acquire latch until released below.
    mock_upload.side_effect = TestUploader._sideEffectAcquire
    uploader.upload("localPath0", "remoteParentId0", "remoteName0", "key0")
    uploader.upload("localPath1", "remoteParentId1", "remoteName1", "key1")
    uidOfTaskToCancel = uploader.upload("localPath2_to_be_cancelled",
                                        "remoteParentId2", "remoteName2",
                                        "key2")
    uploader.cancel(uidOfTaskToCancel)
    uploader.upload("localPath3", "remoteParentId3", "remoteName3", "key3")
    # Three releases: only the three non-cancelled tasks will run.
    TestUploader._sideEffectRelease()
    TestUploader._sideEffectRelease()
    TestUploader._sideEffectRelease()
    uploader.wait()
    mock_upload.assert_has_calls([
        unittest.mock.call("remoteParentId0", "remoteName0", "localPath0",
                           unittest.mock.ANY, unittest.mock.ANY, None),
        unittest.mock.call("remoteParentId1", "remoteName1", "localPath1",
                           unittest.mock.ANY, unittest.mock.ANY, None),
        unittest.mock.call("remoteParentId3", "remoteName3", "localPath3",
                           unittest.mock.ANY, unittest.mock.ANY, None)
    ])
def upload_files(ingest_server='', bundle_name='', file_list=None, bundle_size=0, meta_list=None, tartar=False):
    """
    task created on a separate Celery process to bundle and upload in the background
    status and errors are pushed by celery to the main server through RabbitMQ

    When *tartar* is set, the finished bundle is wrapped in a second tar
    layer (the ".tar.tar" dual extension marks a tartar bundle).
    """
    target_dir = os.path.dirname(bundle_name)
    if not os.path.isdir(target_dir):
        # NOTE(review): this reports ERROR but does NOT abort -- bundling
        # still proceeds below. Confirm whether an early return is intended.
        current_task.update_state(state='ERROR', meta={'Status': 'Bundle directory does not exist'})
    task_state("PROGRESS", "Cleaning previous uploads")
    ##clean tar directory
    #if CLEAN_TAR:
    #    err_str = clean_target_directory(target_dir)
    #    if err_str:
    #        task_state('PROGRESS', err_str)
    # initial state pushed through celery
    task_state("PROGRESS", "Starting Bundle/Upload Process")
    bundle(bundle_name=bundle_name, file_list=file_list, meta_list=meta_list, bundle_size=bundle_size)
    task_state("PROGRESS", "Completed Bundling")
    if tartar:
        # create the file tuple list of 1 file (the bundle itself)
        dir = os.path.dirname(bundle_name)
        fname = os.path.basename(bundle_name)
        file_tuples = []
        file_tuples.append((bundle_name, fname))
        bundle_size = os.path.getsize(bundle_name)
        # dual extension indicates tartar
        bundle_name += '.tar'
        bundle(bundle_name=bundle_name, file_list=file_tuples, meta_list=meta_list, bundle_size=bundle_size)
    task_state("PROGRESS", "Starting Upload")
    result = upload(bundle_name=bundle_name, ingest_server=ingest_server)
    if not result:
        task_state('FAILURE', "Uploader dieded. We don't know why it did")
    # The uploader's reply is expected to be JSON; a parse failure marks the
    # task failed (Python 2 except syntax).
    try:
        status = json.loads(result)
    except Exception, e:
        task_state('FAILURE', e.message)
        return 'Upload Failed'
def modify(request):
    # Django view: on POST, mutate the selection/upload state and possibly
    # bundle-and-upload; otherwise interpret the query string as a
    # "modType=path" toggle command.  Python 2 code (print statements).
    print "modify " + request.get_full_path()
    global selectedList
    global user
    global password
    global directoryHistory
    # NOTE(review): rootDir is assigned below but NOT declared global here,
    # which makes it function-local and should raise UnboundLocalError on the
    # first read — confirm intended behavior / add `global rootDir`.
    if request.POST:
        if request.POST.get("Clear"):
            # Drop the entire current file selection.
            selectedList = []
        if request.POST.get("upDir"):
            # Move the browsing root one directory up, if it exists.
            dir = os.path.dirname(rootDir)
            if os.path.exists(dir):
                rootDir = dir
        if request.POST.get("Select All"):
            # Select every regular (non-directory) entry under rootDir.
            contents = os.listdir(rootDir)
            for path in contents:
                fullPath = os.path.join(rootDir, path)
                if not os.path.isdir(fullPath):
                    if fullPath not in selectedList:
                        selectedList.append(fullPath)
        if request.POST.get("Upload"):
            # create a list of tuples to meet the call format
            tupleList = []
            for path in selectedList:
                tupleList.append((path, None))
            # Timestamp names the bundle uniquely (month.day.year.H.M.S).
            current_time = datetime.datetime.now().time().strftime("%m.%d.%Y.%H.%M.%S")
            # current_date = datetime.datetime.now().date().strftime("%H.%M.%S")
            targetPath = Filepath.objects.get(name="target")
            if targetPath is not None:
                targetDir = targetPath.fullpath
            else:
                targetDir = rootDir
            bundleName = os.path.join(targetDir, current_time + ".tar")
            print bundleName
            serverPath = Filepath.objects.get(name="server")
            if serverPath is not None:
                sPath = serverPath.fullpath
            else:
                # Fallback ingest server when no "server" Filepath is configured.
                sPath = "dev1.my.emsl.pnl.gov"
            # return HttpResponseRedirect(reverse('home.views.list'))
            bundle(
                bundle_name=bundleName,
                instrument_name="insty",
                tarfile=True,
                proposal="45796",
                file_list=tupleList,
                recursive=False,
                verbose=True,
                groups=None,
            )
            """ sesh = OpenSession(protocol="https", server=sPath, user="******", insecure=True, password="******", negotiate = False, verbose=True ) boolVal = TestAuth(protocol="https", server=sPath, user="******", insecure=True, password="******", negotiate = False, verbose=True ) res = UploadBundle( bundle_name=bundleName, session=sesh) """
            print user
            res = upload(
                bundle_name=bundleName,
                protocol="https",
                server=sPath,
                user=user,
                insecure=True,
                password=password,
                negotiate=False,
                verbose=True,
            )
            # The uploader returns a status URL on success; redirect to it.
            if "http" in res:
                return HttpResponseRedirect(res)
    else:
        # GET: query string carries one "modType=path" pair.
        o = urlparse(request.get_full_path())
        params = o.query.split("=")
        modType = params[0]
        path = params[1]
        # spaces
        path = path.replace("%20", " ")
        # backslash
        path = path.replace("%5C", "\\")
        full = os.path.join(rootDir, path)
        if modType == "enterDir":
            # Descend into the chosen subdirectory and remember the step.
            rootDir = os.path.join(rootDir, path)
            directoryHistory.append(path)
            print "rootDir = " + rootDir
        elif modType == "toggleFile":
            # NOTE(review): selectedFiles/selectedDirs are not defined or
            # declared global in this view (only selectedList is) — this
            # looks like a NameError waiting to happen; verify against the
            # module's globals.
            if full not in selectedFiles:
                selectedFiles.append(full)
            else:
                selectedFiles.remove(full)
        elif modType == "toggleDir":
            if full not in selectedDirs:
                selectedDirs.append(full)
            else:
                selectedDirs.remove(full)
    # Re-render the listing after any modification.
    return HttpResponseRedirect(reverse("home.views.list"))
def test__upload__CallbackNotGiven_ShouldOnlyCallElfCloudAdapter(self, mock_upload, getsize_mock):
    """When no callbacks are supplied, the task is still forwarded to the
    elfCLOUD adapter with a None chunk callback."""
    uploader.upload("localPath1", "remoteParentId1", "remoteName1", "key1")
    uploader.wait()
    anything = unittest.mock.ANY
    mock_upload.assert_called_with("remoteParentId1", "remoteName1",
                                   "localPath1", anything, anything, None)
# Persist the collected caller records into a versioned SQLite cache file,
# then compress it and upload both the database archive and the data file.
# NOTE(review): this is a statement run — it may be the interior of a larger
# function defined above this chunk; `status`, `caller_list`, `uploader`,
# `compress` and `data_file` are defined elsewhere.
status.count = len(caller_list)
status.bump()  # advance the cache version before writing the new snapshot
conn = sqlite3.connect('cache/caller_' + str(status.version) + '.db')
cur = conn.cursor()
cur.execute('''CREATE TABLE IF NOT EXISTS caller ( id INTEGER PRIMARY KEY AUTOINCREMENT, number TEXT UNIQUE, name TEXT, count INTEGER, type INTEGER, source INTEGER, time INTEGER );''')
# for caller in caller_list:
#     print(caller)
#     pass
# Bulk-insert all caller rows; `number` is UNIQUE so duplicates would raise.
cur.executemany('insert into caller (number, name, count, type, source, time) values (?, ?, ?, ?, ?, ?)', caller_list)
cur.execute('''CREATE TABLE IF NOT EXISTS status ( id INTEGER PRIMARY KEY AUTOINCREMENT, version INTEGER, count INTEGER, new_count INTEGER, time INTEGER );''')
# Record snapshot metadata (version/count/new_count/time) alongside the data.
cur.execute('insert into status (version, count, new_count, time) values (?, ?, ?, ?)', status.to_list())
conn.commit()
cur.close()
conn.close()
# 5. upload offline database to QiNiu
zip_file = compress('cache/caller_' + str(status.version) + '.db')
status.update(zip_file)
# upload files
uploader.upload(zip_file)
uploader.upload(data_file)
# NOTE(review): fragment — the `try:` matching this `except:` lies above this
# chunk, so the first two statements cannot be read in isolation here.
    sys.exit()
except:
    # NOTE(review): bare except swallows the real error; consider at least
    # logging the exception before terminating.
    print "Unknown error observed! Please send your documents to Enthalpy."
    catalysis_globals.terminate()
# Get file even when the program is .exe.
output_file = directory + "/testData.txt"
if getattr(sys, 'frozen', False):
    # Frozen (PyInstaller/py2exe) build: resolve next to the executable.
    output_file = os.path.join(os.path.dirname(sys.executable), output_file)
# Write the full information to the file.
# NOTE(review): file handle is never closed explicitly — relies on refcount
# cleanup; a `with open(...)` block would be safer.
open(output_file, "w").write('//Definition//Def6\n' + json_data)
if not test_mode:
    # Only upload when trial_id parses as an integer; otherwise skip quietly.
    try:
        upload_dict["trial_id"] = int(upload_dict["trial_id"])
    except ValueError:
        print "Choosing not to upload data..."
    else:
        print (
            "Choosing to upload data to trial {}. Press enter to " +
            "continue.").format(
                upload_dict["trial_id"])
        # Pause so the user can abort before the upload starts.
        raw_input(
            "Alternately, exit to abort uploading while keeping a valid " +
            "trial file.")
        uploader.upload(directory, upload_dict)
print "Catalysis complete!"
catalysis_globals.terminate()
def upload_only():
    """Upload the path given in args.upload and open its public URL in a browser."""
    target = args.upload
    destination = make_url(target)
    print("Uploading to: {}".format(destination))
    uploader.upload(target, target)
    webbrowser.open(destination)
def upload():
    # Flask view: accept an uploaded book file, move it into the library tree,
    # create the Author/Book/Data rows, and render the edit or detail page.
    # Python 2 code (``author.decode``, ``01`` integer literals below).
    if not config.UPLOADING:
        abort(404)
    ## create the function for sorting...
    db.session.connection().connection.connection.create_function("title_sort", 1, db.title_sort)
    db.session.connection().connection.connection.create_function('uuid4', 0, lambda: str(uuid4()))
    if request.method == 'POST' and 'btn-upload' in request.files:
        # NOTE(review): `file` shadows the builtin.
        file = request.files['btn-upload']
        # uploader.upload extracts metadata (title/author/cover/extension)
        # and stages the file at meta.file_path.
        meta = uploader.upload(file)
        title = meta.title
        author = meta.author
        # Sanitize title/author into filesystem-safe directory names.
        title_dir = helper.get_valid_filename(title, False)
        author_dir = helper.get_valid_filename(author.decode('utf-8'), False)
        data_name = title_dir
        filepath = config.DB_ROOT + "/" + author_dir + "/" + title_dir
        saved_filename = filepath + "/" + data_name + meta.extension
        if not os.path.exists(filepath):
            try:
                os.makedirs(filepath)
            except OSError:
                flash("Failed to create path %s (Permission denied)." % filepath, category="error")
                return redirect(url_for('index'))
        try:
            # Move the staged upload into its final library location.
            move(meta.file_path, saved_filename)
        except OSError:
            flash("Failed to store file %s (Permission denied)." % saved_filename, category="error")
            return redirect(url_for('index'))
        file_size = os.path.getsize(saved_filename)
        if meta.cover is None:
            # No cover in the upload: fall back to the bundled generic cover.
            has_cover = 0
            basedir = os.path.dirname(__file__)
            copyfile(os.path.join(basedir, "static/generic_cover.jpg"), os.path.join(filepath, "cover.jpg"))
        else:
            has_cover = 1
            move(meta.cover, os.path.join(filepath, "cover.jpg"))
        # Reuse an existing author row if one matches by name.
        is_author = db.session.query(db.Authors).filter(db.Authors.name == author).first()
        if is_author:
            db_author = is_author
        else:
            db_author = db.Authors(author, "", "")
            db.session.add(db_author)
        path = os.path.join(author_dir, title_dir)
        # NOTE(review): datetime.datetime(101, 01, 01) is a sentinel
        # "unknown" publication date; the 01 literals are Python-2-only.
        db_book = db.Books(title, "", "", datetime.datetime.now(), datetime.datetime(101, 01, 01), 1, datetime.datetime.now(), path, has_cover, db_author, [])
        db_book.authors.append(db_author)
        db_data = db.Data(db_book, meta.extension.upper()[1:], file_size, data_name)
        db_book.data.append(db_data)
        db.session.add(db_book)
        db.session.commit()
        author_names = []
        for author in db_book.authors:
            author_names.append(author.name)
        cc = db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
        # Editors/admins go straight to the edit form; others see the detail page.
        if current_user.role_edit() or current_user.role_admin():
            return render_template('edit_book.html', book=db_book, authors=author_names, cc=cc)
        book_in_shelfs = []
        return render_template('detail.html', entry=db_book, cc=cc, title=db_book.title, books_shelfs=book_in_shelfs)