def midi_to_note_sequence(midi_data):
    """Convert MIDI file contents to a NoteSequence.

    Converts a MIDI file encoded as a string into a NoteSequence. Decoding
    errors are very common when working with large sets of MIDI files, so be
    sure to handle MIDIConversionError exceptions.

    Args:
        midi_data: A string containing the contents of a MIDI file or a
            populated pretty_midi.PrettyMIDI object.

    Returns:
        A NoteSequence.

    Raises:
        MIDIConversionError: If the MIDI data could not be decoded, or if it
            contains an invalid time signature denominator or key signature
            (e.g., an improper MIDI mode).
    """
    # In practice many MIDI files cannot be decoded with pretty_midi. Catch all
    # errors here and try to log a meaningful message. So many different
    # exceptions are raised in pretty_midi.PrettyMIDI that it is cumbersome to
    # catch them all only for the purpose of error logging.
    # pylint: disable=bare-except
    if isinstance(midi_data, pretty_midi.PrettyMIDI):
        midi = midi_data
    else:
        try:
            midi = pretty_midi.PrettyMIDI(six.BytesIO(midi_data))
        except:
            raise MIDIConversionError('Midi decoding error %s: %s' %
                                      (sys.exc_info()[0], sys.exc_info()[1]))
    # pylint: enable=bare-except

    sequence = music_pb2.NoteSequence()

    # Populate header.
    sequence.ticks_per_quarter = midi.resolution
    sequence.source_info.parser = music_pb2.NoteSequence.SourceInfo.PRETTY_MIDI
    sequence.source_info.encoding_type = (
        music_pb2.NoteSequence.SourceInfo.MIDI)

    # Populate time signatures.
    for midi_time in midi.time_signature_changes:
        time_signature = sequence.time_signatures.add()
        time_signature.time = midi_time.time
        time_signature.numerator = midi_time.numerator
        try:
            # Denominator can be too large for int32.
            time_signature.denominator = midi_time.denominator
        except ValueError:
            raise MIDIConversionError('Invalid time signature denominator %d' %
                                      midi_time.denominator)

    # Populate key signatures.
    for midi_key in midi.key_signature_changes:
        key_signature = sequence.key_signatures.add()
        key_signature.time = midi_key.time
        key_signature.key = midi_key.key_number % 12
        midi_mode = midi_key.key_number // 12
        if midi_mode == 0:
            key_signature.mode = key_signature.MAJOR
        elif midi_mode == 1:
            key_signature.mode = key_signature.MINOR
        else:
            raise MIDIConversionError('Invalid midi_mode %i' % midi_mode)

    # Populate tempo changes.
    tempo_times, tempo_qpms = midi.get_tempo_changes()
    for time_in_seconds, tempo_in_qpm in zip(tempo_times, tempo_qpms):
        tempo = sequence.tempos.add()
        tempo.time = time_in_seconds
        tempo.qpm = tempo_in_qpm

    # Populate notes by gathering them all from the midi's instruments.
    # Also set the sequence.total_time as the max end time in the notes.
    midi_notes = []
    midi_pitch_bends = []
    midi_control_changes = []
    for num_instrument, midi_instrument in enumerate(midi.instruments):
        for midi_note in midi_instrument.notes:
            if not sequence.total_time or midi_note.end > sequence.total_time:
                sequence.total_time = midi_note.end
            midi_notes.append((midi_instrument.program, num_instrument,
                               midi_instrument.is_drum, midi_note))
        for midi_pitch_bend in midi_instrument.pitch_bends:
            midi_pitch_bends.append(
                (midi_instrument.program, num_instrument,
                 midi_instrument.is_drum, midi_pitch_bend))
        for midi_control_change in midi_instrument.control_changes:
            midi_control_changes.append(
                (midi_instrument.program, num_instrument,
                 midi_instrument.is_drum, midi_control_change))

    for program, instrument, is_drum, midi_note in midi_notes:
        note = sequence.notes.add()
        note.instrument = instrument
        note.program = program
        note.start_time = midi_note.start
        note.end_time = midi_note.end
        note.pitch = midi_note.pitch
        note.velocity = midi_note.velocity
        note.is_drum = is_drum

    for program, instrument, is_drum, midi_pitch_bend in midi_pitch_bends:
        pitch_bend = sequence.pitch_bends.add()
        pitch_bend.instrument = instrument
        pitch_bend.program = program
        pitch_bend.time = midi_pitch_bend.time
        pitch_bend.bend = midi_pitch_bend.pitch
        pitch_bend.is_drum = is_drum

    for program, instrument, is_drum, midi_control_change in midi_control_changes:
        control_change = sequence.control_changes.add()
        control_change.instrument = instrument
        control_change.program = program
        control_change.time = midi_control_change.time
        control_change.control_number = midi_control_change.number
        control_change.control_value = midi_control_change.value
        control_change.is_drum = is_drum

    # TODO(douglaseck): Estimate note type (e.g. quarter note) and populate
    # note.numerator and note.denominator.

    return sequence
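# Usage sketch for midi_to_note_sequence: read raw MIDI bytes and guard the
# call, since decoding failures are expected on large corpora. The helper name
# and file path below are illustrative, not part of the original module.
def _load_sequence(path='example.mid'):
    with open(path, 'rb') as midi_file:
        midi_bytes = midi_file.read()
    try:
        return midi_to_note_sequence(midi_bytes)
    except MIDIConversionError as err:
        # Skip undecodable files rather than aborting a batch conversion.
        print('Skipping %s: %s' % (path, err))
        return None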
def _gzip(self, response):
    """Apply gzip compression to a response."""
    bytesio = six.BytesIO()
    with gzip.GzipFile(fileobj=bytesio, mode='w') as gz:
        gz.write(response)
    return bytesio.getvalue()
def _gunzip(bs):
    """Decompress a gzip-compressed byte string and return the raw bytes."""
    with gzip.GzipFile(fileobj=six.BytesIO(bs), mode='rb') as f:
        return f.read()
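# Round-trip sketch tying the two helpers above together: gzip-compress a byte
# string into an in-memory buffer, then decompress it with _gunzip. The
# standalone compressor mirrors the _gzip method above without the class
# context; its name is illustrative.
def _gzip_bytes(bs):
    buf = six.BytesIO()
    with gzip.GzipFile(fileobj=buf, mode='wb') as gz:
        gz.write(bs)
    return buf.getvalue()

assert _gunzip(_gzip_bytes(b'payload')) == b'payload'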
def test_iter_default_chunk_size_64k(self):
    resp = utils.FakeResponse({}, six.BytesIO(b'X' * 98304))
    iterator = http.ResponseBodyIterator(resp)
    chunks = list(iterator)
    self.assertEqual([b'X' * 65536, b'X' * 32768], chunks)
def test_body_size(self):
    size = 1000000007
    resp = utils.FakeResponse({'content-length': str(size)},
                              six.BytesIO(b'BB'))
    body = http.ResponseBodyIterator(resp)
    self.assertEqual(size, len(body))
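# The two tests above pin down the contract of a response-body iterator: it
# yields the payload in 64 KiB chunks and reports __len__ from the
# Content-Length header. A minimal sketch of that contract follows; it is a
# stand-in inferred from the assertions, not the real http.ResponseBodyIterator
# implementation, and the chunk size comes from the 64k test.
class _SketchBodyIterator(object):
    CHUNKSIZE = 65536

    def __init__(self, stream, content_length):
        self._stream = stream  # any file-like object, e.g. six.BytesIO
        self._size = int(content_length)

    def __len__(self):
        return self._size

    def __iter__(self):
        while True:
            chunk = self._stream.read(self.CHUNKSIZE)
            if not chunk:
                return
            yield chunk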
def getRegexParsed( regexs, url, cookieJar=None, forCookieJarOnly=False, recursiveCall=False, cachedPages={}, rawPost=False, cookie_jar_file=None): #0,1,2 = URL, regexOnly, CookieJarOnly #cachedPages = {} #print 'url',url doRegexs = re.compile('\$doregex\[([^\]]*)\]').findall(url) # print 'doRegexs',doRegexs,regexs setresolved = True for k in doRegexs: if k in regexs: #print 'processing ' ,k m = regexs[k] #print m cookieJarParam = False if 'cookiejar' in m: # so either create or reuse existing jar #print 'cookiejar exists',m['cookiejar'] cookieJarParam = m['cookiejar'] if '$doregex' in cookieJarParam: cookieJar = getRegexParsed(regexs, m['cookiejar'], cookieJar, True, True, cachedPages) cookieJarParam = True else: cookieJarParam = True #print 'm[cookiejar]',m['cookiejar'],cookieJar if cookieJarParam: if cookieJar == None: #print 'create cookie jar' cookie_jar_file = None if 'open[' in m['cookiejar']: cookie_jar_file = m['cookiejar'].split( 'open[')[1].split(']')[0] # print 'cookieJar from file name',cookie_jar_file cookieJar = getCookieJar(cookie_jar_file) # print 'cookieJar from file',cookieJar if cookie_jar_file: saveCookieJar(cookieJar, cookie_jar_file) #cookieJar = http_cookiejar.LWPCookieJar() #print 'cookieJar new',cookieJar elif 'save[' in m['cookiejar']: cookie_jar_file = m['cookiejar'].split('save[')[1].split( ']')[0] complete_path = os.path.join(profile, cookie_jar_file) # print 'complete_path',complete_path saveCookieJar(cookieJar, cookie_jar_file) if m['page'] and '$doregex' in m['page']: pg = getRegexParsed(regexs, m['page'], cookieJar, recursiveCall=True, cachedPages=cachedPages) if len(pg) == 0: pg = 'http://regexfailed' m['page'] = pg if 'setcookie' in m and m['setcookie'] and '$doregex' in m[ 'setcookie']: m['setcookie'] = getRegexParsed(regexs, m['setcookie'], cookieJar, recursiveCall=True, cachedPages=cachedPages) if 'appendcookie' in m and m['appendcookie'] and '$doregex' in m[ 'appendcookie']: m['appendcookie'] = getRegexParsed(regexs, m['appendcookie'], cookieJar, recursiveCall=True, cachedPages=cachedPages) if 'post' in m and '$doregex' in m['post']: m['post'] = getRegexParsed(regexs, m['post'], cookieJar, recursiveCall=True, cachedPages=cachedPages) # print 'post is now',m['post'] if 'rawpost' in m and '$doregex' in m['rawpost']: m['rawpost'] = getRegexParsed(regexs, m['rawpost'], cookieJar, recursiveCall=True, cachedPages=cachedPages, rawPost=True) #print 'rawpost is now',m['rawpost'] if 'rawpost' in m and '$epoctime$' in m['rawpost']: m['rawpost'] = m['rawpost'].replace('$epoctime$', getEpocTime()) if 'rawpost' in m and '$epoctime2$' in m['rawpost']: m['rawpost'] = m['rawpost'].replace('$epoctime2$', getEpocTime2()) link = '' if m['page'] and m[ 'page'] in cachedPages and not 'ignorecache' in m and forCookieJarOnly == False: #print 'using cache page',m['page'] link = cachedPages[m['page']] else: if m['page'] and not m['page'] == '' and m['page'].startswith( 'http'): if '$epoctime$' in m['page']: m['page'] = m['page'].replace('$epoctime$', getEpocTime()) if '$epoctime2$' in m['page']: m['page'] = m['page'].replace('$epoctime2$', getEpocTime2()) #print 'Ingoring Cache',m['page'] page_split = m['page'].split('|') pageUrl = page_split[0] header_in_page = None if len(page_split) > 1: header_in_page = page_split[1] # if # proxy = urllib_request.ProxyHandler({ ('https' ? 
proxytouse[:5]=="https":"http") : proxytouse}) # opener = urllib_request.build_opener(proxy) # urllib_request.install_opener(opener) # print 'urllib_request.getproxies',urllib_request.getproxies() current_proxies = urllib_request.ProxyHandler( urllib_request.getproxies()) #print 'getting pageUrl',pageUrl req = urllib_request.Request(pageUrl) if 'proxy' in m: proxytouse = m['proxy'] # print 'proxytouse',proxytouse # urllib_request.getproxies= lambda: {} if pageUrl[:5] == "https": proxy = urllib_request.ProxyHandler( {'https': proxytouse}) #req.set_proxy(proxytouse, 'https') else: proxy = urllib_request.ProxyHandler( {'http': proxytouse}) #req.set_proxy(proxytouse, 'http') opener = urllib_request.build_opener(proxy) urllib_request.install_opener(opener) req.add_header( 'User-Agent', 'Mozilla/5.0 (Windows NT 6.1; rv:14.0) Gecko/20100101 Firefox/14.0.1' ) proxytouse = None if 'referer' in m: req.add_header('Referer', m['referer']) if 'accept' in m: req.add_header('Accept', m['accept']) if 'agent' in m: req.add_header('User-agent', m['agent']) if 'x-req' in m: req.add_header('X-Requested-With', m['x-req']) if 'x-addr' in m: req.add_header('x-addr', m['x-addr']) if 'x-forward' in m: req.add_header('X-Forwarded-For', m['x-forward']) if 'setcookie' in m: # print 'adding cookie',m['setcookie'] req.add_header('Cookie', m['setcookie']) if 'appendcookie' in m: # print 'appending cookie to cookiejar',m['appendcookie'] cookiestoApend = m['appendcookie'] cookiestoApend = cookiestoApend.split(';') for h in cookiestoApend: n, v = h.split('=') w, n = n.split(':') ck = http_cookiejar.Cookie( version=0, name=n, value=v, port=None, port_specified=False, domain=w, domain_specified=False, domain_initial_dot=False, path='/', path_specified=True, secure=False, expires=None, discard=True, comment=None, comment_url=None, rest={'HttpOnly': None}, rfc2109=False) cookieJar.set_cookie(ck) if 'origin' in m: req.add_header('Origin', m['origin']) if header_in_page: header_in_page = header_in_page.split('&') for h in header_in_page: n, v = h.split('=') req.add_header(n, v) if not cookieJar == None: # print 'cookieJarVal',cookieJar cookie_handler = urllib_request.HTTPCookieProcessor( cookieJar) opener = urllib_request.build_opener( cookie_handler, urllib_request.HTTPBasicAuthHandler(), urllib_request.HTTPHandler()) opener = urllib_request.install_opener(opener) # print 'noredirect','noredirect' in m if 'noredirect' in m: opener = urllib_request.build_opener( cookie_handler, NoRedirection, urllib_request.HTTPBasicAuthHandler(), urllib_request.HTTPHandler()) opener = urllib_request.install_opener(opener) elif 'noredirect' in m: opener = urllib_request.build_opener( NoRedirection, urllib_request.HTTPBasicAuthHandler(), urllib_request.HTTPHandler()) opener = urllib_request.install_opener(opener) if 'connection' in m: # print '..........................connection//////.',m['connection'] from keepalive import HTTPHandler keepalive_handler = HTTPHandler() opener = urllib_request.build_opener(keepalive_handler) urllib_request.install_opener(opener) #print 'after cookie jar' post = None if 'post' in m: postData = m['post'] #if '$LiveStreamRecaptcha' in postData: # (captcha_challenge,catpcha_word,idfield)=processRecaptcha(m['page'],cookieJar) # if captcha_challenge: # postData=postData.replace('$LiveStreamRecaptcha','manual_recaptcha_challenge_field:'+captcha_challenge+',recaptcha_response_field:'+catpcha_word+',id:'+idfield) splitpost = postData.split(',') post = {} for p in splitpost: n = p.split(':')[0] v = p.split(':')[1] post[n] 
= v post = urllib_parse.urlencode(post) if 'rawpost' in m: post = m['rawpost'] #if '$LiveStreamRecaptcha' in post: # (captcha_challenge,catpcha_word,idfield)=processRecaptcha(m['page'],cookieJar) # if captcha_challenge: # post=post.replace('$LiveStreamRecaptcha','&manual_recaptcha_challenge_field='+captcha_challenge+'&recaptcha_response_field='+catpcha_word+'&id='+idfield) link = '' try: if post: response = urllib_request.urlopen(req, post) else: response = urllib_request.urlopen(req) if response.info().get('Content-Encoding') == 'gzip': import gzip buf = six.BytesIO(response.read()) f = gzip.GzipFile(fileobj=buf) link = f.read() else: link = response.read() if 'proxy' in m and not current_proxies is None: urllib_request.install_opener( urllib_request.build_opener(current_proxies)) link = javascriptUnEscape(link) #print repr(link) #print link This just print whole webpage in LOG if 'includeheaders' in m: #link+=str(response.headers.get('Set-Cookie')) link += '$$HEADERS_START$$:' for b in response.headers: link += b + ':' + response.headers.get( b) + '\n' link += '$$HEADERS_END$$:' # print link response.close() except: pass cachedPages[m['page']] = link #print link #print 'store link for',m['page'],forCookieJarOnly if forCookieJarOnly: return cookieJar # do nothing elif m['page'] and not m['page'].startswith('http'): if m['page'].startswith('$pyFunction:'): val = doEval(m['page'].split('$pyFunction:')[1], '', cookieJar, m) if forCookieJarOnly: return cookieJar # do nothing link = val link = javascriptUnEscape(link) else: link = m['page'] if '$doregex' in m['expres']: m['expres'] = getRegexParsed(regexs, m['expres'], cookieJar, recursiveCall=True, cachedPages=cachedPages) if not m['expres'] == '': #print 'doing it ',m['expres'] if '$LiveStreamCaptcha' in m['expres']: val = askCaptcha(m, link, cookieJar) #print 'url and val',url,val url = url.replace("$doregex[" + k + "]", val) elif m['expres'].startswith( '$pyFunction:') or '#$pyFunction' in m['expres']: #print 'expeeeeeeeeeeeeeeeeeee',m['expres'] val = '' if m['expres'].startswith('$pyFunction:'): val = doEval(m['expres'].split('$pyFunction:')[1], link, cookieJar, m) else: val = doEvalFunction(m['expres'], link, cookieJar, m) if 'ActivateWindow' in m['expres']: return if forCookieJarOnly: return cookieJar # do nothing if 'listrepeat' in m: listrepeat = m['listrepeat'] return listrepeat, eval(val), m, regexs, cookieJar try: url = url.replace(u"$doregex[" + k + "]", val) except: url = url.replace("$doregex[" + k + "]", six.ensure_text(val)) else: if 'listrepeat' in m: listrepeat = m['listrepeat'] ret = re.findall(m['expres'], link) return listrepeat, ret, m, regexs val = '' if not link == '': #print 'link',link reg = re.compile(m['expres']).search(link) try: val = reg.group(1).strip() except: traceback.print_exc() elif m['page'] == '' or m['page'] == None: val = m['expres'] if rawPost: # print 'rawpost' val = urllib_parse.quote_plus(val) if 'htmlunescape' in m: #val=urllib_parse.unquote_plus(val) import HTMLParser val = HTMLParser.HTMLParser().unescape(val) try: url = url.replace("$doregex[" + k + "]", val) except: url = url.replace("$doregex[" + k + "]", six.ensure_text(val)) #print 'ur',url #return val else: url = url.replace("$doregex[" + k + "]", '') if '$epoctime$' in url: url = url.replace('$epoctime$', getEpocTime()) if '$epoctime2$' in url: url = url.replace('$epoctime2$', getEpocTime2()) if '$GUID$' in url: import uuid url = url.replace('$GUID$', str(uuid.uuid1()).upper()) if '$get_cookies$' in url: url = url.replace('$get_cookies$', 
getCookiesString(cookieJar)) if recursiveCall: return url #print 'final url',repr(url) if url == "": return else: return url, setresolved
def process(self, file_path, x_index, y_index, z_index, t_index=0):
    """
    Method to load the image file.

    Args:
        file_path(str): An absolute file path for the specified tile
        x_index(int): The tile index in the X dimension
        y_index(int): The tile index in the Y dimension
        z_index(int): The tile index in the Z dimension
        t_index(int): The time index

    Returns:
        (io.BytesIO): A buffer containing the encoded tile image
    """
    file_path = self.fs.get_file(file_path)

    # Compute global range
    tile_x_range = [self.parameters["ingest_job"]["tile_size"]["x"] * x_index,
                    self.parameters["ingest_job"]["tile_size"]["x"] * (x_index + 1)]
    tile_y_range = [self.parameters["ingest_job"]["tile_size"]["y"] * y_index,
                    self.parameters["ingest_job"]["tile_size"]["y"] * (y_index + 1)]

    # Open hdf5
    h5_file = h5py.File(file_path, 'r')

    # Compute range in actual data, taking offsets into account
    x_offset = h5_file[self.parameters['offset_name']][1]
    y_offset = h5_file[self.parameters['offset_name']][0]
    x_img_extent = h5_file[self.parameters['extent_name']][1]
    y_img_extent = h5_file[self.parameters['extent_name']][0]
    x_frame_offset = x_offset + self.parameters['offset_origin_x']
    # NOTE: the original also uses 'offset_origin_x' here; if a separate Y
    # origin parameter exists, it likely should be used instead.
    y_frame_offset = y_offset + self.parameters['offset_origin_x']

    x1 = max(tile_x_range[0], x_frame_offset)
    y1 = max(tile_y_range[0], y_frame_offset)
    x2 = min(tile_x_range[1], x_frame_offset + x_img_extent)
    y2 = min(tile_y_range[1], y_frame_offset + y_img_extent)

    if self.parameters['datatype'] == "uint8":
        datatype = np.uint8
    elif self.parameters['datatype'] == "uint16":
        datatype = np.uint16
    else:
        raise Exception("Unsupported datatype: {}".format(self.parameters['datatype']))

    # Allocate Tile
    tile_data = np.zeros((self.parameters["ingest_job"]["tile_size"]["y"],
                          self.parameters["ingest_job"]["tile_size"]["x"]),
                         dtype=datatype, order='C')

    # Copy sub-img to tile, save, return
    img_y_index_start = max(0, y1 - y_frame_offset)
    img_y_index_stop = max(0, y2 - y_frame_offset)
    img_x_index_start = max(0, x1 - x_frame_offset)
    img_x_index_stop = max(0, x2 - x_frame_offset)

    tile_data[y1 - tile_y_range[0]:y2 - tile_y_range[0],
              x1 - tile_x_range[0]:x2 - tile_x_range[0]] = np.array(
        h5_file[self.parameters['data_name']][img_y_index_start:img_y_index_stop,
                                              img_x_index_start:img_x_index_stop])
    tile_data = tile_data.astype(datatype)

    upload_img = Image.fromarray(tile_data)

    output = six.BytesIO()
    upload_img.save(output, format=self.parameters["upload_format"].upper())

    # Send handle back
    return output
def export_ctf(segments=None):
    db = dataset.connect(get_app_config('SQLALCHEMY_DATABASE_URI'))
    if segments is None:
        segments = ['challenges', 'teams', 'both', 'metadata']

    groups = {
        'challenges': [
            'challenges',
            'files',
            'tags',
            'keys',
            'hints',
        ],
        'teams': [
            'teams',
            'tracking',
            'awards',
        ],
        'both': [
            'solves',
            'wrong_keys',
            'unlocks',
        ],
        'metadata': [
            'alembic_version',
            'config',
            'pages',
        ]
    }

    # Backup database
    backup = six.BytesIO()
    backup_zip = zipfile.ZipFile(backup, 'w')

    for segment in segments:
        group = groups[segment]
        for item in group:
            result = db[item].all()
            result_file = six.BytesIO()
            datafreeze.freeze(result, format='ctfd', fileobj=result_file)
            result_file.seek(0)
            backup_zip.writestr('db/{}.json'.format(item), result_file.read())

    # Guarantee that alembic_version is saved into the export
    if 'metadata' not in segments:
        result = db['alembic_version'].all()
        result_file = six.BytesIO()
        datafreeze.freeze(result, format='ctfd', fileobj=result_file)
        result_file.seek(0)
        backup_zip.writestr('db/alembic_version.json', result_file.read())

    # Backup uploads
    upload_folder = os.path.join(os.path.normpath(app.root_path),
                                 app.config.get('UPLOAD_FOLDER'))
    for root, dirs, files in os.walk(upload_folder):
        for file in files:
            parent_dir = os.path.basename(root)
            backup_zip.write(os.path.join(root, file),
                             arcname=os.path.join('uploads', parent_dir, file))

    backup_zip.close()
    backup.seek(0)
    return backup
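# Usage sketch for export_ctf: the export is an in-memory zip, so a caller can
# stream it to disk or inspect its contents directly. The helper name and
# destination file name below are illustrative.
def _save_export(path='ctfd_backup.zip'):
    backup = export_ctf()
    with open(path, 'wb') as target:
        target.write(backup.read())
    with zipfile.ZipFile(path) as zf:
        print(zf.namelist())  # db/<table>.json entries plus uploads/... files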
def test_attachments_and_userfeedback(
    default_project,
    reset_snuba,
    register_event_preprocessor,
    process_and_save,
    burst_task_runner,
    monkeypatch,
):
    @register_event_preprocessor
    def event_preprocessor(data):
        extra = data.setdefault("extra", {})
        extra.setdefault("processing_counter", 0)
        extra["processing_counter"] += 1

        cache_key = cache_key_for_event(data)
        attachments = attachment_cache.get(cache_key)
        extra.setdefault("attachments", []).append(
            [attachment.type for attachment in attachments])

        return data

    event_id_to_delete = process_and_save({"message": "hello world"}, seconds_ago=5)
    event_to_delete = eventstore.get_event_by_id(default_project.id, event_id_to_delete)

    event_id = process_and_save({"message": "hello world"})
    event = eventstore.get_event_by_id(default_project.id, event_id)

    for evt in (event, event_to_delete):
        for type in ("event.attachment", "event.minidump"):
            file = File.objects.create(name="foo", type=type)
            file.putfile(six.BytesIO(b"hello world"))
            EventAttachment.objects.create(
                event_id=evt.event_id,
                group_id=evt.group_id,
                project_id=default_project.id,
                file=file,
                type=file.type,
                name="foo",
            )

        UserReport.objects.create(
            project_id=default_project.id,
            event_id=evt.event_id,
            name="User",
        )

    with burst_task_runner() as burst:
        reprocess_group(default_project.id, event.group_id, max_events=1)

    burst(max_jobs=100)

    new_event = eventstore.get_event_by_id(default_project.id, event_id)
    assert new_event.group_id != event.group_id

    assert new_event.data["extra"]["attachments"] == [[
        "event.attachment", "event.minidump"
    ]]

    att, mdmp = EventAttachment.objects.filter(
        project_id=default_project.id).order_by("type")
    assert att.group_id == mdmp.group_id == new_event.group_id
    assert att.event_id == mdmp.event_id == event_id
    assert att.type == "event.attachment"
    assert mdmp.type == "event.minidump"

    (rep, ) = UserReport.objects.filter(project_id=default_project.id)
    assert rep.group_id == new_event.group_id
    assert rep.event_id == event_id

    assert is_group_finished(event.group_id)
def __init__(self, stream):
    self.stream = stream
    self._buffer = six.BytesIO()
    self._capture = CaptureStreamPrinter(self._buffer)
    self.streams = [self._capture]
def testMoveBetweenAssetstores(self):
    folder = six.next(Folder().childFolders(
        self.admin, parentType='user', force=True, filters={
            'name': 'Public'
        }))

    resp = self.request(path='/assetstore', method='GET', user=self.admin)
    self.assertStatusOk(resp)
    fs_assetstore = resp.json[0]

    # Clear any old DB data
    base.dropGridFSDatabase('girder_test_assetstore_move_assetstore')
    params = {
        'name': 'New Name',
        'type': AssetstoreType.GRIDFS,
        'db': 'girder_test_assetstore_move_assetstore'
    }
    resp = self.request(path='/assetstore', method='POST', user=self.admin,
                        params=params)
    self.assertStatusOk(resp)
    gridfs_assetstore = resp.json

    # Upload a file - it should go to the fs assetstore
    uploadData = 'helloworld'
    params = {
        'parentType': 'folder',
        'parentId': folder['_id'],
        'name': 'sample1',
        'size': len(uploadData),
        'mimeType': 'text/plain'
    }
    resp = self.request(
        path='/file', method='POST', user=self.admin, params=params)
    self.assertStatusOk(resp)
    upload = resp.json
    resp = self.request(
        path='/file/chunk', method='POST', user=self.admin, body=uploadData,
        params={
            'uploadId': upload['_id']
        }, type='text/plain')
    self.assertStatusOk(resp)
    self.assertEqual(resp.json['assetstoreId'], fs_assetstore['_id'])
    uploadedFiles = [resp.json]

    # Upload it again targetting a different assetstore
    params['assetstoreId'] = gridfs_assetstore['_id']
    resp = self.request(
        path='/file', method='POST', user=self.admin, params=params)
    self.assertStatusOk(resp)
    upload = resp.json
    resp = self.request(
        path='/file/chunk', method='POST', user=self.admin, body=uploadData,
        params={
            'uploadId': upload['_id']
        }, type='text/plain')
    self.assertStatusOk(resp)
    self.assertEqual(resp.json['assetstoreId'], gridfs_assetstore['_id'])
    uploadedFiles.append(resp.json)

    # Replace the first file, directing the replacement to a different
    # assetstore
    replaceParams = {
        'size': len(uploadData),
        'assetstoreId': gridfs_assetstore['_id'],
    }
    resp = self.request(
        path='/file/%s/contents' % uploadedFiles[0]['_id'], method='PUT',
        user=self.admin, params=replaceParams)
    self.assertStatusOk(resp)
    upload = resp.json
    resp = self.request(
        path='/file/chunk', method='POST', user=self.admin, body=uploadData,
        params={
            'uploadId': upload['_id']
        }, type='text/plain')
    self.assertStatusOk(resp)
    self.assertEqual(resp.json['assetstoreId'], gridfs_assetstore['_id'])
    uploadedFiles[0] = resp.json

    # Move a file from the gridfs assetstore to the filesystem assetstore
    resp = self.request(
        path='/file/%s/move' % uploadedFiles[0]['_id'], method='PUT',
        user=self.admin, params={'assetstoreId': fs_assetstore['_id']})
    self.assertStatusOk(resp)
    self.assertEqual(resp.json['assetstoreId'], fs_assetstore['_id'])
    uploadedFiles[0] = resp.json

    # Doing it again shouldn't change it.
    resp = self.request(
        path='/file/%s/move' % uploadedFiles[0]['_id'], method='PUT',
        user=self.admin, params={'assetstoreId': fs_assetstore['_id']})
    self.assertStatusOk(resp)
    self.assertEqual(resp.json['assetstoreId'], fs_assetstore['_id'])
    uploadedFiles[0] = resp.json

    # We should be able to move it back
    resp = self.request(
        path='/file/%s/move' % uploadedFiles[0]['_id'], method='PUT',
        user=self.admin, params={'assetstoreId': gridfs_assetstore['_id']})
    self.assertStatusOk(resp)
    self.assertEqual(resp.json['assetstoreId'], gridfs_assetstore['_id'])
    uploadedFiles[0] = resp.json

    # Test moving a file of zero length
    params['size'] = 0
    resp = self.request(
        path='/file', method='POST', user=self.admin, params=params)
    self.assertStatusOk(resp)
    uploadedFiles.append(resp.json)

    resp = self.request(
        path='/file/%s/move' % uploadedFiles[2]['_id'], method='PUT',
        user=self.admin, params={'assetstoreId': fs_assetstore['_id']})
    self.assertStatusOk(resp)
    self.assertEqual(resp.json['assetstoreId'], fs_assetstore['_id'])
    uploadedFiles[2] = resp.json

    # Test preventing the move via an event
    def stopMove(event):
        event.preventDefault()

    events.bind('model.upload.movefile', 'assetstore_test', stopMove)
    try:
        resp = self.request(
            path='/file/%s/move' % uploadedFiles[0]['_id'], method='PUT',
            user=self.admin, params={'assetstoreId': fs_assetstore['_id']},
            isJson=False)
        self.assertFalse('Move should have been prevented')
    except AssertionError as exc:
        self.assertIn('could not be moved to assetstore', str(exc))
    events.unbind('model.upload.movefile', 'assetstore_test')

    # Test files big enough to be multi-chunk
    chunkSize = Upload()._getChunkSize()
    data = six.BytesIO(b' ' * chunkSize * 2)
    uploadedFiles.append(Upload().uploadFromFile(
        data, chunkSize * 2, 'sample', parentType='folder',
        parent=folder, assetstore=fs_assetstore))
    resp = self.request(
        path='/file/%s/move' % uploadedFiles[3]['_id'], method='PUT',
        user=self.admin, params={'assetstoreId': gridfs_assetstore['_id']})
    self.assertStatusOk(resp)
    self.assertEqual(resp.json['assetstoreId'], gridfs_assetstore['_id'])
    uploadedFiles[3] = resp.json

    # Test progress
    size = chunkSize * 2
    data = six.BytesIO(b' ' * size)
    upload = Upload().uploadFromFile(
        data, size, 'progress', parentType='folder', parent=folder,
        assetstore=fs_assetstore)
    params = {
        'assetstoreId': gridfs_assetstore['_id'],
        'progress': True
    }
    resp = self.request(
        path='/file/%s/move' % upload['_id'], method='PUT',
        user=self.admin, params=params)
    self.assertStatusOk(resp)
    self.assertEqual(resp.json['assetstoreId'], gridfs_assetstore['_id'])

    resp = self.request(
        path='/notification/stream', method='GET', user=self.admin,
        isJson=False, params={'timeout': 1})
    messages = self.getSseMessages(resp)
    self.assertEqual(len(messages), 1)
    self.assertEqual(messages[0]['type'], 'progress')
    self.assertEqual(messages[0]['data']['current'], size)

    # Test moving imported file

    # Create assetstore to import file into
    params = {
        'name': 'ImportTest',
        'type': AssetstoreType.FILESYSTEM,
        'root': os.path.join(fs_assetstore['root'], 'import')
    }
    resp = self.request(path='/assetstore', method='POST', user=self.admin,
                        params=params)
    self.assertStatusOk(resp)
    import_assetstore = resp.json

    # Import file
    params = {
        'importPath': os.path.join(
            ROOT_DIR, 'tests', 'cases', 'py_client', 'testdata', 'world.txt'),
        'destinationType': 'folder',
    }

    Assetstore().importData(
        import_assetstore, parent=folder, parentType='folder', params=params,
        progress=ProgressContext(False), user=self.admin,
        leafFoldersAsItems=False)

    file = path_util.lookUpPath(
        '/user/admin/Public/world.txt/world.txt', self.admin)['document']

    # Move file
    params = {
        'assetstoreId': fs_assetstore['_id'],
    }
    resp = self.request(
        path='/file/%s/move' % file['_id'], method='PUT',
        user=self.admin, params=params)
    self.assertStatusOk(resp)
    self.assertEqual(resp.json['assetstoreId'], fs_assetstore['_id'])

    # Check that we can still download the file
    resp = self.request(
        path='/file/%s/download' % file['_id'], user=self.admin, isJson=False)
    self.assertStatusOk(resp)
def make_fs(self):
    _tar_file = six.BytesIO()
    fs = tarfs.TarFS(_tar_file, write=True)
    fs._tar_file = _tar_file
    return fs
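# Round-trip sketch for the in-memory tar filesystem created above (assuming
# pyfilesystem2's fs.tarfs module): write a file through the FS API, close the
# FS to flush the archive, then read the raw tar bytes from the backing
# buffer. The helper name is illustrative.
def _tarfs_roundtrip():
    buffer = six.BytesIO()
    tar_fs = tarfs.TarFS(buffer, write=True)
    tar_fs.writetext('/hello.txt', 'hello world')
    tar_fs.close()  # finalizes the tar archive into the buffer
    return buffer.getvalue()  # raw .tar bytes, e.g. for assertions or upload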
def create_wsgi_request(event_info,
                        server_name='zappa',
                        script_name=None,
                        trailing_slash=True,
                        binary_support=False,
                        base_path=None,
                        context_header_mappings={},
                        ):
    """
    Given some event_info via API Gateway,
    create and return a valid WSGI request environ.
    """
    method = event_info['httpMethod']
    headers = merge_headers(event_info) or {}  # Allow for the AGW console 'Test' button to work (Pull #735)

    """
    API Gateway and ALB both started allowing for multi-value querystring
    params in Nov. 2018. If there aren't multi-value params present, then
    it acts identically to 'queryStringParameters', so we can use it as a
    drop-in replacement.

    The one caveat here is that ALB will only include _one_ of
    queryStringParameters _or_ multiValueQueryStringParameters, which means
    we have to check for the existence of one and then fall back to the
    other.
    """
    if 'multiValueQueryStringParameters' in event_info:
        query = event_info['multiValueQueryStringParameters']
        query_string = urlencode(query, doseq=True) if query else ''
    else:
        query = event_info.get('queryStringParameters', {})
        query_string = urlencode(query) if query else ''

    if context_header_mappings:
        for key, value in context_header_mappings.items():
            parts = value.split('.')
            header_val = event_info['requestContext']
            for part in parts:
                if part not in header_val:
                    header_val = None
                    break
                else:
                    header_val = header_val[part]
            if header_val is not None:
                headers[key] = header_val

    # Extract remote user from context if Authorizer is enabled
    remote_user = None
    if event_info['requestContext'].get('authorizer'):
        remote_user = event_info['requestContext']['authorizer'].get('principalId')
    elif event_info['requestContext'].get('identity'):
        remote_user = event_info['requestContext']['identity'].get('userArn')

    # Related: https://github.com/Miserlou/Zappa/issues/677
    #          https://github.com/Miserlou/Zappa/issues/683
    #          https://github.com/Miserlou/Zappa/issues/696
    #          https://github.com/Miserlou/Zappa/issues/836
    # https://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol#Summary_table
    if binary_support and (method in BINARY_METHODS):
        if event_info.get('isBase64Encoded', False):
            encoded_body = event_info['body']
            body = base64.b64decode(encoded_body)
        else:
            body = event_info['body']
            if isinstance(body, six.string_types):
                body = body.encode("utf-8")
    else:
        body = event_info['body']
        if isinstance(body, six.string_types):
            body = body.encode("utf-8")

    # Make header names canonical, e.g. content-type => Content-Type
    # https://github.com/Miserlou/Zappa/issues/1188
    headers = titlecase_keys(headers)

    path = urls.url_unquote(event_info['path'])

    if base_path:
        script_name = '/' + base_path

        if path.startswith(script_name):
            path = path[len(script_name):]

    x_forwarded_for = headers.get('X-Forwarded-For', '')
    if ',' in x_forwarded_for:
        # The last one is the cloudfront proxy ip. The second to last is the real client ip.
        # Everything else is user supplied and untrustworthy.
        remote_addr = x_forwarded_for.split(', ')[-2]
    else:
        remote_addr = x_forwarded_for or '127.0.0.1'

    environ = {
        'PATH_INFO': get_wsgi_string(path),
        'QUERY_STRING': get_wsgi_string(query_string),
        'REMOTE_ADDR': remote_addr,
        'REQUEST_METHOD': method,
        'SCRIPT_NAME': get_wsgi_string(str(script_name)) if script_name else '',
        'SERVER_NAME': str(server_name),
        'SERVER_PORT': headers.get('X-Forwarded-Port', '80'),
        'SERVER_PROTOCOL': str('HTTP/1.1'),
        'wsgi.version': (1, 0),
        'wsgi.url_scheme': headers.get('X-Forwarded-Proto', 'http'),
        'wsgi.input': body,
        'wsgi.errors': sys.stderr,
        'wsgi.multiprocess': False,
        'wsgi.multithread': False,
        'wsgi.run_once': False,
    }

    # Input processing
    if method in ["POST", "PUT", "PATCH", "DELETE"]:
        if 'Content-Type' in headers:
            environ['CONTENT_TYPE'] = headers['Content-Type']

        # This must be Bytes or None
        environ['wsgi.input'] = six.BytesIO(body)
        if body:
            environ['CONTENT_LENGTH'] = str(len(body))
        else:
            environ['CONTENT_LENGTH'] = '0'

    for header in headers:
        wsgi_name = "HTTP_" + header.upper().replace('-', '_')
        environ[wsgi_name] = str(headers[header])

    if script_name:
        environ['SCRIPT_NAME'] = script_name
        path_info = environ['PATH_INFO']

        if script_name in path_info:
            environ['PATH_INFO'].replace(script_name, '')

    if remote_user:
        environ['REMOTE_USER'] = remote_user

    if event_info['requestContext'].get('authorizer'):
        environ['API_GATEWAY_AUTHORIZER'] = event_info['requestContext']['authorizer']

    return environ
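# Illustrative sketch for create_wsgi_request: a trimmed-down API Gateway
# proxy event passed through the function. All field values here are made up
# for demonstration; real events carry many more requestContext keys.
def _example_environ():
    event_info = {
        'httpMethod': 'POST',
        'path': '/api/items',
        'headers': {'Content-Type': 'application/json',
                    'X-Forwarded-Proto': 'https'},
        'queryStringParameters': {'page': '1'},
        'requestContext': {'identity': {'userArn': None}},
        'body': '{"name": "example"}',
        'isBase64Encoded': False,
    }
    environ = create_wsgi_request(event_info, script_name=None)
    # environ['wsgi.input'] is a six.BytesIO over the UTF-8 encoded body, and
    # CONTENT_LENGTH reflects that body's byte length.
    return environ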
def test_stream(self):
    p = "/Jotta/Archive/testfile_up_and_stream.txt"
    t = jfs.up(p, six.BytesIO(TESTFILEDATA))
    s = b"".join([chunk for chunk in t.stream()])
    assert s == TESTFILEDATA
    t.delete()
def _grid_image(left_gutter, width, right_gutter, height, columns=1,
                grid_color=None, baseline_color=None, background_color=None,
                inline=False):
    if not Image:
        raise Exception("Images manipulation require PIL")
    if grid_color is None:
        grid_color = (120, 170, 250, 15)
    else:
        c = Color(grid_color).value
        grid_color = (c[0], c[1], c[2], int(c[3] * 255.0))
    if baseline_color is None:
        baseline_color = (120, 170, 250, 30)
    else:
        c = Color(baseline_color).value
        baseline_color = (c[0], c[1], c[2], int(c[3] * 255.0))
    if background_color is None:
        background_color = (0, 0, 0, 0)
    else:
        c = Color(background_color).value
        background_color = (c[0], c[1], c[2], int(c[3] * 255.0))
    _height = int(height) if height >= 1 else int(height * 1000.0)
    _width = int(width) if width >= 1 else int(width * 1000.0)
    _left_gutter = int(left_gutter) if left_gutter >= 1 else int(left_gutter * 1000.0)
    _right_gutter = int(right_gutter) if right_gutter >= 1 else int(right_gutter * 1000.0)
    if _height <= 0 or _width <= 0 or _left_gutter <= 0 or _right_gutter <= 0:
        raise ValueError
    _full_width = (_left_gutter + _width + _right_gutter)
    new_image = Image.new(mode='RGBA',
                          size=(_full_width * int(columns), _height),
                          color=background_color)
    draw = ImageDraw.Draw(new_image)
    for i in range(int(columns)):
        draw.rectangle((i * _full_width + _left_gutter, 0,
                        i * _full_width + _left_gutter + _width - 1, _height - 1),
                       fill=grid_color)
    if _height > 1:
        draw.rectangle((0, _height - 1, _full_width * int(columns) - 1, _height - 1),
                       fill=baseline_color)
    if not inline:
        grid_name = 'grid_'
        if left_gutter:
            grid_name += str(int(left_gutter)) + '+'
        grid_name += str(int(width))
        if right_gutter:
            grid_name += '+' + str(int(right_gutter))
        if height and height > 1:
            grid_name += 'x' + str(int(height))
        key = (columns, grid_color, baseline_color, background_color)
        key = grid_name + '-' + make_filename_hash(key)
        asset_file = key + '.png'
        asset_path = os.path.join(
            config.ASSETS_ROOT or os.path.join(config.STATIC_ROOT, 'assets'),
            asset_file)
        try:
            new_image.save(asset_path)
        except IOError:
            log.exception("Error while saving image")
            inline = True  # Retry inline version
        url = '%s%s' % (config.ASSETS_URL, asset_file)
    if inline:
        output = six.BytesIO()
        new_image.save(output, format='PNG')
        contents = output.getvalue()
        output.close()
        url = make_data_url('image/png', contents)
    inline = 'url("%s")' % escape(url)
    return String.unquoted(inline)
def _gzip(bytestring):
    out = six.BytesIO()
    # Set mtime to zero for deterministic results across TensorBoard launches.
    with gzip.GzipFile(fileobj=out, mode='wb', compresslevel=3, mtime=0) as f:
        f.write(bytestring)
    return out.getvalue()
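# Quick check of the determinism noted above: because mtime is pinned to zero
# (and no filename is embedded when writing to a fileobj), compressing the
# same payload twice yields byte-identical output. The payload is illustrative.
def _gzip_is_deterministic():
    payload = b'tensorboard asset bytes'
    return _gzip(payload) == _gzip(payload)  # expected to be True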
def test_update(self, put, *args): cases = dict( worker=dict( init_kwargs=dict(name='stack'), update_kwargs={}, side_effect=[ SucceededResult(' Is Manager: false'), # manager status ], args_parser=[ args_parser, ], expected_command_args=[ { 'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'], }, ], expected_result=False, all_hosts=['host1', 'host2'], ), no_changes=dict( init_kwargs=dict(name='stack'), update_kwargs={}, side_effect=[ SucceededResult(' Is Manager: true'), # manager status SucceededResult( json.dumps([{ 'Config': { 'Labels': { 'fabricio.stack.compose.stack': 'Y29tcG9zZS55bWw=', 'fabricio.stack.images.stack': 'e30=', }, } }])), # image info ], args_parser=[ args_parser, docker_inspect_args_parser, ], expected_command_args=[ { 'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'], }, { 'executable': ['docker', 'inspect'], 'type': 'image', 'image_or_container': 'fabricio-current-stack:stack', }, ], expected_result=False, expected_compose_file='docker-compose.yml', ), forced=dict( init_kwargs=dict(name='stack'), update_kwargs=dict(force=True), side_effect=[ SucceededResult(' Is Manager: true'), # manager status SucceededResult(), # stack deploy RuntimeError(), # update sentinel images SucceededResult(), # stack images SucceededResult(), # build new sentinel image ], args_parser=[ args_parser, args_parser, args_parser, args_parser, args_parser, ], expected_command_args=[ { 'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'], }, { 'args': [ 'docker', 'stack', 'deploy', '--compose-file=docker-compose.yml', 'stack' ], }, { 'args': [ 'docker', 'rmi', 'fabricio-backup-stack:stack;', 'docker', 'tag', 'fabricio-current-stack:stack', 'fabricio-backup-stack:stack;', 'docker', 'rmi', 'fabricio-current-stack:stack' ], }, { 'args': [ 'docker', 'stack', 'services', '--format', '{{.Image}}', 'stack' ], }, { 'args': [ 'echo', 'FROM scratch\nLABEL fabricio.stack.compose.stack=Y29tcG9zZS55bWw= fabricio.stack.images.stack=e30=\n', '|', 'docker', 'build', '--tag', 'fabricio-current-stack:stack', '-' ], }, ], expected_result=True, expected_compose_file='docker-compose.yml', should_upload_compose_file=True, ), created=dict( init_kwargs=dict(name='stack'), update_kwargs=dict(), side_effect=[ SucceededResult(' Is Manager: true'), # manager status docker.ImageNotFoundError(), # image info SucceededResult(), # stack deploy SucceededResult(), # update sentinel images SucceededResult(), # stack images SucceededResult(), # build new sentinel image ], args_parser=[ args_parser, docker_inspect_args_parser, args_parser, args_parser, args_parser, args_parser, ], expected_command_args=[ { 'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'], }, { 'executable': ['docker', 'inspect'], 'type': 'image', 'image_or_container': 'fabricio-current-stack:stack', }, { 'args': [ 'docker', 'stack', 'deploy', '--compose-file=docker-compose.yml', 'stack' ], }, { 'args': [ 'docker', 'rmi', 'fabricio-backup-stack:stack;', 'docker', 'tag', 'fabricio-current-stack:stack', 'fabricio-backup-stack:stack;', 'docker', 'rmi', 'fabricio-current-stack:stack' ], }, { 'args': [ 'docker', 'stack', 'services', '--format', '{{.Image}}', 'stack' ], }, { 'args': [ 'echo', 'FROM scratch\nLABEL fabricio.stack.compose.stack=Y29tcG9zZS55bWw= fabricio.stack.images.stack=e30=\n', '|', 'docker', 'build', '--tag', 'fabricio-current-stack:stack', '-' ], }, ], expected_result=True, expected_compose_file='docker-compose.yml', should_upload_compose_file=True, ), created_skip_sentinels_errors=dict( 
init_kwargs=dict(name='stack'), update_kwargs=dict(), side_effect=[ SucceededResult(' Is Manager: true'), # manager status docker.ImageNotFoundError(), # image info SucceededResult(), # stack deploy RuntimeError(), # update sentinel images RuntimeError(), # stack images RuntimeError(), # build new sentinel image ], args_parser=[ args_parser, docker_inspect_args_parser, args_parser, args_parser, args_parser, args_parser, ], expected_command_args=[ { 'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'], }, { 'executable': ['docker', 'inspect'], 'type': 'image', 'image_or_container': 'fabricio-current-stack:stack', }, { 'args': [ 'docker', 'stack', 'deploy', '--compose-file=docker-compose.yml', 'stack' ], }, { 'args': [ 'docker', 'rmi', 'fabricio-backup-stack:stack;', 'docker', 'tag', 'fabricio-current-stack:stack', 'fabricio-backup-stack:stack;', 'docker', 'rmi', 'fabricio-current-stack:stack' ], }, { 'args': [ 'docker', 'stack', 'services', '--format', '{{.Image}}', 'stack' ], }, { 'args': [ 'echo', 'FROM scratch\nLABEL fabricio.stack.compose.stack=Y29tcG9zZS55bWw=\n', '|', 'docker', 'build', '--tag', 'fabricio-current-stack:stack', '-' ], }, ], expected_result=True, expected_compose_file='docker-compose.yml', should_upload_compose_file=True, ), created_with_custom_compose=dict( init_kwargs=dict(name='stack', options=dict(compose_file='compose.yml')), update_kwargs=dict(), side_effect=[ SucceededResult(' Is Manager: true'), # manager status docker.ImageNotFoundError(), # image info SucceededResult(), # stack deploy SucceededResult(), # update sentinel images SucceededResult(), # stack images SucceededResult(), # build new sentinel image ], args_parser=[ args_parser, docker_inspect_args_parser, args_parser, args_parser, args_parser, args_parser, ], expected_command_args=[ { 'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'], }, { 'executable': ['docker', 'inspect'], 'type': 'image', 'image_or_container': 'fabricio-current-stack:stack', }, { 'args': [ 'docker', 'stack', 'deploy', '--compose-file=compose.yml', 'stack' ], }, { 'args': [ 'docker', 'rmi', 'fabricio-backup-stack:stack;', 'docker', 'tag', 'fabricio-current-stack:stack', 'fabricio-backup-stack:stack;', 'docker', 'rmi', 'fabricio-current-stack:stack' ], }, { 'args': [ 'docker', 'stack', 'services', '--format', '{{.Image}}', 'stack' ], }, { 'args': [ 'echo', 'FROM scratch\nLABEL fabricio.stack.compose.stack=Y29tcG9zZS55bWw= fabricio.stack.images.stack=e30=\n', '|', 'docker', 'build', '--tag', 'fabricio-current-stack:stack', '-' ], }, ], expected_result=True, expected_compose_file='compose.yml', should_upload_compose_file=True, ), created_with_custom_compose2=dict( init_kwargs=dict(name='stack', options={'compose-file': 'compose.yml'}), update_kwargs=dict(), side_effect=[ SucceededResult(' Is Manager: true'), # manager status docker.ImageNotFoundError(), # image info SucceededResult(), # stack deploy SucceededResult(), # update sentinel images SucceededResult(), # stack images SucceededResult(), # build new sentinel image ], args_parser=[ args_parser, docker_inspect_args_parser, args_parser, args_parser, args_parser, args_parser, ], expected_command_args=[ { 'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'], }, { 'executable': ['docker', 'inspect'], 'type': 'image', 'image_or_container': 'fabricio-current-stack:stack', }, { 'args': [ 'docker', 'stack', 'deploy', '--compose-file=compose.yml', 'stack' ], }, { 'args': [ 'docker', 'rmi', 'fabricio-backup-stack:stack;', 'docker', 'tag', 
'fabricio-current-stack:stack', 'fabricio-backup-stack:stack;', 'docker', 'rmi', 'fabricio-current-stack:stack' ], }, { 'args': [ 'docker', 'stack', 'services', '--format', '{{.Image}}', 'stack' ], }, { 'args': [ 'echo', 'FROM scratch\nLABEL fabricio.stack.compose.stack=Y29tcG9zZS55bWw= fabricio.stack.images.stack=e30=\n', '|', 'docker', 'build', '--tag', 'fabricio-current-stack:stack', '-' ], }, ], expected_result=True, expected_compose_file='compose.yml', should_upload_compose_file=True, ), created_with_custom_image=dict( init_kwargs=dict(name='stack', image='image:tag'), update_kwargs=dict(), side_effect=[ SucceededResult(' Is Manager: true'), # manager status docker.ImageNotFoundError(), # image info SucceededResult(), # stack deploy SucceededResult(), # update sentinel images SucceededResult(), # stack images SucceededResult(), # build new sentinel image ], args_parser=[ args_parser, docker_inspect_args_parser, args_parser, args_parser, args_parser, args_parser, ], expected_command_args=[ { 'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'], }, { 'executable': ['docker', 'inspect'], 'type': 'image', 'image_or_container': 'fabricio-current-stack:stack', }, { 'args': [ 'docker', 'stack', 'deploy', '--compose-file=docker-compose.yml', 'stack' ], }, { 'args': [ 'docker', 'rmi', 'fabricio-backup-stack:stack;', 'docker', 'tag', 'fabricio-current-stack:stack', 'fabricio-backup-stack:stack;', 'docker', 'rmi', 'fabricio-current-stack:stack' ], }, { 'args': [ 'docker', 'stack', 'services', '--format', '{{.Image}}', 'stack' ], }, { 'args': [ 'echo', 'FROM image:tag\nLABEL fabricio.stack.compose.stack=Y29tcG9zZS55bWw= fabricio.stack.images.stack=e30=\n', '|', 'docker', 'build', '--tag', 'fabricio-current-stack:stack', '-' ], }, ], expected_result=True, expected_compose_file_name='docker-compose.yml', should_upload_compose_file=True, ), created_with_custom_image_update_params=dict( init_kwargs=dict(name='stack', image='image:tag'), update_kwargs=dict(tag='new-tag', registry='registry', account='account'), side_effect=[ SucceededResult(' Is Manager: true'), # manager status docker.ImageNotFoundError(), # image info SucceededResult(), # stack deploy SucceededResult(), # update sentinel images SucceededResult(), # stack images SucceededResult(), # build new sentinel image ], args_parser=[ args_parser, docker_inspect_args_parser, args_parser, args_parser, args_parser, args_parser, ], expected_command_args=[ { 'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'], }, { 'executable': ['docker', 'inspect'], 'type': 'image', 'image_or_container': 'fabricio-current-stack:stack', }, { 'args': [ 'docker', 'stack', 'deploy', '--compose-file=docker-compose.yml', 'stack' ], }, { 'args': [ 'docker', 'rmi', 'fabricio-backup-stack:stack;', 'docker', 'tag', 'fabricio-current-stack:stack', 'fabricio-backup-stack:stack;', 'docker', 'rmi', 'fabricio-current-stack:stack' ], }, { 'args': [ 'docker', 'stack', 'services', '--format', '{{.Image}}', 'stack' ], }, { 'args': [ 'echo', 'FROM registry/account/image:new-tag\nLABEL fabricio.stack.compose.stack=Y29tcG9zZS55bWw= fabricio.stack.images.stack=e30=\n', '|', 'docker', 'build', '--tag', 'fabricio-current-stack:stack', '-' ], }, ], expected_result=True, expected_compose_file_name='docker-compose.yml', should_upload_compose_file=True, ), created_from_empty_image_with_custom_image_update_params=dict( init_kwargs=dict(name='stack'), update_kwargs=dict(tag='registry/account/image:tag'), side_effect=[ SucceededResult(' Is Manager: true'), # manager status 
docker.ImageNotFoundError(), # image info SucceededResult(), # stack deploy SucceededResult(), # update sentinel images SucceededResult(), # stack images SucceededResult(), # build new sentinel image ], args_parser=[ args_parser, docker_inspect_args_parser, args_parser, args_parser, args_parser, args_parser, ], expected_command_args=[ { 'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'], }, { 'executable': ['docker', 'inspect'], 'type': 'image', 'image_or_container': 'fabricio-current-stack:stack', }, { 'args': [ 'docker', 'stack', 'deploy', '--compose-file=docker-compose.yml', 'stack' ], }, { 'args': [ 'docker', 'rmi', 'fabricio-backup-stack:stack;', 'docker', 'tag', 'fabricio-current-stack:stack', 'fabricio-backup-stack:stack;', 'docker', 'rmi', 'fabricio-current-stack:stack' ], }, { 'args': [ 'docker', 'stack', 'services', '--format', '{{.Image}}', 'stack' ], }, { 'args': [ 'echo', 'FROM registry/account/image:tag\nLABEL fabricio.stack.compose.stack=Y29tcG9zZS55bWw= fabricio.stack.images.stack=e30=\n', '|', 'docker', 'build', '--tag', 'fabricio-current-stack:stack', '-' ], }, ], expected_result=True, expected_compose_file_name='docker-compose.yml', should_upload_compose_file=True, ), updated_compose_changed=dict( init_kwargs=dict(name='stack'), update_kwargs=dict(), side_effect=[ SucceededResult(' Is Manager: true'), # manager status SucceededResult( json.dumps([{ 'Config': { 'Labels': { 'fabricio.stack.compose.stack': 'b2xkLWNvbXBvc2UueW1s', 'fabricio.stack.images.stack': 'eyJpbWFnZTp0YWciOiAiZGlnZXN0In0=', }, } }])), # image info SucceededResult(), # stack deploy SucceededResult(), # update sentinel images SucceededResult('image:tag'), # stack images SucceededResult(), # image pull SucceededResult('digest'), # images digests SucceededResult(), # build new sentinel image ], args_parser=[ args_parser, docker_inspect_args_parser, args_parser, args_parser, args_parser, args_parser, args_parser, args_parser, ], expected_command_args=[ { 'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'], }, { 'executable': ['docker', 'inspect'], 'type': 'image', 'image_or_container': 'fabricio-current-stack:stack', }, { 'args': [ 'docker', 'stack', 'deploy', '--compose-file=docker-compose.yml', 'stack' ], }, { 'args': [ 'docker', 'rmi', 'fabricio-backup-stack:stack;', 'docker', 'tag', 'fabricio-current-stack:stack', 'fabricio-backup-stack:stack;', 'docker', 'rmi', 'fabricio-current-stack:stack' ], }, { 'args': [ 'docker', 'stack', 'services', '--format', '{{.Image}}', 'stack' ], }, { 'args': ['docker', 'pull', 'image:tag'], }, { 'args': [ 'docker', 'inspect', '--type', 'image', '--format', '{{.RepoDigests}}', 'image:tag' ], }, { 'args': [ 'echo', 'FROM scratch\nLABEL fabricio.stack.compose.stack=Y29tcG9zZS55bWw= fabricio.stack.images.stack=eyJpbWFnZTp0YWciOiAiZGlnZXN0In0=\n', '|', 'docker', 'build', '--tag', 'fabricio-current-stack:stack', '-' ], }, ], expected_result=True, expected_compose_file='docker-compose.yml', should_upload_compose_file=True, ), updated_image_changed=dict( init_kwargs=dict(name='stack'), update_kwargs=dict(), side_effect=[ SucceededResult(' Is Manager: true'), # manager status SucceededResult( json.dumps([{ 'Config': { 'Labels': { 'fabricio.stack.compose.stack': 'Y29tcG9zZS55bWw=', 'fabricio.stack.images.stack': 'eyJpbWFnZTp0YWciOiAiZGlnZXN0In0=', }, } }])), # image info SucceededResult(), # image pull SucceededResult('new-digest'), # images digests SucceededResult(), # stack deploy SucceededResult(), # update sentinel images SucceededResult('image:tag'), # 
stack images SucceededResult(), # image pull SucceededResult('new-digest'), # images digests SucceededResult(), # build new sentinel image ], args_parser=[ args_parser, docker_inspect_args_parser, args_parser, args_parser, args_parser, args_parser, args_parser, args_parser, args_parser, args_parser, ], expected_command_args=[ { 'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'], }, { 'executable': ['docker', 'inspect'], 'type': 'image', 'image_or_container': 'fabricio-current-stack:stack', }, { 'args': ['docker', 'pull', 'image:tag'], }, { 'args': [ 'docker', 'inspect', '--type', 'image', '--format', '{{.RepoDigests}}', 'image:tag' ], }, { 'args': [ 'docker', 'stack', 'deploy', '--compose-file=docker-compose.yml', 'stack' ], }, { 'args': [ 'docker', 'rmi', 'fabricio-backup-stack:stack;', 'docker', 'tag', 'fabricio-current-stack:stack', 'fabricio-backup-stack:stack;', 'docker', 'rmi', 'fabricio-current-stack:stack' ], }, { 'args': [ 'docker', 'stack', 'services', '--format', '{{.Image}}', 'stack' ], }, { 'args': ['docker', 'pull', 'image:tag'], }, { 'args': [ 'docker', 'inspect', '--type', 'image', '--format', '{{.RepoDigests}}', 'image:tag' ], }, { 'args': [ 'echo', 'FROM scratch\nLABEL fabricio.stack.compose.stack=Y29tcG9zZS55bWw= fabricio.stack.images.stack=eyJpbWFnZTp0YWciOiAibmV3LWRpZ2VzdCJ9\n', '|', 'docker', 'build', '--tag', 'fabricio-current-stack:stack', '-' ], }, ], expected_result=True, expected_compose_file='docker-compose.yml', should_upload_compose_file=True, ), updated_images_changed=dict( init_kwargs=dict(name='stack'), update_kwargs=dict(), side_effect=[ SucceededResult(' Is Manager: true'), # manager status SucceededResult( json.dumps([{ 'Config': { 'Labels': { 'fabricio.stack.compose.stack': 'Y29tcG9zZS55bWw=', 'fabricio.stack.images.stack': 'eyJpbWFnZTE6dGFnIjogImRpZ2VzdDEiLCAiaW1hZ2UyOnRhZyI6ICJkaWdlc3QyIn0=', }, } }])), # image info SucceededResult(), # image1 pull SucceededResult(), # image2 pull SucceededResult( 'new-digest1\nnew-digest2\n'), # images digests SucceededResult(), # stack deploy SucceededResult(), # update sentinel images SucceededResult( 'image1:tag\nimage2:tag\n'), # stack images SucceededResult(), # image1 pull SucceededResult(), # image2 pull SucceededResult( 'new-digest1\nnew-digest2\n'), # images digests SucceededResult(), # build new sentinel image ], args_parser=[ args_parser, docker_inspect_args_parser, args_parser, args_parser, args_parser, args_parser, args_parser, args_parser, args_parser, args_parser, args_parser, args_parser, ], expected_command_args=[ { 'args': ['docker', 'info', '2>&1', '|', 'grep', 'Is Manager:'], }, { 'executable': ['docker', 'inspect'], 'type': 'image', 'image_or_container': 'fabricio-current-stack:stack', }, { 'args': ['docker', 'pull', 'image1:tag'], }, { 'args': ['docker', 'pull', 'image2:tag'], }, { 'args': [ 'docker', 'inspect', '--type', 'image', '--format', '{{.RepoDigests}}', 'image1:tag', 'image2:tag' ], }, { 'args': [ 'docker', 'stack', 'deploy', '--compose-file=docker-compose.yml', 'stack' ], }, { 'args': [ 'docker', 'rmi', 'fabricio-backup-stack:stack;', 'docker', 'tag', 'fabricio-current-stack:stack', 'fabricio-backup-stack:stack;', 'docker', 'rmi', 'fabricio-current-stack:stack' ], }, { 'args': [ 'docker', 'stack', 'services', '--format', '{{.Image}}', 'stack' ], }, { 'args': ['docker', 'pull', 'image1:tag'], }, { 'args': ['docker', 'pull', 'image2:tag'], }, { 'args': [ 'docker', 'inspect', '--type', 'image', '--format', '{{.RepoDigests}}', 'image1:tag', 'image2:tag' ], }, { 'args': [ 
'echo', 'FROM scratch\nLABEL fabricio.stack.compose.stack=Y29tcG9zZS55bWw= fabricio.stack.images.stack=eyJpbWFnZTE6dGFnIjogIm5ldy1kaWdlc3QxIiwgImltYWdlMjp0YWciOiAibmV3LWRpZ2VzdDIifQ==\n', '|', 'docker', 'build', '--tag', 'fabricio-current-stack:stack', '-' ], }, ], expected_result=True, expected_compose_file='docker-compose.yml', should_upload_compose_file=True, ), ) for case, data in cases.items(): with self.subTest(case): fab.env.command = '{0}__{1}'.format(self, case) with mock.patch.dict( fab.env, dict(all_hosts=data.get('all_hosts', ['host']))): service.open.return_value = six.BytesIO(b'compose.yml') service.open.reset_mock() put.reset_mock() stack = docker.Stack(**data.get('init_kwargs', {})) side_effect = self.command_checker( args_parsers=data.get('args_parser', []), expected_args_set=data.get('expected_command_args', []), side_effects=data.get('side_effect', []), ) with mock.patch.object(fabricio, 'run', side_effect=side_effect): with mock.patch('six.BytesIO') as compose_file: result = stack.update( **data.get('update_kwargs', {})) self.assertEqual(data['expected_result'], result) expected_compose_file_name = data.get( 'expected_compose_file_name') if expected_compose_file_name: service.open.assert_called_once_with( expected_compose_file_name, 'rb') if data.get('should_upload_compose_file', False): put.assert_called_once() compose_file.assert_called_once_with(b'compose.yml') else: put.assert_not_called()
def dataframe_to_civis(df, database, table, api_key=None, client=None,
                       max_errors=None, existing_table_rows="fail",
                       distkey=None, sortkey1=None, sortkey2=None,
                       headers=None, credential_id=None,
                       polling_interval=None, archive=False,
                       hidden=True, **kwargs):
    """Upload a `pandas` `DataFrame` into a Civis table.

    The `DataFrame`'s index will not be included. To store the index
    along with the other values, use `df.reset_index()` instead
    of `df` as the first argument to this function.

    Parameters
    ----------
    df : :class:`pandas:pandas.DataFrame`
        The `DataFrame` to upload to Civis.
    database : str or int
        Upload data into this database. Can be the database name or ID.
    table : str
        The schema and table you want to upload to. E.g.,
        ``'scratch.table'``.
    api_key : DEPRECATED str, optional
        Your Civis API key. If not given, the :envvar:`CIVIS_API_KEY`
        environment variable will be used.
    client : :class:`civis.APIClient`, optional
        If not provided, an :class:`civis.APIClient` object will be
        created from the :envvar:`CIVIS_API_KEY`.
    max_errors : int, optional
        The maximum number of rows with errors to remove from the import
        before failing.
    existing_table_rows : str, optional
        The behaviour if a table with the requested name already exists.
        One of ``'fail'``, ``'truncate'``, ``'append'`` or ``'drop'``.
        Defaults to ``'fail'``.
    distkey : str, optional
        The column to use as the distkey for the table.
    sortkey1 : str, optional
        The column to use as the sortkey for the table.
    sortkey2 : str, optional
        The second column in a compound sortkey for the table.
    headers : bool, optional
        Whether or not the first row of the file should be treated as
        headers. The default, ``None``, attempts to autodetect whether
        or not the first row contains headers.
    credential_id : str or int, optional
        The ID of the database credential. If ``None``, the default
        credential will be used.
    polling_interval : int or float, optional
        Number of seconds to wait between checks for job completion.
    archive : bool, optional (deprecated)
        If ``True``, archive the import job as soon as it completes.
    hidden : bool, optional
        If ``True`` (the default), this job will not appear in the Civis UI.
    **kwargs : kwargs
        Extra keyword arguments will be passed to
        :meth:`pandas:pandas.DataFrame.to_csv`.

    Returns
    -------
    fut : :class:`~civis.futures.CivisFuture`
        A `CivisFuture` object.

    Examples
    --------
    >>> import pandas as pd
    >>> df = pd.DataFrame({'a': [1, 2, 3], 'b': [4, 5, 6]})
    >>> fut = civis.io.dataframe_to_civis(df, 'my-database',
    ...                                   'scratch.df_table')
    >>> fut.result()
    """
    if client is None:
        client = APIClient(api_key=api_key, resources='all')
    if archive:
        warnings.warn("`archive` is deprecated and will be removed in v2.0.0. "
                      "Use `hidden` instead.", FutureWarning)

    buf = six.BytesIO()
    if six.PY3:
        txt = io.TextIOWrapper(buf, encoding='utf-8')
    else:
        txt = buf
    df.to_csv(txt, encoding='utf-8', index=False, **kwargs)
    txt.flush()
    buf.seek(0)

    delimiter = ','
    name = table.split('.')[-1]
    file_id = file_to_civis(buf, name, client=client)
    fut = civis_file_to_table(file_id, database, table,
                              client=client, max_errors=max_errors,
                              existing_table_rows=existing_table_rows,
                              distkey=distkey,
                              sortkey1=sortkey1, sortkey2=sortkey2,
                              delimiter=delimiter, headers=headers,
                              credential_id=credential_id,
                              polling_interval=polling_interval,
                              hidden=hidden)

    return fut
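# Minimal illustration of the buffer pattern used above, separated out for
# clarity: pandas writes text, while the upload path expects a bytes buffer,
# so on Python 3 the BytesIO is bridged through a TextIOWrapper and rewound
# before upload. The helper name is illustrative, not part of the Civis API.
def _dataframe_to_csv_buffer(df, **kwargs):
    buf = six.BytesIO()
    txt = io.TextIOWrapper(buf, encoding='utf-8') if six.PY3 else buf
    df.to_csv(txt, encoding='utf-8', index=False, **kwargs)
    txt.flush()
    buf.seek(0)
    return buf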
def get_test_assets_zip_provider():
    memfile = six.BytesIO()
    with zipfile.ZipFile(memfile, mode="w",
                         compression=zipfile.ZIP_DEFLATED) as zf:
        zf.writestr("index.html", FAKE_INDEX_HTML)
    return lambda: contextlib.closing(six.BytesIO(memfile.getvalue()))
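# A minimal usage sketch for the provider above, assuming FAKE_INDEX_HTML is
# the constant written into the archive: each call to the provider yields a
# fresh, independently closeable stream over the same zip bytes.
def _example_read_test_assets_zip():
    provider = get_test_assets_zip_provider()
    with provider() as zip_stream:
        with zipfile.ZipFile(zip_stream) as zf:
            return zf.read("index.html")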
def _repr_image_format(self, format_str):
    str_buffer = six.BytesIO()
    io.imsave(str_buffer, self.array, format_str=format_str)
    return_str = str_buffer.getvalue()
    str_buffer.close()
    return return_str
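# A hedged sketch of how a format-specific repr hook could delegate to the
# helper above (hypothetical method name, following the IPython _repr_*_
# naming convention).
def _repr_png_(self):
    return self._repr_image_format('png')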
def process(self, file_path, x_index, y_index, z_index, t_index=0):
    """
    Method to load the image file.

    Args:
        file_path(str): An absolute file path for the specified tile
        x_index(int): The tile index in the X dimension
        y_index(int): The tile index in the Y dimension
        z_index(int): The tile index in the Z dimension
        t_index(int): The time index

    Returns:
        (io.BufferedReader): A file handle for the specified tile
    """
    file_path = self.fs.get_file(file_path)

    # Compute global range
    target_x_range = [self.parameters["ingest_job"]["tile_size"]["x"] * x_index,
                      self.parameters["ingest_job"]["tile_size"]["x"] * (x_index + 1)]
    target_y_range = [self.parameters["ingest_job"]["tile_size"]["y"] * y_index,
                      self.parameters["ingest_job"]["tile_size"]["y"] * (y_index + 1)]

    # Open hdf5
    h5_file = h5py.File(file_path, 'r')

    # Compute range in actual data, taking offsets into account
    x_offset = self.parameters['offset_x']
    y_offset = self.parameters['offset_y']
    x_tile_size = self.parameters["ingest_job"]["tile_size"]["x"]
    y_tile_size = self.parameters["ingest_job"]["tile_size"]["y"]
    h5_x_range = [target_x_range[0] + x_offset, target_x_range[1] + x_offset]
    h5_y_range = [target_y_range[0] + y_offset, target_y_range[1] + y_offset]
    h5_z_slice = z_index + self.parameters['offset_z']

    tile_x_range = [0, x_tile_size]
    tile_y_range = [0, y_tile_size]

    h5_max_x = h5_file[self.parameters['data_name']].shape[2]
    h5_max_y = h5_file[self.parameters['data_name']].shape[1]

    if h5_x_range[0] < 0:
        # insert sub-region into tile
        tile_x_range = [h5_x_range[0] * -1, x_tile_size]
        h5_x_range[0] = 0

    if h5_y_range[0] < 0:
        # insert sub-region into tile
        tile_y_range = [h5_y_range[0] * -1, y_tile_size]
        h5_y_range[0] = 0

    if h5_x_range[1] > h5_max_x:
        # insert sub-region into tile
        tile_x_range = [0, x_tile_size - (h5_x_range[1] - h5_max_x)]
        h5_x_range[1] = h5_max_x

    if h5_y_range[1] > h5_max_y:
        # insert sub-region into tile
        tile_y_range = [0, y_tile_size - (h5_y_range[1] - h5_max_y)]
        h5_y_range[1] = h5_max_y

    if self.parameters['datatype'] == "uint8":
        datatype = np.uint8
    elif self.parameters['datatype'] == "uint16":
        datatype = np.uint16
    elif self.parameters['datatype'] == "uint32":
        datatype = np.uint32
    else:
        raise Exception("Unsupported datatype: {}".format(self.parameters['datatype']))

    # Allocate Tile
    tile_data = np.zeros((self.parameters["ingest_job"]["tile_size"]["y"],
                          self.parameters["ingest_job"]["tile_size"]["x"]),
                         dtype=datatype, order='C')

    if h5_z_slice >= 0:
        # Copy sub-img to tile, save, return
        tile_data[tile_y_range[0]:tile_y_range[1], tile_x_range[0]:tile_x_range[1]] = np.array(
            h5_file[self.parameters['data_name']][h5_z_slice,
                                                  h5_y_range[0]:h5_y_range[1],
                                                  h5_x_range[0]:h5_x_range[1]])

    tile_data = tile_data.astype(datatype)
    upload_img = Image.fromarray(tile_data)

    output = six.BytesIO()
    upload_img.save(output, format=self.parameters["upload_format"].upper())

    # Send handle back
    return output
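# Illustrative sketch of consuming the handle returned by process(): the
# BytesIO is positioned at its end after save(), so it has to be rewound
# before PIL can re-open it (plugin and index names here are hypothetical).
def _example_reopen_tile(plugin, file_path, x_index, y_index, z_index):
    handle = plugin.process(file_path, x_index, y_index, z_index)
    handle.seek(0)
    return Image.open(handle)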
def test_cinder_add(self):
    fake_volume = mock.MagicMock(id=str(uuid.uuid4()), status='available',
                                 size=1)
    volume_file = six.BytesIO()
    self._test_cinder_add(fake_volume, volume_file)
def test_integrity_check_with_correct_checksum(self):
    resp = utils.FakeResponse({}, six.BytesIO(b'CCC'))
    body = http.ResponseBodyIterator(resp)
    body.set_checksum('defb99e69a9f1f6e06f15006b1f166ae')
    list(body)
def serialize(store):
    # `store` is a CSV store (csvfile) here
    output = six.BytesIO()
    XlsxFormat(store).save_content(output)
    return output.getvalue()
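# Minimal usage sketch, assuming `store` is the CSV store object referred to
# in the comment above; the returned bytes can be written straight to disk
# as an .xlsx file (the file name here is arbitrary).
def _example_serialize_to_disk(store, path='export.xlsx'):
    with open(path, 'wb') as fh:
        fh.write(serialize(store))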
def __enter__(self):
    self.container = self.docker.create_container(image=self.image.tag)
    data = self.docker.export(self.container)
    self.content = six.BytesIO(data.read())
    return self
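# Hypothetical companion __exit__ sketch for the context manager above,
# assuming a docker-py style client: close the in-memory export and remove
# the temporary container created in __enter__.
def __exit__(self, exc_type, exc_value, traceback):
    self.content.close()
    self.docker.remove_container(self.container)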
def test_resources(self):
    # This is a clone of the same test for 'without-caching-proxy'.
    # Can we just call that test from this context?

    import transaction
    transaction.commit()

    # Request a skin image
    now = stable_now()
    browser = Browser(self.app)
    browser.open(self.portal.absolute_url() + '/rss.png')
    self.assertEqual('plone.resource', browser.headers['X-Cache-Rule'])
    self.assertEqual('plone.app.caching.strongCaching',
                     browser.headers['X-Cache-Operation'])
    # This should use cacheInBrowserAndProxy
    self.assertEqual('max-age=86400, proxy-revalidate, public',
                     browser.headers['Cache-Control'])
    # remove this when the next line works
    self.assertIsNotNone(browser.headers.get('Last-Modified'))
    timedelta = dateutil.parser.parse(browser.headers['Expires']) - now
    self.assertGreater(timedelta, datetime.timedelta(seconds=86390))

    # Request the skin image again -- with an IMS header to test 304
    lastmodified = browser.headers['Last-Modified']
    browser = Browser(self.app)
    browser.raiseHttpErrors = False
    browser.addHeader('If-Modified-Since', lastmodified)
    browser.open(self.portal.absolute_url() + '/rss.png')
    self.assertEqual('plone.resource', browser.headers['X-Cache-Rule'])
    self.assertEqual('plone.app.caching.strongCaching',
                     browser.headers['X-Cache-Operation'])
    # This should be a 304 response
    self.assertEqual('304 Not Modified', browser.headers['Status'])
    self.assertEqual(b'', browser.contents)

    # Request a large datafile (over 64K) to test files that use
    # the "response.write()" function to initiate a streamed response.
    # This is of type OFS.Image.File but it should also apply to
    # large OFS.Image.Image, large non-blog ATImages/ATFiles, and
    # large Resource Registry cooked files, which all use the same
    # method to initiate a streamed response.
    s = b'a' * (1 << 16) * 3
    self.portal.manage_addFile('bigfile', file=six.BytesIO(s),
                               content_type='application/octet-stream')

    import transaction
    transaction.commit()

    browser = Browser(self.app)
    browser.open(self.portal['bigfile'].absolute_url())
    self.assertEqual('plone.resource', browser.headers['X-Cache-Rule'])
    self.assertEqual('plone.app.caching.strongCaching',
                     browser.headers['X-Cache-Operation'])
    # This should use cacheInBrowserAndProxy
    self.assertEqual('max-age=86400, proxy-revalidate, public',
                     browser.headers['Cache-Control'])
    # remove this when the next line works
    self.assertIsNotNone(browser.headers.get('Last-Modified'))
    timedelta = dateutil.parser.parse(browser.headers['Expires']) - now
    self.assertGreater(timedelta, datetime.timedelta(seconds=86390))
def _gzip(bs):
    out = six.BytesIO()
    with gzip.GzipFile(fileobj=out, mode='wb') as f:
        f.write(bs)
    return out.getvalue()
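# Round-trip sketch: decompressing the bytes produced by _gzip with the same
# standard-library module should return the original payload, e.g.
# _gunzip(_gzip(b'payload')) == b'payload'.
def _gunzip(bs):
    with gzip.GzipFile(fileobj=six.BytesIO(bs), mode='rb') as f:
        return f.read()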
def background_brushed(density=None, intensity=None, color=None, opacity=None,
                       size=None, monochrome=False, direction=(), spread=(),
                       background=None, inline=False):
    if not Image:
        raise Exception("Images manipulation require PIL")

    density = [Number(v).value for v in List.from_maybe(density)]
    intensity = [Number(v).value for v in List.from_maybe(intensity)]
    color = [Color(v).value for v in List.from_maybe(color) if v]
    opacity = [Number(v).value for v in List.from_maybe(opacity)]

    size = int(Number(size).value) if size else -1
    if size < 0 or size > 512:
        size = 200

    monochrome = bool(monochrome)

    direction = [Number(v).value for v in List.from_maybe(direction)]
    spread = [Number(v).value for v in List.from_maybe(spread)]

    background = Color(background).value if background else None

    new_image = Image.new(mode='RGBA', size=(size, size))
    pixdata = new_image.load()
    _image_brushed(pixdata, size, density, intensity, color, opacity,
                   monochrome, direction, spread, background)

    if not inline:
        key = (size, density, intensity, color, opacity, monochrome,
               direction, spread, background)
        asset_file = 'brushed-%s%sx%s' % ('mono-' if monochrome else '', size, size)
        # asset_file += '-[%s][%s][%s]' % ('-'.join(to_str(s).replace('.', '_') for s in density or []),
        #                                  '-'.join(to_str(s).replace('.', '_') for s in opacity or []),
        #                                  '-'.join(to_str(s).replace('.', '_') for s in direction or []))
        asset_file += '-' + make_filename_hash(key)
        asset_file += '.png'
        asset_path = os.path.join(
            config.ASSETS_ROOT or os.path.join(config.STATIC_ROOT, 'assets'),
            asset_file)
        try:
            new_image.save(asset_path)
        except IOError:
            log.exception("Error while saving image")
            inline = True  # Retry inline version
        url = '%s%s' % (config.ASSETS_URL, asset_file)
    if inline:
        output = six.BytesIO()
        new_image.save(output, format='PNG')
        contents = output.getvalue()
        output.close()
        url = make_data_url('image/png', contents)
    inline = 'url("%s")' % escape(url)
    return String.unquoted(inline)
def GetListeFichiersLocal(self):
    """ Returns the list of local files to display """
    # Read the directory
    chemin = UTILS_Fichiers.GetRepData()
    fichiers = os.listdir(chemin)
    fichiers.sort()

    listeFichiers = []
    for fichier in fichiers:
        nomFichier = os.path.split(fichier)[1]
        titre = nomFichier[:-9]
        cheminFichier = chemin + "/" + fichier
        if (self.prefixe == None and nomFichier.endswith("_DATA.dat")) or (
                self.prefixe != None and nomFichier.endswith("_DATA.dat")
                and nomFichier.startswith(self.prefixe)):

            # Total size of the 3 databases
            taille = 0
            for suffixe in ("DATA", "DOCUMENTS", "PHOTOS"):
                fichierTemp = u"%s/%s_%s.dat" % (chemin, titre, suffixe)
                if os.path.isfile(fichierTemp):
                    taille += os.path.getsize(fichierTemp)
            taille = FormatFileSize(taille)

            # Last modification date
            t = os.path.getmtime(cheminFichier)
            date = datetime.datetime.fromtimestamp(t)
            dateModif = date.strftime("%d/%m/%Y %H:%M")

            # Open the database to retrieve information about the file
            logo = None
            description = u""
            try:
                connexion = sqlite3.connect(cheminFichier.encode('utf-8'))
                cursor = connexion.cursor()
                req = "SELECT nom, logo FROM organisateur WHERE IDorganisateur=1;"
                cursor.execute(req)
                description, logo = cursor.fetchone()
                connexion.close()
            except:
                pass

            if logo != None:
                try:
                    io = six.BytesIO(logo)
                    if 'phoenix' in wx.PlatformInfo:
                        img = wx.Image(io, wx.BITMAP_TYPE_ANY)
                    else:
                        img = wx.ImageFromStream(io, wx.BITMAP_TYPE_ANY)
                    img = RecadreImg(img)
                    image = img.ConvertToBitmap()
                except:
                    image = None
            else:
                image = None

            # Store the entry
            listeFichiers.append({
                "titre": titre,
                "image": image,
                "description": description,
                "taille": taille,
                "dateModif": dateModif,
            })

    return listeFichiers
def get_debug_stderr_kwargs(log):
    return (dict() if log.isEnabledFor(logging.DEBUG)
            else dict(stderr=six.BytesIO()))
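# Usage sketch, assuming a fabric-style callable that accepts a `stderr`
# file-object keyword (the `run` argument is a stand-in): command stderr is
# swallowed into an in-memory buffer unless the logger is enabled for DEBUG.
def _example_quiet_run(run, command, log):
    return run(command, **get_debug_stderr_kwargs(log))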