Example #1
    def render_POST(self, request):
        pre = get_arg(request, 'pre')
        SEP = ":"
        username = None

        if pre:
            username = random_adjspecies()
            token = str(int(time.time())) + SEP + username
            hmac_token = make_digest(token)
            return json.dumps(
                {'username': username,
                 'token': token + ':' + hmac_token})

        token = get_arg(request, 'token')
        if token:
            print "TOKEN>>", token
            try:
                ts, claimed_username, received_hmac = token.split(SEP)
            except ValueError:
                return failure(self.action, request, 'bad request: expected token as ts:username:hmac', 400)

            # ts comes back from split() as a string, so convert before comparing.
            if int(ts) < time.time() - 60 * 5:
                return failure(self.action, request, 'bad request: expired token', 400)

            expected = make_digest('%s:%s' % (ts, claimed_username))
            if not hmac.compare_digest(received_hmac, expected):
                return failure(self.action, request, 'bad request: corrupted hmac', 400)
            username = claimed_username.lower()
            

        if not check_args(request, 'idkey'):
            return failure(self.action, request, 'bad request: empty idkey', 400)
        if not check_args(request, 'linkkey'):
            return failure(self.action, request, 'bad request: empty linkkey', 400)


        idkey = get_arg(request, 'idkey')
        linkkey = get_arg(request, 'linkkey')

        if not username:
            username = random_adjspecies()

        try:
            self.backend.new(username.lower(), idkey, linkkey)
        except Exception as exc:
            # XXX have retries here
            request.setResponseCode(500)
            return failure(self.action, request, 'error: %r' % exc, 500)
        return success(self.action, username)
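
# Note: make_digest() is not shown in this example. A minimal sketch, assuming an
# HMAC-SHA256 over the "ts:username" string with a server-side secret (the
# SECRET_KEY literal below is illustrative, not part of the original code):
import hashlib
import hmac

SECRET_KEY = b'change-me'  # placeholder; load the real secret from configuration

def make_digest(message):
    # Hex digest so it can be embedded in the ts:username:hmac token and later
    # checked with hmac.compare_digest().
    return hmac.new(SECRET_KEY, message.encode('utf-8'), hashlib.sha256).hexdigest()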
Example #2
import inspect
import time

from nose.tools import *
from nose.tools import assert_raises
from datetime import datetime
from boto3.dynamodb.conditions import Key, Attr
import boto3
import botocore

import adjspecies

# How to run test
# - nosetests test
# - nosetests test.test_sot
# - nosetests test.test_sot.test_sot_id_uniqness

LANGUAGE = 'hi'
USERID = 9090909090909090
RUNTAG = adjspecies.random_adjspecies('_', 7)
ACCESSTOKEN = 'test.sot.{}'.format(adjspecies.random_adjspecies('', 7))
TESTTS = "%.10f" % time.time()


def _getfname(fname):
    return "{:<21}".format(fname[5:][:21])


class test_sot():
    def __init__(self):
        dynamodb = boto3.resource('dynamodb')
        self.table = dynamodb.Table('sot')

    def sot_insert(self, item):
        return self.table.put_item(Item=item)

    def pick_new_name(self):
        # self.dict is not defined in this snippet; it is assumed to hold the
        # names that are already in use.
        new_name = adjspecies.random_adjspecies(sep='_', maxlen=10, prevent_stutter=True)
        while new_name in self.dict:
            new_name = adjspecies.random_adjspecies(sep='_', maxlen=10, prevent_stutter=True)
        return new_name
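
# A minimal smoke test showing how the helper above might be used. The item's
# attribute names are assumptions for illustration; the real 'sot' table schema
# is not shown in this snippet.
def test_sot_insert_smoke():
    sot = test_sot()
    item = {
        'userid': USERID,
        'language': LANGUAGE,
        'runtag': RUNTAG,
        'accesstoken': ACCESSTOKEN,
        'ts': TESTTS,
    }
    response = sot.sot_insert(item)
    assert_equal(response['ResponseMetadata']['HTTPStatusCode'], 200)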


RPpow = 0.5
print('RPpow: ' + str(RPpow))

# img_process
# -1 = no absolute value
# 0 = pow
# 1 = hist EQ
# 2 = AdaHist

img_process = 0
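
# Rough sketch of how the img_process modes listed above could be applied to a
# NumPy array; the apply_img_process name and the use of skimage.exposure are
# assumptions, not part of the original pipeline.
import numpy as np
from skimage import exposure

def apply_img_process(x, mode=img_process, power=RPpow):
    if mode == -1:                   # no absolute value, pass through
        return x
    x = np.abs(x)
    if mode == 0:                    # pow
        return np.power(x, power)
    if mode == 1:                    # hist EQ
        return exposure.equalize_hist(x)
    if mode == 2:                    # AdaHist (CLAHE); expects values in [0, 1]
        return exposure.equalize_adapthist(x / x.max())
    raise ValueError('unknown img_process mode: %r' % mode)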

RP_list = ''.join(str(k) for k in RPs_in_level)

rand_animal = adjspecies.random_adjspecies()
# pickle_file = 'dt20_j' + str(jump_num)+'_' + RP_list + '_' + 'SR' + str(int(tar_freq/1000)) + 'kHz_' + rand_animal
pickle_file = 'NNM_cla_'+ 'ss_'+ str(ss_mode)+ '_' + data_tag +'_j' + str(jump_num) +'_' + RP_list + '_' + 'SR' + str(int(tar_freq/1000)) + 'kHz_' + rand_animal
# pickle_file = 'data_j' + str(jump_num)+'_01010101_' + 'SR12kHz' + '_NOA' +'_dm400'
# pickle_file = 'test_mat'
print('pickle file name: ' + pickle_file)

speak = 0

## Sample interval for each level
# RPs_interval = [8*2048, 4*2048,   2*2048, 2048, 1024, 512,     256, 128, 64, 32]
# RPs_interval = [8*2048, 4*2048, 2*2048, 24, 1024, 256, 256, 64, 64]
# itv = 64
# RPs_interval = [512, 512, 512, 512, 512, 512, 512, 512]
# RPs_interval = [itv, itv, itv, itv, itv, itv, itv, itv, itv, itv]
# itv_stride = [[32],[64],[128],[256],[512],[1024],[2048],[4096]]
Example #5
                'meta/scd': self._float_feature([scd]),
                'meta/proj_angle': self._float_feature([proj_angle]),
                'meta/thres': self._float_feature([thres]),
                'meta/filename': self._bytes_feature([filename.encode("utf8")
                                                      ]),
                'meta/patient_id': self._bytes_feature(
                    [patient_id.encode("utf8")]),
                'meta/dir_hash': self._bytes_feature([dir_hash.encode("utf8")])
            }))
        del np_img
        del np_vol
        gc.collect()
        yield example


session_nickname = adjspecies.random_adjspecies(
    '-') + '-' + datetime.datetime.now().strftime("%Y-%m-%d--%Hh%Mm%Ss")

options = PipelineOptions(flags=sys.argv)
google_cloud_options = options.view_as(GoogleCloudOptions)
google_cloud_options.project = 'x-ray-reconstruction'
google_cloud_options.job_name = 'create-tfrecords-' + session_nickname
google_cloud_options.staging_location = 'gs://cxr-to-chest-ct2/binaries'
google_cloud_options.temp_location = 'gs://cxr-to-chest-ct2/temp'
# google_cloud_options.region = 'us-east4'
# google_cloud_options.machine_type = 'n1-highmem-2'
options.view_as(SetupOptions).save_main_session = True

with beam.Pipeline(options=options) as p:
    train_dataset_prefix = os.path.join('gs://cxr-to-chest-ct2/tfrecords/',
                                        session_nickname, 'train')
    test_dataset_prefix = os.path.join('gs://cxr-to-chest-ct2/tfrecords/',