Example #1
def U4(Z, small=False):
    Z = Z * 2
    if small:
        mean = varify(
            np.array([[-2., 0.], [2., 0.], [0., 2.], [0., -2.]],
                     dtype='float32'))
        lv = Variable(torch.log(torch.ones(1) * 0.2))
    else:
        mean = varify(
            np.array([[-5., 0.], [5., 0.], [0., 5.], [0., -5.]],
                     dtype='float32'))
        lv = Variable(torch.log(torch.ones(1) * 1.5))

    if cuda:
        mean = mean.cuda()
        lv = lv.cuda()

    d1 = log_normal(Z, mean[None, 0, :], lv).sum(1) + np.log(0.1)
    d2 = log_normal(Z, mean[None, 1, :], lv).sum(1) + np.log(0.3)
    d3 = log_normal(Z, mean[None, 2, :], lv).sum(1) + np.log(0.4)
    d4 = log_normal(Z, mean[None, 3, :], lv).sum(1) + np.log(0.2)

    return logsumexp(
        torch.cat([d1[:, None], d2[:, None], d3[:, None], d4[:, None]], 1),
        1) + 2.5
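
A note on the helpers: every snippet on this page pulls log_normal from its own project's utils, and the conventions differ. Here it is an elementwise diagonal-Gaussian log-density over a log-variance (the caller applies .sum(1)); in Examples #11, #20 and #22 the third argument is a plain variance; and in the Kodi snippets (Examples #2, #4, #30, #32) utils.log_normal is just a logging call. A minimal sketch of the two helpers Example #1 assumes, stated as an assumption rather than any project's actual code:

import numpy as np
import torch

def log_normal(x, mean, log_var):
    # Elementwise log N(x; mean, exp(log_var)):
    # -0.5 * (log(2*pi) + log_var + (x - mean)^2 / exp(log_var))
    return -0.5 * (np.log(2 * np.pi) + log_var
                   + (x - mean) ** 2 / log_var.exp())

def logsumexp(x, dim):
    # Numerically stable log(sum(exp(x), dim)).
    m, _ = x.max(dim, keepdim=True)
    return (x - m).exp().sum(dim).log() + m.squeeze(dim)
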
Example #2
    def __init__(self, duration, path, scaling, position, mjpeg_stream):
        utils.log_normal("Showing preview")

        self.buttons = []

        self.duration = duration
        self.path = path
        self.mjpeg_stream = mjpeg_stream

        self.setProperty('zorder', "99")

        WIDTH = 320
        HEIGHT = 180

        width = int(WIDTH * scaling)
        height = int(HEIGHT * scaling)

        if "bottom" in position:
            y = 720 - height
        else:
            y = 0

        if "left" in position:
            x = 0
            start = -width
        else:
            x = 1280 - width
            start = width

        animations = [
            ('WindowOpen',
             "effect=slide start={0:d} time=2000 tween=cubic easing=out".
             format(start)),
            ('WindowClose',
             "effect=slide end={0:d} time=2000 tween=cubic easing=in".format(
                 start))
        ]

        self.image = xbmcgui.ControlImage(x, y, width, height,
                                          utils.TEXTURE_FMT.format('black'))
        self.addControl(self.image)
        self.image.setAnimations(animations)

        trans = utils.TEXTURE_FMT.format('trans')
        self.select_button = xbmcgui.ControlButton(x, y, width, height, "",
                                                   trans, trans)
        self.addControl(self.select_button)
        self.select_button.setAnimations(animations)

        button_scaling = 0.5 * scaling
        button_width = int(round(Button.WIDTH * button_scaling))
        self.close_button = Button(self,
                                   'close',
                                   x + width - button_width - 10,
                                   y + 10,
                                   scaling=button_scaling)
        self.addControl(self.close_button)
        self.close_button.setAnimations(animations)
Example #3
File: vae.py Project: lim0606/BDMC
  def sample(self, mu, logvar):
    eps = torch.randn(mu.size()).cuda()
    z = eps.mul(logvar.mul(0.5).exp_()).add_(mu)
    logqz = utils.log_normal(z, mu, logvar)

    zeros = torch.zeros(z.size()).cuda()
    logpz = utils.log_normal(z, zeros, zeros)

    return z, logpz, logqz
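
Example #3 draws z ~ q(z|x) with the reparameterization trick, then evaluates both the posterior and the standard-normal prior at the sample. A self-contained restatement (CPU, no class), assuming the elementwise log_normal sketched under Example #1:

import torch

def sample(mu, logvar):
    eps = torch.randn_like(mu)
    z = mu + eps * (0.5 * logvar).exp()   # z ~ N(mu, exp(logvar)), differentiable in mu, logvar
    logqz = log_normal(z, mu, logvar)     # log q(z|x)
    zeros = torch.zeros_like(z)
    logpz = log_normal(z, zeros, zeros)   # log p(z) under N(0, I)
    return z, logpz, logqz

mu, logvar = torch.zeros(4, 8), torch.zeros(4, 8)
z, logpz, logqz = sample(mu, logvar)
kl_mc = (logqz - logpz).sum(1).mean()     # single-sample Monte Carlo KL estimate
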
Example #4
    def __init__(self, duration, interval, path, scaling, position, snapshot_cmd):
        utils.log_normal("Showing preview")
        
        self.buttons = []
        
        self.duration = duration
        self.interval = interval
        self.path = path
        self.snapshot_cmd = snapshot_cmd
        
        self.setProperty('zorder', "99")
        
        WIDTH = 320
        HEIGHT = 180

        width = int(WIDTH * scaling)
        height = int(HEIGHT * scaling)

        if "bottom" in position:
            y = 720 - height
        else:
            y = 0

        if "left" in position:
            x = 0
            start = -width
        else:
            x = 1280 - width
            start = width

        animations = [('WindowOpen',
                       "effect=slide start={0:d} time=2000 tween=cubic easing=out".format(start)),
                      ('WindowClose',
                       "effect=slide end={0:d} time=2000 tween=cubic easing=in".format(start))]

        self.closing = False

        with utils.SnapShot(self.path, self.interval, self.snapshot_cmd) as snapshot:
            self.image = xbmcgui.ControlImage(x, y, width, height, snapshot)
            self.addControl(self.image)
            self.image.setAnimations(animations)
            
            trans = utils.TEXTURE_FMT.format('trans')
            self.select_button = xbmcgui.ControlButton(x, y, width, height, "", trans, trans)
            self.addControl(self.select_button)
            self.select_button.setAnimations(animations)

            button_scaling = 0.5 * scaling
            button_width = int(round(Button.WIDTH * button_scaling))
            self.close_button = Button(self, 'close', x + width - button_width - 10, y + 10, scaling=button_scaling)
            self.addControl(self.close_button)
            self.close_button.setAnimations(animations)
Example #5
def internalerror():
    """ HTTP 500 handler"""
    # err_msg = wrap_error(ERROR_SERVER_ERROR,'Server Error 500', HTTP500_DELAY)
    log_normal(logger, {
        'action': 'server-internal-error',
        'error': format_exc()
    }, LOG_ERROR)

    # return web.internalerror(err_msg)
    result = wrap_result(DEFAULT_JSONRPC_ID, OVERALL_WORKING, [])
    web.header('Retry-After', '%d' % RETRY_AFTER)
    web.header('X-Processing-Retry', '%d' % int(PROCESSING_RETRY))
    return web.OK(result)
Example #7
 def __init__(self, logger, routing_key, exchange_name, publish_timeout, mq_url):
     log_normal(logger, {
         'action': 'config-querybroker',
         'info': {
             'routing_key': routing_key,
             'exchange_name': exchange_name,
             'mq_url': mq_url
         }
     }, LOG_INFO)
     self.logger = logger
     self.ROUTING_KEY = routing_key
     self.exchange = Exchange(exchange_name, type='fanout')
     self.publish_timeout = publish_timeout
     self.connection = Connection(mq_url)
Example #8
def loop_tasks(logger, redis_server, redis_list_name,
               pickle_dir, pickle_ext, pickle_corrupt_time,
               broker_routing_key, broker_exchange, broker_mq_url,
               swift_auth, swift_user, swift_key):
    while process_isalive(os.getppid()):
        try:
            _loop_tasks(logger, redis_server, redis_list_name,
                        pickle_dir, pickle_ext, pickle_corrupt_time,
                        broker_routing_key, broker_exchange, broker_mq_url,
                        swift_auth, swift_user, swift_key)
        except Exception:
            log_normal(logger, {
                'action': 'main-loop-exception',
                'error': traceback.format_exc()
            }, LOG_ERROR)
            time.sleep(0.5)
Example #10
 def density(self, spl, lgd=None, context=None, zeros=None):
     lgd = self.lgd if lgd is None else lgd
     context = self.context if context is None else context
     zeros = self.zeros if zeros is None else zeros
     z, logdet, _ = self.mdl((spl, lgd, context))
     losses = -utils.log_normal(z, zeros, zeros + 1.0).sum(1) - logdet
     return -losses
Example #11
 def get_kl(self, z):
     m_mixture, z_mixture = utils.gaussian_parameters(self.z_pre, dim=1)
     m = self.z_mean
     v = self.z_sigma
     kl = torch.mean(
         utils.log_normal(z, m, v) + self.flow_log_prob -
         utils.log_normal_mixture(z, m_mixture, z_mixture))
     return kl
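
The KL here is a Monte Carlo estimate against a mixture-of-Gaussians prior, which needs a mixture log-density. A plausible log_normal_mixture consistent with how it is called above, assuming uniform component weights (the project's own utils may differ):

import math
import torch

def log_normal_mixture(z, m, v):
    # z: [B, D]; m, v: [B, K, D] component means and variances.
    z = z.unsqueeze(1)                                   # [B, 1, D]
    comp = -0.5 * (math.log(2 * math.pi) + v.log()
                   + (z - m) ** 2 / v).sum(-1)           # [B, K] per-component log-densities
    return torch.logsumexp(comp, dim=1) - math.log(m.size(1))
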
Example #12
def swift_upload(container, path, auth, user, key):
    conn = SwiftConnection(auth, user, key, snet=False, insecure=True)
    put_headers = {'x-object-meta-mtime': "%f" % getmtime(path)}
    retry = SWIFT_RETRY_TIMES
    while retry > 0:
        try:
            with open(path, 'rb') as fd:
                conn.put_object(container, path, fd,
                                content_length=getsize(path), headers=put_headers)
            return True
        except ClientException:
            log_normal(logger, {
                'action': 'upload-error',
                'error': 'swift client exception'
            }, LOG_ERROR)
            conn.put_container(container, headers={})
            retry -= 1
    return False
Example #13
def load_apikey(uuid=None):
    global APIKEY_LAST_MODIFY, APIKEYS
    last_apikey = APIKEYS
    
    with open(APIKEY_FILE, 'r') as f:
        APIKEY_LAST_MODIFY = os.stat(APIKEY_FILE).st_mtime
        data = json.load(f)
        APIKEYS = data.values()
        log_normal(logger, {
            'action': 'load-apikey-from-file',
            'info': {
                'file': APIKEY_FILE,
                'file_modify': os.stat(APIKEY_FILE).st_mtime,
                'apikey_last_modify': APIKEY_LAST_MODIFY,
                'last_apikeys': last_apikey,
                'apikeys': APIKEYS
            }
        }, LOG_INFO, uuid=uuid)
        return APIKEYS
Example #14
File: ais.py Project: lxuechen/BDMC
    def log_f_i(z, data, t, log_likelihood_fn=utils.log_bernoulli):
        """Unnormalized density for intermediate distribution `f_i`:
            f_i = p(z)^(1-t) p(x,z)^(t) = p(z) p(x|z)^t
        =>  log f_i = log p(z) + t * log p(x|z)
        """
        zeros = torch.zeros_like(z)
        log_prior = utils.log_normal(z, zeros, zeros)
        log_likelihood = log_likelihood_fn(model.decode(z), data)

        return log_prior + log_likelihood.mul_(t)
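
log_f_i interpolates between the prior (t = 0) and the unnormalized posterior (t = 1); AIS accumulates importance weights over a schedule of such t values, refreshing z with an MCMC transition between steps. A sketch of that outer loop (the schedule length and the transition function are illustrative, not the project's code):

import numpy as np

betas = np.linspace(0.0, 1.0, 1000)  # t = 0: prior only; t = 1: full posterior
# log_w starts at zero for each chain:
# for t0, t1 in zip(betas[:-1], betas[1:]):
#     log_w = log_w + log_f_i(z, data, t1) - log_f_i(z, data, t0)
#     z = transition(z, data, t1)  # e.g. an HMC step targeting f_{t1}
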
Example #15
    def start_empty_archive():
        if os.path.exists(PID_FILE):
            return

        with open(PID_FILE, 'w') as f:
            f.write(str(os.getpid()))
        time.sleep(0.2)
        with open(PID_FILE, 'r') as f:
            pid = int(f.read().strip())
        if pid == os.getpid():
            log_normal(logger, {
                'action': 'empty-archive-starting',
                'info': {
                    'pid': pid
                }
            }, LOG_INFO)
            r.ltrim(redis_list_name, 0, -1)
            push_redis()
            log_normal(logger, {'action': 'empty-archive-done'}, LOG_INFO)
            os.remove(PID_FILE)
Example #17
    def objective(self, x, encoder, decoder):
        '''
        elbo: [1]
        '''

        #Encode
        z_mean_logvar = encoder.model(x) #[B,Z*2]
        z_mean = tf.slice(z_mean_logvar, [0,0], [self.batch_size, self.z_size]) #[B,Z] 
        z_logvar = tf.slice(z_mean_logvar, [0,self.z_size], [self.batch_size, self.z_size]) #[B,Z]

        # #Sample z
        # eps = tf.random_normal((self.batch_size, self.n_z_particles, self.z_size), 0, 1, dtype=tf.float32) #[B,P,Z]
        # z = tf.add(z_mean, tf.multiply(tf.sqrt(tf.exp(z_logvar)), eps)) #uses broadcasting,[B,P,Z]

        # Sample z  [B,Z]
        eps = tf.random_normal((self.batch_size, self.z_size), 0, 1, dtype=tf.float32, seed=self.rs) #[B,Z]
        z = tf.add(z_mean, tf.multiply(tf.sqrt(tf.exp(z_logvar)), eps)) #[B,Z]

        # [B]
        log_pz = log_normal(z, tf.zeros([self.batch_size, self.z_size]), tf.log(tf.ones([self.batch_size, self.z_size])))
        log_qz = log_normal(z, z_mean, z_logvar)

        # Decode [B,P,X], [P], [P]
        x_mean, log_pW, log_qW = decoder.model(z)
        
        # Likelihood [B,P]
        log_px = log_bernoulli(x, x_mean)

        # Objective
        self.log_px = tf.reduce_mean(log_px) #over batch + W_particles
        self.log_pz = tf.reduce_mean(log_pz) #over batch
        self.log_qz = tf.reduce_mean(log_qz) #over batch 
        self.log_pW = tf.reduce_mean(log_pW) #W_particles
        self.log_qW = tf.reduce_mean(log_qW) #W_particles

        elbo = self.log_px + self.log_pz - self.log_qz + self.batch_frac*(self.log_pW - self.log_qW)

        self.z_elbo = self.log_px + self.log_pz - self.log_qz 

        return elbo
Example #18
def calc_iw(args, data, model, meta_optimizer, criterion, device):
    report_nll_loss = 0
    report_num_words = report_num_sents = 0

    num_iw_samples = 500
    for i in range(len(data)):
        print(f'Iter IW: {i}')
        sents, length, batch_size = data[i]
        sents = sents.to(device)
        length = length.item()
        batch_size = batch_size.item()
        report_num_words += batch_size * length
        report_num_sents += batch_size
        batch_iwae = torch.zeros(batch_size, num_iw_samples)
        for j in range(num_iw_samples):
            mean, logvar = model._enc_forward(sents)
            z_samples = model._reparameterize(mean, logvar)
            preds = model._dec_forward(sents, z_samples)

            nll_vae = torch.Tensor([
                length * criterion(preds[l, :], sents[l, 1:])
                for l in range(batch_size)
            ]).to(device)
            # kl_vae = utils.kl_loss_diag(mean, logvar, average=False)
            batch_log_likelihood = -nll_vae

            zeros = torch.zeros_like(mean)
            log_prior = utils.log_normal(z_samples, zeros, zeros)
            log_approx_posterior = utils.log_normal(z_samples, mean, logvar)
            batch_iwae[:, j] = (batch_log_likelihood + log_prior -
                                log_approx_posterior).data

        batch_iw_loss = utils.logsumexp(batch_iwae) - np.log(num_iw_samples)
        report_nll_loss += torch.sum(-batch_iw_loss).item()

    nll = report_nll_loss / report_num_sents
    ppl = np.exp(report_nll_loss / report_num_words)
    print('iw nll: %.4f, iw ppl: %.4f' % (nll, ppl))
    sys.stdout.flush()
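
Each column of batch_iwae holds one log importance weight, log p(x, z_j) - log q(z_j|x); the last lines form the importance-weighted (IWAE) estimate: logsumexp over the K samples minus log K. The same bound in isolation, using torch's built-in logsumexp rather than the project's utils (a sketch):

import math
import torch

def iwae_bound(log_w):
    # log_w: [batch, K] log importance weights; returns a per-example
    # lower bound on log p(x) that tightens as K grows.
    K = log_w.size(1)
    return torch.logsumexp(log_w, dim=1) - math.log(K)
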
Example #19
def do_task(logger, pickle_path, swift_auth, swift_user, swift_key):
    
    with open(pickle_path, 'rb') as f:
        task_info, file_path, container, seed_file = pickle.load(f)

    with open(file_path, 'wb') as f:
        seed_file_bin = base64.b64decode(seed_file)
        f.write(seed_file_bin)

    uuid = task_info[0][-2]
    log_normal(logger, {
        'action': 'going-todo-task',
        'info': {
            'pickle_path': pickle_path,
            'task_info': task_info
        }
    }, LOG_INFO, uuid=uuid)

    try:
        g = gevent.spawn(swift_upload, container, file_path, swift_auth, swift_user, swift_key)
        if not g.get(block=True, timeout=SWIFT_TIMEOUT):
            g.kill()
            raise Exception('Swift Upload Failed!')
            
        log_normal(logger, {
            'action': 'upload-ok',
        }, LOG_INFO, uuid=uuid)
        _args, _kwargs = task_info
        qb.push(*_args, **_kwargs)
        log_normal(logger, {
            'action': 'push-querybroker-ok',
        }, LOG_INFO, uuid=uuid)
    except Exception:
        log_normal(logger, {
            'action': 'upload-error',
            'error': traceback.format_exc()
        }, LOG_ERROR, uuid=uuid)
    finally:
        os.remove(file_path) 
        os.remove(pickle_path)
Example #20
def compute_loss(batch, grid, mask, z_params_full, z_params_masked, h, w,
                 decoder):
    ## compute loss
    z_full = sample_z(z_params_full)  # size bsize * hidden
    z_full = z_full.unsqueeze(1).expand(-1, h * w, -1)

    # resize context to have one context per input coordinate
    grid_input = grid.view(1, h * w, -1).expand(batch.size(0), -1, -1)
    target_input = torch.cat([z_full, grid_input], dim=-1)

    reconstructed_image_mean, reconstructed_image_variance = decoder(
        target_input)  # bsize,h*w,1
    reconstruction_loss = -(
        log_normal(x=batch.view(batch.size(0), 3, h * w).transpose(1, 2),
                   m=reconstructed_image_mean,
                   v=reconstructed_image_variance) *
        (1 - mask.view(-1, h * w))).sum(dim=1).mean()

    kl_loss = kl_normal(z_params_full, z_params_masked).mean()
    return reconstruction_loss, kl_loss, reconstructed_image_mean, reconstructed_image_variance
Example #21
    def _get_elbo(self):
        """
        negative elbo, an upper bound on NLL
        """

        logdets = self.logdets
        logqw = - logdets
        """
        originally...
        logqw = - (0.5*(ep**2).sum(1)+0.5*T.log(2*np.pi)*num_params+logdets)
            --> constants are neglected in this wrapper
        """
        logpw = self.prior(self.weights,0.,-T.log(self.lbda)).sum(1)
        """
        using normal prior centered at zero, with lbda being the inverse 
        of the variance
        """
        kl = (logqw - logpw).mean()
        y_, lv = self.y[:,:1], self.y[:,1:]
        
        logpyx = log_normal(y_,self.target_var,lv).mean()
        self.loss = - (logpyx - kl/T.cast(self.dataset_size,floatX))
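
The prior term above is a zero-mean Gaussian whose log-variance is -log(lbda), i.e. lbda acts as a precision. Restated elementwise in the convention used elsewhere on this page (the snippet itself is Theano; this is an assumption about its prior helper, not its code):

import math

def log_prior(w, lbda):
    # log N(w; 0, 1/lbda) = -0.5 * (log(2*pi) - log(lbda) + lbda * w**2),
    # elementwise, with lbda a scalar precision (inverse variance).
    return -0.5 * (math.log(2 * math.pi) - math.log(lbda) + lbda * w ** 2)
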
Example #22
 def forward(self, inputs):
     ret = {}
     x, y_class = inputs['x'], inputs['y_class']
     m, v = self.encoder(x)
     # Compute the mixture of Gaussian prior
     prior = ut.gaussian_parameters(self.z_pre, dim=1)
     if self.use_deterministic_encoder:
         y = self.decoder(m)
         kl_loss = torch.zeros(1)
     else:
         z = ut.sample_gaussian(m, v)
         decoder_input = z if not self.use_encoding_in_decoder else \
         torch.cat((z,m),dim=-1) #BUGBUG: Ideally the encodings before passing to mu and sigma should be here.
         y = self.decoder(decoder_input)
         #compute KL divergence loss :
         z_prior_m, z_prior_v = prior[0], prior[1]
         kl_loss = ut.log_normal(z, m, v) - ut.log_normal_mixture(
             z, z_prior_m, z_prior_v)
     #compute reconstruction loss
     if self.loss_type == 'chamfer':
         x_reconst = CD_loss(y, x)
     # mean or sum
     if self.loss_sum_mean == "mean":
         x_reconst = x_reconst.mean()
         kl_loss = kl_loss.mean()
     else:
         x_reconst = x_reconst.sum()
         kl_loss = kl_loss.sum()
     nelbo = x_reconst + kl_loss
     ret = {'nelbo': nelbo, 'kl_loss': kl_loss, 'x_reconst': x_reconst}
     # classifer network
     mv = torch.cat((m, v), dim=1)
     y_logits = self.z_classifer(mv)
     z_cl_loss = self.z_classifer.cross_entropy_loss(y_logits, y_class)
     ret['z_cl_loss'] = z_cl_loss
     return ret
Example #23
    def POST(self):
        """
        Input:
        ======
        POST /query?key=thunder-client&hash=hash-code&f=0 HTTP/1.1
        Content-Type: application/json-rpc
        Content-Length: xxx
        User-Agent: thunder-windows-client 7.0.x
        X-Thunder-Client: some-pirate-client
        X-Download-Protocol: http
        X-Download-Type: video/mp4

        {
            "jsonrpc": "2.0",
            "id": "1",
            "method": "query",
            "params":
            {
              "seed_file":"torrent content, base64 encoded text",
            }
        }

        PostForm:
        ========
        :param seed_file: Seed file(.torrent) content.

        Steps:
        ======
          * Check params
          * Decode `seed_file`
          * Push query broker
          * Return result

        """

        postform = json.loads(web.data())
        jsonrpc_id = postform.get('id', DEFAULT_JSONRPC_ID)
        params = postform.get('params', {})
        seed_file = params.get('seed_file', None)

        if not seed_file:
            self.error_code = ERROR_PARAMS
            self.error_msg = '`seed_file` required!'
            self.error_data.append('seed_file')

        # Error, return
        if self.error_code:
            message = wrap_error(self.error_code, self.error_msg,
                                 self.error_data)
            log_normal(logger, {
                'action': 'bad-request',
                'info': {
                    'message': message
                }
            },
                       LOG_WARN,
                       uuid=self.uuid)
            raise web.BadRequest(message)

        log_bill(logger_bill, {
            'action': 'post-request',
            'client_id': self.client_id,
            'client_addr': self.client_addr,
            'method': 'POST',
            'hash': self.hash,
            'url': self.url
        },
                 uuid=self.uuid)

        result = self.check_cheating()
        if result is not None:
            log_normal(logger, {
                'action': 'return-POST-result',
                'tag': 'filter case',
                'info': {
                    'result': result,
                    'mime-type': self.mime_type,
                    'ext': self.ext,
                }
            },
                       LOG_INFO,
                       uuid=self.uuid)
            return result

        overall = None
        if self.hash:
            self.hash = 'thunder_hash#%s' % self.hash
            overall, listing = mw.query(self.hash, self.uuid)

        seed_file_hash = 'seed_hash#%s' % self.digest
        if overall is None:
            overall, listing = mw.query(seed_file_hash, self.uuid)

        if overall == OVERALL_WORKING:
            web.header('Cache-Control', 'no-cache')
        elif overall is not None:
            web.header('Cache-Control', 'public, max-age=%d' % CACHE_MAX_AGE)
        elif not WORKING_AS_PASS:
            # Two cases: 1. hash from client, 2. hash from gateway.
            web.header('Cache-Control', 'no-cache')
            time_now_str = datetime.now().strftime("%M-%S")
            file_name = '%s_%s.torrent' % (time_now_str, self.uuid)
            file_path = os.path.join(PICKLE_DIR, file_name)
            container = datetime.now().strftime('%Y-%m-%d_%H')
            remote_path = '%s/%s' % (container, file_path)
            log_normal(logger, {
                'action': 'upload-to-swift',
                'info': {
                    'message': 'ok.',
                    'remote_path': remote_path
                }
            },
                       LOG_INFO,
                       uuid=self.uuid)
            url_data = {
                'location': self.url,
                'hash': None,
            }
            seed_file_data = {
                'path': remote_path,
                # base64 str
                'hash': seed_file_hash
            }
            task_info = ((self.progress, self.client_id, self.client_addr,
                          self.digest, self.algorithm, self.file_name,
                          self.file_size, HOST_NAME, self.referer, self.hash,
                          self.mime_type, 'torrent', self.uuid, self.ext,
                          SERVER_FLAG), {
                              'url': url_data,
                              'seed_file': seed_file_data
                          })
            # Push redis server
            r = redis.Redis(REDIS_SERVER)
            list_len = r.llen(REDIS_LIST_NAME)
            if list_len > MAX_LIST_LEN:
                log_normal(logger, {
                    'action': 'redis-list-full',
                    'info': {
                        'max_list_len': MAX_LIST_LEN
                    }
                },
                           LOG_WARN,
                           uuid=self.uuid)
                raise ServiceUnavailable(
                    wrap_error(ERROR_SERVER_BUSY, "Server Busy",
                               HTTP503_DELAY))

            pickle_path = os.path.join(
                PICKLE_DIR, '%s_%s.%s' % (time_now_str, self.uuid, PICKLE_EXT))
            with open(pickle_path, 'wb') as f:
                pickle.dump((task_info, file_path, container, seed_file), f)
            r.lpush(REDIS_LIST_NAME, pickle_path)
            overall = OVERALL_WORKING

        result = wrap_result(jsonrpc_id, overall, listing)
        if overall == OVERALL_WORKING or overall is None:
            if WORKING_AS_PASS:
                result = IMD_RESULT
            else:
                web.header('Retry-After', '%d' % RETRY_AFTER)
                web.header('X-Processing-Retry', '%d' % int(PROCESSING_RETRY))

        tag = 'working_as_pass=%r,overall=%r' % (WORKING_AS_PASS, overall)
        log_normal(logger, {
            'action': 'return-POST-result',
            'client_id': self.client_id,
            'user_agent': self.user_agent,
            'tag': tag,
            'info': result
        },
                   LOG_INFO,
                   uuid=self.uuid)
        return result
Example #24
def _loop_tasks(logger, redis_server, redis_list_name,
                pickle_dir, pickle_ext, pickle_corrupt_time,
                broker_routing_key, broker_exchange, broker_mq_url,
                swift_auth, swift_user, swift_key):

    log_normal(logger, {'action': 'uploader-started'}, LOG_INFO)
    
    try:
        if not os.path.exists(pickle_dir):
            os.mkdir(pickle_dir)
    except OSError:
        pass

    r = redis.Redis(redis_server)
    def push_redis():
        ts_now = time.time()
        for path in os.listdir(pickle_dir):
            path = os.path.join(pickle_dir, path)
            if path.endswith(pickle_ext):
                ts_file = os.path.getmtime(path)
                
                if ts_now - ts_file > pickle_corrupt_time:
                    os.remove(path)
                else:
                    r.lpush(redis_list_name, path)

    def start_empty_archive():
        if os.path.exists(PID_FILE):
            return

        with open(PID_FILE, 'w') as f:
            f.write(str(os.getpid()))
        time.sleep(0.2)
        with open(PID_FILE, 'r') as f:
            pid = int(f.read().strip())
        if pid == os.getpid():
            log_normal(logger, {
                'action': 'empty-archive-starting',
                'info': {
                    'pid': pid
                }
            }, LOG_INFO)
            r.ltrim(redis_list_name, 0, -1)
            push_redis()
            log_normal(logger, {'action': 'empty-archive-done'}, LOG_INFO)
            os.remove(PID_FILE)

    if r.llen(redis_list_name) == 0:
        log_normal(logger, {'action': 'no-task-in-redis-queue'}, LOG_INFO)
        start_empty_archive()

    # Main loop.
    p = Pool(POOL_SIZE)
    while process_isalive(os.getppid()):
        res = r.brpop(redis_list_name, timeout=1)
        if not res:
            continue
        _, pickle_path = res
        log_normal(logger, {
            'action': 'got-redis-task',
            'info': {
                'pickle_path': pickle_path
            }
        }, LOG_INFO)
        p.wait_available()
        p.apply_async(do_task, (logger, pickle_path, swift_auth, swift_user, swift_key))

    p.join()
    # Delete pid file
    if os.path.exists(PID_FILE):
        try:
            os.remove(PID_FILE)
        except OSError:
            pass

    log_normal(logger, {'action': 'exit-uploader-process'}, LOG_INFO)
Example #25
    def push(self,
             progress,
             client_id,
             client_address,
             digest,
             digest_algorithm,
             file_name,
             file_size,
             host_name,
             referer,
             thunder_hash,
             mime_type,
             protocol,
             task_uuid,
             file_ext,
             server_flag,
             url=None,
             seed_file=None):

        if url is None:
            log_normal(self.logger, {
                'action': 'query-broker-error',
                'error': 'Url required!',
                'info': {
                    'url': url,
                    'seed_file': seed_file
                }
            },
                       LOG_ERROR,
                       uuid=task_uuid)
            raise QueryBrokerError('url required!')

        if file_size is None:
            file_size = 0
        params = {
            'process': progress,
            'priority': 'low',
            'additional_info': {
                'client_id': client_id,
                'client_address': client_address,
            },
            'digest': digest,
            'digest_algorithm': digest_algorithm,
            'file_name': file_name,
            'file_size': file_size,
            'host_name': host_name,
            'refer': referer,
            'thunder_hash': thunder_hash,
            'mime_type': mime_type,
            'protocol': protocol,
            'external_id': task_uuid,
            'file_ext': file_ext,
            'server_flag': server_flag
        }

        params['url'] = url
        if seed_file is not None:
            params['seed_file'] = seed_file

        task = {
            'jsonrpc': "2.0",
            'method': "submit_task",
            'params': params,
            'id': 1
        }

        timer = gevent.Timeout(self.publish_timeout)
        timer.start()
        try:
            with producers[self.connection].acquire(block=True) as producer:
                producer.publish(task,
                                 serializer='json',
                                 compression='bzip2',
                                 exchange=self.exchange,
                                 declare=[self.exchange],
                                 routing_key=self.ROUTING_KEY)
            log_normal(self.logger, {
                'action': 'push-query-broker-ok',
                'info': {
                    'task': task
                }
            },
                       LOG_DEBUG,
                       uuid=task_uuid)
        except Exception as e:
            log_normal(self.logger, {
                'action': 'push-query-broker-error',
                'error': str(e)
            },
                       LOG_ERROR,
                       uuid=task_uuid)
            raise
        finally:
            timer.cancel()
Example #26
        #extract set representation from hold out set
        out, _, _ = dict_model['PointNet'](X_hold.permute(
            0, 2, 1))  #out: batch, 1024
        set_rep = dict_model['FeatureCompression'](
            out)  #set_rep: batch, dim_rep

        #encoding. dim: batch, dim_z
        qm, qv = ut.gaussian_parameters(dict_model['EncoderVAE'](set_rep),
                                        dim=1)
        #sample z
        z = ut.sample_gaussian(qm, qv, device=device)  #batch_size, dim_z
        #z to rep
        rep_m, rep_v = ut.gaussian_parameters(dict_model['LatentToRep'](z))

        log_likelihood = ut.log_normal(set_rep, rep_m, rep_v)  #dim: batch
        lb_1 = log_likelihood.mean()  #scalar

        #KL divergence
        m = z_prior_m.expand(P, dim_z)
        v = z_prior_v.expand(P, dim_z)
        lb_2 = -ut.kl_normal(qm, qv, m, v).mean()  #scalar

        loss = -1 * (lb_1 + lb_2)
        loss.backward()
        optimizer.step()

        #reconstruct and plot
        if i % iter_rec == 0:
            X_rec = dict_model['DecoderAE'](ut.sample_gaussian(
                rep_m, rep_v, device=device)).reshape(P, -1, 3)
Example #27
File: ais.py Project: lxuechen/BDMC
 def normalized_kinetic(v):
     zeros = torch.zeros_like(v)
     return -utils.log_normal(v, zeros, zeros)
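
With a standard-normal momentum, the normalized kinetic energy is just the negative Gaussian log-density of v. Written out without the utils helper (a sketch; assumes v is [batch, dim]):

import math
import torch

def normalized_kinetic(v):
    # -sum_d log N(v_d; 0, 1) = 0.5 * ||v||^2 + 0.5 * dim * log(2*pi), per row
    return 0.5 * (v ** 2).sum(1) + 0.5 * v.size(1) * math.log(2 * math.pi)
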
Example #28

# ==============================================================================
#  Init things
# ==============================================================================
## Init Logger
logger = mylogger.mylogger()
logger_bill = mylogger.mylogger()
logger_info = mylogger.mylogger()
logger.init_logger('gateway', LOG_LEVEL, LOG_FILE, SysLogHandler.LOG_LOCAL1) # For debug
logger_bill.init_logger('gateway-bill', LOG_LEVEL, LOG_FILE, SysLogHandler.LOG_LOCAL0) # For Bill
logger_info.init_logger('gateway-info', LOG_LEVEL, LOG_FILE, SysLogHandler.LOG_LOCAL2) # For Info

log_normal(logger, {
    'action': 'init-logger-ok',
    'info': {
        'level': LOG_LEVEL,
    }
}, LOG_INFO)

IMD_RESULT = wrap_result(DEFAULT_JSONRPC_ID, OVERALL_UNDETECTED_UNCOPYRIGHTED, [])
IMD_RESULT_UNDETECTED = wrap_result(DEFAULT_JSONRPC_ID, OVERALL_ALL_UNDETECTED, [])

## Init APIKEY from file
load_apikey()
## Make task info directory
if not os.path.exists(PICKLE_DIR):
    os.mkdir(PICKLE_DIR)

## Init QueryBroker and mysystem
mw = mysystem(MEDIA_WISE_USER, MEDIA_WISE_PASSWD,
               MEDIA_WISE_URL, MEDIA_WISE_ALL_MATCHES, MEDIA_WISE_REQ_TIMEOUT,
Example #30
 def stop(self):
     utils.log_normal("Closing preview")
     self.removeControl(self.close_button)
     self.closing = True
     self.close()
Example #31
 def normalized_kinetic(v):
   zeros = torch.zeros(B, model.latent_dim).cuda()
   return -utils.log_normal(v, zeros, zeros)
Example #32
 def stop(self):
     utils.log_normal("Closing preview")
     self.removeControl(self.close_button)
     self.extract_mjpeg.stop()
     self.close()
Example #34
    def GET(self):
        """
        Input:
        ======
        GET /query?key=thunder-client&url=http://host/video.mp4&hash=hash-code&f=0 HTTP/1.1
        User-Agent: thunder-windows-client 7.0.x
        ### Update required ###
        X-Thunder-Client: some-pirate-client
        X-Download-Protocol: http
        X-Download-Type: video/mp4

        Steps:
        ======
          * Check params
          * Cheating Case
          * Decode `seed_file`
          * Query mysystem
          * Push query broker if error and has `url`
          * Return result

        """
        # Normal URL File-name and file-size are required!
        # if not self.is_seed and not (self.file_name and self.file_size):
        #     self.error_code = ERROR_PARAMS
        #     self.error_msg = 'Parameter not enough!'
        #     self.error_data.append('X-File-Name')
        #     self.error_data.append('X-File-Size')

        if self.error_code:
            message = wrap_error(self.error_code, self.error_msg, self.error_data)
            log_normal(logger, {
                'action': 'bad-request',
                'info': message
            }, LOG_WARN, uuid=self.uuid)
            raise web.BadRequest(message)

        log_bill(logger_bill, {
            'action': 'get-request',
            'client_id': self.client_id,
            'client_addr': self.client_addr,
            'method': 'GET',
            'hash': self.hash,
            'url': self.url
        }, uuid=self.uuid)

        result = self.check_cheating()
        if result is not None:
            log_normal(logger, {
                'action': 'return-GET-result',
                'tag': 'filter case',
                'client_id' : self.client_id,
                'digest'    : self.digest,
                'user_agent' : self.user_agent, 
                'info': result
            }, LOG_INFO, uuid=self.uuid)
            return result

        overall = None
        if self.hash:
            self.hash = 'thunder_hash#%s' % self.hash
            overall, listing = mw.query(self.hash, self.uuid)

        hash_prefix = 'seed_hash' if self.is_seed else 'url_hash'
        url_hash = '%s#%s' % (hash_prefix, self.digest)
        if overall is None:
            overall, listing = mw.query(url_hash, self.uuid)

        if overall == OVERALL_WORKING:
            web.header('Cache-Control', 'no-cache')
        elif overall is not None:
            web.header('Cache-Control', 'public, max-age=%d' % CACHE_MAX_AGE)
        elif not WORKING_AS_PASS:
            # Two cases: 1. hash from client, 2. hash from gateway.
            web.header('Cache-Control', 'no-cache')
            if self.is_seed:
                web.header('Retry-After', '%d' % RETRY_AFTER)
                web.header('X-Processing-Retry', '%d' % int(PROCESSING_RETRY))
                result = wrap_error(ERROR_NOT_HIT, 'hash not hit.', [])
                log_normal(logger, {
                    'action': 'hash-not-hit',
                    'info': result
                }, LOG_INFO, uuid=self.uuid)
                return result
            else:
                url_data = {
                    'location': self.url,
                    'hash': url_hash,
                }
                qb.push(self.progress,
                        self.client_id,
                        self.client_addr,
                        self.digest,
                        self.algorithm,
                        self.file_name,
                        self.file_size,
                        HOST_NAME,
                        self.referer,
                        self.hash,
                        self.mime_type,
                        self.scheme,
                        self.uuid,
                        self.ext,
                        SERVER_FLAG,
                        url=url_data)
                overall = OVERALL_WORKING

        result = wrap_result(DEFAULT_JSONRPC_ID, overall, listing)
        if overall == OVERALL_WORKING or overall is None:
            if WORKING_AS_PASS:
                result = IMD_RESULT
            else:
                web.header('Retry-After', '%d' % RETRY_AFTER)
                web.header('X-Processing-Retry', '%d' % int(PROCESSING_RETRY))

        tag = 'working_as_pass=%r,overall=%r' % (WORKING_AS_PASS, overall)
        request_interval = (datetime.now()-self.request_time).seconds
        log_normal(logger, {
            'action': 'return-GET-result',
            'client_id' : self.client_id,
            'user_agent' : self.user_agent,
            'digest'     : self.digest,
            'request_interval' : str(request_interval),
            'tag': tag,
            'info': result
        }, LOG_INFO, uuid=self.uuid)
        return result
Example #35
    def __init__(self):
        """
        Headers:
        ========
        Header-Name   Type    Required                Description
        --------------------------------------------------------
        Referer       String  N                       Referential url. Not required if seed-file is given.
        X-Progress    String  N                       The rate of progress of the target download file.
        X-Client-ID   String  Y                       Thunder client id.
        X-File-Name   String  url: Y / seed-file: N   The file name of the target download file (UTF-8 encoded). Not required if seed-file is given.
        X-File-Size   String  url: Y / seed-file: N   The size of the target download file. Not required if seed-file is given.
        X-Mime-Type   String  N                       The mime-type of the target download file. Not required if seed-file is given.
        X-URL         String  Y                       url or seed-file-path.

        GetParams:
        ==========
        Parameter         Type    Required  Description
        --------------------------------------------------------
        key               String  Y         apikey given by mysite.cn; must be kept secret.
        hash              String  N         file_private_id (hash value generated by the thunder client).
        digest            String  Y         The digest of the url or seed-file.
        digest-algorithm  String  Y         Digest algorithm.

        Steps:
        ======
          * Parse query_string
          * Auth apikey
          * Parse HTTP headers
          * Check parameters
          * Parse MIME type

        """

        self.uuid = uuid.uuid1().hex

        env = web.ctx.env

        self.error_code = None
        self.error_msg = ''
        self.error_data = []

        params = web.input()
        self.key       = params.get('key', None)
        self.hash      = params.get('hash', None)
        self.digest    = params.get('digest', None)
        self.algorithm = params.get('digest-algorithm', None)

        if not self.authenticated:
            log_normal(logger, {
                'action': 'request-unauthorized',
                'info': {
                    'message': 'unauthorized....',
                    'apikey': self.key
                }
            }, LOG_INFO, uuid=self.uuid)
            raise Unauthorized(wrap_error(ERROR_APIKEY, 'APIKEY Error', ['key']))

        self.referer     = env.get(HEADER_REFERER, None)
        self.last_query  = env.get(HEADER_LAST_QUERY, None)
        self.cur_query   = env.get(HEADER_CUR_QUERY, None)
        self.progress    = env.get(HEADER_PROGRESS, None)
        self.client_addr = env.get(HEADER_CLIENT_ADDR, None)
        self.client_id   = env.get(HEADER_CLIENT_ID, None)
        self.file_name   = env.get(HEADER_FILE_NAME, None)
        self.file_size   = env.get(HEADER_FILE_SIZE, None)
        self.mime_type   = env.get(HEADER_MIME_TYPE, None)
        self.url         = env.get(HEADER_URL, None)
        self.user_agent  = env.get(HEADER_USER_AGENT, None)
        self.request_time = None
        # PRD Section 3.4 requirement.
        try:
            log_info(logger_info, {
                'action': 'show-request-env',
                'info': {
                    # From: Query String
                    'key'          : self.key,
                    'hash'         : self.hash,
                    'digest'       : self.digest,
                    'algorithm'    : self.algorithm,
                    # From: Headers
                    'referer'      : self.referer,
                    'client_id'    : self.client_id,
                    'client_addr'  : self.client_addr,
                    'progress'     : self.progress,
                    'file_name'    : self.file_name,
                    'file_size'    : self.file_size,
                    'mime_type'    : self.mime_type,
                    'url'          : self.url,
                    'user_agent'   : self.user_agent,
                    'request_time' : datetime.now().strftime('%Y-%m-%d %H:%M:%S')
                }
            }, uuid=self.uuid)
            self.request_time = datetime.now()
        except UnicodeDecodeError:
            self.error_code = ERROR_PARAMS
            self.error_msg = 'Parameter encoding unexpected!'
            self.error_data.extend(['Header:Referer',
                                    'Header:X-File-Name',
                                    'Header:X-URL'])
            message = wrap_error(self.error_code, self.error_msg, self.error_data)
            log_normal(logger, {
                'action': 'bad-request',
                'info': message
            }, LOG_WARN, uuid=self.uuid)
            raise web.BadRequest(message)

        try:
            for k in ('last_query', 'cur_query'):
                field = getattr(self, k)
                if field:
                    setattr(self, k, datetime(*rfc822.parsedate(field)[:-3]))
        except TypeError:
            self.error_code = ERROR_PARAMS
            self.error_msg = 'If-Modified-Since or Date error!'
            self.error_data.append('Header:If-Modified-Since')
            self.error_data.append('Header:Date')

        if self.file_size:
            try:
                int(self.file_size)
            except ValueError:
                self.error_code = ERROR_PARAMS
                self.error_msg = 'X-File-Size is not integer!'
                self.error_data.append('X-File-Size')

        if (not self.client_id
            or not self.digest
            or not self.algorithm):
            # Get parameter or header missing
            self.error_code = ERROR_PARAMS
            self.error_msg = 'Parameter not enough!'

        for field, tag in [
                (self.client_id   , 'X-Client-ID'),
                (self.digest      , 'digest'),
                (self.algorithm   , 'digest-algorithm')]:
            if not field:
                self.error_data.append(tag)

        self.is_seed = not self.url or self.url.strip().startswith("file://")
        self.ext = None
        if self.file_name:
            parts = self.file_name.rsplit('.', 1)
            if len(parts) >= 2:
                self.ext = parts[-1]

        self.scheme = url_scheme(self.url)

        # If config matched transform the digest(hashinfo) to lower case
        if (DIGEST_TRANSFORM
            and self.algorithm not in IGNORE_ALGORITHMS
            and self.digest):
            self.digest = self.digest.lower()
Example #36
0
 def stop(self):
     utils.log_normal("Closing preview")
     self.removeControl(self.close_button)
     self.extract_mjpeg.stop()
     self.close()
Example #37
0
    def GET(self):
        """
        Input:
        ======
        GET /query?key=thunder-client&url=http://host/video.mp4&hash=hash-code&f=0 HTTP/1.1
        User-Agent: thunder-windows-client 7.0.x
        ### Update required ###
        X-Thunder-Client: some-pirate-client
        X-Download-Protocol: http
        X-Download-Type: video/mp4

        Steps:
        ======
          * Check params
          * Cheating Case
          * Decode `seed_file`
          * Query mysystem
          * Push query broker if error and has `url`
          * Return result

        """
        # Normal URL File-name and file-size are required!
        # if not self.is_seed and not (self.file_name and self.file_size):
        #     self.error_code = ERROR_PARAMS
        #     self.error_msg = 'Parameter not enough!'
        #     self.error_data.append('X-File-Name')
        #     self.error_data.append('X-File-Size')

        if self.error_code:
            message = wrap_error(self.error_code, self.error_msg,
                                 self.error_data)
            log_normal(logger, {
                'action': 'bad-request',
                'info': message
            },
                       LOG_WARN,
                       uuid=self.uuid)
            raise web.BadRequest(message)

        log_bill(logger_bill, {
            'action': 'get-request',
            'client_id': self.client_id,
            'client_addr': self.client_addr,
            'method': 'GET',
            'hash': self.hash,
            'url': self.url
        },
                 uuid=self.uuid)

        result = self.check_cheating()
        if result is not None:
            log_normal(logger, {
                'action': 'return-GET-result',
                'tag': 'filter case',
                'client_id': self.client_id,
                'digest': self.digest,
                'user_agent': self.user_agent,
                'info': result
            },
                       LOG_INFO,
                       uuid=self.uuid)
            return result

        overall = None
        if self.hash:
            self.hash = 'thunder_hash#%s' % self.hash
            overall, listing = mw.query(self.hash, self.uuid)

        hash_prefix = 'seed_hash' if self.is_seed else 'url_hash'
        url_hash = '%s#%s' % (hash_prefix, self.digest)
        if overall is None:
            overall, listing = mw.query(url_hash, self.uuid)

        if overall == OVERALL_WORKING:
            web.header('Cache-Control', 'no-cache')
        elif overall is not None:
            web.header('Cache-Control', 'public, max-age=%d' % CACHE_MAX_AGE)
        elif not WORKING_AS_PASS:
            # Two cases: 1. hash from client, 2. hash from gateway.
            web.header('Cache-Control', 'no-cache')
            if self.is_seed:
                web.header('Retry-After', '%d' % RETRY_AFTER)
                web.header('X-Processing-Retry', '%d' % int(PROCESSING_RETRY))
                result = wrap_error(ERROR_NOT_HIT, 'hash not hit.', [])
                log_normal(logger, {
                    'action': 'hash-not-hit',
                    'info': result
                },
                           LOG_INFO,
                           uuid=self.uuid)
                return result
            else:
                url_data = {
                    'location': self.url,
                    'hash': url_hash,
                }
                qb.push(self.progress,
                        self.client_id,
                        self.client_addr,
                        self.digest,
                        self.algorithm,
                        self.file_name,
                        self.file_size,
                        HOST_NAME,
                        self.referer,
                        self.hash,
                        self.mime_type,
                        self.scheme,
                        self.uuid,
                        self.ext,
                        SERVER_FLAG,
                        url=url_data)
                overall = OVERALL_WORKING

        result = wrap_result(DEFAULT_JSONRPC_ID, overall, listing)
        if overall == OVERALL_WORKING or overall is None:
            if WORKING_AS_PASS:
                result = IMD_RESULT
            else:
                web.header('Retry-After', '%d' % RETRY_AFTER)
                web.header('X-Processing-Retry', '%d' % int(PROCESSING_RETRY))

        tag = 'working_as_pass=%r,overall=%r' % (WORKING_AS_PASS, overall)
        request_interval = (datetime.now() - self.request_time).seconds
        log_normal(logger, {
            'action': 'return-GET-result',
            'client_id': self.client_id,
            'user_agent': self.user_agent,
            'digest': self.digest,
            'request_interval': str(request_interval),
            'tag': tag,
            'info': result
        },
                   LOG_INFO,
                   uuid=self.uuid)
        return result
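The handler signals in-progress work with Retry-After and X-Processing-Retry rather than blocking, so a client is expected to poll until the overall status settles. A minimal polling sketch (the endpoint URL, API key, and request values are assumptions; only the server side appears in the source):

import time

import requests

# Hypothetical endpoint and request data; the real values are not in the source.
ENDPOINT = 'http://query.example.cn/'
PARAMS = {'key': 'APIKEY', 'digest': '9f86d0', 'digest-algorithm': 'sha1'}
HEADERS = {'X-Client-ID': 'client-42', 'X-URL': 'http://example.com/file.bin',
           'X-File-Name': 'file.bin', 'X-File-Size': '1048576'}

def poll_until_done(max_tries=10):
    for _ in range(max_tries):
        resp = requests.get(ENDPOINT, params=PARAMS, headers=HEADERS)
        if 'Retry-After' not in resp.headers:
            return resp.json()  # overall status has settled
        time.sleep(int(resp.headers['Retry-After']))
    raise RuntimeError('still processing after %d tries' % max_tries)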
Example #38
0
    def __init__(self):
        """
        Headers:
        ========
        Header-Name   Type    Required                  Description
        ----------------------------------------------------------------
        Referer       String  N                         Referential url. Not required if a seed-file is given.
        X-Progress    String  N                         Progress rate of the target download file.
        X-Client-ID   String  Y                         Thunder client id.
        X-File-Name   String  url -> Y, seed-file -> N  File name of the target download file (UTF-8 encoded).
        X-File-Size   String  url -> Y, seed-file -> N  Size of the target download file.
        X-Mime-Type   String  N                         Mime-type of the target download file. Not required if a seed-file is given.
        X-URL         String  Y                         url or seed-file-path.

        GetParams:
        ==========
        Parameter         Type    Required  Description
        --------------------------------------------------------
        key               String  Y         apikey issued by mysite.cn; keep it secret.
        hash              String  N         file_private_id (hash value generated by the thunder client).
        digest            String  Y         The digest of the url or seed-file.
        digest-algorithm  String  Y         Digest algorithm.

        Steps:
        ======
          * Parse query_string
          * Auth apikey
          * Parse HTTP headers
          * Check parameters
          * Parse MIME type

        """

        self.uuid = uuid.uuid1().hex

        env = web.ctx.env

        self.error_code = None
        self.error_msg = ''
        self.error_data = []

        params = web.input()
        self.key = params.get('key', None)
        self.hash = params.get('hash', None)
        self.digest = params.get('digest', None)
        self.algorithm = params.get('digest-algorithm', None)

        if not self.authenticated:
            log_normal(logger, {
                'action': 'request-unauthorized',
                'info': {
                    'message': 'unauthorized....',
                    'apikey': self.key
                }
            },
                       LOG_INFO,
                       uuid=self.uuid)
            raise Unauthorized(
                wrap_error(ERROR_APIKEY, 'APIKEY Error', ['key']))

        self.referer = env.get(HEADER_REFERER, None)
        self.last_query = env.get(HEADER_LAST_QUERY, None)
        self.cur_query = env.get(HEADER_CUR_QUERY, None)
        self.progress = env.get(HEADER_PROGRESS, None)
        self.client_addr = env.get(HEADER_CLIENT_ADDR, None)
        self.client_id = env.get(HEADER_CLIENT_ID, None)
        self.file_name = env.get(HEADER_FILE_NAME, None)
        self.file_size = env.get(HEADER_FILE_SIZE, None)
        self.mime_type = env.get(HEADER_MIME_TYPE, None)
        self.url = env.get(HEADER_URL, None)
        self.user_agent = env.get(HEADER_USER_AGENT, None)
        self.request_time = None
        # PRD Section 3.4 requirement.
        try:
            log_info(
                logger_info,
                {
                    'action': 'show-request-env',
                    'info': {
                        # From: Query String
                        'key': self.key,
                        'hash': self.hash,
                        'digest': self.digest,
                        'algorithm': self.algorithm,
                        # From: Headers
                        'referer': self.referer,
                        'client_id': self.client_id,
                        'client_addr': self.client_addr,
                        'progress': self.progress,
                        'file_name': self.file_name,
                        'file_size': self.file_size,
                        'mime_type': self.mime_type,
                        'url': self.url,
                        'user_agent': self.user_agent,
                        'request_time': datetime.now().strftime('%Y-%m-%d %H:%M:%S')
                    }
                },
                uuid=self.uuid)
            self.request_time = datetime.now()
        except UnicodeDecodeError:
            self.error_code = ERROR_PARAMS
            self.error_msg = 'Unexpected parameter encoding!'
            self.error_data.extend(
                ['Header:Referer', 'Header:X-File-Name', 'Header:X-URL'])
            message = wrap_error(self.error_code, self.error_msg,
                                 self.error_data)
            log_normal(logger, {
                'action': 'bad-request',
                'info': message
            },
                       LOG_WARN,
                       uuid=self.uuid)
            raise web.BadRequest(message)

        try:
            for k in ('last_query', 'cur_query'):
                field = getattr(self, k)
                if field:
                    setattr(self, k, datetime(*rfc822.parsedate(field)[:-3]))
        except TypeError:
            self.error_code = ERROR_PARAMS
            self.error_msg = 'If-Modified-Since or Date error!'
            self.error_data.append('Header:If-Modified-Since')
            self.error_data.append('Header:Date')

        if self.file_size:
            try:
                int(self.file_size)
            except ValueError:
                self.error_code = ERROR_PARAMS
                self.error_msg = 'X-File-Size is not integer!'
                self.error_data.append('X-File-Size')

        if (not self.client_id or not self.digest or not self.algorithm):
            # Get parameter or header missing
            self.error_code = ERROR_PARAMS
            self.error_msg = 'Parameter not enough!'

        for field, tag in [(self.client_id, 'X-Client-ID'),
                           (self.digest, 'digest'),
                           (self.algorithm, 'digest-algorithm')]:
            if not field:
                self.error_data.append(tag)

        self.is_seed = not self.url or self.url.strip().startswith("file://")
        self.ext = None
        if self.file_name:
            parts = self.file_name.rsplit('.', 1)
            if len(parts) >= 2:
                self.ext = parts[-1]

        self.scheme = url_scheme(self.url)

        # If config matched transform the digest(hashinfo) to lower case
        if (DIGEST_TRANSFORM and self.algorithm not in IGNORE_ALGORITHMS
                and self.digest):
            self.digest = self.digest.lower()
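wrap_error and wrap_result are called throughout these examples but their implementations are not shown. A minimal sketch consistent with the JSON-RPC 2.0 envelope that push builds below (the exact field layout is an assumption):

def wrap_error(code, message, data):
    # JSON-RPC 2.0 style error envelope (field layout assumed, not from source)
    return {
        'jsonrpc': '2.0',
        'error': {'code': code, 'message': message, 'data': data},
        'id': None,
    }

def wrap_result(jsonrpc_id, overall, listing):
    # JSON-RPC 2.0 style result envelope (field layout assumed, not from source)
    return {
        'jsonrpc': '2.0',
        'result': {'overall': overall, 'listing': listing},
        'id': jsonrpc_id,
    }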
Example #39
0
    def push(self,
             progress,
             client_id,
             client_address,
             digest,
             digest_algorithm,
             file_name,
             file_size,
             host_name,
             referer,
             thunder_hash,
             mime_type,
             protocol,
             task_uuid,
             file_ext,
             server_flag,
             url=None, seed_file=None):

        if url is None:
            log_normal(self.logger, {
                'action': 'query-broker-error',
                'error': 'Url required!',
                'info': {
                    'url': url,
                    'seed_file': seed_file
                }
            }, LOG_ERROR, uuid=task_uuid)
            raise QueryBrokerError('url required!')

        if file_size is None:
            file_size = 0
        params = {
            'process': progress,
            'priority': 'low',
            'additional_info': {
                'client_id': client_id,
                'client_address': client_address,
            },
            'digest': digest,
            'digest_algorithm': digest_algorithm,
            'file_name': file_name,
            'file_size': file_size,
            'host_name': host_name,
            'refer': referer,
            'thunder_hash': thunder_hash,
            'mime_type': mime_type,
            'protocol': protocol,
            'external_id': task_uuid,
            'file_ext': file_ext,
            'server_flag': server_flag
        }

        params['url'] = url
        if seed_file is not None:
            params['seed_file'] = seed_file

        task = {
            'jsonrpc': "2.0",
            'method': "submit_task",
            'params': params,
            'id': 1
        }
        
        timer = gevent.Timeout(self.publish_timeout)
        timer.start()
        try:
            with producers[self.connection].acquire(block=True) as producer:
                producer.publish(task,
                                 serializer='json',
                                 compression='bzip2',
                                 exchange=self.exchange,
                                 declare=[self.exchange],
                                 routing_key=self.ROUTING_KEY)
            log_normal(self.logger, {
                'action': 'push-query-broker-ok',
                'info': {'task': task}
            }, LOG_DEBUG, uuid=task_uuid)
        except Exception as e:
            log_normal(self.logger, {
                'action': 'push-query-broker-error',
                'error': str(e)
            }, LOG_ERROR, uuid=task_uuid)
            raise
        finally:
            timer.cancel()
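On the other side of the broker, a worker consumes the submit_task envelopes that push publishes through kombu. A minimal consumer sketch (the broker URL, exchange, queue, and routing key are assumptions; only the producer side appears in the source):

import socket

from kombu import Connection, Exchange, Queue

# Assumed names; the real exchange/routing-key configuration is not shown.
exchange = Exchange('query', type='direct')
queue = Queue('query-tasks', exchange, routing_key='query.task')

def handle_task(body, message):
    # body is the JSON-RPC envelope built by push(); kombu decompresses and
    # JSON-decodes it before invoking the callback
    params = body['params']
    print(body['method'], params['digest'], params.get('url'))
    message.ack()

with Connection('amqp://guest:guest@localhost//') as conn:
    with conn.Consumer(queue, callbacks=[handle_task], accept=['json']):
        try:
            conn.drain_events(timeout=5)  # wait for one message
        except socket.timeout:
            pass  # no message arrived within the window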
Example #40
0
def normalized_kinetic(v):
    zeros = torch.zeros(B, z_size, dtype=batch.dtype, device=batch.device)
    # include the Gaussian log-normalizer instead of using the bare,
    # unnormalized 0.5 * ||v||^2 kinetic energy
    return -log_normal(v, zeros, zeros)
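The snippet is not runnable on its own: B, z_size, and batch come from the enclosing scope, and log_normal is a project helper. A self-contained sketch, assuming log_normal(x, mean, log_var) is the elementwise diagonal-Gaussian log-density used elsewhere in these examples:

import math

import torch

def log_normal(x, mean, log_var):
    # elementwise log N(x; mean, exp(log_var)) -- assumed form of the helper
    return -0.5 * (log_var + math.log(2 * math.pi)
                   + (x - mean) ** 2 * torch.exp(-log_var))

def normalized_kinetic(v):
    # -log N(v; 0, I) summed over dimensions:
    # 0.5 * ||v||^2 + 0.5 * D * log(2*pi) per row
    zeros = torch.zeros_like(v)
    return -log_normal(v, zeros, zeros).sum(1)

v = torch.randn(4, 2)
expected = 0.5 * (v ** 2).sum(1) + 0.5 * v.size(1) * math.log(2 * math.pi)
assert torch.allclose(normalized_kinetic(v), expected)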
Example #41
0
def main():
    """
    MNIST example
    weight norm reparameterized convnet with prior on rescaling parameters
    """

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('--coupling', action='store_true')
    parser.add_argument('--size', default=10000, type=int)
    parser.add_argument('--lrdecay', action='store_true')
    parser.add_argument('--lr0', default=0.1, type=float)
    parser.add_argument('--lbda', default=0.01, type=float)
    parser.add_argument('--bs', default=50, type=int)
    args = parser.parse_args()
    print(args)

    coupling = args.coupling
    lr0 = args.lr0
    lrdecay = args.lrdecay
    lbda = np.cast[floatX](args.lbda)
    bs = args.bs
    size = max(10, min(50000, args.size))
    clip_grad = 5
    max_norm = 10

    # load dataset
    filename = '/data/lisa/data/mnist.pkl.gz'
    train_x, train_y, valid_x, valid_y, test_x, test_y = load_mnist(filename)
    train_x = train_x.reshape(50000, 1, 28, 28)
    valid_x = valid_x.reshape(10000, 1, 28, 28)
    test_x = test_x.reshape(10000, 1, 28, 28)

    input_var = T.tensor4('input_var')
    target_var = T.matrix('target_var')
    dataset_size = T.scalar('dataset_size')
    lr = T.scalar('lr')

    # three stride-2 conv layers: (None, 1, 28, 28) -> (None, 16, 4, 4)
    weight_shapes = [
        (16, 1, 5, 5),   # -> (None, 16, 14, 14)
        (16, 16, 5, 5),  # -> (None, 16,  7,  7)
        (16, 16, 5, 5),  # -> (None, 16,  4,  4)
    ]

    num_params = sum(np.prod(ws) for ws in weight_shapes) + 10
    wd1 = 1

    # stochastic hypernet
    ep = srng.normal(std=0.01, size=(wd1, num_params), dtype=floatX)
    logdets_layers = []
    h_layer = lasagne.layers.InputLayer([None, num_params])

    layer_temp = LinearFlowLayer(h_layer)
    h_layer = IndexLayer(layer_temp, 0)
    logdets_layers.append(IndexLayer(layer_temp, 1))

    if coupling:
        layer_temp = CoupledDenseLayer(h_layer, 200)
        h_layer = IndexLayer(layer_temp, 0)
        logdets_layers.append(IndexLayer(layer_temp, 1))

        h_layer = PermuteLayer(h_layer, num_params)

        layer_temp = CoupledDenseLayer(h_layer, 200)
        h_layer = IndexLayer(layer_temp, 0)
        logdets_layers.append(IndexLayer(layer_temp, 1))

    weights = lasagne.layers.get_output(h_layer, ep)

    # primary net
    t = np.cast['int32'](0)
    layer = lasagne.layers.InputLayer([None, 1, 28, 28])
    inputs = {layer: input_var}
    for ws in weight_shapes:
        num_param = np.prod(ws)
        weight = weights[:, t:t + num_param].reshape(ws)
        num_filters = ws[0]
        filter_size = ws[2]
        stride = 2
        pad = 'same'
        layer = stochasticConv2DLayer([layer, weight], num_filters,
                                      filter_size, stride, pad)
        print(layer.output_shape)
        t += num_param

    w_layer = lasagne.layers.InputLayer((None, 10))
    weight = weights[:, t:t + 10].reshape((wd1, 10))
    inputs[w_layer] = weight
    layer = stochasticDenseLayer2([layer, w_layer],
                                  10,
                                  nonlinearity=nonlinearities.softmax)

    y = T.clip(get_output(layer, inputs), 0.001, 0.999)

    # loss terms
    logdets = sum([get_output(logdet, ep) for logdet in logdets_layers])
    logqw = -(0.5 * (ep ** 2).sum(1)
              + 0.5 * T.log(2 * np.pi) * num_params
              + logdets)
    logpw = log_normal(weights, 0., -T.log(lbda)).sum(1)
    #logpw = log_stdnormal(weights).sum(1)
    kl = (logqw - logpw).mean()
    logpyx = -cc(y, target_var).mean()
    loss = -(logpyx - kl / T.cast(dataset_size, floatX))

    params = lasagne.layers.get_all_params([layer])[1:]  # excluding rand state
    grads = T.grad(loss, params)

    mgrads = lasagne.updates.total_norm_constraint(grads, max_norm=max_norm)
    cgrads = [T.clip(g, -clip_grad, clip_grad) for g in mgrads]
    updates = lasagne.updates.adam(cgrads, params, learning_rate=lr)

    train = theano.function([input_var, target_var, dataset_size, lr],
                            loss,
                            updates=updates)
    predict = theano.function([input_var], y.argmax(1))

    records = train_model(train, predict, train_x[:size], train_y[:size],
                          valid_x, valid_y, lr0, lrdecay, bs)

    output_probs = theano.function([input_var], y)
    MCt = np.zeros((100, 1000, 10))
    MCv = np.zeros((100, 1000, 10))
    for i in range(100):
        MCt[i] = output_probs(train_x[:1000])
        MCv[i] = output_probs(valid_x[:1000])

    tr = np.equal(MCt.mean(0).argmax(-1), train_y[:1000].argmax(-1)).mean()
    va = np.equal(MCv.mean(0).argmax(-1), valid_y[:1000].argmax(-1)).mean()
    print "train perf=", tr
    print "valid perf=", va

    for ii in range(15):
        print(np.round(MCt[ii][0] * 1000))
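The logqw term above is the standard change-of-variables density for the hypernet flow: log q(w) = log q(eps) - sum of the per-layer log-determinants, where log q(eps) is the standard-normal density of the noise. A numpy sketch checking that identity on a toy elementwise linear flow w = a * eps + b (the toy flow and all names are illustrative, not from the source):

import numpy as np

rng = np.random.RandomState(0)
num_params = 5
a = rng.rand(num_params) + 0.5  # keep scales positive and away from zero
b = rng.rand(num_params)
eps = rng.randn(num_params)
w = a * eps + b  # eps ~ N(0, I)  =>  w ~ N(b, diag(a**2))

# change-of-variables form, matching the structure of logqw above
log_q_eps = -0.5 * (eps ** 2).sum() - 0.5 * num_params * np.log(2 * np.pi)
logdet = np.log(np.abs(a)).sum()  # log|det(dw/deps)| for an elementwise map
logqw = log_q_eps - logdet

# the same density evaluated directly on w
direct = (-0.5 * ((w - b) / a) ** 2 - np.log(a)
          - 0.5 * np.log(2 * np.pi)).sum()
assert np.allclose(logqw, direct)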