  # Base serialization (VisibleObject): item class plus tile coordinates.
  def to_str(self, verbose=True):
    string = self.itemclass
    tx, ty = screen_coords_to_tile_coords(self.initial_x, self.initial_y)
    string += " " + str(tx) + " " + str(ty)
    if verbose:
      log_message("Obj converted to string: " + string)
    return string
  # DynamicObject extends the base string with trigger information.
  def to_str(self, verbose=True):
    string = VisibleObject.to_str(self, False)
    if self.trigger is not None:
      string += " " + str(self.max_activations) + " " + self.trigger.trigger_type
    if verbose:
      log_message("Obj converted to string: " + string)
    return string
  # A further subclass appends its attachment direction, when it has one.
  def to_str(self, verbose=True):
    string = DynamicObject.to_str(self, False)
    try:
      string += " " + str_from_dir(self.attached)
    except Exception:
      # Not every object has an attachment direction to serialize.
      pass
    if verbose:
      log_message("Obj converted to string: " + string)
    return string
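These three to_str implementations form an inheritance chain: each subclass calls its parent's to_str with verbose=False and appends its own fields, so the "Obj converted to string" log line is emitted only once, at the outermost call. A minimal, self-contained sketch of the same pattern (the stub log_message and the constructor arguments are illustrative, not the originals):

def log_message(msg):
    print(msg)

class VisibleObject(object):
    def __init__(self, itemclass, tx, ty):
        self.itemclass, self.tx, self.ty = itemclass, tx, ty

    def to_str(self, verbose=True):
        string = "%s %d %d" % (self.itemclass, self.tx, self.ty)
        if verbose:
            log_message("Obj converted to string: " + string)
        return string

class DynamicObject(VisibleObject):
    def __init__(self, itemclass, tx, ty, max_activations):
        VisibleObject.__init__(self, itemclass, tx, ty)
        self.max_activations = max_activations

    def to_str(self, verbose=True):
        # Reuse the parent's fields; log only at this (outer) level.
        string = VisibleObject.to_str(self, False) + " " + str(self.max_activations)
        if verbose:
            log_message("Obj converted to string: " + string)
        return string

print(DynamicObject("spikes", 3, 7, 2).to_str())   # logs once, returns "spikes 3 7 2"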
Example #4
    def do_GET(self):
        soc = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

        (scm, netloc, path, params, query, fragment) = urlparse.urlparse(self.path, 'http')

        # Decode the URL-encoded query string
        query = urllib.unquote(query)
        print "query:%s\n" % query.lower()

        pairs = (scm, netloc, path, params, query, fragment)
        print self.path
        if not netloc:
            netloc = self.headers.get('host', "")

        if scm != 'http' or fragment or not netloc:
            self.send_error(400, "bad url %s" % self.path)
            return
        # SQL injection detection
        ret = SQLFilter().detect(self, pairs)
        if not ret:
            # Count the rejected request for this client IP and blacklist the
            # address after repeated injection attempts.
            ProxyHandler.lock.acquire()
            if ProxyHandler.buffer.get(self.client_address[0]):
                ProxyHandler.buffer[self.client_address[0]] += 1
            else:
                ProxyHandler.buffer[self.client_address[0]] = 0
            if ProxyHandler.buffer[self.client_address[0]] >= 5:
                ProxyHandler.buffer.pop(self.client_address[0])
                ProxyHandler.blacklist.append(self.client_address[0])

            ProxyHandler.lock.release()
            print "\t" "bye"
            soc.close()
            self.connection.close()
            return

        try:
            if self._connect_to(netloc, soc):
                log_message(self, 'connectAccept.db')  # log the accepted connection
                self.log_request()
                soc.send("%s %s %s\r\n" % (self.command,
                                           urlparse.urlunparse(('', '', path, params, query, '')),
                                           self.request_version))
                self.headers['Connection'] = 'close'
                del self.headers['Proxy-Connection']
                for key_val in self.headers.items():
                    soc.send("%s: %s\r\n" % key_val)
                soc.send("\r\n")
                self._read_write(soc)
        finally:
            print "\t" "bye"
            soc.close()
            self.connection.close()
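The blacklist bookkeeping above (a shared lock, a per-client counter, a threshold of 5) can be exercised on its own. A minimal sketch of the same idea with hypothetical module-level names standing in for ProxyHandler.buffer and ProxyHandler.blacklist:

import threading

lock = threading.Lock()
failure_counts = {}   # mirrors ProxyHandler.buffer: rejected requests per client IP
blacklist = []        # mirrors ProxyHandler.blacklist

def record_rejection(ip, threshold=5):
    # Same bookkeeping as above: count the rejection and blacklist the
    # client once the per-IP counter reaches the threshold.
    with lock:
        failure_counts[ip] = failure_counts.get(ip, -1) + 1
        if failure_counts[ip] >= threshold:
            failure_counts.pop(ip)
            blacklist.append(ip)

for _ in range(6):
    record_rejection("10.0.0.1")
print(blacklist)   # ['10.0.0.1'] after the sixth rejected request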
Example #5
def spike_detection_job(DatFileNames, ProbeFileName, output_dir, output_name):
    """
    Top level function that starts a data processing job.
    """
    for DatFileName in DatFileNames:
        if not os.path.exists(DatFileName):
            raise Exception("Dat file %s does not exist" % DatFileName)
    DatFileNames = [os.path.abspath(DatFileName)
                    for DatFileName in DatFileNames]

    probe = probes.Probe(ProbeFileName)

    n_ch_dat = Parameters['NCHANNELS']
    sample_rate = Parameters['SAMPLERATE']
    high_frequency_factor = Parameters['F_HIGH_FACTOR']
    set_globals_samples(sample_rate, high_frequency_factor)
    Parameters['CHUNK_OVERLAP'] = int(
        sample_rate * Parameters['CHUNK_OVERLAP_SECONDS'])

    Parameters['N_CH'] = probe.num_channels

    max_spikes = Parameters['MAX_SPIKES']

    basename = basenamefolder = output_name

    # OutDir = join(output_dir, basenamefolder)
    OutDir = output_dir
    with indir(OutDir):
        # Create a log file
        GlobalVariables['log_fd'] = open(basename + '.log', 'w')

        if Parameters['DEBUG']:
            GlobalVariables['debug_fd'] = open(basename + '.debug', 'w')

        Channels_dat = np.arange(probe.num_channels)
        # Print Parameters dictionary to .log file
        log_message("\n".join(["{0:s} = {1:s}".format(key, str(value))
                    for key, value in sorted(Parameters.iteritems()) if not key.startswith('_')]))
        spike_detection_from_raw_data(basename, DatFileNames, n_ch_dat,
                                      Channels_dat, probe.channel_graph,
                                      probe, max_spikes)

        numwarn = GlobalVariables['warnings']
        if numwarn:
            log_message(
                'WARNINGS ENCOUNTERED: ' + str(numwarn) + ', check log file.')

        # Close the log file at the end.
        if 'log_fd' in GlobalVariables:
            GlobalVariables['log_fd'].close()
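spike_detection_job runs the whole pipeline inside a `with indir(OutDir):` block, but the indir context manager itself is not shown here. A plausible minimal version (an assumption, not the project's actual implementation) would switch the working directory and restore it afterwards:

import os
from contextlib import contextmanager

@contextmanager
def indir(path):
    # Assumed behaviour: create the directory if needed, chdir into it,
    # and restore the previous working directory on exit.
    prev = os.getcwd()
    if not os.path.exists(path):
        os.makedirs(path)
    os.chdir(path)
    try:
        yield
    finally:
        os.chdir(prev)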
Example #6
    def detect(self, owner, data):
        (scm, netloc, path, params, query, fragment) = data
        text = self._ParseData(query)
        # A ':' in the decoded query that is not part of an "http://" URL is rejected outright.
        ret = re.search(':', text)
        if ret:
            if re.search('http://', text) is None:
                return False

        # Reject the request if any known injection pattern matches the query text.
        for item in self.patterns:
            ret = item.search(text)
            if ret is not None:
                log_message(owner, 'connectDeny.db', *(text, item.pattern))
                print "cannot pass pattern:\t%s\n" % item.pattern
                return False
        return True
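The detect method assumes self.patterns holds precompiled regular expressions for known injection fragments. A reduced standalone sketch of the same check (the patterns below are illustrative, not the filter's real rule set):

import re

patterns = [re.compile(p, re.IGNORECASE)
            for p in (r"union\s+select", r"or\s+1\s*=\s*1", r"--")]

def looks_like_injection(query):
    # Return the first matching pattern, or None if the query looks clean.
    for item in patterns:
        if item.search(query):
            return item.pattern
    return None

print(looks_like_injection("id=5 UNION SELECT password FROM users"))  # union\s+select
print(looks_like_injection("id=5"))                                   # None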
  def save(self):
    conffile = codecs.open(data.levelpath(self.level_name), "w", "utf_8")
    string = self.to_str()
    log_message('Level data to save:')
    log_message(string)
    log_message('Saving level to ' + data.levelpath(self.level_name))
    conffile.write(string)
    conffile.close()
    log_message('Level saved.')
    return
def parse_config():
  for world in WORLDS:
    Variables.vdict["unlocked" + world] = 0
    Variables.vdict["hiscore" + world] = 0
    Variables.vdict["besttime" + world] = 0
  Variables.vdict["sound"] = True
  Variables.vdict["dialogue"] = True
  Variables.vdict["verbose"] = False
  Variables.vdict["fullscreen"] = False
  file_path = os.path.join(get_config_path(), "config.txt")
  try:
    conffile = codecs.open(file_path, "r", "utf_8")
    for line in conffile:
      if line.strip() != "":
        values = line.split("\t")

        if values[0] == "unlocked":
          try:
            Variables.vdict["unlocked" + values[1]] = int(values[2])
          except (IndexError, ValueError):
            Variables.vdict["unlocked" + WORLDS[0]] = int(values[1])  # Old-style config file compatibility

        elif values[0] == "hiscore":
          try:
            Variables.vdict["hiscore" + values[1]] = int(values[2])
          except (IndexError, ValueError):
            Variables.vdict["hiscore" + WORLDS[0]] = int(values[1])  # Old-style config file compatibility

        elif values[0] == "besttime":
          Variables.vdict["besttime" + values[1]] = int(values[2])

        elif values[0] == "sound":
          Variables.vdict["sound"] = str_to_bool(values[1])

        elif values[0] == "dialogue":
          Variables.vdict["dialogue"] = str_to_bool(values[1])

        elif values[0] == "fullscreen":
          Variables.vdict["fullscreen"] = str_to_bool(values[1])

  except Exception:
    if write_config():
      log_message("Created configuration file to " + file_path)
  return
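parse_config relies on a str_to_bool helper that is not shown in these examples. A minimal stand-in (an assumption, not the project's actual helper) consistent with the boolean fields above:

def str_to_bool(value):
    # Hypothetical helper: interpret the stored string as a boolean.
    return value.strip().lower() in ("true", "1", "yes")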
Example #9
def parse_config():
    """
    This parses a config file stored in the location given by get_config_path().
    The parsed values are stored in the variables class of the variables module.
    """
    for world in WORLDS:
        variables['unlocked' + world] = 0
        variables['hiscore' + world] = 0
        variables['besttime' + world] = 0
    variables['sound'] = True
    variables['dialogue'] = True
    variables['verbose'] = False
    variables['fullscreen'] = False
    variables['character'] = 0
    
    file_path = os.path.join(get_config_path(), 'config.txt')
    try:
        conffile = codecs.open(file_path, 'r', 'utf_8')
        for line in conffile:
            line = line.strip()
            if line == '':
                continue
            
            values = line.split('\t')
            if values[0] == 'unlocked':
                variables.vdict['unlocked' + values[1]] = int(values[2])
            elif values[0] == 'hiscore':
                variables.vdict['hiscore' + values[1]] = int(values[2])
            elif values[0] == 'besttime':
                variables.vdict['besttime' + values[1]] = int(values[2])
            elif values[0] == 'sound':
                variables.vdict['sound'] = str_to_bool(values[1])
            elif values[0] == 'dialogue':
                variables.vdict['dialogue'] = str_to_bool(values[1])
            elif values[0] == 'fullscreen':
                variables.vdict['fullscreen'] = str_to_bool(values[1])
            elif values[0] == 'character':
                variables.vdict['character'] = int(values[1])
    
    except Exception:
        if write_config():
            log_message('Created configuration file to ' + file_path)
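For reference, a config.txt accepted by the parser above is tab-separated, one setting per line, roughly like the following (values and the world name are illustrative; world names come from the WORLDS list, and fields are separated by single tab characters):

unlocked	beach	3
hiscore	beach	1200
besttime	beach	95
sound	True
dialogue	True
fullscreen	False
character	0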
  def change(self, change):
    """Apply a change to the level data according to a Change class object."""
    if change is None:
      return

    log_message("Made change " + change.tile_change + " to coords " + str(change.coords[0]) + ", " + str(change.coords[1]))

    if change.tile_change == "remove":
      self.remove_tile(change.coords)

    elif change.tile_change == "save":
      self.save()

    elif change.tile_change in ("W", "B", "S"):
      self.remove_tile(change.coords)
      change.coords = (change.coords[0] + FULL_TILES_HOR - TILES_HOR, change.coords[1] + FULL_TILES_VER - TILES_VER)
      self.add_tile(change.tile_change, change.coords)
      self.reset_active_tiles()

    return
Example #11
def extract_spikes(h5s, basename, DatFileNames, n_ch_dat,
                   ChannelsToUse, ChannelGraph,
                   max_spikes=None):
    # some global variables we use
    CHUNK_SIZE = Parameters['CHUNK_SIZE']
    CHUNKS_FOR_THRESH = Parameters['CHUNKS_FOR_THRESH']
    DTYPE = Parameters['DTYPE']
    CHUNK_OVERLAP = Parameters['CHUNK_OVERLAP']
    N_CH = Parameters['N_CH']
    S_JOIN_CC = Parameters['S_JOIN_CC']
    S_BEFORE = Parameters['S_BEFORE']
    S_AFTER = Parameters['S_AFTER']
    THRESH_SD = Parameters['THRESH_SD']
    THRESH_SD_LOWER = Parameters['THRESH_SD_LOWER']

    # filter coefficents for the high pass filtering
    filter_params = get_filter_params()
    print filter_params

    progress_bar = ProgressReporter()
    
    # Writes out a high-pass filtered version of the raw data (.fil file)
    fil_writer = FilWriter(DatFileNames, n_ch_dat)

    # Just use first dat file for getting the thresholding data
    with open(DatFileNames[0], 'rb') as fd:
        # Use the first chunks of data to estimate the detection threshold
        DatChunk = get_chunk_for_thresholding(fd, n_ch_dat, ChannelsToUse,
                                              num_samples(DatFileNames[0],
                                                          n_ch_dat))
        FilteredChunk = apply_filtering(filter_params, DatChunk)
        # get the STD of the beginning of the filtered data
        if Parameters['USE_HILBERT']:
            first_chunks_std = np.std(FilteredChunk)
            print 'first_chunks_std',  first_chunks_std, '\n'
        else:
            if Parameters['USE_SINGLE_THRESHOLD']:
                ThresholdSDFactor = np.median(np.abs(FilteredChunk)) / .6745
            else:
                ThresholdSDFactor = np.median(np.abs(FilteredChunk), axis=0) / .6745
            Threshold = ThresholdSDFactor * THRESH_SD
            print 'Threshold = ', Threshold, '\n'
            Parameters['THRESHOLD'] = Threshold  # Record the absolute Threshold used
            
        
    # set the high and low thresholds
    do_pickle = False
    if Parameters['USE_HILBERT']:
        ThresholdStrong = Parameters['THRESH_STRONG']
        ThresholdWeak = Parameters['THRESH_WEAK']
        do_pickle = True
    elif Parameters['USE_COMPONENT_ALIGNFLOATMASK']:  # to be used with a single threshold only
        ThresholdStrong = Threshold
        ThresholdWeak = ThresholdSDFactor*THRESH_SD_LOWER
        do_pickle = True

    if do_pickle:
        picklefile = open("threshold.p", "wb")
        pickle.dump([ThresholdStrong, ThresholdWeak], picklefile)
        picklefile.close()
        threshold_outputstring = 'Threshold strong = ' + repr(ThresholdStrong) + '\n' + 'Threshold weak = ' + repr(ThresholdWeak)
        log_message(threshold_outputstring)
        
    n_samples = num_samples(DatFileNames, n_ch_dat)
    spike_count = 0
    for (DatChunk, s_start, s_end,
         keep_start, keep_end) in chunks(DatFileNames, n_ch_dat, ChannelsToUse):
        ############## FILTERING ########################################
        FilteredChunk = apply_filtering(filter_params, DatChunk)
        
        # write filtered output to file
        if Parameters['WRITE_FIL_FILE']:
            fil_writer.write(FilteredChunk, s_start, s_end, keep_start, keep_end)

        ############## THRESHOLDING #####################################
        
        
        # NEW: HILBERT TRANSFORM
        if Parameters['USE_HILBERT']:
            FilteredChunkHilbert = np.abs(signal.hilbert(FilteredChunk, axis=0) / first_chunks_std) ** 2
            BinaryChunkWeak = FilteredChunkHilbert > ThresholdWeak
            BinaryChunkStrong = FilteredChunkHilbert > ThresholdStrong
            BinaryChunkWeak = BinaryChunkWeak.astype(np.int8)
            BinaryChunkStrong = BinaryChunkStrong.astype(np.int8)
        else:  # Usual method
            # (FilteredChunk has already been filtered above; no need to filter it again here.)
            if Parameters['USE_COMPONENT_ALIGNFLOATMASK']:
                if Parameters['DETECT_POSITIVE']:
                    BinaryChunkWeak = FilteredChunk > ThresholdWeak
                    BinaryChunkStrong = FilteredChunk > ThresholdStrong
                else:
                    BinaryChunkWeak = FilteredChunk < -ThresholdWeak
                    BinaryChunkStrong = FilteredChunk < -ThresholdStrong
                BinaryChunkWeak = BinaryChunkWeak.astype(np.int8)
                BinaryChunkStrong = BinaryChunkStrong.astype(np.int8)
            else:
                if Parameters['DETECT_POSITIVE']:
                    BinaryChunk = np.abs(FilteredChunk)>Threshold
                else:
                    BinaryChunk = (FilteredChunk<-Threshold)
                BinaryChunk = BinaryChunk.astype(np.int8)

        ############### FLOOD FILL  ######################################
        ChannelGraphToUse = complete_if_none(ChannelGraph, N_CH)
        if (Parameters['USE_HILBERT'] or Parameters['USE_COMPONENT_ALIGNFLOATMASK']):
            if Parameters['USE_OLD_CC_CODE']:
                IndListsChunkOld = connected_components(BinaryChunkWeak,
                            ChannelGraphToUse, S_JOIN_CC)
                # Final list of connected components: keep only the 'weak' components
                # that contain at least one sample exceeding the strong threshold.
                # This method works better than connected_components_twothresholds.
                IndListsChunk = []
                for IndListWeak in IndListsChunkOld:
                    i, j = np.array(IndListWeak).transpose()
                    if sum(BinaryChunkStrong[i, j]) != 0:
                        IndListsChunk.append(IndListWeak)
            else:
                IndListsChunk = connected_components_twothresholds(BinaryChunkWeak, BinaryChunkStrong,
                            ChannelGraphToUse, S_JOIN_CC)
            BinaryChunk = 1 * BinaryChunkWeak + 1 * BinaryChunkStrong
        else:
            IndListsChunk = connected_components(BinaryChunk,
                            ChannelGraphToUse, S_JOIN_CC)
            
        
        if Parameters['DEBUG']:  # TODO: change plot_diagnostics for the HILBERT case
            if Parameters['USE_HILBERT']:
                plot_diagnostics_twothresholds(s_start, IndListsChunk, BinaryChunkWeak, BinaryChunkStrong, BinaryChunk, DatChunk, FilteredChunk, FilteredChunkHilbert, ThresholdStrong, ThresholdWeak)
            elif Parameters['USE_COMPONENT_ALIGNFLOATMASK']:
                plot_diagnostics_twothresholds(s_start, IndListsChunk, BinaryChunkWeak, BinaryChunkStrong, BinaryChunk, DatChunk, FilteredChunk, -FilteredChunk, ThresholdStrong, ThresholdWeak)  # TODO: change the Hilbert argument in plot_diagnostics_twothresholds
            else:
                plot_diagnostics(s_start, IndListsChunk, BinaryChunk, DatChunk, FilteredChunk, Threshold)
        if Parameters['WRITE_BINFIL_FILE']:
            fil_writer.write_bin(BinaryChunk, s_start, s_end, keep_start, keep_end)
        
        ############## ALIGN AND INTERPOLATE WAVES #######################
        nextbits = []
        if Parameters['USE_HILBERT']:
            
            for IndList in IndListsChunk:
                try:
                    wave, s_peak, sf_peak, cm, fcm = extract_wave_hilbert_new(IndList, FilteredChunk,
                                                    FilteredChunkHilbert,
                                                    S_BEFORE, S_AFTER, N_CH,
                                                    s_start, ThresholdStrong, ThresholdWeak)
                    s_offset = s_start + s_peak
                    sf_offset = s_start + sf_peak
                    if keep_start<=s_offset<keep_end:
                        spike_count += 1
                        nextbits.append((wave, s_offset, sf_offset, cm, fcm))
                except np.linalg.LinAlgError:
                    s = '*** WARNING *** Unalignable spike discarded in chunk {chunk}.'.format(
                            chunk=(s_start, s_end))
                    log_warning(s)
                except InterpolationError:
                    s = '*** WARNING *** Interpolation error in chunk {chunk}.'.format(
                            chunk=(s_start, s_end))
                    log_warning(s)
            # and return them in time sorted order
            nextbits.sort(key=lambda (wave, s, s_frac, cm, fcm): s_frac)
            for wave, s, s_frac, cm, fcm in nextbits:
                uwave = get_padded(DatChunk, int(s)-S_BEFORE-s_start,
                                   int(s)+S_AFTER-s_start).astype(np.int32)
                # cm = add_penumbra(cm, ChannelGraphToUse,
                                  # Parameters['PENUMBRA_SIZE'])
                # fcm = get_float_mask(wave, cm, ChannelGraphToUse,
                                     # 1.)
                yield uwave, wave, s, s_frac, cm, fcm
                # unfiltered wave,wave, s_peak, ChMask, FloatChMask
        elif Parameters['USE_COMPONENT_ALIGNFLOATMASK']:
            for IndList in IndListsChunk:
                try:
                    if Parameters['DETECT_POSITIVE']:
                        wave, s_peak, sf_peak, cm, fcm, comp_normalised, comp_normalised_power = extract_wave_twothresholds(IndList, FilteredChunk,
                                                    FilteredChunk,
                                                    S_BEFORE, S_AFTER, N_CH,
                                                    s_start, ThresholdStrong, ThresholdWeak) 
                    else:
                        wave, s_peak, sf_peak, cm, fcm,comp_normalised, comp_normalised_power = extract_wave_twothresholds(IndList, FilteredChunk,
                                                    -FilteredChunk,
                                                    S_BEFORE, S_AFTER, N_CH,
                                                    s_start, ThresholdStrong, ThresholdWeak)
                    s_offset = s_start+s_peak
                    sf_offset = s_start + sf_peak
                    if keep_start<=s_offset<keep_end:
                        spike_count += 1
                        nextbits.append((wave, s_offset, sf_offset, cm, fcm))
                except np.linalg.LinAlgError:
                    s = '*** WARNING *** Unalignable spike discarded in chunk {chunk}.'.format(
                            chunk=(s_start, s_end))
                    log_warning(s)
                except InterpolationError:
                    s = '*** WARNING *** Interpolation error in chunk {chunk}.'.format(
                            chunk=(s_start, s_end))
                    log_warning(s)
            # and return them in time sorted order
            nextbits.sort(key=lambda (wave, s, s_frac, cm, fcm): s_frac)
            for wave, s, s_frac, cm, fcm in nextbits:
                uwave = get_padded(DatChunk, int(s)-S_BEFORE-s_start,
                                   int(s)+S_AFTER-s_start).astype(np.int32)
                # cm = add_penumbra(cm, ChannelGraphToUse,
                                  # Parameters['PENUMBRA_SIZE'])
                # fcm = get_float_mask(wave, cm, ChannelGraphToUse,
                                     # 1.)
                yield uwave, wave, s, s_frac, cm, fcm   
                # unfiltered wave,wave, s_peak, ChMask, FloatChMask
        else:    # Original SpikeDetekt. This code duplication is regrettable but probably easier to deal with.
            
            for IndList in IndListsChunk:
                try:
                    wave, s_peak, sf_peak, cm = extract_wave(IndList, FilteredChunk,
                                                    S_BEFORE, S_AFTER, N_CH,
                                                    s_start,Threshold)
                    s_offset = s_start+s_peak
                    sf_offset = s_start + sf_peak
                    if keep_start<=s_offset<keep_end:
                        spike_count += 1
                        nextbits.append((wave, s_offset, sf_offset, cm))
                except np.linalg.LinAlgError:
                    s = '*** WARNING *** Unalignable spike discarded in chunk {chunk}.'.format(
                            chunk=(s_start, s_end))
                    log_warning(s)
            # and return them in time sorted order
            nextbits.sort(key=lambda (wave, s, s_frac, cm): s_frac)
            for wave, s, s_frac, cm in nextbits:
                uwave = get_padded(DatChunk, int(s)-S_BEFORE-s_start,
                                   int(s)+S_AFTER-s_start).astype(np.int32)
                cm = add_penumbra(cm, ChannelGraphToUse,
                                  Parameters['PENUMBRA_SIZE'])
                fcm = get_float_mask(wave, cm, ChannelGraphToUse,
                                     ThresholdSDFactor)
                yield uwave, wave, s, s_frac, cm, fcm    
                # unfiltered wave,wave, s_peak, ChMask, FloatChMask

        progress_bar.update(float(s_end)/n_samples,
            '%d/%d samples, %d spikes found'%(s_end, n_samples, spike_count))
        if max_spikes is not None and spike_count>=max_spikes:
            break
    
    progress_bar.finish()
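In every branch above, extract_spikes buffers each chunk's detections in nextbits, sorts them by (fractional) peak time, and only then yields them, so the consumer sees spikes in time order within a chunk. A stripped-down, self-contained illustration of that collect-sort-yield pattern (the data and names are illustrative):

def sorted_detections(chunks):
    # Buffer each chunk's detections, sort them by peak time, then yield,
    # mirroring the nextbits.sort(...) step in extract_spikes above.
    for chunk in chunks:
        nextbits = list(chunk)
        nextbits.sort(key=lambda item: item[0])
        for item in nextbits:
            yield item

for s_peak, value in sorted_detections([[(5, 0.2), (1, 0.9)], [(12, 0.4)]]):
    print(s_peak, value)   # prints 1, 5, then 12: time-ordered within each chunk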
  def is_aligned(self):
    aligned = self.rect.right % TILE_DIM == 0 and self.rect.bottom % TILE_DIM == 0
    if not aligned:
      log_message("tilepos " + str(self.rect.right) + " " + str(self.rect.bottom))
    return aligned
Example #13
def exceptions(e):
    tb = traceback.format_exc()
    print tb
    timestamp = time.strftime('[%Y-%m-%d %H:%M:%S]')
    log.log_message('%s Exception caught:\n%s' % (timestamp, tb))
    return make_response(jsonify({'error':'Internal error'}), 500)
Example #14
def after_request(response):
    timestamp = time.strftime('[%Y-%m-%d %H:%M:%S]')
    log.log_message('%s %s %s %s %s %s' % (
        timestamp, request.remote_addr, request.method,
        request.scheme, request.full_path, response.status))
    return response
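Examples #13 and #14 are Flask hooks; they only take effect once registered on the application object. A brief sketch of the usual wiring, assuming the two functions above (and the log helper they call) are in scope:

from flask import Flask

app = Flask(__name__)

# Register Example #13 as the catch-all exception handler
# and Example #14 as a post-request logging hook.
app.register_error_handler(Exception, exceptions)
app.after_request(after_request)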