Example #1
0
    def sync(self, args):
        """Synchronize rtc/repository.yaml with each RTC repository's state.

        Scans every RTC in the package at the current working directory,
        snapshots its repository (url, description, hash) and rewrites
        rtc/repository.yaml, keeping a timestamped backup of the old file.
        """
        options, argv = self.parse_args(args[:], self._print_alternative_rtcs)
        verbose = options.verbose_flag
        sys.stdout.write('# Writing repository.yaml for package distribution\n')

        sys.stdout.write('## Parsing RTC directory\n')
        package = admin.package.get_package_from_path(os.getcwd())
        repos = []
        for rtc in admin.rtc.get_rtcs_from_package(package, verbose=verbose):
            sys.stdout.write('### RTC %s\n' % rtc.rtcprofile.basicInfo.name)
            repo = admin.repository.get_repository_from_path(rtc.path, description=rtc.rtcprofile.basicInfo.description)
            repos.append(repo)

        repo_file = os.path.join(package.get_rtcpath(), 'repository.yaml')

        # Back up the current file under a timestamped name before rewriting.
        bak_file = repo_file + wasanbon.timestampstr()
        if os.path.isfile(bak_file):
            os.remove(bak_file)
        import shutil, yaml
        shutil.copy(repo_file, bak_file)
        # safe_load: repository.yaml is plain data; avoids arbitrary object
        # construction and the PyYAML load-without-Loader warning.
        with open(bak_file, 'r') as bak:
            dic = yaml.safe_load(bak) or {}
        for repo in repos:
            # repo.url may legitimately be None (e.g. local-only repository).
            url = repo.url.strip() if repo.url is not None else ''
            dic[repo.name] = {'repo_name': repo.name, 'git': url, 'description': repo.description, 'hash': repo.hash}

        with open(repo_file, 'w') as out:
            yaml.dump(dic, out, encoding='utf8', allow_unicode=True, default_flow_style=False)
Example #2
0
def exp(inF1, inF2):
    """Write per-gene median expression (Mock vs MERS) to '<inF1>.exp'.

    inF1: gene list understood by Gene().
    inF2: tab-separated expression table with a header line; column 1 is
          the gene name, columns 2-4 are Mock replicates and columns 5-7
          are MERS replicates.
    """
    G = Gene(inF1)
    D = {}
    with open(inF2) as inFile:
        inFile.readline()  # skip header
        for line in inFile:
            fields = line.strip().split('\t')
            gene = fields[1]
            # Median across the three replicates of each condition.
            Mock = np.median([float(fields[2]), float(fields[3]), float(fields[4])])
            MERS = np.median([float(fields[5]), float(fields[6]), float(fields[7])])
            D.setdefault(gene, []).append([Mock, MERS])
    with open(inF1 + '.exp', 'w') as ouFile:
        ouFile.write('Gene\tMock\tMERS\n')
        for g in G:
            if g in D:
                # Only the first record per gene is reported, matching the
                # original behaviour even when duplicates exist.
                ouFile.write(g + '\t' + str(D[g][0][0]) + '\t' + str(D[g][0][1]) + '\n')
Example #3
0
def main():
    """Download images for PTT posts numbered sys.argv[1] .. sys.argv[2]-1.

    For each pickled post, fetch the post page, extract image URLs
    (falling back to a linked blog page when the post has none), save the
    images, and append '<index>:<count>' lines to ../img_num/<start>.txt.
    """
    ptt_dir = '/tmp2/GorsachiusMelanolophus/ptt_posts_new/no_sponsored/'
    imgs_dir = '/tmp2/GorsachiusMelanolophus/ptt_imgs/no_sponsored/'
    start = int(sys.argv[1])
    end = int(sys.argv[2])
    with open('../img_num/' + str(start) + '.txt', 'a') as fp:
        for i in range(start, end):
            try:
                post_path = ptt_dir + str(i) + '.p'
                with open(post_path, 'rb') as pf:
                    post = pickle.load(pf)
                url = ptt_url + post['href']
                webpage = get_webpage(url)
                imgs, blog_url = parse_post(webpage)
                if imgs:
                    print(f'{i}:{len(imgs)}', file=fp)
                    save(imgs, imgs_dir + str(i))
                elif blog_url:
                    # No inline images; try the linked blog page instead.
                    webpage = get_webpage(blog_url)
                    imgs = get_imgs_blog(webpage)
                    if imgs:
                        print(f'{i}:{len(imgs)}', file=fp)
                        save(imgs, imgs_dir + str(i))
            except KeyboardInterrupt:
                return 0
            except Exception as e:
                # Best-effort crawl: log the error and continue.
                print(e)
Example #4
0
    def consolidate_results(self):

        dicts = []
        for file in os.listdir(self.results_directory):
            if file.startswith(self.data_name + '_results_'):
                f1 = open(self.results_directory+ file, 'r')
                my_dict = eval(f1.read())
                dicts.append(my_dict)

        run_nums = [' ']
        run_nums.extend([str(r) for r in range(0,len(dicts))])

        print 'Found ' + str(len(dicts)) + ' result sets'

        full_results_loc = self.results_directory + self.data_name + '_full_results_transpose.csv'

        with open(full_results_loc, 'wb') as ofile:
            writer = csv.writer(ofile, delimiter=',')
            writer.writerow(run_nums)
            for key in dicts[0].iterkeys():
                writer.writerow([key] + [d[key] for d in dicts])

        #this file has all the info - but to bring into pandas we want to transpose the data
        df = pd.read_csv(full_results_loc, index_col=0)
        df2 = df.transpose()
        #save off the results file
        full_results_loc2 = self.results_directory + self.data_name + '_full_results.csv'
        print 'Saving: ' + full_results_loc2
        df2.to_csv(full_results_loc2, delimiter=',')
Example #5
0
def sanitize_open(filename, open_mode):
    """Try to open the given filename, and slightly tweak it if this fails.

    Attempts to open the given filename. If this fails, it tries to change
    the filename slightly, step by step, until it's either able to open it
    or it fails and raises a final exception, like the standard open()
    function.

    It returns the tuple (stream, definitive_file_name).
    """
    try:
        if filename == u'-':
            if sys.platform == 'win32':
                import msvcrt
                msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
            return (sys.stdout.buffer if hasattr(sys.stdout, 'buffer') else sys.stdout, filename)
        stream = open(encodeFilename(filename), open_mode)
        return (stream, filename)
    except (IOError, OSError) as err:
        if err.errno in (errno.EACCES,):
            raise

        # In case of error, try to remove win32 forbidden chars.
        # BUG FIX: os.path.join() takes the parts as separate arguments, so
        # the generator must be unpacked -- passing a bare generator made
        # join() return it unchanged, never producing a sanitized path.
        alt_filename = os.path.join(*(
            re.sub(u'[/<>:"\\|\\\\?\\*]', u'#', path_part)
            for path_part in os.path.split(filename)
        ))
        if alt_filename == filename:
            raise
        else:
            # An exception here should be caught in the caller.
            # BUG FIX: open the sanitized name, not the original filename
            # that just failed.
            stream = open(encodeFilename(alt_filename), open_mode)
            return (stream, alt_filename)
def main():
    """Mask claimant names in the claims files given as positional args.

    Loads the person<->key mapping from claimants.csv (if present), writes
    a '<file>_masked.csv' per input via analyze_file(), then persists the
    possibly-grown mapping back to claimants.csv.
    """
    PROG = os.path.basename(os.path.splitext(__file__)[0])
    description = """Scan claims files"""
    parser = OptionParser(option_class=MultipleOption,
                          usage='usage: %prog claims_file, claims_file, ...',
                          version='%s %s' % (PROG, VERSION),
                          description=description)
    if len(sys.argv) == 1:
        parser.parse_args(['--help'])

    args = parser.parse_args()
    # p2k: person -> key, k2p: key -> person (inverse mapping).
    p2k = {}
    k2p = {}
    try:
        with open('claimants.csv') as csv_file:
            for line in csv.reader(csv_file, dialect="excel"):
                p2k[line[0]] = line[1]
                k2p[line[1]] = line[0]
    except IOError:
        # No existing mapping yet -- start from scratch.
        pass
    # parse_args() returns (options, args); args[1] is the list of
    # positional arguments, i.e. the claims files.
    for filename in args[1]:
        with open(filename+'_masked.csv', 'wb') as cf:
            outfile = csv.writer(cf, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
            analyze_file(filename, outfile, p2k, k2p)
            print len(p2k), len(k2p)
    # Persist the updated mapping back to claimants.csv.
    with open('claimants.csv', 'wb') as cf:
        cout = csv.writer(cf, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
        for p in p2k:
            cout.writerow([p, p2k[p]])
Example #7
0
def test_check_config(tmpdir, registry):
    """Appliance() must raise ApplianceError for every malformed config --
    nonexistent path, empty file, empty JSON object, unsupported
    registry_version -- and accept a known-good appliance file.
    """
    test_path = str(tmpdir / "test.json")

    # Empty file -> invalid.
    with open(test_path, "w+", encoding="utf-8") as f:
        f.write("")

    # Path that does not exist at all.
    with pytest.raises(ApplianceError):
        Appliance(registry, "jkhj")

    with pytest.raises(ApplianceError):
        Appliance(registry, test_path)

    # Valid JSON but empty object -> still invalid.
    with open(test_path, "w+", encoding="utf-8") as f:
        f.write("{}")

    with pytest.raises(ApplianceError):
        Appliance(registry, test_path)

    # Unsupported registry_version -> invalid.
    with pytest.raises(ApplianceError):
        with open(test_path, "w+", encoding="utf-8") as f:
            f.write('{"registry_version": 42}')
        Appliance(registry, test_path)

    # A known-good appliance file must load without raising.
    Appliance(registry, "tests/registry/appliances/microcore-linux.json")
Example #8
0
def PlotFit(g, data, cols=(0, 1)):
    """Fit a straight line to two columns of *data* with gnuplot and plot it.

    g:    gnuplot interface object, called with gnuplot command strings.
    data: sequence of indexable records.
    cols: pair (a, b) of column indices used as x and y.

    Returns the three temporary filenames created (data file plus two
    scratch parameter files); the caller is responsible for removing them.
    """
    fh1, fn1 = tempfile.mkstemp()
    a, b = cols
    os.close(fh1)
    # Dump the selected columns as a tab-separated table for gnuplot.
    with open(fn1, "w") as outfile:
        for d in data:
            outfile.write("%f\t%f\n" % (d[a], d[b]))

    fh2, fn2 = tempfile.mkstemp()
    fh3, fn3 = tempfile.mkstemp()
    os.close(fh2)
    os.close(fh3)
    # Seed parameter file (kept for a gnuplot 'fit/update' workflow).
    with open(fn2, 'w') as params:
        params.write('m=0\nb=0\n')
    g("f%i(x) = m * x + y0" % b)
    g("fit f%i(x) '%s' using 1:2 via y0, m" % (b, fn1))
    g("replot f%i(x)" % (b))

    return [fn1, fn2, fn3]
Example #9
0
def run_example_spark_job(work_dir, timeout=25):
    """Runs a Spark job and checks the result.

    Submits the example pi.py job to the Mesos master via spark-submit,
    waits up to *timeout* seconds for it to finish, and returns True iff
    its stdout contains the expected 'Pi is roughly 3' line.
    """
    print 'Starting Spark job'
    stdout = open(os.path.join(work_dir, 's_stdout.txt'), 'w')
    stderr = open(os.path.join(work_dir, 's_stderr.txt'), 'w')
    # Close the log handles and kill a still-running job at process exit.
    register_exit(lambda: stdout.close())
    register_exit(lambda: stderr.close())

    spark = subprocess.Popen([
        os.path.join(spark_path(), 'bin/spark-submit'),
        '--master', 'mesos://%s' % MESOS_MASTER_CIDR,
        os.path.join(spark_path(), 'examples/src/main/python/pi.py'), '5'],
        stdin=None,
        stdout=stdout,
        stderr=stderr)
    register_exit(lambda: spark.kill() if spark.poll() is None else '')

    # Poll once a second until the job exits or the budget runs out.
    while timeout:
        if spark.poll() is not None:
            break

        time.sleep(1)
        timeout -= 1

    # NOTE(review): if the job finishes on the very last tick, timeout is 0
    # here and the run is reported as a failure -- confirm this is intended.
    if timeout <= 0:
        return False

    with open(os.path.join(work_dir, 's_stdout.txt'), 'r') as f:
        result = f.read()
        return 'Pi is roughly 3' in result
Example #10
0
	def __remove_hotkey(self, command):
		"""Remove the hotkey for 'command' (and 'command' too, of course).

		Rewrites the xbindkeys config file without the command line and
		the hotkey line that immediately follows it.
		Return 'True' on success, 'False' otherwise.
		"""
		self.__touch_config_file()
		commandfound = False
		skipnextline = False
		with open(XBINDKEYS_CONFIG_FILE, "r") as oldfile, \
				open(XBINDKEYS_CONFIG_FILE + ".new", "w") as newfile:
			for line in oldfile:
				if skipnextline:
					# Hotkey line belonging to the removed command --
					# drop it and resume normal copying.
					skipnextline = False
				elif line == '"' + command + '"\n':
					commandfound = True
					skipnextline = True
				else:
					newfile.write(line)
		if commandfound:
			try:
				os.remove(XBINDKEYS_CONFIG_FILE)
			except OSError:
				sessionlog.write("ERROR: 'Hotkeys.__remove_hotkey()' - Cannot replace '" + XBINDKEYS_CONFIG_FILE + "'.")
				os.remove(XBINDKEYS_CONFIG_FILE + ".new")
				return False
			shutil.move(XBINDKEYS_CONFIG_FILE + ".new", XBINDKEYS_CONFIG_FILE)
		else:
			# Nothing removed -- discard the identical copy.
			os.remove(XBINDKEYS_CONFIG_FILE + ".new")
		return True
Example #11
0
def main():
  """Parse TESTING.fb32 and print its immanager/fb configuration blobs.

  File layout (16-bit big-endian sizes): total config size, immanager
  size, immanager blob, fb size, fb blob. Also exercises the FB variable
  container with two sample instances.
  """
  # NOTE(review): ord() on indexed file bytes implies this targets
  # Python 2 (indexing bytes in Python 3 already yields ints).
  fb_name = open('fb_name.txt', 'r')
  fb_namber_to_name = fb_name.readlines()
  fb_name.close()
  fb32 = open('TESTING.fb32', 'rb')
  fb32_b = fb32.read()
  # BUG FIX: the original 'fb32.close' lacked parentheses, so the file
  # was never actually closed.
  fb32.close()
  size_conf = (ord(fb32_b[0])<<8)|(ord(fb32_b[1]))
  size_im_conf = (ord(fb32_b[2])<<8)|(ord(fb32_b[3]))
  im_conf = fb32_b[4:4+size_im_conf]
  size_fb_conf = (ord(fb32_b[4+size_im_conf])<<8)|(ord(fb32_b[5+size_im_conf]))
  fb_conf = fb32_b[6 + 1+ size_im_conf:6 + 1 + size_im_conf + size_fb_conf]
  im_conf_c = str_to_c(im_conf,len(im_conf))
  fb_conf_c = str_to_c(fb_conf,len(fb_conf))
  print('configuration size',size_conf)
  print('immanager size',size_im_conf)
  print('fb size',size_fb_conf)
  print(im_conf_c)
  print(fb_conf_c)
  variable = {}
  fb_runtime = {}
  fb_immanager = {}
  im_one = FB()
  im_one.new_var('ones',0x12,0)
  im_one.new_var('twels',0x2,2)
  im_two = FB()
  im_two.new_var('ones',0x66,0)
  im_two.new_var('twels',0x24,2)
  print(im_two.input_variable,im_two.var_variable,im_two.out_variable)
  print(im_one.input_variable,im_one.var_variable,im_one.out_variable)
Example #12
0
	def __update_hotkey(self, command, hotkey):
		"""Update the hotkey for 'command' to 'hotkey'.

		If 'command' is not found, add it with the new 'hotkey'.
		Return 'True' on success, 'False' otherwise.
		"""
		self.__touch_config_file()
		commandfound = False
		skipnextline = False
		with open(XBINDKEYS_CONFIG_FILE, "r") as oldfile, \
				open(XBINDKEYS_CONFIG_FILE + ".new", "w") as newfile:
			# Copy every line; when the command line is seen, emit the new
			# hotkey right after it and skip the old hotkey line.
			for line in oldfile:
				if not skipnextline:
					newfile.write(line)
				else:
					skipnextline = False
				if line == '"' + command + '"\n':
					newfile.write("  " + hotkey + "\n")  # updated hotkey
					commandfound = True
					skipnextline = True
			if not commandfound:
				# Command not found, append it with its hotkey.
				newfile.write('"' + command + '"\n')
				newfile.write("  " + hotkey + "\n")
		try:
			os.remove(XBINDKEYS_CONFIG_FILE)
		except OSError:
			sessionlog.write("ERROR: 'Hotkeys.__update_hotkey()' - Cannot replace '" + XBINDKEYS_CONFIG_FILE + "'.")
			os.remove(XBINDKEYS_CONFIG_FILE + ".new")
			return False
		shutil.move(XBINDKEYS_CONFIG_FILE + ".new", XBINDKEYS_CONFIG_FILE)
		return True
def main():
    """Back up all nameserver entries of every domain in the INWX account
    to pickle and/or JSON files, as configured by get_nsbackup_files()."""
    api_url, username, password = get_account_data(True)
    backup_files = get_nsbackup_files(True)
    # Only import the serializers that are actually configured.
    if 'pickle_backup_file' in backup_files.keys():
        from pickle import Pickler
    if 'json_backup_file' in backup_files.keys():
        import json
    # Instantiate the inwx class (does not connect yet but dispatches
    # calls to domrobot objects with the correct API URL).
    inwx_conn = domrobot(api_url, username, password, 'en', False)
    # Get the list of all domains.
    domains = inwx_conn.nameserver.list()['domains']
    # Get all the nameserver entries for each domain.
    current, total = 0, len(domains)
    nsentries = dict()
    for domain in domains:
        current += 1
        domain = domain['domain']
        print "%i of %i - Currently backing up %s." % (current, total, domain)
        nsentries[domain] = inwx_conn.nameserver.info({'domain': domain})['record']
    if 'pickle_backup_file' in backup_files.keys():
        Pickler(open(backup_files['pickle_backup_file'],'wb')).dump(nsentries)
        print "Wrote backup file using Python Module Pickle : %s." % backup_files['pickle_backup_file']
    if 'json_backup_file' in backup_files.keys():
        json.dump(nsentries, open(backup_files['json_backup_file'], 'w'))
        print "Wrote backup file using Python Module JSON: %s." % backup_files['json_backup_file']
Example #14
0
File: html.py Project: 89sos98/main
    def handle_finish(self):
        """Dump the search index and the object inventory after the build.

        The search index goes to a temp file first so a failed dump cannot
        clobber the existing index; the inventory lists one line per
        module and per described object ("name type uri").
        """
        self.info(bold('dumping search index... '), nonl=True)
        self.indexer.prune(self.env.all_docs)
        searchindexfn = path.join(self.outdir, self.searchindex_filename)
        # first write to a temporary file, so that if dumping fails,
        # the existing index won't be overwritten
        f = open(searchindexfn + '.tmp', 'wb')
        try:
            self.indexer.dump(f, self.indexer_format)
        finally:
            f.close()
        movefile(searchindexfn + '.tmp', searchindexfn)
        self.info('done')

        self.info(bold('dumping object inventory... '), nonl=True)
        f = open(path.join(self.outdir, INVENTORY_FILENAME), 'w')
        try:
            # Two fixed header lines identify the inventory format/version.
            f.write('# Sphinx inventory version 1\n')
            f.write('# Project: %s\n' % self.config.project.encode('utf-8'))
            f.write('# Version: %s\n' % self.config.version)
            for modname, info in self.env.modules.iteritems():
                f.write('%s mod %s\n' % (modname, self.get_target_uri(info[0])))
            for refname, (docname, desctype) in self.env.descrefs.iteritems():
                f.write('%s %s %s\n' % (refname, desctype,
                                        self.get_target_uri(docname)))
        finally:
            f.close()
        self.info('done')
Example #15
0
def flash_theme(name, mm):
    """Create and register a new note model called *name* in *mm*.

    Loads the front/CSS/back templates that ship next to this file
    (template/front.txt, css.txt, back.txt), builds the field list and a
    single card template, adds the model to the collection and returns it.
    """
    abspath = os.path.abspath(__file__)
    path = os.path.dirname(abspath) + '/template'
    f = path + '/front.txt'
    c = path + '/css.txt'
    b = path + '/back.txt'

    with open(f, 'r') as ft, open(c, 'r') as ct, open(b, 'r') as bt:
        ftemp = ft.read()
        css = ct.read()
        btemp = bt.read()

    m = mm.new(name)

    # Field order matters: it defines the note layout.
    for field_name in ('Note ID', 'Front', 'F Note', 'Back', 'B Note',
                       'class', 'Noty', 'http', 'video'):
        mm.addField(m, mm.newField(field_name))

    m['css'] = css

    t = mm.newTemplate('Card 1')
    t['qfmt'] = ftemp
    t['afmt'] = btemp
    mm.addTemplate(m, t)

    mm.add(m)
    return m
Example #16
0
	def _child_main_loop(self, queue):
		"""Keep the campus network session alive.

		Polls a known-good URL forever; when the check page does not
		answer 'OK', re-submits the captive-portal login form with the
		credentials/fields from config.yaml. Status strings are reported
		through *queue*.
		"""
		while True:
			url = "http://geekhost.net/OK"
			f = urllib.urlopen(url)
			data = f.read()
			abcPattern = re.compile(r'OK')
			if abcPattern.match(data):
				queue.put('Already logined')
			else:
				queue.put('Need login')
				LOGIN_URL = 'https://auth-wlc.ntwk.dendai.ac.jp/login.html'
				# Build the login POST body from the YAML config plus the
				# fixed form fields the portal expects.
				pd = yaml.load(open('config.yaml').read().decode('utf-8'))
				pd['buttonClicked'] = '4'
				pd['redirect_url'] = 'http://google.com/'
				pd["err_flag"] = "0" 
				pd["err_msg"] = ""
				pd["info_flag"] = "0"
				pd["info_msg"] = ""
				params = urllib.urlencode(pd)
				print repr(params)
				up = urllib.urlopen(LOGIN_URL, params)
			# Sleep for the configured interval before checking again.
			time.sleep(yaml.load(open('config.yaml').read().decode('utf-8'))['threadtime'])
	def load_text(self):
		'''
		The text of instances are not stored in the prediction result file,
		so you need to call this function to load texts from testing data.

		>>> from libshorttext.analyzer import *
		>>> insts = InstanceSet('prediction_result_path')
		>>> insts.load_text()

		This method also load the extra svm features if extra svm files
		are used when training.
		'''
		EMPTY_MESSAGE = '**None**'
		# Instances are sorted by idx so a single sequential pass over the
		# zipped data file(s) suffices; i tracks the next instance to fill.
		sorted_insts = sorted(self.insts, key = lambda inst: inst.idx)
		i = 0
		for idx, lines in enumerate(izip(*([open(self.filepath, 'r')] + [open(f, 'r') for f in self.extra_svm_files]))):
			line = lines[0]
			extra_svm_feats = lines[1:]
			nr_extra_svm_feats = len(extra_svm_feats)
			# Past the highest requested index -- nothing left to load.
			if idx > sorted_insts[-1].idx:
				break
			if idx == sorted_insts[i].idx:
				try:
					sorted_insts[i].text = line.split('\t',1)[1].strip()
				except:
					sorted_insts[i].text = EMPTY_MESSAGE

				# Parse "label idx1:val1 idx2:val2 ..." into {int: float};
				# fall back to the sentinel when the line is malformed.
				sorted_insts[i].extra_svm_feats = [None] * nr_extra_svm_feats
				for j, extra_svm_feat in enumerate(extra_svm_feats):
					try:
						sorted_insts[i].extra_svm_feats[j] = dict(map(lambda t: (int(t[0]), float(t[1])), [feat.split(':') for feat in extra_svm_feat.split(None, 1)[1].split()]))
					except:
						sorted_insts[i].extra_svm_feats[j] = EMPTY_MESSAGE
				i += 1
def main():
    """Read names from the input (file or stdin), resolve each one, and
    write serialized results to the output (file or stdout) in the
    format chosen on the command line."""
    options = get_options()
    with open(options.input) if options.input else sys.stdin as in_f, \
            open(options.output, 'w') if options.output else sys.stdout as out_f:

        serializer_cls = get_serializer(options.format)
        if not serializer_cls:
            # BUG FIX: file.write() takes a single string argument; the
            # original passed two and raised a TypeError here.
            sys.stderr.write('Unsupported format %s\n' % options.format)
            return

        if options.human:
            serializer_cls.write_header(out_f)

        for line in in_f:
            name = line.strip()
            if not name:
                continue
            resolved = None
            try:
                resolved = resolve(name, options.single)
            except Exception:
                # Best-effort: unresolved names are serialized as None.
                pass
            if options.parent:
                resolved = get_parents(resolved)
            out_f.write(serializer_cls.serialize_line(resolved, human=options.human, name=name))
def native_report2(src):
	"""Build an HTML index of per-version log counts under *src*.

	For each top-level version directory, counts '*.log' files in each of
	its subdirectories and renders one html_report() section per version;
	the combined page is written to <src>/index.html.
	"""
	page = ""
	for root, versions, ds in os.walk(src):
		if root != src:
			continue  # only inspect the top level of src
		for version in sorted(versions, key=str.lower, reverse=True):
			# 'total' replaces the original local named 'sum', which
			# shadowed the builtin.
			total = 0
			data = {}
			dd = os.path.join(root, version)
			for d_version, dirs, files in os.walk(dd):
				for d in dirs:
					p = os.path.join(d_version, d) + os.sep + "*.log"
					s = len(glob.glob(p))
					total += s
					name = os.path.join(root, d)
					# Display the name relative to src.
					if name.startswith(src):
						name = name[len(src):]
					if name.startswith("/"):
						name = name[1:]
					name = d_version + os.sep + name
					data[name] = s
			page += html_report(data, total, version) + "<br/><br/>"
	with open(os.path.join(src, "index.html"), "w") as out:
		out.write(page)
Example #20
0
def createMatrix(j_range = 0 ,entry = "data"):
	"""Build cluster-membership and intensity matrices from <entry>_collective.txt.

	j_range: number of clusters (rows); must be passed explicitly.
	entry:   file-name prefix for the input and the two output files.
	Writes <entry>_clust_mem_matrix.txt (counts, clusters x 42 fractions)
	and <entry>_pre_intensity_matrix.txt (summed intensities).
	"""
	if j_range == 0:
		print "You need to pass in the number of clusters as an argument to this function."
		sys.exit(1)
	with open(entry+"_collective.txt","r") as fin, open(entry+"_clust_mem_matrix.txt","w") as out_1, open(entry+"_pre_intensity_matrix.txt","w") as out_2:
		clust_matrix = [[0 for i in range(0,42)] for j in range(0,j_range)]
		int_matrix = [[0.0 for i in range(0,42)] for j in range(0,j_range)]
		fin.readline()
		for line in fin:
			line = line.split()
			# Cluster id is the suffix of column 12 ("<...>.<clust>").
			clust = int(line[12].split(".")[1])
			# Fraction index: column 0 holds 11*index (integer division
			# under Python 2).
			f_index = int(line[0])/11	
			clust_matrix[clust][f_index] = clust_matrix[clust][f_index] + 1
			int_matrix[clust][f_index] = int_matrix[clust][f_index] + float(line[10])
		# Header row: tab-separated fraction indices 0..41.
		for i in xrange(0,42):			
			out_1.write("".join(["\t",str(i)]))
			out_2.write("".join(["\t",str(i)]))
		out_1.write("\n")
		out_2.write("\n")
		for i in xrange(0,j_range):
			for j in xrange(0,42):
				if j == 0:
					# Row label goes in the first column.
					out_1.write("".join([entry,"_0_0.",str(i)]))
					out_2.write("".join([entry,"_0_0.",str(i)]))
				out_1.write("".join(["\t",str(clust_matrix[i][j])]))
				out_2.write("".join(["\t",str(int_matrix[i][j])]))
			out_1.write("\n")
			out_2.write("\n")
	return None
def copy_json(input_path, output_path):
    """Re-serialize the JSON document at input_path into output_path.

    The output is pretty-printed with 2-space indentation; it is not a
    byte-for-byte copy of the input.
    """
    # 'src'/'dst' avoid shadowing the builtin input(); plain 'w' truncates
    # the destination, which is all a copy needs ('w+' added useless
    # read access).
    with open(input_path) as src, open(output_path, "w") as dst:
        json.dump(json.load(src), dst, indent=2)
Example #22
0
def contacts_menu(self, args):
    """Interactive contacts shell.

    Loads the address book (creating it if missing), populates the
    contacts collection, dispatches view/add/help/search/remove commands
    until 'exit', then saves the contacts back to disk.
    """
    address = os.path.join(self.homeRoute, 'address_book.tool')
    if not os.path.exists(address):
        address_book_init(address)

    with open(address, 'r+') as cFile:
        contactsList = cFile.read().split('\n')

    for item in contactsList:
        # BUG FIX: the original used 'is not ""' -- an identity test that
        # is not guaranteed to detect empty strings; compare by value.
        if item != "":
            fields = item.split(" : ")
            contacts.add(fields[0], fields[2], fields[3], fields[1])

    quit = False
    commands = {'view': contacts_view, 'add': contacts_add, 'help': help, 'search': contacts_find, 'remove': remove}
    while not quit:
        command = input('(Contacts): ').strip().lower()
        if command == 'exit':
            quit = True
        elif command in commands:
            commands[command](address)
        else:
            print('Error: command "' + command + '" not found.')

    with open(address, 'w') as cFile:
        for item in contacts:
            cFile.write(str(item) + '\n')
Example #23
0
    def setUp(self):
        """Configure fake ES settings and build one AlarmDispatcher per
        operation under test, plus JSON fixtures loaded from disk."""
        self.CONF = self.useFixture(fixture_config.Config()).conf
        self.CONF.set_override('doc_type', 'fake', group='alarms')
        self.CONF.set_override('uri', 'fake_es_uri', group='es_conn')
        super(TestAlarmDispatcher, self).setUp()

        # Separate dispatcher instances so individual tests cannot
        # interfere with each other's state.
        self.dispatcher_get = alarms.AlarmDispatcher({})
        self.dispatcher_get_by_id = alarms.AlarmDispatcher({})
        self.dispatcher_put = alarms.AlarmDispatcher({})
        self.dispatcher_delete = alarms.AlarmDispatcher({})

        # Fixtures are stored pretty-printed; strip the newlines before
        # parsing them back into objects.
        dir_path = os.path.dirname(os.path.realpath(__file__))
        with open(os.path.join(dir_path, 'test_alarms_data')) as f:
            alarms_data_json = f.read().replace('\n', '')
        self.data = json.loads(alarms_data_json)
        with open(os.path.join(dir_path, 'test_get_alarms_data')) as f:
            get_alarms_data = f.read().replace('\n', '')
        self.get_alarms_data = json.loads(get_alarms_data)
Example #24
0
    def test_merge(self):
        """Converting example2.tex with do_merge=True after example1.tex
        must merge both units into one course XML tree."""
        testdir = path(l2emod.__file__).parent / 'testtex'
        with make_temp_directory() as tmdir:
            # First conversion: example1.tex alone.
            fn = testdir / 'example1.tex'
            print "file %s" % fn
            nfn = '%s/%s' % (tmdir, fn.basename())
            os.system('cp %s/* %s' % (testdir, tmdir))
            os.chdir(tmdir)
            l2e = latex2edx(nfn, output_dir=tmdir)
            l2e.convert()

            # Second conversion merges example2.tex into the same output.
            fn = testdir / 'example2.tex'
            print "file %s" % fn
            nfn = '%s/%s' % (tmdir, fn.basename())
            l2e = latex2edx(nfn, output_dir=tmdir, do_merge=True)
            l2e.convert()

            # The merged course must reference both units...
            cfn = path(tmdir) / 'course/2013_Fall.xml'
            self.assertTrue(os.path.exists(cfn))

            self.assertIn('<chapter url_name="Unit_1"', open(cfn).read())
            self.assertIn('<chapter url_name="Unit_2"', open(cfn).read())

            # ...and both chapter files must exist on disk.
            cfn = path(tmdir) / 'chapter/Unit_1.xml'
            self.assertTrue(os.path.exists(cfn))

            cfn = path(tmdir) / 'chapter/Unit_2.xml'
            self.assertTrue(os.path.exists(cfn))
Example #25
0
def compile_js(manifest, config):
	"""Inline the compiled sg.flurry.js into SgFlurryModuleAssets.m.

	Uses the Titanium SDK compiler to wrap the JS file in a function and
	splices the result into the generated Objective-C asset file,
	rewriting it only when the content actually changed.
	"""
	js_file = os.path.join(cwd, 'assets', 'sg.flurry.js')
	if not os.path.exists(js_file):
		return

	sdk = config['TITANIUM_SDK']
	iphone_dir = os.path.join(sdk, 'iphone')
	# The Titanium compiler lives inside the SDK, not on sys.path.
	sys.path.insert(0, iphone_dir)
	from compiler import Compiler

	path = os.path.basename(js_file)
	metadata = Compiler.make_function_from_file(path, js_file)
	method = '  return %s;' % metadata['method']

	f = os.path.join(cwd, 'Classes', 'SgFlurryModuleAssets.m')
	with open(f) as src:
		c = src.read()
	# Keep everything before the 'return ' statement, replace the rest.
	idx = c.find('return ')
	before = c[0:idx]
	after = """
}

@end
	"""
	newc = before + method + after

	# Avoid touching the file (and its mtime) when nothing changed.
	if newc != c:
		with open(f, 'w') as dst:
			dst.write(newc)
Example #26
0
def get_users():
    """Collect reddit usernames into user_list.txt.

    Truncates the list, then samples authors of top posts from random
    subreddits until at least 100 names have been appended, and finally
    hands off to usersList().
    """
    # Each time this runs, clean the user_list.
    with open('user_list.txt', 'w'):
        pass
    
    count = 0

    # Anonymous read-only reddit session identified by user agent.
    r = praw.Reddit('User-Agent: user_list (by /u/XjCrazy09)')
    
    # NOTE(review): nothing here checks whether a user was already
    # scraped, despite the intent -- duplicates will be appended.
    while count < 100:
        submissions = r.get_random_subreddit().get_top(limit=None)
        print "Running..."
        for i in submissions: 
            print i.author.name
            # run a tally
            count+=1 
            with open('user_list.txt', 'a') as output:
                output.write(i.author.name + "\n")
        print "Finished... \n"
        print "count: ", count
        # Be polite to the API between subreddit batches.
        time.sleep(5)
        
    usersList()
Example #27
0
def main():
  """Poll residentadvisor.net's recommended release and prepend it to
  rarecs_log.txt when it differs from the most recent logged entry."""
  # Get historical data from disc.
  history_file = open('rarecs_log.txt', 'r')
  full_history_log = history_file.read()
  latest_historical_set = full_history_log.split('\n')[0] #first line
  history_file.close()

  # Grab latest data from residentadvisor.net/reviews.aspx?format=recommend
  current_artist = rarecommends.recommendedArtist()
  current_work = rarecommends.recommendedWork()
  current_set = current_artist+' -- '+current_work
  
  # Debug 
  print 'latest:  '+latest_historical_set
  print 'current: '+current_set

  # If there's a new set, prepend it and rewrite the whole history file.
  if current_set != latest_historical_set:

    new_log = current_set+'\n'+full_history_log
    updated_history_file = open('rarecs_log.txt', 'w')
    updated_history_file.write(new_log)
    updated_history_file.close()
    print 'file updated!'

  else:

    print 'no updates'
Example #28
0
def page_extract(start, end, SUBSECTION):
    """Split pages [start, end] of PDF_DIR into per-page PDFs, rasterize
    each to PNG, then pair consecutive PNGs front/back into cards.

    start, end: 1-based page numbers (inclusive).
    SUBSECTION: passed through to make_cards().
    """
    PDF_IN = PdfFileReader(open(PDF_DIR, 'rb'))

    for i in range(int(start) - 1, int(end)):

        # One single-page PDF per source page.
        output = PdfFileWriter()
        output.addPage(PDF_IN.getPage(i))
        
        base, name_ext = os.path.split(PDF_DIR)
        name, ext      = os.path.splitext(name_ext)
        # e.g. <TMP_DIR><name>-000003<ext>; zero-padded so listing sorts.
        PDF_OUT        = '{}{}'.format(TMP_DIR, '{}-{}{}'.format(name, str(i).zfill(6), ext))
        
        with open(PDF_OUT, 'wb') as outputStream:
            output.write(outputStream)
        
        # Rasterize with ghostscript, then drop the intermediate PDF.
        gs_pdf_to_png(PDF_OUT)
        os.remove(PDF_OUT)
    
    # Consecutive PNGs form front/back pairs.
    png_list = group(os.listdir(TMP_DIR), 2)
    for tup in png_list:
        print tup
        card_front = os.path.join(TMP_DIR, tup[0])
        card_back  = os.path.join(TMP_DIR, tup[1])
        make_cards(card_front, card_back, SUBSECTION)
Example #29
0
def fastq_filter(in_file, pos_file, neg_file, wanted):
    """FASTQ filter.

    Streams in_file once and routes records by whether their cleaned id
    (first whitespace token of the title) is in *wanted*:
    - both pos_file and neg_file given: write matches and non-matches;
    - only pos_file: write matches;
    - only neg_file: write non-matches.
    """
    from Bio.SeqIO.QualityIO import FastqGeneralIterator
    handle = open(in_file, "r")
    if pos_file is not None and neg_file is not None:
        print "Generating two FASTQ files"
        positive_handle = open(pos_file, "w")
        negative_handle = open(neg_file, "w")
        print in_file
        for title, seq, qual in FastqGeneralIterator(handle):
            # NOTE(review): this per-record print is very noisy on large
            # files -- presumably left over from debugging.
            print("%s --> %s" % (title, clean_name(title.split(None, 1)[0])))
            if clean_name(title.split(None, 1)[0]) in wanted:
                positive_handle.write("@%s\n%s\n+\n%s\n" % (title, seq, qual))
            else:
                negative_handle.write("@%s\n%s\n+\n%s\n" % (title, seq, qual))
        positive_handle.close()
        negative_handle.close()
    elif pos_file is not None:
        print "Generating matching FASTQ file"
        positive_handle = open(pos_file, "w")
        for title, seq, qual in FastqGeneralIterator(handle):
            if clean_name(title.split(None, 1)[0]) in wanted:
                positive_handle.write("@%s\n%s\n+\n%s\n" % (title, seq, qual))
        positive_handle.close()
    elif neg_file is not None:
        print "Generating non-matching FASTQ file"
        negative_handle = open(neg_file, "w")
        for title, seq, qual in FastqGeneralIterator(handle):
            if clean_name(title.split(None, 1)[0]) not in wanted:
                negative_handle.write("@%s\n%s\n+\n%s\n" % (title, seq, qual))
        negative_handle.close()
    handle.close()
Example #30
0
def installFile(config, target, source, overwrite=False):
    """Install *source* at *target*, preserving existing content.

    If *target* does not exist it is created with the source's contents.
    If it exists with different content, it is either overwritten
    (overwrite=True) or the new content is written alongside as
    '<target>.new'. Progress messages print unless config['quiet'].
    """
    with open(source, "rt") as src:
        new_contents = src.read()

    if not os.path.exists(target):
        # Fresh install: nothing to preserve.
        if not config['quiet']:
            print("creating %s" % target)
        with open(target, "wt") as dst:
            dst.write(new_contents)
        return

    with open(target, "rt") as dst:
        old_contents = dst.read()
    if old_contents == new_contents:
        return  # already up to date

    if overwrite:
        if not config['quiet']:
            print("%s has old/modified contents" % target)
            print(" overwriting it with new contents")
        destination = target
    else:
        if not config['quiet']:
            print("%s has old/modified contents" % target)
            print(" writing new contents to %s.new" % target)
        destination = target + ".new"
    with open(destination, "wt") as out:
        out.write(new_contents)