Example No. 1
def fn(U, Uold):
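    # Assemble the full residual vector for one solver step: unpack the
    # current state U and previous state Uold into per-domain fields, then
    # accumulate the concentration (c), electrolyte (u), temperature (T),
    # potential (phie/phis), flux (j), and overpotential (eta) residuals.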

    val = np.zeros(solver.Ntot)
    Mp = solver.Mp
    Np = solver.Np
    Mn = solver.Mn
    Nn = solver.Nn
    Ms = solver.Ms
    Ma = solver.Ma
    Mz = solver.Mz
    cmat_pe, cmat_ne,\
    uvec_pe, uvec_sep, uvec_ne, \
    Tvec_acc, Tvec_pe, Tvec_sep, Tvec_ne, Tvec_zcc, \
    phie_pe, phie_sep, phie_ne, \
    phis_pe, phis_ne, jvec_pe,jvec_ne,eta_pe,eta_ne = unpack(U, Mp, Np, Mn, Nn, Ms, Ma, Mz)

    cmat_old_pe, cmat_old_ne,\
    uvec_old_pe, uvec_old_sep, uvec_old_ne,\
    Tvec_old_acc, Tvec_old_pe, Tvec_old_sep, Tvec_old_ne, Tvec_old_zcc,\
    _, _, \
    _, _,\
    _,_,\
    _,_,_= unpack(Uold,Mp, Np, Mn, Nn, Ms, Ma, Mz)

    val = solver.res_c_fn(val, cmat_pe, jvec_pe, Tvec_pe, cmat_old_pe, cmat_ne,
                          jvec_ne, Tvec_ne, cmat_old_ne)

    val = solver.res_u_pe(val, uvec_pe, Tvec_pe, jvec_pe, uvec_old_pe,
                          uvec_sep, Tvec_sep)
    val = solver.res_u_sep(val, uvec_sep, Tvec_sep, uvec_old_sep, uvec_pe,
                           Tvec_pe, uvec_ne, Tvec_ne)
    val = solver.res_u_ne(val, uvec_ne, Tvec_ne, jvec_ne, uvec_old_ne,
                          uvec_sep, Tvec_sep)

    val = solver.res_T_acc(val, Tvec_acc, Tvec_old_acc, Tvec_pe)
    val = solver.res_T_pe(val, Tvec_pe, uvec_pe, phie_pe, phis_pe, jvec_pe,
                          eta_pe, cmat_pe, Tvec_old_pe, Tvec_acc, Tvec_sep)
    val = solver.res_T_sep(val, Tvec_sep, uvec_sep, phie_sep, Tvec_old_sep,
                           Tvec_pe, Tvec_ne)
    val = solver.res_T_ne(val, Tvec_ne, uvec_ne, phie_ne, phis_ne, jvec_ne,
                          eta_ne, cmat_ne, Tvec_old_ne, Tvec_zcc, Tvec_sep)
    val = solver.res_T_zcc(val, Tvec_zcc, Tvec_old_zcc, Tvec_ne)

    val = solver.res_phie_pe(val, uvec_pe, phie_pe, Tvec_pe, jvec_pe, uvec_sep,
                             phie_sep, Tvec_sep)
    #    val = res_phie_pe_phi(val, uvec_pe, phie_pe, Tvec_pe, phis_pe, uvec_sep,phie_sep, Tvec_sep)
    val = solver.res_phie_sep(val, uvec_sep, phie_sep, Tvec_sep, phie_pe,
                              phie_ne)
    val = solver.res_phie_ne(val, uvec_ne, phie_ne, Tvec_ne, jvec_ne, uvec_sep,
                             phie_sep, Tvec_sep)
    #    val = res_phie_ne_phi(val, uvec_ne, phie_ne, Tvec_ne, phis_ne, uvec_sep, phie_sep, Tvec_sep)

    val = solver.res_phis(val, phis_pe, jvec_pe, phis_ne, jvec_ne)

    val = solver.res_j(val, jvec_pe, uvec_pe, Tvec_pe, eta_pe, cmat_pe,
                       jvec_ne, uvec_ne, Tvec_ne, eta_ne, cmat_ne)
    #    val = res_j_phi(val, jvec_pe, uvec_pe, Tvec_pe, phis_pe, phie_pe, cmat_pe, jvec_ne, uvec_ne, Tvec_ne, phis_ne, phie_ne, cmat_ne)
    val = solver.res_eta(val, eta_pe, phis_pe, phie_pe, Tvec_pe, cmat_pe,
                         eta_ne, phis_ne, phie_ne, Tvec_ne, cmat_ne)
    return val
Example No. 2
def verify_dir():
    # test2: test that a packed dir gets unpacked correctly
    tweet_id = pack(TEST_SRC, memory_uploader)
    root_payload = deserialize(memory_downloader(tweet_id))
    unpack(root_payload, memory_downloader, name_override=TEST_DIR_DEST, recur=True)
    if not are_identical_dirs(TEST_SRC, TEST_DIR_DEST):
        raise RuntimeError('%s is diff than %s, packing test failed' % \
                (TEST_SRC, TEST_DIR_DEST))
    else:
        print 'TEST 2: PASSED'
Example No. 3
def verify_file():
    # test1: test that a packed file gets unpacked correctly
    fn = TEST_SRC + '/foo'
    tweet_id = pack(fn, memory_uploader)
    payload = deserialize(memory_downloader(tweet_id))
    unpack(payload, memory_downloader, name_override=TEST_FILE_DEST , recur=True)
    if not are_identical_dirs(fn, TEST_FILE_DEST):
        raise RuntimeError('%s is diff than %s, packing test failed' % \
                (fn, TEST_FILE_DEST))
    else:
        print 'TEST 1: PASSED'
Example No. 4
def download(tweet_id, downloader, concealer, name_override=False):
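    # Download mode: fetch the chunk list for tweet_id, undo the string
    # escaping, reveal the concealed payload, and recursively unpack the
    # deserialized root node to disk.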
    print 'mode: download'
    l = downloader(tweet_id)
    l = [x.encode('ascii', 'ignore').decode('string_escape') for x in l]
    root = deserialize(concealer.reveal(l))  #.tobytes())
    unpack(root,
           tweet_id,
           downloader,
           concealer,
           name_override=name_override,
           recur=True)
    print 'done'
Example No. 5
def verify_dir():
    # test2: test that a packed dir gets unpacked correctly
    tweet_id = pack(TEST_SRC, memory_uploader)
    root_payload = deserialize(memory_downloader(tweet_id))
    unpack(root_payload,
           memory_downloader,
           name_override=TEST_DIR_DEST,
           recur=True)
    if not are_identical_dirs(TEST_SRC, TEST_DIR_DEST):
        raise RuntimeError('%s is diff than %s, packing test failed' % \
                (TEST_SRC, TEST_DIR_DEST))
    else:
        print 'TEST 2: PASSED'
Example No. 6
def verify_file():
    # test1: test that a packed file gets unpacked correctly
    fn = TEST_SRC + '/foo'
    tweet_id = pack(fn, memory_uploader)
    payload = deserialize(memory_downloader(tweet_id))
    unpack(payload,
           memory_downloader,
           name_override=TEST_FILE_DEST,
           recur=True)
    if not are_identical_dirs(fn, TEST_FILE_DEST):
        raise RuntimeError('%s is diff than %s, packing test failed' % \
                (fn, TEST_FILE_DEST))
    else:
        print 'TEST 1: PASSED'
Example No. 7
def dipatch_client_unix_file(conn, rules):
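    # Serve one client over a Unix socket: read file paths until "!close",
    # unpack archives when possible, scan each file against the compiled
    # rules (the match objects mirror yara-python's interface), and send
    # the list of matches back to the client.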
    f = ""
    while f != "!close":
        try:
            f = conn.recv(1024)
            if f == "!close":
                break
            if os.path.exists(f):
                uf = unpack.unpack(f)
                if uf:
                    f = uf
                matches = []
                for i in rules.match(f):
                    matches.append({
                        "name": i.rule,
                        "namespace": i.namespace,
                        "meta": i.meta,
                        "tags": i.tags
                    })
                conn.send(str(matches))
                if uf:
                    unpack.delete(uf)
            else:
                conn.send("[]")
        except:
            break
    conn.close()
Example No. 8
def getSamairPages():
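	# Scrape proxy pages from samair.ru: the port digits are obfuscated by a
	# packed JavaScript substitution table, so the referenced .js file is
	# downloaded, unpacked, and used to translate each port before writing
	# ip:port lines to proxyfile.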
	global MAX_PAGINAS
	global proxyfile
	i=1
	while i<=MAX_PAGINAS:
		print "["+str(i)+"/30] Downloading Page "+str(i)
		if i < 10:
			resposta = urllib2.urlopen("http://www.samair.ru/proxy/proxy-0"+str(i)+".htm")
		else:
			resposta = urllib2.urlopen("http://www.samair.ru/proxy/proxy-"+str(i)+".htm")
	#	print resposta.read()
		responsa = resposta.read()
		proxies = re.findall(r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}.*?</', responsa)
		js = proxies.pop(0)
		js = js[0:js.index(".js")+3]		
		resposta2 = urllib2.urlopen("http://www.samair.ru/js/"+js)	
		traducao = unpack.unpack(resposta2.read())
		traducao = traducao[0:len(traducao)-1]
		#proxyfile.write("trad: "+traducao)
		for aue in proxies:
			try:	
				dicionariotraducao = dict(item.split("=") for item in traducao.split(";"))
				ip = aue[0:aue.index('<')]
				porta = aue[aue.index('":"')+4:]
				porta = porta[0:len(porta)-3]			
				pattern = re.compile(r'\b(' + '|'.join(dicionariotraducao.keys()) + r')\b')
				result = pattern.sub(lambda x: dicionariotraducao[x.group()], porta)
				ipporta = ip + ":" +str(result).replace("+","")
				proxyfile.write(ipporta+"\n")
			except Exception, e:
				#print e
				continue
		i+=1
Example No. 9
def dipatch_client_unix_file(conn, rules):
	f = ""
	while f != "!close":
		try:
			f = conn.recv(1024)
			if f == "!close":
				break
			if os.path.exists(f):
				uf = unpack.unpack(f)
				if uf:
					f = uf
				matches = []
				for i in rules.match(f):
					matches.append({
							"name": i.rule, "namespace": i.namespace,
							"meta": i.meta, "tags": i.tags
						       })
				conn.send(str(matches))
				if uf:
					unpack.delete(uf)
			else:
				conn.send("[]")
		except:
			break
	conn.close()
Example No. 10
def resolve_vup(url):
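    # Extract a stream URL: grab the eval()-packed JavaScript from the page,
    # unpack it, and pull the first src entry out of the revealed "sources"
    # list.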
    reg = "<script type='text\/javascript'>eval(.*?)\s+<\/script>"
    out = getdatacontent(url, reg)
    out = unpack(out[0])
    reg = 'sources:\s*\[{src:\s*"(?P<url>[^"]+)'
    url = re.compile(reg).findall(out)
    url = url[0]
    return url
Example No. 11
def _start(self, pack_path):
    self.pack_path = pack_path
    unpacker_inst = unpack.unpack(self.pack_path)
    unpacker_inst._find_format()
    unpacker_inst._unpack()
    validator_inst = validate.validate()
    validator_inst._validate_app_data()
    copier_inst = copy_.copy()
    copier_inst._copy()
Example No. 12
def test_unpack():
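    # Build a disposable fixture tree covering nested archives, archives
    # mixed with subfolders, archive-only folders, and archives mixed with
    # plain files, then run unpack over the whole tree.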

    current_dir = os.path.curdir
    shutil.rmtree(os.path.join(current_dir, 'test'), ignore_errors=True)
    test_dir = create_dir(current_dir, 'test')

    # 1. Multiple folder levels
    test_dir_1 = create_dir(test_dir, 'test1')
    sub_1 = create_dir(test_dir_1, 'sub1')

    sub_11 = create_dir(sub_1, 'sub1:1')
    create_archive_files(sub_11, 'archive', 2)

    sub_12 = create_dir(sub_1, 'sub1:2')
    create_archive_files(sub_12, 'archive', 1)
    create_files(sub_12, 'file', 1)
    sub_121 = create_dir(sub_12, 'sub1:2:1')
    create_archive_files(sub_121, 'archive', 2)

    sub_2 = create_dir(test_dir_1, 'sub2')

    # 2. All sub folders
    test_dir_2 = create_dir(test_dir, 'test2')
    for x in range(0, 3):
        test_2_subdir = create_dir(test_dir_2, 'subfolder{}'.format(x))
        create_archive_files(test_2_subdir, 'archive', 3)

    # 3. Mix of archives and sub folders
    test_dir_3 = create_dir(test_dir, 'test3')
    for x in range(0, 3):
        test_3_subdir = create_dir(test_dir_3, 'subfolder{}'.format(x))
        create_archive_files(test_3_subdir, 'archive', 3)
    create_archive_files(test_dir_3, 'archive', 2)

    # 4. All archives
    test_dir_4 = create_dir(test_dir, 'test4')
    create_archive_files(test_dir_4, 'archive', 4)

    # 5. Mix of archives and files
    test_dir_5 = create_dir(test_dir, 'test5')
    create_archive_files(test_dir_5, 'archive', 3)
    create_files(test_dir_5, 'file', 2)

    unpack(os.path.join(os.path.curdir, 'test'), 'test')
Example No. 13
def init():
    if unpack.ispacked():
        unpack.unpack()

    print('')
    FileCredibility.VerifyFiles()

    img.out()  # print SecureDrop logo

    if getNumUsers() == 0:
        print("No users are registered with this client.")
        c = input("Do you want to register a new user (y/n)? ")
        while 'n' != c != 'y' and 'N' != c != 'Y':
            c = input("Do you want to register a new user (y/n)? ")

        if c in ('n', 'N'):
            img.bye()
            leave(False)
        else:
            registerUser()
Example No. 14
def process(path, aligned, args, gpu):
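    # Align one movie with motioncor2: decompress into a temp directory,
    # optionally unpack the raw frames first, run the alignment on the given
    # GPU, then move the aligned and dose-weighted (DW) averages, shift log,
    # and stack back next to the input.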

    t0 = time.time()

    aligned_dw = unpack.label(aligned, 'dw')
    aligned_log = pyfs.rext(aligned, 'shifts')
    aligned_stk = pyfs.rext(aligned, 'mrcs')

    print(path, '->', aligned)
    print(' ' * len(path), '->', aligned_log)

    tmpdir = tempfile.mkdtemp()
    tmp_unzipped = pyfs.join(tmpdir, 'decompressed.mrc')
    tmp_unpacked = pyfs.join(tmpdir, 'unpacked.mrc')
    tmp_aligned = pyfs.join(tmpdir, 'aligned.mrc')
    tmp_aligned_dw = pyfs.join(tmpdir, 'aligned_DW.mrc')
    tmp_logfile = pyfs.join(tmpdir, 'aligned0-Full.log')
    tmp_stkfile = pyfs.join(tmpdir, 'aligned_Stk.mrc')

    tmp_unzipped = pbunzip2(path, tmp_unzipped)

    if args.unpack:
        unpack.unpack(tmp_unzipped,
                      tmp_unpacked,
                      args.defects,
                      args.norm,
                      mode='byte')
        motioncor2(tmp_unpacked, tmp_aligned, args, gpu=gpu)
    else:
        args.mocor2_norm = args.norm
        motioncor2(tmp_unzipped, tmp_aligned, args, gpu=gpu)

    mv(tmp_aligned, aligned)
    mv(tmp_aligned_dw, aligned_dw)
    mv(tmp_logfile, aligned_log)
    mv(tmp_stkfile, aligned_stk)
    shutil.rmtree(tmpdir, False)

    print('aligning: %s took: %.2f secs' % (path, time.time() - t0))
Example No. 15
    def test(self):
        for NAME, TYPE, VERSION in [
            [ "w3vc-1-1", "w3vc", "1" ],
            [ "w3vc-1-2", "w3vc", "1" ],
            [ "c4-1-1", "c4", "1" ],
        ]:
            with open(os.path.join(TESTS, "%s.json" % NAME)) as fin:
                original = json.loads(fin.read())
            with open(os.path.join(TESTS, "%s-packed.txt" % NAME)) as fin:
                uri = fin.read()

            result = unpack(uri, self.resolver)
            self.assertEqual(result, original)
Example No. 16
def process_url(meta, page_url, dry_run=False, auth=None, keep=False,
                subreddit=None, interactive=False):
    logger = logging.getLogger('url')
    logger.info(u'Processing {0}'.format(page_url))

    urlmeta = meta.url(page_url)
    try:
        with TemporaryDir('ektobot', keep=keep) as dname:
            e = Ektoplazm(page_url)
            logger.info(u'tags: ' + u', '.join(e.tags))
            archive = e.download_archive(dname)
            urlmeta.tags = e.tags
            urlmeta.license = e.license.url
            mp3_dir = unpack(archive, dry_run=False, outdir=dname, urlmeta=urlmeta)
            video_dir = os.path.join(dname, 'video')
            videos(mp3_dir, dry_run=False, outdir=video_dir, cover=None)
            (playlist_id, video_ids) = ytupload(video_dir, dry_run=dry_run, secrets_file=auth.yt_secrets)
    except KeyboardInterrupt:
        raise
    except:
        logger.exception(u'Album processing failed')
        urlmeta.youtube.result = 'failed'
    else:
        logger.info(u'Album successfully uploaded')
        urlmeta.youtube.result = 'done'
        urlmeta.youtube.playlist = playlist_id
        urlmeta.youtube.videos = video_ids
    meta.save(dry_run=dry_run)

    if subreddit and urlmeta.youtube.result == 'done':
        try:
            urlmeta.reddit.result = None
            submit_to_reddit(urlmeta, subreddit, auth, interactive=interactive, dry_run=dry_run)
        except Exception:
            logger.exception(u'Failed to submit to reddit')
            if not urlmeta.reddit.result:
                urlmeta.reddit.result = 'failed'
            # TODO: save the exception
        meta.save(dry_run=dry_run)
Example No. 17
input_folder_lbl = tk.Label(root, textvariable=input_folder)
input_folder_lbl.pack()
input_folder_btn = tk.Button(
    root,
    text="Select Input Folder",
    command=lambda: input_folder.set(tk.filedialog.askdirectory()))
input_folder_btn.pack()

output_folder_lbl = tk.Label(root, textvariable=output_folder)
output_folder_lbl.pack()
output_folder_btn = tk.Button(
    root,
    text="Select Output Folder",
    command=lambda: output_folder.set(tk.filedialog.askdirectory()))
output_folder_btn.pack()

unpack_btn = tk.Button(
    root,
    text="Unpack",
    command=lambda: unpack.unpack(
        unpack.UnpackContext(input_folder.get(), output_folder.get(),
                             is_tsl.get(), status_token, root)))

unpack_btn.pack()

status_lbl = tk.Label(root, textvariable=status_token)
status_lbl.pack()

root.mainloop()
Example No. 18
def do_run_pipeline(name,basedir,qsubfile=None,do_field=True):
    '''
    set do_field False for the now obsolete behaviour of downloading
    and imaging a particular observation

    '''
    if qsubfile is None:
        qsubfile='/home/mjh/pipeline-master/ddf-pipeline/torque/pipeline.qsub'

    workdir=basedir+'/'+name
    try:
        os.mkdir(workdir)
    except OSError:
        warn('Working directory already exists')

    report('Downloading data')
    if do_field:
        success=download_field(name,basedir=basedir)
    else:
        success=download_dataset('https://lofar-webdav.grid.sara.nl','/SKSP/'+name+'/',basedir=basedir)

    if not success:
        die('Download failed, see earlier errors',database=False)

    report('Unpacking data')
    try:
        unpack(workdir=workdir)
    except RuntimeError:
        if do_field:
            update_status(name,'Unpack failed',workdir=workdir)
        raise
    if do_field:
        update_status(name,'Unpacked',workdir=workdir)

    report('Deleting tar files')
    os.system('rm '+workdir+'/*.tar.gz')
    os.system('rm '+workdir+'/*.tar')

    averaged=False
    report('Checking structure')
    g=glob.glob(workdir+'/*.ms')
    msl=MSList(None,mss=g)
    dysco=np.any(msl.dysco)
    uobsids=set(msl.obsids)
    for thisobs in uobsids:
        # check one MS with each ID
        for m,ch,o,hc in zip(msl.mss,msl.channels,msl.obsids,msl.hascorrected):
            if o==thisobs:
                if not(hc):
                    print('MS',m,'has no corrected_data column, force use of DATA')
                    averaged=True
                channels=len(ch)
                print('MS',m,'has',channels,'channels')
                if channels>20:
                    update_status(name,'Averaging',workdir=workdir)
                    print('Averaging needed for',thisobs,'!')
                    averaged=True
                    average(wildcard=workdir+'/*'+thisobs+'*')
                    os.system('rm -r '+workdir+'/*'+thisobs+'*pre-cal.ms')
                break
    
    report('Making ms lists')
    success=make_list(workdir=workdir)
    if do_field:
        list_db_update(success,workdir=workdir)
    if not success:
        die('make_list could not construct the MS list',database=False)
        
    report('Creating custom config file from template')
    make_custom_config(name,workdir,do_field,averaged)
    
    # now run the job
    do_run_job(name,basedir=basedir,qsubfile=None,do_field=do_field,dysco=dysco)
Example No. 19
def create_current_cycle_array(self, convert):
    self.current_cycle = []
    self.ingredients = ingredients.Ingredients(unpack.unpack('train.json'), convert)
    self.single_cycle = [convert]
Example No. 20
import unpack
import ingredients
import csv
import pandas
import sys
from sklearn.externals import joblib

row = []
data = unpack.unpack('user_ingredients.json')
selected_ingredients = data['ingredients']

with open('data/train.csv', 'r') as f:
    reader = csv.reader(f)
    names = next(reader)

number_of_ingredients = len(names) - 2

for unique_ingredient in names:
    if unique_ingredient in selected_ingredients:
        row.append(1)
    else:
        row.append(0)

df = pandas.DataFrame([row])
df.to_csv("data/user_ingredients.csv", sep=',', index=False, header=False)

dataset = pandas.read_csv('data/user_ingredients.csv',
                          names=names,
                          low_memory=False)
array = dataset.values
X = array[:, 0:number_of_ingredients]
Example No. 21
import sys
import pandas
from sklearn.linear_model import LogisticRegression
import ingredients
import unpack
from sklearn.externals import joblib

# Default values assigned from looper.py results
convert = 40
limit = 0

# Allows user to override default convert and limit values when generating model
for arg in sys.argv:
    if 'limit=' in arg:
        limit = int(arg.split('=')[1])
    elif 'convert=' in arg:
        convert = int(arg.split('=')[1])

i = ingredients.Ingredients(unpack.unpack('train.json'), convert)
df = i.vectorise(limit)
X = df.drop(columns='cuisine')
y = df['cuisine']

clf = LogisticRegression()
model = clf.fit(X, y)

joblib.dump(model, 'fit_model.pkl')
Example No. 22
def file_hash(fname):
	# hash the file in 8 KB chunks until EOF
	m = md5()
	with open(fname, "rb") as fh:
		for data in iter(lambda: fh.read(8192), b""):
			m.update(data)
	return m.hexdigest()

conn = sqlite3.connect(DB_LOC)
setup_db(conn)

folders = listdir(FEED_DIR)
for f in folders:
	files = listdir(FEED_DIR + f)
	for fname in files:
		if fname.startswith("vipFeed") and fname.split(".")[0].endswith("2012-11-06"):
			fullpath = FEED_DIR + f + "/" + fname
			if has_changed(conn, fullpath):
				flatfiledir = fname.split(".")[0] + "_flatfiles/"
				dt.clear_or_create(flatfiledir)
				dt.clear_or_create(TEMP_DIR)
				unpack.unpack(fullpath, TEMP_DIR)
				unpack.flatten_folder(TEMP_DIR)
				xml_file = dt.file_by_extension(".xml", TEMP_DIR)
				ftff.feed_to_db_files(flatfiledir, xml_file)
				make_archive(fname.split(".")[0] + "_flatfiles", "zip", flatfiledir)
				move(fname.split(".")[0] + "_flatfiles.zip", FEED_DIR + f + "/" + fname.split(".")[0] + "_flatfiles.zip")
				rmtree(TEMP_DIR)
				rmtree(flatfiledir)
				
Example No. 23
import os
import json
import shutil
import numpy as np
import matplotlib.pyplot as plt
import unpack as up  # assumed alias for the unpack module used below


#umifiles = str(raw_input("Please paste the path of you folder full of umi files."))
umifiles = "C:\\Users\\rjf\\Desktop\\PB1_C_2016"

#protoblockname = raw_input("Name of you run:")
#Residential or Commercial 
protoblockname = 'Protoblock 1  2016'


if os.path.exists(umifiles+'\\'+'umijson'):
    shutil.rmtree(umifiles+'\\'+'umijson') 

up.unpack(umifiles)


umijson = umifiles + '\\umijson'


key = umijson+'\\run1.umi'

with open(key) as data_file:
    data = json.load(data_file)

features = data["features"]


bldgs = []
for i in range(0,int(len(features))):
Example No. 24
try:
    os.mkdir(name)
except OSError:
    warn('Working directory already exists')
    pass
os.chdir(name)
report('Downloading data')
if do_field:
    success=download_field(name)
else:
    success=download_dataset('https://lofar-webdav.grid.sara.nl','/SKSP/'+name+'/')

if not success:
    die('Download failed, see earlier errors',database=False)

    
report('Unpacking data')
unpack()
if do_field:
    unpack_db_update()
    
report('Deleting tar files')
os.system('rm *.tar.gz')

report('Making ms lists')
success=make_list()
if do_field:
    list_db_update(success)

if success:
    report('Submit job')
    os.system('pipeline.py /disks/paradata/shimwell/LoTSS-DR2/ongoing-leiden-runs/tier1-DR2.cfg')
    if do_field:
Example No. 25
our_router.bind((local_host, int(router_direct[router_id])))

#Start router protocol loop
#Time counters
i = 1
j = 1
k = 1
link_packet = build_packet(router_id, cost_direct)
time_start = time.time()

while True:
    # Send out heartbeat every 0.05 seconds
    if((time.time() - time_start) / (k*HEARTBEAT_UPDATE_INTERVAL) >= 1):
        heartbeat()
        k += 1

    #Send link packet every second
    if((time.time() - time_start) // (i*UPDATE_INTERVAL) == 1):
        link_packet = build_packet(router_id, cost_direct)
        broadcast(link_packet)
        i += 1

    #Update routes every 30 seconds
    if ((time.time() - time_start) // (ROUTE_UPDATE_INTERVAL*j) == 1):
        output_summary(unpack(packets_received))
        packets_received = []
        already_sent = []
        j+= 1

    packets_received = receive_and_forward(packets_received)
Example No. 26
def jacres_c(U, Uold, peq, neq, sepq, accq, zccq):
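    # Residual and Jacobian blocks for the solid-phase concentration
    # equations: per-particle finite-difference residuals with Neumann
    # boundary rows, plus sparse couplings to the pore-wall flux j and the
    # temperature T, assembled for both electrodes.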
    Mp = peq.M
    Mn = neq.M
    Ms = sepq.M
    Ma = accq.M
    Mz = zccq.M
    Np = peq.N
    Nn = neq.N

    cmat_pe, cmat_ne, \
    uvec_pe, uvec_sep, uvec_ne,\
    Tvec_acc, Tvec_pe, Tvec_sep, Tvec_ne, Tvec_zcc, \
    phie_pe, phie_sep, phie_ne, phis_pe, phis_ne, \
    j_pe,j_ne,eta_pe,eta_ne = unpack(U, Mp, Np, Mn, Nn, Ms, Ma, Mz)

    cmat_old_pe, cmat_old_ne,\
    uvec_old_pe, uvec_old_sep, uvec_old_ne,\
    Tvec_old_acc, Tvec_old_pe, Tvec_old_sep, Tvec_old_ne, Tvec_old_zcc,\
    _, _, \
    _, _,\
    _,_,\
    _,_,_= unpack(Uold,Mp, Np, Mn, Nn, Ms, Ma, Mz)
    """ Positive """
    k = delta_t
    Np = peq.N
    Rp = peq.Rp * (np.linspace(1, N, N) - (1 / 2)) / N
    rp = peq.Rp * (np.linspace(0, N, N + 1)) / N
    lambda1_p = k * r[0:N]**2 / (R**2 * hy**2)
    lambda2_p = k * r[1:N + 1]**2 / (R**2 * hy**2)

    res_cp = onp.zeros([(Np + 2) * Mp, 1])
    for i in range(0, Mp):
        arg_cp = [
            cmat_pe[0:Np, i], cmat_pe[1:Np + 1, i], cmat_pe[2:Np + 2, i],
            cmat_old_pe[1:Np + 1, i], lambda1_p, lambda2_p
        ]
        arg_c0p = [cmat_pe[0, i], cmat_pe[1, i]]
        arg_cMp = [cmat_pe[Np, i], cmat_pe[Np + 1, i], j_pe[i], Tvec_pe[i + 1]]

        res_cp[(i) * (Np + 2)] = peq.bc_zero_neumann(*arg_c0p)
        res_cp[i * (Np + 2) + 1:Np + 1 + i * (Np + 2)] = np.reshape(
            peq.solid_conc(*arg_cp), [Np, 1])
        #        res_cp[i*(Np+2)+1: Np + 1 + i*(Np+2)] = vmap(peq.solid_conc)(*arg_cp)
        res_cp[(Np + 1) + i * (Np + 2)] = peq.bc_neumann_c(*arg_cMp)

    res_cp = np.asarray(res_cp)
    """ Linear Jacobian """
    bc_cp0_grad = np.array([[-1, 1]]).T
    A_cp = vmap(grad(peq.solid_conc_2, range(0, 3)))(*arg_cp)
    bc_cpM_grad = np.array([[-1 / peq.hy, 1 / peq.hy]]).T

    bcp = {"right": bc_cp0_grad, "left": bc_cpM_grad}
    J_cp_sub = build_tridiag_c(Np, A_cp, **bcp)
    Ip = identity(Mp)
    J_cp = kron(Ip, J_cp_sub)
    """ d resc / d j : positive """
    Deff = vmap(coeffs.solidDiffCoeff)(peq.Ds * np.ones([Mp, 1]),
                                       peq.ED * np.ones([Mp, 1]),
                                       Tvec_pe[1:Mp + 1])
    dcj_p = 1 / Deff

    # build the jacobian for j
    row_cjp = onp.zeros(Mp, dtype=int)
    col_cjp = onp.arange(0, Mp)
    for i in range(0, Mp):
        row_cjp[i] = Np + 1 + i * (Np + 2)
    J_cjp = coo_matrix((dcj_p.ravel(), (row_cjp, col_cjp)),
                       shape=(Mp * (Np + 2), Mp))
    """ d resc / d T : positive """
    dcT_p = vmap(grad(peq.bc_neumann_c,
                      3))(cmat_pe[Np, 0:Mp], cmat_pe[Np + 1, 0:Mp], j_pe[0:Mp],
                          Tvec_pe[1:Mp + 1])
    #dcT_ps = grad(coeffs.solidDiffCoeff, 2)(peq.Ds, peq.ED, Tvec_pe[1])

    # build the jacobian for j
    row_cTp = onp.zeros(Mp, dtype=int)
    col_cTp = onp.arange(1, Mp + 1)
    for i in range(0, Mp):
        row_cTp[i] = Np + 1 + i * (Np + 2)
    J_cTp = coo_matrix((dcT_p.ravel(), (row_cTp, col_cTp)),
                       shape=(Mp * (Np + 2) + Mn * (Nn + 2), Mp + 2))
    """ Negative """
    res_cn = onp.zeros([(Nn + 2) * Mn, 1])
    for i in range(0, Mn):
        arg_cn = [
            cmat_ne[0:Nn, i], cmat_ne[1:Nn + 1, i], cmat_ne[2:Nn + 2, i],
            cmat_old_ne[1:Nn + 1, i]
        ]
        arg_c0n = [cmat_ne[0, i], cmat_ne[1, i]]
        arg_cMn = [cmat_ne[Nn, i], cmat_ne[Nn + 1, i], j_ne[i], Tvec_ne[i + 1]]
        res_cn[(i) * (Nn + 2)] = neq.bc_zero_neumann(*arg_c0n)
        res_cn[i * (Nn + 2) + 1:Nn + 1 + i * (Nn + 2)] = np.reshape(
            neq.solid_conc_2(*arg_cn), [Nn, 1])
        res_cn[(Nn + 1) + i * (Nn + 2)] = neq.bc_neumann_c(*arg_cMn)

    res_cn = np.asarray(res_cn)
    """ Linear Jacobian """
    bc_cn0_grad = np.array([[-1, 1]]).T
    A_cn = vmap(grad(neq.solid_conc_2, range(0, 3)))(*arg_cn)
    bc_cnM_grad = np.array([[-1, 1]]).T

    bcn = {"right": bc_cn0_grad, "left": bc_cnM_grad}
    J_cn_sub = build_tridiag_c(Nn, A_cn, **bcn)
    In = identity(Mn)
    J_cn = kron(In, J_cn_sub)
    """ d resc / d j : negative """
    Deffn = vmap(coeffs.solidDiffCoeff)(neq.Ds * np.ones([Mn, 1]),
                                        neq.ED * np.ones([Mn, 1]),
                                        Tvec_ne[1:Mn + 1])
    dcj_n = neq.hy / Deffn

    # build the jacobian for j
    row_cjn = onp.zeros(Mn, dtype=int)
    col_cjn = onp.arange(0, Mn)
    for i in range(0, Mn):
        row_cjn[i] = Nn + 1 + i * (Nn + 2)
    J_cjn = coo_matrix((dcj_n.ravel(), (row_cjn, col_cjn)),
                       shape=(Mn * (Nn + 2), Mn))
    """ d resc / d T : negative """
    dcT_n = vmap(grad(neq.bc_neumann_c,
                      3))(cmat_ne[Nn, 0:Mn], cmat_ne[Nn + 1, 0:Mn], j_ne[0:Mn],
                          Tvec_ne[1:Mn + 1])
    #dcT_ps = grad(coeffs.solidDiffCoeff, 2)(peq.Ds, peq.ED, Tvec_pe[1])

    # build the jacobian for j
    row_cTn = onp.zeros(Mn, dtype=int)
    col_cTn = onp.arange(1, Mn + 1)
    for i in range(0, Mn):
        row_cTn[i] = Nn + 1 + i * (Nn + 2) + Mp * (Np + 2)
    J_cTn = coo_matrix((dcT_n.ravel(), (row_cTn, col_cTn)),
                       shape=(Mn * (Nn + 2) + Mp * (Np + 2), Mn + 2))

    J_cc = block_diag((J_cp, J_cn))
    J_cj = block_diag((J_cjp, J_cjn))
    J_cT = hstack([
        empty_rec(Mp * (Np + 2) + Mn * (Nn + 2), Ma + 2), J_cTp,
        empty_rec(Mp * (Np + 2) + Mn * (Nn + 2), Ms + 2), J_cTn,
        empty_rec(Mp * (Np + 2) + Mn * (Nn + 2), Mz + 2)
    ])

    res_c = np.vstack((res_cp, res_cn))

    J_c = hstack([
        J_cc,
        empty_rec(Mp * (Np + 2) + Mn * (Nn + 2), Mp + 2 + Mn + 2 + Ms + 2),
        J_cT,
        empty_rec(Mp * (Np + 2) + Mn * (Nn + 2), Mp + 2 + Mn + 2 + Ms + 2),
        empty_rec(Mp * (Np + 2) + Mn * (Nn + 2), Mp + 2 + Mn + 2), J_cj,
        empty_rec(Mp * (Np + 2) + Mn * (Nn + 2), Mp + Mn)
    ])

    return res_c, J_c
Example No. 27
def jacres_u(U, Uold, peq, neq, sepq, accq, zccq):
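    # Residual and Jacobian blocks for the electrolyte concentration u:
    # interior equations per region (pe, sep, ne) plus the interface
    # continuity and flux boundary rows, with couplings to T and j.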
    Mp = peq.M
    Mn = neq.M
    Ms = sepq.M
    Ma = accq.M
    Mz = zccq.M
    Np = peq.N
    Nn = neq.N

    cmat_pe, cmat_ne, \
    uvec_pe, uvec_sep, uvec_ne,\
    Tvec_acc, Tvec_pe, Tvec_sep, Tvec_ne, Tvec_zcc, \
    phie_pe, phie_sep, phie_ne, phis_pe, phis_ne, \
    j_pe,j_ne,eta_pe,eta_ne = unpack(U, Mp, Np, Mn, Nn, Ms, Ma, Mz)

    cmat_old_pe, cmat_old_ne,\
    uvec_old_pe, uvec_old_sep, uvec_old_ne,\
    Tvec_old_acc, Tvec_old_pe, Tvec_old_sep, Tvec_old_ne, Tvec_old_zcc,\
    _, _, \
    _, _,\
    _,_,\
    _,_,_= unpack(Uold,Mp, Np, Mn, Nn, Ms, Ma, Mz)

    bc_u0p = peq.bc_zero_neumann(uvec_pe[0], uvec_pe[1])
    res_up = vmap(peq.electrolyte_conc)(uvec_pe[0:Mp], uvec_pe[1:Mp + 1],
                                        uvec_pe[2:Mp + 2], Tvec_pe[0:Mp],
                                        Tvec_pe[1:Mp + 1], Tvec_pe[2:Mp + 2],
                                        j_pe[0:Mp], uvec_old_pe[1:Mp + 1])
    bc_uMp = peq.bc_u_sep_p(uvec_pe[Mp], uvec_pe[Mp + 1], Tvec_pe[Mp],
                            Tvec_pe[Mp + 1], uvec_sep[0], uvec_sep[1],
                            Tvec_sep[0], Tvec_sep[1])

    bc_u0s = peq.bc_inter_cont(uvec_pe[Mp], uvec_pe[Mp + 1], uvec_sep[0],
                               uvec_sep[1])
    res_us = vmap(sepq.electrolyte_conc)(uvec_sep[0:Ms], uvec_sep[1:Ms + 1],
                                         uvec_sep[2:Ms + 2], Tvec_sep[0:Ms],
                                         Tvec_sep[1:Ms + 1],
                                         Tvec_sep[2:Ms + 2],
                                         uvec_old_sep[1:Ms + 1])
    bc_uMs = sepq.bc_u_sep_n(uvec_ne[0], uvec_ne[1], Tvec_ne[0], Tvec_ne[1],
                             uvec_sep[Ms], uvec_sep[Ms + 1], Tvec_sep[Ms],
                             Tvec_sep[Ms + 1])

    bc_u0n = neq.bc_inter_cont(uvec_ne[0], uvec_ne[1], uvec_sep[Ms],
                               uvec_sep[Ms + 1])
    res_un = vmap(neq.electrolyte_conc)(uvec_ne[0:Mn], uvec_ne[1:Mn + 1],
                                        uvec_ne[2:Mn + 2], Tvec_ne[0:Mn],
                                        Tvec_ne[1:Mn + 1], Tvec_ne[2:Mn + 2],
                                        j_ne[0:Mn], uvec_old_ne[1:Mn + 1])
    bc_uMn = neq.bc_zero_neumann(uvec_ne[Mn], uvec_ne[Mn + 1])
    """ positive electrode"""
    arg_up = [
        uvec_pe[0:Mp], uvec_pe[1:Mp + 1], uvec_pe[2:Mp + 2], Tvec_pe[0:Mp],
        Tvec_pe[1:Mp + 1], Tvec_pe[2:Mp + 2], j_pe[0:Mp], uvec_old_pe[1:Mp + 1]
    ]
    arg_uMp = [
        uvec_pe[Mp], uvec_pe[Mp + 1], Tvec_pe[Mp], Tvec_pe[Mp + 1],
        uvec_sep[0], uvec_sep[1], Tvec_sep[0], Tvec_sep[1]
    ]

    A_up = vmap(grad(peq.electrolyte_conc, range(0, len(arg_up) - 1)))(*arg_up)
    bc_u0p_grad = np.array([[-1, 1]]).T
    bc_uMp_grad = ((jax.grad(peq.bc_u_sep_p, range(0,
                                                   len(arg_uMp)))))(*arg_uMp)

    #uu
    bc_uup = {"right": bc_u0p_grad[0:2], "left": bc_uMp_grad[0:2]}
    J_uupp = build_tridiag(Mp, A_up[0:3], **bc_uup)
    # interface boundary
    J_uups = coo_matrix(
        (np.ravel(np.asarray(bc_uMp_grad[4:6])), ([Mp + 1, Mp + 1], [0, 1])),
        shape=(Mp + 2, Ms + 2 + Mn + 2))
    J_uup = hstack([J_uupp, J_uups])

    # uT
    bc_uTp = {"left": bc_uMp_grad[2:4]}
    J_uTpp = build_tridiag(Mp, A_up[3:6], **bc_uTp)
    # interface
    J_uTps = coo_matrix(
        (np.ravel(np.asarray(bc_uMp_grad[6:8])), ([Mp + 1, Mp + 1], [0, 1])),
        shape=(Mp + 2, Ms + 2 + Mn + 2))
    J_uTp = hstack([J_uTpp, J_uTps])

    # uj
    J_ujp = build_diag(Mp, A_up[6], "long")
    """ Separator """
    arg_u0s = [uvec_pe[Mp], uvec_pe[Mp + 1], uvec_sep[0], uvec_sep[1]]
    arg_us = [
        uvec_sep[0:Ms], uvec_sep[1:Ms + 1], uvec_sep[2:Ms + 2], Tvec_sep[0:Ms],
        Tvec_sep[1:Ms + 1], Tvec_sep[2:Ms + 2], uvec_old_sep[1:Ms + 1]
    ]
    arg_uMs = [
        uvec_ne[0], uvec_ne[1], Tvec_ne[0], Tvec_ne[1], uvec_sep[Ms],
        uvec_sep[Ms + 1], Tvec_sep[Ms], Tvec_sep[Ms + 1]
    ]

    A_us = vmap(grad(sepq.electrolyte_conc, range(0,
                                                  len(arg_us) - 1)))(*arg_us)
    bc_u0s_grad = (jax.grad(peq.bc_inter_cont, range(0,
                                                     len(arg_u0s))))(*arg_u0s)
    bc_uMs_grad = (jax.grad(sepq.bc_u_sep_n, range(0, len(arg_uMs))))(*arg_uMs)

    #uu
    bc_uus = {"right": bc_u0s_grad[2:4], "left": bc_uMs_grad[4:6]}
    J_uuss = build_tridiag(Ms, A_us[0:3], **bc_uus)
    # positive sep interface
    J_uusp = coo_matrix(
        (np.ravel(np.asarray(bc_u0s_grad[0:2])), ([0, 0], [Mp, Mp + 1])),
        shape=(Ms + 2, Mp + 2))
    #negative sep interface
    J_uusn = coo_matrix(
        (np.ravel(np.asarray(bc_uMs_grad[0:2])), ([Ms + 1, Ms + 1], [0, 1])),
        shape=(Ms + 2, Mn + 2))
    J_uus = hstack([J_uusp, J_uuss, J_uusn])

    # uT
    bc_uTs = {"left": bc_uMs_grad[6:8]}
    J_uTss = build_tridiag(Ms, A_us[3:6], **bc_uTs)
    #    J_uTsp = coo_matrix((np.ravel(np.asarray(bc_u0s_grad[2:4])),([0,0],[Mp,Mp+1] )),shape=(Ms+2,Mp+2))
    J_uTsp = empty_rec(Ms + 2, Mp + 2)
    J_uTsn = coo_matrix(
        (np.ravel(np.asarray(bc_uMs_grad[2:4])), ([Ms + 1, Ms + 1], [0, 1])),
        shape=(Ms + 2, Mn + 2))
    J_uTs = hstack([J_uTsp, J_uTss, J_uTsn])
    """ negative electrode"""
    arg_un = [
        uvec_ne[0:Mn], uvec_ne[1:Mn + 1], uvec_ne[2:Mn + 2], Tvec_ne[0:Mn],
        Tvec_ne[1:Mn + 1], Tvec_ne[2:Mn + 2], j_ne[0:Mn], uvec_old_ne[1:Mn + 1]
    ]
    arg_u0n = [uvec_ne[0], uvec_ne[1], uvec_sep[Ms], uvec_sep[Ms + 1]]

    A_un = vmap(grad(neq.electrolyte_conc, range(0, len(arg_un) - 1)))(*arg_un)
    bc_u0n_grad = grad(neq.bc_inter_cont, range(0, len(arg_u0n)))(*arg_u0n)
    bc_uMn_grad = np.array([[-1, 1]]).T

    #uu
    bc_uun = {"right": bc_u0n_grad[0:2], "left": bc_uMn_grad[0:2]}
    J_uunn = build_tridiag(Mn, A_un[0:3], **bc_uun)
    J_uuns = coo_matrix((np.ravel(np.asarray(bc_u0n_grad[2:4])),
                         ([0, 0], [Mp + 2 + Ms, Mp + 2 + Ms + 1])),
                        shape=(Mn + 2, Ms + 2 + Mp + 2))
    J_uun = hstack([J_uuns, J_uunn])
    # uT

    J_uTnn = build_tridiag(Mn, A_un[3:6])

    J_uTns = empty_rec(Mn + 2, Ms + 2 + Mp + 2)
    J_uTn = hstack([J_uTns, J_uTnn])

    # uj
    J_ujn = build_diag(Mn, A_un[6], "long")

    res_u = np.hstack((bc_u0p, res_up, bc_uMp, bc_u0s, res_us, bc_uMs, bc_u0n,
                       res_un, bc_uMn))

    J_u = hstack([
        empty_rec(Mp + 2 + Ms + 2 + Mn + 2,
                  Mp * (Np + 2) + Mn * (Nn + 2)),  # c
        vstack([J_uup, J_uus, J_uun]),
        hstack([
            empty_rec(Mp + 2 + Ms + 2 + Mn + 2, Ma + 2),  # acc 
            vstack([J_uTp, J_uTs, J_uTn]),
            empty_rec(Mp + 2 + Ms + 2 + Mn + 2, Mz + 2)  # zcc
        ]),
        empty_rec(Mp + 2 + Ms + 2 + Mn + 2, Mp + 2 + Ms + 2 + Mn + 2),  #phie
        empty_rec(Mp + 2 + Ms + 2 + Mn + 2, Mp + 2 + Mn + 2),  #phis
        vstack([
            hstack([J_ujp, empty_rec(Mp + 2, Mn)]),
            empty_rec(Ms + 2, Mp + Mn),
            hstack([empty_rec(Mn + 2, Mp), J_ujn])
        ]),
        empty_rec(Mp + 2 + Ms + 2 + Mn + 2, Mp + Mn)
    ])

    return res_u, J_u
Example No. 28
def jacres_T(U, Uold, peq, neq, sepq, accq, zccq):
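    # Residual and Jacobian blocks for the temperature T across all five
    # domains (acc, pe, sep, ne, zcc), including couplings to u, phie,
    # phis, j, eta, and the particle surface concentrations.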
    Mp = peq.M
    Mn = neq.M
    Ms = sepq.M
    Ma = accq.M
    Mz = zccq.M
    Np = peq.N
    Nn = neq.N

    cmat_pe, cmat_ne, \
    uvec_pe, uvec_sep, uvec_ne,\
    Tvec_acc, Tvec_pe, Tvec_sep, Tvec_ne, Tvec_zcc, \
    phie_pe, phie_sep, phie_ne, phis_pe, phis_ne, \
    j_pe,j_ne,eta_pe,eta_ne = unpack(U, Mp, Np, Mn, Nn, Ms, Ma, Mz)

    cmat_old_pe, cmat_old_ne,\
    uvec_old_pe, uvec_old_sep, uvec_old_ne,\
    Tvec_old_acc, Tvec_old_pe, Tvec_old_sep, Tvec_old_ne, Tvec_old_zcc,\
    _, _, \
    _, _,\
    _,_,\
    _,_,_= unpack(Uold,Mp, Np, Mn, Nn, Ms, Ma, Mz)
    """ Current Collector a residual """

    bc_T0a = accq.bc_temp_a(Tvec_acc[0], Tvec_acc[1])
    res_Ta = vmap(accq.temperature)(Tvec_acc[0:Ma], Tvec_acc[1:Ma + 1],
                                    Tvec_acc[2:Ma + 2], Tvec_old_acc[1:Ma + 1])
    bc_TMa = peq.bc_temp_ap(Tvec_acc[Ma], Tvec_acc[Ma + 1], Tvec_pe[0],
                            Tvec_pe[1])
    """ jacobian"""
    bc_T0a_grad = grad(accq.bc_temp_a, (0, 1))(Tvec_acc[0], Tvec_acc[1])
    A_Ta = vmap(grad(accq.temperature,
                     range(0, 3)))(Tvec_acc[0:Ma], Tvec_acc[1:Ma + 1],
                                   Tvec_acc[2:Ma + 2], Tvec_old_acc[1:Ma + 1])
    bc_TMa_grad = grad(peq.bc_temp_ap,
                       range(0, 4))(Tvec_acc[Ma], Tvec_acc[Ma + 1], Tvec_pe[0],
                                    Tvec_pe[1])

    bcTa = {"right": bc_T0a_grad[0:2], "left": bc_TMa_grad[0:2]}
    J_TTaa = build_tridiag(Ma, A_Ta[0:3], **bcTa)
    J_TTap = coo_matrix(
        (np.ravel(np.asarray(bc_TMa_grad[2:4])), ([Ma + 1, Ma + 1], [0, 1])),
        shape=(Ma + 2, Mp + 2 + Ms + 2 + Mn + 2 + Mz + 2))
    J_TTa = hstack([J_TTaa, J_TTap])
    """ Positive electrode residual """
    arg_T0p = [Tvec_acc[Ma], Tvec_acc[Ma + 1], Tvec_pe[0], Tvec_pe[1]]
    #arg_T = [uvec_pe[0:Mp], uvec_pe[1:Mp+1], uvec_pe[2:Mp+2], pe.phievec[0:Mp],pe.phievec[2:Mp+2],\
    #         phis_pe[0:Mp], phis_pe[2:Mp+2], Tvec_pe[0:Mp], Tvec_pe[1:Mp+1], Tvec_pe[2:Mp+2], j_pe[0:Mp],\
    #         eta_pe[0:Mp], pe.cs[0:Mp], pe.cmax*np.ones([Mp,1]), Tvec_old_pe[1:Mp+1]]
    arg_Tp = [uvec_pe[0:Mp], uvec_pe[1:Mp+1], uvec_pe[2:Mp+2],
              phie_pe[0:Mp],phie_pe[2:Mp+2],\
             phis_pe[0:Mp], phis_pe[2:Mp+2],
             Tvec_pe[0:Mp], Tvec_pe[1:Mp+1], Tvec_pe[2:Mp+2],
             j_pe[0:Mp],\
             eta_pe[0:Mp],
             cmat_pe[Np,:], cmat_pe[Np+1,:],peq.cmax*np.ones([Mp,1]),
             Tvec_old_pe[1:Mp+1]]
    arg_TMp = [Tvec_pe[Mp], Tvec_pe[Mp + 1], Tvec_sep[0], Tvec_sep[1]]

    bc_T0p = peq.bc_inter_cont(*arg_T0p)
    res_Tp = vmap(peq.temperature)(*arg_Tp)
    bc_TMp = peq.bc_temp_ps(*arg_TMp)

    A_Tp = vmap(grad(peq.temperature, range(0, len(arg_Tp) - 2)))(*arg_Tp)
    bc_T0p_grad = grad(peq.bc_inter_cont, range(0, len(arg_T0p)))(*arg_T0p)
    bc_TMp_grad = grad(peq.bc_temp_ps, range(0, 4))(*arg_TMp)

    bcTp = {"right": bc_T0p_grad[2:4], "left": bc_TMp_grad[0:2]}
    J_TTpp = build_tridiag(Mp, A_Tp[7:10], **bcTp)
    J_TTpa = coo_matrix(
        (np.ravel(np.asarray(bc_T0p_grad[0:2])), ([0, 0], [Ma, Ma + 1])),
        shape=(Mp + 2, Ma + 2))
    J_TTps = coo_matrix(
        (np.ravel(np.asarray(bc_TMp_grad[2:4])), ([Mp + 1, Mp + 1], [0, 1])),
        shape=(Mp + 2, Ms + 2 + Mn + 2 + Mz + 2))
    J_TTp = hstack([J_TTpa, J_TTpp, J_TTps])

    J_Tup = build_tridiag(Mp, A_Tp[0:3])
    J_Tphiep = build_bidiag(Mp, A_Tp[3:5])
    J_Tphisp = build_bidiag(Mp, A_Tp[5:7])
    J_Tjp = build_diag(Mp, A_Tp[10], "long")
    J_Tetap = build_diag(Mp, A_Tp[11], "long")

    col_cp = []
    data_cp = []
    row_cp = np.repeat(Ma + 2 + np.arange(1, Mp + 1), 2)
    for i in range(0, Mp):
        col_cp.append([Np + (Np + 2) * i, Np + 1 + (Np + 2) * (i)])
        data_cp.append([A_Tp[12][i], A_Tp[13][i]])
    data_cp = np.ravel(np.array(data_cp))
    col_cp = np.ravel(np.array(col_cp))
    J_cp = coo_matrix((data_cp, (row_cp, col_cp)),
                      shape=(Ma + 2 + Mp + 2, Mp * (Np + 2) + Mn * (Nn + 2)))
    """ Separator residual """

    arg_T0s = [Tvec_pe[Mp], Tvec_pe[Mp + 1], Tvec_sep[0], Tvec_sep[1]]
    arg_Ts = [uvec_sep[0:Ms], uvec_sep[1:Ms+1], uvec_sep[2:Ms+2], phie_sep[0:Ms], phie_sep[2:Ms+2],\
              Tvec_sep[0:Ms], Tvec_sep[1:Ms+1], Tvec_sep[2:Ms+2], Tvec_old_sep[1:Ms+1]]
    arg_TMs = [Tvec_sep[Ms], Tvec_sep[Ms + 1], Tvec_ne[0], Tvec_ne[1]]

    bc_T0s = peq.bc_inter_cont(*arg_T0s)
    res_Ts = vmap(sepq.temperature)(*arg_Ts)
    bc_TMs = sepq.bc_temp_sn(*arg_TMs)

    bc_T0s_grad = grad(peq.bc_inter_cont, range(0, 4))(*arg_T0s)
    bc_TMs_grad = grad(sepq.bc_temp_sn, range(0, 4))(*arg_TMs)
    A_Ts = vmap(grad(sepq.temperature, range(0, len(arg_Ts) - 1)))(*arg_Ts)

    bcTs = {"right": bc_T0s_grad[2:4], "left": bc_TMs_grad[0:2]}
    J_TTss = build_tridiag(Ms, A_Ts[5:8], **bcTs)
    J_TTsp = coo_matrix((np.ravel(np.asarray(bc_T0s_grad[0:2])),
                         ([0, 0], [Ma + 2 + Mp, Ma + 2 + Mp + 1])),
                        shape=(Ms + 2, Ma + 2 + Mp + 2))
    J_TTsn = coo_matrix(
        (np.ravel(np.asarray(bc_TMs_grad[2:4])), ([Ms + 1, Ms + 1], [0, 1])),
        shape=(Ms + 2, Mn + 2 + Mz + 2))
    J_TTs = hstack([J_TTsp, J_TTss, J_TTsn])

    J_Tus = build_tridiag(Ms, A_Ts[0:3])
    J_Tphies = build_bidiag(Ms, A_Ts[3:5])
    """ Negative residual """
    arg_T0n = [Tvec_sep[Ms], Tvec_sep[Ms + 1], Tvec_ne[0], Tvec_ne[1]]
    arg_Tn = [uvec_ne[0:Mn], uvec_ne[1:Mn+1], uvec_ne[2:Mn+2], phie_ne[0:Mn],phie_ne[2:Mn+2],\
             phis_ne[0:Mn], phis_ne[2:Mn+2], Tvec_ne[0:Mn], Tvec_ne[1:Mn+1], Tvec_ne[2:Mn+2], j_ne[0:Mn],\
             eta_ne[0:Mn], cmat_ne[Nn,:], cmat_ne[Nn+1,:], neq.cmax*np.ones([Mn,1]), Tvec_old_ne[1:Mn+1]]
    arg_TMn = [Tvec_ne[Mn], Tvec_ne[Mn + 1], Tvec_zcc[0], Tvec_zcc[1]]

    bc_T0n = neq.bc_inter_cont(*arg_T0n)
    res_Tn = vmap(neq.temperature)(*arg_Tn)
    bc_TMn = neq.bc_temp_n(*arg_TMn)
    """jacobian"""
    A_Tn = vmap(grad(neq.temperature, range(0, len(arg_Tn) - 2)))(*arg_Tn)
    bc_T0n_grad = grad(neq.bc_inter_cont, range(0, 4))(*arg_T0n)
    bc_TMn_grad = grad(neq.bc_temp_n, range(0, 4))(*arg_TMn)

    bcTn = {"right": bc_T0n_grad[2:4], "left": bc_TMn_grad[0:2]}
    J_TTnn = build_tridiag(Mn, A_Tn[7:10], **bcTn)
    J_TTns = coo_matrix(
        (np.ravel(np.asarray(bc_T0n_grad[0:2])),
         ([0, 0], [Ma + 2 + Mp + 2 + Ms, Ma + 2 + Mp + 2 + Ms + 1])),
        shape=(Mn + 2, Ma + 2 + Mp + 2 + Ms + 2))
    J_TTnz = coo_matrix(
        (np.ravel(np.asarray(bc_TMn_grad[2:4])), ([Mn + 1, Mn + 1], [0, 1])),
        shape=(Mn + 2, Mz + 2))
    J_TTn = hstack([J_TTns, J_TTnn, J_TTnz])

    J_Tun = build_tridiag(Mn, A_Tn[0:3])
    J_Tphien = build_bidiag(Mn, A_Tn[3:5])
    J_Tphisn = build_bidiag(Mn, A_Tn[5:7])
    J_Tjn = build_diag(Mn, A_Tn[10], "long")
    J_Tetan = build_diag(Mn, A_Tn[11], "long")

    col_cn = []
    data_cn = []
    row_cn = np.repeat(np.arange(1, Mn + 1), 2)
    offset = Mp * (Np + 2)
    for i in range(0, Mn):
        col_cn.append(
            [Nn + (Nn + 2) * i + offset, Nn + 1 + (Nn + 2) * (i) + offset])
        data_cn.append([A_Tn[12][i], A_Tn[13][i]])
    data_cn = np.ravel(np.array(data_cn))
    col_cn = np.ravel(np.array(col_cn))
    J_cn = coo_matrix((data_cn, (row_cn, col_cn)),
                      shape=(Mz + 2 + Mn + 2, Mn * (Nn + 2) + offset))
    """ Current collector z residual """
    arg_T0z = [Tvec_ne[Mn], Tvec_ne[Mn + 1], Tvec_zcc[0], Tvec_zcc[1]]
    arg_Tz = [
        Tvec_zcc[0:Mz], Tvec_zcc[1:Mz + 1], Tvec_zcc[2:Mz + 2],
        Tvec_old_zcc[1:Mz + 1]
    ]
    arg_TMz = [Tvec_zcc[Mz], Tvec_zcc[Mz + 1]]

    bc_T0z = neq.bc_inter_cont(*arg_T0z)
    res_Tz = vmap(zccq.temperature)(*arg_Tz)
    bc_TMz = zccq.bc_temp_z(*arg_TMz)
    """ jacobian"""
    bc_T0z_grad = grad(neq.bc_inter_cont, range(0, 4))(*arg_T0z)
    A_Tz = vmap(grad(zccq.temperature, range(0, 3)))(*arg_Tz)
    bc_TMz_grad = grad(zccq.bc_temp_z, (0, 1))(*arg_TMz)

    bcTz = {"right": bc_T0z_grad[2:4], "left": bc_TMz_grad[0:2]}
    J_TTzz = build_tridiag(Mz, A_Tz[0:3], **bcTz)
    J_TTzn = coo_matrix(
        (np.ravel(np.asarray(bc_T0z_grad[0:2])),
         ([0, 0],
          [Ma + 2 + Mp + 2 + Ms + 2 + Mn, Ma + 2 + Mp + 2 + Ms + 2 + Mn + 1])),
        shape=(Mz + 2, Ma + 2 + Mp + 2 + Ms + 2 + Mn + 2))

    J_TTz = hstack([J_TTzn, J_TTzz])


    J_Tu = bmat([ [empty_rec(Ma+2, (Mp+2)+(Ms+2)+(Mn+2))],\
                   [block_diag((J_Tup, J_Tus, J_Tun))],\
                   [empty_rec(Mz+2, (Mp+2) + (Ms+2) + (Mn+2))]
                   ])
    J_Tphie = bmat([ [empty_rec(Ma+2, (Mp+2)+(Ms+2)+(Mn+2))],\
                   [block_diag((J_Tphiep, J_Tphies, J_Tphien))],\
                   [empty_rec(Mz+2, (Mp+2) + (Ms+2) + (Mn+2))]
                   ])
    J_Tphis = vstack([
        empty_rec(Ma + 2, (Mp + 2) + (Mn + 2)),
        hstack([J_Tphisp, empty_rec(Mp + 2, Mn + 2)]),
        empty_rec(Ms + 2, (Mp + 2) + (Mn + 2)),
        hstack([empty_rec(Mn + 2, Mp + 2), J_Tphisn]),
        empty_rec(Mz + 2, (Mp + 2) + (Mn + 2))
    ])

    J_Tj = vstack([
        empty_rec(Ma + 2, Mp + Mn),
        hstack([J_Tjp, empty_rec(Mp + 2, Mn)]),
        empty_rec(Ms + 2, Mp + Mn),
        hstack([empty_rec(Mn + 2, Mp), J_Tjn]),
        empty_rec(Mz + 2, Mp + Mn)
    ])

    J_Teta = vstack([
        empty_rec(Ma + 2, Mp + Mn),
        hstack([J_Tetap, empty_rec(Mp + 2, Mn)]),
        empty_rec(Ms + 2, Mp + Mn),
        hstack([empty_rec(Mn + 2, Mp), J_Tetan]),
        empty_rec(Mz + 2, Mp + Mn)
    ])

    J_TT = vstack([J_TTa, J_TTp, J_TTs, J_TTn, J_TTz])

    J_Tc = vstack(
        [J_cp, empty_rec(Ms + 2,
                         Mp * (Np + 2) + Mn * (Nn + 2)), J_cn])

    res_T = np.hstack(
        (bc_T0a, res_Ta, bc_TMa, bc_T0p, res_Tp, bc_TMp, bc_T0s, res_Ts,
         bc_TMs, bc_T0n, res_Tn, bc_TMn, bc_T0z, res_Tz, bc_TMz))

    J_T = hstack([J_Tc, J_Tu, J_TT, J_Tphie, J_Tphis, J_Tj, J_Teta])

    return res_T, J_T


#plt.figure(figsize=(20,10)); plt.spy(J_T, markersize=1);plt.savefig('Tsparsity.png')
Example No. 29
def do_run_pipeline(name, basedir):
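    # Download a field (name starting with 'P') or a single observation
    # ('L'), unpack it, average any MS with more than 20 channels, build
    # the MS list, write a custom config, and submit the processing job.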

    if name[0] != 'P' and name[0] != 'L':
        die('This code should be used only with field or observation names',
            database=False)

    do_field = (name[0] == 'P')

    try:
        qsubfile = sys.argv[2]
    except:
        qsubfile = '/home/mjh/pipeline-master/ddf-pipeline/torque/pipeline.qsub'

    workdir = basedir + '/' + name
    try:
        os.mkdir(workdir)
    except OSError:
        warn('Working directory already exists')

    report('Downloading data')
    if do_field:
        success = download_field(name, basedir=basedir)
    else:
        success = download_dataset('https://lofar-webdav.grid.sara.nl',
                                   '/SKSP/' + name + '/',
                                   basedir=basedir)

    if not success:
        die('Download failed, see earlier errors', database=False)

    report('Unpacking data')
    try:
        unpack(workdir=workdir)
    except RuntimeError:
        if do_field:
            update_status(name, 'List failed', workdir=workdir)
        raise
    if do_field:
        update_status(name, 'Unpacked', workdir=workdir)

    report('Deleting tar files')
    os.system('rm ' + workdir + '/*.tar.gz')
    os.system('rm ' + workdir + '/*.tar')

    averaged = False
    report('Checking structure')
    g = glob.glob(workdir + '/*.ms')
    msl = MSList(None, mss=g)
    uobsids = set(msl.obsids)
    for thisobs in uobsids:
        # check one MS with each ID
        for m, ch, o in zip(msl.mss, msl.channels, msl.obsids):
            if o == thisobs:
                channels = len(ch)
                print 'MS', m, 'has', channels, 'channels'
                if channels > 20:
                    update_status(name, 'Averaging', workdir=workdir)
                    print 'Averaging needed for', thisobs, '!'
                    averaged = True
                    average(wildcard=workdir + '/*' + thisobs + '*')
                    os.system('rm -r ' + workdir + '/*' + thisobs +
                              '*pre-cal.ms')
                break

    report('Making ms lists')
    success = make_list(workdir=workdir)
    if do_field:
        list_db_update(success, workdir=workdir)
    if not success:
        die('make_list could not construct the MS list', database=False)

    report('Creating custom config file from template')
    make_custom_config(name, workdir, do_field, averaged)

    # now run the job
    do_run_job(name, basedir=basedir, qsubfile=None, do_field=do_field)
Example No. 30
                    try:
                        skip = False

                        if not unpack.has_single_toplevel(base):
                            fname = base.rsplit(os.path.sep, 1)[-1]
                            comp = os.path.join(comp, fname.rsplit('.', 1)[0])

                            if os.path.isdir(comp):
                                log(WARNING, "Unpack torrent: %s exists, %s not unpacked." % (comp, base))
                                skip = True

                        if not skip:
                            log(INFO, "Unpack torrent from %s to %s." % (base, comp))

                            unpack.unpack(base, targetdir = comp, progress = lambda part, name, tor = tor: setattr(tor, "unpackProgress", part))

                            log(INFO, "Removing %s" % base)
                            os.remove(base)

                    except Exception, e:
                        log(ERROR, "Unpack raised error %s! Old data may be left behind!" % e)

                    tor._completion_moved = True
            
            except Exception,e:
                log(ERROR, "completer thread caught unhandled exception %s for %s %s" % (e, com, args))
                
            self._completerQ.task_done()

        log(DEBUG, "Done")
Example No. 31
def main():
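    # End-to-end feed run: unpack and flatten the input file, convert the
    # config/flat/XML files to db-style files, read the feed and election
    # details, load and validate the data, and report on the results.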

    print "setting up directories..."

    dt.clear_or_create(DIRECTORIES["temp"])
    dt.create_directory(DIRECTORIES["archives"])

    print "done setting up directories"

    ftype = ft.get_type(unpack_file)

    print "unpacking and flattening files..."

    unpack.unpack(unpack_file, DIRECTORIES["temp"])
    unpack.flatten_folder(DIRECTORIES["temp"])
    # I could have flatten_folder return a list of files in the directory, so that
    # we wouldn't have to search through the directory every time for specific files
    # since os.walk is slow with directories with large files

    print "done unpacking and flattening"

    sp = SchemaProps(SCHEMA_URL)
    file_details = {
        "file": unpack_file,
        "process_time": process_time,
        "file_timestamp": file_timestamp
    }
    election_details = {}
    vip_id = None
    election_id = None

    print "converting to db style flat files...."

    if dt.file_by_name(CONFIG_FILE, DIRECTORIES["temp"]):
        file_details.update(
            process_config(DIRECTORIES["temp"],
                           DIRECTORIES["temp"] + CONFIG_FILE, sp))
    if dt.files_by_extension(".txt", DIRECTORIES["temp"]) > 0:
        file_details.update(process_flatfiles(DIRECTORIES["temp"], sp))
    print "processing xml files..."
    xml_files = dt.files_by_extension(".xml", DIRECTORIES["temp"])
    if len(xml_files) >= 1:
        ftff.feed_to_db_files(DIRECTORIES["temp"], xml_files[0],
                              sp.full_header_data("db"), sp.version)
        os.remove(xml_files[0])
        if "valid_files" in file_details:
            file_details["valid_files"].append(xml_files[0])
        else:
            file_details["valid_files"] = [xml_files[0]]

    print "done processing xml files"

    print "getting feed details..."
    db = EasySQL("localhost", "vip", "username", "password")
    try:
        with open(DIRECTORIES["temp"] + "source.txt", "r") as f:
            reader = csv.DictReader(f)
            row = reader.next()
            vip_id = row["vip_id"]
            election_details["vip_id"] = vip_id
        with open(DIRECTORIES["temp"] + "election.txt", "r") as f:
            reader = csv.DictReader(f)
            row = reader.next()
            election_details["election_date"] = row["date"]
            election_details["election_type"] = row["election_type"]
    except:
        er.report_summary(vip_id, election_id, file_details, election_details)
        return

    election_id = get_election_id(election_details, db)
    election_details["election_id"] = election_id
    print "done getting feed details"

    print "converting to full db files...."
    element_counts, error_data, warning_data = convert_to_db_files(
        vip_id, election_id, file_details["file_timestamp"],
        DIRECTORIES["temp"], sp)
    print "done converting to full db files"

    er.report_summary(vip_id, election_id, file_details, election_details,
                      element_counts)
    if len(error_data) > 0:
        er.feed_issues(vip_id, file_details["file_timestamp"], error_data,
                       "error")
    if len(warning_data) > 0:
        er.feed_issues(vip_id, file_details["file_timestamp"], warning_data,
                       "warning")

    update_data(vip_id, election_id, file_details["file_timestamp"], db,
                element_counts, DIRECTORIES["temp"], DIRECTORIES["archives"])

    db_validations(vip_id, election_id, db, sp)

    generate_feed(file_details)
Example No. 32
		if (pid["guess"] == 0 ):
			totals["ncGood"] += 1.0
	elif (pid["true"] == 1):
		totals["cc"] += 1.0
		if (pid["guess"] == 1):
			totals["ccGood"] += 1.0
	
totals = {}
totals["nc"] = 0.0
totals["cc"] = 0.0
totals["ncGood"] = 0.0
totals["ccGood"] = 0.0
pidcutter = BasicCutPID.BasicCutPID()
pidcutter.setCuts(sys.argv[2],sys.argv[3],sys.argv[4])
pcalc = ParameterCalculator.ParameterCalculator()
events = unpack.unpack()
comp = LEMCompactor.LEMEventCompactor()
rm = ROOTFileManager.ROOTFileManager()
rm.setupTree()
#rm.prepareCharacterisation()
no = 0
allStats = []
lpid = LikelihoodPID.LikelihoodPID()
lpid.loadLikelihoods("ratios.root")
for event in events:
	no += 1
	print str(no) + " Events"
	if (1):#no < 100):

	#print "Event " + str(no)
	#print "Using " + str(len(event.digits))
Example No. 33
name = sys.argv[1]
try:
    qsubfile = sys.argv[2]
except:
    qsubfile = '/home/mjh/git/ddf-pipeline/pipeline.qsub'

try:
    os.mkdir(name)
except OSError:
    warn('Working directory already exists')
    pass
os.chdir(name)
report('Downloading data')
if not download_dataset('https://lofar-webdav.grid.sara.nl',
                        '/SKSP/' + name + '/'):
    die('Download failed to get the right number of files')

report('Unpacking data')
unpack()

report('Deleting tar files')
os.system('rm *.tar.gz')

report('Making ms lists')
if make_list():
    report('Submit job')
    os.system('qsub -N ddfp-' + name + ' -v WD=' + rootdir + '/' + name + ' ' +
              qsubfile)
else:
    die('make_list could not construct the MS list')
Example No. 34
def download(tweet_id, downloader, concealer, name_override=False):
    print 'mode: download'
    root = deserialize(concealer.reveal(downloader(tweet_id)))#.tobytes())
    unpack(root, tweet_id, downloader, concealer, name_override=name_override, recur=True)
    print 'done'
Example No. 35
def main():

	print "setting up directories..."
	
	dt.clear_or_create(DIRECTORIES["temp"])
	dt.create_directory(DIRECTORIES["archives"])
	
	print "done setting up directories"

	ftype = ft.get_type(unpack_file)

	print "unpacking and flattening files..."

	unpack.unpack(unpack_file, DIRECTORIES["temp"])
	unpack.flatten_folder(DIRECTORIES["temp"])
# I could have flatten_folder return a list of files in the directory, so that
# we wouldn't have to search through the directory every time for specific files
# since os.walk is slow with directories with large files

	print "done unpacking and flattening"

	sp = SchemaProps(SCHEMA_URL)
	file_details = {"file":unpack_file, "process_time":process_time, "file_timestamp":file_timestamp}
	election_details = {}
	vip_id = None
	election_id = None

	print "converting to db style flat files...."

	if dt.file_by_name(CONFIG_FILE, DIRECTORIES["temp"]):
		file_details.update(process_config(DIRECTORIES["temp"], DIRECTORIES["temp"] + CONFIG_FILE, sp))
	if dt.files_by_extension(".txt", DIRECTORIES["temp"]) > 0:
		file_details.update(process_flatfiles(DIRECTORIES["temp"], sp))
	print "processing xml files..."
	xml_files = dt.files_by_extension(".xml", DIRECTORIES["temp"])
	if len(xml_files) >= 1:
		ftff.feed_to_db_files(DIRECTORIES["temp"], xml_files[0], sp.full_header_data("db"), sp.version)
		os.remove(xml_files[0])
		if "valid_files" in file_details:
			file_details["valid_files"].append(xml_files[0])
		else:
			file_details["valid_files"] = [xml_files[0]]

	print "done processing xml files"

	print "getting feed details..."
	db = EasySQL("localhost","vip","username","password")
	try:
		with open(DIRECTORIES["temp"] + "source.txt", "r") as f:
			reader = csv.DictReader(f)
			row = reader.next()
			vip_id = row["vip_id"]
			election_details["vip_id"] = vip_id
		with open(DIRECTORIES["temp"] + "election.txt", "r") as f:
			reader = csv.DictReader(f)
			row = reader.next()
			election_details["election_date"] = row["date"]
			election_details["election_type"] = row["election_type"]
	except:
		er.report_summary(vip_id, election_id, file_details, election_details)
		return

	election_id = get_election_id(election_details, db)
	election_details["election_id"] = election_id
	print "done getting feed details"

	print "converting to full db files...."
	element_counts, error_data, warning_data = convert_to_db_files(vip_id, election_id, file_details["file_timestamp"], DIRECTORIES["temp"], sp)
	print "done converting to full db files"
	
	er.report_summary(vip_id, election_id, file_details, election_details, element_counts)
	if len(error_data) > 0:
		er.feed_issues(vip_id, file_details["file_timestamp"], error_data, "error")
	if len(warning_data) > 0:
		er.feed_issues(vip_id, file_details["file_timestamp"], warning_data, "warning")

	update_data(vip_id, election_id, file_details["file_timestamp"], db, element_counts, DIRECTORIES["temp"], DIRECTORIES["archives"])	

	db_validations(vip_id, election_id, db, sp)

	generate_feed(file_details)
Example No. 36
parser = argparse.ArgumentParser()

# training environment parameters
parser.add_argument("--objs_dir", type=str, default="./objects", help="Location of the 'PointcloudScenes' directory. Will be created if it does not exist. Default: ./objects")
parser.add_argument("--compositor_dir", type=str, default="./scene_compositor/Build/Compositor/Compositor.exe", help="Path to the built scene_compositor program. Default: ./scene_compositor/Build/Compositor/Compositor.exe")
parser.add_argument("--conversion_only", type=bool, default=False, help="If 'True', only a conversion of the mesh scenes to point clouds will be conducted and the download will be skipped. Default: False")
parser.add_argument("--max_points", type=int, default=10000, help="Determines the maximum number of points in the resulting point clouds. Default: 10000")
parser.add_argument("--debug", type=bool, default=False, help="If 'True', the unity compositor can be started from the unity editor. Default: False")

args = parser.parse_args()

print(args)

if not args.conversion_only:  # whole processing
    download(args.objs_dir)
    unpack(args.objs_dir)
    structure(args.objs_dir)

while True:
    try:
        if not args.debug:
            p = subprocess.Popen([args.compositor_dir])
        convert(args.objs_dir, max_points=args.max_points)
    except ConnectionResetError:
        update_blacklist()
        continue
    except Exception as e:
        print(e)
        break
    if not args.debug:
        p.terminate()