Example #1
def main():
    hp.setup()
    VENDORS = ["A", "B", "C"]
    for vendor in VENDORS:
        print "Test for vendor %s" % vendor
        (result, _, _) = test_parser_vendor(vendor=vendor)
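    # Note: only the result of the last vendor's run survives the loop and is returned.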
    return result
Example #2
def run():
    connection, channel = helper.setup('task_queue')
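    # Fetch the area-code index page and walk every link inside its table rows.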
    data = helper.scrape(AREA_CODE_URL)
    soup = BeautifulSoup(data)
    trs = soup.findAll("tr")
    for tr in trs:
        a_elems = tr.findAll("a")
        for a_elem in a_elems:
            ac_url = BASE_URL + a_elem['href']
            dispatch(channel, ac_url)
    connection.close()
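
The `dispatch` helper is not shown here; a minimal sketch, assuming pika and the same 'task_queue' used by the consumer in Example #8, could publish each URL as a persistent task message:

import pika

def dispatch(channel, url):
    # Hypothetical sketch; the real project defines its own dispatch().
    channel.basic_publish(exchange='',
                          routing_key='task_queue',
                          body=url,
                          properties=pika.BasicProperties(delivery_mode=2))  # persistent message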
Example #3
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

import types
import helper
import StringIO, unittest, sys

from duplicity.selection import * #@UnusedWildImport
from duplicity.lazy import * #@UnusedWildImport

helper.setup()

class MatchingTest(unittest.TestCase):
    """Test matching of file names against various selection functions"""
    def setUp(self):
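        # Unpack the canned directory tree the selection tests run against.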
        assert not os.system("tar xzf testfiles.tar.gz > /dev/null 2>&1")
        self.root = Path("testfiles/select")
        self.Select = Select(self.root)

    def tearDown(self):
        assert not os.system("rm -rf testfiles tempdir temp2.tar")

    def makeext(self, path):
        return self.root.new_index(tuple(path.split("/")))

    def testRegexp(self):
Example #4
# Free Software Foundation; either version 2 of the License, or (at your
# option) any later version.
#
# Duplicity is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

import helper
import os, unittest, sys

helper.setup()

# This can be changed to select the URL to use
backend_url = 'file://testfiles/output'
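# Other duplicity backend schemes (e.g. ftp:// or s3+http://) can be substituted here.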

class CmdError(Exception):
    """Indicates an error running an external command"""
    return_val = -1
    def __init__(self, return_val):
        self.return_val = os.WEXITSTATUS(return_val)

class BadUploadTest(unittest.TestCase):
    """
    Test missing volume upload using duplicity binary
    """
    def setUp(self):
Example #5
import pandas as pd
import numpy as np

from tqdm import tqdm

import helper

pathways, interactome = helper.setup()
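# Project per-edge weights onto each edge's head node so they can be merged
# with the per-node degree features loaded below.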

interactome_e2n = helper.convert_edges_to_node(interactome, 'edge_weight',
                                               'interactome_weight')
interactome_en = helper.keep_edge_nodes(interactome_e2n,
                                        ['head', 'interactome_weight'])

interactome_degrees = pd.read_csv(
    '../output/features_interactome_no_nearest_01.txt', delimiter='\t')

interactome_features = pd.merge(interactome_degrees,
                                interactome_en,
                                left_on='name',
                                right_on='head')
interactome_features.drop('head', axis=1, inplace=True)

num_folds = 2

create_additional_features = False

for pathway in tqdm(pathways):
    pathway_dist_score = pd.read_csv(
        '../output/features_{}_03.txt'.format(pathway),
        delimiter='\t',
Example #6
def setup():
    t.setup()
Example #7
def setup():
    t.setup()
Example #8
def run():
    connection, channel = helper.setup('task_queue')
    print('Waiting for URL to scrape. To exit press CTRL+C')
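    # Fair dispatch: hold at most one unacknowledged message per worker.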
    channel.basic_qos(prefetch_count=1)
    channel.basic_consume('task_queue', callback)
    channel.start_consuming()
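
The `callback` registered above is defined elsewhere in the project; a minimal sketch, assuming each message body is a URL to scrape, might look like:

def callback(ch, method, properties, body):
    # Hypothetical sketch; the real handler performs the actual scraping.
    url = body.decode()
    print('Scraping %s' % url)
    ch.basic_ack(delivery_tag=method.delivery_tag)  # acknowledge the finished task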
Example #9
from app0 import *
from random import randint
import helper
import matplotlib.pyplot as plt
from numpy import mean, zeros
import time

start = time.time()

tick = 0
hour = 0
endTick = 6*10*24


size = 7
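# helper.setup(n) presumably returns n actors plus the neighbourhood they share.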
actors,hood = helper.setup(64*size)
#actors,hood = helper.setup(28,1)
timedata = []

while tick < endTick:
    # Update all actor states every 10 ticks
    if tick % 10 == 0:
        print("\t", str(int(hour)) + ":" + str(int(hour % 1 * 60)))
        for i in actors:
            i.run(hour, hood, [blind, block, another, quietest, oneofquietest])

        for shop in hood.shops:
            shop.run(hour, tick)

        timedata.append(tick)
Example #10
def main():
    hp.setup()
    (result, _, _) = test_autoprecharge()
    return result
Example #11
#!/usr/bin/env python3

import tensorflow as tf
import numpy as np
import time
import sys
import helper
import vgg16

args = helper.get_args(description='Extract VGG16 features')
helper.setup(args)

batch_size = args.batch_size

with tf.Session() as sess:
    vgg = vgg16.Vgg16()

    images = tf.placeholder("float", [None, 224, 224, 3])
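    # Build the VGG16 graph on 224x224 RGB inputs; the batch dimension stays dynamic.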
    vgg.build(images)

    LAYERS_TO_EXTRACT = helper.get_vgg_layers_to_be_extracted(
        vgg, args.extract_layers)

    for img_paths, imgs in helper.next_img_batch(
            count=batch_size,
            done_file=args.done_file,
            images_file=args.images_list_file,
            prepend_image_path=args.images_path):

        time_start = time.time()
        features = sess.run(LAYERS_TO_EXTRACT, feed_dict={images: imgs})
Example #12
import pandas as pd
from sklearn import svm

from tqdm import tqdm

import helper

pathways, original_interactome = helper.setup()

prepend = 'set01_ori_balanced'
kernels = ['rbf']  # , 'linear', 'poly']

drop_cols = []
class_weight = 'balanced'  # 'balanced' or None

print('prepend: {}\nkernels: {}\ndrop_cols: {}\nclass_weight: {}'.format(
    prepend, kernels, drop_cols, class_weight))
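# Two-fold setup: each pathway has pre-split training and prediction files on disk.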

for kernel in tqdm(kernels):
    for pathway in tqdm(pathways):
        for fold_idx in tqdm(range(2)):
            fold_num = fold_idx + 1

            training = pd.read_csv(
                '../output/fit_training_{}_{}_of_2'.format(
                    pathway, fold_num),
                delimiter='\t')
            testing = pd.read_csv(
                '../output/fit_prediction_{}_{}_of_2'.format(
                    pathway, fold_num),
                delimiter='\t')
Example #13
import pandas as pd
from sklearn import svm

from tqdm import tqdm

import helper

pathways, original_interactome = helper.setup()

prepend = 'set01_ori_balanced'
kernels = ['rbf']  # , 'linear', 'poly']

drop_cols = []
class_weight = 'balanced'  # 'balanced' or None

print('prepend: {}\nkernels: {}\ndrop_cols: {}\nclass_weight: {}'.format(
    prepend, kernels, drop_cols, class_weight))

for kernel in tqdm(kernels):
    for pathway in tqdm(pathways):
        for fold_idx in tqdm(range(2)):
            fold_num = fold_idx + 1

            training = pd.read_csv(
                '../output/fit_training_{}_{}_of_2'.format(
                    pathway, fold_num),
                delimiter='\t')
            testing = pd.read_csv(
                '../output/fit_prediction_{}_{}_of_2'.format(
                    pathway, fold_num),
                delimiter='\t')

            if drop_cols:
Example #14
def main():
    hp.setup()
    test_cust_vendor_vendor()
Example #15
def upephelper_processing(key, codons, override):
    outpath = helpersetting.UPEPHELPER_STAGING
    data_loc = helpersetting.UPEPHELPER_DATABASE
    dbuser = helpersetting.DATABASES['default']['USER']
    dbpass = str(helpersetting.DATABASES['default']['PASSWORD'])
    dbhost = helpersetting.DATABASES['default']['HOST']
    daba = helpersetting.DATABASES['default']['DB']
    #local_version = []
    timeid = time.time()
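    # Unique run id: millisecond timestamp (12 hex digits) + 64 random bits (16 hex digits).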
    unid = '%012x%016x' % (int(timeid * 1000), random.randint(0, 0xFFFFFFFFFFFFFFFF))
    
    #if Refseqdb_blast_db_build_log.objects.all().exists() is False:
        #local_version.append(0)

        #print('No local database version information available.')
    #else:
        #r = Refseqdb_blast_db_build_log.objects.order_by('-input_date')[0]
        #local_version.append(r.database_version)

        #print('Latest local database version: %i' % r.database_version)
    
    remote = helper.get_NCBI_RefSeq_release()

    query = ','.join(codons)
    # Respect the caller's override flag when deciding whether to force a rebuild.
    override_condition = "True" if override else "False"
    dbv = str(remote)
    helper.upep_mysql_database(unid, dbuser, dbpass, dbhost, daba, key, query, override_condition, remote)
    local_version = 0
    lv = """select * from updater_log where refseq_database = %s and success_log = 1 order by unix_timestamp(time_id_start) desc;"""
    dbcon = MySQLdb.connect(user=dbuser, passwd=dbpass, host=dbhost, db=daba)
    cursor = dbcon.cursor()
    cursor.execute(lv, (key,))
    if not cursor.rowcount:
        local_version = 0
    else:
        local = cursor.fetchone()
        local_version = local[4]
    cursor.close()
    dbcon.close()
    fn = 0
    if remote > local_version or override_condition == "True":
        home = os.getcwd()
        dbs = ['RefSeq-complete',
               'RefSeq-fungi',
               'RefSeq-invertebrate',
               'RefSeq-plant',
               'RefSeq-vertebrate_mammalian',
               'RefSeq-vertebrate_other']
        if key:
            print("Working with database " + key)

            if key in dbs:
                
                wd = helper.setup(outpath, home, key)
                fn = helper.download_db(key)
                
                compacted = helper.compact_RefSeq(wd, dbv)
                
                print("Compiling ACC and GI database for " + key)
                helper.compile_RefSeq(compacted, dbv, fn, dbuser, dbpass, dbhost, daba, timeid)                
                               
                print("Recorded log for building ACC and GI database of " + key)
                os.chdir(home)
            else:

                print("Not a defined db")
                sys.exit(1)
        #else:
            #for db in dbs:

                #print("Working with database" + db)
                #wd = helper.setup(outpath, home, db)
                #helper.download_db(db)
                #compacted = helper.compact_RefSeq(wd, dbv)
                #helper.compile_RefSeq(compacted, dbv, dbuser, dbpass, dbhost)
                
                #os.chdir(home)

        os.chdir(outpath)
        #try:
            #os.mkdir("../tmp/RefSeqdb"+dbv)
        #except OSError:
            #print("RefSeqdb %s directory exists \n Overriding directory" % dbv)
            #pass
        #os.system("mv "+outpath+"RefSeq* ../tmp/RefSeqdb"+dbv)
        os.system("rm -rf "+outpath+"RefSeq*")
        for starting_codon in codons:
            _, proc_list = helper.uPEP_finder(codon=starting_codon, db_version=dbv, outpath=outpath, fn=fn)
            helper.build_blast_db(proc_list)
        
        helper.finalise_update()
        dbcon = MySQLdb.connect(user=dbuser, passwd=dbpass, host=dbhost, db=daba)
        cursor = dbcon.cursor()
    
        update_log = """UPDATE updater_log SET time_id_finish = current_timestamp, success_log = 1 WHERE unique_id = %s"""
        cursor.execute(update_log, (unid,))
        dbcon.commit()
        cursor.close()
        dbcon.close()
        #for starting_codon in codons:
            #rdb_blast_log = Refseqdb_blast_db_build_log(input_date=timezone.now(), database_name = key, database_version = remote, codon = starting_codon)
            #rdb_blast_log.save()

    else:
        print("No updrade required")
import pandas as pd
import numpy as np

from tqdm import tqdm

import helper

pathways, interactome = helper.setup()

interactome_e2n = helper.convert_edges_to_node(interactome,
                                               'edge_weight',
                                               'interactome_weight')
interactome_en = helper.keep_edge_nodes(interactome_e2n,
                                        ['head', 'interactome_weight'])

interactome_degrees = pd.read_csv(
    '../output/features_interactome_no_nearest_01.txt', delimiter='\t')

interactome_features = pd.merge(interactome_degrees,
                                interactome_en, left_on='name',
                                right_on='head')
interactome_features.drop('head', axis=1, inplace=True)

num_folds = 2

create_additional_features = False

for pathway in tqdm(pathways):
    pathway_dist_score = pd.read_csv(
        '../output/features_{}_03.txt'.format(pathway), delimiter='\t',
        na_values=['None'])