Example no. 1
0
def _make_counterfactual_graph(account_names, sim_symbol, lines, min_date,
                               max_date):
    if not account_names:
        return None

    all_traces = calculate_traces(account_names, min_date, max_date)
    line = _standardize_lines(lines)[0]
    plots = []
    y = all_traces[line.split(':')[0]]
    plots.append(
        go.Scatter(x=y.index,
                   y=y,
                   mode='lines',
                   name='Actual Portfolio',
                   yaxis='y1'))

    sim_traces = calculate_sim_traces(account_names, sim_symbol, min_date,
                                      max_date)
    y_sim = sim_traces[line.split(':')[0]]
    plots.append(
        go.Scatter(x=y_sim.index,
                   y=y_sim,
                   mode='lines',
                   name='Counterfactual Portfolio',
                   yaxis='y1'))

    return {
        'data': plots,
        'layout': get_layout(conf('which_axis')[line], [conf('lines')[line]]),
    }
Example no. 2
0
def _make_summed_graph(account_names, lines, min_date, max_date):
    if not account_names:
        return None
    all_traces = calculate_traces(account_names, min_date, max_date)

    lines = _standardize_lines(lines)
    plots = []
    axes_used = {conf('which_axis')[l] for l in lines}
    lines_on_y2 = list(
        filter(lambda x: conf('which_axis')[x] == 'pct', conf('which_axis')))
    for line in lines:
        if line in lines_on_y2 and len(axes_used) > 1:
            which_y = 'y2'
        else:
            which_y = 'y1'
        y = all_traces[line.split(':')[0]]
        plots.append(
            go.Scatter(x=y.index,
                       y=y,
                       mode='lines',
                       name=conf('lines')[line],
                       yaxis=which_y))
    return {
        'data': plots,
        'layout': get_layout(axes_used, [conf('lines')[l] for l in lines]),
    }
Example no. 3
0
def _make_counterfactual_graph(account_names, sim_symbol, lines, min_date, max_date):
    if not account_names:
        return None

    all_traces = calculate_traces(account_names, min_date, max_date)
    line = _standardize_lines(lines)[0]
    plots = []
    y = all_traces[line.split(":")[0]]
    plots.append(
        go.Scatter(x=y.index, y=y, mode="lines", name="Actual Portfolio", yaxis="y1")
    )

    sim_traces = calculate_sim_traces(account_names, sim_symbol, min_date, max_date)
    y_sim = sim_traces[line.split(":")[0]]
    plots.append(
        go.Scatter(
            x=y_sim.index,
            y=y_sim,
            mode="lines",
            name="Counterfactual Portfolio",
            yaxis="y1",
        )
    )

    return {
        "data": plots,
        "layout": get_layout(conf("which_axis")[line], [conf("lines")[line]]),
    }
Example no. 4
0
def generate_aug(which):
    images, tags = load()
    images, tags = augment_and_shuffle(images, tags,
                                       conf(f'aug.{which}.factor'))
    dataset = {'images': images, 'tags': tags}
    with open(conf(f'aug.{which}.output'), 'wb') as f:
        f.write(json.dumps(dataset).encode('utf-8'))
Example no. 5
0
    def send(self, server=None, img=None, quality=conf("image.jpg_quality")):
        """
        Send an image to the client's UDP server.

        Parameters
        ----------
        server : tuple of str and int
            Address of the client's server (IP and port)
        img : ndarray or None
            If None, the image sent is self._image;
            otherwise, the image to send

        Returns
        -------
        void
            Nothing.
        """
        if server is None:
            if conf("debug.network"):
                print("NO VIDEO SOCKET")
            return
        if img is None:
            img = self._image

        res, jpg = cv2.imencode('.jpg', img,
                                [int(cv2.IMWRITE_JPEG_QUALITY), quality])
        try:
            server.sendall(
                int(jpg.size).to_bytes(4, 'big', signed=True) + jpg.tobytes())
        except OSError:
            print("Erreur: image::Image.send")
Example no. 6
0
    def isFin(self,
              seuil_fin=conf("image.seuil.fin"),
              prc_fin=conf("image.prc_fin")):
        """
        Detect whether or not a line is present in an image


        Parameters
        ----------
        seuil_fin : float
            Anything above this threshold is considered to be the
            line.
        prc_fin : float
            Minimum percentage of line pixels required for the image to be
            considered as "having a line", in which case the function
            returns False

        Returns
        -------
        bool
            True if there is no line, False if there is a line.


        Notes
        -----
        Threshold the black line (pix < 50)
        Compare the number of non-zero pixels found in the thresholded image
        More than prc_fin percent of the line must be visible, otherwise
        OTSU thresholding will not work.
        """
        return self._image.size * prc_fin > cv2.countNonZero(
            cv2.threshold(self._image, seuil_fin, 255,
                          cv2.THRESH_BINARY_INV)[1])
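To make the comparison concrete, here is a small worked example of the same check with illustrative numbers (they are not taken from the project configuration):

import cv2
import numpy as np

# A 120x160 frame (19200 pixels) with prc_fin = 0.05 needs more than
# 19200 * 0.05 = 960 dark "line" pixels to count as having a line.
image = np.full((120, 160), 200, dtype=np.uint8)  # mostly bright background
image[55:65, :] = 10                              # a dark horizontal band (1600 px)
line_pixels = cv2.countNonZero(
    cv2.threshold(image, 50, 255, cv2.THRESH_BINARY_INV)[1])
print(line_pixels)                                # 1600, well above the 960 floor
print(image.size * 0.05 > line_pixels)            # False -> isFin() would return False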
Example no. 7
0
def _make_summed_graph(account_names, lines, min_date, max_date):
    if not account_names:
        return None
    all_traces = calculate_traces(account_names, min_date, max_date)

    lines = _standardize_lines(lines)
    plots = []
    axes_used = {conf("which_axis")[l] for l in lines}
    lines_on_y2 = list(
        filter(lambda x: conf("which_axis")[x] == "pct", conf("which_axis"))
    )
    for line in lines:
        if line in lines_on_y2 and len(axes_used) > 1:
            which_y = "y2"
        else:
            which_y = "y1"
        y = all_traces[line.split(":")[0]]
        plots.append(
            go.Scatter(
                x=y.index, y=y, mode="lines", name=conf("lines")[line], yaxis=which_y
            )
        )
    return {
        "data": plots,
        "layout": get_layout(axes_used, [conf("lines")[l] for l in lines]),
    }
Example no. 8
0
def _get_accounts(refresh: bool = False,
                  download_quotes: bool = False) -> Dict[str, pd.DataFrame]:
    """Return dictionary of all accounts, keyed by account name"""
    if refresh or not os.path.exists(conf('etl_accts')):
        accts = processing.refresh_portfolio(refresh_cache=download_quotes)[0]
    else:
        accts = pickle.load(open(conf('etl_accts'), 'rb'))[0]
    return accts
Example no. 9
0
def build_thread(gitpath, ref, buildid, cburl=None, submodules=False):
    tmpdir = os.path.join(conf('buildbot.buildpath'), buildid)
    repo = GitRepository(tmpdir)

    output, retcode = repo.clone(gitpath)
    if retcode:
        buildlog(buildid,
                 'Unable to clone %s. %s\n' % (gitpath, '\n'.join(output)))
        return

    output, retcode = repo.checkout(ref)
    if retcode:
        buildlog(buildid,
                 'Unable to checkout %s. %s\n' % (ref, '\n'.join(output)))
        return

    if submodules:
        output, retcode = repo.submodule_init()
        buildlog(buildid, output[0])
        buildlog(buildid, output[1])
        output, retcode = repo.submodule_update()
        buildlog(buildid, output[0])
        buildlog(buildid, output[1])

    resultsdir = os.path.join(tmpdir, '.build_results')
    os.makedirs(resultsdir)
    output, retcode = repo.build(conf('buildbot.signkey'),
                                 conf('buildbot.pbuilderrc'), resultsdir)

    buildlog(buildid, output[0])
    buildlog(buildid, output[1])
    #logging.debug(output[0])
    #logging.debug(output[1])

    os.chdir(resultsdir)
    if not os.listdir(resultsdir) or retcode != 0:
        buildlog(buildid, 'Nothing in results directory. Giving up.')
        return

    tarpath = os.path.join(tmpdir, 'package.tar.gz')
    tar = tarfile.open(tarpath, 'w:gz')
    for name in os.listdir(resultsdir):
        tar.add(name)
    tar.close()

    buildlog(buildid, 'Build complete. Results in %s\n' % tarpath)
    data = file(tarpath, 'rb').read()
    buildlog(buildid, 'Built %i byte tarball' % len(data))

    if cburl:
        buildlog(buildid, 'Performing callback: %s' % cburl)
        req = Curl()
        req.setopt(req.POST, 1)
        req.setopt(req.URL, str(cburl))
        req.setopt(req.HTTPPOST, [('package', (req.FORM_FILE, str(tarpath)))])
        req.setopt(req.WRITEDATA, file('%s/build.log' % tmpdir, 'a+'))
        req.perform()
        req.close()
Example no. 10
0
    def sign(self, dist):
        self._reprepro('export %s' % dist)

        gpg = GPG(gnupghome=conf('repository.gpghome'))
        filename = os.path.join(self.path, 'dists/%s/Release' % dist)
        detach_file = filename + '.gpg'
        try:
            os.unlink(detach_file)
        except: pass
        result = gpg.sign_file(file(filename, 'r'), keyid=conf('repository.signkey'), outputfile=detach_file)
Example no. 11
0
def build_thread(gitpath, ref, buildid, cburl=None, submodules=False):
    tmpdir = os.path.join(conf('buildbot.buildpath'), buildid)
    repo = GitRepository(tmpdir)

    output, retcode = repo.clone(gitpath)
    if retcode:
        buildlog(buildid, 'Unable to clone %s. %s\n' % (gitpath, '\n'.join(output)))
        return

    output, retcode = repo.checkout(ref)
    if retcode:
        buildlog(buildid, 'Unable to checkout %s. %s\n' % (ref, '\n'.join(output)))
        return

    if submodules:
        output, retcode = repo.submodule_init()
        buildlog(buildid, output[0])
        buildlog(buildid, output[1])
        output, retcode = repo.submodule_update()
        buildlog(buildid, output[0])
        buildlog(buildid, output[1])

    resultsdir = os.path.join(tmpdir, '.build_results')
    os.makedirs(resultsdir)
    output, retcode = repo.build(conf('buildbot.signkey'), conf('buildbot.pbuilderrc'), resultsdir)

    buildlog(buildid, output[0])
    buildlog(buildid, output[1])
    #logging.debug(output[0])
    #logging.debug(output[1])

    os.chdir(resultsdir)
    if not os.listdir(resultsdir) or retcode != 0:
        buildlog(buildid, 'Nothing in results directory. Giving up.')
        return

    tarpath = os.path.join(tmpdir, 'package.tar.gz')
    tar = tarfile.open(tarpath, 'w:gz')
    for name in os.listdir(resultsdir):
        tar.add(name)
    tar.close()

    buildlog(buildid, 'Build complete. Results in %s\n' % tarpath)
    data = file(tarpath, 'rb').read()
    buildlog(buildid, 'Built %i byte tarball' % len(data))

    if cburl:
        buildlog(buildid, 'Performing callback: %s' % cburl)
        req = Curl()
        req.setopt(req.POST, 1)
        req.setopt(req.URL, str(cburl))
        req.setopt(req.HTTPPOST, [('package', (req.FORM_FILE, str(tarpath)))])
        req.setopt(req.WRITEDATA, file('%s/build.log' % tmpdir, 'a+'))
        req.perform()
        req.close()
Example no. 12
0
    def lenet5(self, x):
        for p in conf('network.lenet5.cnn'):
            x = self.conv(x, p['conv']['filter'], p['conv']['bias'])
            x = tf.nn.relu(x)
            x = self.pool(x, p['pool']['ksize'], p['pool']['strides'])
        x = tf.reshape(x, [-1, 13 * 13 * 120])
        for i, p in enumerate(conf('network.lenet5.fc')):
            x = tf.layers.dense(x, p, activation=tf.nn.sigmoid, name=f'fc_{i}')
        return x
Example no. 13
0
    def __init__(self, initial, correction, angle_min, angle_max, canal):
        super().__init__(conf("controle.servo.periode_min"),
                         conf("controle.servo.periode_max"), canal)
        if conf("debug.controle"):
            print("Servomoteur: créé : ", self)
        self._angle_min = angle_min
        self._angle_max = angle_max
        self._angle_correction = correction
        self._angle_initial = initial
        self._angle_courant = self._angle_initial
        self.calibrer()
Example no. 14
0
def load():
    result = []
    tags = []
    for triple in os.walk(conf('image.root')):
        for file in triple[2]:
            if file.endswith('.jpg') or file.endswith('.png'):
                path = os.path.join(triple[0], file)
                result.append(
                    np.array(Image.open(path).resize(conf('image.size'))))
                tags.append(get_label_vector(file.split('.')[0]))
    return result, tags
Example no. 15
0
    def __init__(self):
        self.x1 = tf.placeholder(tf.float32, [None, *conf('image.size'), 3])
        self.x2 = tf.placeholder(tf.float32, [None, *conf('image.size'), 3])

        with tf.variable_scope("siamese") as scope:
            self.o1 = self.network(self.x1)
            scope.reuse_variables()
            self.o2 = self.network(self.x2)

        # Create loss
        self.y_ = tf.placeholder(tf.float32, [None])
        self.loss = self.loss_with_spring()
Example no. 16
0
def refresh_portfolio(refresh_cache: bool = False):
    """This is the "main" function; it runs everything."""
    os.makedirs(conf("cache_dir"), exist_ok=True)
    inv = read_investment_transactions(conf("investment_transactions"))
    portfolio_transactions = read_portfolio_transactions(
        conf("account_transactions"))
    # Read all the quotes either from disk or from the web.
    if refresh_cache:
        quotes.refresh_quotes(inv.Symbol.unique())
    max_date = quotes.get_max_date()
    log.info(f"The most recent quote is from {max_date}")

    min_port = min(p["Date"].min() for p in portfolio_transactions.values())
    index = pd.date_range(start=min(inv["Date"].min(), min_port),
                          end=max_date,
                          freq="D")
    accounts = {}
    all_accounts = port.init_portfolio(index)
    log.info("Logging investment transactions for all portfolios.")
    for idx, row in inv.iterrows():
        if row["Account"] not in accounts:
            accounts[row["Account"]] = port.init_portfolio(index)
        record_inv_action(all_accounts, row)
        record_inv_action(accounts[row["Account"]], row)
    log.info("Totaling portfolio transactions for all accounts.")
    port.total_portfolio(all_accounts)
    for acct in accounts.values():
        port.total_portfolio(acct)

    max_trans_date = None
    for acct_name, trans in portfolio_transactions.items():
        log.info("Logging portfolio transactions for %s.", acct_name)
        record_trans(accounts[acct_name], trans)
        record_trans(all_accounts, trans)

        accounts[acct_name]["_total"] += accounts[acct_name]["cash"]
        accounts[acct_name].loc[accounts[acct_name]["_total"].abs() < 0.01,
                                "_total"] = 0
        if not max_trans_date:
            max_trans_date = trans["Date"].max()
        else:
            max_trans_date = max((max_trans_date, trans["Date"].max()))
    all_accounts["_total"] += all_accounts["cash"]

    log.info("The most recent transaction was on %s", max_trans_date)
    _check_account_dict(accounts)

    with open(conf("etl_accts"), "wb") as _fout:
        pickle.dump((accounts, max_trans_date.date()), _fout)

    return accounts, max_trans_date.date()
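As the last lines show, the ETL result is persisted as an (accounts, max_date) tuple in the pickle file named by conf("etl_accts"), which is what _get_accounts reads back elsewhere in this list. A minimal sketch of reading that cache directly (the "from config import conf" import path is an assumption):

import pickle

from config import conf  # assumed location of the conf() accessor

with open(conf("etl_accts"), "rb") as _fin:
    accounts, max_trans_date = pickle.load(_fin)

for name, frame in accounts.items():
    # Each account is a DataFrame indexed by date; show the latest total.
    print(name, frame["_total"].iloc[-1])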
Example no. 17
0
def _make_comparison_graph(account_names, lines, min_date, max_date):
    if not account_names:
        return None

    traces = {n: calculate_traces([n], min_date, max_date) for n in account_names}
    line = _standardize_lines(lines)[0]
    plots = []
    for name, trace in traces.items():
        y = trace[line.split(":")[0]]
        plots.append(go.Scatter(x=y.index, y=y, mode="lines", name=name, yaxis="y1"))
    return {
        "data": plots,
        "layout": get_layout(conf("which_axis")[line], [conf("lines")[line]]),
    }
Example no. 18
0
def write_sample_data(
    monthly_contribution: float,
    security_mix: Dict[str, float],
    start_date: Union[str, datetime],
    stop_date: Union[str, datetime] = None,
    account: str = 'My 401(k)',
    config_fname: str = 'sample_config.yaml',
    av_api_key: str = None,
) -> None:
    """Write sample CSVs for specified investment contributions

     Assume that there's one contribution at the end of each month,
     and that contributions are split between securities in a way
     described by the `security_mix` input.

    Example
    -------
    # This assumes that there's an AlphaVantage API key in the
    # environment variable "AV_API_KEY".
    python -c "from portdash.sample import *; write_sample_data(100, {'VFAIX': 0.5, 'VDAIX': 0.3, 'VEIEX': 0.2}, '2010-01-01')"
    python portdash/acemoney.py -c sample_config.yaml
    python dash_app.py -c sample_config.yaml
    """
    logging.basicConfig(level='INFO')

    # Use default config as much as possible and create file names for the
    # sample data we're going to write.
    config.update_config({
        'investment_transactions': 'sample_investment_transactions.csv',
        'account_transactions': {
            account: 'sample_account.csv'
        },
        'av_api_key': av_api_key,
    })
    invest_trans, acct_trans = create_transactions(monthly_contribution,
                                                   security_mix, start_date,
                                                   stop_date, account)

    os.makedirs(config.conf('data_dir'), exist_ok=True)
    invest_trans.to_csv(config.conf('investment_transactions'),
                        index=False,
                        quoting=csv.QUOTE_ALL)
    acct_trans.to_csv(config.conf('account_transactions')[account],
                      index=False,
                      quoting=csv.QUOTE_ALL)

    print(f'Writing a sample config file to {config_fname}.')
    with open(config_fname, 'wt') as _fout:
        yaml.dump(config.get_config(), _fout)
Example no. 19
0
def mapping(map_para):
    fastq1, fastq2, outdir = map_para  # merged for multiprocessing in py2
    sample_name = fastq1.split('/')[-1].split('.')[0][:-3]
    conf = config.conf()
    cutadapt(fastq1, fastq2, outdir, conf.adapter1, conf.adapter2)
    hisat2(sample_name, outdir, conf.index)
    spicard(sample_name, outdir, conf.picard)
Example no. 20
0
def fetch_from_web(symbol: str, start_time: datetime = None) -> pd.DataFrame:
    """Return a table of historical security valuations

    Parameters
    ----------
    symbol : str
        The stock ticker symbol
    start_time : datetime, optional
        If supplied, start the output table at this date.
        Supplying a recent date will allow us to request fewer
        rows returned from the Alpha Vantage service.
        The default will return all available historical quotes.

    Returns
    -------
    pd.DataFrame
        A table of historical quotes, indexed by the datetime of the quote
    """
    log.info(f"Reading {symbol} data from Alpha Vantage.")
    client = AlphaVantageClient(conf("av_api_key"))
    new_quotes = client.historical_quotes(
        symbol, start_time=start_time, return_dividends=True
    )
    new_quotes.index.name = "date"

    return new_quotes
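A hedged usage sketch of the function above, asking Alpha Vantage for only the last month of rows by passing start_time (the symbol is borrowed from the sample-data example elsewhere in this list, and conf("av_api_key") must already be set):

from datetime import datetime, timedelta

recent = fetch_from_web("VFAIX", start_time=datetime.now() - timedelta(days=30))
print(recent.head())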
Example no. 21
0
def buildlog(buildid, message):
    filename = os.path.join(conf('buildbot.buildpath'),
                            '%s/build.log' % buildid)
    fd = file(filename, 'a+')
    fd.write(message + '\n')
    fd.close()
    logging.debug(message)
Example no. 22
0
def connectAndMoveToSetPoint():
    c = conf()
    data = { "setpoint":     c.get('ttm_setpoint')
            ,"setpointTime": c.get('ttm_setpointtime')
            }
    response = connectToCommand("moveToSetpoint", data)
    return connectAndGetStatus()
Example no. 23
0
    def remove(self, widget, data):
        if data == "album":
            for x in self.get_album_files():
                if x in self.bilinmiyor_artist: continue
                del self.sozluk[x]
            remover.remove_iconview(self.iconView)
        if data == "artist":
            for x in self.get_artist_files(self.dizelge):
                if x in self.bilinmiyor_artist: continue
                else:
                    del self.sozluk[x]
            remover.remove_treeview(self.treeview, self.dizi)
        if data == "title":
            try:
                liste = self.dizelge.sozluk[self.bilgi]
            except KeyError:
                liste = []
            for x in self.get_title_files():
                if x in liste:
                    liste.remove(x)
                    self.dizelge.yaz()
                elif x in self.sevilen.sections():
                    self.sevilen._remove(x)
                    del self.sevilen
                    self.sevilen = conf(svcfg)
                else:
                    try:
                        del self.sozluk[x]
                    except KeyError:
                        print "Key Error:{0}".format(x)
                        return False
            remover.remove_iconview(self.iconView1)
        self.pick.yaz()
Example no. 24
0
def twitter_bot(rss_guid=None):
    """
    Consumes a feed and checks if there are new entries in db.
    If so, gets a shortened url and tweets the new status.
    """

    if rss_guid is None:
        # ancestor_key = ndb.Key("RSS_GUID", rss_guid or "*norss*")
        # consumer = FeedConsume.get_last_rss_guid(ancestor_key)
        # rss_guid = consumer[0].rss_guid
        query = FeedConsume.gql("WHERE entry = :1", "latest")
        result = query.get()
        rss_guid = result.rss_guid
    else:
        consumer = FeedConsume(parent=ndb.Key("RSS_GUID", rss_guid or "*norss*"),
                               rss_guid=rss_guid, entry="latest")
        consumer.put()
    url = "{}erss.cgi?rss_guid={}".format(conf("pubmed_rss"), rss_guid)
    feeds = feedparser.parse(url)
    tweets = []
    for feed in feeds["items"]:
        pmid = (feed["link"].split("/")[-1]).rstrip("?dopt=Abstract")
        if "entrez?" in pmid:
            continue
        query = FeedItem.gql("WHERE pmid = :1", pmid)
        # if pmid not in db
        if (query.count() == 0):
            title = feed["title"]
            otitle = title
            url = feed["link"]
            category = feed["category"]
            item = FeedItem()
            item.pmid = pmid
        
            # shorten the url with Bitly.com
            shorturl = shorten_url_bitly(url)

            # tweet the new entry
            max_length = (140 - len(category) - len(shorturl) - 7)
            print(max_length)
            if len(title) > max_length:
                title = title[0:max_length]
            status = "#{}: {}... {}".format("".join(category.split()), title.rstrip(". "), shorturl)
            try:
                status = unicode(status).encode("utf-8")
            except UnicodeEncodeError:
                pass
                # TODO: add logging

            # tweet new status
            # tweets.append({'title': "{}...".format(title.rstrip(". ")), 'url': shorturl})
            ttitle = "#{}: {}...".format("".join(category.split()), otitle[0:100].rstrip(". "))
            tweets.append({'title': ttitle, 'url': shorturl})
            try:
                update_status_twitter(status)
                item.put()
            except:
                pass
            
    return tweets
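The truncation arithmetic inside the loop reserves space for the hashtag, the shortened URL, and the seven fixed characters of the "#{}: {}... {}" template ("#", ": ", "... "). A worked example with illustrative values:

category = "Genomics"                 # 8 characters -> "#Genomics"
shorturl = "http://bit.ly/xYz12"      # 19 characters
max_length = 140 - len(category) - len(shorturl) - 7   # 140 - 8 - 19 - 7 = 106
title = "A very long article title " * 10
status = "#{}: {}... {}".format("".join(category.split()),
                                title[:max_length].rstrip(". "), shorturl)
assert len(status) <= 140             # fits the old 140-character tweet limit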
Example no. 25
0
def connectAndInitRamp():
    c = conf()
    data = { "rampRate":      c.get('ttm_ramprate')
            ,"rampAmplitude": c.get('ttm_rampamplitude')
            }
    connection = connectToCommand("initRamp", data)
    return connectAndGetStatus()
Example no. 26
0
def pointInPolygon(walkshed):
    import config

    connection_string = config.conf()
    conn = psycopg2.connect(connection_string)
    cur = conn.cursor()
    polygon = getPolygon(walkshed)
    selectQuery = "SELECT id, ST_AsText(crime_location), crime_time, crime_type FROM cps_crime_data.crime_data WHERE ST_Within(crime_location, ST_GeomFromText(%s, 4326));"
    parameters = [polygon]
    cur.execute(selectQuery, parameters)
    rows = cur.fetchall()
    if len(rows) > 0:
        result_json = '{"type": "FeatureCollection", "features": ['
        for i in xrange(len(rows)):
            id = rows[i][0]
            location = rows[i][1]
            location = location[6:].split(' ')
            longitude = location[0]
            latitude = location[1][:-1]
            location = "[%s,%s]" % (longitude, latitude)
            time = rows[i][2]
            time = "%s-%s-%s %s:%s:00" % (time.year, time.month, time.day, time.hour, time.minute)
            type = rows[i][3]
            result_json += '{"type": "Feature","geometry": {"type": "Point", "coordinates":%s}, "properties": {"id": %s, "time": "%s","type": "%s"}},' % (
                location, id, time, type)
        result_json = result_json[:-1]
        result_json += ']}'
    else:
        result_json = '"NULL"'
    conn.commit()
    cur.close()
    conn.close()
    return result_json
Example no. 27
0
    def do(self, packet):
        """
        Execute the action according to the packet data:

        Parameters
        ----------
        packet : str
            Packet data.

        Returns
        -------
        void
            Nothing.

        Notes
        -----
        Packets:
        * start     :   Set the analysis loop flag to True
                        Start the analysis in a new thread
        * stop      :   Stop the robot
                        Stop the analysis loop
        * direction_* :  Manual control commands for the robot
        """
        if packet == b'start':
            self._robot.init_vars()
            self.lancer_video(True)
            if conf("debug.network"):
                print("Commande START reçue")

        elif packet == b'stop':
            self._robot._video_on = False
            self._pad = False
            if conf("debug.network"):
                print("Commande STOP reçue")

            moteur.do("stop")

        elif packet[:9] == b'direction':
            if conf("debug.network"):
                print("Commande de direction :\"", packet[10:], "\"reçue")

            self.pad_control(packet[10:])
            if self._pad is False:
                self.lancer_video(False)
            self._pad = True
        else:
            print("Commande non reconnue : ", packet)
Example no. 28
0
def connectToCommand(command, data=None):
    c = conf()
    basic_url = "http://%s:%s/lumikki/csm_lumikki_instron_" % (c.get('ttm_ip'),c.get('ttm_port'))
    url = basic_url + command
    if data != None:
        url += "?%s" % urlencode(data)
    connection = urlopen(url, timeout=timeout)
    return connection
Example no. 29
0
def pre_start():
    global parametrs, parametr1, parametr2, parametr3, parametr4
    parametrs = {}
    parametrs = conf()
    parametr1 = parametrs["enable_default_dictionary"]
    parametr2 = parametrs["default_dictionary"]
    parametr3 = parametrs["maximal_random_in_encrypting_withowt_key"]
    parametr4 = parametrs["save_all_actions"]
Example no. 30
0
def read_portfolio_transactions(acct_fnames: Dict[str, str]=None,
                                ignore_future: bool=True) \
      -> Dict[str, pd.DataFrame]:
    """Read a CSV of account transactions written by AceMoney"""
    if not acct_fnames:
        acct_fnames = conf('account_transactions')
    trans = {
        acct_name: pd.read_csv(fname, parse_dates=[0], thousands=',')
        for acct_name, fname in acct_fnames.items()
        if fname and acct_name not in conf('skip_accounts')
    }
    if ignore_future:
        # Filter out any transactions marked as being in the future.
        # This can happen with scheduled transactions.
        today = datetime.today()
        trans = {k: v[v['Date'] <= today] for k, v in trans.items()}
    return trans
Example no. 31
0
def read_investment_transactions(fname: str = None) -> pd.DataFrame:
    """Read a CSV of investment transactions written by AceMoney"""
    if not fname:
        fname = conf('investment_transactions')
    inv = pd.read_csv(fname,
                      dtype={
                          'Dividend': float,
                          'Price': float,
                          'Total': float,
                          'Commission': float,
                          'Quantity': float
                      },
                      parse_dates=[0],
                      thousands=',')

    log.info(f'Ignore transactions in {conf("skip_accounts")} accounts.')
    inv = inv.drop(inv[inv.Account.isin(conf('skip_accounts'))].index)
    return inv
Example no. 32
0
    def __init__(self, robot, portNo=conf("network.port.main")):
        super().__init__()
        self.setDaemon(True)  # The thread stops together with the process
        self._robot = robot
        self._direction = True  # True: move forward, False: move backward
        self._sock = socket()
        self._sock.bind(("", portNo))
        self._sock.listen()
        self._pad = False
Example no. 33
0
def connectAndStartSwitch(data=None):
	c = conf()
	basic_url = "http://%s:%s/lumikki/ttm_run2" % (c.get('ttm_ip'),c.get('ttm_port'))
	url = basic_url
	if data != None:
		url += "?%s" % urlencode(data)
			
	connection = urlopen(url, timeout=timeout)
	return connection
Example no. 34
0
    def get(self, gitpath):
        try:
            gitindex = conf('buildbot.gitindex.%s' % gitpath)
        except KeyError:
            return Response(status=404, body='Unknown git path')
        response = urllib.urlopen(gitindex)
        index = response.read()
        index = [x.strip('\r\n ').split(' ')[0].rsplit('.')[0] for x in index.split('\n') if x.strip('\r\n ')]
        return Response(status=200, body=dumps(index))
Example no. 35
0
def _make_comparison_graph(account_names, lines, min_date, max_date):
    if not account_names:
        return None

    traces = {
        n: calculate_traces([n], min_date, max_date)
        for n in account_names
    }
    line = _standardize_lines(lines)[0]
    plots = []
    for name, trace in traces.items():
        y = trace[line.split(':')[0]]
        plots.append(
            go.Scatter(x=y.index, y=y, mode='lines', name=name, yaxis='y1'))
    return {
        'data': plots,
        'layout': get_layout(conf('which_axis')[line], [conf('lines')[line]]),
    }
Example no. 36
0
    def day(self, mode='daily'):
        path = conf('day')  # user-defined input save path
        if mode == 'daily':
            yesterday = str(datetime.date.today() - datetime.timedelta(days=1))
        else:
            yesterday = str(datetime.date.today() -
                            datetime.timedelta(days=1)) + '_r18'
        path = os.path.join(path, yesterday)
        self._day_parser(path, mode)
Example no. 37
0
def connectAndStartCam(data):
    c = conf()
    basic_url = "http://%s:%s/camera/manualCamera" % (c.get('cam_ip'),c.get('cam_port'))
    url = basic_url
    if data != None:
        url += "?%s" % urlencode(data)
            
    connection = urlopen(url, timeout=timeout)
    return connection
Example no. 38
0
    def post(self, dist=None, package=None, action=None):
        repo = Repository(conf('repository.path'))
        if not dist or not package or not action:
            return Response(status=405)

        if action == 'copy':
            if not 'dstdist' in self.request.params:
                return Response(status=400, body='A required parameter, dstdist is missing')
            repo.copy_package(dist, self.request.params['dstdist'], package)
            return Response(status=200)
Example no. 39
0
    def __init__(self, canal, pin_negatif, pin_positif):
        super().__init__(0, 4095, canal)
        if conf("debug.controle"):
            print("DCMoteur: créé : ", self)
        self._pin_negatif = pin_negatif
        self._pin_positif = pin_positif
        self._sens = True
        self._changement_sens = True
        GPIO.setup(self._pin_negatif, GPIO.OUT, initial=GPIO.LOW)
        GPIO.setup(self._pin_positif, GPIO.OUT, initial=GPIO.LOW)
Example no. 40
0
    def sign(self, dist):
        self._reprepro('export %s' % dist)

        gpg = GPG()
        filename = os.path.join(self.path, 'dists/%s/Release' % dist)
        detach_file = filename + '.gpg'
        try:
            os.unlink(detach_file)
        except: pass
        result = gpg.sign_file(file(filename, 'r'), keyid=conf('repository.signkey'), outputfile=detach_file)
Example no. 41
0
    def delete(self, dist=None, package=None, action=None):
        repo = Repository(conf('repository.path'))
        if action:
            return Response(status=405, body='You cannot delete an action')
        if not dist or not package:
            return Response(status=400, body='You must specify a dist and package to delete from it')

        result = repo.remove_package(dist, package)
        if result:
            return Response(status=404, body=result)
        return Response(status=200)
Example no. 42
0
def distanceDecay(start_point, data, radius):
    import config

    connection_string = config.conf()

    # first turning point (.25 miles)
    x1 = 402.336
    y1 = 1
    #second turning point (1 mile)
    x2 = float(radius) * 1000  #1609.34
    y2 = 0
    #in order to get rid of division by zero when x1=x2
    #the point is when x2<x1 (threshold) then all the weights will be 1
    if x1 == x2:
        x2 -= 1
    #linear equation
    m = (y2 - y1) / float(x2 - x1)
    b = y2 - m * x2
    conn = psycopg2.connect(connection_string)
    cur = conn.cursor()
    start_point = start_point.split(',')
    longitude = start_point[1]
    latitude = start_point[0]
    start_point = 'POINT (%s %s)' % (longitude, latitude)
    data_json = json.loads(data)
    ref_poi_list = {}
    for item in data_json['features']:
        location = item['geometry']['coordinates']
        location = 'POINT (%s %s)' % (location[0], location[1])
        poi_type = str(item['properties']['type'])

        #check if the requirement is met (true/false)
        _isContinued = check_no_of_poi(ref_poi_list, poi_type)
        if _isContinued:
            selectQuery = "SELECT ST_Distance(ST_Transform(ST_GeomFromText(%s, 4326), 3776), " \
                          "ST_Transform(ST_GeomFromText(%s, 4326), 3776));"
            parameters = [start_point, location]
            cur.execute(selectQuery, parameters)
            rows = cur.fetchall()
            distance = rows[0][0]
            if distance <= x1:
                weight = 1
            elif distance < x2:
                weight = m * distance + b
            else:
                weight = 0
            if poi_type in ref_poi_list:
                ref_poi_list[poi_type].append(weight)
                ref_poi_list[poi_type].sort(reverse=True)
            else:
                ref_poi_list[poi_type] = ''.split()
                ref_poi_list[poi_type].append(weight)
                ref_poi_list[poi_type].sort(reverse=True)
    return ref_poi_list
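The weighting above is a linear ramp between the two turning points: weight 1 within x1 metres, 0 beyond x2 metres, and m * distance + b in between. A quick check of that ramp with the default x1 = 402.336 m and an illustrative 1 km radius (x2 = 1000 m):

x1, y1 = 402.336, 1.0
x2, y2 = 1000.0, 0.0
m = (y2 - y1) / (x2 - x1)   # slope of the decay line
b = y2 - m * x2             # intercept

for d in (100.0, 402.336, 700.0, 1000.0, 1500.0):
    weight = 1.0 if d <= x1 else (m * d + b if d < x2 else 0.0)
    print("%7.1f m -> weight %.3f" % (d, weight))
# 100 m and 402.336 m keep weight 1.0, 700 m decays to about 0.50,
# and anything at or past 1 km drops to 0.0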
Example no. 43
0
def connectToCommand(data=None):
    c = conf()
    # TODO: no hardcoded values
    # move ae_service to config.py as variable
    # move vi_basename to config.py as variable
    url = "http://%s:%s/%s/%s" % (c.get('ae_ip'),c.get('ae_port'),c.get('ae_service'),c.get('ae_viname'))
    if data != None:
        # this converts dict to ?key1=value1&key2=value2&...
        url += "?%s" % urlencode(data)
    connection = urlopen(url, timeout=timeout)
    return connection
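For reference, the urlencode call mentioned in the comment turns the data dict into the ?key1=value1&key2=value2 query string appended to the URL. A tiny illustration (this snippet appears to be Python 2, where urlencode lives in urllib; on Python 3 it is urllib.parse.urlencode):

from urllib import urlencode  # Python 2; use "from urllib.parse import urlencode" on Python 3

data = {"rampRate": 5, "rampAmplitude": 12}
print("?%s" % urlencode(data))  # e.g. ?rampAmplitude=12&rampRate=5 (key order may vary)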
Example no. 44
0
def generate(env):
    print '---- Loading Config: ' + repr(env) + ' -----'
    c = config.conf(env)
    # In real life the loaded config could be used to do template substitution in a weblogic config.xml.template
    # Now we just print some env specific values
    print 'c.ms_suffixes:', repr(c.ms_suffixes)
    print 'c.admin_server:', repr(c.admin_server)
    print 'c.managed_servers:', repr(c.managed_servers)
    print 'c.datasources:', repr(c.datasources)
    print '---- Loaded: ' + repr(c.selected_env) + ' -----'
    print
Example no. 45
0
def load(app, execution=True, redis_session=False, mongo_database=None, name=None, models=None):
    global APP

    load_all()
    debug = config.conf("DEBUG", False)
    redis_url = config.conf("REDISTOGO_URL", None)
    mongo_url = config.conf("MONGOHQ_URL", None)
    smtp_host = config.conf("SMTP_HOST", None)
    smtp_user = config.conf("SMTP_USER", None)
    smtp_password = config.conf("SMTP_PASSWORD", None)

    if not debug and name:
        start_log(app, name)
    if redis_url:
        redisdb.url = redis_url
    if mongo_url:
        mongodb.url = mongo_url
    if smtp_host:
        mail.SMTP_HOST = smtp_host
    if smtp_user:
        mail.SMTP_USER = smtp_user
    if smtp_password:
        mail.SMTP_PASSWORD = smtp_password
    if execution:
        start_execution()
    if redis_session:
        app.session_interface = session.RedisSessionInterface(url=redis_url)
    if mongo_database:
        mongodb.database = mongo_database
    if models:
        setup_models(models)
    app.request_class = request.Request
    APP = app
Example no. 46
0
    def post(self, gitpath, gitrepo):
        if not 'ref' in self.request.params:
            return Response(status=400, body='Required parameter "ref" is missing. You must pass a git tag, branch, or commit ID to be built.\n')

        gitpath = os.path.join(conf('buildbot.gitpath.%s' % gitpath), gitrepo)
        ref = self.request.params['ref']
        cburl = self.request.params.get('cburl', None)
        submodules = self.request.params.get('submodules', None)

        buildid = uuid.uuid4().hex

        build_worker(gitpath, ref, buildid, cburl, submodules)
        return Response(status=200, body=buildid + '\n')
Example no. 47
0
    def post(self):
        repo = Repository(conf('repository.path'))
        dist = {
            'Version': '5.0',
            'Architectures': 'amd64 source any',
            'Components': 'main contrib non-free',
            'Description': 'Default package repository',
        }
        dist.update(json.loads(self.request.body))
        for field in ['Origin', 'Label', 'Suite', 'Codename']:
            if field not in dist:
                return Response(status=400, body='Required field %s is missing.' % field)
        repo.create_dist(dist)
Example no. 48
0
    def get(self, buildid):
        builddir = os.path.join(conf('buildbot.buildpath'), buildid)
        if not os.path.exists(builddir):
            return Response(status=404, body='The build ID does not exist.\n')

        try:
            log = file('%s/build.log' % builddir, 'r').read()
        except:
            log = ''
        if not os.path.exists(builddir + '/package.tar.gz'):
            return Response(status=400, body='The build is not done yet.\n' + log)
        else:
            return Response(status=200, body='Build complete.\n' + log)
Example no. 49
0
    def get(self, buildid):
        builddir = os.path.join(conf('buildbot.buildpath'), buildid)
        if not os.path.exists(builddir):
            return Response(status=404, body='The build ID does not exist.\n')

        tarpath = os.path.join(builddir, 'package.tar.gz')
        if not os.path.exists(tarpath):
            return Response(status=400, body='The build is not done yet.\n')
        else:
            fd = file(tarpath, 'rb')
            data = fd.read()
            fd.close()
            return Response(status=200, body=data, content_type='application/x-tar-gz')
Example no. 50
0
def connectAndSendStart():
	'''
	TODO: this is the only function that is called 
	from cgi.

	This includes if-else for constant load, speed, etc
	'''
	c = conf()
	if c.get('ttm_creep_experiment') == True:
		data = {"load":	  c.get('ttm_load')}
	else:
		ret = ttm.connectAndInitRamp()
		ret = ttm.connectAndStartLogging()
		ret = ttm.connectAndStartRamp()
Example no. 51
0
    def post(self, dist=None, package=None, action=None):
        repo = Repository(conf('repository.path'))
        if not dist or not package or not action:
            return Response(status=405)

        if action == 'copy':
            if not 'dstdist' in self.request.params:
                return Response(status=400, body='A required parameter, dstdist is missing')

            if not repo.get_package(dist, package):
                return Response(status=404, body='Package %s not found' % package)

            repo.copy_package(dist, self.request.params['dstdist'], package)
            return Response(status=200)
Example no. 52
0
def connectAndStart():
    c = conf()
    data = {"offsetY":      c.get('cam_yoffset'),
            "offsetX":      c.get('cam_xoffset'),
            "fps":      c.get('cam_rawfps'),
            "width":      c.get('cam_width'),
            "height":      c.get('cam_height'),
            "testName":      c.get('g_measurementid'),
            "packSize":      c.get('cam_packetsize'),
            "index":      c.get('cam_index'),
            "exposure":      c.get('cam_exposure'),
            "parentFolder": c.get('cam_parentfolder')

            }
    return connectAndStartCam(data)
Example no. 53
0
    def post(self, dist):
        repo = Repository(conf('repository.path'))
        response = None

        basedir = '/tmp/repoman.upload/%s' % uuid.uuid4().hex
        os.makedirs(basedir)
        os.chdir(basedir)

        field = self.request.params['package']

        name = os.path.basename(field.filename)
        if not name.endswith('tar.gz') and not name.endswith('tar.bz2'):
            return Response(status=400, body='Packages must be uploaded as .tar.gz or tar.bz2 files containing .changes, .dsc, and .deb files')

        fd = file(name, 'wb')
        fd.write(field.value)
        fd.close()

        tf = tarfile.open(name, 'r|*')
        tf.extractall()
        changesfile = [x for x in os.listdir(basedir) if x.endswith('.changes')]
        if not changesfile:
            return Response(status=400, body='Tarball does not contain a .changes file')

        packages = []
        for changes in changesfile:
            changes = os.path.join(basedir, changes)
            stderr, stdout = repo.add_package(dist, changes)[0]
            if stdout:
                logging.debug('add_package: %s' % stdout)
            if stderr:
                logging.warning('add_package: %s' % stderr)
            for p in [x.split(': ', 1)[1].rstrip('\r\n').split(' ') for x in file(changes, 'r').readlines() if x.startswith('Binary: ')]:
                for bin in p:
                    pkg = repo.get_package(dist, bin)
                    packages.append(pkg)
        response = Response(status=200, body=dumps(packages))

        for dirpath, dirnames, filenames in os.walk(basedir):
            for filename in filenames:
                filename = os.path.join(dirpath, filename)
                os.remove(filename)
        os.rmdir(basedir)

        if not response:
            response = Response(status=500)
        return response
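The handler above expects a multipart POST whose "package" field is a .tar.gz or .tar.bz2 containing the .changes, .dsc, and .deb files. A client-side sketch of such an upload with pycurl, in the same style as the callback in the build_thread example (the endpoint URL is an assumption):

import pycurl

url = "http://localhost:8080/repository/stable"   # hypothetical repoman endpoint

req = pycurl.Curl()
req.setopt(req.POST, 1)
req.setopt(req.URL, url)
# Send the tarball as the multipart "package" field, as the handler expects.
req.setopt(req.HTTPPOST, [("package", (req.FORM_FILE, "package.tar.gz"))])
req.perform()
print("HTTP status:", req.getinfo(pycurl.RESPONSE_CODE))
req.close()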
Example no. 54
0
def generate(env):
    print('---- Instantiating config for env: ' + repr(env) + ' -----')
    c = config.conf(env)
    
    print('\n---- Printing entire config as "compact" json: -----')
    # json will dump property method values as well as multiconf property values
    # compact give a more human readable output
    print(c.json(compact=True))

    # In real life the loaded config could be used to do template substitution in a weblogic config.xml.template
    # Now we just print some env specific values
    print('\n---- Access config objects/properties: -----')
    print('c.admin_server:', c.admin_server.json(compact=True))
    # Repeatable objects are inserted in an ordered dict
    print('c.managed_servers["ms1"]:', c.managed_servers['ms1'].json())
    print('ms1.port:', c.managed_servers['ms1'].port)
    print('c.datasources:', c.datasources)
    print()
Example no. 55
0
def generate_rss_from_pubmed(input_string, feeds=50):
    """Returns the url of a rss generated at Pubmed by the queried string."""
    input_string = urllib.quote_plus(input_string)
    pubmed_url = conf("pubmed_search")
    url = "{}?term={}".format(pubmed_url, input_string)
    read = request_url(url, list_iter=False, verbose=False)

    if read:
        # parse info from the response html - quick and dirty
        hid = ''
        qk = ''
        line = read.read()
        # the field to parse is the HID and qk
        if 'data-hid="' in line:
            hid = line.split('data-hid="')[1]
            hid = hid.split('"')[0]
        if 'data-qk="' in line:
            qk = line.split('data-qk="')[1]
            qk = qk.split('"')[0]

        dataload = {'p$site': 'pubmed',
                    'p$rq': 'EntrezSystem2.PEntrez.PubMed.Pubmed_SearchBar.Entrez_SearchBar:CreateRssFeed',
                    'QueryKey': qk,
                    'Db': 'pubmed',
                    'RssFeedName': input_string,
                    'RssFeedLimit': str(feeds),
                    'HID': hid}
        dataload = urllib.urlencode(dataload)
        read = post_url(pubmed_url, dataload, verbose=False)
        if read:
            rss_guid = ''
            line = read.read()
            # the field to parse is rss_guid
            if "rss_guid=" in line:
                rss_guid = line.split("rss_guid=")[2]
                rss_guid = rss_guid.split("'")[0]
            # url = "{}erss.cgi?rss_guid={}".format(conf("pubmed_rss"), rss_guid)
            return rss_guid

        else:
            return 0
    else:
        return 0
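A hedged sketch of how this helper and the twitter_bot example above appear to fit together, assuming both live in the same module (the query string and feed count are illustrative):

rss_guid = generate_rss_from_pubmed("machine learning genomics", feeds=25)
if rss_guid:
    new_tweets = twitter_bot(rss_guid)
    print("queued %d tweets" % len(new_tweets))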
Example no. 56
0
    def get(self, dist, package):
        repo = Repository(conf('repository.path'))

        if dist and package:
            pkg = repo.get_package(dist, package)
            if not pkg:
                return Response(status=404, body=dumps([]))

            return Response(status=200, body=dumps(pkg))

        if dist and not package:
            result = repo.get_packages(dist).keys()
            if not result:
                return Response(status=404, body=dumps([]))
            return Response(status=200, body=dumps(result))

        if not dist:
            result = repo.get_dists()
            if not result:
                return Response(status=404, body=dumps([]))
            return Response(status=200, body=dumps(result))
Example no. 57
0
def connectAndStart():
	'''
	TODO: this is the only function that is called 
	from cgi.

	This includes if-else for constant load, speed, etc
	'''
	c = conf()
	data = {"protocolName":	  c.get('ttm_protocol'),
			"channelInt":	  c.get('ttm_channel'),
			"rampRateAE":	  c.get('ttm_ramprateae'),
			"rampRateSilent":	  c.get('ttm_rampratesilent'),
			"timeWindow":	  c.get('ttm_window'),
			"cycleLength":	  c.get('ttm_cyclelength'),
			"holdTime":	  c.get('ttm_holdtime'),
			"rampRate":	  c.get('ttm_ramprate'),
			"load":	  c.get('ttm_load'),
			"rampAmplitude":	  c.get('ttm_rampamplitude'),
			"expId":	c.get('g_measurementid')

			}
	ret = connectAndStartSwitch(data)
Example no. 58
0
def pointInCircle(start_point, radius):
    import config

    connection_string = config.conf()

    conn = psycopg2.connect(connection_string)
    cur = conn.cursor()
    radius = float(radius) * 1000  #in meters
    start_point = start_point.split(',')
    start_point = "SRID=4326;POINT(%s %s)" % (start_point[1], start_point[0])
    selectQuery = "SELECT id, ST_AsText(crime_location), crime_time, crime_type " \
                  "FROM cps_crime_data.crime_data " \
                  "WHERE ST_DWithin(ST_GeogFromText(%s), geography(crime_location), %s);"
    parameters = [start_point, radius]
    cur.execute(selectQuery, parameters)
    rows = cur.fetchall()
    if len(rows) > 0:
        result_json = '{"type": "FeatureCollection", "features": ['
        for i in xrange(len(rows)):
            id = rows[i][0]
            location = rows[i][1]
            location = location[6:].split(' ')
            longitude = location[0]
            latitude = location[1][:-1]
            location = "[%s,%s]" % (longitude, latitude)
            time = rows[i][2]
            time = "%s-%s-%s %s:%s:00" % (time.year, time.month, time.day, time.hour, time.minute)
            type = rows[i][3]
            result_json += '{"type": "Feature","geometry": {"type": "Point", "coordinates":%s}, "properties": {"id": %s, "time": "%s","type": "%s"}},' % (
                location, id, time, type)
        result_json = result_json[:-1]
        result_json += ']}'
    else:
        result_json = '"NULL"'
    conn.commit()
    cur.close()
    conn.close()
    return result_json
Example no. 59
0
def connectAndGetStatus():
	c = conf()
	responce = connectAndSend(c, createStatusMessage(c))
	# TODO: this is a hack that will be fixed on 
	# device side during summer 2011
	rDict = {}
	splittedResponce = responce.split('\n')
	for item in splittedResponce:
		splittedItem = item.split(':',1)
		rDict[splittedItem[0].strip()] = splittedItem[1].strip()
	if not rDict.has_key('Errors'):
		return 240
	if not rDict['Errors'] == '':
		return 240
	if not rDict.has_key('Satus'):
		return 240
	if rDict['Satus'].find('CAMERA IS STOPPED') >= 0:
		return 210
	if rDict['Satus'].find('CAMERA IS ON') >= 0:
		return 220
	if len(responce) == 0:
		return 240 
	return responce    
Example no. 60
0
def calculateArea(polygon):
    polygonJSON = json.loads(polygon)
    finalPolygon = "POLYGON(("
    for point in polygonJSON['coordinates'][0]:
        longitude = point[0]
        latitude = point[1]
        vertex = "%s %s" % (longitude, latitude)
        finalPolygon += "%s," % (vertex,)
    finalPolygon = finalPolygon[:-1]
    finalPolygon += "))"
    import config

    connection_string = config.conf()
    conn = psycopg2.connect(connection_string)
    cur = conn.cursor()
    select_query = "SELECT ST_Area(ST_Transform(ST_GeomFromText(%s, 4326), 3776));"
    parameters = [finalPolygon]
    cur.execute(select_query, parameters)
    rows = cur.fetchone()
    conn.commit()
    cur.close()
    conn.close()
    return rows[0]