Example #1
def daemon(region, storage_prefix, profile_name, create_queue_name,
           mandatory_policy_arn):    
    logging.basicConfig(
        level=logging.DEBUG, stream=sys.stderr,
        format=("%(asctime)s %(filename)s:%(lineno)s [%(levelname)s]: "
                "%(message)s"))
    logging.getLogger("boto").setLevel(logging.WARNING)

    create = RoleCreationDaemon(region=region, storage_prefix=storage_prefix,
                                profile_name=profile_name,
                                create_queue_name=create_queue_name,
                                mandatory_policy_arn=mandatory_policy_arn)

    log = logging.getLogger("rolemaker.daemon")
    log.debug("Starting creation daemon")
    create.start()

    # Sleep until we get an exit request.
    try:
        while True:
            sleep(1)
    except (KeyboardInterrupt, SystemExit):
        pass
    finally:
        log.info("Exit requested; waiting for daemon to terminate.")
        create.exit_requested = True
        create.join()

    log.info("Daemon terminated after processing %d requests." %
             create.requests_processed)
    return 0
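A minimal sketch of how daemon() above might be wired to a command-line entry point, assuming argparse; the flag names simply mirror the function's parameters and are not taken from the original project.

import argparse
import sys

def main():
    # Hypothetical CLI wrapper; flag names mirror daemon()'s parameters.
    parser = argparse.ArgumentParser(description="Run the role-creation daemon")
    parser.add_argument("--region", required=True)
    parser.add_argument("--storage-prefix", required=True)
    parser.add_argument("--profile-name", default=None)
    parser.add_argument("--create-queue-name", required=True)
    parser.add_argument("--mandatory-policy-arn", default=None)
    args = parser.parse_args()
    return daemon(args.region, args.storage_prefix, args.profile_name,
                  args.create_queue_name, args.mandatory_policy_arn)

if __name__ == "__main__":
    sys.exit(main())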
Example #2
def log_header(paths, log):
    """
    Method to create header for MAST-ML logger

    Args:

        paths: (list), list containing strings of path locations for config file, data file, and results folder

        log: (logging object), a python log

    Returns:

        None

    """
    logo = textwrap.dedent(f"""\
           __  ___     __________    __  _____
          /  |/  /__ _/ __/_  __/___/  |/  / /
         / /|_/ / _ `/\ \  / / /___/ /|_/ / /__
        /_/  /_/\_,_/___/ /_/     /_/  /_/____/
    """)

    date_time = time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime())
    header = (f"\n\n{logo}\n\nMAST-ML run on {date_time} using \n"
              f"conf file: {os.path.basename(paths[0])}\n"
              f"csv file:  {os.path.basename(paths[1])}\n"
              f"saving to: {os.path.basename(paths[2])}\n\n")

    # only shows on stdout and log.log
    log.info(header)
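A minimal usage sketch for log_header(), assuming the function above and its imports (textwrap, time, os) are available; the logger name and the three paths are placeholders, not files from the original project.

import logging

logging.basicConfig(level=logging.INFO)
log = logging.getLogger("mastml")

# paths = [config file, data file, results folder], as documented above.
paths = ["settings.conf", "input_data.csv", "results/run_01"]
log_header(paths, log)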
Example #3
def train_ftrl_model():
    X_train, y_train = load_processed_data(pathify('data', 'processed',
                                                   'avazu-cv-train.csv'),
                                           label_col='click')
    X_val, y_val = load_processed_data(pathify('data', 'processed',
                                               'avazu-cv-val.csv'),
                                       label_col='click')

    params = {
        'alpha': 0.1,  # learning rate
        'beta': 1,  # smoothing parameter for adaptive learning rate
        'L1': 1,  # L1 regularization, larger value means more regularized
        'L2': 1,  # L2 regularization, larger value means more regularized
        'num_categories':
        2**16,  # make sure it is the same value with make_features.py
    }
    ftrl = ftrl_proximal(**params)
    ftrl.fit(X_train, y_train, X_val, y_val)

    y_pred = []
    for x_val in list(X_val.values):
        p = ftrl.predict(x_val)
        y_pred.append(p)
    y_pred = np.array(y_pred)
    auc_score = cal_auc(y_val, y_pred)
    log.info("auc_score: {:.4f}".format(auc_score))

    log_loss = cal_logloss(y_val, y_pred)
    log.info("log_loss: {:.4f}".format(log_loss))

    save_pickle(ftrl, pathify('models', 'avazu-ftrl.pickle'))
    return ftrl
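A hedged sketch of reloading the persisted model with the standard pickle module, assuming save_pickle() above writes an ordinary pickle file and that pathify('models', 'avazu-ftrl.pickle') resolves to the relative path used below.

import pickle

# Assumed path layout; adjust to wherever pathify('models', 'avazu-ftrl.pickle') points.
with open("models/avazu-ftrl.pickle", "rb") as fh:
    ftrl = pickle.load(fh)

# Score one preprocessed feature row exactly as in the validation loop above:
# p = ftrl.predict(x_val)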
Example #4
def meshstats(db, m):
    log.info("calculating mesh stats %s", m)
    mid = db.meshfile(m.filename)
    cur = db.conn.cursor()
    cur.execute(
        """
    UPDATE meshfiles
    SET time=?, size=?, entropy=?
    WHERE id=?
    """, (m.timestamp, len(m), m.entropy, mid))
def rmShutdownMachine(exitCode):
    cmd = "poweroff"

    log.info("SHUTTING DOWN MACHINE...")

    if cmd:
        try:
            log.info(cmd)
            os.system(cmd)
        except Exception as e:
            log.error(e)
Example #7
    def retrievePageContent(self, pageid, anon):
        self.abort = False
        self.anon = anon
        graph = self.graph
        log = self._log
        pageinfo = graph.get_object(pageid)
        log.info("Processing page \"%s\" (id %s, category %s, likes: %s)" % (pageinfo["username"], pageinfo["id"], pageinfo["category"], pageinfo["likes"]))

        try:
            pagefeed = graph.get_object(pageid + "/feed")
            self.processFeed(pagefeed)
        except Exception as e:
            self._log.warn(e)
            raise e
Example #8
def build_name2codepoint_dict():
    """
        Builds name to codepoint dictionary
        copy and paste the output to the name2codepoint dictionary
        name2str - name to utf-8 string dictionary
    """
    name2str = html_entity2str
    for k, v in htmlentitydefs.name2codepoint.items():
        name2str[k.lower()] = unichr(v).encode('utf-8')
    for key in sorted(name2str.keys()):
        value = name2str[key]
        log.info("    '{0}': 0x{1:0>4x}, # {2}".format(
            key,
            ord(value.decode('utf-8')),
            value,
        ))
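The snippet above targets Python 2 (htmlentitydefs, unichr, explicit UTF-8 encoding). A hedged Python 3 equivalent, with the project's html_entity2str table stubbed out as an empty dictionary, might look like this:

import html.entities
import logging

log = logging.getLogger(__name__)
html_entity2str = {}  # placeholder for the project's extra entity table

def build_name2codepoint_dict_py3():
    # html.entities.name2codepoint replaces htmlentitydefs.name2codepoint,
    # and chr() replaces unichr(); strings no longer need explicit encoding.
    name2str = dict(html_entity2str)
    for name, codepoint in html.entities.name2codepoint.items():
        name2str[name.lower()] = chr(codepoint)
    for key in sorted(name2str):
        value = name2str[key]
        log.info("    '{0}': 0x{1:0>4x}, # {2}".format(key, ord(value), value))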
Example #10
def security_stopprofit(context,profit=0.1,maTime=5,maProfit=0.02):
    if len(context.portfolio.positions)>0:
        for stock in context.portfolio.positions.keys():
            avg_cost = context.portfolio.positions[stock].avg_cost
            # Current latest price
            current_price = context.portfolio.positions[stock].price
            # Time of the most recent trade
            last_buy_time = context.portfolio.positions[stock].transact_time
            # Highest closing price of the stock from the last trade time until now
            max_close_price = getMaxClose(stock, last_buy_time)
            # Whether the price has broken below the rising trend line for more than 3 periods
            cross_after3_flag = getCrossAfterStatus(stock, last_buy_time, 3)
            # Current moving-average (MA) value for the stock
            ma = getMaValue(stock, maTime)
            # Take-profit checks
            if (current_price/max_close_price - 1 >= profit) or (current_price/ma - 1 >= maProfit) or cross_after3_flag:
                log.info(str(stock) + ' hit the take-profit line, closing position to take profit!')
                order_target_value(stock, 0)
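A small numeric illustration of the take-profit test above; the prices and moving-average value are made up.

current_price = 11.2
max_close_price = 10.0   # highest close since the last buy
ma = 11.0                # current moving-average value
profit, maProfit = 0.1, 0.02

# 11.2/10.0 - 1 = 0.12 >= 0.1, so the first condition alone triggers the exit;
# 11.2/11.0 - 1 is roughly 0.018 < 0.02, so the MA condition would not.
triggered = (current_price/max_close_price - 1 >= profit) or (current_price/ma - 1 >= maProfit)
print(triggered)  # True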
Example #11
    def processFeed(self, pagefeed):

        graph = self.graph
        log = self._log

        self.maxpages = self.maxpages - 1
        if self.maxpages <= 0:
            self.abort = True
            log.info("Not fetching more pages. Maximum exceeded.")

        self.pagecount = self.pagecount + 1
        try:
            nextpage = pagefeed["paging"]["next"]

            if nextpage.startswith("https://graph.facebook.com/"):
                print(nextpage)
                nextpage = urlparse.urlparse(nextpage)
                qs = cgi.parse_qs(nextpage.query)
                print(qs)
                #del qs['access_token']
                nextpage = nextpage.path #+ "?" + urllib.urlencode(qs, True)
                nextpage = nextpage[1:]
                nextpage_args = qs

        except KeyError:
            # no next page
            log.info("Hit last page. Aborting.")
            self.abort = True

        pagedata = pagefeed["data"]
        lpd = len(pagedata)
        log.info("Processing %s feed items" % lpd)
        self.addData(pagedata, self.posts)

        if lpd == 0:
            log.info("Hit empty data response. Aborting.")
            self.abort = True

        if not self.abort:
            log.info("Requesting next page of data <%s>" % nextpage)
            pagefeed = graph.request(nextpage, nextpage_args)
            time.sleep(1)
            self.processFeed(pagefeed)
def rmRebootMachineOrApp(machine, exitCode):
    #--------------------------------------------------
    # Restart the machine/app.
    cmd = None

    if machine:
        log.info("REBOOTING MACHINE...")
        cmd = "reboot"
    else:
        log.info("RESTARTING APP...")

        restartScript = os.path.abspath(
            os.path.join(os.path.dirname(os.path.abspath(__file__)), "..",
                         "restartSelf.sh"))
        params = [restartScript, str(os.getpid())] + sys.argv
        if params[-1] == "&":
            del params[-1]
        cmd = '"' + '" "'.join(params) + '" &'

    if cmd:
        try:
            log.info(cmd)
            os.system(cmd)
        except Exception as e:
            log.error(e)
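An alternative sketch of the app-restart branch above using subprocess instead of os.system, which sidesteps the manual shell quoting; the function name is hypothetical and the path construction mirrors the code above.

import os
import subprocess
import sys

def rm_restart_app_subprocess():
    # Same path construction as above: ../restartSelf.sh relative to this module.
    restart_script = os.path.abspath(
        os.path.join(os.path.dirname(os.path.abspath(__file__)), "..",
                     "restartSelf.sh"))
    params = [restart_script, str(os.getpid())] + sys.argv
    if params and params[-1] == "&":
        del params[-1]
    # Popen takes the argument list directly, so no quote-joining is needed.
    subprocess.Popen(params)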
Example #14
def restrain_atom_distances_to_template(session,
                                        template_residues,
                                        restrained_residues,
                                        protein=True,
                                        nucleic=True,
                                        custom_atom_names=[],
                                        distance_cutoff=8,
                                        alignment_cutoff=5,
                                        well_half_width=0.1,
                                        kappa=10,
                                        tolerance=0.025,
                                        fall_off=2,
                                        display_threshold=None,
                                        adjust_for_confidence=False,
                                        use_coordinate_alignment=True,
                                        confidence_type='pae',
                                        pae_matrix=None):
    r'''
    Creates a "web" of adaptive distance restraints between nearby atoms,
    restraining one set of residues to the same spatial organisation as another.

    Args:
        * template_residues:
            - a list of :class:`chimerax.atomic.Residues` instances. If
              :attr:`restrained_residues` is not identical to
              :attr:`template_residues`, then each :class:`Residues` should be
              from a single chain. Residues need not be contiguous.
        * restrained_residues:
            - a list of :class:`chimerax.atomic.Residues` instances. Must be
              the same length as :attr:`template_residues`, with a 1:1
              correspondence between chains. Chains will be aligned individually
              to get the subset of matching residues, but original coordinates
              will be used for the purposes of assigning restraints.
        * protein (default = True):
            - Restrain protein conformation? If True, a pre-defined set of
              useful "control" atoms (CA plus the first two heavy atoms along
              each sidechain) will be added to the restraint network.
        * nucleic (default = True):
            - Restrain nucleic acid conformation? If True, key atoms defining
              the nucleic acid backbone and base pairing will be added to the
              restraint network.
        * custom_atom_names(default = empty list):
            - Provide the names of any other atoms you wish to restrain (e.g.
              ligand atoms) here.
        * distance_cutoff (default = 8):
            - for each CA atom in `restrained_residues`, a distance restraint
              will be created between it and every other CA atom where the
              equivalent atom in `template_residues` is within `distance_cutoff`
              of its template equivalent.
        * alignment_cutoff (default = 5):
            - distance cutoff (in Angstroms) for rigid-body alignment of model
              against template. Residues with a CA RMSD greater than this
              value after alignment will not be restrained. Ignored if `use_coordinate_alignment`
              is `False`.
        * well_half_width (default = 0.1):
            - distance range (as a fraction of the square root of target distance) within which
              the restraint will behave like a normal harmonic restraint.
              The applied force will gradually taper off for any restraint
              deviating from (target + tolerance) by more than this amount.
        * kappa (default = 10):
            - defines the strength of each restraint when the current distance
              is within :attr:`well_half_width` of the target +/-
              :attr:`tolerance`. The effective spring constant is
              :math:`k=\frac{\kappa}{(\text{well\_half\_width}*\text{target distance})^2}`
              in :math:`kJ mol^{-1} nm^{-2}`.
        * tolerance (default = 0.025):
            - half-width (as a fraction of the target distance) of the "flat
              bottom" of the restraint profile. If
              :math:`abs(distance-target) < tolerance * target`,
              no restraining force will be applied.
        * fall_off (default = 2):
            - Sets the rate at which the energy function will fall off when the
              distance deviates strongly from the target, as a function of the
              target distance. The exponent on the energy term at large
              deviations from the target distance will be set as
              :math:`\alpha = -1 -\text{fall\_off} ln(\text{target})`. In other
              words, long-distance restraints are treated as less confident than
              short-distance ones.
        * display_threshold (default = None):
            - deviation from (target +- tolerance) as a fraction of
              :attr:`well_half_width` below which restraints will be hidden.
        * use_coordinate_alignment (default = True):
            - if True, reference and template residues will be matched by progressively breaking down
              the models into groups that approximately align as rigid bodies. This is usually the 
              preferable approach, but fails in cases where the working model is badly wrong (e.g. has 
              large register errors). If False, residues will be matched strictly by sequence alignment.
              This may be preferable when restraining against an AlphaFold model.
        * adjust_for_confidence (default = False):
            - if true, interpret B-factors of the template atoms as a confidence score, the 
              definition of which is controlled by :attr:`confidence_type`, and adjust 
              :attr:`kappa`, :attr:`tolerance` and :attr:`fall_off` according to the mean 
              confidence for each restrained atom pair.
        * confidence_type (default = "pae"):
            - the type of confidence score used. Current options are "pae" (predicted aligned error) and
              "plddt" (predicted local distance difference test). For the "pae" option, multi-chain template 
              models are NOT currently supported.
        * pae_matrix (default = None):
            - used if adjust_for_confidence is True and confidence_type is "pae". If the reference model
              was downloaded from the AlphaFold database, leave this argument as None and the relevant PAE
              matrix will be automatically fetched. Otherwise, the matrix should be a 2D Numpy array with entry (i,j) 
              equal to the PAE of residue number i relative to residue number j. 

    '''
    from chimerax.std_commands.align import IterationError
    from chimerax.isolde import session_extensions as sx
    import numpy
    if not protein and not nucleic and not len(custom_atom_names):
        raise UserError('Nothing to restrain!')
    # if len(template_residues) != len(restrained_residues):
    #     raise TypeError('Template and restrained residue arrays must be the same length!')
    for rrs in restrained_residues:
        if len(rrs) == 0:
            raise UserError('No residues specified to restrain!')
        restrained_us = rrs.unique_structures
        if len(restrained_us) != 1:
            raise UserError('Restrained residues must be from a single model!')
    for trs in template_residues:
        if len(trs) == 0:
            raise UserError('No template residues specified!')
        template_us = trs.unique_structures
        if len(template_us) != 1:
            raise UserError('Template residues must be from a single model! '
                            'Residues are {} in {}'.format(
                                trs.numbers,
                                ','.join(s.id_string for s in trs.structures)))
    restrained_model = restrained_us[0]
    from ..atomic.util import correct_pseudosymmetric_sidechain_atoms
    correct_pseudosymmetric_sidechain_atoms(session, restrained_model.residues)
    for tu in template_us:
        if tu != restrained_model:
            correct_pseudosymmetric_sidechain_atoms(session, tu.residues)
    log = restrained_model.session.logger
    if adjust_for_confidence and confidence_type not in ('plddt', 'pae'):
        raise UserError('confidence_type must be one of ("plddt", "pae")!')

    if adjust_for_confidence:
        if confidence_type == 'pae':
            chains = set()
            for trs in template_residues:
                chains.update(set(trs.unique_chain_ids))
            if len(chains) != 1:
                raise UserError(
                    'Weighting according to PAE is currently only supported for single-chain templates!'
                )
            if pae_matrix is None:
                from chimerax.isolde.reference_model.alphafold import alphafold_id, fetch_alphafold_pae
                tm = template_residues[0].unique_structures[0]
                aid = alphafold_id(tm)
                if aid is None:
                    raise UserError(
                        f'Template model #{tm.id_string} does not appear to be from the AlphaFold server. You will need to provide a PAE matrix separately.'
                    )
                uniprot_id = aid.split('-')[1]
                try:
                    pae_matrix = fetch_alphafold_pae(session, uniprot_id)
                except Exception:
                    raise UserError(
                        f'Failed to fetch the PAE matrix for template model #{tm.id_string}!'
                    )
            # Symmetrify the PAE matrix by taking the lowest value for each (i,j) pair
            pae_matrix = numpy.minimum(pae_matrix, pae_matrix.T)
        elif confidence_type == 'plddt':
            confidence_multiplier = plddt_multiplier(template_us.atoms)

    adrm = sx.get_adaptive_distance_restraint_mgr(restrained_model)
    if display_threshold is not None:
        adrm.display_threshold = display_threshold
    from chimerax.geometry import find_close_points, distance
    from chimerax.atomic import concatenate

    atom_names = []
    if protein:
        atom_names.extend(['CA', 'CB', 'CG', 'CG1', 'OG', 'OG1'])
    if nucleic:
        atom_names.extend([
            "OP1", "OP2", "C4'", "C2'", "O2", "O4", "N4", "N2", "O6", "N1",
            "N6", "N9"
        ])
    atom_names.extend(custom_atom_names)

    import numpy
    atom_names = set(atom_names)

    def apply_restraints(trs, rrs, adjust_for_confidence, confidence_type):
        template_as = []
        restrained_as = []
        for tr, rr in zip(trs, rrs):
            ta_names = set(tr.atoms.names).intersection(atom_names)
            ra_names = set(rr.atoms.names).intersection(atom_names)
            common_names = list(ta_names.intersection(ra_names))
            template_as.extend([tr.find_atom(name) for name in common_names])
            restrained_as.extend([rr.find_atom(name) for name in common_names])
            # template_as.append(tr.atoms[numpy.in1d(tr.atoms.names, common_names)])
            # restrained_as.append(rr.atoms[numpy.in1d(rr.atoms.names, common_names)])
        from chimerax.atomic import Atoms
        template_as = Atoms(template_as)
        restrained_as = Atoms(restrained_as)

        template_coords = template_as.coords
        from math import sqrt
        for i, ra1 in enumerate(restrained_as):
            query_coord = numpy.array([template_coords[i]])
            indices = find_close_points(query_coord, template_coords,
                                        distance_cutoff)[1]
            indices = indices[indices != i]
            for ind in indices:
                ra2 = restrained_as[ind]
                if ra1.residue == ra2.residue:
                    continue
                if adjust_for_confidence:
                    if confidence_type == 'plddt':
                        scores = [
                            template_as[i].bfactor * confidence_multiplier,
                            template_as[ind].bfactor * confidence_multiplier
                        ]
                    elif confidence_type == 'pae':
                        scores = [
                            pae_matrix[template_as[i].residue.number - 1,
                                       template_as[ind].residue.number - 1]
                        ]
                    kappa_adj, tol_adj, falloff_adj = adjust_distance_restraint_terms_by_confidence(
                        scores, confidence_type)
                    if kappa_adj == 0:
                        continue
                else:
                    kappa_adj = tol_adj = 1
                    falloff_adj = 0
                try:
                    dr = adrm.add_restraint(ra1, ra2)
                except ValueError:
                    continue
                dist = distance(query_coord[0], template_coords[ind])
                dr.tolerance = tolerance * dist * tol_adj
                dr.target = dist
                dr.c = max(sqrt(dist) * well_half_width, 0.1)
                #dr.effective_spring_constant = spring_constant
                dr.kappa = kappa * kappa_adj
                from math import log
                dr.alpha = -1 - fall_off * log(
                    (max(dist - 1, 1))) - falloff_adj
                dr.enabled = True

    if all(trs == rrs
           for trs, rrs in zip(template_residues, restrained_residues)):
        # If the template is identical to the model, we can just go ahead and restrain

        return [
            apply_restraints(trs, rrs, adjust_for_confidence, confidence_type)
            for trs, rrs in zip(template_residues, restrained_residues)
        ]
    else:
        if not use_coordinate_alignment:
            atrs, arrs = sequence_align_all_residues(session,
                                                     template_residues,
                                                     restrained_residues)
            return apply_restraints(atrs, arrs, adjust_for_confidence,
                                    confidence_type)
        else:
            # If we're not simply restraining the model to itself, we need to do an
            # alignment to get a matching pair of residue sequences
            tpa, rpa = paired_principal_atoms(
                sequence_align_all_residues(session, template_residues,
                                            restrained_residues))
            from chimerax.std_commands import align
            while len(tpa) > 3 and len(rpa) > 3:
                try:
                    ta, ra, *_ = align.align(session,
                                             tpa,
                                             rpa,
                                             move=False,
                                             cutoff_distance=alignment_cutoff)
                except IterationError:
                    log.info(
                        ('No further alignments of 3 or more residues found.'))
                    break
                tr, rr = [ta.residues, ra.residues]
                apply_restraints(tr, rr, adjust_for_confidence,
                                 confidence_type)
                tpa = tpa.subtract(ta)
                rpa = rpa.subtract(ra)
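A short worked example of the restraint maths given in the docstring above, using the default parameters and an illustrative 5 Angstrom target distance (values chosen purely for illustration).

from math import log

kappa = 10             # default restraint strength
well_half_width = 0.1  # default well half-width parameter
tolerance = 0.025
fall_off = 2
target = 5.0           # Angstroms, example target distance

# Effective spring constant from the docstring: k = kappa / (well_half_width * target)**2
k = kappa / (well_half_width * target) ** 2      # kJ mol^-1 nm^-2
# Exponent applied at large deviations: alpha = -1 - fall_off * ln(target)
alpha = -1 - fall_off * log(target)
# No force is applied while abs(distance - target) < tolerance * target
flat_bottom = tolerance * target

print(f"k = {k:.1f} kJ mol^-1 nm^-2, alpha = {alpha:.2f}, flat bottom = +/-{flat_bottom:.3f} A")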
Example #15
		else:
			Weights,Delta_W=mutate(Weights,fully_connected=True)

########### Feed Forward for all training inputs in AB ######################################
	costs=[]
	results=[]
	for i,AB_ in enumerate(AB):
		activations=feed_forward(AB_)
		results.append(activations[-1])
		costs.append(cross_entropy(results[i],Soll[i]))

	cost_means.append(np.mean(costs))

	if ii%50==0:
		log.out("Mean Cost:",np.mean(costs))
		log.info("Completed",ii/float(steps)*100,"%")
################################################################################################

######################## Plot ###########################################
plt.plot(cost_means)
# plt.figure()
# plt.imshow(np.diag(Weights[0]).reshape(28,28),interpolation="nearest")
# plt.colorbar()
ys=get_y(num_neurons)
plt.show()

if True:
	plt.figure()
	log.start("Plot")
	W_max=-1e10
	for i in range(len(Weights)):
def go(node):
    log.info("Runner started")
    run(node)
    log.info("Runner finished")
    log.info("Variable values at the end:\n" + '\n'.join(str(k) + " = " + str(v) for k, v in V.items()))
Example #17
def wlx_rule(context,stock):
    aa=g.aadataframe[stock]
    upserise=aa[1]
    downseries=aa[0]
    spread=upserise-downseries
    spread = np.sign(spread)
    spread = spread - spread.shift(1)
    if spread.iloc[-1]==2:
        # 1 is the buy signal
        print(aa.tail(3))
        return 1
    elif spread.iloc[-1]==-2:
        # -1 is the sell signal
        print(aa.tail(3))
        return -1
    else:
        return 0
    '''
    aa = g.aadataframe.tail(1)
    aa = aa.T
    upserise = aa.xs(1,level=1)
    downseries = aa.xs(0,level=1)
    upserise.columns=['value']
    downseries.columns=['value']
    decison=upserise - downseries
    decison.columns=['spread']
    #print('columns:',decison.info())
    #print(decison.sort_values(axis=1,ascending=False,inplace=True))
    stocklist = decison.sort_values(by=['spread'],ascending=False).head(10)
    stocklist.columns=['spread']
    #print(stocklist)
    #print('I have:',context.portfolio.positions.keys())
    holding = context.portfolio.positions.keys()
    for value in holding:
        if value in decison.index and decison.loc[value,'spread']<0 or upserise.loc[value,'value']<0 or downseries.loc[value,'value']>0:
            order_target(value, 0)
    for value in stocklist.index:
        if stocklist.loc[value,'spread']>0 and g.waitdays<0 and enter_trend(value,5) and upserise.loc[value,'value']>0 and downseries.loc[value,'value']<0:
            order_target_value(value, 20000)
    #if g.long_position<g.slot
    '''

def trade(context):
    '''
    aa = g.aadataframe.tail(1)
    aa = aa.T
    upserise = aa.xs(1,level=1)
    downseries = aa.xs(0,level=1)
    upserise.columns=['value']
    downseries.columns=['value']
    decison=upserise - downseries
    decison.columns=['spread']
    #print('columns:',decison.info())
    #print(decison.sort_values(axis=1,ascending=False,inplace=True))
    stocklist = decison.sort_values(by=['spread'],ascending=False).head(10)
    stocklist.columns=['spread']
    #print(stocklist)
    #print('I have:',context.portfolio.positions.keys())
    '''
    stocklist=context.portfolios
    holding = context.portfolio.positions.keys()
    for value in holding:
        wlx = wlx_rule(context,value)
        if wlx==-1:
            order_target(value, 0)
    for value in stocklist:
        if g.waitdays>0:
            continue
        else:
            wlx = wlx_rule(context,value)
            if  wlx==1 and enter_trend(value,5):
                order_target(value, 100)
    #if g.long_position<g.slot

def enter_trend(instrument,lookbackdays):
    trends = history(lookbackdays, unit='1d', field='close', security_list=instrument, df=True, skip_paused=False, fq='pre')
    if trends.iloc[0][instrument]<trends.iloc[-1][instrument]:
        return True
    else:
        return False

def stoploss(context):
    # Loop over every stock currently held
    for stock in context.portfolio.positions:
        # If the latest price divided by the average cost is below 0.9, i.e. the loss exceeds 10%
        if (context.portfolio.positions[stock].price/context.portfolio.positions[stock].avg_cost < 0.9):
            # Set the position in stock to 0, i.e. sell
            order_target(stock, 0)
            # Remaining cash could be used to buy treasury bonds
            #cash=context.portfolio.available_cash
            #order_target_value('000012.XSHG',cash)
            # Log output: stock name, stop-loss
            print("\n%s stop-loss" % stock)
            
def stop_retreat(context,nday=10,retreat=0.1):
    for stock in context.portfolio.positions:
        prices=history(nday, unit='1d', field='close', security_list=stock, df=True, skip_paused=False, fq='pre')
        ndaymax = prices.max()[stock]
        current_price = context.portfolio.positions[stock].price
        if (ndaymax-current_price)/ndaymax>retreat:
            order_target(stock, 0) 
        
def max_holdingdays(context):
    pass
            
def security_stopprofit(context,profit=0.1,maTime=5,maProfit=0.02):
    if len(context.portfolio.positions)>0:
        for stock in context.portfolio.positions.keys():
            avg_cost = context.portfolio.positions[stock].avg_cost
            # Current latest price
            current_price = context.portfolio.positions[stock].price
            # Time of the most recent trade
            last_buy_time = context.portfolio.positions[stock].transact_time
            # Highest closing price of the stock from the last trade time until now
            max_close_price = getMaxClose(stock, last_buy_time)
            # Whether the price has broken below the rising trend line for more than 3 periods
            cross_after3_flag = getCrossAfterStatus(stock, last_buy_time, 3)
            # Current moving-average (MA) value for the stock
            ma = getMaValue(stock, maTime)
            # Take-profit checks
            if (current_price/max_close_price - 1 >= profit) or (current_price/ma - 1 >= maProfit) or cross_after3_flag:
                log.info(str(stock) + ' hit the take-profit line, closing position to take profit!')
                order_target_value(stock, 0)
Example #18
def reverse_walk(checked_nodes, world):
    local_world = np.copy(world)
    log.start("reverse_walk")
    valid_nodes = []
    max_steps = 1000
    log2.start("reverse search")
    if len(checked_nodes) > 0:
        n = 1
        start_x = checked_nodes[-1][0]
        start_y = checked_nodes[-1][1]

        node = [start_x, start_y]

        for step in range(max_steps):
            step2 = step
            goal = checked_nodes[n]
            log.out(manhattan(node[0], node[1], goal[0], goal[1]))
            begin_dist = manhattan(node[0], node[1], goal[0], goal[1])
            log.out(goal)
            node = [start_x, start_y]

            while node[0] >= 0 and node[0] < max_y and node[1] >= 0 and node[
                    1] < max_y:

                if manhattan(node[0], node[1], goal[0], goal[1]) < 3:
                    if goal[0] > node[0]:
                        node[0] += 1
                    elif goal[0] < node[0]:
                        node[0] -= 1
                    # if local_world[node[0],node[1]]==1:
                    # 	log.info("hit wall")
                    # 	local_world=np.copy(world)
                    # 	n+=1
                    # 	break
                    # else:
                    # 	local_world[node[0],node[1]]=0.2
                    if goal[1] > node[1]:
                        node[1] += 1
                    elif goal[1] < node[1]:
                        node[1] -= 1
                    if local_world[node[0], node[1]] == 1:
                        log.info("hit wall")
                        local_world = np.copy(world)
                        n += 1
                        break
                    else:
                        local_world[node[0], node[1]] = 0.2
                else:
                    dist1 = abs(node[0] - goal[0])
                    dist2 = abs(node[1] - goal[1])
                    if dist2 > 0:
                        ratio = int(dist1 / dist2)
                    else:
                        ratio = 1
                    #this direction needs to be multiplied by ratio
                    switch = False
                    for i in range(ratio):
                        if goal[0] > node[0]:
                            node[0] += 1
                        elif goal[0] < node[0]:
                            node[0] -= 1
                        if local_world[node[0], node[1]] == 1:
                            log.info("hit wall")
                            local_world = np.copy(world)
                            if begin_dist > 20:
                                n += 10
                            else:
                                n += 1
                            switch = True
                            break
                        else:
                            local_world[node[0], node[1]] = 0.2
                    if switch:
                        break

                    if goal[1] > node[1]:
                        node[1] += 1
                    elif goal[1] < node[1]:
                        node[1] -= 1
                    if local_world[node[0], node[1]] == 1:
                        log.info("hit wall")
                        local_world = np.copy(world)
                        if begin_dist > 20:
                            n += 10
                        else:
                            n += 1
                        break
                    else:
                        local_world[node[0], node[1]] = 0.2

                if node[0] == goal[0] and node[1] == goal[1]:
                    start_x = goal[0]
                    start_y = goal[1]
                    if goal not in valid_nodes:
                        valid_nodes.append(goal)
                    log.info("touched goal", goal)
                    n = 0
                    break

                # plt.cla()
                # plt.imshow((local_world),interpolation="nearest")
                # plt.grid(False)
                # plt.pause(0.01)

            if node[0] == checked_nodes[0][0] and node[1] == checked_nodes[0][
                    1]:
                log2.out("solution found! ", step)
                break

    log.end()
    print(step2)
    log2.end()
    return valid_nodes
def run_program(node):
    run(node["var"])
    log.info("Declared variables:\n%s", '\n'.join(str(k) + " : " + str(v) for k, v in V.items()))
    run(node["body"])
Example #20
def before_trading_start(context):

    log.info(str(context.current_dt))

    ffb_wlx(context)