Example #1
def main():
    redocs = load_neuroml()
    doc = Document(TableOfContents(), *redocs)

    opdir = os.path.join(TestLocations.getTestOutputDir(), 'neuroml')
    doc.to_html(Join(opdir, 'html'))
    doc.to_pdf(Join(opdir, 'all.pdf'))
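A note on the examples that follow: Join is an alias for the standard-library os.path.join (several snippets below import it as "from os.path import join as Join"). A minimal sketch of the convention, using only the standard library:

import os
from os.path import join as Join  # the alias used throughout these examples

# Join assembles path components using the platform's separator:
html_dir = Join('/tmp', 'neuroml', 'html')
assert html_dir == os.path.join('/tmp', 'neuroml', 'html')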
Example #2
def main():
    from argparse import ArgumentParser
    from os.path import join as Join

    parser = ArgumentParser()
    parser.add_argument('--dataset', default='all')
    parser.add_argument('--DATA_DIR', default='./raw_data')
    parser.add_argument('--save_path_housing',
                        default='./cleaned_housing_data.csv')
    parser.add_argument('--save_path_student',
                        default='./cleaned_student_data.csv')
    # NB: argparse passes option values as strings, so any value supplied
    # to --prep on the command line would be truthy; only the default is a bool.
    parser.add_argument('--prep', default=True)
    args = parser.parse_args()

    if args.dataset == 'all':
        clean_housing_data(Join(args.DATA_DIR, 'dc-residential-properties'),
                           args.save_path_housing,
                           prep=args.prep)
        clean_student_data(Join(args.DATA_DIR, 'Student_Performance'),
                           args.save_path_student,
                           prep=args.prep)

    elif args.dataset == 'housing':
        clean_housing_data(Join(args.DATA_DIR, 'dc-residential-properties'),
                           args.save_path_housing,
                           prep=args.prep)

    elif args.dataset == 'student':
        clean_student_data(Join(args.DATA_DIR, 'Student_Performance'),
                           args.save_path_student,
                           prep=args.prep)

    else:
        print('incorrect dataset argument')
Example #3
    def get_tmp_path(cls):
        try:
            loc = cls.get_path_from_rcfile("tmpdir",
                                           Join(cls.get_root_path(), "tmp"))
        except Exception:
            # Fall back to <root>/tmp if the rcfile lookup fails.
            loc = Join(cls.get_root_path(), 'tmp')
        return cls.ensure_dir_exists(loc)
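Several of these examples chain their result through an ensure_dir_exists helper before writing output. Its implementation is not shown anywhere in this collection; a minimal sketch of such a helper, with its name and return-the-path behaviour inferred from how it is called above, might be:

import os

def ensure_dir_exists(path):
    # Create the directory (and any parents) if missing, then return the
    # path so that calls can be chained, as in the examples above.
    os.makedirs(path, exist_ok=True)
    return path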
Example #4
    def __init__(self, cfg):
        self.cfg = cfg
        self.forward_timer = Timer()
        self.total_time = 0
        self.cnt = 0
        self.score = dict()
        self.output_dir = Join(cfg.TEST.OUTPUT_DIR, cfg.TEST.DATASET)
        self.save_img = cfg.TEST.SAVE_IMG
        if not os.path.exists(self.output_dir):
            os.makedirs(self.output_dir)
        self.score_csv = open(Join(self.output_dir, "score.csv"), 'w')
        self.score_csv.write("vid, image_id, psnr, ssim\n")
Example #5
def main():

    # Clear out the old directory:
    if os.path.exists(html_output_dir):
        shutil.rmtree(html_output_dir)
    LocMgr.ensure_dir_exists(html_output_dir)

    root_html = Join(html_output_dir, "index.html")

    data = []
    for xmlfile in NeuroMLDataLibrary.get_channelMLV1FilesWithSingleChannel():
        # Compare:
        data.append(compareNeuroMLChl(xmlfile))

        # Re-update the html:
        with open(root_html, "w") as f:
            f.write(Template(root_html_tmpl, {'data': data}).respond())
Example #6
    def report_all(cls, output_loc="/tmp/neurtestout/"):
        op = [
            SectionNewPage(s.title, *s()) for s in ReportGenerator.subclasses
        ]

        d = Document(TableOfContents(), *op)
        op_dir = TestLocations.getTestOutputDir()
        op_loc = Join(op_dir, 'everything.pdf')
        d.to_pdf(op_loc)
Example #7
    def get_temporary_filename(cls, suffix='', filedirectory=None):

        rnd_string = "%f%d%s" % (time.time(), random.randint(
            0, 32000), socket.gethostname())
        from morphforge.core.misc import StrUtils
        filename = 'tmp_%s%s' % (StrUtils.get_hash_md5(rnd_string), suffix)

        filedirectory = filedirectory if filedirectory else cls.get_tmp_path()
        return Join(filedirectory, filename)
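The hand-rolled unique-name scheme above (time, a random integer and the hostname, hashed with MD5) can also be replaced by the standard tempfile module, which additionally creates the file atomically. A rough equivalent, offered as an alternative sketch rather than the original implementation:

import os
import tempfile

def get_temporary_filename(suffix='', filedirectory=None):
    # mkstemp atomically creates the file and returns an open handle plus
    # its path; close the handle, since only the name is needed here.
    handle, path = tempfile.mkstemp(suffix=suffix, prefix='tmp_',
                                    dir=filedirectory)
    os.close(handle)
    return path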
Example #8
def walkInDir(dir_path):
    files_export = []

    if Exists(dir_path) and Isdir(dir_path):

        for root, dirs, files in Walk(dir_path, topdown=False):
            for name in files:
                # Append the full path of each file to the list.
                files_export.append(Join(root, name))

    return files_export
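For comparison, the same recursive file listing can be written with pathlib; this is an equivalent sketch, not the original code (Exists, Isdir and Walk above are taken to be aliases for os.path.exists, os.path.isdir and os.walk):

from pathlib import Path

def walk_in_dir(dir_path):
    # Recursively collect the paths of all regular files under dir_path.
    p = Path(dir_path)
    if not p.is_dir():
        return []
    return [str(f) for f in p.rglob('*') if f.is_file()]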
Example #9
    def log_average_score(self):
        score_per_vid = {}
        for vid in self.score:
            psnrs = [x[0] for x in self.score[vid].values()]
            ssims = [x[1] for x in self.score[vid].values()]
            score_per_vid[vid] = (np.mean(psnrs), np.mean(ssims))

        with open(Join(self.output_dir, 'videos_scores.csv'), 'w') as f:
            f.write('video_id, psnr, ssim\n')
            for vid in self.score:
                f.write("{},{},{}\n".format(vid, score_per_vid[vid][0],
                                            score_per_vid[vid][1]))
        return score_per_vid
Example #10
    def log_img_result(self, img_out, vid, img_id, psnr, ssim):
        if vid not in self.score:
            self.score[vid] = {}

        # log score
        self.score[vid][img_id] = (psnr, ssim)
        self.score_csv.write("{},{},{},{}\n".format(vid, img_id, psnr, ssim))

        # save img
        if self.save_img:
            # if not os.path.exists(Join(self.output_dir, vid)):
            #     os.makedirs(Join(self.output_dir, vid))
            img_out = cv2.cvtColor(img_out, cv2.COLOR_RGB2BGR)
            cv2.imwrite(Join(self.output_dir, img_id), img_out)
Example #11
    def __init__(self,
                 nest_classname,
                 component,
                 synapse_ports,
                 initial_regime,
                 initial_values,
                 default_values,
                 hack_fixed_values={}):  # NB: mutable default argument

        # The template files are in the same directory as this file,
        # but we could call it from anywhere, so lets set up the locations:
        self.src_dir = os.path.dirname(__file__)
        self.src_tmpl_h = os.path.join(self.src_dir,
                                       "nest_9ml_neuron_h_cheetah.tmpl")
        self.src_tmpl_cpp = os.path.join(self.src_dir,
                                         "nest_9ml_neuron_cpp_cheetah.tmpl")

        self.mymodule_tmpl_cpp = os.path.join(self.src_dir,
                                              "mymodule_cpp_cheetah.tmpl")

        self.src_bootstrap = os.path.join(self.src_dir,
                                          "nest_model/bootstrap.sh")
        self.src_configure_ac = os.path.join(self.src_dir,
                                             "nest_model/configure.ac")
        self.src_makefile_am = os.path.join(self.src_dir,
                                            "nest_model/Makefile.am")

        # Output Files:
        self.build_dir = "nest_model"
        self.output_h_file = Join(self.build_dir, "nest_9ml_neuron.h")
        self.output_cpp_file = Join(self.build_dir, "nest_9ml_neuron.cpp")
        self.output_mymodule_cpp_file = Join(self.build_dir, "mymodule.cpp")

        self.nm = NestModel(nest_classname, component, synapse_ports,
                            initial_regime, initial_values, default_values)
        self.buildCPPFiles()
Example #12
    def get_channelMLV1Files(cls):

        subdirs = [
            "CA1PyramidalCell_NeuroML",
            "GranCellLayer_NeuroML",
            "GranuleCell_NeuroML",
            "MainenEtAl_PyramidalCell_NeuroML",
            "SolinasEtAl_GolgiCell_NeuroML",
            "Thalamocortical_NeuroML",
            "VervaekeEtAl-GolgiCellNetwork_NeuroML",
        ]

        simSrcDir = "/home/michael/hw_to_come/mf_test_data/test_data/NeuroML/V1/example_simulations/"

        files = []
        for subdir in subdirs:
            files.extend(glob.glob(Join(simSrcDir, subdir) + '/*.xml'))
        return files
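Note that the call above builds the glob pattern by string concatenation, Join(simSrcDir, subdir) + '/*.xml'. Since os.path.join accepts any number of components, the pattern can equally be passed as a third component; on POSIX the two forms produce the same pattern (the directory name below is a hypothetical placeholder):

import glob
from os.path import join as Join

sim_src_dir = '/some/example/dir'        # placeholder path for illustration
subdir = 'CA1PyramidalCell_NeuroML'

a = glob.glob(Join(sim_src_dir, subdir) + '/*.xml')   # form used above
b = glob.glob(Join(sim_src_dir, subdir, '*.xml'))     # equivalent form
assert a == b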
Example #13
def document_eqnsets(individual_reports=True):

    all_redocs = []

    for f in TestLocations.getEqnSetFiles():
        f_basename = os.path.splitext(os.path.basename(f))[0]

        # Load the EqnSet:
        library_manager = NeuroUnitParser.File(open(f).read(), name=f_basename)

        # Create the documentation:
        local_redoc = MRedocWriterVisitor.build(library_manager)

        # Create a local documentation file:
        if individual_reports:
            doc = Document(TableOfContents(), local_redoc)
            doc.to_pdf(Join(TestLocations.getTestOutputDir(),
                            'eqnsets_%s.pdf' % f_basename))

        # Add it to the single large file:
        all_redocs.append(local_redoc)

    return all_redocs
Example #14
def compareNeuroMLChl(xmlFile):
    model, chl_type = os.path.splitext(xmlFile)[0].split("/")[-2:]
    print(model, chl_type)

    op_dir = LocMgr.ensure_dir_exists(Join(html_output_dir, model, chl_type))
    op_html = Join(op_dir, "index.html")
    c = ComparisonResult(xmlfile=xmlFile, op_file=op_html, same_chl=True,
                         exception=None)

    try:

        # Make the NeuroUnits channel:
        chl_neuro = NeuroML_Via_NeuroUnits_ChannelNEURON(xml_filename=xmlFile)
        c.chl_neurounits = chl_neuro


        op_pdf_file = Join(op_dir, 'Op1.pdf')
        #WriteToPDF(eqnset = chl_neuro.eqnset, filename = op_pdf_file)
        c.chl_neurounits_pdf = op_pdf_file


        # Make the NeuroML channel:
        xsl_file = "/home/michael/srcs/neuroml/CommandLineUtils/ChannelMLConverter/ChannelML_v1.8.1_NEURONmod.xsl"
        chl_xsl = NeuroML_Via_XSL_ChannelNEURON(xml_filename=xmlFile,
                                                xsl_filename=xsl_file)
        c.chl_xsl = chl_xsl
        c.chl_xsl_hoc = []


        chl_neuro_res = simulate_chl_all(chl_neuro)
        chl_xsl_res = simulate_chl_all(chl_xsl)
        c.chl_neurounit_hoc = []


        for i, (rN, rX) in enumerate(zip(chl_neuro_res, chl_xsl_res)):

            c.chl_neurounit_hoc.append(rN.hocfilename)
            c.chl_xsl_hoc.append(rX.hocfilename)

            tN = rN.get_trace("CurrentClamp").convert_to_fixed(dt=unit("1.01:ms"))
            tX = rX.get_trace("CurrentClamp").convert_to_fixed(dt=unit("1.01:ms"))

            # Compare current traces, zeroing the samples within 0.05 ms
            # of t = 0, 200 and 700 ms:
            for t_ms in (0, 200, 700):
                tN._data[np.fabs(tN.time_pts_ms - t_ms) < 0.05] *= 0
                tX._data[np.fabs(tX.time_pts_ms - t_ms) < 0.05] *= 0
            f = QuantitiesFigure()
            ax1 = f.add_subplot(4, 1, 1)
            ax2 = f.add_subplot(4, 1, 2)
            ax3 = f.add_subplot(4, 1, 3)
            ax4 = f.add_subplot(4, 1, 4)
            ax1.plotTrace(tN, color='b')
            ax1.plotTrace(tX, color='g', linewidth=20, alpha=0.2)
            ax2.plotTrace(tN.window((200, 250) * pq.ms), color='b')
            ax2.plotTrace(tX.window((200, 250) * pq.ms), color='g',
                          linewidth=20, alpha=0.2)

            num = tN - tX
            denom = tN + tX
            diff = num / denom
            ax3.plotTrace(diff, color='r')

            ax4.plotTrace(rN.get_trace('SomaVoltage'), color='m')
            ax4.plotTrace(rX.get_trace('SomaVoltage'), color='m', linewidth=20, alpha=0.2)

            if num.max()[1] > unit("0.1:pA"):
                c.same_chl = False

            out_im = Join(op_dir, "out_im%03d" % i)
            pylab.savefig(out_im+".png")
            pylab.savefig(out_im+".pdf")
            c.output_image_files.append(out_im)
            pylab.close()

        c.finished_ok = True

    except NeuroUnitsImportNeuroMLNotImplementedException as e:
        print('Exception caught:', e)

        s = io.StringIO()
        traceback.print_exc(file=s)
        c.exception_long = s.getvalue()
        c.exception = "%s (%s)" % (str(e), str(type(e)))
        c.same_chl = False
        c.finished_ok = False
Example #15
    def get_test_srcs_path(cls):
        return cls.validate_exists(
            Join(cls.get_root_path(), 'morphforge_testdata'))
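validate_exists is another helper these path accessors rely on without its body being shown. Judging from how it is used, it checks that the path exists and returns it unchanged; a plausible sketch (an assumption, not the original):

import os

def validate_exists(path):
    # Fail fast if a required path is missing; otherwise hand it back.
    assert os.path.exists(path), 'Path does not exist: %s' % path
    return path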
Example #16
    def buildsectionsurface(cls, s):
        import gts
        from morphforge.core import LocMgr
        from os.path import join as Join
        print('Building Mesh')

        working_dir = LocMgr.ensure_dir_exists('/tmp/mf/mesh/')
        fTemp1 = Join(working_dir, 'pts.txt')
        fTemp2 = Join(working_dir, 'pts.off')
        fTemp3 = Join(working_dir, 'pts.stl')
        fTemp2b = Join(working_dir, 'pts_postSub.off')
        fTemp4 = Join(working_dir, 'pts.gts')

        nstep = 5
        print('Building Spheres')
        distal_offset = np.array((0.05, 0.05, 0.05))
        ptsP = GeomTools.produce_sphere(centre=s.get_proximal_npa3(),
                                        radius=s.p_r,
                                        n_steps=nstep)
        ptsD = GeomTools.produce_sphere(centre=s.get_distal_npa3() +
                                        distal_offset,
                                        radius=s.d_r,
                                        n_steps=nstep)

        print('Removing Close Points')
        pts = cls.only_pts_at_min_dist(ptsP + ptsD, min_dist=0.01)

        print('Writing:', fTemp2)
        with open(fTemp1, 'w') as f:
            f.write('3 %d\n' % len(pts))
            np.savetxt(f, np.array(pts))

        if os.path.exists(fTemp2):
            os.unlink(fTemp2)
        os.system('qhull T1 QJ o < %s > %s' % (fTemp1, fTemp2))

        # Don't do the subdivision, just copy the files:
        os.system('cp %s %s' % (fTemp2, fTemp2b))
        # fTemp2 = fTemp2b

        f = open(fTemp2b).read().split()
        (nVertex, nFace, nEdge) = [int(i) for i in f[1:4]]
        assert nVertex > 5
        vertices = np.array([float(t) for t in f[4:4 + nVertex * 3]
                             ]).reshape(nVertex, 3)

        triangles = np.array([int(t) for t in f[4 + nVertex * 3:]])
        triangles = triangles.reshape((nFace, 4))
        triangles = triangles[:, (1, 2, 3)]

        print('Writing STL')
        with open(fTemp3, 'w') as fSTL:
            fSTL.write('solid name\n')
            for i in range(triangles.shape[0]):
                (a, b, c) = triangles[i, :]

                fSTL.write('facet normal 0 0 0\n')
                fSTL.write('outer loop \n')
                fSTL.write('vertex %f %f %f\n' %
                           (vertices[a, 0], vertices[a, 1], vertices[a, 2]))
                fSTL.write('vertex %f %f %f\n' %
                           (vertices[b, 0], vertices[b, 1], vertices[b, 2]))
                fSTL.write('vertex %f %f %f\n' %
                           (vertices[c, 0], vertices[c, 1], vertices[c, 2]))
                fSTL.write('endloop \n')
                fSTL.write('endfacet\n')

            fSTL.write('endsolid name\n')

        print('Running stl2gts...')
        if os.path.exists(fTemp4):
            os.unlink(fTemp4)

        os.system('stl2gts < %s > %s' % (fTemp3, fTemp4))

        assert os.path.exists(fTemp4)

        f = open(fTemp4)
        s = gts.read(f)

        s.cleanup()
        assert s.is_closed()
        assert s.is_orientable()

        # s.tessellate()
        return s
Example #17
    def get_test_mods_path(cls):
        return cls.validate_exists(Join(cls.get_test_srcs_path(), 'mod_files'))
Example #18
def t3():
    print('Loading Third XML File (COBA-Component)')
    print('---------------------------------------')
    component = readers.XMLReader.read_component(
        Join(tenml_dir, 'comp_coba.9ml'))
    writers.XMLWriter.write(component, '/tmp/nineml_toxml3.xml')
Example #19
def t4():
    print('Loading Fourth XML File (iaf-2coba-Model)')
    print('-----------------------------------------')
    component = readers.XMLReader.read_component(Join(tenml_dir,
                                                      'iaf_2coba.10ml'),
                                                 component_name='iaf')
    writers.XMLWriter.write(
        component,
        '/tmp/nineml_toxml4.xml',
    )
    model = readers.XMLReader.read_component(Join(tenml_dir, 'iaf_2coba.10ml'))

    from nineml.abstraction_layer.flattening import flatten
    from nineml.abstraction_layer.dynamics.utils.modifiers import (
        DynamicsModifier)

    flatcomponent = flatten(model, componentname='iaf_2coba')
    DynamicsModifier.close_analog_port(component=flatcomponent,
                                       port_name='iaf_iSyn',
                                       value='0')

    writers.XMLWriter.write(flatcomponent, '/tmp/nineml_out_iaf_2coba.9ml')

    import pyNN.neuron as sim
    from pyNN.utility import init_logging

    init_logging(None, debug=True)
    sim.setup(timestep=0.1, min_delay=0.1)
    print('Attempting to simulate From Model:')
    print('----------------------------------')
    celltype_cls = pyNNml.nineml_celltype_from_model(
        name="iaf_2coba",
        nineml_model=flatcomponent,
        synapse_components=[
            pyNNml.CoBaSyn(namespace='cobaExcit', weight_connector='q'),
            pyNNml.CoBaSyn(namespace='cobaInhib', weight_connector='q'),
        ])

    parameters = {
        'iaf.cm': 1.0,
        'iaf.gl': 50.0,
        'iaf.taurefrac': 5.0,
        'iaf.vrest': -65.0,
        'iaf.vreset': -65.0,
        'iaf.vthresh': -50.0,
        'cobaExcit.tau': 2.0,
        'cobaInhib.tau': 5.0,
        'cobaExcit.vrev': 0.0,
        'cobaInhib.vrev': -70.0,
    }

    parameters = ComponentFlattener.flatten_namespace_dict(parameters)

    cells = sim.Population(1, celltype_cls, parameters)
    cells.initialize('iaf_V', parameters['iaf_vrest'])
    cells.initialize('tspike', -1e99)  # neuron not refractory at start
    cells.initialize('regime', 1002)  # temporary hack

    inputs = sim.Population(2, sim.SpikeSourcePoisson, {'rate': 100})

    connector = sim.OneToOneConnector(weights=1.0, delays=0.5)

    conn = [
        sim.Projection(inputs[0:1], cells, connector, target='cobaExcit'),
        sim.Projection(inputs[1:2], cells, connector, target='cobaInhib')
    ]

    cells._record('iaf_V')
    cells._record('cobaExcit_g')
    cells._record('cobaInhib_g')
    cells._record('cobaExcit_I')
    cells._record('cobaInhib_I')
    cells.record()

    sim.run(100.0)

    cells.recorders['iaf_V'].write("Results/nineml_neuron.V",
                                   filter=[cells[0]])
    cells.recorders['cobaExcit_g'].write("Results/nineml_neuron.g_exc",
                                         filter=[cells[0]])
    cells.recorders['cobaInhib_g'].write("Results/nineml_neuron.g_inh",
                                         filter=[cells[0]])
    # Write the current traces to their own files, separate from the
    # conductance files above:
    cells.recorders['cobaExcit_I'].write("Results/nineml_neuron.i_exc",
                                         filter=[cells[0]])
    cells.recorders['cobaInhib_I'].write("Results/nineml_neuron.i_inh",
                                         filter=[cells[0]])

    t = cells.recorders['iaf_V'].get()[:, 1]
    v = cells.recorders['iaf_V'].get()[:, 2]
    gInh = cells.recorders['cobaInhib_g'].get()[:, 2]
    gExc = cells.recorders['cobaExcit_g'].get()[:, 2]
    IInh = cells.recorders['cobaInhib_I'].get()[:, 2]
    IExc = cells.recorders['cobaExcit_I'].get()[:, 2]

    import pylab
    pylab.subplot(311)
    pylab.ylabel('Voltage')
    pylab.plot(t, v)

    pylab.subplot(312)
    pylab.ylabel('Conductance')
    pylab.plot(t, gInh)
    pylab.plot(t, gExc)

    pylab.subplot(313)
    pylab.ylabel('Current')
    pylab.plot(t, IInh)
    pylab.plot(t, IExc)

    pylab.suptitle("From Tree-Model Pathway")
    pylab.show()

    sim.end()
Example #20
def t1():
    print('Loading First XML File')
    print('----------------------')
    component = readers.XMLReader.read_component(
        Join(sample_xml_dir, 'PostTF_izhikevich.xml'))
    writers.XMLWriter.write(component, '/tmp/nineml_toxml1.xml')
Example #21
    try:
        cf.set(seccrous, 'crous', sys.argv[1])
    except IndexError:
        # No corpus name given on the command line; default to 'trec'.
        cf.set(seccrous, 'crous', 'trec')  # 'trec' or 'wiki'
    cf.set(seccrous, 'embeddingname', 'aquaint+wiki.txt.gz.ndim=50.bin')
    embeddingname = cf.get(seccrous, 'embeddingname')

    crous = cf.get(seccrous, 'crous')

    cf.add_section(secpath)
    cf.set(secpath, 'dataset', 'dataset')
    cf.set(secpath, 'interdata', 'interdata')
    cf.set(secpath, 'logdir', 'log')
    cf.set(secpath, 'embeddingdir', 'embeddings')

    dataset_dir = Join(cf.get(secpath, 'dataset'), crous)
    interdata_dir = cf.get(secpath, 'interdata')
    log_dir = Join(cf.get(secpath, 'logdir'), crous)
    embeddingdir = cf.get(secpath, 'embeddingdir')

    for sec in [sectrain, sectest]:
        cf.add_section(sec)
        name = sec[:-4]
        cf.set(sec, name + 'file', Join(cf.get(secpath, 'dataset'), crous,
                                        name))
        path = Join(interdata_dir, name)
        if not os.path.exists(path):
            os.makedirs(path)
        cf.set(sec, 'qaid', Join(path, 'qaid'))
        cf.set(sec, 'label', Join(path, 'label'))
        cf.set(sec, 'question_index', Join(path, 'question_index'))
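This snippet assumes a ConfigParser instance cf and the section-name constants seccrous, secpath, sectrain and sectest defined earlier in the file. A minimal setup that would make it runnable, with the names inferred from how they are used (note that sec[:-4] strips a four-character 'data' suffix), might be:

from configparser import ConfigParser

cf = ConfigParser()
seccrous = 'crous'       # corpus-selection section
secpath = 'path'         # directory-layout section
sectrain = 'traindata'   # sec[:-4] -> 'train'
sectest = 'testdata'     # sec[:-4] -> 'test'
cf.add_section(seccrous)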
Example #22
    def getEqnSetFiles(cls):
        loc = Join(cls.getPackageRoot(), "src/test_data/eqnsets/")
        print(loc)
        files = glob.glob(loc + "/*.eqn")
        return files
Example #23
from nineml.utility import LocationMgr
from os.path import join as Join
from nineml.abstraction_layer.xmlns import *

import nineml.abstraction_layer as al
import nineml.abstraction_layer.readers as readers
import nineml.abstraction_layer.writers as writers

import pyNN.neuron.nineml as pyNNml

from nineml.abstraction_layer.flattening import ComponentFlattener

#LocationMgr.StdAppendToPath()

sample_xml_dir = Join(LocationMgr.getCatalogDir(), "sample_xml_files")
tenml_dir = Join(LocationMgr.getCatalogDir(), "sample_xml_files/10ml/")


def t1():
    print('Loading First XML File')
    print('----------------------')
    component = readers.XMLReader.read_component(
        Join(sample_xml_dir, 'PostTF_izhikevich.xml'))
    writers.XMLWriter.write(component, '/tmp/nineml_toxml1.xml')


def t2():
    print('Loading Second XML File (IAF-Component)')
    print('----------------------------------------')
    component = readers.XMLReader.read_component(
        Join(tenml_dir, 'comp_iaf.9ml'))
Example #24
    def get_bin_path(cls):
        return cls.validate_exists(Join(cls.get_root_path(), 'bin/'))
Example #25
    def get_log_path(cls):
        return cls.ensure_dir_exists(Join(cls.get_tmp_path(), 'log/'))
Example #26
    def get_simulation_results_tmp_dir(cls):
        loc = cls.get_path_from_rcfile(
            "tmp_simulationpicklesdir",
            Join(cls.get_tmp_path(), "simulationresults"))
        return cls.ensure_dir_exists(loc)
Example #27
#"/home/michael/hw/morphforge/doc"
doc_src_dir = os.path.normpath( os.path.join(root, "doc") )

examples_dst_dir =  os.path.join(root, "doc/srcs_generated_examples")
examples_dst_dir_images =  os.path.join(root, "doc/srcs_generated_examples/images/")

examples_build_dir = os.path.join( LocMgr.get_tmp_path(), "mf_doc_build")
examples_build_dir_image_out = os.path.join( examples_build_dir,  "images/")


dirs = ['morphology', 'singlecell_simulation', 'multicell_simulation', 'advanced_examples']#, 'assorted' ]
example_subdirs = [ d for d in os.listdir(examples_src_dir) if d.startswith("""exset""") ]
dirs = sorted(example_subdirs)

example_srcs = list( itertools.chain( *[ sorted(Glob( Join(examples_src_dir, dir) + "/*.py") ) for dir in dirs] ) )




def clear_directory(d):
    if os.path.exists(d):
        shutil.rmtree(d)
    os.mkdir(d)




def parse_src_file(filename, docstring):
    d = open(filename, 'r').read()
Example #28
def t2():
    print('Loading Second XML File (IAF-Component)')
    print('----------------------------------------')
    component = readers.XMLReader.read_component(
        Join(tenml_dir, 'comp_iaf.9ml'))
    writers.XMLWriter.write(component, '/tmp/nineml_toxml2.xml')
Example #29
def main():
    module = AnsibleModule(argument_spec=dict(
        #arguments here
        save_name=dict(required=True, type='str'),
        action=dict(required=True, type='str'),
        path_to_save=dict(required=False, type='str'),
        block_size=dict(required=False, type='int'),
        restore_date=dict(required=False, type='str'),
        mysql_host=dict(required=True, type='str'),
        mysql_user=dict(required=True, type='str'),
        mysql_passwd=dict(required=True, type='str'),
        mysql_db=dict(required=True, type='str'),
        ftp_host=dict(required=True, type='str'),
        ftp_user=dict(required=True, type='str'),
        ftp_passwd=dict(required=True, type='str')))

    #get params
    save_name = module.params.get("save_name")
    action = module.params.get("action")

    mysql_host = module.params.get("mysql_host")
    mysql_user = module.params.get("mysql_user")
    mysql_passwd = module.params.get("mysql_passwd")
    mysql_db = module.params.get("mysql_db")

    ftp_host = module.params.get("ftp_host")
    ftp_user = module.params.get("ftp_user")
    ftp_passwd = module.params.get("ftp_passwd")

    #variable for module_exit
    output = ""
    changed = False

    #instantiate db
    db = DB(
        mysql_host,
        mysql_user,
        mysql_passwd,
        mysql_db,  #database connection
        ftp_host,
        ftp_user,
        ftp_passwd)  #ftp connection

    if (action == "save"):

        if (not module.params["path_to_save"]):
            module.exit_json(changed=False,
                             ansible_module_results="path_to_save is missing.",
                             failed=True)

        path_to_save = module.params.get("path_to_save")

        #get param (or use default value)
        if (not module.params["block_size"]):
            blockSize = 4096  #default for ext4
        else:
            blockSize = module.params.get("block_size")

        #check if the path exists and if it's a file or a directory
        if (Exists(path_to_save)):

            if (Isfile(path_to_save)):
                #path is a single file
                files = [path_to_save]

            else:
                #path is a directory

                #get files of dir
                files = walkInDir(path_to_save)

            lastSaveId = db.get_last_saveid_by_savename(
                save_name)[0]["max(id)"]


            if db.create_save(save_name, str(Datetime.now())):

                db_files = db.get_files_of_save(lastSaveId)

                for file in files:
                    #compute actual md5 of the file
                    hashfile = md5()
                    with open(file, 'rb') as fopen:

                        sliced_content = fopen.read(blockSize)
                        while sliced_content:
                            hashfile.update(sliced_content)
                            sliced_content = fopen.read(blockSize)

                    #get both name and directory of the file
                    file_dir, file_name = SplitFile(file)
                    compute_blocks_flag = True
                    db_file_id = -1

                    # Check the stored hash of each file:
                    for db_file in db_files:

                        if db_file["NAME"] == file_name:

                            # Compare hashes:
                            if db_file["HASH"] == hashfile.hexdigest():

                                #build array of all locations file
                                file_locations = []
                                for loc in db.get_locations_by_fileid(
                                        db_file["ID"]):
                                    file_locations.append(loc["location"])

                                # Check whether this location already exists:
                                if file_dir in file_locations:
                                    #no changes for this file
                                    #just insert references for the current save and continue
                                    db.create_file_references(
                                        db_file["ID"], db_file["location"])
                                    compute_blocks_flag = False

                                else:
                                    #file already exists but has been moved or copied
                                    #update location
                                    db.create_file_references(
                                        db_file["ID"], file_dir)
                                    compute_blocks_flag = False

                            #else, file has been modified, we have to create file and compute blocks
                            db_file_id = db_file["ID"]

                            break

                    # No changes for the current file; continue to the next one.
                    if not compute_blocks_flag:
                        continue
                    fileid = db.create_file(file_name, GetSizeOfThis(file),
                                            hashfile.hexdigest(), file_dir)

                    #get stored hash_blocks
                    db_hashes = {}
                    for db_hash in db.get_hashblocks_of_file(db_file_id):
                        db_hashes[db_hash["BLOCKNUMBER"]] = db_hash["HASH"]

                    #compute hash of each block
                    with open(file, 'rb') as fopen:

                        #read file and slice it in blockSize
                        block = fopen.read(blockSize)
                        block_number = 0

                        while block:
                            hash_block = md5(block)

                            # If the block is new or its hash has changed,
                            # re-upload it:
                            if (block_number not in db_hashes
                                    or hash_block.hexdigest() !=
                                    db_hashes[block_number]):
                                db.create_block(block_number, block,
                                                hash_block.hexdigest(), fileid)
                            else:
                                db.create_block_references(
                                    hash_block.hexdigest(), fileid)

                            block_number += 1
                            block = fopen.read(blockSize)

                    output = 'saved 100% ok'
                    changed = True

            else:
                output = "Can't create save object in database"

        else:
            output = "The given path doesn't exist on the host."

    elif (action == "restore"):

        #initialize variable
        restore_date = None
        if (module.params["restore_date"]):
            restore_date = module.params.get("restore_date")

        #if no specific date is set, get last save id with the save_name
        if (restore_date == None):
            lastSaveId = db.get_last_saveid_by_savename(
                save_name)[0]["max(id)"]
        else:
            lastSaveId = db.get_saveid_by_savedate(restore_date)[0]["id"]

        for restore_file in db.get_files_of_save(lastSaveId):

            # If the folder doesn't exist, create it:
            if not (Exists(restore_file["location"])
                    and Isdir(restore_file["location"])):
                makedirs(restore_file["location"])

            #erase / create file
            restored_file = open(
                Join(restore_file["location"] + '/', restore_file["NAME"]),
                'wb')
            restored_file.close()

            #store blocks in file
            with open(
                    Join(restore_file["location"] + '/', restore_file["NAME"]),
                    'ab+') as restored_file:
                for db_hash in db.get_hashblocks_of_file(restore_file["ID"]):

                    block = db.get_block(db_hash["HASH"])
                    restored_file.write(block)

        output = "restoration 100 per 100 ok"
        changed = True

    else:
        output = "Unknown action \"" + action + "\". Available: 'save' or 'restore'"

    #export something to ansible output
    module.exit_json(changed=changed, ansible_module_results=output)
Example #30
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:76.0) Gecko/20100101 Firefox/76.0",
"Referer": "",
"Sec-Fetch-Mode": "no-cors",
"Connection": "keep-alive"
}

# Daily config
main_url  = 'http://bing.plmeizi.com/show/'
header    = '今日美图首页'  # page title: "Today's beautiful pictures homepage"
User_Name = '57460'

#Desktop_Dir    =  'C:/Users/'+User_Name+'/Desktop/'
#Desktop_Dir    =  'D:/'
Python_Dir     =  'D:/Python'
Pictures_Dir   =  'D:/Pictures'
Bing_Daily_Dir =  Join(Pictures_Dir,'Bing/Date')
Bing_Codes_Dir =  Join(Python_Dir,'Bing/Date')
Mixed_Dir      =  Join(Pictures_Dir,'Mixed')

cur_folder     =  Join(Python_Dir,'Web/CrawlMasPic/Bing')
pic_folder     =  Join(Pictures_Dir,'Bing/Date')
cmm_path       =  Join(pic_folder, 'Comments')
cp_path        =  Join(cur_folder,'.checkpoint')
mv_dest_dir    =  Mixed_Dir

# Load Files
# def GetTxt(path):
#     return open(path,'rb').read().decode('utf-8')
# def LoadTxtToLines(path,sep='\n'):
#     return [line for line in open(path,'rb').read().decode('utf-8').split(sep) if len(line)>0]
def LoadCheckpoint(cp_path):