Example #1
def generate_feed(req):
    prefix = lib.absolute_prefix()
    news = _enum_news()
    feed = ""
    lastmodified = ""
    for id in news:
        filename = lib.valid_dir(djoin(dirname(__file__), basename(id)))
    filename = filename + ".md" if exists(filename + ".md") else filename + ".news"
        if id == news[0]: lastmodified = lib.get_time_for_file(filename, True)
        content, headline, author = parse_news(id)
        content = sub('style=".*?"', "", escape(content))
        uri = djoin(lib.get_config("CanonicalName"), prefix, "News",
                    "?id=" + id)
        feed += lib.get_template("feedentry") % {
            "uri": uri,
            "title": headline,
            "mtime": lib.get_time_for_file(filename, True),
            "content": content,
            "author": author
        }
    req.content_type = "application/atom+xml; charset=UTF-8"
    return lib.get_template("feed") % {
        "uri":
        djoin(lib.get_config("CanonicalName"), prefix),
        "self":
        djoin(lib.get_config("CanonicalName"), prefix, "News", "?feed=true"),
        "mtime":
        lastmodified,
        "content":
        feed
    }
Example #2
 def test_create_puppet_change(self):
     hostci = CI(name='s11401.dc2', uid='mm-1')
     hostci.type_id = CI_TYPES.DEVICE.id
     hostci.save()
     p = PuppetAgentsImporter()
     yaml = open(
         djoin(CURRENT_DIR, 'cmdb/tests/samples/canonical.yaml')
     ).read()
     p.import_contents(yaml)
     yaml = open(
         djoin(CURRENT_DIR, 'cmdb/tests/samples/canonical_unchanged.yaml')
     ).read()
     p.import_contents(yaml)
     chg = CIChange.objects.get(type=CI_CHANGE_TYPES.CONF_AGENT.id)
     logs = PuppetLog.objects.filter(
         cichange__host='s11401.dc2').order_by('id')
     self.assertEqual(chg.content_object.host, u's11401.dc2')
     self.assertEqual(chg.content_object.kind, u'apply')
     self.assertEqual(chg.ci, hostci)
     self.assertEqual(chg.type, 2)
     # check parsed logs
     self.assertEqual(len(logs), 16)
     time_iso = logs[0].time.isoformat().split('.')[0]
     self.assertEqual(time_iso, datetime.datetime(
         2010, 12, 31, 0, 56, 37).isoformat())
     # should not import puppet report which has 'unchanged' status
     self.assertEqual(
         CIChangePuppet.objects.filter(status='unchanged').count(), 0)
Example #3
 def output(self):
     return [
         luigi.LocalTarget(djoin(gp.read_dir,
                                 gp.final_prefix + '.R1.fastq')),
         luigi.LocalTarget(djoin(gp.read_dir,
                                 gp.final_prefix + '.R2.fastq'))
     ]
Example #4
 def run(self):
     fastq_to_table(
         djoin(gp.read_dir,
               '.'.join([gp.final_prefix, self.direction, 'fastq'])),
         djoin(gp.analyses_dir,
               '.'.join([gp.gstd_prefix, self.direction, 'csv'])),
         gp.analyses_dir)
Example #5
 def run(self):
     if self.enhance_mode:
         yield Single_Enhanced_FastQ(
             enh_csv=djoin(gp.enhd_cld_dir, '.'.join(['bsort', self.direction, 'csv'])),
             direction=self.direction)
     barcode_sorter(
         djoin(self.in_dir, '.'.join([self.prefix, self.direction, 'fastq'])),
         gp.read_dir, False)
Example #6
 def run(self):
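     # For each read direction (R1/R2), concatenate the per-chunk CSVs in gp.enhd_cld_dir into one output file.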
     for i, j in enumerate(['R1', 'R2']):
         enhd_file_path = '.'.join([self.enhd_file_prefix, j, 'csv'])
         with open(enhd_file_path, 'w') as outf:
             for chunk_file in listdir(djoin(gp.enhd_cld_dir, j)):
                 with open(djoin(gp.enhd_cld_dir, j, chunk_file)) as f:
                     for l in f:
                         outf.write(l)
Example #7
def _enum_news(req=None):
    if req: return
    newsfiles = [
        basename(file).split(".")[0]
        for file in glob(djoin(dirname(__file__), "*.md")) +
        glob(djoin(dirname(__file__), "*.news"))
    ]
    return sorted(newsfiles,
                  lambda a, b: cmp(int(a.split(".")[0]), int(b.split(".")[0])),
                  reverse=True)
Example #8
 def run(self):
     # complete_reads(djoin(gp.read_dir, '.'.join([gp.prefix + '_bsort', self.direction, 'fastq'])),
     #                djoin(gp.cs_init_dir, 'K55', '.'.join([gp.search_dist, self.direction, 'fastq'])),
     #                djoin(gp.ariadne_dir, '.'.join([gp.search_dist + '_full', self.direction, 'fastq'])))
     # yield Single_Sort_FastQ(in_dir = gp.ariadne_dir, prefix = gp.search_dist + '_full', direction = self.direction,
     #                   out_dir = gp.ariadne_dir)
     with self.output().open('w') as out_fq:
         with open(djoin(gp.cs_init_dir, 'K55', '.'.join([gp.search_dist, self.direction, 'fastq']))) as ari_fq, \
              open(djoin(gp.read_dir, '.'.join([gp.prefix + '_nobc', self.direction, 'fastq']))) as nobc_fq:
             out_fq.write(ari_fq.read())
             out_fq.write(nobc_fq.read())
Example #9
 def run(self):
     fastq_prefix = djoin(gp.cs_init_dir, 'K55', gp.search_dist)
     retcode = subprocess.run([
         '/home/lam4003/bin/spades/assembler/spades.py', '--meta', '--only-assembler',
         '--gemcode1-1', djoin(gp.ariadne_dir, gp.ariadne_prefix + '.R1.fastq'),
         '--gemcode1-2', djoin(gp.ariadne_dir, gp.ariadne_prefix + '.R2.fastq'),
         '--search-distance', '0', '--size-cutoff', '6',
         '-t', gp.num_threads, '-m', gp.memory, '-o', gp.cs_final_dir])
                     # '-k', '55', '--assembly-graph', djoin(gp.cs_init_dir, 'assembly_graph.fastg')])
     spades_mem = int(gp.memory) + 100
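     # If SPAdes failed, restart the k55 stage with progressively more memory until it exits cleanly.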
     while retcode.returncode:
         retcode = subprocess.run([
             '/home/lam4003/bin/spades/assembler/spades.py', '--restart-from', 'k55',
             '-m', str(spades_mem), '-o', gp.cs_final_dir])
         spades_mem += 100
     shutil.copy(djoin(gp.cs_final_dir, 'scaffolds.fasta'), self.output().path)
Example #10
 def run(self):
     for i, j in enumerate(['R1', 'R2']):
         enhd_file_path = '.'.join([self.enhd_file_prefix, j, 'csv'])
         with open(enhd_file_path, 'w') as outf:
             for chunk_file in listdir(djoin(gp.enhd_cld_dir, j)):
                 with open(djoin(gp.enhd_cld_dir, j, chunk_file)) as f:
                     for l in f:
                         outf.write(l)
         if gp.deconv_type == gp.gstd_prefix:
             shutil.copy(
                 enhd_file_path,
                 djoin(gp.analyses_dir, '.'.join([gp.gstd_prefix, j,
                                                  'csv'])))
Example #11
 def run(self):
     fastq_enhance(
         djoin(gp.read_dir, '.'.join([gp.base_prefix + '_bsort', self.direction, 'fastq'])),
         self.enh_csv, gp.enhd_cld_dir, gp.edit_prefix)
Example #12
 def run(self):
     srt_bam = djoin(gp.work_dir, 'bsort.bam')
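     # Coordinate-sort and index the input BAM before annotation.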
     srt_step = subprocess.run(
         ['samtools', 'sort',
          self.input().path, '-o', srt_bam])
     idx_step = subprocess.run(['samtools', 'index', srt_bam])
     bam_to_annotate(srt_bam, self.ids_to_names, gp.work_dir, False)
Example #13
def parse_news(id, req=None):
    if not isinstance(id, str): return

    newstpl = lib.get_template("news")
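    # A news item is stored either as a plain-text .news file or a Markdown .md file.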
    filename = lib.valid_dir(djoin(dirname(__file__), basename(id)))
    if exists(filename + ".news"):
        filename += ".news"
        mode = "plain"
    elif exists(filename + ".md"):
        filename += ".md"
        mode = "markdown"
    else:
        raise Exception()

    time = lib.get_time_for_file(filename)
    with open(filename) as newsfile:
        author = lib.escape(newsfile.readline().replace("\n", ""))
        headline = lib.escape(newsfile.readline().replace("\n", ""))
        newscontent = lib.escape("\n".join(newsfile.readlines()))

    if mode == "markdown":
        newscontent = markdown(newscontent)

    return (newstpl % {
        "headline": headline,
        "author": author,
        "date": time,
        "id": id,
        "content": newscontent,
        "uri": lib.ljoin("News", "?id=" + id)
    }, headline, author)
Example #14
 def run(self):
     subprocess.run([
         '/home/lam4003/bin/ema/ema', 'align', '-d', '-r', self.reference + '.fna',
         '-s', djoin(gp.bin_dir, self.ema_bin_file), '-o', self.out_prefix + '.sam'])
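     # Keep only primary alignments (-F 256 filters secondary records) and convert the SAM output to BAM.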
     subprocess.run([
         'samtools', 'view', '-hb', '-f', '0', '-F', '256',
         self.out_prefix + '.sam', '-o',
         self.output().path
     ])
Example #15
 def run(self):
     if exists(djoin(gp.cs_init_dir, 'K33', 'assembly_graph.fastg')):
         # The previous iteration exists, run from the last one.
         retcode = subprocess.run([
             '/home/lam4003/bin/spades/assembler/spades.py', '--restart-from', 'k55',
             '-o', gp.cs_init_dir])
     else:  # The initial cloudSPAdes iterations do not exist.
         retcode = subprocess.run([
             '/home/lam4003/bin/spades/assembler/spades.py', '--meta', '--only-assembler',
             '--gemcode1-1', self.input()[0].path, '--gemcode1-2', self.input()[1].path,
             '--search-distance', gp.search_dist, '--size-cutoff', '6',
             '-t', gp.num_threads, '-m', gp.memory, '-o', gp.cs_init_dir])
     spades_mem = int(gp.memory) + 100
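     # On failure, retry from the k55 checkpoint with more memory each iteration.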
     while retcode.returncode:
         retcode = subprocess.run([
             '/home/lam4003/bin/spades/assembler/spades.py', '--restart-from', 'k55',
             '-m', str(spades_mem), '-o', gp.cs_init_dir])
         spades_mem += 100
Example #16
 def run(self):
     subprocess.run([
         '/home/lam4003/bin/spades/assembler/spades.py', '--meta',
         '--only-assembler', '--gemcode1-1',
         self.input()[0].path, '--gemcode1-2',
         self.input()[1].path, '--search-distance', '0', '--size-cutoff',
         '6', '-t', gp.num_threads, '-m', self.memory, '-o',
         gp.cldspades_dir
     ])
     shutil.copy(djoin(gp.cldspades_dir, 'scaffolds.fasta'),
                 self.output().path)
Example #17
    def run(self):
        raw_sam = djoin(gp.work_dir, 'bsort.sam')
        raw_bam = djoin(gp.work_dir, 'bsort_raw.bam')
        srt_bam = djoin(gp.work_dir, 'bsort.bam')

        # Map the read pairs against the reference with bowtie2 in sensitive local mode.
        map_step = subprocess.run([
            'bowtie2', '--sensitive-local', '-p', gp.num_threads, '-x',
            self.reference, '-1',
            self.input()[0].path, '-2',
            self.input()[1].path, '-S', raw_sam
        ])
        bam_step = subprocess.run([
            'samtools', 'view', '-hb', '-f', '0', '-F', '256', raw_sam, '-o',
            raw_bam
        ])  # Primary alignments only
        srt_step = subprocess.run(['samtools', 'sort', raw_bam, '-o', srt_bam])
        idx_step = subprocess.run(['samtools', 'index', srt_bam])
        bam_to_annotate(srt_bam, self.ids_to_names, gp.work_dir,
                        self.fragments)
        add_barcodes(self.input()[0].path, djoin(gp.work_dir, 'bsort.csv'),
                     gp.work_dir)
Example #18
def index (req,page=None):
	entries = lib.get_config ("Entries","Static")
	pagemeta = None
	for entry in entries:
		if entry["uri"] == page and "src" in entry:
			pagemeta = entry
			break
	else:
		req.headers_out["location"] = lib.get_config ("SitePrefix").encode()
		req.status = apache.HTTP_MOVED_TEMPORARILY
		return

	with open ( lib.valid_dir (djoin (dirname(__file__),pagemeta["src"])) ) as pagefile:
		text = lib.escape ("\n".join (pagefile.readlines()))
	if ".md" in pagemeta["src"]:
		text = markdown (text)
	text = lib.get_template ("static") % {"content": text}

	return lib.respond (req, text, pagemeta["title"],pagemeta["caption"],pagemeta["description"],module_info(active=page))
Example #19
 def test_get_new_issues(self):
     dp1_1 = DeploymentPoll(
         key='RALPH-341',
         date=datetime.datetime.strptime('1-1-2012 1:10', '%d-%m-%Y %H:%M')
     )
     dp1_1.save()
     dp1_2 = DeploymentPoll(
         key='RALPH-341',
         date=datetime.datetime.strptime('1-1-2012 1:20', '%d-%m-%Y %H:%M'))
     dp1_2.save()
     dp2_1 = DeploymentPoll(
         key='RALPH-342',
         date=datetime.datetime.strptime('2-2-2012 2:10', '%d-%m-%Y %H:%M'),
         checked=False)
     dp2_1.save()
     dp2_2 = DeploymentPoll(
         key='RALPH-342',
         date=datetime.datetime.strptime('2-2-2012 2:20', '%d-%m-%Y %H:%M'),
         checked=False)
     dp2_2.save()
     dp3_1 = DeploymentPoll(
         key='RALPH-343',
         date=datetime.datetime.strptime('3-3-2012 3:10', '%d-%m-%Y %H:%M')
     )
     dp3_1.save()
     dp3_2 = DeploymentPoll(
         key='RALPH-343',
         date=datetime.datetime.strptime('3-3-2012 3:20', '%d-%m-%Y %H:%M'))
     dp3_2.save()
     dp4_1 = DeploymentPoll(
         key='RALPH-344',
         date=datetime.datetime.strptime('4-4-2012 5:10', '%d-%m-%Y %H:%M'))
     dp4_1.save()
     x = JiraRSS(tracker_name='JIRA')
     rss = open(
         djoin(CURRENT_DIR, 'cmdb/tests/samples/jira_rss.xml')
     ).read()
     x.rss_url = rss
     self.assertEqual(
         sorted(x.get_new_issues()),
         ['RALPH-341', 'RALPH-342', 'RALPH-343', 'RALPH-344'])
Example #20
def index(req, page=None):
    entries = lib.get_config("Entries", "Static")
    pagemeta = None
    for entry in entries:
        if entry["uri"] == page and "src" in entry:
            pagemeta = entry
            break
    else:
        req.headers_out["location"] = lib.get_config("SitePrefix").encode()
        req.status = apache.HTTP_MOVED_TEMPORARILY
        return

    with open(lib.valid_dir(djoin(dirname(__file__),
                                  pagemeta["src"]))) as pagefile:
        text = lib.escape("\n".join(pagefile.readlines()))
    if ".md" in pagemeta["src"]:
        text = markdown(text)
    text = lib.get_template("static") % {"content": text}

    return lib.respond(req, text, pagemeta["title"], pagemeta["caption"],
                       pagemeta["description"], module_info(active=page))
Example #21
 def test_get_new_issues(self):
     dp1_1 = DeploymentPoll(key="RALPH-341", date=datetime.datetime.strptime("1-1-2012 1:10", "%d-%m-%Y %H:%M"))
     dp1_1.save()
     dp1_2 = DeploymentPoll(key="RALPH-341", date=datetime.datetime.strptime("1-1-2012 1:20", "%d-%m-%Y %H:%M"))
     dp1_2.save()
     dp2_1 = DeploymentPoll(
         key="RALPH-342", date=datetime.datetime.strptime("2-2-2012 2:10", "%d-%m-%Y %H:%M"), checked=False
     )
     dp2_1.save()
     dp2_2 = DeploymentPoll(
         key="RALPH-342", date=datetime.datetime.strptime("2-2-2012 2:20", "%d-%m-%Y %H:%M"), checked=False
     )
     dp2_2.save()
     dp3_1 = DeploymentPoll(key="RALPH-343", date=datetime.datetime.strptime("3-3-2012 3:10", "%d-%m-%Y %H:%M"))
     dp3_1.save()
     dp3_2 = DeploymentPoll(key="RALPH-343", date=datetime.datetime.strptime("3-3-2012 3:20", "%d-%m-%Y %H:%M"))
     dp3_2.save()
     dp4_1 = DeploymentPoll(key="RALPH-344", date=datetime.datetime.strptime("4-4-2012 5:10", "%d-%m-%Y %H:%M"))
     dp4_1.save()
     x = JiraRSS(tracker_name="JIRA")
     rss = open(djoin(CURRENT_DIR, "cmdb/tests/samples/jira_rss.xml")).read()
     x.rss_url = rss
     self.assertEqual(sorted(x.get_new_issues()), ["RALPH-341", "RALPH-342", "RALPH-343", "RALPH-344"])
Example #22
 def output(self):
     return luigi.LocalTarget(
         djoin(gp.enhd_cld_dir,
               '.'.join([gp.edit_prefix, self.direction, 'fastq'])))
Example #23
 def get_details(self, *args, **kwargs):
     xml = open(
         djoin(CURRENT_DIR, 'cmdb/tests/samples/fisheye_details.xml')
     ).read()
     return objectify.fromstring(xml)
Example #24
def _enum_files(req=None):
    if req: return
    files = glob(djoin(dirname(__file__), "_mainpage", "*.md"))
    return sorted(files,
                  lambda a, b: cmp(int(a.split(".")[0]), int(b.split(".")[0])))
Example #25
def _enum_files (req=None):
	if req: return
	files = glob (djoin (dirname(__file__),"_mainpage","*.md"))
	return sorted(files, lambda a,b: cmp(int(a.split(".")[0]), int(b.split(".")[0])))
Example #26
def _latest_ver (req=None,repo=None):
	if req or not repo: return
	git_dir = djoin (lib.get_config ("RepoPath","Downloads"),repo["repo"])
	refs = Popen (["git", "--git-dir="+git_dir, "show-ref"],stdout=PIPE).communicate()[0]
	return refs.split("\n")[-2].split("/")[-1]
Example #27
 def get_changes(self, *args, **kwargs):
     xml = open(djoin(CURRENT_DIR, "cmdb/tests/samples/fisheye_changesets.xml")).read()
     return objectify.fromstring(xml)
Example #28
 def run(self):
     concat_annotations(djoin(gp.orig_map_dir, self.chunk_file_name),
                        gp.enhd_cld_dir, self.fragments)
Example #29
 def output(self):
     return luigi.LocalTarget(djoin(gp.work_dir, 'bsort.final.csv'))
Example #30
 def output(self):
     return (luigi.LocalTarget(djoin(gp.enhd_cld_dir, 'R1', self.chunk_file_name)),
             luigi.LocalTarget(djoin(gp.enhd_cld_dir, 'R2', self.chunk_file_name)))
Example #31
 def output(self):
     return luigi.LocalTarget(
         djoin(gp.read_dir,
               '.'.join([self.prefix + '_bsort', self.direction, 'fastq'])))
Example #32
 def output(self):
     return luigi.LocalTarget(
         djoin(gp.analyses_dir, gp.final_prefix + '.scaffolds.fasta'))
Example #33
import sys, os
from markdown import markdown
from mod_python import apache
from glob import glob
from math import ceil
from re import sub
from xml.sax.saxutils import escape
from os.path import dirname, basename, exists
from os.path import join as djoin

sys.path.append(djoin(dirname(__file__), ".."))
import lib


def index(req, id=-1, page="0", feed=False):
    if id >= 0:
        try:
            content, title, author = parse_news(str(int(id)), req)
            return lib.respond(req, content, title, "News", title,
                               module_info())
        except Exception:
            return lib.e404(req, "Could not find matching news!",
                            module_info())

    if feed:
        return generate_feed(req)

    return lib.respond(req, generate_page(int(page), 1), "News", "News",
                       "News", module_info())

Example #34
def check_make(curr_dir, sub):
    outdir = djoin(curr_dir, sub)
    if not exists(outdir):
        makedirs(outdir)
    return outdir
Example #35
import sys
from mod_python import apache
from os.path import dirname,basename,exists
from os.path import join as djoin
from markdown import markdown
sys.path.append (djoin (dirname(__file__),".."))
import lib

def index (req,page=None):
	entries = lib.get_config ("Entries","Static")
	pagemeta = None
	for entry in entries:
		if entry["uri"] == page and "src" in entry:
			pagemeta = entry
			break
	else:
		req.headers_out["location"] = lib.get_config ("SitePrefix").encode()
		req.status = apache.HTTP_MOVED_TEMPORARILY
		return

	with open ( lib.valid_dir (djoin (dirname(__file__),pagemeta["src"])) ) as pagefile:
		text = lib.escape ("\n".join (pagefile.readlines()))
	if ".md" in pagemeta["src"]:
		text = markdown (text)
	text = lib.get_template ("static") % {"content": text}

	return lib.respond (req, text, pagemeta["title"],pagemeta["caption"],pagemeta["description"],module_info(active=page))

def generate_entries (req=None,current=None):
	if req: return
Example #36
 def __init__(self, *args, **kwargs):
     super(Generate_Enhanced_Chunks, self).__init__(*args, **kwargs)
     self.enhd_file_prefix = djoin(gp.enhd_cld_dir, 'bsort')
Example #37
 def output(self):
     return luigi.LocalTarget(
         djoin(gp.analyses_dir,
               '.'.join([gp.final_prefix, self.direction, 'csv'])))
Example #38
import os
from os.path import join as djoin
from json import dumps as jdumps
from flask import Flask
from flask import render_template
from flask import request
from flask import url_for
from urllib.parse import unquote_plus

app = Flask(__name__)
app.jinja_env.add_extension('pypugjs.ext.jinja.PyPugJSExtension')

# Edit this as you require
filepath = 'title.txt'
folderpath = os.path.dirname(
    os.path.realpath(__file__)) or '/foo/bar/path/to/file/'
fullpath = djoin(folderpath, filepath)
_PLAYER = 'spotify'


@app.context_processor
def override_url_for():
    return dict(url_for=dated_url_for)


def dated_url_for(endpoint, **values):
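    # Append the static file's mtime as a query parameter so browsers re-fetch assets after they change.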
    if endpoint == 'static':
        filename = values.get('filename', None)
        if filename:
            file_path = os.path.join(app.root_path, endpoint, filename)
            values['q'] = int(os.stat(file_path).st_mtime)
    return url_for(endpoint, **values)
Example #39
 def output(self):
     return luigi.LocalTarget(
         djoin(gp.analyses_dir, gp.final_prefix + '.statistics.csv'))