def reset(self):
    NUM_MACHINES_CVD = self.config['NUM_MACHINES_CVD']
    NUM_MACHINES_CDO = self.config['NUM_MACHINES_CDO']
    NUM_JOBS = self.config['NUM_JOBS']
    self.machines_cvd = [Machine(str(i), process='CVD') for i in range(NUM_MACHINES_CVD)]
    self.machines_cdo = [Machine(str(i), process='CDO') for i in range(NUM_MACHINES_CDO)]
    self.jobs = [Jobs(i) for i in range(NUM_JOBS)]
    self.miss_cqt = []
    self.NOW = 0
    # Machine-by-job bookkeeping matrices, reset to zero each episode.
    self.jobs_processed_CVD = np.zeros((NUM_MACHINES_CVD, NUM_JOBS), dtype=np.int8)
    self.jobs_processed_CDO = np.zeros((NUM_MACHINES_CDO, NUM_JOBS), dtype=np.int8)
    self.process_completion_cvd = np.zeros((NUM_MACHINES_CVD, NUM_JOBS))
    self.process_completion_cdo = np.zeros((NUM_MACHINES_CDO, NUM_JOBS))
    self.lot_release = 0
    self.time_release = self.df_log['Time step'].values
    self.job_num = 0
    return self.returnObs()
def process_item(self, item, spider):
    if isinstance(item, LagouItem):
        # Skip records that already exist for this position/company pair.
        exists = self.session.query(Jobs).filter(
            Jobs.positionId == item['positionId'],
            Jobs.companyId == item['companyId']).first()
        if not exists:
            obj = Jobs(
                companyId=item['companyId'],
                positionId=item['positionId'],
                jobNature=item['jobNature'],
                companyName=item['companyName'],
                financeStage=item['financeStage'],
                companyFullName=item['companyFullName'],
                companySize=item['companySize'],
                industryField=item['industryField'],
                positionName=item['positionName'],
                city=item['city'],
                createTime=item['createTime'],
                salary_low=item['salary_low'],
                salary_high=item['salary_high'],
                workYear=item['workYear'],
                education=item['education'],
                positionAdvantage=item['positionAdvantage'],
                district=item['district'],
                # uid=item['uid'],
                companyLabelList=item['companyLabelList'],
            )
            self.session.add(obj)
            try:
                self.session.commit()
            except Exception as e:  # Python 3 syntax; the original used "except Exception, e"
                print(e)
                self.session.rollback()
    # Return the item so downstream pipeline components still receive it.
    return item
def job(page=None, code=None):
    page = int(request.args.get('page', 1))
    if code is None:
        if JobPages.need_cache_page(page):
            jobs = json.loads(JobPages.new_page(page))
        else:
            jobs = json.loads(str(JobPages.get_page(page).jobs))
        return render_template("job.html", jobs=jobs, page=page)

    if Jobs.need_cache_code(code):
        print("Grabbing from API for first time and storing it!")
        using_api = True
        jarray = Jobs.new_job(code)
    else:
        print("Pulling cached value from DB!")
        using_api = False
        jarray = Jobs.get_code(code)
    job_info, knowledge, skills, abilities, technology, related_jobs, wage = jarray[:7]
    if using_api:
        # Fresh API responses wrap the JSON payload in a .text attribute.
        def load(r):
            return json.loads(r.text)
    else:
        # Cached DB values are raw JSON strings.
        load = json.loads
    return render_template("job_info.html",
                           job=load(job_info),
                           knowledge=load(knowledge),
                           skills=load(skills),
                           abilities=load(abilities),
                           technology=load(technology),
                           related_jobs=load(related_jobs),
                           wage=load(wage))
def savejob(jobname, contents_str, username):
    job = Jobs(job_name=jobname, contents=contents_str, username=username)
    try:
        db.session.add(job)
        db.session.commit()
        return job.job_id
    except Exception:
        # Roll back the failed transaction so the session stays usable.
        db.session.rollback()
        return None
def add_job():
    name = request.args.get('name')
    try:
        job = Jobs(name=name)
        db.session.add(job)
        db.session.commit()
        return "Job added. job id={}".format(job.id)
    except Exception as e:
        db.session.rollback()
        return str(e)
def ptp_index(request):
    frees, totals = server_stats()
    if request.method == 'POST':  # The form has been submitted
        ptp_form = PTPForm(request.POST, request.FILES)  # A form bound to the POST data
        if ptp_form.is_valid():  # All validation rules pass
            # Process the data in form.cleaned_data
            job = Jobs()
            job.email = ptp_form.cleaned_data['sender']
            rooted = ptp_form.cleaned_data['rooted'] == "rooted"
            job.data_type = "rptree" if rooted else "ptree"
            job.method = "PTP"
            job.save()
            # Each job gets its own media directory keyed by its database id.
            filepath = settings.MEDIA_ROOT + repr(job.id) + "/"
            os.mkdir(filepath)
            newfilename = filepath + "input.tre"
            handle_uploaded_file(fin=request.FILES['treefile'], fout=newfilename)
            job.filepath = filepath
            job.save()
            nmcmc = ptp_form.cleaned_data['nmcmc']
            imcmc = ptp_form.cleaned_data['imcmc']
            burnin = ptp_form.cleaned_data['burnin']
            seed = ptp_form.cleaned_data['seed']
            outgroups = ptp_form.cleaned_data['outgroups'].strip()
            removeog = ptp_form.cleaned_data['removeog']
            jobok = run_ptp_sge(fin=newfilename, fout=filepath + "output",
                                rooted=rooted, nmcmc=nmcmc, imcmc=imcmc,
                                burnin=burnin, seed=seed, outgroup=outgroups,
                                remove=removeog)
            if jobok:
                return show_ptp_result(request, job_id=repr(job.id), email=job.email)
            else:
                return sge_error(request)
    else:
        ptp_form = PTPForm()  # An unbound form
    # 'avaliable' spelling kept to match the template variable.
    context = {'pform': ptp_form, 'avaliable': frees, 'total': totals}
    return render(request, 'ptp/index.html', context)
def handle_ex_1():
    payload = request.get_json()  # renamed from "json" to avoid shadowing the json module
    db.session.add(Jobs(job_name=payload['name'],
                        job_place=payload['place'],
                        job_pay=payload['pay']))
    db.session.commit()
    return jsonify(payload)
def process_item(self, item, spider):
    """Save jobs in the database.

    This method is called for every item pipeline component.
    """
    session = self.Session()
    job = Jobs(**item)
    try:
        session.add(job)
        session.commit()
        log.msg('-> Adding new entry : %s !' % job.name, level=log.INFO)
        if job.email is not None:
            #self.send_mail_offer(item)
            job.processed = True
            session.commit()
    except IntegrityError:
        session.rollback()
        q = session.query(Jobs).filter_by(url=item['url']).first()
        if q and (q.processed is False) and (item['email'] is not None):
            log.msg('-> Duplicate entry found : %s not processed !' % item['name'],
                    level=log.INFO)
            # Trying to send again the mail
            #self.send_mail_offer(item)
            # Update the processed flag.
            session.query(Jobs).filter(Jobs.url == item["url"]).update(
                {'processed': True})
            session.commit()
    except SMTPAuthenticationError:
        # The original format string had no %s placeholder and would raise TypeError.
        log.msg('Something wrong with the Gmail server while processing %s !' % job.name,
                level=log.INFO)
        session.rollback()
        raise
    finally:
        session.close()
    return item
def add_job_form():
    if request.method == 'POST':
        name = request.form.get('name')
        try:
            job = Jobs(name=name)
            db.session.add(job)
            db.session.commit()
            return "Job added. job id={}".format(job.id)
        except Exception as e:
            db.session.rollback()
            return str(e)
    return render_template("getdata.html")
def generate_json_report(start_time, end_time, test):
    report = {
        'start_time': str(start_time),
        'end_time': str(end_time),
        'test': test,
    }
    # Persist the run record alongside returning the JSON payload.
    db.session.add(Jobs(start_time=str(start_time),
                        end_time=str(end_time),
                        stress_test=str(test)))
    db.session.commit()
    return json.dumps(report)
def post(self):
    # All fields are mandatory; required_services may repeat, so collect it as a list.
    for field in ('job_title', 'customer_name', 'job_description',
                  'start_date_to_apply', 'last_date_to_apply', 'pay'):
        parser.add_argument(field, help='This field cannot be blank', required=True)
    parser.add_argument('required_services', help='This field cannot be blank',
                        action='append', required=True)
    data = parser.parse_args()
    new_job = jobs.Jobs(
        job_title=data['job_title'],
        customer_name=data['customer_name'],
        job_description=data['job_description'],
        required_services=data['required_services'],
        start_date_to_apply=datetime.strptime(data['start_date_to_apply'],
                                              "%a, %d %b %Y %H:%M:%S %Z"),
        last_date_to_apply=datetime.strptime(data['last_date_to_apply'],
                                             "%a, %d %b %Y %H:%M:%S %Z"),
        pay=data['pay'])
    try:
        new_job.save()
        return {'message': 'Job with title {} has been created'.format(
            data['job_title'])}, 200
    except Exception as ex:
        message = "{0}:{1!r}".format(type(ex).__name__, ex.args)
        return {'error': message}, 400
def skill_salary(code):
    if Jobs.need_cache_code(code):
        print("Grabbing from API for first time and storing it!")
        using_api = True
        jarray = Jobs.new_job(code)
    else:
        print("Pulling cached value from DB!")
        using_api = False
        jarray = Jobs.get_code(code)
    job_info, knowledge, skills, abilities, technology, related_jobs, wage = jarray[:7]
    if using_api:
        # Fresh API responses wrap the JSON payload in a .text attribute.
        def load(r):
            return json.loads(r.text)
    else:
        # Cached DB values are raw JSON strings.
        load = json.loads
    return render_template("skill_salary.html",
                           job=load(job_info),
                           knowledge=load(knowledge),
                           skills=load(skills),
                           abilities=load(abilities),
                           technology=load(technology),
                           related_jobs=load(related_jobs),
                           wage=load(wage))
def process_item(self, item, spider):
    """Save jobs in the database.

    This method is called for every item pipeline component.
    """
    session = self.Session()
    job = Jobs(**item)
    try:
        session.add(job)
        session.commit()
    except:
        session.rollback()
        raise
    finally:
        session.close()
    return item
def add_job():
    name = request.form.get('name')
    content = request.form.get('content')
    cron_time = request.form.get('cron_time')
    # Every target must be a known host or cluster.
    host = content.split(",")
    _hosts = [x[0] for x in db.session.query(Host.hostname).all()]
    _clusters = [x[0] for x in db.session.query(distinct(Host.cluster)).all()]
    for x in host:
        if x not in (_hosts + _clusters):
            return jsonify({'result': 'fail', 'error': x + " does not exist"})
    # The schedule is stored as five comma-separated cron fields.
    if cron_time.count(",") != 4:
        return jsonify({'result': 'fail', 'error': "cron schedule format is invalid"})
    need_add_job = Jobs(name=name, content=content, cron_time=cron_time)
    db.session.add(need_add_job)
    try:
        db.session.commit()
        return jsonify({'result': 'success', 'error': None})
    except Exception:
        db.session.rollback()
        return jsonify({'result': 'fail', 'error': 'database error'})
month = time.month

if __name__ == '__main__':
    # Job numbers already registered for this year.
    numbers = [n[0] for n in session.query(Jobs.number).filter(Jobs.year == year).all()]
    client, project_name, project_number = data_input()
    if int(project_number) not in numbers:
        job = Jobs(number=project_number, year=year, month=month,
                   desc=project_name, entry_date=datetime.utcnow())
        job.create_job()
        folders = job.find_objects()
        session.add_all(folders)
        session.add(job)
        # Copy each template file into the new job's folder structure;
        # this must stay inside the branch where `job` was created.
        for f in files:
            obj = JobOjbs(object_path=path.join(job.root_path, f),
                          job_number=job.number, job_year=job.year)
            obj.copy_file(required)
            obj.setup()
            session.add(obj)
import sys
sys.path.append('../backend/models/')

from models import db, Jobs

# Seed a placeholder Jobs row; without the commit the insert is never flushed.
db.session.add(Jobs(start_time="0", end_time="0", stress_test="0"))
db.session.commit()
def run():
    # Clear existing rows before seeding.
    Jobs.query.delete()
    Users.query.delete()
    # db.session.execute("ALTER SEQUENCE jobs_id_seq RESTART")
    # db.session.execute("ALTER SEQUENCE users_id_seq RESTART")
    jak = Users(first_name='Jak', last_name='Atak', username='******',
                password='******', email='*****@*****.**')
    db.session.add(jak)
    db.session.add(Jobs(job_name="Pro Sleeping", job_place="Insomni.co",
                        job_pay="$80 per hour", user=jak))
    tom = Users(first_name='Tom', last_name='Bradley', username='******',
                password='******', email='*****@*****.**')
    db.session.add(tom)
    db.session.add(Jobs(job_name="Food Runner", job_place="BigEats Restaurant",
                        job_pay="$30 per hour", user=tom))
    db.session.add(Users(first_name='Michael', last_name='Jackson', username='******',
                         password='******', email='*****@*****.**'))
    db.session.add(Users(first_name='Beth', last_name='McChair', username='******',
                         password='******', email='*****@*****.**'))
    db.session.add(Users(first_name='Kim', last_name='Tallmen', username='******',
                         password='******', email='*****@*****.**'))
    db.session.add(Users(first_name='Joanne', last_name='Scammer', username='******',
                         password='******', email='*****@*****.**'))
    db.session.add(Users(first_name='Victor', last_name='Jordan', username='******',
                         password='******', email='*****@*****.**'))
    db.session.commit()
    return 'seeds ran successfully'
def main():
    queue_size = 4
    engine = create_engine('postgresql://*****:*****@localhost:5432/yhack')
    rt = rtorrent.RTorrent()

    complete_qry = (Jobs.update()
                    .values(completed=func.now())
                    .where(and_(Jobs.c.completed == None,
                                Jobs.c.torrentid == Torrents.c.torrentid,
                                Torrents.c.infohash == bindparam('infohash'))))
    update_qry = (Jobs.update()
                  .values(downloaded=bindparam('completed_bytes'),
                          speed=bindparam('down_rate'),
                          eta=bindparam('eta'),
                          size=bindparam('size'))
                  .where(and_(Torrents.c.infohash == bindparam('infohash'),
                              Jobs.c.torrentid == Torrents.c.torrentid)))
    complete1_qry = (Downloads.update()
                     .values(link=bindparam('s3link'))
                     .where(and_(Downloads.c.downloadid == Jobs.c.downloadid,
                                 Jobs.c.torrentid == Torrents.c.torrentid,
                                 Torrents.c.infohash == bindparam('infohash'))))
    complete2_qry = (Jobs.update()
                     .values(completed=func.now())
                     .where(and_(Jobs.c.torrentid == Torrents.c.torrentid,
                                 Torrents.c.infohash == bindparam('infohash'))))

    active_queue = rt.get_active_infohashes()
    running = True
    while running:
        calculate_hashes(engine)
        active_queue = rt.get_active_infohashes()
        if active_queue:
            for infohash in active_queue:
                completed_bytes = rt.get_completed_bytes(infohash)
                size_bytes = rt.get_size_bytes(infohash)
                if completed_bytes == size_bytes:
                    # Torrent is done: upload to S3 and mark the job complete.
                    print("Completed " + infohash)
                    s3link = store_file(infohash)
                    engine.execute(Downloads.insert().values(link=s3link))
                    s = select([Downloads]).where(Downloads.c.link == s3link).limit(1)
                    res = engine.execute(s).fetchone()
                    # The original built this update but never ran it; execute it
                    # so the downloadid is actually written back to the job.
                    engine.execute(Jobs.update()
                                   .values(downloadid=res.downloadid)
                                   .where(and_(Jobs.c.torrentid == Torrents.c.torrentid,
                                               Torrents.c.infohash == infohash)))
                    rt.close(infohash)
                    rt.erase(infohash)
                    engine.execute(complete1_qry, infohash=infohash, s3link=s3link)
                    engine.execute(complete2_qry, infohash=infohash)
                else:
                    # Torrent still downloading: refresh its transfer stats.
                    down_rate = rt.get_down_rate(infohash)
                    completed_bytes = rt.get_completed_bytes(infohash)
                    size = rt.get_size_bytes(infohash)
                    if down_rate != 0:
                        eta = datetime.timedelta(
                            seconds=(size - completed_bytes) / down_rate)
                    else:
                        eta = None
                    engine.execute(update_qry, infohash=infohash, down_rate=down_rate,
                                   eta=eta, completed_bytes=completed_bytes, size=size)

        active_queue = rt.get_active_infohashes()
        new_torrents = []
        if not active_queue:
            # Queue is empty: pull in a full batch of pending torrents.
            more_needed = queue_size
            queue_qry = (select([Torrents])
                         .where(and_(Jobs.c.completed == None,
                                     Torrents.c.infohash != None,
                                     Jobs.c.torrentid == Torrents.c.torrentid))
                         .order_by(Jobs.c.bid.desc(), Jobs.c.added.asc())
                         .limit(more_needed))
            new_torrents = engine.execute(queue_qry)
        elif len(active_queue) < queue_size:
            # Queue has room: top it up, skipping torrents already active.
            more_needed = queue_size - len(active_queue)
            queue_qry = (select([Torrents])
                         .where(and_(Jobs.c.completed == None,
                                     Jobs.c.torrentid == Torrents.c.torrentid,
                                     Torrents.c.infohash != None,
                                     Torrents.c.infohash.notin_(active_queue)))
                         .order_by(Jobs.c.bid.desc(), Jobs.c.added.asc())
                         .limit(more_needed))
            new_torrents = engine.execute(queue_qry)
        for t in new_torrents:
            print("Add torrent " + t.infohash)
            if t.torrent:
                print("torrent")
                rt.add_torrent_file(unhexlify(t.torrent), t.infohash)
            elif t.magnet_link:
                print("magnet")
                rt.add_torrent_magnet(t.magnet_link, t.infohash)

        active_queue = rt.get_active_infohashes()
        if active_queue:
            # Remove stale torrents: active hashes with no open job behind them.
            check_qry = (select([Torrents])
                         .where(and_(Torrents.c.infohash.in_(active_queue),
                                     not_(exists(select([Jobs]).where(and_(
                                         Torrents.c.torrentid == Jobs.c.torrentid,
                                         Jobs.c.completed == None)))))))
            stale_torrents = engine.execute(check_qry)
            stale_hashes = [t.infohash for t in stale_torrents]
            active_torrents_tmp = []
            for t in active_queue:
                if t in stale_hashes:
                    print("Remove stale torrent " + t)
                    rt.close(t)
                    rt.erase(t)
                else:
                    active_torrents_tmp.append(t)
            active_queue = active_torrents_tmp
def test_job_insert(self):
    # Let failures propagate: the original try/except merely printed the error,
    # which would let this test pass even when the insert is broken.
    Jobs.new_job("29-1141.01")
    self.assertFalse(Jobs.need_cache_code("29-1141.01"))