def test_reset_all_checked_count(client):
    assert checklist_1.checked_count != 0
    assert checklist_1.last_review_id != 0
    with get_session() as s:
        Checklist.reset_all_checked_count(s, checklist_1.scene_id)
    with get_session() as s:
        checklist_1_after = Checklist.get_by_id(s, checklist_1.id)
        assert checklist_1_after.checked_count == 0
        assert checklist_1_after.last_review_id == 0
def init():
    engine = models.get_engine(**options.as_dict())
    models.Base.metadata.create_all(engine)
    if options.truncate:
        with contextlib.closing(models.get_session(engine)) as session:
            models.TestTable.truncate(session)
    if options.init_records:
        with contextlib.closing(models.get_session(engine)) as session:
            session.bulk_save_objects(
                [models.TestTable.random(session) for i in xrange(options.init_records)])
            session.commit()
    logger.debug('initiated')
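# Many of the snippets below use `with get_session() as s:` directly, which
# implies a commit-or-rollback context manager. A minimal sketch of such a
# helper, assuming a plain SQLAlchemy sessionmaker; the names here are
# illustrative and not taken from any of these codebases:
import contextlib

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

engine = create_engine("sqlite://")
Session = sessionmaker(bind=engine)

@contextlib.contextmanager
def get_session():
    session = Session()
    try:
        yield session
        session.commit()     # commit on a clean exit from the with-block
    except Exception:
        session.rollback()   # roll back if the block raised
        raise
    finally:
        session.close()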
def get_off_services():
    session = get_session('ekomobile')
    services = get_class('service_fx')
    hstr_services = get_class('hstr_service_fx')
    result = []
    # NULL tests must use .is_(None); a bare `not column` is evaluated in
    # Python, so the original query silently dropped the NULL branch.
    sers = session.query(hstr_services.object_id, hstr_services.service_id).\
        filter(or_(hstr_services.deactivated.is_(None),
                   hstr_services.deactivated > datetime.now())).\
        group_by(hstr_services.service_id).all()
    print('Get services list')
    for rec in sers:
        rapi = Rest(ctn=int(rec[0]))
        ser_name = session.query(services.bee_sync).filter(
            services.i_id == int(rec[1])).one()[0]
        api_sers = rapi.get_services_list()['services']
        for ser in api_sers:
            if ser['name'] == ser_name:
                result.append([ser_name, ser['removeInd']])
        if not result or result[-1][0] != ser_name:
            warnings.warn('Mismatch, phone={}, service={}'.format(
                rapi.ctn, ser_name))
        print('Made {} of {}'.format(sers.index(rec) + 1, len(sers)))
    session.close()
    try:
        ex_write(['code', 'y/n'], result,
                 "C:/Users/админ/Desktop/remove_services.xlsx")
    except Exception:
        return result
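# A self-contained illustration of the NULL test used above: SQLAlchemy
# renders `.is_(None)` as SQL `IS NULL`, whereas `not column` is plain Python
# truthiness and never reaches the database. Table and column names below are
# made up for the demo:
from datetime import datetime

from sqlalchemy import Column, DateTime, Integer, create_engine, or_
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()

class Subscription(Base):
    __tablename__ = 'subscription'
    i_id = Column(Integer, primary_key=True)
    deactivated = Column(DateTime, nullable=True)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

# Active rows: never deactivated, or deactivated at a future date.
active = session.query(Subscription).filter(
    or_(Subscription.deactivated.is_(None),
        Subscription.deactivated > datetime.now())).all()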
def get_detail(beg=0):
    ctn = get_class("ctn")
    ses = get_session("ekomobile")
    ctn_list = ses.query(ctn).filter(ctn.operator_agree.in_([404, 405])).all()
    result_detail = []
    null_phones = []
    for phone in ctn_list[beg:]:
        try:
            api = Soap(ctn=phone.msisdn)
            dt = api.get_current_detail()
        except Exception:
            print("Last processed - {}, {}".format(phone.msisdn, ctn_list.index(phone)))
            print(null_phones)
            return
        if len(dt) == 0:
            null_phones.append(phone.msisdn)
        result_detail.extend(dt)
        print("Ready {} of {}".format(ctn_list.index(phone) + 1, len(ctn_list)))
    print(null_phones)
    # Column headers are Russian: date, outgoing, incoming, connection type,
    # call description, traffic, cost, duration.
    ex_write(
        values=result_detail,
        names=["Дата", "Исходящий", "Входящий", "Тип соединения",
               "Описание звонка", "Трафик", "Стоимость", "Длительность"],
        path="/home/spicin/dt.xlsx",
    )
def main():
    parser = OptionParser('usage: %prog [options] task')
    parser.add_option("-f", "--config-file", dest='config_file',
                      help="Configuration file")
    options, args = parser.parse_args()
    if not options.config_file:
        parser.error("Please specify a configuration file.")
    logging.config.fileConfig(options.config_file)
    config = ConfigParser()
    config.read(options.config_file)
    session = get_session(config)
    today = date.today()
    entries = session.query(TimeEntry).filter(
        not_(TimeEntry.task.like('Aura %'))).filter(
        or_(TimeEntry.comment == None, TimeEntry.comment == '')).filter(
        TimeEntry.start_dt >= today).filter(
        TimeEntry.start_dt <= (today + timedelta(days=1)))
    grouped_entries = group_entries(entries)
    if grouped_entries:
        print 'Following Non-Aura entries require annotations:'
        print_alerts(session, grouped_entries)
        email_offenders(session, grouped_entries)
def run(self):
    logging.info('CrawlerDaemon run')
    sqlite_session = get_session(self.config.database)
    orm_engines = sqlite_session.query(ORM_Engine).all()
    if not self.config.dry_run:
        if len(orm_engines) == 0:
            logging.debug('Crawler has no engines')
        # Start controllers in each thread
        for orm_engine in orm_engines:
            logging.info('Load orm_engine: %s' % orm_engine.name)
            engine = Engine(orm_engine)
            self.controllers[engine.name] = Controller(engine, sqlite_session)
            self.controllers[engine.name].start()
        # Start scheduling searches
        for orm_search in sqlite_session.query(Search).all():
            for engine in orm_search.engines:
                # Bind the loop variables as defaults: a bare closure would
                # see only the values from the final iteration.
                job = lambda c=self.controllers[engine.name], s=orm_search: c.queue.put(s)
                schedule.every(orm_search.periodicity).seconds.do(job)
                logging.debug('Put %s to schedule with periodicity %i seconds'
                              % (orm_search.name, orm_search.periodicity))
    self.httpd = HTTPD(self.config, self.controllers)
    self.httpd.start()
    while True:
        if not self.config.dry_run:
            schedule.run_pending()
        time.sleep(1)
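# A minimal demonstration of the late-binding pitfall the default arguments
# above guard against - Python closures capture variables, not values:
jobs = [lambda: print(i) for i in range(3)]
for job in jobs:
    job()            # prints 2, 2, 2: every lambda sees the final `i`

jobs = [lambda i=i: print(i) for i in range(3)]
for job in jobs:
    job()            # prints 0, 1, 2: the default freezes `i` at creation time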
def show_plot_employees(employees_id, start_date=None, end_date=None):
    with plt.style.context('seaborn'):
        fig, axes = plt.subplots(nrows=len(employees_id), ncols=1)
        axes = np.atleast_1d(axes)  # keep indexing uniform when only one employee is plotted
        fig.subplots_adjust(wspace=0.5, hspace=0.5)
        for index, employee_id in enumerate(employees_id):
            statistics, employee = Visualization.__get_statistics_from_database(
                employee_id, start_date, end_date)
            times = np.array([time.strftime('%Y-%m-%d') for time, _ in statistics])
            temperatures = np.array([float(temperature) for _, temperature in statistics])
            # Create plot
            df = pd.DataFrame(temperatures, index=times)
            axes[index].xaxis.set_major_locator(ticker.MultipleLocator(2))
            labels = axes[index].set_xticklabels(times, horizontalalignment='center',
                                                 fontweight='light', fontsize='xx-small')
            for i, label in enumerate(labels):
                label.set_y(label.get_position()[1] - (i % 2) * 0.075)
            plt.xticks(rotation=45, horizontalalignment='right',
                       fontweight='light', fontsize='x-small')
            df.plot(kind='line', style='ko-', legend=False, xlabel='Datetime',
                    ylabel='Temperature', title=employee, grid=True, ax=axes[index])
    plt.show()
def run(self):
    session = models.get_session()
    store_entry = Store()
    store_entry.payload = self.payload.decode("utf-8")
    store_entry.topic = self.topic
    session.add(store_entry)
    session.commit()
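# The `Store` model written here and read by the rpc_* handlers further down
# is never declared in these snippets. A plausible minimal declaration,
# inferred from the attributes the snippets access (topic, payload,
# timestamp), might look like this sketch:
import datetime

from sqlalchemy import Column, DateTime, Integer, String, Text
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class Store(Base):
    __tablename__ = 'store'
    id = Column(Integer, primary_key=True)
    topic = Column(String(255), index=True)
    payload = Column(Text)
    timestamp = Column(DateTime, default=datetime.datetime.utcnow)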
def _brew_countdown(channel):
    session = get_session()
    server = session.query(Server).filter_by(completed=False).first()
    if not server:
        return
    server.completed = True
    customers = session.query(Customer).filter_by(server_id=server.id)
    for customer in customers.all():
        customer.user.teas_drunk += 1
        customer.user.teas_received += 1
        server.user.teas_brewed += 1
    server.user.teas_brewed += 1  # Account for server's tea
    server.user.teas_drunk += 1
    server.user.times_brewed += 1
    if not customers.count():
        session.commit()
        return post_message('Time is up! Looks like no one else wants a cuppa.', channel)
    # There must be at least 1 customer to get a nomination point.
    server.user.nomination_points += 1
    session.commit()
    return post_message("\n".join(
        ['Time is up!'] +
        ['%s wants %s' % (customer.user.display_name, customer.user.tea_type)
         for customer in customers]), channel)
def hello(data):
    """hello world"""
    with get_session() as s:
        s.add(HelloLog(name=data["name"]))
        s.commit()
    return succeed()
def get_all():
    with get_session() as s:
        urls = [
            {"comment": i.comment, "url": i.url, "updated_at": i.updated_at}
            for i in s.query(URLShare).order_by(URLShare.id.desc()).all()
        ]
    return jsonify({"urls": urls})
def rpc_topic_data_between_dates(self, request, topic, from_date, to_date):
    session = get_session()
    from_date = datetime.datetime.fromtimestamp(from_date)
    to_date = datetime.datetime.fromtimestamp(to_date)
    data = session.query(Store).filter(Store.topic == topic).filter(
        and_(Store.timestamp >= from_date, Store.timestamp <= to_date)).all()
    print(data)
    session.close()  # the other rpc_* handlers close their session; do the same here
    return [to_dict(i) for i in data]
def _get_data(num=None, login=None, ban=None):
    """Return login, password and ban (oan) from eko_DB.

    Accepts a phone number (num) and/or a ban.
    """
    session = get_session('ekomobile')
    ctns = get_class('ctn')
    agrees = get_class('operator_agree')
    accounts = get_class('account_info')
    if not num and not ban and not login:
        raise InitializationError('None of the required parameters (num, ban, login) was given')
    agree = None  # avoid UnboundLocalError when only `login` is supplied
    if num:
        try:
            ban_id = ctns.select(session=session, where={'msisdn': num}).operator_agree
            agree = agrees.select(session=session, where={'i_id': ban_id})
        except Exception:
            print(num)
            ban_id = session.query(ctns).filter_by(msisdn=num).first().operator_agree
            agree = session.query(agrees).filter_by(i_id=ban_id).first()
    elif ban:
        agree = agrees.select(session=session, where={'oan': ban})
    # NOTE: a lookup by `login` alone is not implemented here.
    if agree is not None:
        account = accounts.select(session=session,
                                  where={'operator_agree': agree.i_id, 'access_type': 1})
        session.close()
        if account:
            return account.login, account.password, agree.oan
def write_worker(self):
    logger.debug('write start {}'.format(self.id_ if self.id_ else ''))
    engine = models.get_engine(**options.as_dict())
    with contextlib.closing(models.get_session(engine)) as session:
        t = models.TestTable.random(session)
        session.add(t)
        session.commit()
    logger.debug('write stop {}'.format(self.id_ if self.id_ else ''))
def var1():
    session = get_session()
    query = session.query(Book).order_by(Book.publication_year).all()
    # print('\n'.join(map(str, query)))
    for book in query:
        print("{} - {} ({}): {}".format(
            book.author.name, book.name, book.publication_year,
            [genre.name for genre in book.genres.all()]))
def rss():
    with get_session() as s:
        urls = s.query(URLShare).order_by(URLShare.id.desc()).all()
        response = make_response(render_template('rss.xml', urls=urls))
        response.headers['Content-Type'] = 'application/xml'
        return response
def save_comment(comment):
    with get_session() as s:
        issue = Issue.get_latest_one(s)
        if issue:
            issue.content = comment
            s.add(issue)
            return "{}: {}#{}".format(comment, config.SHARE_BOT_URL, issue.id)
    return "not found"
def update_comment(issue_id, comment):
    with get_session() as s:
        issue = Issue.get_by_id(s, issue_id)  # pass the session, as the other helpers do
        if issue:
            issue.content = comment
            s.add(issue)
            return "mapped with url: {}".format(issue.id)
    return "not found"
def save_comment(comment):
    with get_session() as s:
        share = s.query(URLShare).order_by(URLShare.id.desc()).first()
        if share:
            share.comment = comment
            s.add(share)
            return comment + ": " + share.url
    return "not found"
def update_comment(share_id, comment):
    with get_session() as s:
        share = s.query(URLShare).filter(URLShare.id == share_id).first()
        if share:
            share.comment = comment
            s.add(share)
            return "mapped with url: " + share.url
    return "not found"
def add_user(user: LoginUser):
    session = get_session()
    new_user = UserInDB(email=user.email, name=user.name,
                        hashed_password=get_password_hash(user.password))
    session.add(new_user)
    session.commit()
    return make_user_from_db(new_user)
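# `get_password_hash` is not defined in these snippets. One common
# implementation (an assumption, not necessarily what this codebase uses)
# delegates to passlib:
from passlib.context import CryptContext

pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")

def get_password_hash(password: str) -> str:
    return pwd_context.hash(password)

def verify_password(plain: str, hashed: str) -> bool:
    return pwd_context.verify(plain, hashed)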
def static_web_generation(config):
    """Generate all the static web pages."""
    engine = models.init(config)
    Session = models.get_session(engine)  # get_session returns a session factory here
    session = Session()
    # Latest first
    passes = session.query(models.Passes).order_by(models.Passes.aos_time.desc())
    print passes
def check_bills():
    """Check that the services are present in billing."""
    ser = get_class("hstr_service_fx")
    service_fx = get_class("service_fx")
    session = get_session("ekomobile")
    file = open("C:/Users/админ/Desktop/1.txt").readlines()
    rez_f = []
    for el in file:
        phone, off, on = el.split(";")
        rez_f.append({"phone": phone.rstrip(), "on": on.rstrip(), "off": off.rstrip()})
    on = [[el["phone"], el["on"]] for el in rez_f if el["on"] != ""]
    off = [[el["phone"], el["off"]] for el in rez_f if el["off"] != ""]
    for el in on:
        phone, service = el
        try:
            sid = session.query(service_fx.i_id).filter(
                service_fx.bee_sync == service.rstrip()).one()[0]
        except NoResultFound:
            print(el, "invalid parameters, should be enabled")
            continue
        hstr = (
            session.query(ser)
            .filter(
                ser.object_id == phone.rstrip(),
                ser.service_id == sid,
                ser.activated < datetime.now(),
                or_(ser.deactivated.is_(None), ser.deactivated > datetime.now()),
            )
            .all()
        )
        if len(hstr) == 0:
            print(el, "not enabled, should be enabled")
    for el in off:
        phone, service = el
        try:
            sid = session.query(service_fx.i_id).filter(
                service_fx.bee_sync == service.rstrip()).one()[0]
        except NoResultFound:
            print(el, "invalid parameters, should be disabled")
            continue
        hstr = (
            session.query(ser)
            .filter(
                ser.object_id == phone.rstrip(),
                ser.service_id == sid,
                ser.deactivated < datetime.now(),
                or_(ser.deactivated.is_(None), ser.deactivated > datetime.now()),
            )
            .all()
        )
        if len(hstr) != 0:
            print(el, "not disabled, should be disabled")
    session.close()
def app():
    logging_config()
    Base.metadata.create_all(engine)
    try:
        with get_session() as session:
            save_fixtures(session)
        app = create_app()
        yield app
    finally:
        Base.metadata.drop_all(engine)
def read_worker(self):
    logger.debug('read start {}'.format(self.id_ if self.id_ else ''))
    engine = models.get_engine(**options.as_dict())
    with contextlib.closing(models.get_session(engine)) as session:
        query = session.query(models.TestTable)\
            .order_by(models.TestTable.value)\
            .filter(models.TestTable.value > models.TestTable._rand_lim)
        for inst in query:
            inst  # iterate only to force the rows to be fetched
    logger.debug('read stop {}'.format(self.id_ if self.id_ else ''))
def var2():
    session = get_session()
    query = session.query(Book). \
        order_by(Book.publication_year). \
        options(joinedload(Book.author)). \
        options(joinedload(Book.genres)).all()
    for book in query:
        print("{} - {} ({}): {}".format(
            book.author.name, book.name, book.publication_year,
            [genre.name for genre in book.genres]))
def register_email(uuid, email):
    session = models.get_session()
    user = session.query(models.User).get(uuid)
    if not user:
        user = models.User(uuid=uuid, email=email)
    else:
        user.email = email
    session.add(user)
    session.commit()
    return True
def main1():
    thread_pool = ThreadPool(20)
    thread_pool.start()
    session = get_session()
    topic_query = \
        session.query(VideoTopic).filter(VideoTopic.video_type == 1)
    for topic in topic_query:
        thread_pool.add_task(job, topic.henhen_id)
    session.close()
    thread_pool.wait_done()
def __init__(self, end: str):
    self.classes = get_lib_cls()
    self.end = end
    self.closed = False
    self.client = InfluxDBClient(**INFLUXDB_CONFIG, database=INFLUXDB_DATABASE_NAME)
    self.session = get_session(DB_URL, DB_NAME, autocommit=True)
    self.q = Queue()
    self.qsize = 0
def sample_value(self):
    url = self.conf.get('url')
    session = get_session(url, self.schema, autocommit=True)
    created_time = getattr(self.model, self.conf.get('created_time'))
    value = session.query(self.model). \
        filter(created_time >= self.start_sample, created_time < self.end).count()
    session.close()
    return value
@classmethod
def insert(cls, value):
    session = get_session(DB_URL, DB_NAME)
    try:
        session.add(cls(**value))
    except Exception as exc:
        logger.error(exc)
        session.rollback()
    else:
        session.commit()
    finally:
        session.close()
def register(json_dict):
    email = json_dict["email"]
    password = json_dict["password"]
    with get_session() as s:
        user = User.get_by_email(s, email)
        if user:
            return failed(msg="User already exists")
        user = User.register(s, email, email, password)
        return succeed(msg="Registered successfully, please verify")
def main():
    pool = ThreadPool(20)
    pool.start()
    session = get_session()
    topic_query = session.query(PicTopic).filter(
        PicTopic.pic_type == 'dongmantupian').order_by(PicTopic.id.desc())
    for pic_topic in topic_query:
        pool.add_task(dump_job, pic_topic)
    session.close()
    pool.wait_done()
def send_cotizaciones(bot):
    now = datetime.datetime.now()
    session = get_session()
    cotizaciones = get_last_cotizaciones(now, session)
    text = format_cotizaciones_for_telegram(cotizaciones)
    if text:
        bot.sendMessage(chat_id=CHANNEL, text=text,
                        parse_mode=telegram.ParseMode.MARKDOWN)
    else:
        logger.warning("No cotizaciones were available for today")
def invoices():
    cid = request.args.get('cid')
    added = request.args.get('info')
    session = models.get_session()
    customer = session.query(
        models.Customer).filter(models.Customer.id == cid).first()
    invoices = session.query(
        models.Invoice).filter(models.Invoice.customer_id == cid).all()
    return render_template("invoices.html", customer=customer,
                           invoices=invoices, added=added)
def info():
    session = models.get_session()
    customer_count = session.query(models.Customer).count()
    print(customer_count)
    last_id = session.query(models.Invoice).count()
    print(last_id)
    # Look up the last invoice by the computed id rather than a hard-coded '6'.
    details_of_last_invoice = session.query(
        models.Invoice).filter(models.Invoice.id == last_id).all()
    print(details_of_last_invoice)
    return render_template("info.html", customer_count=customer_count,
                           details_of_last_invoice=details_of_last_invoice)
def get_tabs_of_home():
    with get_session() as s:
        tabs = Tab.get_by_location(s, Tab.LOCATION_HOME)
        res = []
        for tab in tabs:
            res.append(dict(
                id=tab.id,
                display_name=tab.display_name,
                slug=tab.slug,
            ))
    return succeed(data=res)
def __get_similar_from_database(employee_id, start_date=None, end_date=None):
    with models.get_session() as session:
        employee = session.query(models.Employee.name).filter(
            models.Employee.id == employee_id).scalar()
        # Compare both bounds explicitly; `start_date and end_date is not None`
        # only None-checked the second operand.
        if start_date is not None and end_date is not None:
            similar = session.query(models.Statistic.time, models.Statistic.similar) \
                .filter(models.Statistic.id_employee == employee_id,
                        models.Statistic.time >= start_date,
                        models.Statistic.time <= end_date).all()
        else:
            similar = session.query(models.Statistic.time, models.Statistic.similar) \
                .filter(models.Statistic.id_employee == employee_id).all()
    return similar, employee
def db():
    gen_rand = lambda: ''.join(random.choice(string.letters) for i in xrange(256))
    paas = detect_paas()
    session = models.get_session(paas)
    # Test writing to the DB
    for i in xrange(50):
        session.add(models.Test(payload=gen_rand()))
    session.commit()
    # Test reading
    for obj in session.query(models.Test).all():
        session.delete(obj)
    session.commit()
    return 'ok'
def __init__(self):
    conf = cfg.CONF.service_credentials
    tenant = conf.os_tenant_id or conf.os_tenant_name
    self.nova_client = client.Client(
        username=conf.os_username,
        api_key=conf.os_password,
        project_id=tenant,
        auth_url=conf.os_auth_url,
        no_cache=True)
    self.vm_map = {}
    self.default_gateway = cfg.CONF.default_gateway
    self.session = get_session()
    self.pool = eventlet.GreenPool(200)
def sample_value(self):
    url = self.conf.get('url')
    session = get_session(url, self.schema, autocommit=True)
    created_time = getattr(self.model, self.conf.get('created_time'))
    value = session.query(self.model). \
        filter(created_time >= self.start_sample, created_time < self.end,
               self.model.apply_type == 'consume_loan',
               self.model.status == 'SUCCESS').count()
    session.close()
    return value
def save_to_db(self):
    (title_list, video_id_list) = self.get_video_info()
    session = get_session()
    try:
        for index in range(0, len(video_id_list)):
            topic = VideoTopic(title_list[index], 'henhenlu',
                               self.video_type, video_id_list[index])
            session.add(topic)
            session.commit()
            print video_id_list[index] + ' is ok'
    finally:
        session.close()
def dump_job(topic):
    session = get_session()
    subject = topic.title
    message = ''
    imgs_url_query = session.query(PicImg).filter(
        PicImg.pic_topic_id == topic.id).order_by(PicImg.pic_order.asc())
    for img in imgs_url_query:
        message = message + '[img]%s[/img]' % img.url
    subject = subject.encode('utf-8')
    message = message.encode('utf-8')
    dumper = Dumper(API_URL)
    dumper.dump(subject, message)
    session.close()
    print '%s is ok' % subject
def visit_Call(self, node):
    check_file_name(node)
    query = models.get_session().query(models.Return).join(models.Function)
    # TODO(nathan): Handle namespaced functions
    called_func = self._funcs[node.func.id]
    query = query.filter(models.Function.name == node.func.id)
    query = query.filter(models.Function.lineno == called_func.lineno)
    query = query.filter(models.Function.file_name == getattr(
        called_func, FILENAME_ANNOTATION))
    # TODO(nathan): Handle multi-return
    ret_dict = {}
    for ret in query:
        ret_dict.setdefault(ret.type_name, 0)
        ret_dict[ret.type_name] += 1
    setattr(node.func, TYPE_ANNOTATION, ret_dict)
    self.generic_visit(node)
    return node
def check_quotas(config, report_mode=False):
    quotas = get_quotas(config)
    session = get_session(config)
    results = []
    for name, quota in quotas.iteritems():
        entries = session.query(TimeEntry).filter(
            TimeEntry.start_dt >= quota['start']).filter(
            TimeEntry.start_dt <= quota['end'])
        quota['total'] = sum([entry.duration for entry in entries
                              if quota['regex'].search(entry.task)])
        # Unpredictable arithmetic in 2.6, if not cast to float
        quota['total'] = float(quota['total'])
        if report_mode or quota['total'] + quota['threshold'] > quota['hours']:
            results.append((name[6:], quota))
    if results:
        print_results(results)
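# Shape of the `quotas` mapping check_quotas expects, reconstructed from the
# fields it reads. Every key and value below is illustrative, and the
# "quota:" prefix is a guess inferred from the `name[6:]` slice:
import re
from datetime import date

quotas = {
    'quota:support': {
        'start': date(2014, 1, 1),         # range matched against TimeEntry.start_dt
        'end': date(2014, 1, 31),
        'regex': re.compile(r'^Support'),  # matched against entry.task
        'hours': 40.0,                     # budgeted hours for the period
        'threshold': 2.0,                  # warn when within this margin of the budget
    },
}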
def create_metadata_sql(catalog):
    session = models.get_session()
    for book in catalog:
        b = models.Book()
        b.lang = book.lang
        b.mdate = book.mdate
        b.bookid = book.bookid
        b.author = book.author
        b.title = book.title
        b.subj = unicode(book.subj)
        b.loc = unicode(book.loc)
        b.pgcat = book.pgcat
        b.desc = book.desc
        b.toc = book.toc
        b.alttitle = unicode(book.alttitle)
        b.friendlytitle = book.friendlytitle
        session.add(b)
    session.commit()
def get_mass_serv():
    session = get_session("ekomobile")
    rapi = Rest()
    services = get_class("service_fx")
    agree = get_class("operator_agree")
    ctn = get_class("ctn")
    btp = get_class("operator_tarif")
    banlist = (
        session.query(agree.i_id)
        .filter(agree.moboperator == 1, agree.payment_type == 0, agree.discontinued == 0)
        .all()
    )
    banlist = [el[0] for el in banlist]
    ctnlist = (
        session.query(ctn.msisdn, ctn.operator_tarif)
        .filter(ctn.operator_agree.in_(banlist), ctn.status == 1)
        .group_by(ctn.operator_tarif)
        .all()
    )
    result = []
    for el in ctnlist:
        rapi.change_owner(ctn=int(el[0]))
        rez = rapi.get_available_services()["availableServices"]
        if len(rez) == 0:
            print(rapi.ctn)
        # The tariff name depends only on `el`, so look it up once per number
        # instead of once per service.
        tarif = session.query(btp.name).filter(btp.i_id == int(el[1])).one()[0]
        for r in rez:
            try:
                serv = session.query(services.i_id).filter(services.bee_sync == r["name"]).one()[0]
            except NoResultFound:
                serv = "Not in billing!"
            result.append([rapi.ctn, el[1], tarif, serv, r["name"], r["entityName"], r["rcRate"]])
        print("{} of {}".format(ctnlist.index(el) + 1, len(ctnlist)))
    # Column headers are Russian: number, tariff, service tech code, service
    # name, service recurring fee. NOTE: rows carry 7 columns but only 5
    # headers are listed.
    names = ["Номер", "Тариф", "Техкод услуги", "Название услуги", "АП услуги"]
    session.close()
    try:
        ex_write(names, result, path="C:/Users/ГостЪ/Desktop/services.xlsx")
    except ValueError:
        return result
    else:
        return
def main():
    parser = OptionParser('usage: %prog [options] task')
    parser.add_option("-f", "--config-file", dest='config_file',
                      help="Configuration file")
    options, args = parser.parse_args()
    if not options.config_file:
        parser.error("Please specify a configuration file.")
    logging.config.fileConfig(options.config_file)
    config = ConfigParser()
    config.read(options.config_file)
    session = get_session(config)
    today = date.today()
    for task in args:
        print 'Task: {0}'.format(task)
        print
        # Print today's totals
        entries = session.query(TimeEntry).filter(
            TimeEntry.task.op('~')(task)).filter(
            TimeEntry.start_dt >= today).filter(
            TimeEntry.start_dt <= (today + timedelta(days=1)))
        grouped_entries = group_entries(entries)
        print 'Time Today:'
        print
        print_user_totals(session, grouped_entries)
        print
        # Print month's totals
        entries = session.query(TimeEntry).filter(
            TimeEntry.task.op('~')(task)).filter(
            TimeEntry.start_dt >= date(today.year, today.month, 1)).filter(
            TimeEntry.start_dt <= (today + timedelta(days=1)))
        grouped_entries = group_entries(entries)
        print 'Time This Month:'
        print
        print_user_totals(session, grouped_entries)
        print
        print
def visit_FunctionDef(self, node):
    # TODO(nathan): This is super janky and won't handle scoping.
    self._funcs = {node.name: node}
    check_file_name(node)
    query = models.get_session().query(models.Arg).join(models.Function)
    # Query.filter returns a new query, so the result must be reassigned
    # (the original discarded these filters).
    query = query.filter(models.Function.name == node.name)
    query = query.filter(models.Function.lineno == node.lineno)
    query = query.filter(models.Function.file_name == getattr(node, FILENAME_ANNOTATION))
    arg_dict = {}
    for arg in query:
        arg_dict.setdefault(arg.arg_name, {}).setdefault(arg.type_name, 0)
        arg_dict[arg.arg_name][arg.type_name] += 1
    # TODO(nathan): Make this use the self._var_types in a different visit
    for arg in node.args.args:
        setattr(arg, TYPE_ANNOTATION, arg_dict[arg.arg])
    old_var_types = dict(self._var_types)
    self._var_types.update(arg_dict)
    self.generic_visit(node)
    self._var_types = old_var_types
    return node
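# How transformers with visit_FunctionDef / visit_Call methods like the two
# above are typically driven. The class name `TypeAnnotator` is hypothetical,
# since these snippets never show the enclosing class:
import ast

source = open('example.py').read()
tree = ast.parse(source)
annotated = TypeAnnotator().visit(tree)  # dispatches to the visit_* methods
ast.fix_missing_locations(annotated)     # repair locations on rewritten nodes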
def save_to_db(self):
    session = get_session()
    try:
        video_topic = \
            session.query(VideoTopic).filter(VideoTopic.henhen_id == self.qvod_id).first()
        for index_order, img in enumerate(self.imgs):
            video_img = VideoImg()
            video_img.pic_order = index_order
            video_img.url = img
            video_img.video_topic_id = video_topic.id
            session.add(video_img)
        video = Video()
        video.video_topic_id = video_topic.id
        video.url = self.qvod_address
        session.add(video)
        session.commit()
    finally:
        session.close()
def __init__(self):
    self.session = get_session()
def main():
    parser = OptionParser('usage: %prog -f config.conf')
    parser.add_option("-f", "--config-file", dest='config_file',
                      help="Configuration file")
    options, args = parser.parse_args()
    if not options.config_file:
        parser.error("Please specify a configuration file.")
    logging.config.fileConfig(options.config_file)
    config = ConfigParser()
    config.read(options.config_file)
    # Check for running instance
    pidpath = config.get('spider', 'pidfile')
    lockpath = config.get('spider', 'lockfile')
    try:
        lockfile = open(lockpath, 'w')
        fcntl.lockf(lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
    except IOError:
        pidfile = open(pidpath)
        pid = pidfile.read()
        pidfile.close()
        print "Slimtimer Spider already running (%s)" % pid
        sys.exit(1)
    pidfile = open(pidpath, 'w')
    pidfile.write(str(os.getpid()))
    pidfile.close()
    session = get_session(config)
    end_date = date.today()
    start_date = end_date - timedelta(
        days=int(config.get('slimtimer', 'cutoff')))
    start_time = datetime.combine(start_date, time(0, 0))
    # These slimtimer entries have no unique ids, so we have to do this a bit
    # heavy-handedly. Remove all TimeEntries in the range to make room for the
    # new ones.
    session.query(TimeEntry).filter(TimeEntry.start_dt >= start_time).delete()
    ss = SlimtimerSpider(
        config.get('slimtimer', 'user'),
        config.get('slimtimer', 'password'),
    )
    logging.info('Retrieving users from Slimtimer')
    users = ss.get_users(start_date=start_date, end_date=end_date)
    logging.info('Found {0} users'.format(len(users)))
    logging.debug(str(users))
    for user in users:
        slimtimer_user = session.query(SlimtimerUser).get(user['id'])
        if slimtimer_user:
            slimtimer_user.label = user['label']
            slimtimer_user.updated_at = datetime.utcnow()
            session.merge(slimtimer_user)
        else:
            logging.info(
                'Adding SlimtimerUser for {id} ({label})'.format(**user))
            slimtimer_user = SlimtimerUser(id=user['id'], label=user['label'])
            session.add(slimtimer_user)
        logging.info(
            'Retrieving time entries for {id} ({label})'.format(**user))
        for entry in ss.get_report(user_ids=[user['id']],
                                   start_date=start_date,
                                   end_date=end_date):
            logging.debug(str(entry))
            start_dt = datetime.strptime('{Date} {Start}'.format(**entry),
                                         '%m/%d/%Y %I:%M %p')
            time_entry = TimeEntry(
                user_id=user['id'],
                task=entry['Task'],
                comment=entry['Comments'],
                start_dt=start_dt,
                duration=float(entry['Duration']),
            )
            session.add(time_entry)
    session.commit()
def rpc_unique_topics(self, request):
    session = get_session()
    data = session.query(distinct(Store.topic)).all()
    session.close()
    return [topic[0] for topic in data]
def rpc_last_payload_for_topic(self, request, topic):
    session = get_session()
    data = session.query(Store).filter(Store.topic == topic).order_by(
        Store.timestamp.desc()).first()
    session.close()
    return to_dict(data)
#!/bin/python
import models
import time
import dbm

session = models.get_session()
addrdb = dbm.open('addr.db', 'c')
baddrdb = dbm.open('baddr.db', 'c')

for addrtxt in addrdb.keys():
    (lat, lon) = addrdb[addrtxt].split(":")
    addr = models.get_or_create(session, models.Address, address=addrtxt)
    geo = models.get_or_create(session, models.Geocoding, address_id=addr.id,
                               latitude=lat, longitude=lon, error=0)

for addrtxt in baddrdb.keys():
    addr = models.get_or_create(session, models.Address, address=addrtxt)
    geo = models.get_or_create(session, models.Geocoding, address_id=addr.id,
                               latitude=0.0, longitude=0.0, error=1)
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("--geocode", "-g", action="store_true",
                        help="geocode arrestee address", default=False)
    parser.add_argument("--sort", "-s", help="sort order", default='date')
    parser.add_argument("--configuration", "-c",
                        help="use values from configuration file FILE",
                        default="arrests.cfg")
    parser.add_argument("--home",
                        help="specify origin address for arrestee residence distance")
    parser.add_argument("--latitude", type=float,
                        help="specify origin lat/long for arrestee residence distance")
    parser.add_argument("--longitude", type=float,
                        help="specify origin lat/long for arrestee residence distance")
    parser.add_argument("--limit", type=int,
                        help="limit processing to first n arrests, useful for "
                             "limiting geocode lookups",
                        default=25000)
    parser.add_argument("--api_key", help="use googlemaps api key KEY")
    args = parser.parse_args()
    logger.info("loading config")
    config = ConfigParser.ConfigParser()
    # Honour --configuration instead of the hard-coded filename.
    config.readfp(open(args.configuration))
    # config.read(['site.cfg', os.path.expanduser('~/.myapp.cfg')])
    logger.info("fetching arrest data")
    url = "http://www.fairfaxcounty.gov/police/crime/arrest.txt"
    r = geturl_cached(url)
    headers = r.readline()
    api_key = config.get('googlemaps', 'api_key')
    if args.api_key:
        api_key = args.api_key
    widths = [40, 20, 40, 5, 30, 25, 50, 100]
    offsets = [0]
    for i in widths:
        offsets.append(offsets[-1] + i)
    arrests = []
    count = 0
    logger.debug("limiting to %d records" % args.limit)
    session = models.get_session()
    new_arrests = 0
    while count < args.limit:
        line = r.readline()
        if len(line) == 0:
            break
        count += 1
        f = []
        arrest = {}
        offset = 0
        for i in widths:
            f.append(line[offset:offset + i].strip())
            offset += i
        (is_new, charge) = models.get_or_create(session, models.Charge,
                                                name=f[CHARGE],
                                                description=f[DESCRIP])
        (is_new, address) = models.get_or_create(session, models.Address,
                                                 address=f[ADDRESS])
        # geo_api_error = 1
        # if not models.have_geocoding(session, address):
        #     raise Exception('whoops missed an address')
        # try:
        #     (lat, lon) = geoutil.get_coord(address.address, False)
        #     geo_api_error = 0
        # except geoutil.InvalidAddress, x:
        #     lat = 0.0
        #     lon = 0.0
        # geo = models.add_geocoding(session,
        #                            address=address,
        #                            latitude=lat,
        #                            longitude=lon,
        #                            error=geo_api_error)
        (is_new, arrestee) = models.get_or_create(session, models.Arrestee,
                                                  lname=f[LNAME],
                                                  fname=f[FNAME],
                                                  mname=f[MNAME],
                                                  age=f[AGE],
                                                  address_id=address.id)
        date = time.strftime("%s", time.strptime(f[DATE], '%m/%d/%Y'))
        (is_new, arrest) = models.get_or_create(session, models.Arrest,
                                                date=date,
                                                charge=charge,
                                                arrestee=arrestee)
        if is_new:
            new_arrests += 1
            session.add(arrest)
    session.commit()
    print "Found %d new arrest records" % new_arrests
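# The `models.get_or_create` helper used by the last two scripts is not shown
# anywhere above. A common implementation matching the `(is_new, instance)`
# call sites in the arrests script would be something like this sketch,
# assuming SQLAlchemy (the dbm import script earlier appears to use an older
# variant that returns the instance alone):
def get_or_create(session, model, **kwargs):
    """Return (is_new, instance), creating the row if it does not exist."""
    instance = session.query(model).filter_by(**kwargs).first()
    if instance is not None:
        return False, instance
    instance = model(**kwargs)
    session.add(instance)
    return True, instance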