def update_name_and_description(self, document_id: int, name: str, description: str):
    """ Rename a stored document and update its description.

    :param document_id: id of the document to update.
    :param name: new file name; must not be empty.
    :param description: new description; None is stored as "".
    :raises ServerException: file_name_cannot_be_empty if name is empty.
    """
    if not name:
        raise ServerException(ServerErrors.file_name_cannot_be_empty)

    mainlog.debug('Renaming doc:{} to:{} with description:{}'.format(
        document_id, name, description))

    # Best effort : a failure to rename the file in storage is logged
    # but does not prevent the database update below.
    try:
        self._rename_file_in_storage(document_id, name)
    except Exception as ex:
        mainlog.error("Could not rename document {}".format(document_id))
        mainlog.exception(ex)

    doc = session().query(Document).filter(
        Document.document_id == document_id).one()
    doc.description = description or ""
    doc.filename = name
    audit_trail_service.record("DOCUMENT_RENAMED", "", document_id,
                               commit=False)
    session().commit()
def test_multi_update(self):
    # Exercises TimeTrackDao.multi_update twice : once creating a
    # timetrack from scratch, once updating the created one while
    # adding a second.
    d = date(2012, 10, 23)
    cache = PotentialTasksCache(self.task_dao, d)
    tasks = cache.tasks_for_identifier(None)

    # Two in-memory timetracks on the same task (durations 1 and 2).
    tt1 = self._standard_timetrack(tasks[0].task_id, datetime.now(), 1)
    tt1.employee = self.employee
    tt2 = self._standard_timetrack(tasks[0].task_id, datetime.now(), 2)
    tt2.employee = self.employee
    # Close the session so multi_update works on detached objects.
    session().close()
    mainlog.debug(tt1)
    mainlog.debug(tt1.employee)
    mainlog.debug(tt1.task)

    # First call : tt1 passed in the second list — presumably the
    # "new timetracks" argument; TODO confirm against multi_update's signature.
    self.timetrack_dao.multi_update(tt1.employee, d, [], [tt1], [])
    for tt in session().query(TimeTrack).all():
        mainlog.debug(u"CHECK: {}".format(tt))
    session().close()

    # Second call : reload the single persisted timetrack and update it
    # while creating tt2.
    tt1 = session().query(TimeTrack).one()
    self.timetrack_dao.multi_update(tt1.employee, d, [tt1], [tt2], [])
    for tt in session().query(TimeTrack).all():
        mainlog.debug(u"CHECK: {}".format(tt))
def replace_template(self, template_id, file_handle, file_name):
    """ A new template document replaces an old one in our database.

    We ask for a file handle because we expect the file to come from
    the web server (that's the way cherrypy does it).

    :param template_id: id of the TemplateDocument to replace.
    :param file_handle: readable handle on the uploaded file content.
    :param file_name: name to store for the new content.
    :return: the document id of the replaced template.
    """
    mainlog.debug(u"replace_template(): doc_id:{}, fh:{} fn:{}".format(
        template_id, file_handle, file_name))

    document = session().query(TemplateDocument).filter(
        TemplateDocument.template_document_id == template_id).one()
    document.filename = file_name
    document.upload_date = date.today()
    # Note: unlike save_template(), the row already exists so no
    # placeholder values are needed before copying the file to storage.
    document.server_location, document.file_size = self._copy_file_to_storage(
        file_handle, document.document_id, file_name)
    doc_id = document.document_id  # save for use after session's flushed
    audit_trail_service.record("TEMPLATE_REPLACED", "", document.document_id,
                               commit=False)
    session().commit()
    mainlog.debug("Replaced template {}".format(doc_id))
    return doc_id
def categories(self):
    """Return all document categories (non-relation columns only),
    ordered by their short name."""
    columns = all_non_relation_columns(DocumentCategory)
    result = session().query(*columns).order_by(
        DocumentCategory.short_name).all()
    session().commit()
    mainlog.debug("categories() : {}".format(result))
    return result
def _hoover_text_for(self, tar):
    """Build the hover text lines for a task action report.

    :param tar: object exposing ``time`` and ``editor`` attributes.
    :return: list of translated strings (time line, plus an editor
        line when an editor is set).
    """
    mainlog.debug("TAR.time = {} --> {}".format(tar.time, time_to_hm(tar.time)))
    lines = [_("On {}").format(time_to_hm(tar.time))]
    if tar.editor:
        lines.append(_("Edited by {}").format(tar.editor))
    return lines
def pl_migration():
    # One-shot data migration : wipes all production data (operations,
    # production files, delivery slips, order parts, orders, customers,
    # operation definitions).  Deletions are done child-tables-first,
    # presumably to respect foreign key constraints — confirm before reuse.
    # q = current_session.query(Task.task_id).filter(Task.task_type != 'task_for_presence')
    # current_session.query(TaskActionReports).filter(TaskActionReports.kind.in_(TaskActionReportType.start_task,TaskActionReportType.end_task)).delete(False)
    # current_session.query(TimeTrack).filter(TimeTrack.task_id.in_(q)).delete(False)
    # current_session.query(TaskOnOperation).delete()
    # current_session.query(Task).filter(Task.task_type != 'task_for_presence').delete()

    # Operation.__table__.drop(engine, checkfirst=True)
    print((current_session.query(Operation).delete()))
    current_session.commit()

    # ProductionFile.__table__.drop(engine, checkfirst=True)
    print((current_session.query(ProductionFile).delete()))
    current_session.commit()

    # DeliverySlipPart.__table__.drop(engine, checkfirst=True)
    print((current_session.query(DeliverySlipPart).delete()))
    current_session.commit()

    # OrderPart.__table__.drop(engine, checkfirst=True)
    print((current_session.query(OrderPart).delete()))
    current_session.commit()

    # Order.__table__.drop(engine, checkfirst=True)
    mainlog.debug("deleting orders")
    # Rewind the gapless sequence so future orders continue from the
    # highest accounting label still in the table.
    session().connection().execute(
        "UPDATE gapless_seq SET gseq_value=(select max(accounting_label) from orders) WHERE gseq_name='order_id'"
    )
    # Delete orders one by one (ORM delete, so cascades fire),
    # committing every 100 rows to keep transactions small.
    i = 0
    for order in current_session.query(Order).order_by(
            desc(Order.accounting_label)).all():
        current_session.delete(order)
        i += 1
        if i % 100 == 0:
            print(i)
            current_session.commit()
    # print current_session.query(Order).delete()
    current_session.commit()

    # DeliverySlip.__table__.drop(engine, checkfirst=True)
    # NOTE(review): the loop below runs after the bulk delete just above,
    # so it presumably handles rows the bulk delete could not remove —
    # confirm whether both are still needed.
    print((current_session.query(DeliverySlip).delete()))
    for delivery_slip in current_session.query(DeliverySlip).order_by(
            desc(DeliverySlip.delivery_slip_id)).all():
        current_session.delete(delivery_slip)
    current_session.commit()

    # Customer.__table__.drop(engine, checkfirst=True)
    print((current_session.query(Customer).delete()))
    current_session.commit()

    # OperationDefinitionPeriod.__table__.drop(engine, checkfirst=True)
    print((current_session.query(OperationDefinitionPeriod).delete()))
    current_session.commit()

    # OperationDefinition.__table__.drop(engine, checkfirst=True)
    print((current_session.query(OperationDefinition).delete()))
    current_session.commit()
def _apply_filter(self, filter_text):
    """ Apply the given filter text to the parts search view.

    Three cases :
    - several words => treated as a full-fledged filter expression
      (parsed first; a parse error pops an error box),
    - a single word => plain text filter,
    - empty => show the recent parts (no "too many results" warning).
    """
    mainlog.debug(u"_apply_filter : {}".format(filter_text))

    parts = []
    len_check = False

    if " " in filter_text.strip():
        # More than one word in the filter => I assume it's the full
        # fledged filtering
        check = check_parse(filter_text)

        # check_parse returns True on success, an error message otherwise.
        # "is True" (not "== True") so a message is never mistaken for
        # success.
        if check is True:
            parts = supply_order_service.find_parts_expression_filter(
                filter_text)
            len_check = True
        else:
            showErrorBox(_("Error in the filter !"), check,
                         object_name="filter_is_wrong")
    elif filter_text:
        parts = supply_order_service.find_parts_filtered(filter_text)
        len_check = True
    else:
        parts = supply_order_service.find_recent_parts()
        len_check = False

    if len_check and len(parts) >= supply_order_service.MAX_RESULTS:
        showWarningBox(
            _("Too many results"),
            _("The query you've given brought back too many results. Only a part of them is displayed. Consider refining your query"))

    self._fill_model(parts)
    self.search_results_view.setFocus(Qt.OtherFocusReason)
def tasks_for_identifier(self, obj):  # FIXME !!! rename
    """ Returns imputable task corresponding to the given obj.

    If tasks are created before being returned (that's why we say
    potential) then those are *not* added to the session.

    Note that "None" is an appropriate value : None represents
    unbillable tasks.
    """
    if obj not in self.potential_tasks_cache:
        # Pay attention ! This is only for administrators !
        self.potential_tasks_cache[obj] = [
            task
            for task in self.task_dao.potential_imputable_tasks_for(
                obj, self.base_date)
            if is_task_imputable_for_admin(task)]
    else:
        mainlog.debug("PotentialTasksCache.tasks_for_obj : cache hit !")

    return self.potential_tasks_cache[obj]
def chrono_start(title=None):
    """(Re)start the global chronometer.

    Both the running mark and the "first" mark are reset to now.

    :param title: optional label; when given, a debug line is logged.
    """
    global _chrono, _first_chrono
    if title:
        mainlog.debug("{} Chrono start".format(title))
    now = time()
    _chrono = now
    _first_chrono = now
def save(self, sa):
    """Persist a special activity.

    A brand new activity (no id yet) is added; an existing one that is
    detached from the session is merged back in. Commits in all cases.
    """
    mainlog.debug("SpecialActivityDAO.save()")
    if not sa.special_activity_id:
        # Never persisted -> plain insert.
        session().add(sa)
    elif sa not in session():
        # Known in DB but detached -> merge it back.
        session().merge(sa)
    session().commit()
def _add_one_document(self, file_name, doc_id, file_size, description):
    """ Adds a document to the list.

    file_name is either an absolute path or just a file name. If it is
    an absolute path, then the file is expected to exist locally (at
    its absolute path location of course). If not, then the file is
    expected to be a remote file and shall be downloaded before opening.
    """
    mainlog.debug(u"{} {} {} {}".format(file_name, doc_id, file_size,
                                        description))

    short_name = file_name
    if os.path.isabs(file_name):
        # Local file : show only its base name and make sure it exists.
        short_name = os.path.basename(file_name)
        if not os.path.isfile(file_name):
            raise Exception(u"The file {} doesn't exist".format(file_name))

    row_items = [QStandardItem(short_name), QStandardItem(description)]
    self.model.appendRow(row_items)
    # Remember the document id on the first cell of the new row.
    last_row = self.model.rowCount() - 1
    self.model.setData(self.model.index(last_row, 0), doc_id,
                       Qt.UserRole + 1)

    self.view.horizontalHeader().setResizeMode(0, QHeaderView.Stretch)
    self.view.horizontalHeader().setResizeMode(1, QHeaderView.Stretch)
    self.view.resizeRowsToContents()
def save_template(self, file_handle, file_name):
    """ A new template document is added to our database.

    We ask for a file handle because we expect the file to come from
    the web server (that's the way cherrypy does it).

    :returns the doc id of the file
    """
    mainlog.debug(u"save_template(): fh:{} fn:{}".format(
        file_handle, file_name))

    template_doc = TemplateDocument()
    template_doc.filename = file_name
    template_doc.upload_date = date.today()
    # Placeholders : the real location and size are only known once the
    # file is copied to storage, but the copy itself needs the document
    # id, which only exists after the flush below.
    template_doc.server_location = "DUMMY"
    template_doc.file_size = 666
    session().add(template_doc)
    session().flush()  # get an id

    template_doc.server_location, template_doc.file_size = \
        self._copy_file_to_storage(file_handle, template_doc.document_id,
                                   file_name)

    doc_id = template_doc.document_id
    audit_trail_service.record("TEMPLATE_CREATED", "",
                               template_doc.document_id, commit=False)
    session().commit()

    mainlog.debug("Saved to template doc_id={}, bytes={}".format(
        doc_id, template_doc.file_size))
    return doc_id
def __init__(self, func):
    """Decorate *func* with a result cache whose entries expire
    after one hour."""
    mainlog.debug("CacheResult : __init__ decorating {}".format(func))
    super(CacheResult, self).__init__(func)
    self.__func = func
    # Cached values keyed by the call's positional args.
    self.__cache = {}
    # Expiration datetime for each cached key.
    self.__expiration = {}
    self.expire_time = timedelta(hours=1)
def valuation_this_month_indicator(self, begin: date = None, end: date = None):
    """Return this month's "encours" (WIP valuation) computed up to *end*.

    :param begin: unused; kept for signature compatibility.
    :param end: date up to which the turnover info is computed.
    """
    (unused_to_facture, encours_this_month,
     unused_encours_previous, unused_turnover) = \
        self._compute_turnover_info(end)
    mainlog.debug(
        "valuation_this_month_indicator = {}".format(encours_this_month))
    return encours_this_month
def get_current_dir():
    """Return the directory the current code runs from.

    Handles the PyInstaller ("frozen") case by using the executable's
    directory; otherwise uses this module's __file__.

    Note: implicitly returns None when not frozen and __file__ is falsy.
    """
    if getattr(sys, 'frozen', False):
        # Handle PyInstaller situation
        mainlog.debug("I'm frozen")
        return os.path.dirname(sys.executable)
    elif __file__:
        return os.path.dirname(__file__)
def find_by_id(self, identifier, resilient=False):
    """Load the customer with the given id.

    :param identifier: customer id.
    :param resilient: when True, return None for a missing customer
        instead of raising (query ``first`` vs ``one``).
    """
    mainlog.debug(u"customer_dao.find_by_id : {}".format(identifier))
    query = session().query(Customer).filter(
        Customer.customer_id == identifier)
    if resilient:
        return query.first()
    return query.one()
def test_print_iso_status(self):
    # Smoke test : generating the ISO status PDF for a fresh order
    # must produce a file of non-trivial size.
    order = self._make_order()
    n = make_pdf_filename("test")
    _print_iso_status(dao, order.order_id, n)
    mainlog.debug(n)
    mainlog.debug(os.path.getsize(n))
    # 8000 bytes is a heuristic lower bound for a real PDF here.
    assert os.path.getsize(n) > 8000
    os.remove(n)
def valution_production_chart(self, begin: date, end: date):
    # NOTE(review): "valution" looks like a typo for "valuation", but the
    # name is part of the public interface so it is kept as-is.
    """Build the WIP valuation-over-time graph data between *begin*
    and *end*."""
    mainlog.debug("valution_production_chart : from {} to {}".format(
        begin, end))
    wip_over_time = dao.order_part_dao.wip_valuation_over_time(begin, end)
    legends, serie = to_serie(wip_over_time.items())
    return GraphData(legends, [''], [serie])
def find_by_id(self, supplier_id):
    """Return the supplier row (non-relation columns only) with the
    given id; raises if it doesn't exist."""
    mainlog.debug("SupplierService.find_by_id {}".format(supplier_id))
    columns = all_non_relation_columns(Supplier)
    result = session().query(*columns).filter(
        Supplier.supplier_id == supplier_id).one()
    session().commit()
    return result
def check(self):
    """Return True when the widget is empty/blank or holds a valid
    order part identifier."""
    text = self.widget.text()
    mainlog.debug(u"OrderPartIdentifierEdit : check() : {}".format(text))
    if text and text.strip():
        return self.validator.validate(text, 0) == QValidator.Acceptable
    # Blank input is considered valid.
    return True
def value(self):
    """Return the reference behind the currently selected row.

    We read self.model.references directly because, somehow, using
    itemData transforms some KeyedTupel reference into a simple
    list... FIXME !
    """
    row_index = self.widget.currentIndex()  # an int
    reference = self.model.references[row_index]
    mainlog.debug("ConstrainedMachineEdit.value:{}".format(type(reference)))
    return reference
def _refresh_list(self):
    """Rebuild the list model (and its filter index) from
    self.objects_list(); clears the current item.

    :return: number of objects loaded.
    """
    mainlog.debug("_refresh_list")
    self.current_item = None
    objects = self.objects_list()
    self.list_model.buildModelFromObjects(objects)
    index_data = [self.index_builder(o) for o in objects]
    self.list_model_filtered.setIndexData(index_data)
    return len(objects)
def _compute_turnover_info(self, begin_date: date):
    """Compute the turnover information up to *begin_date*, caching the
    result per date.

    :return: whatever dao.order_part_dao.compute_turnover_on returns —
        presumably the (to_facture, encours_this_month,
        encours_previous_month, turnover) tuple unpacked by callers.
    """
    global dao
    mainlog.debug("_compute_turnover_info. to {}".format(begin_date))
    if begin_date not in self._turnover_computation_cache:
        self._turnover_computation_cache[begin_date] = \
            dao.order_part_dao.compute_turnover_on(begin_date)
    return self._turnover_computation_cache[begin_date]
def test_preorder_report(self):
    # Smoke test : generating the preorder report PDF for a committed
    # order must produce a file of non-trivial size.
    order = self._make_order()
    session().commit()
    n = make_pdf_filename("test")
    _make_preorder_report(order, n)
    mainlog.debug("Analyzing {}".format(n))
    mainlog.debug(os.path.getsize(n))
    # 26000 bytes is a heuristic lower bound for a real report here.
    assert os.path.getsize(n) > 26000
    os.remove(n)
def test_find_by_full_id(self):
    # find_by_full_id must resolve an exact human identifier to the
    # matching order part id, and return an empty list for None,
    # partial, or malformed identifiers.
    order = self._make_order()
    mainlog.debug("Part id = {}".format(order.parts[0].label))
    self.assertEqual(
        [order.parts[0].order_part_id],
        dao.order_part_dao.find_by_full_id(order.parts[0].human_identifier))
    self.assertEqual(
        [], dao.order_part_dao.find_by_full_id(None))
    self.assertEqual(
        [], dao.order_part_dao.find_by_full_id('E'))
    self.assertEqual(
        [], dao.order_part_dao.find_by_full_id('1'))
    self.assertEqual(
        [], dao.order_part_dao.find_by_full_id('1EA'))
def set_event_on_days(self, day_event: DayEvent, days_duration: list):
    """ Set an event on several days each time with a specific duration.

    :param day_event: template event; its employee_id and event_type are
        applied to every created/updated event.
    :param days_duration: An array of pairs. Each pair is (date, duration).
        Each date must be unique.
    :raises ServerException: too_much_off_time_on_a_day when, on some day,
        the other event types' durations plus the requested one exceed 1.
    :return:
    """
    # Sentinels, deliberately inverted : min/max over the actual days
    # will converge to the real range.
    day_max = date(1980, 1, 1)
    day_min = date(2050, 12, 31)

    mainlog.debug("set_event_on_days")
    mainlog.debug(days_duration)

    for day, duration in days_duration:
        day_min = min(day_min, day)
        day_max = max(day_max, day)

    # Existing events of the *same* type in the range, keyed by date —
    # these will be updated in place.
    db_events = session().query(DayEvent).filter(
        and_(DayEvent.employee_id == day_event.employee_id,
             DayEvent.event_type == day_event.event_type,
             DayEvent.date.between(day_min, day_max))).all()
    db_events_dates = dict(zip([e.date for e in db_events], db_events))

    # Summed durations of the *other* event types per day, used to
    # detect over-booked days.
    other_db_events = session().query(
        DayEvent.date,
        func.sum(DayEvent.duration).label("duration_sum")).\
        filter(
            and_(
                DayEvent.employee_id == day_event.employee_id,
                DayEvent.event_type != day_event.event_type,
                DayEvent.date.between(day_min, day_max))).\
        group_by(DayEvent.date).all()
    other_db_events_dates = dict([(e.date, e.duration_sum)
                                  for e in other_db_events])

    for day, duration in days_duration:
        # A duration of 1 presumably represents a full day — confirm.
        if day in other_db_events_dates and \
                other_db_events_dates[day] + duration > 1:
            raise ServerException(ServerErrors.too_much_off_time_on_a_day,
                                  date_to_dmy(day))

        if day in db_events_dates:
            # Replace the old duration
            db_event = db_events_dates[day]
            db_event.duration = duration
        else:
            # No event of this type on that day yet -> create one.
            nu_event = DayEvent()
            nu_event.date = day
            nu_event.duration = duration
            nu_event.event_type = day_event.event_type
            nu_event.employee_id = day_event.employee_id
            session().add(nu_event)

    session().commit()
def __call__(self, *args, **kwargs):
    """Return the cached result for *args* when present and not yet
    expired; otherwise recompute, cache, and return it.

    NOTE(review): only positional args form the cache key — calls that
    differ solely in kwargs share a cache entry.
    """
    if args in self.__cache and self.__expiration[args] > datetime.now():
        mainlog.debug("CacheResult : cache hit ! args={}".format(args))
        return self.__cache[args]

    value = self.call_decorated(*args, **kwargs)
    self.__cache[args] = value
    self.__expiration[args] = datetime.now() + self.expire_time
    return value
def path_to_file(self, document_id):
    """Return the file system path to a document stored in our database.

    The path is derived from the document id and its stored filename.
    """
    stored_filename = session().query(Document.filename).filter(
        Document.document_id == document_id).scalar()
    path = self._make_path_to_document(document_id, stored_filename)
    session().commit()
    mainlog.debug(u"path_to_file for document id {} is {}".format(
        document_id, path))
    return path
def _load_forms_data(self):
    """Collect the form's edit-widget values into a dict keyed by field
    name; includes the current item's key field value when there is a
    current item."""
    data = {proto.field: proto.edit_widget_data()
            for proto in self.form_prototype}
    if self.current_item:
        data[self.key_field] = getattr(self.current_item, self.key_field)
    else:
        mainlog.debug("_load_forms_data : no current item")
    return data
def get_server_version(url_version):
    """Fetch the version advertised by the server at *url_version*.

    :return: a StrictVersion on success, None on any failure (the
        error is logged).

    NOTE(review): StrictVersion (distutils) is deprecated and removed
    in Python 3.12 — consider packaging.version when upgrading.
    """
    try:
        response = urlopen(url_version, timeout=5)
        payload = response.read().decode('ascii')
        version = StrictVersion(payload.strip())
        mainlog.debug("Version advertised by server : {}".format(str(version)))
        return version
    except Exception as e:
        mainlog.error("I was unable to get the version from server {}".format(
            url_version))
        mainlog.error(e)
        return None