def create_alembic_table(): """creates the default alembic_version table and creates the data so that any new database will be considered as the latest version """ # Now, this is not the correct way of doing this, there is a proper way of # doing it and it is explained nicely in the Alembic library documentation. # # But it is simply not working when Stalker is installed as a package. # # So as a workaround here we are doing it manually # don't forget to update the version_num (and the corresponding test # whenever a new alembic revision is created) version_num = alembic_version from sqlalchemy import Table, Column, Text table_name = 'alembic_version' from stalker.db.session import DBSession conn = DBSession.connection() engine = conn.engine # check if the table is already there from stalker.db.declarative import Base table = Table( table_name, Base.metadata, Column('version_num', Text), extend_existing=True ) if not engine.dialect.has_table(conn, table_name): logger.debug('creating alembic_version table') # create the table no matter if it exists or not we need it either way Base.metadata.create_all(engine) # first try to query the version value sql_query = 'select version_num from alembic_version' try: version_num = \ DBSession.connection().execute(sql_query).fetchone()[0] except TypeError: logger.debug('inserting %s to alembic_version table' % version_num) # the table is there but there is no value so insert it ins = table.insert().values(version_num=version_num) DBSession.connection().execute(ins) DBSession.commit() logger.debug('alembic_version table is created and initialized') else: # the value is there do not touch the table logger.debug( 'alembic_version table is already there, not doing anything!' )
def create_alembic_table(): """creates the default alembic_version table and creates the data so that any new database will be considered as the latest version """ # Now, this is not the correct way of doing, there is a proper way of doing # it and it is explained nicely in the Alembic library documentation. # # But it is simply not working when Stalker is installed as a package. # # So as a workaround here we are doing it manually # don't forget to update the version_num (and the corresponding test # whenever a new alembic revision is created) version_num = '2e4a3813ae76' from sqlalchemy import Table, Column, Text table_name = 'alembic_version' conn = DBSession.connection() engine = conn.engine # check if the table is already there table = Table( table_name, Base.metadata, Column('version_num', Text), extend_existing=True ) if not engine.dialect.has_table(conn, table_name): logger.debug('creating alembic_version table') # create the table no matter if it exists or not we need it either way Base.metadata.create_all(engine) # first try to query the version value sql_query = 'select version_num from "alembic_version"' try: version_num = \ DBSession.connection().execute(sql_query).fetchone()[0] except TypeError: logger.debug('inserting %s to alembic_version table' % version_num) # the table is there but there is no value so insert it ins = table.insert().values(version_num=version_num) DBSession.connection().execute(ins) DBSession.commit() logger.debug('alembic_version table is created and initialized') else: # the value is there do not touch the table logger.debug( 'alembic_version table is already there, not doing anything!' )
def check_alembic_version():
    """checks the alembic version of the database and raise a ValueError if it
    is not matching with this version of Stalker
    """
    db_version = get_alembic_version()
    logger.debug('current_alembic_version: %s' % db_version)

    # an empty/None version or a matching one is fine, nothing to do
    if not db_version or db_version == alembic_version:
        return

    # invalidate the connection
    from stalker.db.session import DBSession
    DBSession.connection().invalidate()
    # and raise a ValueError (which I'm not sure is the correct exception)
    raise ValueError('Please update the database to version: %s' % alembic_version)
def get_alembic_version():
    """returns the alembic version of the database
    """
    conn = DBSession.connection()
    engine = conn.engine

    # without the table there can be no version value
    if not engine.dialect.has_table(conn, 'alembic_version'):
        return None

    # try to query the version value
    sql_query = 'select version_num from alembic_version'
    try:
        row = DBSession.connection().execute(sql_query).fetchone()
        return row[0]
    except (OperationalError, ProgrammingError, TypeError):
        # TypeError: no row at all; the others: broken/partial schema
        DBSession.rollback()
        return None
def get_search_result(request):
    """returns search results

    Splits the ``str`` request parameter into word parts; every part must
    match the SimpleEntity name case-insensitively.

    :param request: the Pyramid request, reads the ``str`` param
    :returns: list of {'id', 'name', 'entity_type'} dicts
    """
    logger.debug('get_search_result is running')

    # bugfix: default to '' (the old default of -1 made re.findall() raise a
    # TypeError whenever the 'str' param was missing)
    q_string = request.params.get('str', '')

    parts = re.findall(r'[\w\d]+', q_string)

    sql_query_buffer = ['select id, name, entity_type from "SimpleEntities"']
    params = {}
    if parts:  # bugfix: emit "where" only when there is a predicate
        sql_query_buffer.append('where')
        for i, part in enumerate(parts):
            if i > 0:
                sql_query_buffer.append('and')
            # use bound parameters instead of interpolating user input into
            # the SQL string (prevents SQL injection)
            param_name = 'part_%i' % i
            sql_query_buffer.append(
                '"SimpleEntities".name ilike :%s' % param_name
            )
            params[param_name] = '%%%s%%' % part
    sql_query_buffer.append('order by "SimpleEntities".name')
    sql_query = '\n'.join(sql_query_buffer)

    from sqlalchemy import text  # to be able to use "%" sign use this function
    from stalker.db.session import DBSession
    result = DBSession.connection().execute(text(sql_query), **params)
    return [{
        'id': r[0],
        'name': r[1],
        'entity_type': r[2]
    } for r in result.fetchall()]
def get_time_logs(self):
    """returns time logs of the user
    """
    sql = """
    select
        "TimeLogs".id,
        "SimpleEntities".name,
        "SimpleEntities".entity_type
    from "TimeLogs"
    join "SimpleEntities" on "TimeLogs".id = "SimpleEntities".id
    where "TimeLogs".resource_id = :id
    """
    from sqlalchemy import text
    from stalker.db.session import DBSession
    from stalker_pyramid import entity_type_to_url
    from pyramid.response import Response

    rows = DBSession.connection().execute(
        text(sql), id=self.entity_id
    ).fetchall()

    # build one JSON-ready dict per time log row
    data = []
    for tlog_id, tlog_name, tlog_type in rows:
        data.append({
            'id': tlog_id,
            'name': tlog_name,
            'entity_type': tlog_type,
            '$ref': '%s/%s' % (entity_type_to_url[tlog_type], tlog_id)
        })
    return Response(json_body=data, status=200)
def get_vacations(self):
    """returns user vacations
    """
    sql = """
    select
        "Vacations".id,
        "SimpleEntities".name,
        "SimpleEntities".entity_type
    from "Vacations"
    join "SimpleEntities" on "Vacations".id = "SimpleEntities".id
    where "Vacations".user_id = :id
    """
    from sqlalchemy import text
    from stalker.db.session import DBSession
    from stalker_pyramid import entity_type_to_url
    from pyramid.response import Response

    rows = DBSession.connection().execute(text(sql), id=self.entity_id)

    # one JSON-ready dict per vacation row
    vacations = []
    for vac_id, vac_name, vac_type in rows.fetchall():
        vacations.append({
            'id': vac_id,
            '$ref': '%s/%s' % (entity_type_to_url[vac_type], vac_id),
            'name': vac_name,
            'entity_type': vac_type
        })
    return Response(json_body=vacations, status=200)
def get_entity(self):
    """returns the ReferenceMixin portion of this mixed-in class data
    """
    from sqlalchemy import text
    from stalker.db.session import DBSession
    from stalker_pyramid import entity_type_to_url

    # get references count
    entity_type = self.entity.entity_type
    sql = """select
        er.link_id
    from "%s_References" as er
    where er.%s_id = :id
    """ % (entity_type, entity_type.lower())

    row = DBSession.connection().execute(
        text(sql), id=self.entity_id
    ).fetchone()
    reference_count = row[0] if row else 0

    return {
        'references': {
            '$ref': '%s/%s/references' % (
                entity_type_to_url[entity_type], self.entity_id
            ),
            'length': reference_count
        },
    }
def get_client_users_out_stack(request):
    """returns the users that are not in the given client's user stack

    :param request: the Pyramid request, the client id is read from matchdict
    :returns: Response whose json body is a list of {'name', 'id'} dicts, or
      a 500 Response when no client with that id exists
    """
    logger.debug('get_client_users_out_stack is running')

    client_id = request.matchdict.get('id', -1)
    client = Client.query.filter_by(id=client_id).first()

    if not client:
        transaction.abort()
        return Response('Can not find a client with id: %s' % client_id, 500)

    # use a bound parameter instead of interpolating the raw request value
    # into the SQL string (prevents SQL injection)
    sql_query = """
    select
        "User_SimpleEntities".name,
        "User_SimpleEntities".id
    from "Users"
    left outer join "Client_Users" on "Client_Users".uid = "Users".id
    join "SimpleEntities" as "User_SimpleEntities"
        on "User_SimpleEntities".id = "Users".id
    where "Client_Users".cid != :client_id or "Client_Users".cid is Null
    """
    from sqlalchemy import text
    result = DBSession.connection().execute(
        text(sql_query), client_id=client.id
    )

    users = [{'name': r[0], 'id': r[1]} for r in result.fetchall()]
    return Response(json_body=users)
def get_assets_types(request):
    """returns the Asset Types
    """
    sql_query = """select
        "Assets_Types_SimpleEntities".id,
        "Assets_Types_SimpleEntities".name
    from "Assets"
    join "SimpleEntities" as "Assets_SimpleEntities" on "Assets_SimpleEntities".id = "Assets".id
    join "SimpleEntities" as "Assets_Types_SimpleEntities" on "Assets_Types_SimpleEntities".id = "Assets_SimpleEntities".type_id
    group by
        "Assets_Types_SimpleEntities".name,
        "Assets_Types_SimpleEntities".id
    order by "Assets_Types_SimpleEntities".name
    """
    rows = DBSession.connection().execute(sql_query).fetchall()

    return_data = []
    for type_id, type_name in rows:
        return_data.append({
            'asset_type_id': type_id,
            'asset_type_name': type_name
        })

    # report the full range so the JSONRest store does not re-query
    type_count = len(return_data)
    content_range = '%s-%s/%s' % (0, type_count - 1, type_count)
    logger.debug('content_range : %s' % content_range)

    resp = Response(json_body=return_data)
    resp.content_range = content_range
    return resp
def get_entity(self):
    """returns one TimeLog instance as JSON
    """
    sql = """
    select
        "TimeLogs".resource_id,
        "SimpleEntities".name,
        "SimpleEntities".entity_type
    from "TimeLogs"
    left join "SimpleEntities" on "TimeLogs".resource_id = "SimpleEntities".id
    where "TimeLogs".id = :id
    """
    from sqlalchemy import text
    from stalker.db.session import DBSession
    from stalker_pyramid import entity_type_to_url

    row = DBSession.connection().execute(
        text(sql), id=self.entity_id
    ).fetchone()

    # no row -> no resource info at all
    resource_data = None
    if row:
        resource_id, resource_name, resource_type = row
        resource_data = {
            'id': resource_id,
            'name': resource_name,
            'entity_type': resource_type,
            '$ref': '%s/%s' % (entity_type_to_url[resource_type], resource_id),
        }
    data = {'resource': resource_data}

    # merge in the date range portion of the data
    data.update(DateRangeMixinViews.get_entity(self))

    entity_response = super(TimeLogViews, self).get_entity()
    return self.update_response_data(entity_response, data)
def get_dailies_count(request):
    """returns the count of OPEN Dailies in the given Project

    :param request: the Pyramid request, the project id is read from matchdict
    :returns: int
    """
    project_id = request.matchdict.get('id')
    logger.debug(
        'get_dailies_count is working for the project which id is %s'
        % project_id)

    # bind the project id as a query parameter instead of interpolating the
    # raw request value into the SQL string (prevents SQL injection)
    sql_query = """
    select count(1) from (
        select "Dailies".id
        from "Projects"
        join "Dailies" on "Dailies".project_id = "Projects".id
        join "Statuses" on "Dailies".status_id = "Statuses".id
        where "Statuses".code = 'OPEN' and "Projects".id = :project_id
    ) as data
    """
    from sqlalchemy import text  # to be able to use "%" sign use this function
    result = DBSession.connection().execute(
        text(sql_query), project_id=project_id
    )
    return result.fetchone()[0]
def get_shots_count(request):
    """returns the count of Shots in the given Project or Sequence

    :param request: the Pyramid request, the entity id is read from matchdict
    :returns: int
    """
    logger.debug('get_shots_count starts')
    entity_id = request.matchdict.get('id', -1)
    entity = Entity.query.filter(Entity.id == entity_id).first()

    sql_query = """select count(1)
    from "Shots"
    join "Tasks" on "Shots".id = "Tasks".id
    %(where_condition)s"""

    # bind the entity id as a parameter instead of interpolating the raw
    # request value into the SQL string (prevents SQL injection)
    where_condition = ''
    params = {}
    if entity.entity_type == 'Sequence':
        where_condition = """left join "Shot_Sequences"
        on "Shot_Sequences".shot_id = "Shots".id
        where "Shot_Sequences".sequence_id = :entity_id"""
        params['entity_id'] = entity_id
    elif entity.entity_type == 'Project':
        where_condition = 'where "Tasks".project_id = :entity_id'
        params['entity_id'] = entity_id

    logger.debug('where_condition : %s ' % where_condition)
    sql_query = sql_query % {'where_condition': where_condition}

    from sqlalchemy import text
    return DBSession.connection().execute(
        text(sql_query), **params
    ).fetchone()[0]
def get_entity(self):
    """return one StatusList instance as json
    """
    from sqlalchemy import text
    from stalker.db.session import DBSession
    from stalker.models.status import StatusList_Statuses
    from stalker_pyramid import entity_type_to_url

    sql = """select
        "StatusLists".target_entity_type
    from "StatusLists"
    where "StatusLists".id = :id
    """
    row = DBSession.connection().execute(
        text(sql), id=self.entity_id
    ).fetchone()

    # get statuses count
    statuses_count = DBSession.query(StatusList_Statuses.c.status_id)\
        .filter(StatusList_Statuses.c.status_list_id == self.entity_id)\
        .count()

    data = {
        'statuses': {
            '$ref': '%s/%s/statuses' % (
                entity_type_to_url['StatusList'], self.entity_id
            ),
            'length': statuses_count
        },
        'target_entity_type': row[0]
    }

    # update with super data
    response = super(StatusListViews, self).get_entity()
    return self.update_response_data(response, data)
def get_references(self):
    """returns the ReferenceMixin portion of this mixed-in class data
    """
    from sqlalchemy import text
    from stalker.db.session import DBSession
    from stalker_pyramid import entity_type_to_url

    # the reference table name is derived from the concrete class name
    entity_class_name = self.entity.__class__.__name__
    sql = """select
        entity_references.link_id,
        "SimpleEntities".name,
        "SimpleEntities".entity_type
    from "%s_References" as entity_references
    join "SimpleEntities" on entity_references.link_id = "SimpleEntities".id
    where entity_references.%s_id = :id
    """ % (entity_class_name, entity_class_name.lower())

    result = DBSession.connection().execute(text(sql), id=self.entity_id)

    references = []
    for link_id, link_name, link_type in result.fetchall():
        references.append({
            'id': link_id,
            'name': link_name,
            'entity_type': link_type,
            '$ref': '%s/%s' % (entity_type_to_url[link_type], link_id)
        })
    return references
def schedule_info(request):
    """returns the last schedule info
    """
    # there should be only one studio
    sql_query = """
    select
        is_scheduling,
        "CurrentScheduler_SimpleEntities".id as is_scheduling_by_id,
        "CurrentScheduler_SimpleEntities".name as is_scheduling_by,
        (extract(epoch from scheduling_started_at::timestamp AT TIME ZONE 'UTC') * 1000)::bigint as scheduling_started_at,
        (extract(epoch from last_scheduled_at::timestamp AT TIME ZONE 'UTC') * 1000)::bigint as last_scheduled_at,
        extract(epoch from last_scheduled_at::timestamp AT TIME ZONE 'UTC' - scheduling_started_at::timestamp AT TIME ZONE 'UTC') as last_scheduling_duration,
        "LastScheduler_SimpleEntities".name as last_scheduled_by,
        (extract(epoch from "Studios".start::timestamp AT TIME ZONE 'UTC') * 1000)::bigint as start,
        (extract(epoch from "Studios".end::timestamp AT TIME ZONE 'UTC') * 1000)::bigint as end
    from "Studios"
    left outer join "SimpleEntities" as "LastScheduler_SimpleEntities" on "Studios".last_scheduled_by_id = "LastScheduler_SimpleEntities".id
    left outer join "SimpleEntities" as "CurrentScheduler_SimpleEntities" on "Studios".is_scheduling_by_id = "CurrentScheduler_SimpleEntities".id
    """
    from stalker.db.session import DBSession
    row = DBSession.connection().execute(sql_query).fetchone()

    # map the row columns to the JSON field names in order
    field_names = (
        'is_scheduling',
        'is_scheduling_by_id',
        'is_scheduling_by',
        'scheduling_started_at',
        'last_scheduled_at',
        'last_scheduling_took',
        'last_scheduled_by',
        'start',
        'end',
    )
    return dict(zip(field_names, row))
def get_entity(self):
    """returns one ImageFormat instance data as json
    """
    from sqlalchemy import text
    from stalker.db.session import DBSession

    sql = """select
        "ImageFormats".id,
        "ImageFormats".width,
        "ImageFormats".height,
        "ImageFormats".pixel_aspect,
        "ImageFormats".print_resolution
    from "ImageFormats"
    where "ImageFormats".id = :id
    """
    row = DBSession.connection().execute(
        text(sql), id=self.entity_id
    ).fetchone()

    # map the columns to their JSON field names in order
    keys = ('id', 'width', 'height', 'pixel_aspect', 'print_resolution')
    data = dict(zip(keys, row))

    response = super(ImageFormatViews, self).get_entity()
    return self.update_response_data(response, data)
def _set_defaults(self): """setup the default values """ # set size policies # self.name_lineEdit self.type_comboBox.setSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Fixed) self.status_comboBox.setSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Fixed) self.client_comboBox.setSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Fixed) self.agency_comboBox.setSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Fixed) self.production_company_comboBox.setSizePolicy( QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Fixed) # invalidate the name and code fields by default self.name_lineEdit.set_invalid('Enter a name') self.code_lineEdit.set_invalid('Enter a code') # update type field from stalker import Type from stalker.db.session import DBSession project_types = \ DBSession.query(Type.id, Type.name)\ .filter(Type.target_entity_type == 'Project')\ .order_by(Type.name)\ .all() self.type_comboBox.clear() self.type_comboBox.addItem('', -1) for type_id, type_name in project_types: self.type_comboBox.addItem(type_name, type_id) self.image_format.fill_combo_box() self.fill_repository_combo_box() self.fill_structure_combo_box() # fill status field sql = """select "SimpleEntities".id, "SimpleEntities".name from "Statuses" join "SimpleEntities" on "Statuses".id = "SimpleEntities".id join "StatusList_Statuses" on "Statuses".id = "StatusList_Statuses".status_id join "StatusLists" on "StatusLists".id = "StatusList_Statuses".status_list_id where "StatusLists".target_entity_type = 'Project'""" all_project_statuses = \ DBSession.connection().execute(sql).fetchall() for st_id, st_name in all_project_statuses: self.status_comboBox.addItem(st_name, st_id)
def get_assets_count(request):
    """returns the count of assets in a project

    :param request: the Pyramid request, the project id is read from matchdict
    :returns: int
    """
    project_id = request.matchdict.get('id', -1)

    # bind the project id as a parameter instead of interpolating the raw
    # request value into the SQL string (prevents SQL injection)
    sql_query = """select count(1)
    from "Assets"
    join "Tasks" on "Assets".id = "Tasks".id
    where "Tasks".project_id = :project_id
    """
    from sqlalchemy import text
    return DBSession.connection().execute(
        text(sql_query), project_id=project_id
    ).fetchone()[0]
def get_studio_clients(request):
    """returns client with the given id
    """
    logger.debug('get_studio_clients is working for the studio')

    sql_query = """
    select
        "Clients".id,
        "Client_SimpleEntities".name,
        "Client_SimpleEntities".description,
        "Thumbnail_Links".full_path,
        projects.project_count
    from "Clients"
    join "SimpleEntities" as "Client_SimpleEntities" on "Client_SimpleEntities".id = "Clients".id
    left outer join "Links" as "Thumbnail_Links" on "Client_SimpleEntities".thumbnail_id = "Thumbnail_Links".id
    left outer join (
        select "Projects".client_id as client_id, count("Projects".id) as project_count
        from "Projects"
        group by "Projects".client_id)as projects on projects.client_id = "Clients".id
    """
    result = DBSession.connection().execute(sql_query)
    can_update_client = PermissionChecker(request)('Update_Client')

    clients = []
    for client_id, name, description, thumb_path, project_count in \
            result.fetchall():
        client = {
            'id': client_id,
            'name': name,
            'description': description,
            'thumbnail_full_path': thumb_path,
            'projectsCount': project_count if project_count else 0
        }
        # only expose the edit links to users with update permission
        if can_update_client:
            client['item_update_link'] = \
                '/clients/%s/update/dialog' % client_id
            client['item_remove_link'] = \
                '/clients/%s/delete/dialog?came_from=%s' % (
                    client_id,
                    request.current_route_path()
                )
        clients.append(client)

    return Response(json_body=clients)
def get_shots_simple(request):
    """returns all the Shots of the given Project or Sequence as simple
    {'id', 'name'} dicts

    :param request: the Pyramid request, the entity id is read from matchdict
    """
    entity_id = request.matchdict.get('id', -1)
    entity = Entity.query.filter_by(id=entity_id).first()

    logger.debug('get_shots starts ')

    sql_query = """select
        "Shot_SimpleEntities".id as id,
        "Shot_SimpleEntities".name as name
    from "Shots"
    join "Tasks" as "Shot_Tasks" on "Shot_Tasks".id = "Shots".id
    join "SimpleEntities" as "Shot_SimpleEntities" on "Shot_SimpleEntities".id = "Shots".id
    %(where_condition)s
    order by "Shot_SimpleEntities".name
    """

    # set the content range to prevent JSONRest Store to query the data twice
    content_range = '%s-%s/%s'

    # bind the entity id as a parameter instead of interpolating the raw
    # request value into the SQL string (prevents SQL injection)
    where_condition = ''
    params = {}
    if entity.entity_type == 'Sequence':
        # bugfix: "Shot_Sequences" was referenced in the where clause without
        # being joined, which made the query fail; join it here
        where_condition = """left join "Shot_Sequences"
        on "Shot_Sequences".shot_id = "Shots".id
    where "Shot_Sequences".sequence_id = :entity_id"""
        params['entity_id'] = entity_id
    elif entity.entity_type == 'Project':
        where_condition = 'where "Shot_Tasks".project_id = :entity_id'
        params['entity_id'] = entity_id

    sql_query = sql_query % {'where_condition': where_condition}

    logger.debug('entity_id : %s' % entity_id)

    # convert to dgrid format right here in place
    from sqlalchemy import text
    result = DBSession.connection().execute(text(sql_query), **params)
    return_data = [{'id': r[0], 'name': r[1]} for r in result.fetchall()]

    shot_count = len(return_data)
    content_range = content_range % (0, shot_count - 1, shot_count)

    logger.debug('get_shots_simple ends ')
    resp = Response(json_body=return_data)
    resp.content_range = content_range
    return resp
def get_budget_entries(request):
    """returns the BudgetEntries of the budget with the given id

    :param request: the Pyramid request, the budget id is read from matchdict
    :returns: Response whose json body is a list of entry dicts
    """
    budget_id = request.matchdict.get('id')
    logger.debug(
        'get_budget_entries is working for the project which id is: %s'
        % budget_id)

    # bind the budget id as a parameter instead of interpolating the raw
    # request value into the SQL string (prevents SQL injection)
    sql_query = """
    select
        "BudgetEntries_SimpleEntities".id,
        "BudgetEntries_SimpleEntities".name,
        "Types_SimpleEntities".name as type_name,
        "BudgetEntries".amount,
        "BudgetEntries".cost,
        "BudgetEntries".msrp,
        "BudgetEntries".price,
        "BudgetEntries".realized_total,
        "BudgetEntries".unit,
        "BudgetEntries_SimpleEntities".description,
        "BudgetEntries_SimpleEntities".generic_text
    from "BudgetEntries"
    join "SimpleEntities" as "BudgetEntries_SimpleEntities" on "BudgetEntries_SimpleEntities".id = "BudgetEntries".id
    join "SimpleEntities" as "Types_SimpleEntities" on "Types_SimpleEntities".id = "BudgetEntries_SimpleEntities".type_id
    join "Budgets" on "Budgets".id = "BudgetEntries".budget_id
    where "Budgets".id = :budget_id
    """
    from sqlalchemy import text
    result = DBSession.connection().execute(
        text(sql_query), budget_id=budget_id
    )
    entries = [{
        'id': r[0],
        'name': r[1],
        'type': r[2],
        'amount': r[3],
        'cost': r[4],
        'msrp': r[5],
        'price': r[6],
        'realized_total': r[7],
        'unit': r[8],
        'note': r[9],
        'addition_type': r[10]
    } for r in result.fetchall()]

    return Response(json_body=entries)
def get_reviews_count(request, where_conditions):
    """returns the count of reviews

    :param request: the Pyramid request (not used by the query itself)
    :param where_conditions: a raw SQL ``where`` clause fragment that is
      interpolated into the query as-is.
      NOTE(review): callers must never build this fragment from unsanitized
      user input -- it is injected straight into the SQL string.
    :returns: int, the number of matching Review rows
    """
    sql_query = """
    select count(1)
    from "Reviews"
    join "Tasks" as "Review_Tasks" on "Review_Tasks".id = "Reviews".task_id
    join "Statuses" as "Reviews_Statuses" on "Reviews_Statuses".id = "Reviews".status_id
    where %(where_conditions)s
    """
    sql_query = sql_query % {'where_conditions': where_conditions}
    return DBSession.connection().execute(sql_query).fetchone()[0]
def get_shots_children_task_type(request):
    """returns the Task Types defined under the Shot container
    """
    sql_query = """select
        "SimpleEntities".id as type_id,
        "SimpleEntities".name as type_name
    from "SimpleEntities"
    join "SimpleEntities" as "Task_SimpleEntities" on "SimpleEntities".id = "Task_SimpleEntities".type_id
    join "Tasks" on "Task_SimpleEntities".id = "Tasks".id
    join "Shots" on "Tasks".parent_id = "Shots".id
    group by "SimpleEntities".id, "SimpleEntities".name
    order by "SimpleEntities".name"""
    rows = DBSession.connection().execute(sql_query).fetchall()

    # one {'id', 'name'} dict per distinct task type
    task_types = []
    for type_id, type_name in rows:
        task_types.append({'id': type_id, 'name': type_name})
    return task_types
def get_entity_task_max_end(request):
    """gives entity's tasks max end date

    Computes the maximum task end date (epoch seconds, UTC) over the tasks
    of a Project, a User, or all users of a Department.

    :param request: the Pyramid request, the entity id is read from matchdict
    :returns: float epoch seconds, or None when no matching task exists
    """
    logger.debug('get_entity_task_max_end starts')
    entity_id = request.matchdict.get('id')
    from stalker import Entity
    entity = Entity.query.filter_by(id=entity_id).first()

    sql_query = """select
        max(extract(epoch from "Tasks".end::timestamp AT TIME ZONE 'UTC')) as end
    from "Users"
    join "Task_Resources" on "Task_Resources".resource_id = "Users".id
    join "Tasks" on "Tasks".id = "Task_Resources".task_id
    --where not exists(select 1 from "Tasks" as t where t.parent_id = "Tasks".id)
    %(where_conditions)s
    """
    where_conditions = ''

    # NOTE(review): entity_id comes from the request and is string-formatted
    # into the SQL below -- consider switching to bound parameters
    if entity.entity_type == 'Project':
        where_conditions = """where "Tasks".project_id = %(project_id)s """ % {
            'project_id': entity_id
        }
    elif entity.entity_type == 'User':
        where_conditions = \
            """where "Task_Resources".resource_id = %(resource_id)s """ % {
                'resource_id': entity_id
            }
    elif entity.entity_type == 'Department':
        # build an OR chain over every user of the department
        temp_buffer = ["""where ("""]
        for i, resource in enumerate(entity.users):
            if i > 0:
                temp_buffer.append(' or')
            temp_buffer.append(
                """ "Task_Resources".resource_id='%s'""" % resource.id)
        temp_buffer.append(' )')
        where_conditions = ''.join(temp_buffer)

    logger.debug('where_conditions: %s' % where_conditions)
    sql_query = sql_query % {'where_conditions': where_conditions}

    from stalker.db.session import DBSession
    result = DBSession.connection().execute(sql_query).fetchone()
    return result[0]
def get_assets_type_task_types(request):
    """returns the Task Types defined under the Asset container

    :param request: the Pyramid request, optionally filters by the ``t_id``
      matchdict value (an asset type id)
    """
    type_id = request.matchdict.get('t_id', None)
    logger.debug('type_id %s' % type_id)

    sql_query = """select
        "SimpleEntities".id as type_id,
        "SimpleEntities".name as type_name
    from "SimpleEntities"
    join "SimpleEntities" as "Task_SimpleEntities" on "SimpleEntities".id = "Task_SimpleEntities".type_id
    join "Tasks" on "Task_SimpleEntities".id = "Tasks".id
    join "Assets" on "Tasks".parent_id = "Assets".id
    join "SimpleEntities" as "Assets_SimpleEntities" on "Assets_SimpleEntities".id = "Assets".id
    %(where_condition)s
    group by "SimpleEntities".id, "SimpleEntities".name
    order by "SimpleEntities".name"""

    # bind the type id as a parameter instead of interpolating the raw
    # request value into the SQL string (prevents SQL injection)
    where_condition = ''
    params = {}
    if type_id:
        where_condition = 'where "Assets_SimpleEntities".type_id = :type_id'
        params['type_id'] = type_id
    sql_query = sql_query % {'where_condition': where_condition}

    from sqlalchemy import text
    result = DBSession.connection().execute(text(sql_query), **params)
    return_data = [{'id': r[0], 'name': r[1]} for r in result.fetchall()]

    type_count = len(return_data)
    content_range = '%s-%s/%s' % (0, type_count - 1, type_count)
    logger.debug('content_range : %s' % content_range)

    resp = Response(json_body=return_data)
    resp.content_range = content_range
    return resp
def get_entity(self):
    """returns one Status instance
    """
    from stalker.db.session import DBSession
    from sqlalchemy import text

    sql = """
    select
        "Statuses".code
    from "Statuses"
    where "Statuses".id = :id
    """
    status_code = DBSession.connection().execute(
        text(sql), id=self.entity_id
    ).fetchone()[0]

    response = super(StatusViews, self).get_entity()
    return self.update_response_data(response, {'code': status_code})
def tearDown(self):
    """clean up the test
    """
    import datetime
    from stalker import defaults
    from stalker.db.declarative import Base
    from stalker.db.session import DBSession

    # clean up test database
    # roll back any open transaction before touching the schema
    DBSession.rollback()
    connection = DBSession.connection()
    engine = connection.engine
    connection.close()
    # drop every table Stalker declared; checkfirst avoids errors for tables
    # that were never created
    Base.metadata.drop_all(engine, checkfirst=True)
    DBSession.remove()
    # reset the timing resolution to 1 hour (presumably the project default
    # that other tests may have changed -- confirm)
    defaults.timing_resolution = datetime.timedelta(hours=1)
def get_entity(self):
    """returns one Type instance data as json
    """
    from sqlalchemy import text
    from stalker.db.session import DBSession

    sql = """select
        "Types".id,
        "Types".code
    from "Types"
    where "Types".id = :id
    """
    type_id, type_code = DBSession.connection().execute(
        text(sql), id=self.entity_id
    ).fetchone()

    response = super(TypeViews, self).get_entity()
    return self.update_response_data(
        response, {'id': type_id, 'code': type_code}
    )
def get_entity(self):
    """returns one Entity instance
    """
    from sqlalchemy import text
    from stalker.db.session import DBSession
    from stalker.models.entity import Entity_Notes, Entity_Tags
    from stalker_pyramid import entity_type_to_url

    sql = """
    select
        "Entities".id
    from "Entities"
    where "Entities".id = :id
    """
    row = DBSession.connection().execute(
        text(sql), id=self.entity_id
    ).fetchone()
    entity_id = row[0]

    # get notes count
    notes_count = DBSession.query(Entity_Notes.c.entity_id)\
        .filter(Entity_Notes.c.entity_id == self.entity_id)\
        .count()

    # get tags count
    tags_count = DBSession.query(Entity_Tags.c.entity_id)\
        .filter(Entity_Tags.c.entity_id == self.entity_id)\
        .count()

    base_url = entity_type_to_url['Entity']
    data = {
        'id': entity_id,
        'notes': {
            '$ref': '%s/%s/notes' % (base_url, entity_id),
            'length': notes_count
        },
        'tags': {
            '$ref': '%s/%s/tags' % (base_url, entity_id),
            'length': tags_count
        },
    }

    # update with super data
    response = super(EntityViews, self).get_entity()
    return self.update_response_data(response, data)
def get_task_reviewers(request):
    """RESTful version of getting all reviewers of a task

    :param request: the Pyramid request, the task id is read from matchdict
    :returns: list of {'reviewer_name', 'reviewer_id'} dicts, or a 500
      Response when the task does not exist
    """
    logger.debug('get_task_reviewers is running')

    task_id = request.matchdict.get('id', -1)
    task = Task.query.filter(Task.id == task_id).first()
    if not task:
        transaction.abort()
        return Response('There is no task with id: %s' % task_id, 500)

    # bind the task id as a parameter instead of interpolating it into the
    # SQL string
    sql_query = """
    select
        "Reviewers".name as reviewers_name,
        "Reviewers".id as reviewers_id
    from "Reviews"
    join "Tasks" as "Review_Tasks" on "Review_Tasks".id = "Reviews".task_id
    join "SimpleEntities" as "Reviewers" on "Reviewers".id = "Reviews".reviewer_id
    where "Review_Tasks".id = :task_id
    group by "Reviewers".id, "Reviewers".name
    """
    from sqlalchemy import text
    result = DBSession.connection().execute(text(sql_query), task_id=task.id)

    return [{
        'reviewer_name': r[0],
        'reviewer_id': r[1]
    } for r in result.fetchall()]
def test_database_is_correctly_created(create_db):
    """testing if the fixture is working properly
    """
    from stalker.db.session import DBSession
    dialect_name = DBSession.connection().engine.dialect.name
    assert str(dialect_name) == 'sqlite'
def fill_calendar_with_time_logs(self):
    """fill the calendar with daily time log info

    For the currently selected resource, fetches all TimeLogs grouped by
    day and colors each calendar day proportionally to the logged seconds,
    with a tooltip listing the individual time logs.
    """
    resource_id = self.get_current_resource_id()
    # do not update if the calendar is showing the same user
    if self.calendar_widget.resource_id == resource_id or resource_id == -1:
        return

    tool_tip_text_format = u'{start:%H:%M} - {end:%H:%M} | {task_name}'

    # import time
    # start = time.time()
    # get all the TimeLogs grouped daily
    sql = """-- TOP DOWN SEARCH --
select
    "TimeLogs".start::date as date,
    array_agg(task_rec_data.full_path) as task_name,
    array_agg("TimeLogs".start) as start,
    array_agg("TimeLogs".end) as end,
    sum(extract(epoch from ("TimeLogs".end - "TimeLogs".start)))
from "TimeLogs"
join (
    with recursive recursive_task(id, parent_id, path_names) as (
        select
            task.id,
            task.project_id,
            -- task.project_id::text as path,
            ("Projects".code || '') as path_names
        from "Tasks" as task
        join "Projects" on task.project_id = "Projects".id
        where task.parent_id is NULL
        union all
        select
            task.id,
            task.parent_id,
            -- (parent.path || '|' || task.parent_id:: text) as path,
            (parent.path_names || ' | ' || "Parent_SimpleEntities".name) as path_names
        from "Tasks" as task
        inner join recursive_task as parent on task.parent_id = parent.id
        inner join "SimpleEntities" as "Parent_SimpleEntities" on parent.id = "Parent_SimpleEntities".id
    ) select
        recursive_task.id,
        "SimpleEntities".name || ' (' || recursive_task.path_names || ')' as full_path
    from recursive_task
    join "SimpleEntities" on recursive_task.id = "SimpleEntities".id
) as task_rec_data on "TimeLogs".task_id = task_rec_data.id
-- getting all the data is as fast as getting one, so get all the TimeLogs of this user
where "TimeLogs".resource_id = :resource_id
group by cast("TimeLogs".start as date)
order by cast("TimeLogs".start as date)
"""
    from sqlalchemy import text
    from stalker.db.session import DBSession
    result = DBSession.connection().execute(
        text(sql),
        resource_id=resource_id
    ).fetchall()
    # end = time.time()
    # print('getting data from sql: %0.3f sec' % (end - start))

    from anima.utils import utc_to_local
    time_shifter = utc_to_local
    import stalker
    # TODO: Remove this in a later version
    # stalker >= 0.2.18 already stores local times, so no shifting is needed
    from distutils.version import LooseVersion
    if LooseVersion(stalker.__version__) >= LooseVersion('0.2.18'):
        def time_shifter(x):
            return x

    for r in result:
        calendar_day = r[0]
        year = calendar_day.year
        month = calendar_day.month
        day = calendar_day.day
        daily_logged_seconds = r[4]
        daily_logged_hours = daily_logged_seconds // 3600
        daily_logged_minutes = \
            (daily_logged_seconds - daily_logged_hours * 3600) // 60

        # first tooltip line is the daily total
        tool_tip_text_data = [
            u'Total: %i h %i min logged' %
            (daily_logged_hours, daily_logged_minutes)
            if daily_logged_hours
            else u'Total: %i min logged' % daily_logged_minutes
        ]
        # then one line per time log, sorted by start time
        for task_name, start, end in sorted(
                zip(r[1], r[2], r[3]), key=lambda x: x[1]):
            time_log_tool_tip_text = tool_tip_text_format.format(
                start=time_shifter(start),
                end=time_shifter(end),
                task_name=task_name
            )
            tool_tip_text_data.append(time_log_tool_tip_text)

        merged_tool_tip = u'\n'.join(tool_tip_text_data)

        # green intensity is proportional to the logged seconds in that day
        date_format = QtGui.QTextCharFormat()
        bg_brush = QtGui.QBrush()
        bg_brush.setColor(
            QtGui.QColor(0, 255.0 / 86400.0 * daily_logged_seconds, 0)
        )

        date_format.setBackground(bg_brush)
        date_format.setToolTip(merged_tool_tip)

        date = QtCore.QDate(year, month, day)
        self.calendar_widget.setDateTextFormat(date, date_format)
def _set_defaults(self): """setup the default values """ # set size policies # self.name_lineEdit self.type_comboBox.setSizePolicy( QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Fixed ) self.status_comboBox.setSizePolicy( QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Fixed ) self.client_comboBox.setSizePolicy( QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Fixed ) self.agency_comboBox.setSizePolicy( QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Fixed ) self.production_company_comboBox.setSizePolicy( QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Fixed ) # invalidate the name and code fields by default self.name_lineEdit.set_invalid('Enter a name') self.code_lineEdit.set_invalid('Enter a code') # update type field from stalker import Type from stalker.db.session import DBSession project_types = \ DBSession.query(Type.id, Type.name)\ .filter(Type.target_entity_type == 'Project')\ .order_by(Type.name)\ .all() self.type_comboBox.clear() self.type_comboBox.addItem('', -1) for type_id, type_name in project_types: self.type_comboBox.addItem(type_name, type_id) self.image_format.fill_combo_box() self.fill_repository_combo_box() self.fill_structure_combo_box() # fill status field sql = """select "SimpleEntities".id, "SimpleEntities".name from "Statuses" join "SimpleEntities" on "Statuses".id = "SimpleEntities".id join "StatusList_Statuses" on "Statuses".id = "StatusList_Statuses".status_id join "StatusLists" on "StatusLists".id = "StatusList_Statuses".status_list_id where "StatusLists".target_entity_type = 'Project'""" all_project_statuses = \ DBSession.connection().execute(sql).fetchall() for st_id, st_name in all_project_statuses: self.status_comboBox.addItem(st_name, st_id)