def connectAndRun(self, onlyEventIDs=None):
    """Run one sync pass in both directions.

    Returns one of the module status codes: OTHER_SYNCDAMEON_RUNNING,
    SYNC_DAEMON_SHOULD_SLEEP, SYNCED_OK or SYNCED_NOT_OK.
    """
    self.syncomaniaSettings.load()
    # another daemon instance seems to own the sync role now
    if self.syncomaniaSettings.get(sync_settings.FIELD_CURRENT_SYNCDAEMON_ID) != self.mycode:
        debug.error("another daemon seems to have started running")
        return OTHER_SYNCDAMEON_RUNNING
    # while testing sync daemon goes into sleep mode
    if self.syncomaniaSettings.get(sync_settings.FIELD_SYNC_SLEEP) == sync_settings.FIELD_SYNC_SLEEP_YES:
        return SYNC_DAEMON_SHOULD_SLEEP
    okDownstream = self.shotgun_to_local_spooler.connectAndRun()
    if not okDownstream:
        debug.debug("something not OK syncing Shotgun to Local", debug.ERROR)
    okUpstream = self.local_to_shotgun_spooler.connectAndRun(onlyEventIDs=onlyEventIDs)
    if not okUpstream:
        debug.debug("something not OK syncing Local to Shotgun", debug.ERROR)
    if okDownstream and okUpstream:
        return SYNCED_OK
    return SYNCED_NOT_OK
def _addToDatabase(self, itemType, fieldValues, fieldNames):
    """Insert one row into the itemType table and return its new local id.

    fieldNames/fieldValues are parallel lists; entity-typed values are
    converted to PostgresEntityType before being handed to the driver.
    """
    replacers = ["%s"] * len(fieldNames)
    for i in range(len(fieldValues)):
        if isinstance(fieldValues[i], base_entity.ShotgunBaseEntity):
            fieldValues[i] = fieldValues[i].getPgObj()
        # plain shotgun-style dicts get wrapped into the composite pg type;
        # isinstance instead of type()==dict, .get instead of has_key
        if isinstance(fieldValues[i], dict):
            fieldValues[i] = PostgresEntityType(
                fieldValues[i]["type"],
                remote_id=fieldValues[i]["id"],
                local_id=fieldValues[i].get("sg_local_id"))
    query = "INSERT INTO \"%s\" (%s) VALUES (%s)" % (
        itemType, ", ".join(fieldNames), ", ".join(replacers))
    cur = self.con.cursor()
    debug.debug(cur.mogrify(query, fieldValues))
    cur.execute(query, fieldValues)
    # fetch the id the serial column just assigned for this table
    query2 = "SELECT currval('\"%s___local_id_seq\"')" % itemType
    cur.execute(query2)
    newID = cur.fetchone()[0]
    self.con.commit()
    cur.close()
    return newID
def _addToDatabase(self, itemType, fieldValues, fieldNames):
    """Write a single record into the itemType table and hand back the
    freshly generated local id from the table's serial sequence."""
    placeholders = ", ".join(["%s"] * len(fieldNames))
    for idx in range(len(fieldValues)):
        current = fieldValues[idx]
        if isinstance(current, base_entity.ShotgunBaseEntity):
            current = current.getPgObj()
            fieldValues[idx] = current
        if type(current) == dict:
            local_id = None
            if current.has_key("sg_local_id"):
                local_id = current["sg_local_id"]
            fieldValues[idx] = PostgresEntityType(current["type"],
                                                  remote_id=current["id"],
                                                  local_id=local_id)
    query = "INSERT INTO \"%s\" (%s) VALUES (%s)" % (itemType,
                                                     ", ".join(fieldNames),
                                                     placeholders)
    cur = self.con.cursor()
    debug.debug(cur.mogrify(query, fieldValues))
    cur.execute(query, fieldValues)
    query2 = "SELECT currval('\"%s___local_id_seq\"')" % itemType
    cur.execute(query2)
    newID = cur.fetchone()[0]
    self.con.commit()
    cur.close()
    return newID
def getPgType(shotgunType):
    """Map a shotgun field data-type name to the matching postgres
    column type.

    Returns None for types that are deliberately not handled
    ("password", "pivot_column") and for unknown types, which are
    additionally logged.
    """
    mapping = {
        "checkbox": "boolean",
        "currency": "money",
        "date": "date",
        "date_time": "timestamp with time zone",
        "duration": "interval",
        "entity": "entity_sync",
        "multi_entity": "entity_sync[]",
        "float": "double precision",
        "file": "text",
        "tag_list": "varchar[]",
        "list": "text",
        "number": "integer",
        "percent": "integer",
        "query": "text",
        "status_list": "text",
        "text": "text",
        "image": "text",
        "url": "text",
        "color": "text",
        "uuid": "text",
        "entity_type": "text",
        "serializable": "text",
    }
    if shotgunType in mapping:
        return mapping[shotgunType]
    # "password" and "pivot_column" are known but intentionally unhandled
    if shotgunType not in ("password", "pivot_column"):
        debug.debug("%s not yet handled" % shotgunType)
    return None
def getPgType(shotgunType):
    """Translate a shotgun data-type name into the postgres column type.

    Unhandled types yield None; unexpected ones are logged first.
    """
    textTypes = ("file", "list", "query", "status_list", "text", "image",
                 "url", "color", "uuid", "entity_type", "serializable")
    if shotgunType in textTypes:
        return "text"
    if shotgunType in ("number", "percent"):
        return "integer"
    if shotgunType == "checkbox":
        return "boolean"
    if shotgunType == "currency":
        return "money"
    if shotgunType == "date":
        return "date"
    if shotgunType == "date_time":
        return "timestamp with time zone"
    if shotgunType == "duration":
        return "interval"
    if shotgunType == "entity":
        return "entity_sync"
    if shotgunType == "multi_entity":
        return "entity_sync[]"
    if shotgunType == "float":
        return "double precision"
    if shotgunType == "tag_list":
        return "varchar[]"
    if shotgunType in ("password", "pivot_column"):
        # known types that are intentionally not synced
        return None
    debug.debug("%s not yet handled" % shotgunType)
    return None
def _setProcessed(self, event, exception=None):
    """Mark a change event row as processed, storing any exception text
    (augmented with the current traceback) alongside it."""
    if exception is not None:
        # append the active traceback so the failure can be diagnosed later
        exception += "\n%s" % traceback.format_exc()
    query = ("UPDATE \"ChangeEventsToShotgun\" "
             "SET processed = 't', exception = %s WHERE id=%s")
    params = (exception, event["id"])
    cur = self.src.con.cursor()
    debug.debug(cur.mogrify(query, params))
    cur.execute(query, params)
    cur.close()
def __init__(self): self.shotgun_to_local_spooler = shotgun_to_local.EventSpooler() self.local_to_shotgun_spooler = local_to_shotgun.LocalDBEventSpooler() self.syncomaniaSettings = sync_settings.SyncomaniaSettings() try: self.syncomaniaSettings.load() except Exception, error: #IGNORE:W0703 debug.debug("no syncomania data available yet: " + unicode(error), debug.ERROR)
def _getNewEvents(self):
    """Fetch up to 100 shotgun EventLogEntry records newer than the last
    processed id, ordered oldest-first."""
    eventFilters = [['id', 'greater_than', self.lastID]]
    eventFields = ['id', 'event_type', 'attribute_name', 'meta', 'entity']
    newevents = self.sg.find("EventLogEntry",
                             filters=eventFilters,
                             fields=eventFields,
                             order=[{'column': 'id', 'direction': 'asc'}],
                             filter_operator='all',
                             limit=100)
    debug.debug(newevents)
    return newevents
def _getNewEvents(self):
    """Return the next batch (at most 100) of shotgun EventLogEntry
    records with ids above self.lastID, ordered oldest-first."""
    newevents = self.sg.find(
        "EventLogEntry",
        filters=[['id', 'greater_than', self.lastID]],
        fields=['id', 'event_type', 'attribute_name', 'meta', 'entity'],
        order=[{
            'column': 'id',
            'direction': 'asc'
        }],
        filter_operator='all',
        limit=100)
    debug.debug(newevents)
    return newevents
def _createDBFields(entitycode, fieldDefs, entityname):
    """Ensure the entitycode table has a column of the right postgres
    type for every shotgun field definition in fieldDefs.

    Adds missing columns, retypes mismatched ones, and guarantees the
    "__retired" soft-delete marker exists.
    """
    conn = connectors.getDBConnection()
    createCur = conn.cursor()
    dbFields = _getDBFields(entitycode, entityname)
    # every table carries a soft-delete marker column
    if not dbFields.has_key("__retired"):
        query = "ALTER TABLE \"%s\" ADD COLUMN \"__retired\" BOOLEAN DEFAULT 'f'" % entitycode
        createCur.execute(query)
    for attribute in fieldDefs.keys():
        datatype = fieldDefs[attribute]["data_type"]["value"]
        postgresType = connectors.getPgType(datatype)
        if postgresType == None:
            debug.debug("field %s.%s (%s) not handled" % (entitycode, attribute, datatype))
            continue
        if dbFields.has_key(attribute):
            if dbFields[attribute] == postgresType:
                continue  # column already exists with the right type
            debug.debug("changing type %s to %s" % (dbFields[attribute], postgresType))
            query = "ALTER TABLE \"%s\" ALTER COLUMN \"%s\" TYPE %s" % (
                entitycode, str(attribute), postgresType)
        else:
            query = "ALTER TABLE \"%s\" ADD COLUMN \"%s\" %s" % (
                entitycode, str(attribute), postgresType)
        debug.debug(query)
        createCur.execute(query)
    # FIX: close the cursor (consistent with _getDBFields, which closes its own)
    createCur.close()
def testSyncomaniaSplitsChange(self):
    """Changing the task splits in shotgun must update the local task's
    duration and splits after a sync run."""
    task = getObject("Task", remote_id=testTaskID)
    # duration.days * 24 * 60 converts the timedelta to minutes
    self.assertEqual(11520, task.duration.days * 24 * 60)
    ret = self.sg.update("Task", testTaskID, {"splits": SPLIT_NEW})
    debug.debug(ret)
    self.assertTrue(self.shotgun2local.connectAndRun(),
                    "synch not successful")
    # re-fetch to observe the synced state
    task = getObject("Task", remote_id=testTaskID)
    self.assertEqual(10080, task.duration.days * 24 * 60)
    self.assertEqual(2, len(task.splits))
    self.assertTrue(task.splits[0].has_key("start"))
    self.assertTrue(task.splits[0].has_key("end"))
def setUp(self):
    """Create both spoolers and the shared fixtures: the test asset
    library, the test task and two freshly created assets."""
    self.local2shotgun = local_to_shotgun.LocalDBEventSpooler()
    self.shotgun2local = shotgun_to_local.EventSpooler()
    libraryType = entities.AssetLibrary().getType()
    self.testassetlibrary = factories.getObject(
        libraryType, remote_id=commanda.TEST_ASSETLIBRARY_ID)
    self.task = factories.getObject("Task",
                                    remote_id=tests_elefant.testTaskID)
    self.testasset = tests_elefant.createTestAsset(self.testassetlibrary)
    debug.debug(self.testasset.getLocalID())
    self.linkedAsset = tests_elefant.createTestAsset(self.testassetlibrary)
    debug.debug(self.linkedAsset.getLocalID())
def testSyncomaniaSplitsChange(self):
    """Updating the task splits in shotgun should, after a sync run,
    change the local task's duration and expose two split entries."""
    task = getObject("Task", remote_id=testTaskID)
    # duration is a timedelta; days * 24 * 60 converts it to minutes
    self.assertEqual(11520, task.duration.days * 24 * 60)
    ret = self.sg.update("Task", testTaskID, {"splits": SPLIT_NEW})
    debug.debug(ret)
    self.assertTrue(self.shotgun2local.connectAndRun(),
                    "synch not successful")
    # re-fetch to pick up the synced values
    task = getObject("Task", remote_id=testTaskID)
    self.assertEqual(10080, task.duration.days * 24 * 60)
    self.assertEqual(len(task.splits), 2)
    self.assertTrue(task.splits[0].has_key("start"))
    self.assertTrue(task.splits[0].has_key("end"))
def _setProcessed(self, event, exception=None):
    """Mark the given change event row as processed; when an exception
    message is supplied, store it (plus the active traceback) too."""
    query = "UPDATE \"ChangeEventsToShotgun\" SET processed = 't', exception = %s WHERE id=%s"
    if exception != None:
        # keep the traceback of the failure for later diagnosis
        exception += "\n%s" % traceback.format_exc()
    cur = self.src.con.cursor()
    debug.debug(cur.mogrify(query, (
        exception,
        event["id"],
    )))
    cur.execute(query, (
        exception,
        event["id"],
    ))
    cur.close()
def setUp(self):
    """Build both spoolers and the shared fixtures: the test asset
    library, the test task, and two freshly created assets."""
    self.local2shotgun = local_to_shotgun.LocalDBEventSpooler()
    self.shotgun2local = shotgun_to_local.EventSpooler()
    self.testassetlibrary = factories.getObject(
        entities.AssetLibrary().getType(),
        remote_id=commanda.TEST_ASSETLIBRARY_ID)
    self.task = factories.getObject("Task",
                                    remote_id=tests_elefant.testTaskID)
    self.testasset = tests_elefant.createTestAsset(self.testassetlibrary)
    debug.debug(self.testasset.getLocalID())
    self.linkedAsset = tests_elefant.createTestAsset(self.testassetlibrary)
    debug.debug(self.linkedAsset.getLocalID())
def _createEntity(self, event):
    """ process a create entity event

    Creates the entity referenced by event["corr_entity"] in shotgun and
    stores the returned remote id locally.  Returns True when the event
    is handled (including the already-created case), False on failure.
    NOTE(review): falls through (returning None) when local_id is not an
    int — confirm callers treat that as failure.
    """
    entity = event["corr_entity"]
    # only proceed for events carrying a concrete integer local id
    if type(entity.local_id) == type(1):
        try:
            obj = getObject(entity.type, local_id=entity.local_id,
                            includeRetireds=True)
            if not obj:
                exception = "Error %s with local_id %d does not exist anymore" % (
                    entity.type, entity.local_id)
                self._setProcessed(event, exception=exception)
                return False
            remoteID = obj.getRemoteID()
            # a known remote id means some other process already created it
            if remoteID != None and remoteID != UNKNOWN_SHOTGUN_ID:
                exception = (
                    "Error %s with local_id %d seems to be existing already.\n" + \
                    "This is most probably due to concurrent syncing of tests and sync-daemon? ") % (
                        entity.type, entity.local_id)
                self._setProcessed(event, exception=exception)
                return True
            data = obj.getShotgunDict()
            debug.debug(data)
            newdata = self.sg.create(entity.type, data)
            debug.debug(newdata)
            # remember the remote id shotgun assigned
            self.src.changeInDB(obj, "id", newdata["id"])
            self._setProcessed(event)
            return True
        except AttributeError, fault:
            #event["type"] = "CouchdbChangeEvents"
            exception = "Error %s" % (str(fault))
            self._setProcessed(event, exception=exception)
            return False
        except shotgun_api3.Fault, fault:
            #event["type"] = "CouchdbChangeEvents"
            exception = "Error %s" % (str(fault))
            self._setProcessed(event, exception=exception)
            return False
def _createEntity(self, event):
    """ process a create entity event

    Pushes the locally created entity referenced by event["corr_entity"]
    to shotgun and writes shotgun's id back into the local record.
    Returns True on success or when the entity already has a remote id,
    False on failure; implicitly None when local_id is not an int
    (NOTE(review): confirm callers treat None as failure).
    """
    entity = event["corr_entity"]
    if type(entity.local_id) == type(1):
        try:
            # include retired records so deletes-in-flight are still found
            obj = getObject(entity.type, local_id=entity.local_id,
                            includeRetireds=True)
            if not obj:
                exception = "Error %s with local_id %d does not exist anymore" % (
                    entity.type, entity.local_id)
                self._setProcessed(event, exception=exception)
                return False
            remoteID = obj.getRemoteID()
            # already present remotely: record the anomaly but succeed
            if remoteID != None and remoteID != UNKNOWN_SHOTGUN_ID:
                exception = (
                    "Error %s with local_id %d seems to be existing already.\n" + \
                    "This is most probably due to concurrent syncing of tests and sync-daemon? ") % (
                        entity.type, entity.local_id)
                self._setProcessed(event, exception=exception)
                return True
            data = obj.getShotgunDict()
            debug.debug(data)
            newdata = self.sg.create(entity.type, data)
            debug.debug(newdata)
            # persist the remote id shotgun just assigned
            self.src.changeInDB(obj, "id", newdata["id"])
            self._setProcessed(event)
            return True
        except AttributeError, fault:
            #event["type"] = "CouchdbChangeEvents"
            exception = "Error %s" % (str(fault))
            self._setProcessed(event, exception=exception)
            return False
        except shotgun_api3.Fault, fault:
            #event["type"] = "CouchdbChangeEvents"
            exception = "Error %s" % (str(fault))
            self._setProcessed(event, exception=exception)
            return False
def delete(self, myObj):
    """Delete the row(s) backing myObj, matched on whichever of the
    local and remote ids are known.

    FIX: previously, when neither id was known, the query ended in a
    bare "WHERE " and failed; we now bail out early.  The cursor is
    also closed after use.
    """
    filters = []
    filtervalues = []
    if myObj.local_id != UNKNOWN_SHOTGUN_ID:
        filters.append("__local_id=%s")
        filtervalues.append(myObj.local_id)
    if myObj.remote_id != UNKNOWN_SHOTGUN_ID:
        filters.append("id=%s")
        filtervalues.append(myObj.remote_id)
    if not filters:
        # nothing identifies this object; refuse to run a malformed delete
        debug.debug("delete called without any known id for %s" % myObj.getType())
        return
    query = "DELETE FROM \"%s\" WHERE " % (myObj.getType())
    query += " OR ".join(filters)
    cursor = self.con.cursor()
    debug.debug(cursor.mogrify(query, filtervalues))
    cursor.execute(query, filtervalues)
    cursor.close()
def delete(self, myObj):
    """Remove the database record(s) for myObj, matching on whichever
    of the local and remote ids are known."""
    conditions = []
    params = []
    if myObj.local_id != UNKNOWN_SHOTGUN_ID:
        conditions.append("__local_id=%s")
        params.append(myObj.local_id)
    if myObj.remote_id != UNKNOWN_SHOTGUN_ID:
        conditions.append("id=%s")
        params.append(myObj.remote_id)
    query = ("DELETE FROM \"%s\" WHERE " % (myObj.getType())
             + " OR ".join(conditions))
    cursor = self.con.cursor()
    debug.debug(cursor.mogrify(query, params))
    cursor.execute(query, params)
def _createChangeEvent(src, task, corr_entity=None, changed_values=None):
    """Insert a ChangeEventsToShotgun row and remember its id in
    CREATED_CHANGE_EVENTS (skipped entirely when GENERATEEVENTS is off)."""
    global CREATED_CHANGE_EVENTS
    global GENERATEEVENTS
    if not GENERATEEVENTS:
        debug.debug("not generating change event ( for testing purposes only )")
        return
    names = ["task"]
    values = [task]
    # only store the optional columns that were actually supplied
    updated_by = connectors.getPostgresUser()
    if updated_by != None:
        names.append("updated_by")
        values.append(updated_by)
    if corr_entity != None:
        names.append("corr_entity")
        values.append(corr_entity)
    if changed_values != None:
        names.append("changed_values")
        values.append(changed_values)
    query = "INSERT INTO \"ChangeEventsToShotgun\" (%s) VALUES (%s)" % (
        ", ".join(names), ", ".join(["%s"] * len(names)))
    cur = src.con.cursor()
    debug.debug(cur.mogrify(query, values))
    cur.execute(query, values)
    # record the id of the event we just created
    cur.execute("SELECT currval('\"ChangeEventsToShotgun_id_seq\"')")
    (eventid, ) = cur.fetchone()
    CREATED_CHANGE_EVENTS.append(eventid)
    src.con.commit()
def testSyncomaniaSettingsChange(self):
    """A status change in shotgun must produce exactly one new event log
    entry and then be synced to the local task."""
    eventFields = ['id', 'event_type', 'attribute_name', 'meta', 'entity']
    descOrder = [{'column': 'id', 'direction': 'desc'}]
    lastevent = self.sg.find("EventLogEntry", filters=[],
                             fields=eventFields, order=descOrder,
                             filter_operator='all', limit=1)[0]
    debug.debug(lastevent)
    lastID = lastevent["id"]
    ret = self.sg.update("Task", testTaskID, {"sg_status_list": NEWVALUE})
    debug.debug(ret)
    newevent = self.sg.find("EventLogEntry", filters=[],
                            fields=eventFields, order=descOrder,
                            filter_operator='all', limit=1)[0]
    debug.debug(newevent)
    # exactly one new event, about our task, carrying the new value
    self.failUnlessEqual(newevent["entity"]["id"], testTaskID)
    self.failUnlessEqual(newevent["meta"]["new_value"], NEWVALUE)
    self.failUnlessEqual(newevent["id"], lastID + 1)
    self.assertTrue(self.shotgun2local.connectAndRun(), "synch not successful")
    task = getObject("Task", remote_id=testTaskID)
    self.assertEqual(NEWVALUE, task.sg_status_list)
def queryAndProcess(self, onlyEventIDs=None):
    """
    queries and processes events from shotgun

    this method is mainly used from the run method but also for testing purposes

    Returns True only when every pending event was processed successfully.
    """
    # for temporary objects only
    # query first changeEvents, and then tempObject
    # tempObjects need then to be created first
    base = "SELECT * FROM \"ChangeEventsToShotgun\" WHERE NOT processed"
    ordering = " ORDER BY created ASC, id ASC"
    if onlyEventIDs is None:
        self.cur.execute(base + ordering)
    else:
        # restrict to an explicit id list (used by tests)
        self.cur.execute(base + " AND id = ANY(%s)" + ordering,
                         (onlyEventIDs, ))
    descriptions = self.cur.description
    allOk = True
    for row in self.cur:
        # turn the positional row into a dict keyed by column name
        eventDict = {}
        for colIdx in range(len(descriptions)):
            colName = descriptions[colIdx][0]
            cellValue = row[colIdx]
            if colName == "changed_values":
                # changed_values is stored as a json text column
                if cellValue != None and cellValue != "":
                    eventDict[colName] = json.loads(cellValue)
                else:
                    eventDict[colName] = {}
            else:
                eventDict[colName] = cellValue
        if not self._processChangeEvent(eventDict):
            debug.debug("event %d not synced OK" % eventDict["id"])
            allOk = False
    return allOk
def queryAndProcess( self, onlyEventIDs = None ): """ queries and processes events from shotgun this method is mainly used from the run method but also for testing purposes """ # for temporary objects only # query first changeEvents, and then tempObject # tempObjects need then to be created first query = "SELECT * FROM \"ChangeEventsToShotgun\" WHERE NOT processed" if onlyEventIDs != None: query += " AND id = ANY(%s)" query += " ORDER BY created ASC, id ASC" self.cur.execute( query, (onlyEventIDs, ) ) else: query += " ORDER BY created ASC, id ASC" self.cur.execute( query ) descriptions = self.cur.description allOk = True for result in self.cur: eventDict = {} for i in range( len( descriptions ) ): if descriptions[i][0] == "changed_values": if result[i] != None and result[i] != "" : eventDict[descriptions[i][0]] = json.loads( result[i] ) else: eventDict[descriptions[i][0]] = {} else: eventDict[descriptions[i][0]] = result[i] stateOk = self._processChangeEvent( eventDict ) if not stateOk: debug.debug( "event %d not synced OK" % eventDict["id"] ) allOk = False return allOk
def save(self):
    """
    save this objects state to database.
    creates a new record or updates the existing record

    Returns True in every case.
    """
    if self.isConsistent():
        debug.debug("nothing changed")
        return True
    debug.debug("changing localID: %s" % str(self.getLocalID()))
    localID = self.getLocalID()
    if localID == None or localID == shotgun_replica.UNKNOWN_SHOTGUN_ID:
        # insert entity in local database
        entity_manipulation.createEntity(self)
    else:
        changes = {}
        for attribute_name in self._changed_values:
            changes[attribute_name] = object.__getattribute__(self, attribute_name)
        entity_manipulation.changeEntity(self, changes)
    # FIX: clear the pending-change list on BOTH paths; previously a
    # create left stale entries behind (the other copy of save in this
    # file already does this)
    self._changed_values = []
    return True
def getConnectionEntityAttrName(baseEntityType, linkedEntityType, connEntityName):
    """return the attribute-names of the connection-entity"""
    baseAttrName = replaceCapitalsWithUnderscores(baseEntityType)
    linkedAttrName = replaceCapitalsWithUnderscores(linkedEntityType)
    debug.debug((baseAttrName, linkedAttrName))
    if baseAttrName != linkedAttrName:
        # the HumanUser side is simply called "user" on connections
        if linkedAttrName == "human_user":
            linkedAttrName = "user"
        return (baseAttrName, linkedAttrName)
    # self-referencing connection: try the source_/dest_ prefixed pair
    # first, then the <attr>/parent pair
    theclass = connectors.getClassOfType(connEntityName)
    srcName = "source_" + baseAttrName
    dstName = "dest_" + linkedAttrName
    if (theclass.shotgun_fields.has_key(srcName)
            and theclass.shotgun_fields.has_key(dstName)):
        debug.debug((srcName, dstName))
        return (srcName, dstName)
    if (theclass.shotgun_fields.has_key(baseAttrName)
            and theclass.shotgun_fields.has_key("parent")):
        return (baseAttrName, "parent")
    # NOTE(review): implicitly returns None when no pairing matches
def save(self):
    """
    save this objects state to database.
    creates a new record or updates the existing record
    """
    if self.isConsistent():
        debug.debug("nothing changed")
        return True
    debug.debug("changing localID: %s" % str(self.getLocalID()))
    if self.getLocalID() in (None, shotgun_replica.UNKNOWN_SHOTGUN_ID):
        # brand-new entity: insert it into the local database
        entity_manipulation.createEntity(self)
    else:
        # gather the raw values of every attribute marked dirty
        changed = dict(
            (name, object.__getattribute__(self, name))
            for name in self._changed_values)
        entity_manipulation.changeEntity(self, changed)
    self._changed_values = []
    return True
def _getDBFields(entityType, entityName):
    """Return a {column_name: data_type} mapping for entityType's table,
    creating the table (plus its standard indexes and a table comment)
    when it does not exist yet."""
    conn = connectors.getDBConnection()
    queryCur = conn.cursor()
    createCur = conn.cursor()
    queryCur.execute(
        """SELECT * FROM information_schema.columns
        WHERE table_catalog=%s AND table_name=%s
        ORDER BY ordinal_position""", (config.DB_DATABASE, entityType))
    allFields = {}
    for record in queryCur:
        columnName = record[3]
        dataType = record[7]
        if dataType == "USER-DEFINED":
            # presumably the udt_name column — TODO confirm index 27
            dataType = record[27]
        elif dataType == "ARRAY":
            # array udt names look like "_entity_sync"; strip the
            # underscore and append "[]" — TODO confirm index 27
            dataType = record[27][1:] + "[]"
        allFields[columnName] = dataType
    if queryCur.rowcount == 0:
        # table missing: create it with only the serial primary key
        statement = "CREATE TABLE \"%s\" (%s)" % (
            entityType, "\"__local_id\" SERIAL PRIMARY KEY")
        debug.debug(statement)
        createCur.execute(statement)
        for column in ["__retired", "id", "__local_id", "sg_link"]:
            indexName = "%s_%s_idx" % (entityType.lower(), column.lower())
            statement = "CREATE INDEX %s ON \"%s\" (\"%s\")" % (
                indexName, entityType, column)
            try:
                debug.debug(statement)
                createCur.execute(statement)
            except psycopg2.ProgrammingError:
                debug.debug("%s of %s does not exist or index already available"
                            % (column, entityType))
        if entityName != entityType:
            statement = "COMMENT ON TABLE \"%s\" IS 'Entity name: %s'" % (
                entityType, entityName)
        else:
            statement = "COMMENT ON TABLE \"%s\" IS ''" % (entityType)
        createCur.execute(statement)
    queryCur.close()
    createCur.close()
    return allFields
def _getDBFields(entityType, entityName):
    """Return a {column_name: data_type} mapping for entityType's table.

    When the table does not exist yet it is created with a serial
    "__local_id" primary key, the standard indexes, and a table comment.
    """
    conn = connectors.getDBConnection()
    queryCur = conn.cursor()
    createCur = conn.cursor()
    queryCur.execute(
        """SELECT * FROM information_schema.columns WHERE table_catalog=%s AND table_name=%s ORDER BY ordinal_position""",
        (config.DB_DATABASE, entityType))
    allFields = {}
    for record in queryCur:
        column_name = record[3]
        data_type = record[7]
        if data_type == "USER-DEFINED":
            #debug(queryCur.description)
            #debug(record)
            # presumably the udt_name column — TODO confirm index 27
            data_type = record[27]
        elif data_type == "ARRAY":
            # array udt names look like "_<udt>": drop the underscore, add []
            data_type = record[27][1:] + "[]"
        allFields[column_name] = data_type
    if queryCur.rowcount == 0:
        # table missing: create it with only the serial primary key
        fieldstr = "\"__local_id\" SERIAL PRIMARY KEY"
        query = "CREATE TABLE \"%s\" (%s)" % (entityType, fieldstr)
        debug.debug(query)
        createCur.execute(query)
        for column in ["__retired", "id", "__local_id", "sg_link"]:
            query = "CREATE INDEX %s ON \"%s\" (\"%s\")" % (
                "%s_%s_idx" % (entityType.lower(), column.lower()),
                entityType, column)
            try:
                debug.debug(query)
                createCur.execute(query)
            except psycopg2.ProgrammingError:
                # column absent on this table or index already created
                debug.debug("%s of %s does not exist or index already available"
                            % (column, entityType))
        if entityName != entityType:
            query = "COMMENT ON TABLE \"%s\" IS 'Entity name: %s'" % (entityType, entityName);
        else:
            query = "COMMENT ON TABLE \"%s\" IS ''" % (entityType);
        createCur.execute(query)
    queryCur.close()
    createCur.close()
    return allFields
def _createDBFields(entitycode, fieldDefs, entityname):
    """Bring the entitycode table's columns in line with the shotgun
    field definitions, adding or retyping columns as needed."""
    conn = connectors.getDBConnection()
    createCur = conn.cursor()
    dbFields = _getDBFields(entitycode, entityname)
    # make sure the soft-delete marker column exists
    if not dbFields.has_key("__retired"):
        createCur.execute(
            "ALTER TABLE \"%s\" ADD COLUMN \"__retired\" BOOLEAN DEFAULT 'f'"
            % entitycode)
    for fieldName in fieldDefs.keys():
        sgType = fieldDefs[fieldName]["data_type"]["value"]
        pgType = connectors.getPgType(sgType)
        if pgType == None:
            debug.debug("field %s.%s (%s) not handled"
                        % (entitycode, fieldName, sgType))
        elif not dbFields.has_key(fieldName):
            statement = ("ALTER TABLE \"" + entitycode + "\" ADD COLUMN \""
                         + str(fieldName) + "\" " + pgType)
            debug.debug(statement)
            createCur.execute(statement)
        elif dbFields[fieldName] != pgType:
            debug.debug("changing type %s to %s" % (dbFields[fieldName], pgType))
            statement = ("ALTER TABLE \"" + entitycode + "\" ALTER COLUMN \""
                         + str(fieldName) + "\" TYPE " + pgType)
            debug.debug(statement)
            createCur.execute(statement)
def saveShotgunImageLocally(url):
    """save shotgun image locally

    Downloads url and writes it under the local thumbnail path; returns
    the absolute path written, or None when url is not a string.
    """
    if type(url) not in [str, unicode]:
        return None
    debug.debug("loading: " + url)
    http = Http()
    [response, content] = http.request(url, "GET")
    debug.debug(response)
    [path, filename] = __getPathFromImageUrl(url)
    savedAt = __getAbsShotgunImagePath(path, filename)
    debug.debug(savedAt)
    # FIX: images are binary — write in "wb" so the payload is not
    # mangled by newline translation; close even if write fails
    imagefile = open(savedAt, "wb")
    try:
        imagefile.write(content)
    finally:
        imagefile.close()
    return savedAt
def saveShotgunImageLocally(url):
    """save shotgun image locally

    Downloads url, writes it group-writable under the local thumbnail
    path and returns the absolute path, or None for non-string urls.
    """
    if type(url) not in [str, unicode]:
        return None
    debug.debug("loading: " + url)
    http = Http()
    [response, content] = http.request(url, "GET")
    debug.debug(response)
    [path, filename] = __getPathFromImageUrl(url)
    savedAt = __getAbsShotgunImagePath(path, filename)
    debug.debug(savedAt)
    # relax the umask so the file is created group-writable; restore it
    # even if the write fails
    oldumask = os.umask(0o002)
    try:
        # FIX: images are binary — write in "wb" so the payload is not
        # mangled by newline translation
        imagefile = open(savedAt, "wb")
        try:
            imagefile.write(content)
        finally:
            imagefile.close()
    finally:
        os.umask(oldumask)
    os.chmod(savedAt, 0o664)
    return savedAt
def testSyncomaniaSettingsChange(self):
    """A status change in shotgun must produce exactly one new event log
    entry and be synced onto the local task by a sync run."""
    lastevent = self.sg.find(
        "EventLogEntry",
        filters=[],
        fields=['id', 'event_type', 'attribute_name', 'meta', 'entity'],
        order=[{
            'column': 'id',
            'direction': 'desc'
        }],
        filter_operator='all',
        limit=1)[0]
    debug.debug(lastevent)
    lastID = lastevent["id"]
    ret = self.sg.update("Task", testTaskID, {"sg_status_list": NEWVALUE})
    debug.debug(ret)
    # re-query the newest event after the update
    newevent = self.sg.find(
        "EventLogEntry",
        filters=[],
        fields=['id', 'event_type', 'attribute_name', 'meta', 'entity'],
        order=[{
            'column': 'id',
            'direction': 'desc'
        }],
        filter_operator='all',
        limit=1)[0]
    debug.debug(newevent)
    # exactly one new event, about our task, carrying the new value
    self.failUnlessEqual(newevent["entity"]["id"], testTaskID)
    self.failUnlessEqual(newevent["meta"]["new_value"], NEWVALUE)
    self.failUnlessEqual(newevent["id"], lastID + 1)
    self.assertTrue(self.shotgun2local.connectAndRun(),
                    "synch not successful")
    task = getObject("Task", remote_id=testTaskID)
    self.assertEqual(NEWVALUE, task.sg_status_list)
def _processChangeEvent(self, changeEvent):
    """ processes change-events

    Dispatches on changeEvent["task"] to the matching handler and
    returns its success flag (False for unknown task values).
    """
    debug.debug(changeEvent)
    corr_entity = changeEvent["corr_entity"]
    # task value -> (log message prefix, handler method)
    handlers = {
        "creation": ("creating entity", self._createEntity),
        "change": ("changing entity", self._changeEntity),
        "deletion": ("deleting entity", self._deleteEntity),
        "addLink": ("adding link:", self._changeEntity),
        "removeLink": ("removing link:", self._changeEntity),
    }
    task = changeEvent["task"]
    if task not in handlers:
        return False
    (verb, handler) = handlers[task]
    debug.debug((verb + " %s with local ID %d")
                % (corr_entity.type, corr_entity.local_id))
    return handler(changeEvent)
def changeInDB(self, entity, attribute=None, value=None, changes=None,
               doAppend=False, doRemove=False):
    """ change something in couchdb and do a reload and retry if it fails due to changed doc

    Three modes:
      * changes dict given  -> multi-column UPDATE with per-field
        shotgun->postgres value conversion
      * attribute given with doAppend -> append value to an array column
      * attribute given with doRemove -> remove value from an array column
      * attribute given alone -> single-column UPDATE (with conversion)
    entity may be a dict, a PostgresEntityType or a ShotgunBaseEntity;
    rows are matched on local and/or remote id.
    """
    entityLocalID = None
    # normalise the three accepted entity representations
    if type(entity) == dict:
        entityType = entity["type"]
        entityID = entity["id"]
    elif type(entity) == PostgresEntityType:
        entityType = entity.type
        entityID = entity.remote_id
        entityLocalID = entity.local_id
    elif isinstance(entity, base_entity.ShotgunBaseEntity):
        entityType = entity.getType()
        entityID = entity.getRemoteID()
        entityLocalID = entity.getLocalID()
    cur = self.con.cursor()
    classOfType = getClassOfType(entityType)
    if changes != None:
        keys = changes.keys()
        values = []
        for attr in keys:
            # convert each value through the field's sg->pg converter, if any
            convFunc = None
            if classOfType.shotgun_fields.has_key(attr):
                sgType = classOfType.shotgun_fields[attr]["data_type"]["value"]
                convFunc = getConversionSg2Pg(sgType)
            newValue = changes[attr]
            if convFunc != None:
                newValue = convFunc(changes[attr])
            values.append(newValue)
        query = "UPDATE \"%s\" SET " % entityType
        changeArr = ["\"%s\" = %s" % (x, "%s") for x in keys]
        query += ", ".join(changeArr)
        # match on whichever ids are known
        filters = []
        if entityLocalID != None and entityLocalID != UNKNOWN_SHOTGUN_ID:
            filters.append("__local_id=%s")
            values.append(entityLocalID)
        if entityID != None and entityID != UNKNOWN_SHOTGUN_ID:
            filters.append("id=%s")
            values.append(entityID)
        query += " WHERE (" + " OR ".join(filters) + " )"
        debug.debug(cur.mogrify(query, values))
        cur.execute(query, values)
    elif (attribute != None):
        values = []
        if doAppend:
            # array append: col = col || value
            query = "UPDATE \"%s\" SET " % entityType
            query += "\"%s\" = \"%s\" || %s" % (attribute, attribute, "%s")
            if isinstance(value, PostgresEntityType):
                values = [value, ]
            elif isinstance(value, base_entity.ShotgunBaseEntity):
                values = [value.getPgObj(), ]
            else:
                raise Exception("unknown format for appending: %s" % type(value))
        elif doRemove:
            debug.debug("removing: ")
            fieldvalue = entity.getField(attribute)
            debug.debug(entity.getField(attribute))
            debug.debug(value)
            # NOTE(review): when value is not present in the field, query
            # stays unbound and the execute below would raise — confirm
            # callers only request removal of existing links
            if fieldvalue != None and len(fieldvalue) > 0 \
                and value in fieldvalue:
                fieldvalue.remove(value)
                theList = [x.getPgObj() for x in fieldvalue]
                query = "UPDATE \"%s\" SET " % entityType
                query += "\"%s\" = %s" % (attribute, "%s")
                values = [theList, ]
        else:
            # plain single-column update with optional value conversion
            query = "UPDATE \"%s\" SET " % entityType
            query += "\"%s\" = %s" % (attribute, "%s")
            convFunc = None
            if classOfType.shotgun_fields.has_key(attribute):
                sgType = classOfType.shotgun_fields[attribute]["data_type"]["value"]
                convFunc = getConversionSg2Pg(sgType)
            if convFunc != None:
                values = [convFunc(value), ]
            else:
                values = [value, ]
        filters = []
        if entityLocalID != None and entityLocalID != UNKNOWN_SHOTGUN_ID:
            filters.append("__local_id=%s")
            values.append(entityLocalID)
        if entityID != None and entityID != UNKNOWN_SHOTGUN_ID:
            filters.append("id=%s")
            values.append(entityID)
        if len(filters) > 0:
            query += " WHERE (" + " OR ".join(filters) + " )"
        debug.debug(query)
        debug.debug(values)
        debug.debug(cur.mogrify(query, values))
        cur.execute(query, values)
    cur.close()
    self.con.commit()
def importEntities(conn, cur, sg):
    """Wipe and re-import all shotgun entities into the local database.

    Honours UPDATE_ONLY (restricts to the listed types when non-empty)
    and always skips EventLogEntry.
    """
    debug.debug("starting import Entities", debug.INFO)
    entities = sg.schema_entity_read()
    for entityType in sorted(entities.keys()):
        if entityType in ["EventLogEntry"]:
            continue
        if len(UPDATE_ONLY) > 0 and entityType not in UPDATE_ONLY:
            continue
        entityName = cleanSysName(entities[entityType]["name"]["value"])
        if entityType.endswith("Connection"):
            # connection tables keep their system name
            entityName = entityType
        debug.debug("import entities of type " + entityType)
        fieldList = connectors.getClassOfType(entityName).shotgun_fields
        debug.debug("deleting entities of type " + entityType)
        cur.execute("DELETE FROM \"%s\"" % (entityType))
        debug.debug("loading entities of type " + entityType)
        objects = sg.find(entityType, [["id", "greater_than", 0]],
                          fieldList.keys())
        for obj in objects:
            values = []
            names = []
            reprs = []
            for fieldName in fieldList.keys():
                sgType = fieldList[fieldName]['data_type']['value']
                convFunc = connectors.getConversionSg2Pg(sgType)
                # only fields with a known conversion are stored
                if convFunc != None:
                    names.append("\"%s\"" % fieldName)
                    if sgType == "image" and obj[fieldName] != None:
                        thumbnails.saveShotgunImageLocally(obj[fieldName])
                    if sgType == "multi_entity":
                        reprs.append("%s::entity_sync[]")
                    else:
                        reprs.append("%s")
                    values.append(convFunc(obj[fieldName]))
            query = "INSERT INTO \"%s\" (%s) VALUES (%s)" % (
                entityType, ", ".join(names), ", ".join(reprs))
            debug.debug(cur.mogrify(str(query), values), debug.DEBUG)
            cur.execute(query, values)
        conn.commit()
    # FIX: log message typo — was "finnished"
    debug.debug("finished import Entities", debug.INFO)
def test_retrieval_simple(self):
    """Download the test node's thumbnail and check the file is fresh."""
    # Fetch the image to a local path, then inspect the file's mtime.
    stored_path = thumbnails.saveShotgunImageLocally(self.testnode.image)
    stat_info = os.stat(stored_path)
    debug.debug(stat_info.st_mtime)
    # The file must have been (re)written within the last 20 seconds.
    freshness_cutoff = time.time() - 20
    self.assertTrue(stat_info.st_mtime > freshness_cutoff,
                    "file does not seem to have been written lately")
def changeEntity( myObj, changes ):
    """change entity in local database and add corresponding change-events
    for shotgun-sync

    myObj   -- the entity object to change (ShotgunBaseEntity-like)
    changes -- dict {attribute: newvalue}; mutated in place so every value
               becomes json-serializable before the change-event is stored
    Returns myObj.
    """
    # myObj.reload()
    src = connectors.DatabaseModificator()
    # apply the change to the local Postgres mirror first
    src.changeInDB( myObj, changes = changes )
    for ( key, value ) in changes.iteritems():
        # normalize each value to a json-friendly representation
        if type( value ) == datetime.datetime:
            changes[key] = value.strftime( "%Y-%m-%d %H:%M:%S" )
        # NOTE(review): the date check starts a new if/elif chain, so a
        # datetime value above falls through all branches below unchanged
        if type( value ) == datetime.date:
            changes[key] = value.strftime( "%Y-%m-%d" )
        elif type( value ) == datetime.timedelta:
            # timedelta is flattened to fractional hours
            changes[key] = float( value.days ) * 24 + float( value.seconds ) / 3600
        elif type( value ) == connectors.PostgresEntityType:
            changes[key] = value.getShortDict()
        elif isinstance( value, base_entity.ShotgunBaseEntity ):
            changes[key] = value.getShortDict()
        elif type( value ) == type( [] ):
            # lists may mix entities and plain values; entities are
            # replaced by their short-dict form
            changes[key] = []
            for entry in value:
                if isinstance( entry, base_entity.ShotgunBaseEntity ) or type( entry ) == connectors.PostgresEntityType:
                    changes[key].append( entry.getShortDict() )
                else:
                    changes[key].append( entry )
        attributeName = key
        fieldValues = value
        entityType = myObj.getType()
        # multi-entity attributes are mirrored through dedicated
        # connection entities; None means this attribute has none
        connEntityName = entityNaming.getConnectionEntityName( entityType, attributeName )
        if connEntityName != None:
            reverseAttribute = entityNaming.getReverseAttributeName( entityType, attributeName )
            linkedEntityType = myObj.shotgun_fields[attributeName]["properties"]["valid_types"]["value"][0]
            baseEntityType = entityType
            ( baseAttrName, linkedAttrName ) = entityNaming.getConnectionEntityAttrName( baseEntityType, linkedEntityType, connEntityName )
            basePgObj = myObj.getPgObj()
            # get connections
            filters = "%s=%s" % ( baseAttrName, "%s" )
            filterValues = [ basePgObj ]
            connections = factories.getObjects( connEntityName, filters, filterValues )
            # create new connection entities
            for linkedDict in changes[key]:
                linkedPostgresObj = getPgObj( linkedDict )
                fieldNames = [ baseAttrName, linkedAttrName ]
                fieldValues = [ basePgObj, linkedPostgresObj ]
                # check if existing first
                connectionExists = False
                for i in range( len( connections ) ):
                    connection = connections[i]
                    if connection.getRawField( linkedAttrName ) == linkedPostgresObj:
                        # still linked: drop from the candidate-delete list
                        connections.remove( connection )
                        connectionExists = True
                        break
                if not connectionExists:
                    debug.debug( dict( zip( fieldNames, fieldValues ) ), prefix = "OOOOOOOOO" )
                    src._addToDatabase( connEntityName, fieldValues, fieldNames )
                    # setting reverse attribute as well
                    linkedObj = factories.getObject( linkedDict["type"], local_id = linkedDict["__local_id"], remote_id = linkedDict["id"] )
                    retValues = linkedObj.getRawField( reverseAttribute )
                    if retValues == None:
                        retValues = []
                    if basePgObj not in retValues:
                        retValues.append( basePgObj )
                    src.changeInDB( linkedObj, reverseAttribute, retValues )
            # delete unused connection entities
            for connection in connections:
                # whatever is left in connections is no longer referenced
                linkedObj = connection.getField( linkedAttrName )
                retValues = linkedObj.getRawField( reverseAttribute )
                retValues.remove( basePgObj )
                src.changeInDB( linkedObj, reverseAttribute, retValues )
                src.delete( connection )
    # record the (now json-serializable) change for the shotgun sync daemon
    _createChangeEvent( src, "change", corr_entity = myObj.getPgObj(), changed_values = json.dumps( changes ) )
    return myObj
def changeInDB(self, entity, attribute=None, value=None, changes=None,
               doAppend=False, doRemove=False):
    """Update one row of the local Postgres mirror.

    NOTE(review): the previous docstring said "couchdb"; everything below
    talks to Postgres through ``self.con``.

    entity    -- dict with "type"/"id", a PostgresEntityType or a
                 ShotgunBaseEntity; identifies the row to update
    changes   -- dict {fieldname: newvalue} for a multi-column update;
                 when given, the attribute/value modes are ignored
    attribute -- single column to update (used when ``changes`` is None)
    value     -- new value for ``attribute``
    doAppend  -- append ``value`` to the array column ``attribute``
    doRemove  -- remove ``value`` from the array column ``attribute``
    """
    entityLocalID = None
    # extract type / remote id / local id from whichever form was passed
    if type(entity) == dict:
        # plain dicts carry no local id
        entityType = entity["type"]
        entityID = entity["id"]
    elif type(entity) == PostgresEntityType:
        entityType = entity.type
        entityID = entity.remote_id
        entityLocalID = entity.local_id
    elif isinstance(entity, base_entity.ShotgunBaseEntity):
        entityType = entity.getType()
        entityID = entity.getRemoteID()
        entityLocalID = entity.getLocalID()
    cur = self.con.cursor()
    classOfType = getClassOfType(entityType)
    if changes != None:
        # multi-column update: run every value through its
        # Shotgun->Postgres conversion function where one exists
        keys = changes.keys()
        values = []
        for attr in keys:
            convFunc = None
            if classOfType.shotgun_fields.has_key(attr):
                sgType = classOfType.shotgun_fields[attr]["data_type"][
                    "value"]
                convFunc = getConversionSg2Pg(sgType)
            newValue = changes[attr]
            if convFunc != None:
                newValue = convFunc(changes[attr])
            values.append(newValue)
        query = "UPDATE \"%s\" SET " % entityType
        changeArr = ["\"%s\" = %s" % (x, "%s") for x in keys]
        query += ", ".join(changeArr)
        # match the row by local id and/or remote id, whichever is known
        filters = []
        if entityLocalID != None and entityLocalID != UNKNOWN_SHOTGUN_ID:
            filters.append("__local_id=%s")
            values.append(entityLocalID)
        if entityID != None and entityID != UNKNOWN_SHOTGUN_ID:
            filters.append("id=%s")
            values.append(entityID)
        # NOTE(review): unlike the single-attribute branch below, this
        # builds the WHERE clause unconditionally -- with no usable id the
        # resulting "WHERE ( )" is invalid SQL; confirm callers always
        # supply at least one id
        query += " WHERE (" + " OR ".join(filters) + " )"
        debug.debug(cur.mogrify(query, values))
        cur.execute(query, values)
    elif (attribute != None):
        values = []
        if doAppend:
            # append to an array column via the Postgres || operator
            query = "UPDATE \"%s\" SET " % entityType
            query += "\"%s\" = \"%s\" || %s" % (attribute, attribute, "%s")
            if isinstance(value, PostgresEntityType):
                values = [
                    value,
                ]
            elif isinstance(value, base_entity.ShotgunBaseEntity):
                values = [
                    value.getPgObj(),
                ]
            else:
                raise Exception("unknown format for appending: %s" %
                                type(value))
        elif doRemove:
            # remove: drop the value from the current field contents and
            # rewrite the whole array column
            debug.debug("removing: ")
            fieldvalue = entity.getField(attribute)
            debug.debug(entity.getField(attribute))
            debug.debug(value)
            if fieldvalue != None and len(fieldvalue) > 0 \
                    and value in fieldvalue:
                fieldvalue.remove(value)
            # NOTE(review): if fieldvalue is None the comprehension below
            # raises TypeError -- presumably callers only remove from
            # populated fields; verify
            theList = [x.getPgObj() for x in fieldvalue]
            query = "UPDATE \"%s\" SET " % entityType
            query += "\"%s\" = %s" % (attribute, "%s")
            values = [
                theList,
            ]
        else:
            # plain single-column assignment with optional conversion
            query = "UPDATE \"%s\" SET " % entityType
            query += "\"%s\" = %s" % (attribute, "%s")
            convFunc = None
            if classOfType.shotgun_fields.has_key(attribute):
                sgType = classOfType.shotgun_fields[attribute][
                    "data_type"]["value"]
                convFunc = getConversionSg2Pg(sgType)
            if convFunc != None:
                values = [
                    convFunc(value),
                ]
            else:
                values = [
                    value,
                ]
        # match the row by local id and/or remote id, whichever is known
        filters = []
        if entityLocalID != None and entityLocalID != UNKNOWN_SHOTGUN_ID:
            filters.append("__local_id=%s")
            values.append(entityLocalID)
        if entityID != None and entityID != UNKNOWN_SHOTGUN_ID:
            filters.append("id=%s")
            values.append(entityID)
        if len(filters) > 0:
            query += " WHERE (" + " OR ".join(filters) + " )"
        debug.debug(query)
        debug.debug(values)
        debug.debug(cur.mogrify(query, values))
        cur.execute(query, values)
    cur.close()
    self.con.commit()
def importEntities(conn, cur, sg):
    """Mirror every syncable Shotgun entity type into the local Postgres DB.

    For each type in the Shotgun schema (event log entries excluded, and
    restricted to UPDATE_ONLY when that list is non-empty) the local table
    is wiped and re-filled from a full Shotgun query.

    conn -- open database connection (committed once per entity type)
    cur  -- database cursor used for all statements
    sg   -- connected Shotgun API handle
    """
    debug.debug("starting import Entities", debug.INFO)
    schema = sg.schema_entity_read()
    typeNames = schema.keys()
    typeNames.sort()
    for typeName in typeNames:
        # event log entries are never mirrored
        if typeName in ["EventLogEntry"]:
            continue
        # a non-empty UPDATE_ONLY acts as a whitelist of types
        if len(UPDATE_ONLY) > 0 and typeName not in UPDATE_ONLY:
            continue
        className = cleanSysName(schema[typeName]["name"]["value"])
        # connection tables keep their raw system name
        if typeName.endswith("Connection"):
            className = typeName
        debug.debug("import entities of type " + typeName)
        fieldDefs = connectors.getClassOfType(className).shotgun_fields
        debug.debug("deleting entities of type " + typeName)
        cur.execute('DELETE FROM "%s"' % (typeName))
        debug.debug("loading entities of type " + typeName)
        rows = sg.find(typeName, [["id", "greater_than", 0]],
                       fieldDefs.keys())
        for row in rows:
            columnNames = []
            placeholders = []
            columnValues = []
            for fieldName in fieldDefs.keys():
                sgType = fieldDefs[fieldName]["data_type"]["value"]
                converter = connectors.getConversionSg2Pg(sgType)
                if converter == None:
                    # fields without a Shotgun->Postgres conversion are
                    # not stored locally
                    continue
                columnNames.append('"%s"' % fieldName)
                # thumbnails are additionally cached on local disk
                if sgType == "image" and row[fieldName] != None:
                    thumbnails.saveShotgunImageLocally(row[fieldName])
                # multi-entity links need an explicit array cast
                if sgType == "multi_entity":
                    placeholders.append("%s::entity_sync[]")
                else:
                    placeholders.append("%s")
                columnValues.append(converter(row[fieldName]))
            insert = 'INSERT INTO "%s" (%s) VALUES (%s)' % (
                typeName, ", ".join(columnNames), ", ".join(placeholders))
            debug.debug(cur.mogrify(str(insert), columnValues), debug.DEBUG)
            cur.execute(insert, columnValues)
        conn.commit()
    debug.debug("finnished import Entities", debug.INFO)
def _changeEntity( self, event ): """ process a change entity event """ entity = event["corr_entity"] entityObj = getObject( entity.type, remote_id = entity.remote_id, local_id = entity.local_id, includeRetireds = True ) if entityObj == None: exception = "Object not available %s local:%s remote:%s\n\n" % ( str( entity.type ), str( entity.local_id ), str( entity.remote_id ) ) self._setProcessed( event, exception = exception ) return False data = event["changed_values"] fieldDefs = connectors.getClassOfType( entity.type ).shotgun_fields hasFields = True for attribute in data.keys(): if not fieldDefs.has_key( attribute ): hasFields = False if not hasFields: exception = "some fields not available %s local:%s remote:%s" % ( str( entity.type ), str( entity.local_id ), str( entity.remote_id ) ) self._setProcessed( event, exception = exception ) return False else: for attribute in data.keys(): dataType = fieldDefs[attribute]["data_type"]["value"] value = data[attribute] if dataType == "float": data[attribute] = float( value ) elif dataType == "entity": data[attribute] = getSgObj( value ) elif dataType == "multi_entity": newvalue = [] for sgObj in value: newvalue.append( getSgObj( sgObj ) ) data[attribute] = newvalue elif dataType == "date_time": if type( value ) == type( u"" ): data[attribute] = datetime.datetime.strptime( value, "%Y-%m-%d %H:%M:%S" ) elif dataType == "date": if type( value ) == type( u"" ): data[attribute] = datetime.datetime.strptime( value, "%Y-%m-%d" ).date() elif dataType == "duration": if type( value ) == float: data[attribute] = int( value * 60 ) if fieldDefs.has_key( "sg_remotely_updated_by" ): data["sg_remotely_updated_by"] = event["updated_by"].getSgObj() try: debug.debug( data ) if entityObj.getType().endswith( "Connection" ) and entityObj.getRemoteID() == UNKNOWN_SHOTGUN_ID: remoteID = connectors.getRemoteID( entityObj.getType(), entityObj.getLocalID() ) if remoteID == None or remoteID == UNKNOWN_SHOTGUN_ID: # Connection-Entities need first the 
corresponding remote-id # they get that by the shotgun-event triggered by the event that causes this connection-entity to be created # so we simply have to wait and do nothing (hopefully ;) debug.info( "waiting for a connection-entitiy to appear %s" % ( str( entityObj ), ) ) return True self.sg.update( entityObj.getType(), entityObj.getRemoteID(), data ) self._setProcessed( event ) return True except shotgun_api3.Fault, fault: #event["type"] = "CouchdbChangeEvents" exception = "Error %s" % ( str( fault ) ) self._setProcessed( event, exception = exception ) return False
def _processChangeEvent(self, changeEvent):
    """ processes change-events """
    success = False
    debug.debug(changeEvent)
    corr_entity = changeEvent["corr_entity"]
    # map each task name to its log template and handler method;
    # link changes are handled like ordinary entity changes
    dispatch = {
        "creation": ("creating entity %s with local ID %d",
                     self._createEntity),
        "change": ("changing entity %s with local ID %d",
                   self._changeEntity),
        "deletion": ("deleting entity %s with local ID %d",
                     self._deleteEntity),
        "addLink": ("adding link: %s with local ID %d",
                    self._changeEntity),
        "removeLink": ("removing link: %s with local ID %d",
                       self._changeEntity),
    }
    task = changeEvent["task"]
    if task in dispatch:
        template, handler = dispatch[task]
        debug.debug(template % (corr_entity.type, corr_entity.local_id))
        success = handler(changeEvent)
    # unknown tasks fall through and report failure
    return success
def _changeEntity(self, event): """ process a change entity event """ entity = event["corr_entity"] entityObj = getObject(entity.type, remote_id=entity.remote_id, local_id=entity.local_id, includeRetireds=True) if entityObj == None: exception = "Object not available %s local:%s remote:%s\n\n" % ( str(entity.type), str(entity.local_id), str(entity.remote_id)) self._setProcessed(event, exception=exception) return False data = event["changed_values"] fieldDefs = connectors.getClassOfType(entity.type).shotgun_fields hasFields = True for attribute in data.keys(): if not fieldDefs.has_key(attribute): hasFields = False if not hasFields: exception = "some fields not available %s local:%s remote:%s" % ( str(entity.type), str(entity.local_id), str(entity.remote_id)) self._setProcessed(event, exception=exception) return False else: for attribute in data.keys(): dataType = fieldDefs[attribute]["data_type"]["value"] value = data[attribute] if value == None: continue if dataType == "float": data[attribute] = float(value) elif dataType == "entity": data[attribute] = getSgObj(value) elif dataType == "multi_entity": newvalue = [] for sgObj in value: newvalue.append(getSgObj(sgObj)) data[attribute] = newvalue elif dataType == "date_time": if type(value) == unicode or type(value) == str: data[attribute] = datetime.datetime.strptime( value, "%Y-%m-%d %H:%M:%S") if value.tzinfo == None: from pytz import timezone zurich = timezone("Europe/Zurich") value = zurich.localize(value) elif dataType == "date": if type(value) == unicode or type(value) == str: data[attribute] = datetime.datetime.strptime( value, "%Y-%m-%d").date() elif dataType == "duration": if type(value) == float: data[attribute] = int(value * 60) if fieldDefs.has_key( "sg_remotely_updated_by") and event["updated_by"] != None: data["sg_remotely_updated_by"] = event["updated_by"].getSgObj() try: debug.debug(data) if entityObj.getType().endswith( "Connection") and entityObj.getRemoteID( ) == UNKNOWN_SHOTGUN_ID: remoteID = 
connectors.getRemoteID(entityObj.getType(), entityObj.getLocalID()) if remoteID == None or remoteID == UNKNOWN_SHOTGUN_ID: # Connection-Entities need first the corresponding remote-id # they get that by the shotgun-event triggered by the event that causes this connection-entity to be created # so we simply have to wait and do nothing (hopefully ;) debug.info( "waiting for a connection-entitiy to appear %s" % (str(entityObj), )) return True self.sg.update(entityObj.getType(), entityObj.getRemoteID(), data) self._setProcessed(event) return True except shotgun_api3.Fault, fault: #event["type"] = "CouchdbChangeEvents" exception = "Error %s" % (str(fault)) self._setProcessed(event, exception=exception) return False