def app(sourcedb, table_columns, version_filename=False, output_directory=None, log_directory=None, execute=False):
    logging.basicConfig(level=logging.DEBUG, format='%(levelname)s - [%(asctime)s]:%(message)s')
    options = {}
    try:
        # table_columns is expected to be a Python literal (e.g. a dict mapping
        # table names to column lists); ast.literal_eval parses it without
        # executing arbitrary code the way eval() would.
        import ast
        options['table_columns'] = ast.literal_eval(table_columns)
    except (ValueError, SyntaxError):
        logging.error('Table options format error. Exiting.')
        return 1
    options['sourcedb'] = sourcedb
    options['version_filename'] = version_filename
    options['output_directory'] = output_directory
    options['log_directory'] = log_directory
    options['execute'] = execute

    source_info = schema.parse_database_url(sourcedb)
    if (source_info.get('protocol') or '').upper() != 'MYSQL':
        logging.error('Source database must be a MySQL database. Exiting.')
        return 1
    if not source_info.get('db'):
        logging.error('Source database name not provided. Exiting.')
        return 1
    source_obj = schema.DataBaseConnection()
    source_obj.connect(sourcedb)
    # Compare major version numbers; a plain string comparison would
    # mis-order versions such as '10.x' relative to '5.0.0'.
    if int(source_obj.version.split('.')[0]) < 5:
        logging.error('Source database MySQL version is too low; please upgrade MySQL.')
        source_obj.close()
        return 1
    # Output clean-up filters applied by PatchBuffer to each patch line.
    filters = (
        lambda d: utils.REGEX_MULTI_SPACE.sub(' ', d),
        lambda d: utils.REGEX_DISTANT_SEMICOLIN.sub(';', d),
    )
    p_fname = utils.create_pnames(source_obj.db, date_format=DATE_FORMAT, prefix='data_mix_')
    pBuffer = utils.PatchBuffer(name=os.path.join(output_directory, p_fname), filters=filters, tpl=None, ctx=None, version_filename=version_filename)
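    # mixdata.encrypt_data is expected to yield one SQL statement at a time for
    # the configured table columns; each is written to the patch buffer below.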
    for patch in mixdata.encrypt_data(source_obj, options):
        if patch:
            pBuffer.write(patch + '\n')
    try:
        pBuffer.save()
        logging.info("SQL file has been saved to '{}'.".format(pBuffer.name))
    except OSError as e:
        pBuffer.delete()
        logging.error('Error occurred: {}'.format(e))
        return 1
    finally:
        if source_obj:
            source_obj.close()
    print(pBuffer.name)
    return pBuffer.name
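
# Example invocation of the data-mix patch generator above (a minimal sketch;
# the connection URL, output path and table/column mapping are illustrative
# placeholders, not values taken from this project):
#
#     app('mysql://user:password@127.0.0.1:3306/app_db',
#         "{'users': ['email', 'phone']}",
#         output_directory='/tmp/patches')
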
def app(sourcedb='', targetdb='', version_filename=False, output_directory=None, log_directory=None, tag=None, sync_auto_inc=False, sync_comments=False, ignore_operations=None):
      """Main Application"""
      logging.basicConfig(level=logging.DEBUG, format='%(levelname)s - [%(asctime)s]:%(message)s')
      options = {}
      options['sourcedb'] = sourcedb
      options['targetdb'] = targetdb
      options['version_filename'] = version_filename
      options['output_directory'] = output_directory
      options['log_directory'] = log_directory
      options['tag'] = tag
      options['sync_auto_inc'] = sync_auto_inc
      options['sync_comments'] = sync_comments
      options['ignore_operations'] = ignore_operations
      if not os.path.isabs(output_directory):
          logging.error('Output directory must be an absolute path. Exiting.')
          return 1
      if not os.path.isdir(output_directory):
          logging.error('Output directory does not exist. Exiting.')
          return 1
      if not log_directory or not os.path.isdir(log_directory):
          if log_directory:
              logging.info('Log directory does not exist, writing log to {}'.format(output_directory))
          log_directory = output_directory

      if not sourcedb:
          logging.error('Source database URL not provided. Exiting.')
          return 1
      if not targetdb:
          logging.error('Target database URL not provided. Exiting.')
          return 1
      source_info = schema.parse_database_url(sourcedb)
      if (source_info.get('protocol') or '').upper() != 'MYSQL':
          logging.error('Source database must be a MySQL database. Exiting.')
          return 1
      if not source_info.get('db'):
          logging.error('Source database name not provided. Exiting.')
          return 1
      target_info = schema.parse_database_url(targetdb)
      if (target_info.get('protocol') or '').upper() != 'MYSQL':
          logging.error('Target database must be a MySQL database. Exiting.')
          return 1
      if not target_info.get('db'):
          logging.error('Target database name not provided. Exiting.')
          return 1
      source_obj = schema.DataBaseConnection()
      source_obj.connect(sourcedb)
      # Compare major version numbers; a plain string comparison would
      # mis-order versions such as '10.x' relative to '5.0.0'.
      if int(source_obj.version.split('.')[0]) < 5:
          logging.error('Source database MySQL version is too low; please upgrade MySQL.')
          source_obj.close()
          return 1
      target_obj = schema.DataBaseConnection()
      target_obj.connect(targetdb)
      if int(target_obj.version.split('.')[0]) < 5:
          logging.error('Target database MySQL version is too low; please upgrade MySQL.')
          target_obj.close()
          source_obj.close()
          return 1
      try:
        # Output clean-up filters applied by PatchBuffer to each patch line.
        filters = (
            lambda d: utils.REGEX_MULTI_SPACE.sub(' ', d),
            lambda d: utils.REGEX_DISTANT_SEMICOLIN.sub(';', d),
        )
        p_fname = utils.create_pnames(target_obj.db, tag=tag, date_format=DATE_FORMAT)

        pBuffer = utils.PatchBuffer(name=os.path.join(output_directory, p_fname), filters=filters, tpl=None, ctx=None, version_filename=version_filename)

        db_selected = False

        # syncdb.sync_schema is expected to yield one SQL statement per schema
        # difference; each is appended to the patch buffer below.
        for patch in syncdb.sync_schema(source_obj.databases[source_obj.db], target_obj.databases[target_obj.db], options):
            if patch:
                if not db_selected:
                    pBuffer.write(source_obj.databases[source_obj.db].use() + '\n')
                    db_selected = True
                pBuffer.write(patch + '\n')
        if not pBuffer.modified:
            logging.info('No migration scripts written. mysql://{}/{} and mysql://{}/{} were in sync.'.format(source_obj.host, source_obj.db, target_obj.host, target_obj.db))
            return 0
        else:
            try:
                pBuffer.save()
                logging.info("SQL file has been saved to '{}'.".format(pBuffer.name))
            except OSError as e:
                pBuffer.delete()
                logging.error('Error occurred: {}'.format(e))
                return 1
      finally:
          if source_obj:
              source_obj.close()
          if target_obj:
              target_obj.close()
      print(pBuffer.name)
      return pBuffer.name
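
# Example invocation of the schema-sync patch generator above (a minimal
# sketch; both URLs, the tag and the output path are illustrative
# placeholders):
#
#     app(sourcedb='mysql://user:password@dev-host:3306/app_db',
#         targetdb='mysql://user:password@prod-host:3306/app_db',
#         output_directory='/tmp/patches',
#         tag='release_42')
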
def app(sourcedb='', targetdb='', table_list='', version_filename=False, output_directory=None, log_directory=None, ignore_operations=None):
    logging.basicConfig(level=logging.DEBUG, format='%(levelname)s - [%(asctime)s]:%(message)s')
    options = {}
    options['sourcedb'] = sourcedb
    options['targetdb'] = targetdb
    options['table_list'] = table_list.split(',')
    options['version_filename'] = version_filename
    options['output_directory'] = output_directory
    options['log_directory'] = log_directory
    options['ignore_operations'] = ignore_operations

    if not table_list:
        logging.error('Tables must be specified for data sync. Exiting.')
        return 1
    if not os.path.isabs(output_directory):
        logging.error('Output directory must be an absolute path. Exiting.')
        return 1
    if not os.path.isdir(output_directory):
        logging.error('Output directory does not exist. Exiting.')
        return 1
    if not log_directory or not os.path.isdir(log_directory):
        if log_directory:
            logging.info('Log directory does not exist, writing log to {}'.format(output_directory))
        log_directory = output_directory
    if not sourcedb:
        logging.error('Source database URL not provided. Exiting.')
        return 1
    if not targetdb:
        logging.error('Target database URL not provided. Exiting.')
        return 1
    source_info = schema.parse_database_url(sourcedb)
    if (source_info.get('protocol') or '').upper() != 'MYSQL':
        logging.error('Source database must be a MySQL database. Exiting.')
        return 1
    if not source_info.get('db'):
        logging.error('Source database name not provided. Exiting.')
        return 1
    target_info = schema.parse_database_url(targetdb)
    if (target_info.get('protocol') or '').upper() != 'MYSQL':
        logging.error('Target database must be a MySQL database. Exiting.')
        return 1
    if not target_info.get('db'):
        logging.error('Target database name not provided. Exiting.')
        return 1
    source_obj = schema.DataBaseConnection()
    source_obj.connect(sourcedb)
    # Compare major version numbers; a plain string comparison would
    # mis-order versions such as '10.x' relative to '5.0.0'.
    if int(source_obj.version.split('.')[0]) < 5:
        logging.error('Source database MySQL version is too low; please upgrade MySQL.')
        source_obj.close()
        return 1
    target_obj = schema.DataBaseConnection()
    target_obj.connect(targetdb)
    if int(target_obj.version.split('.')[0]) < 5:
        logging.error('Target database MySQL version is too low; please upgrade MySQL.')
        target_obj.close()
        source_obj.close()
        return 1

    try:
        # Output clean-up filters applied by PatchBuffer to each patch line.
        filters = (
            lambda d: utils.REGEX_MULTI_SPACE.sub(' ', d),
            lambda d: utils.REGEX_DISTANT_SEMICOLIN.sub(';', d),
        )
        p_fname = utils.create_pnames(target_obj.db, date_format=DATE_FORMAT, prefix='data_sync_')

        pBuffer = utils.PatchBuffer(name=os.path.join(output_directory, p_fname), filters=filters, tpl=None, ctx=None, version_filename=version_filename)

        db_selected = False

        # synctd.sync_data is expected to yield one SQL statement per data
        # difference in the listed tables; each is appended to the patch buffer.
        for patch in synctd.sync_data(source_obj.databases[source_obj.db], target_obj.databases[target_obj.db], options):
            if patch:
                if not db_selected:
                    pBuffer.write(source_obj.databases[source_obj.db].use() + '\n')
                    db_selected = True
                pBuffer.write(patch + '\n')
        if not pBuffer.modified:
            logging.info('No migration scripts written. mysql://{}/{} and mysql://{}/{} were in sync.'.format(source_obj.host, source_obj.db, target_obj.host, target_obj.db))
            return 0
        else:
            try:
                pBuffer.save()
                logging.info("SQL file has been saved to '{}'.".format(pBuffer.name))
            except OSError as e:
                pBuffer.delete()
                logging.error('Error occurred: {}'.format(e))
                return 1
    finally:
        if source_obj:
            source_obj.close()
        if target_obj:
            target_obj.close()
    print(pBuffer.name)
    return pBuffer.name
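
# Example invocation of the data-sync patch generator above (a minimal sketch;
# the URLs, table names and output path are illustrative placeholders):
#
#     app(sourcedb='mysql://user:password@dev-host:3306/app_db',
#         targetdb='mysql://user:password@prod-host:3306/app_db',
#         table_list='users,orders',
#         output_directory='/tmp/patches')
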