    def getThisMonthSpend(self):
        """Return this month's spend so far; returns 0 on the first day of the month."""

        if self.local_dates.today.date().day == 1:
            return 0

        if self.budget_group_id:
            # Budget group: sum the cost of every campaign in the group.
            query = """
            select sum(cost) from campaign_performance where account_id = '%s'
            and date_range = 'THIS_MONTH'
            and campaign_id in (%s)
            """ % (self.account_id, ','.join([
                "'" + campaign_id + "'"
                for campaign_id in self.budget_group_info['campaign_ids']
            ]))
            rows = Database().executeQuery(query)
            cost = None
            for row in rows:
                cost = row[0]
            if cost is None:
                return 0
            return cost

        # No budget group: sum the account-level performance report for the month to date.
        query = "select sum(cost) from account_performance_reports where account_id = '%s' " % (
            self.account_id)
        query += " and date >= '%s' and date <= '%s' " % (
            self.local_dates.first_date_of_this_month,
            self.local_dates.yesterday)
        result = Database().createEngine().execute(query)
        this_month_cost = None
        for row in result:
            this_month_cost = row[0]

        if this_month_cost is None:
            return 0

        return this_month_cost
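    # Hedged sketch, not part of the original method above: the same account-level
    # monthly-spend query expressed with SQLAlchemy bound parameters instead of
    # %-formatted SQL. It assumes Database().createEngine() returns a SQLAlchemy 1.x
    # engine; the method name is hypothetical.
    def getThisMonthSpendParameterized(self):
        from sqlalchemy import text  # local import to keep the sketch self-contained
        query = text(
            "select sum(cost) from account_performance_reports "
            "where account_id = :account_id and date >= :start and date <= :end")
        row = Database().createEngine().execute(
            query,
            account_id=self.account_id,
            start=self.local_dates.first_date_of_this_month,
            end=self.local_dates.yesterday).fetchone()
        return 0 if row is None or row[0] is None else row[0]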
Example #2
    def getUserIdsToProcess(self, first_run):
        """Get user_ids which need accounts based on whether accounts already exist.
            If first_run is false return all users.
        """
        user_ids = Database().getValues("users", "id")
        if not first_run:
            return [user_id[0] for user_id in user_ids]

        user_ids_to_process = []
        for user_id in user_ids:

            query = """
            SELECT users.id FROM users
            join accounts on accounts.user_id = users.id
            where users.id = '%s'
            """ % (user_id[0])

            result = Database().executeQuery(query)

            has_accounts = False
            for row in result:
                has_accounts = True
                break

            if not has_accounts:
                user_ids_to_process.append(user_id[0])

        return user_ids_to_process
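    # Hedged alternative sketch (not the method above): the per-user existence loop
    # can be collapsed into a single LEFT JOIN that returns only users with no
    # accounts row. Assumes the same users/accounts schema; the method name is
    # hypothetical.
    def getUserIdsWithoutAccounts(self):
        query = """
        SELECT users.id FROM users
        left join accounts on accounts.user_id = users.id
        where accounts.user_id is null
        """
        result = Database().executeQuery(query)
        return [row[0] for row in result]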
    def createBudgetCommanderTable(self):
        """Create budget commander table for testing.
        A migration will handle this in production"""

        if self.envvars["APP_ENV"] != "local":
            return

        if self.budgetCommanderTableExists():
            return

        columns = [
            "account_id", "email_sent", "email_addresses", "notify_via_email",
            "pause_campaigns", "enable_campaigns", "control_spend",
            "rollover_budget", "day_paused_campaigns", "month_paused_campaigns"
        ]
        data = [[
            self.account_id, 0,
            "[[email protected], [email protected]]", 1, 0, 1, 1,
            1, "", ""
        ]]
        df = pd.DataFrame(data, columns=columns)
        df.to_sql(name="budget_commander",
                  con=(Database()).createEngine(),
                  index=False,
                  if_exists="replace")

        with (Database()).createEngine().connect() as con:
            con.execute(
                'alter table `budget_commander` modify account_id varchar(255)'
            )
            con.execute(
                'ALTER TABLE `budget_commander` ADD PRIMARY KEY (`account_id`);'
            )
    def dataFrameFromAdPerformanceReports(self, date_range):
        database = Database()
        table_name = 'advert_performance'
        # run a query that matches no rows just to discover the table's column names
        columns = list(
            pd.read_sql(
                "select * from %s where id = '999999999'" % (table_name),
                database.createEngine()).columns)

        columns = ['advert_performance.' + column for column in columns]

        query = """
        select adverts.ad_type,adverts.adgroup_id,adverts.final_urls, %s,
        adverts.headline_1,
        adverts.headline_2,
        adverts.headline_3,
        adverts.description,
        adverts.description_2,
        adverts.path_1,
        adverts.path_2
        from advert_performance
        join adverts
        on adverts.id = advert_performance.advert_id
        where adverts.account_id = '%s'
        and advert_performance.date_range = '%s'
        """ % (','.join(columns), self.account_id, date_range)

        ad_performance_df_chunks = pd.read_sql_query(query,
                                                     Database().createEngine(),
                                                     chunksize=2000)

        return ad_performance_df_chunks
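    # Hypothetical usage sketch: pd.read_sql_query with chunksize returns an
    # iterator of DataFrames (2000 rows each), so callers loop over the chunks.
    # for chunk_df in self.dataFrameFromAdPerformanceReports('last_30_days'):
    #     handle_chunk(chunk_df)  # handle_chunk is a placeholder, not a real helper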
Example #5
    def __init__(self, download_dir='/tmp/download/'):
        super(DouyinDownloader, self).__init__()
        # self.arg = arg

        self.downloadCount = 0
        self.uploadCount = 0

        if download_dir.endswith('/'):
            self.downloadDir = download_dir
        else:
            self.downloadDir = download_dir + '/'

        # redis connect
        self.redisClient = redis.StrictRedis(host='115.159.157.98',
                                             port=17379,
                                             db=0,
                                             password='******')

        # mysql
        self.mysqlDB = Database(host='localhost',
                                user='******',
                                passwd='zx#Video2018',
                                database='video')

        # nextcloud
        self.oc = owncloud.Client('http://127.0.0.1:18080')
        self.oc.login('zhangxu', 'zx@12346')
        self.oc.logout()

        self.oc2 = owncloud.Client('http://115.29.34.236:18181')
        self.oc2.login('zhangxu', 'zx@12346')
        self.oc2.logout()
    def writeToDatabase(self, ad_count, priority, message, adgroup_id):
        Database().executeQuery(
            "update adgroups set ad_count = %s where id = '%s' and account_id = '%s'"
            % (ad_count, adgroup_id, self.account_id))
        Database().executeQuery(
            "update adgroups set priority = %s where id = '%s' and account_id = '%s'"
            % (priority, adgroup_id, self.account_id))
        Database().executeQuery(
            "update adgroups set message = '%s' where id = '%s' and account_id = '%s'"
            % (message, adgroup_id, self.account_id))
Example #7
def keywords(account_id):
    """Delete keywords"""
    adgroup_ids = Database().getValues('adgroups', 'id')
    adgroup_ids = ['"%s"' % (adgroup_id[0]) for adgroup_id in adgroup_ids]

    query = """
    delete from keywords
    where account_id = '%s'
    and adgroup_id
    not in (%s)
    """ % (account_id, ','.join(adgroup_ids))
    Database().executeQuery(query)
Example #8
def adgroups(account_id):
    """Delete ad groups"""
    campaign_ids = Database().getValues('campaigns', 'id')
    campaign_ids = ['"%s"' % (campaign_id[0]) for campaign_id in campaign_ids]

    query = """
    delete from adgroups
    where account_id = '%s'
    and campaign_id
    not in (%s)
    """ % (account_id, ','.join(campaign_ids))

    Database().executeQuery(query)
    def __init__(self, appid, appsecret):

        self.appid = appid
        self.appsecret = appsecret

        # set the logger
        self.log_level = logging.DEBUG
        self.log_path = '/var/log/update_access_token.log'

        self.logger = logging.getLogger('Token')
        self.logger.setLevel(self.log_level)

        # create a handler that writes the log to a file.
        fh = logging.FileHandler(self.log_path)
        fh.setLevel(self.log_level)

        # create a handler that prints log info to the console.
        ch = logging.StreamHandler()
        ch.setLevel(self.log_level)

        # set the log format
        formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        fh.setFormatter(formatter)
        ch.setFormatter(formatter)

        # add the handlers to logger
        self.logger.addHandler(fh)
        self.logger.addHandler(ch)



        self.database = Database()

        self.logger.debug('update_access_token init over.')
    def deleteExisting(self, date_range):
        """Initially remove all advert data for this account - we'll overwrite it"""

        deleteQuery = "delete from advert_performance where account_id = '%s' and date_range = '%s' " % (
            self.account_id, date_range)

        Database().createEngine().execute(deleteQuery)
Example #11
    def addUserAccounts(self, user_id, first_run):

        if not Helpers().isActiveUser(user_id):
            Log("info", "this user isn't active. Exiting",
                'user_id: %s' % (user_id))
            return

        try:

            Log("info", "adding accounts for user id '%s'" % user_id)
            self.user_id = user_id
            accounts_df = self.getAccountsDf()
            if functions.dfIsEmpty(accounts_df):
                return
            accounts_df = accounts_df.drop_duplicates('google_id')
            accounts_df = self.dropDuplicates(accounts_df, first_run)
            if (accounts_df.shape[0] == 0 and first_run):
                Log('warning',
                    "no unique google accounts were found for this user",
                    "user_id (%s)" % (user_id), "")
            accounts_df.to_sql("accounts",
                               Database().createEngine(),
                               index=False,
                               if_exists="append")
        except Exception as exception:
            Log("error",
                str(exception) + " (User id: %s)" % (user_id),
                traceback.format_exc())

        Log("info", "finished adding account meta data")
Example #12
    def writeToPerformanceTable(self, df, report, account_id):
        delete_query = "delete from %s where account_id = '%s' " % (
            self.performance_table_name, account_id)
        Database().executeQuery(delete_query)
        df["id"] = pd.Series([uuid.uuid1()
                              for i in range(len(df))]).astype(str)
        report.writeDataframeToTable(df, self.performance_table_name)
Example #13
    def markAsSent(self):
        # update the budget_commander table, marking the account's email as sent
        # (the email_sent field)
        print("marking as sent")
        query = "update budget_commander set email_sent = 1 where account_id = '%s' " % (
            self.account_id)
        (Database()).createEngine().execute(query)
    def addDefaultBudgetCommanderSettings(self):
        """If budget commander settings aren't availble add the defaults"""
        settings = {
            "1": {
                "id": uuid.uuid4(),
                "created_at": datetime.now(),
                "updated_at": datetime.now(),
                "account_id": self.account_id,
                "notify_via_email": 0,
                "pause_campaigns": 0,
                "enable_campaigns": 0,
                "rollover_spend": 0,
                "control_spend": 0,
                "email_sent": 0,
                "emergency_stop": 0,
                "email_addresses": None,
                "day_paused_campaigns": None,
                "month_paused_campaigns": None,
                "excess_budget": None,
            }
        }

        # write to the db
        Database().appendRows("budget_commander", settings)

        return settings["1"]
Example #15
def createNotification(account_id):

    #only when running for the first time...
    def isFirstRun(account_id):
        query = "select ad_performance_report_processed_at from accounts where id = '%s'" % (
            account_id)
        results = Database().executeQuery(query)
        for result in results:
            return result[0] is None

    if not isFirstRun(account_id):
        Log('info',
            "the notification won't be created as this isn't the first run",
            '', account_id)
        return

    Log('info', "creating successful sync notification", '', account_id)

    user_id = Helpers().getUserIdFromAccountId(account_id)
    account_name = Helpers().getAccountNameFromId(account_id)
    account_google_id = Helpers().getAccountGoogleIdFromId(account_id)
    username = Helpers().getUsername(account_id)
    query = r"""
    insert into notifications 
    (id, type, notifiable_id, notifiable_type, data, created_at, updated_at)
    values
    ('%s', 'App\\Notifications\\AccountSynced', '%s','App\\User',
    '{"message":"%s was synced successfully! Refesh the page to access the account."}', now(), now())
    """ % (str(uuid.uuid4()), user_id, account_name)
    Database().executeQuery(query)

    sendEmail(account_name, account_google_id, username, account_id)
Example #16
    def update_batch_jobs(self, batch_jobs_data_frame):
        for index, row in batch_jobs_data_frame.iterrows():
            query = "update batch_job set is_checking={0}, status='{1}' where id='{2}'".format(
                row["is_checking"], row["status"], row["id"]
            )

            Database().executeQuery(query)
Example #17
    def process_service_type(self, service_type, action, attribute=None):
        """ Grab rows from the queue filtered by service type, action and attribute
        Process the rows (apply mutations to the API)
        Run each chunk once. The next process will pick up the next chunk.
        """
        # Query data from mutations table
        query = self.mutations_query(service_type, action, attribute)
        mutations_data_frame_chunks = pd.read_sql(query, Database().createEngine(), chunksize=2000)

        # Iterate over chunks
        while True:
            try:
                mutations_data_frame = next(mutations_data_frame_chunks)
            except StopIteration:
                return

            # Mark the chunk data as is_processing equal to True
            self.set_data_frame_to_processing(mutations_data_frame)

            account_ids = mutations_data_frame.account_id.drop_duplicates().values

            for account_id in account_ids:
                account_data_frame = mutations_data_frame[mutations_data_frame.account_id == account_id].drop_duplicates()
                # Send mutations to google ads API
                result = self.process_account_mutations(account_data_frame, service_type)

                # Write data to DB
                self.handle_mutations_result(result, account_data_frame)
Example #18
    def __init__(self, download_dir = '/tmp/download/'):
        super(DouyinDownloader, self).__init__()
        # self.arg = arg

        self.downloadCount = 0
        self.uploadCount = 0

        if download_dir.endswith('/'):
            self.downloadDir = download_dir
        else:
            self.downloadDir = download_dir + '/'

        # redis connect
        self.redisClient = redis.StrictRedis(host='115.159.157.98', port=17379, db=0, password='******')

        # mysql
        self.mysqlDB = Database(host='localhost', user='******', passwd='zx#Video2018', database='video')

        # nextcloud
        self.oc = owncloud.Client('http://127.0.0.1:18080')
        self.oc.login('zhangxu', 'zx@12346')
        self.oc.logout()

        self.oc2 = owncloud.Client('http://115.29.34.236:18181')
        self.oc2.login('zhangxu', 'zx@12346')
        self.oc2.logout()
    def getKeywordsDataframe(self):
        query = """
        SELECT keywords.id as entity_id,adgroups.google_id as adgroup_google_id, keywords.google_id,keyword_performance.clicks, keyword_performance.conversions,keyword_performance.search_impression_share,
        keyword_performance.cost,keyword_performance.conversion_value,keywords.keyword_text
        ,keywords.keyword_match_type, keywords.cpc_bid, keywords.original_cpc_bid
        FROM keyword_performance
        join keywords on keywords.id = keyword_performance.keyword_id
        join adgroups on adgroups.id = keywords.adgroup_id
        where date_range = "last_30_days"
        and keywords.account_id = "%s"
        and keywords.status = "enabled"
        and keyword_performance.conversions = 0
        and keyword_performance.clicks > 0
        and keywords.google_id != "3000006"
        and keywords.google_id != "3000000"
        and keywords.bidding_strategy_type = "cpc"
        order by cost desc
        """ % (self.account_id)
        df = pd.read_sql(query, (Database()).createEngine())
        df.cpc_bid = df.cpc_bid.astype("str")
        df.cpc_bid = df.cpc_bid.str.replace("--", "0")
        df.cpc_bid = df.cpc_bid.astype("float")
        df['forecast'] = (df.cost / 7) * self.local_dates.days_remaining_in_this_month
        df.forecast = df.forecast.astype("str")
        df.forecast = df.forecast.str.replace("--", "0")
        df.forecast = df.forecast.astype("float")
        return df
Example #20
    def getUserIdFromAccountId(self, account_id):
        query = "select user_id from accounts where id = '%s'" % (account_id)

        result = Database().executeQuery(query)

        for row in result:
            # print row
            return row[0]
Example #21
    def getTimezone(self):
        """Returns an account's tzinfo timezone"""
        query = 'SELECT timezone FROM accounts where id = "%s"' % (
            self.account_id)
        timezone = pd.read_sql_query(
            query,
            Database().createEngine())["timezone"].values[0]
        return pytz.timezone(timezone)
Example #22
    def getRefreshTokenFromUserId(self, user_id):

        query = "select refresh_token from users where id = '%s'" % (user_id)

        result = Database().executeQuery(query)
        for row in result:
            if row[0] is None:
                return
            return row[0]
    def getSyncedAccountIds(self):
        """Return a list of synced account ids"""
        query = "select id from accounts where is_synced = 1"
        result = Database().createEngine().execute(query)
        account_ids = []
        for row in result:
            account_id = row[0]
            account_ids.append(account_id)
        return account_ids
    def getAccountInfo(self):
        query = "select budget, name,google_id,currency_code from accounts where id = '%s' " % (
            self.account_id)
        data = pd.read_sql(query, (Database()).createEngine()).to_dict()
        user_info = {}
        for key in data:
            user_info[key] = data[key][0]

        return user_info
Example #25
    def insert_batch_job(self, batch_job_status, batch_job_google_id):
        # Create batch job
        uuid_string = str(uuid.uuid1())
        query = "INSERT INTO batch_job (`id`, `status`, `google_id`) VALUES ('{0}','{1}', '{2}')".format(
            uuid_string, batch_job_status, batch_job_google_id
        )
        Database().createEngine().execute(query)

        return uuid_string
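    # Hypothetical usage sketch (not from the source): keep the returned uuid so
    # later mutation rows can reference it as batch_job_id. The 'PENDING' status
    # string is an assumption, not a confirmed constant from this codebase.
    # batch_job_id = self.insert_batch_job('PENDING', google_batch_job_id)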
Example #26
    def getCampaignIdsToPause(self):
        query = "select google_id from campaigns where account_id = '%s' " % (self.account_id)

        result = (Database()).createEngine().execute(query)
        campaign_ids = []

        for row in result:
            campaign_ids.append(row[0])

        self.campaign_ids_string = ",".join(campaign_ids)
Example #27
def append_df_to_sql_table(df, table_name, engine=None):
    if dfIsEmpty(df):
        return
    df = df.replace([np.inf, -np.inf], 0)
    # df = df.replace(r'\s+', np.nan, regex=True)
    df = trimDfToTableColumns(df, table_name)
    if engine is None:
        # honour the optional engine argument instead of always creating a new one
        engine = Database().createEngine()
    df.to_sql(table_name,
              engine,
              if_exists='append',
              index=False)
Example #28
    def check(self):
        """
        1. Get batch jobs with status != Done
        2. Check status of batch job
        3. If status = Done, Get mutations with batch job id = current batch job id (order by created)
        4. Update mutations with results
        """
        batch_jobs_query = self.batch_jobs_query()
        batch_jobs_chunks = pd.read_sql(batch_jobs_query, Database().createEngine(), chunksize=10)

        # Iterate over chunks
        try:
            batch_jobs_data_frame = next(batch_jobs_chunks)
        except StopIteration:
            return

        # Mark the batch jobs data as is_checking equal to True
        self.set_batch_jobs_to_processing(batch_jobs_data_frame)

        for index, batch_job_data_frame in batch_jobs_data_frame.iterrows():
            response = Update(
                batch_job_data_frame['account_id'], ''
            ).check_batch_job_result(batch_job_data_frame['google_id'])

            if response:
                mutations_query = self.mutations_query_by_batch_job(batch_job_data_frame['id'])
                mutations_data_frame_chunks = pd.read_sql(mutations_query, Database().createEngine(), chunksize=2000)

                # Iterate over chunks
                try:
                    mutations_data_frame = next(mutations_data_frame_chunks)
                except StopIteration:
                    continue

                mutations_data_frame["is_processing"] = 0
                self.update_batch_job_mutations(mutations_data_frame, response)

                # TODO: support all status for batch job
                batch_jobs_data_frame.at[index, "status"] = 'DONE'

        batch_jobs_data_frame["is_checking"] = 0
        self.update_batch_jobs(batch_jobs_data_frame)
Example #29
    def update_mutations(self, account_data_frame, message, is_success):
        for i, row in account_data_frame.iterrows():
            if row['batch_job_id'] is None:
                row['batch_job_id'] = 'null'
            else:
                row['batch_job_id'] = "'%s'" %(row['batch_job_id'])
            query = "update mutations set response = '{0}', executed_at=now(), is_success={1}, is_processing={2}, batch_job_id={3} where id = '{4}'".format(
                message, is_success, row["is_processing"],row['batch_job_id'] , row["id"],
            )

            Database().executeQuery(query)
    def getAdvertsDataframe(self):
        """Pulls advert, advert_performance and adgroup data
        Returns chunks of 2000"""

        query = """
        SELECT 
        adverts.google_id,
        adverts.final_urls,
        adverts.headline_1,
        adverts.status,
        adverts.description,
        adverts.path_2,
        adverts.path_1,
        adverts.headline_2,
        adverts.id,
        adverts.adgroup_id,
        adverts.domain,
        adverts.ctr_significance,
        adverts.conversion_rate_significance,
        adverts.account_id,
        adverts.loser,
        adverts.potential_savings,
        adverts.headline_3,
        adverts.description_2,
        advert_performance.impressions,
        advert_performance.clicks,
        advert_performance.conversions,
        advert_performance.cost,
        advert_performance.conversion_value,
        advert_performance.date_range,
        advert_performance.id,
        advert_performance.cpa,
        advert_performance.conversion_rate,
        advert_performance.average_cpc,
        advert_performance.ctr,
        advert_performance.roas,
        advert_performance.ctr_significance,
        advert_performance.conversion_rate_significance,
        advert_performance.ctr_message,
        advert_performance.conversion_rate_message,
        advert_performance.advert_id,
        advert_performance.average_position,
        advert_performance.impression_share,
        adgroups.name as adgroup_name,
        adgroups.campaign_id
        from adverts join advert_performance
        on adverts.id = advert_performance.advert_id
        join adgroups
        on adgroups.id = adverts.adgroup_id
        where adverts.account_id = '%s'
        and date_range = '%s'
        """ % (self.account_id, self.date_range)

        return pd.read_sql(query, Database().createEngine(), chunksize=2000)
Example #31
    def getUsername(self, account_id):
        query = """
        SELECT u.name FROM users as u
        join accounts as a 
        on a.user_id = u.id
        where a.id = "%s"
        """ % (account_id)
        result = (Database()).createEngine().execute(query)

        for row in result:
            return row[0]
Example #32
    def isActiveUser(self, user_id):
        """Returns true if user exists and has a refresh token"""
        query = """
        SELECT id from users
        where refresh_token is not null
        and id = '%s'
        """ % (user_id)
        results = Database().executeQuery(query)
        return results.rowcount > 0
Example #33
    def __init__(self, ip='0.0.0.0', port=9000, log_level=logging.DEBUG):

        self.ip   = ip
        self.port = port

        self.author  = __author__
        self.version = __version__

        self.file_path = os.path.realpath(__file__)
        self.dir_path  = os.path.dirname(self.file_path)


        # mark system start time
        self.system_initialized = datetime.now()


        # weixin about
        self.appid      = 'wxb1efccbbb5bafcbb'
        self.appsecret  = '9d64356f48062e46159b4d179dea5c44'
        self.token      = 'shenhailuanma'
        self.access_token = ''
        self.signature  = None
        self.echostr    = None
        self.timestamp  = None
        self.nonce      = None

        self.wechat     = WechatBasic(token=self.token, appid=self.appid, appsecret=self.appsecret)

        self.weather    = weather()

        # database
        self.database   = Database()

        # set the logger
        self.log_level = logging.DEBUG
        self.log_path = 'myWeixinServer.log'

        self.logger = logging.getLogger('myWeixinServer')
        self.logger.setLevel(self.log_level)

        # create a handler that writes the log to a file.
        fh = logging.FileHandler(self.log_path)
        fh.setLevel(self.log_level)

        # create a handler that prints log info to the console.
        ch = logging.StreamHandler()
        ch.setLevel(self.log_level)

        # set the log format
        formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        fh.setFormatter(formatter)
        ch.setFormatter(formatter)

        # add the handlers to logger
        self.logger.addHandler(fh)
        self.logger.addHandler(ch)

        self.logger.info('init over.')

    
        #######  web test ######
        @bottle.route('/')
        def index_get():
            try:
                # get the post data
                self.logger.debug('handle a GET request: /, ')

                # e.g :  /?signature=04d39d841082682dc7623945528d8086cc9ece97&echostr=8242236714827861439&timestamp=1440564411&nonce=2061393952

                # get the data
                self.logger.debug('handle the request data: %s' %(bottle.request.query_string))
                #self.logger.debug('handle the request signature:%s' %(bottle.request.query.signature))
                #self.logger.debug('handle the request echostr:%s' %(bottle.request.query.echostr))
                #self.logger.debug('handle the request timestamp:%s' %(bottle.request.query.timestamp))
                #self.logger.debug('handle the request nonce:%s' %(bottle.request.query.nonce))

                return bottle.request.query.echostr
            except Exception as ex:
                return "%s" % (ex)


            return "Hello, this is myWeixinServer."
class update_access_token:

    def __init__(self, appid, appsecret):

        self.appid = appid
        self.appsecret = appsecret

        # set the logger
        self.log_level = logging.DEBUG
        self.log_path = '/var/log/update_access_token.log'

        self.logger = logging.getLogger('Token')
        self.logger.setLevel(self.log_level)

        # create a handler that writes the log to a file.
        fh = logging.FileHandler(self.log_path)
        fh.setLevel(self.log_level)

        # create a handler that prints log info to the console.
        ch = logging.StreamHandler()
        ch.setLevel(self.log_level)

        # set the log format
        formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        fh.setFormatter(formatter)
        ch.setFormatter(formatter)

        # add the handlers to logger
        self.logger.addHandler(fh)
        self.logger.addHandler(ch)



        self.database = Database()

        self.logger.debug('update_access_token init over.')

    def update(self):

        # get access_token from weixin
        access_token = self.get_access_token()
        
        # check whether the access_token already exists in the database; if not, insert it.
        db_access_token = self.database.get_access_token(self.appid, self.appsecret)
        self.logger.debug('get_access_token from database:%s.' %(db_access_token))

        if db_access_token is not None:
            # update access_token
            self.database.update_access_token(self.appid, self.appsecret, access_token)
        else:
            self.database.insert_access_token(self.appid, self.appsecret, access_token)

    def get_access_token(self):
        try:
            url = "https://api.weixin.qq.com/cgi-bin/token?grant_type=client_credential&appid={0}&secret={1}".format(self.appid, self.appsecret)
            self.logger.debug('get_access_token request url:%s.' %(url))
            response = self.http_get(url, 5)
            if response is not None:
                response_json = json.loads(response)
                if 'access_token' in response_json and len(response_json['access_token']) > 0:
                    return response_json['access_token']

            return None
        except Exception as ex:
            self.logger.error('get_access_token error:{0}'.format(str(ex)))
            return None
Example #35
class Server:
    def __init__(self, ip='0.0.0.0', port=9000, log_level=logging.DEBUG):

        self.ip   = ip
        self.port = port

        self.author  = __author__
        self.version = __version__

        self.file_path = os.path.realpath(__file__)
        self.dir_path  = os.path.dirname(self.file_path)


        # mark system start time
        self.system_initialized = datetime.now()


        # weixin about
        self.appid      = 'wxb1efccbbb5bafcbb'
        self.appsecret  = '9d64356f48062e46159b4d179dea5c44'
        self.token      = 'shenhailuanma'
        self.access_token = ''
        self.signature  = None
        self.echostr    = None
        self.timestamp  = None
        self.nonce      = None

        self.wechat     = WechatBasic(token=self.token, appid=self.appid, appsecret=self.appsecret)

        self.weather    = weather()

        # database
        self.database   = Database()

        # set the logger
        self.log_level = logging.DEBUG
        self.log_path = 'myWeixinServer.log'

        self.logger = logging.getLogger('myWeixinServer')
        self.logger.setLevel(self.log_level)

        # create a handler that writes the log to a file.
        fh = logging.FileHandler(self.log_path)
        fh.setLevel(self.log_level)

        # create a handler that prints log info to the console.
        ch = logging.StreamHandler()
        ch.setLevel(self.log_level)

        # set the log format
        formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        fh.setFormatter(formatter)
        ch.setFormatter(formatter)

        # add the handlers to logger
        self.logger.addHandler(fh)
        self.logger.addHandler(ch)

        self.logger.info('init over.')

    
        #######  web test ######
        @bottle.route('/')
        def index_get():
            try:
                # get the post data
                self.logger.debug('handle a GET request: /, ')

                # e.g :  /?signature=04d39d841082682dc7623945528d8086cc9ece97&echostr=8242236714827861439&timestamp=1440564411&nonce=2061393952

                # get the data
                self.logger.debug('handle the request data: %s' %(bottle.request.query_string))
                #self.logger.debug('handle the request signature:%s' %(bottle.request.query.signature))
                #self.logger.debug('handle the request echostr:%s' %(bottle.request.query.echostr))
                #self.logger.debug('handle the request timestamp:%s' %(bottle.request.query.timestamp))
                #self.logger.debug('handle the request nonce:%s' %(bottle.request.query.nonce))

                return bottle.request.query.echostr
            except Exception as ex:
                return "%s" % (ex)


            return "Hello, this is myWeixinServer."


        @bottle.route('/', method="POST")
        def index_post():
            try:
                response = ''

                self.logger.debug('handle a POST request: /, ')
                self.logger.debug('handle the request data: %s' %(bottle.request.query_string))

                post_data = bottle.request.body.getvalue()
                self.logger.debug('handle the request post data: %s' %(post_data))

                echostr     = bottle.request.query.echostr
                signature   = bottle.request.query.signature
                timestamp   = bottle.request.query.timestamp
                nonce       = bottle.request.query.nonce

                if self.wechat.check_signature(signature=signature, timestamp=timestamp, nonce=nonce):
                    self.logger.debug('check_signature ok.')

                    self.wechat.parse_data(post_data)

                    message = self.wechat.get_message()

                    if message.type == 'text':
                        if message.content == 'wechat':
                            response = self.wechat.response_text(u'^_^')
                        elif u'天气' in message.content:  # '天气' means 'weather'
                            city = u'北京'  # Beijing
                            data = self.database.get_weather_data(city)
                            self.logger.debug('get the weather response:{0}'.format(data))
                            response = self.wechat.response_text(data)
                        else:
                            response = self.wechat.response_text(u'文字')  # 'text'

                    elif message.type == 'image':
                        response = self.wechat.response_text(u'图片')  # 'image'
                    elif message.type == 'video':
                        self.logger.debug('message.media_id:%s' %(message.media_id))
                        response = self.wechat.download_media(message.media_id)
                        self.logger.debug('message.media_id:%s over' %(message.media_id))
                        #response = self.wechat.response_text(u'视频')  # 'video'
                    elif message.type == 'voice':
                        response = self.wechat.response_text(u'音频')  # 'voice'
                    elif message.type == 'location':
                        response = self.wechat.response_text(u'位置')  # 'location'
                    else:
                        response = self.wechat.response_text(u'未知')  # 'unknown'


                

                return response
            except Exception as ex:
                self.logger.debug('error:%s' % (ex))
                return "%s" % (ex)
Example #36
class DouyinDownloader(object):
    """docstring for DouyinDownloader"""
    def __init__(self, download_dir = '/tmp/download/'):
        super(DouyinDownloader, self).__init__()
        # self.arg = arg

        self.downloadCount = 0
        self.uploadCount = 0

        if download_dir.endswith('/'):
            self.downloadDir = download_dir
        else:
            self.downloadDir = download_dir + '/'

        # redis connect
        self.redisClient = redis.StrictRedis(host='115.159.157.98', port=17379, db=0, password='******')

        # mysql
        self.mysqlDB = Database(host='localhost', user='******', passwd='zx#Video2018', database='video')

        # nextcloud
        self.oc = owncloud.Client('http://127.0.0.1:18080')
        self.oc.login('zhangxu', 'zx@12346')
        self.oc.logout()

        self.oc2 = owncloud.Client('http://115.29.34.236:18181')
        self.oc2.login('zhangxu', 'zx@12346')
        self.oc2.logout()
        
    def downloadAll(self):
        while True:
            try:
                task = self.getTask()
                if task is not None:
                    print("get one task, to download")
                    print(task)

                    # check whether the video already exists
                    exist = self.videoExist(task)
                    if exist:
                        print("video already exists, skip")
                    else:
                        self.downloadOne(task)
            except Exception as e:
                print("downloadAll error:")
                print(e)

            time.sleep(1)


    def getTask(self):
        # pop a task from the task queue
        task = self.redisClient.rpop("douyinTask")
        return task

    def downloadOne(self, task):

        nowtime = time.time()
        filename = str(nowtime) + ".mp4"
        # dirname = time.strftime("%Y-%m-%d", time.localtime()) 
        filepath = self.downloadDir + filename

        downloadDirNow = self.downloadDir

        print("to download video:" + filepath)

        # prepare dir
        if not os.path.exists(downloadDirNow):
            os.makedirs(downloadDirNow, 0o755)  # mode must be octal; decimal 666 was almost certainly unintended

        # stream=True defers downloading the response body until Response.content is accessed
        res = requests.get(task, stream=True)
        # write the video into the download directory

        with open(filepath, 'ab') as f:
            f.write(res.content)
            f.flush()

            self.downloadCount = self.downloadCount + 1
            print(filename + ' download complete' + ', count:' + str(self.downloadCount))


            # compute the file's md5
            file_md5 = self.get_file_md5(filepath)
            print("file md5:" + file_md5)


            # use the md5 value as the new file name
            new_file_path = downloadDirNow + file_md5 + ".mp4"


            videoInfo = self.mysqlDB.get_video_info_by_md5(file_md5)
            if videoInfo is not None:
                # a file with this md5 already exists
                print("video with same md5 already exists, skip")
                # delete the duplicate download
                os.remove(filepath)
            else:
                # rename to the md5-based file name
                os.rename(filepath, new_file_path)

                # upload
                print("upload file to local cloud.")
                cloudFilePath = self.uploadFileToCloud(new_file_path, file_md5 + ".mp4", self.oc)

                # skip uploading to the remote cloud for now, the network is too slow...
                # print("upload file to remote cloud.")
                # self.uploadFileToCloud(new_file_path, file_md5 + ".mp4", self.oc2)

                # create an async task to copy the video to the remote cloud
                # if len(cloudFilePath) > 0:
                #     self.redisClient.lpush('copyTask', cloudFilePath)

                # update the upload count
                self.uploadCount = self.uploadCount + 1
                print("upload file count:" + str(self.uploadCount))

                # insert the record into the database
                urlmd5 = hashlib.md5(task).hexdigest()
                result, msg = self.mysqlDB.insert_video_info(platform=1, status=0, title='', url=str(task, encoding="utf-8"), md5=file_md5, urlmd5=urlmd5, storepath=new_file_path)
                print("insert_video_info:" + str(result))


                # delete the local file
                os.remove(new_file_path)


    def videoExist(self, task):
        # compute the md5 of the url and look it up in the database to
        # decide whether this video has already been downloaded
        urlmd5 = hashlib.md5(task).hexdigest()

        # look up the video info by its url md5
        videoInfo = self.mysqlDB.get_video_info(urlmd5)

        return videoInfo is not None

    def get_file_md5(self, file_path):
        # stream the file in 8096-byte chunks so large videos don't need to fit in memory
        md5_obj = hashlib.md5()
        with open(file_path, 'rb') as f:
            while True:
                d = f.read(8096)
                if not d:
                    break
                md5_obj.update(d)
        return str(md5_obj.hexdigest()).lower()

    def uploadFileToCloud(self, file_path, file_name, oc):
        try:
            oc.login('zhangxu', 'zx@12346')

            dirname = time.strftime("%Y-%m-%d", time.localtime())
            dirname = dirname + '-%d' % (self.uploadCount // 200)

            try:
                oc.mkdir(dirname)
            except Exception as e:
                print("mkdir failed:" + str(e))

            # upload
            cloudFilePath = dirname + '/' + file_name
            oc.put_file(cloudFilePath, file_path)

            # logout
            oc.logout()
            print("uploadFileToCloud success, file:" + cloudFilePath)

            return cloudFilePath
            
        except Exception as e:
            print("uploadFileToCloud error:")
            print(e)
            return ""