def process(self, response):
     trip = response.value
     start_time = trip['start_time']
     end_time = trip['end_time']
     val = {
         "vehicle_id": trip['vehicle_id'],
         "start_time": start_time,
         "start_time_day_w": self.__get_day_of_week(start_time),
         "start_time_day_m": self.__get_day_of_month(start_time),
         "start_time_h": self.__get_hour_of_day(start_time),
         "end_time": end_time,
         "end_time_day_w": self.__get_day_of_week(end_time),
         "end_time_day_m": self.__get_day_of_month(end_time),
         "end_time_h": self.__get_hour_of_day(end_time),
         "duration": trip['duration'],
         "distance": 0,
         "stops": 0,
         "type": trip['vehicle_type'],
         "src": trip['src'],
         "dest": trip['dest'],
         "timestamps": [],
         "route": [],
         "start_time_str": TimeUtils.format_timestamp(start_time),
         "end_time_str": TimeUtils.format_timestamp(end_time)
     }
     return val
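The __get_day_of_week / __get_day_of_month / __get_hour_of_day helpers are not shown in this snippet; a minimal sketch of what they might look like, assuming start_time and end_time are Unix timestamps in seconds (hypothetical helpers, not the project's actual code):

from datetime import datetime

def get_day_of_week(ts):
    # Monday == 0 ... Sunday == 6, local time
    return datetime.fromtimestamp(ts).weekday()

def get_day_of_month(ts):
    return datetime.fromtimestamp(ts).day

def get_hour_of_day(ts):
    return datetime.fromtimestamp(ts).hour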
Example #2
    def check_rotation_needed(log_object, data_size, new_timestamp):
        """
        The method checks whether rotation is needed for the given log object (from logs_map), based on the size of
        the new log data chunk and the timestamp of the new message.

        :param log_object - object - log object from logs_map which holds log-related data, such as total written bytes:
        :param data_size - int - size of new data chunk to be written to the given log object:
        :param new_timestamp - long - timestamp of the new message which is to be saved to the given log object:

        :return need_to_rotate - boolean - if True - the given log object needs to be rotated:
        """
        try:
            if log_object['size'] + data_size > LOG_FILE_MAX_SIZE:
                return True

            prev_timestamp = log_object['first_msg_ts']
            curr_timestamp = new_timestamp

            if not TimeUtils.is_same_day(
                    prev_timestamp, curr_timestamp
            ):  # Use timestamps, not UTC formatted strings
                return True

            days, hours, minutes, seconds = TimeUtils.get_diff(
                prev_timestamp, curr_timestamp)
            if hours >= LOG_FILE_MAX_DATES_DISTANCE_HOURS:
                return True
            return False
        except Exception, e:
            log.error('Error during rotation conditions checking: %s' %
                      e.message)
            raise
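A minimal usage sketch of check_rotation_needed; the logs_map entry and timestamps below are illustrative, and the constants LOG_FILE_MAX_SIZE / LOG_FILE_MAX_DATES_DISTANCE_HOURS are assumed to be defined elsewhere in the backend:

log_name = 'amazon_s3_XXXXX.log'
log_object = {'size': 4096, 'first_msg_ts': 1500000000000}  # illustrative logs_map entry

new_data = 'new log line'
new_msg_ts = 1500000100000  # ms timestamp of the incoming message

if check_rotation_needed(log_object, len(new_data), new_msg_ts):
    pass  # the backend would rotate this log here before appending the new chunk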
Example #4
    def ClearExpTimeList(self,expireTime):

        nowTime = TimeUtils.GetNowUnixTime()
        for k, v in list(self.items()):  # iterate over a copy so entries can be popped safely
            creatTime = TimeUtils.String2UnixTime(v.create_time, "%Y-%m-%d %H:%M:%S")
            if nowTime - creatTime > expireTime:
                self.pop(k)
                continue
    def __init__(self, data, **kwargs):
        if self.DEBUG_MRO:
            self.registerMessage(" ")
        super(ImportSpecial, self).__init__(data, **kwargs)
        try:
            self.ID
        except:
            raise UserWarning("ID must exist for Special to be valid")

        self["start_time"] = TimeUtils.gDriveStrpTime(self["FROM"])
        self["end_time"] = TimeUtils.gDriveStrpTime(self["TO"])
 def __init__(self, json_data):
     self.JsonData = json_data
     self.InvolvedObject = InvolvedObject(json_data.get('involvedObject'))
     self.Metadata = self.JsonData.get('metadata')
     self.Namespace = self.Metadata.get('namespace')
     self.CreationTimeStamp = TimeUtils.ConvertFromGoTime(self.Metadata.get('creationTimestamp'))
     self.DeletionTimeStamp = TimeUtils.ConvertFromGoTime(self.Metadata.get('deletionTimestamp'))
     self.Reason = self.JsonData.get('reason', None)
     self.Message = self.JsonData.get('message', None)
     self.Type = self.JsonData.get('type', None)
     self.Count = self.JsonData.get('count')
     self.DetailedReason = self.__get_detailed_reason()
def scheduled_job(driver, names):
    global has_updates, match_start_time, match_end_time

    current_time = TimeUtils.get_current_time()
    LOGGER.debug_with_time("Entered scheduled_job...")

    # skip unless we are inside the match window (or running in test mode)
    if not properties.IS_TEST_MODE and (current_time < match_start_time or current_time > match_end_time):
        return

    has_updates = True
    message_content = get_match_info()

    if not has_updates:
        message_content = "No new updates right now..."

    try:
        for name in names:
            user = WebDriverWait(driver, 10).until(
                EC.presence_of_element_located(
                    (By.XPATH, "//span[@title = \"{}\"]".format(name))))

            LOGGER.debug_with_time("User found!")
            user.click()

            message_box = WebDriverWait(driver, 10).until(
                EC.presence_of_element_located(
                    (By.CLASS_NAME, properties.MESSAGE_BOX_CLASS_NAME)))

            LOGGER.debug_with_time("Message box found!")

            if len(message_content) == 0:
                continue

            message_box.send_keys(message_content)
            LOGGER.debug_with_time("Will wait to locate send_button...")

            send_button = WebDriverWait(driver, 10).until(
                EC.element_to_be_clickable(
                    (By.CLASS_NAME, properties.SEND_BUTTON_CLASS_NAME)))

            LOGGER.debug("Send_button found!")
            send_button.send_keys("\n")
            send_button.click()
    except (TimeoutException, WebDriverException) as e:
        with open(properties.ERROR_LOG_FILE_NAME, "a+") as error_logfile:
            error_logfile.write(
                "ERROR:root:[" +
                TimeUtils.get_current_time().strftime('%Y-%m-%d %H:%M:%S') +
                "] : Exception occurred => " + str(e))

        return
Example #8
 def __init__(self, device_type, udid, mode, config_file):
     self.device_type = device_type
     self.udid = udid
     self.__udid_validate()
     self.mode = mode
     self.config_file = config_file
     self.exe_id = TimeUtils.get_current_time_for_output()
     self.start_time = TimeUtils.get_current_time()
     self.rule = self.__rule()
     self.driver = self.__driver_router()
     self.flow = self.__flow_router()
     self.cache_file_name = self.__cache_file_name()
     self.window_info = self.driver.window_info
    def refresh_occupancies(self, counts):

        now_weekday = TimeUtils.get_local_weekday()
        if now_weekday < 6:
            now_weekday = -1
        now_hour = TimeUtils.get_local_hour()

        parking_areas = self.db\
            .child(OccupancyRatesRepository.__occupancy_rates_ODS_node_name)\
            .get()

        time_config_params = [(6, 'TIME_START_SATURDAY', 'TIME_END_SATURDAY'),
                              (7, 'TIME_START_SUNDAY', 'TIME_END_SUNDAY'),
                              (-1, 'TIME_START_WEEKDAYS', 'TIME_END_WEEKDAYS')]

        time_start_name, time_end_name = [(s, e)
                                          for w, s, e in time_config_params
                                          if w == now_weekday][0]
        occupancy_rates_update = {}

        for pa in parking_areas.each():
            key = pa.key()
            pa = pa.val()
            is_occupancy_rate_set = False
            properties = pa['properties']

            if 'TYPE_OF_FINANCIAL' not in properties:
                pass
            elif properties['TYPE_OF_FINANCIAL'] == 'M':
                if properties[time_start_name] <= now_hour < properties[
                        time_end_name]:
                    is_occupancy_rate_set = True

            if is_occupancy_rate_set is True:
                amount_of_parked_cars = 0
                if key in counts:
                    amount_of_parked_cars = counts[key]

                # Essentially the occupancy rate is places occupied divided by the total number of places
                occupancy_rate = min(
                    1, amount_of_parked_cars /
                    float(properties['NUMBER_OF_PLACES']))
            else:
                occupancy_rate = OccupancyRatesRepository.__unknown_occupancy_value

            update_key = OccupancyRatesRepository.__occupancy_rates_ODS_node_name \
                         + '/' + str(key) \
                         + '/' + OccupancyRatesRepository.__occupancy_rate_node_name
            occupancy_rates_update[update_key] = occupancy_rate

        self.db.update(occupancy_rates_update)
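The occupancy-rate formula from the comment above, in isolation and with made-up numbers, just to illustrate the clamping behaviour:

# 30 parked cars in a 40-place area -> 0.75; an over-full area saturates at 1.0
assert min(1, 30 / float(40)) == 0.75
assert min(1, 55 / float(40)) == 1.0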
Example #10
    def parse_info(self, spider_url):
        st, html = self.get_list_page(spider_url)
        com_html = etree.HTML(html)
        link_els = com_html.xpath(
            '//div[@id="user-repositories-list"]//li[@class="col-12 d-flex width-full py-4 border-bottom public fork"]'
        )
        link_els1 = com_html.xpath(
            '//div[@id="user-repositories-list"]//li[@class="col-12 d-flex width-full py-4 border-bottom public source"]'
        )
        self.save_data(link_els)
        self.save_data(link_els1)

        TimeUtils.sleep_long()
        Log.info("github_ data spider successfully!")
Example #11
    def store_parking_event(self, request_json, live_time):
        register_number = request_json['registerNumber']
        parking_context_type = request_json['parkingContextType']
        timestamp = TimeUtils.get_local_timestamp()

        # parking_event_json is the json stored under parkingAreaParkingEvent node
        parking_event_json = {
            'timestamp': timestamp,
            'parkingType': parking_context_type,
            'registerNumber': register_number,
        }

        if parking_context_type == 'PAID':
            parking_area_id = request_json['parkingAreaId']
            parking_event_json['parkingDurationInMinutes'] = request_json[
                'parkingDurationInMinutes']
        elif parking_context_type == 'PARKING_DISC':
            parking_area_id = 'PARKING_DISC_AREA'

        # Remove previous events from the ODS if any exist
        self.__remove_parking_event_from_ods_by(register_number)

        # Store the incoming event to ODS
        add_results = self.__add_parking_event_to_ods(parking_area_id,
                                                      parking_event_json)

        # Store notification about the event for event consumption and clean up
        # > Notifications are stored in a flattened format
        # > Better use of indexing for server side event consumers
        notification_json = {
            'parkingAreaId': parking_area_id,
            'registerNumber': register_number,
            'parkingEventId': add_results['odsId'],
            'liveUntilTime':
            TimeUtils.get_epoch_timestamp_plus_seconds(live_time),
            'parkingAreaParkingEvent': parking_event_json
        }

        # Only PAID context events are stored long term, because they are the only
        # types of events that one gets location information of.
        if parking_context_type == 'PAID':
            notification_json['willBeStoredToLongTermDataStore'] = True

        notification_add_results = self.__add_parking_event_to_notification_store(
            notification_json)

        add_results['notificationId'] = notification_add_results['name']
        add_results['timestamp'] = timestamp
        return json.dumps(add_results)
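For reference, the JSON string returned by store_parking_event carries the keys assembled above; an illustrative (not actual) example of its shape:

# {
#     "odsId": "ABC-123",            # register number used as the ODS key
#     "odsLookupId": "ABC-123",
#     "notificationId": "-KxYz...",  # id returned by the notification store
#     "timestamp": "..."             # value of TimeUtils.get_local_timestamp() at store time
# }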
def start_commentary():
    global match_start_time, match_end_time, last_comment

    current_time = TimeUtils.get_current_time()
    match_start_time = current_time.replace(
        hour=properties.MATCH_START_HOURS,
        minute=properties.MATCH_START_MINUTES,
        second=0,
        microsecond=0)
    match_end_time = current_time.replace(hour=properties.MATCH_END_HOURS,
                                          minute=properties.MATCH_END_MINUTES,
                                          second=0,
                                          microsecond=0)
    last_comment = Comment("None", "No comment yet...")
    URL = "https://web.whatsapp.com"

    if (properties.BROWSER.lower() == "safari"):
        driver = webdriver.Safari()
    elif (properties.BROWSER.lower() == "chrome"):
        driver = webdriver.Chrome("../chromedriver")
    elif (properties.BROWSER.lower() == "firefox"):
        driver = webdriver.Firefox()
    else:
        error_message = "Web browser should be one of Safari/Chrome/Firefox"
        LOGGER.error_with_time(error_message)

        return

    driver.get(URL)

    user_input = input(
        "Enter the names of the groups/users you want to text, separated by commas(Eg. - Arya Stark, Sansa Stark, Jon Snow, Bran, Rickon, Robb) : "
    )
    names = [x.strip() for x in user_input.split(',')]
    scheduler(driver, names)
Example #13
    def __add_parking_event_to_ods(self, parking_area_id, parking_event_json):

        register_number = parking_event_json['registerNumber']

        self.db\
            .child(ParkingEventRepository._parking_event_ODS_node_name)\
            .child(parking_area_id)\
            .child(register_number)\
            .set(parking_event_json)

        # lookup_json is the json stored in the Firebase lookup node
        lookup_json = {
            'registerNumber': register_number,
            'parkingAreaId': parking_area_id,
            'parkingAreaParkingEventId': register_number,
            'timestamp': parking_event_json['timestamp']
        }

        if parking_event_json['parkingType'] == 'PAID':
            lookup_json[
                'durationEndTimestamp'] = TimeUtils.get_local_timestamp(
                    parking_event_json['parkingDurationInMinutes'])

        self.db\
            .child(ParkingEventRepository._parking_event_ODS_lookup_node_name)\
            .child(register_number) \
            .set(lookup_json)

        add_results = {
            'odsId': register_number,
            'odsLookupId': register_number
        }

        return add_results
Example #14
def download_html(url, headers=None):
    st = 0
    try:
        redirect_url = get_redirect_url(url)
        response = requests.get(redirect_url, headers=headers, allow_redirects=False, timeout=(5, 60))
        if response.status_code == 200:
            st = 1
            content = response.text
        else:
            content = ''
    except Exception as e:
        content = ''
        Log.error(e, 'download html error,because of proxy,url:%s' % (url,))
        TimeUtils.sleep_short()

    return st, content
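A usage sketch of download_html; the URL and headers below are placeholders, and get_redirect_url / Log / TimeUtils come from the same module:

status, html = download_html('https://example.com', headers={'User-Agent': 'Mozilla/5.0'})
if status == 1:
    print(len(html))  # page downloaded; on any failure status stays 0 and html is ''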
Example #15
    def remove_dead_events(self):
        notifications_ref = self.db.child(
            ParkingEventRepository._parking_event_notification_store_node_name)
        dead_notifications = notifications_ref\
            .order_by_child('liveUntilTime')\
            .start_at('0')\
            .end_at(TimeUtils.get_epoch_timestamp_plus_seconds(0)).get()

        # a notification is considered dead if it is already stored to the long term data store OR never will be
        dead_notifications = [
            (dn.key(), dn.val()) for dn in dead_notifications.each()
            if 'willBeStoredToLongTermDataStore' not in dn.val()
            or dn.val()['willBeStoredToLongTermDataStore'] == False
        ]

        for dn_id, dn in dead_notifications:

            # Remove dead events
            self.__remove_parking_events_from_ods_by(
                dn['parkingAreaId'], dn['registerNumber'],
                dn['parkingAreaParkingEvent']['timestamp'])

            # Remove dead notifications
            self.db.child(ParkingEventRepository._parking_event_notification_store_node_name)\
                .child(dn_id)\
                .remove()
Example #16
    def __init__(self, oldMObject, oldSObject, lastSync=None):
        super(SyncUpdate, self).__init__()
        for oldObject in oldMObject, oldSObject:
            assert isinstance(oldObject, ImportObject)
        if not lastSync:
            lastSync = self.default_lastSync
        # print "Creating SyncUpdate: ", oldMObject.__repr__(), oldSObject.__repr__()
        self.oldMObject = oldMObject
        self.oldSObject = oldSObject
        self.tTime = TimeUtils.wpStrptime(lastSync)

        self.newSObject = None
        self.newMObject = None
        self.static = True
        self.importantStatic = True
        self.syncWarnings = OrderedDict()
        self.syncPasses = OrderedDict()
        self.syncProblematics = OrderedDict()
        self.updates = 0
        self.importantUpdates = 0
        self.importantCols = []
        self.mDeltas = False
        self.sDeltas = False

        self.mTime = 0
        self.sTime = 0
        self.bTime = 0
Example #17
 def exe(self):
     while self.flow.action_count < self.rule.MAX_ACTION_COUNT_OPTIONAL and TimeUtils.duration(
             self.start_time) < self.rule.MAX_EXE_DURATION_OPTIONAL:
         try:
             self.flow.exe()
             # All execution records live in memory; to keep them from growing too large, part of them is flushed to the local cache
             if self.flow.action_count % 100 == 0:
                 self.to_local_cache()
         except KeyboardInterrupt:
             logger.info(
                 "KeyboardInterrupt caught. Program Stopped Manually")
             logger.info("DONE")
             self.to_local_cache()
             self.video_generator()
             break
         # Used for target mode
         except ElementFoundError:
             logger.info("target settings for xpath is incorrect. BREAK!!!")
             break
         except Exception as e:
             logger.error("error type is {0}".format(e.__class__.__name__))
             logger.error("error message is {0}".format(e))
             pass
     self.to_local_cache()
     self.video_generator()
     logger.info("DONE")
Example #18
 def on_message(self,room,message, member, source,**kwargs):
     jid = str(room.jid)
     reg = '{}/'.format(jid)
     member_jid = member.conversation_jid
     # Log.info("Received a room message", room)
     win = self.roomList[jid]['win']
     if aioxmpp.structs.LanguageTag.fromstr('en') in message.body:
         win.chatWin.append('({}){}:\n{}'.format(TimeUtils.getTimeWithoutDay(),member_jid,str(message.body[aioxmpp.structs.LanguageTag.fromstr('en')])))
Example #19
 def getInfoComponents(self, info_fmt="%s"):
     info_components = super(SyncUpdate_Usr, self).getInfoComponents(info_fmt)
     info_components += [
         (info_fmt % ("Last Sale", TimeUtils.wpTimeToString(self.bTime))) if self.bTime else "No Last Sale",
         (info_fmt % ("%s Mod Time" % self.master_name, TimeUtils.wpTimeToString(self.mTime))) if self.mMod else "%s Not Modded" % self.master_name,
         (info_fmt % ("%s Mod Time" % self.slave_name, TimeUtils.wpTimeToString(self.sTime))) if self.sMod else "%s Not Modded" % self.slave_name
     ]
     for tracking_name, cols in self.colData.getACTTrackedCols().items():
         col = cols[0]
         mColModTime = self.getMColModTime(col)
         sColModTime = self.getSColModTime(col)
         if mColModTime or sColModTime:
             info_components.append(info_fmt % (tracking_name, '%s: %s; %s: %s' % (
                 self.master_name,
                 TimeUtils.wpTimeToString(mColModTime),
                 self.slave_name,
                 TimeUtils.wpTimeToString(sColModTime),
             )))
     return info_components
Example #20
    def put_data_to_local_log(self, log_name, token, data):
        """
        The method pushes the given data (a string) to the backend's data queue for the given log, which is
        identified by log_name.

        Before placing the new data on the queue the method checks whether a log named log_name already exists
        in the logs map; if there is no such log - it creates one.

        :param log_name - str - name of the log file without the path to it (to reference it in the logs map):
        :param token - str - token associated with the given log:
        :param data - str - data to be pushed to the data queue:

        :return:
        """
        try:
            try:
                self.log_map_lock.acquire()

                if data is None:
                    return

                timestamp = TimeUtils.get_current_time_as_timestamp_as_ms()

                if self.logs_map.get(
                        log_name
                ) is None:  # New log - need to add it to the map
                    self.add_to_local_logs_map(log_name, token, timestamp)
                else:
                    # First message timestamp is set to None after log rotation.
                    if self.logs_map[log_name]['first_msg_ts'] is None:
                        self.logs_map[log_name]['first_msg_ts'] = timestamp
            finally:
                self.log_map_lock.release()

            if not self.no_timestamps:
                data = str(timestamp) + ' ' + data

            data_size = len(data)

            new_data_item = {
                'log_name': log_name,
                'token': token,
                'data': data,
                'size': data_size,
                'timestamp': timestamp
            }

            while len(self.data_queue) == self.data_queue.maxlen:
                time.sleep(DATA_QUEUE_FREE_WAIT_PERIOD)

            self.data_queue.appendleft(new_data_item)

            self.data_queue_thread.data_consumer_event.set()
        except threading.ThreadError, e:
            log.error('Cannot acquire log write lock! Error %s' % e.message)
            raise
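The queue handshake used above boils down to a bounded deque plus an Event shared with the consumer thread; a self-contained sketch of that pattern (capacity and payload are illustrative, this is not the backend's actual code):

import collections
import threading

data_queue = collections.deque(maxlen=1000)  # bounded queue, like self.data_queue
data_consumer_event = threading.Event()      # like self.data_queue_thread.data_consumer_event

# producer side: append the new item, then wake the consumer
data_queue.appendleft({'log_name': 'app.log', 'data': 'hello', 'size': 5})
data_consumer_event.set()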
Example #21
class Log(object):
    # Define logfile directory
    log_dir = os.path.join(PROJECT_PATH, "logs")

    # Define default logfile format.

    file_name_format = TimeUtils.get_current_time_for_log()
    console_msg_format = '%(asctime)s %(levelname)-8s: %(message)s'
    file_msg_format = '%(asctime)s %(levelname)-8s: %(message)s'
    # file_msg_format = '%(asctime)s %(levelname)-8s: %(module)-2s:%(funcName)-2s: %(process)-2d: %(processName)-2s:  %(name)s \n%(message)s'
    # console_msg_format = '%(asctime)s %(levelname)-8s: %(module)-2s:%(funcName)-2s: %(process)-2d: %(processName)-2s:  %(name)s \n%(message)s'

    # Define the log level
    log_level = logging.INFO

    # Define the log rotation criteria.
    # max_bytes = 1024 ** 2
    # backup_count = 100

    @staticmethod
    def logger(logger_name=None):
        # Create the root logger.
        logger = logging.getLogger(logger_name)
        logger.setLevel(Log.log_level)

        # Validate the given directory.
        Log.log_dir = os.path.normpath(Log.log_dir)

        # Create a folder for the logfile.
        FileUtils.make_dir(Log.log_dir)

        # Build the logfile name
        filename = Log.file_name_format + ".log"
        filename = os.path.join(Log.log_dir, filename)

        # Set up logging to the logfile
        file_handler = RotatingFileHandler(
            filename=filename
            # ,maxBytes=Log.max_bytes, backupCount=Log.backup_count
        )
        file_handler.setLevel(Log.log_level)
        file_formatter = logging.Formatter(Log.file_msg_format)
        file_handler.setFormatter(file_formatter)
        logger.addHandler(file_handler)

        # Set up logging to console
        stream_handler = logging.StreamHandler()
        stream_handler.setLevel(Log.log_level)
        stream_formatter = logging.Formatter(Log.console_msg_format)
        stream_handler.setFormatter(stream_formatter)
        logger.addHandler(stream_handler)

        return logger
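A minimal usage sketch of the Log helper above (the logger name and message are illustrative):

log = Log.logger('spider')
log.info('spider started')  # goes to both the console and logs/<timestamp>.log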
Example #22
 def __parse_trip(self, record):
     start_time_tuple = record[2]
     end_time_tuple = record[3]
     start_time = start_time_tuple.timestamp()
     end_time = end_time_tuple.timestamp()
     val = {
         "vehicle_id": record[1],
         "start_time": start_time,
         "start_time_day_w": self.__get_day_of_week(start_time),
         "start_time_day_m": self.__get_day_of_month(start_time),
         "start_time_h": self.__get_hour_of_day(start_time),
         "end_time": end_time,
         "end_time_day_w": self.__get_day_of_week(end_time),
         "end_time_day_m": self.__get_day_of_month(end_time),
         "end_time_h": self.__get_hour_of_day(end_time),
         "duration": int(record[4]),
         "distance": float(record[5]),
         "type": record[6],
         "src": self.__get_coordinates(record[7]),
         "dest": self.__get_coordinates(record[8]),
         "start_time_str": TimeUtils.format_timestamp(start_time),
         "end_time_str": TimeUtils.format_timestamp(end_time)
     }
     return val
Example #24
def create_update_user(user_identity, user_request_details: dict, user_identity_provided: bool):
    phone_length_is_valid = is_phone_vaild(user_request_details.setdefault('phone_number', None))

    if not phone_length_is_valid:
        return ErrorEnums.INVALID_PHONE_LENGTH_ERROR.value

    if user_request_details.get('date_of_birth'):
        user_request_details['date_of_birth'] = TimeUtils.convert_time(user_request_details.get('date_of_birth'))

    if not user_identity_provided:
        if not validate_email_format(user_request_details.get('email')):
            return ErrorEnums.INVALID_EMAIL_FORMAT_ERROR.value
        if not validate_password_format(user_request_details.get('password')):
            return ErrorEnums.INVALID_PASSWORD_ERROR.value
        user_request_details['password'] = UserSecurity.encrypt_pass(user_request_details.get('password'))
        created_user = UserDatabaseService.create_user(user_request_details)
        if isinstance(created_user, str):
            return created_user
        return dataStateAccess.UserDTO.user_dto(created_user)

    if user_request_details.get('email'):
        if not validate_email_format(user_request_details.get('email')):
            return ErrorEnums.INVALID_EMAIL_FORMAT_ERROR.value

        if user_request_details.get('email') and user_identity.get('email') != user_request_details.get('email'):
            verify_existence_for_user_email = UserDatabaseService.get_active_user_by_email(user_request_details.get('email'))

            if verify_existence_for_user_email:
                return ErrorEnums.EMAIL_ALREADY_EXISTS_ERROR.value

    if user_request_details.get('username'):
        if user_request_details.get('username') and user_identity.get('username') != user_request_details.get('username'):
            verify_username_existence = UserDatabaseService.get_active_user_by_username(user_request_details.get('username'))
            verify_alt_username_existence = UserDatabaseService().get_user_by_alt_username(user_request_details.get('username'))

            if verify_username_existence or verify_alt_username_existence:
                return ErrorEnums.USER_NAME_ALREADY_EXISTS.value

    updated_user = UserDatabaseService.update_user_generic_data(user_identity, user_request_details)
    if isinstance(updated_user, str):
        return updated_user
    return dataStateAccess.UserDTO.user_dto(updated_user)
Example #25
 def get_bounds(self):
     activity_key = self.current_activity
     for k in self.exe_records[activity_key].keys():
         if k not in ["roll_back_steps", "status"]:
             for kk, vv in self.exe_records[activity_key][k].items():
                 if vv["covered"] is False:
                     bounds = vv["bounds"]
                     self.focus_bounds = bounds
                     self.full_path = kk
                     self.exe_records[activity_key][k][kk][
                         "covered"] = True
                     timestamp = TimeUtils.get_current_timestamp()
                     self.exe_records[activity_key][k][kk][
                         "timestamp"] = timestamp
                     logger.info("exe records is {0}".format(json.dumps(self.exe_records, ensure_ascii=False)))
                     logger.info("info : current focus bounds : {0}".format(self.focus_bounds))
                     return True
     self.exe_records[activity_key]["status"] = RecordsStatus.DONE
     return False
Example #26
    def get_occuring_paid_event_counts(self):

        now = TimeUtils.get_local_timestamp()
        counts = {}

        occuring_events = self.db \
            .child('parkingEventLookup') \
            .order_by_child('durationEndTimestamp') \
            .start_at(now).end_at('9') \
            .get()

        occuring_events = self.db.sort(occuring_events, 'parkingAreaId')

        # count the currently occurring parking events for each area
        for k, g in groupby(occuring_events.each(),
                            lambda i: i.val()['parkingAreaId']):
            counts[str(k)] = sum(1 for i in g)

        return counts
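itertools.groupby only groups adjacent items, which is why the events are sorted by parkingAreaId before counting; the counting idiom in isolation, with made-up area ids:

from itertools import groupby

area_ids = sorted(['A2', 'A1', 'A2', 'A2'])
counts = {k: sum(1 for _ in g) for k, g in groupby(area_ids)}
print(counts)  # {'A1': 1, 'A2': 3}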
def test_send_messages_on_whatsapp():
    global match_start_time
    global last_comment

    current_time = TimeUtils.get_current_time()
    match_start_time = current_time.replace(
        hour=properties.MATCH_START_HOURS,
        minute=properties.MATCH_START_MINUTES,
        second=0,
        microsecond=0)
    last_comment = Comment("None", "No comment yet...")
    # URL = "https://web.whatsapp.com"

    # driver = webdriver.Safari()
    # driver.get(URL)

    # user_input = input("Enter the names of the groups/users you want to text, separated by commas(Eg. - Arya Stark, Sansa Stark, Jon Snow, Bran, Rickon, Robb) : ")
    # names = [x.strip() for x in user_input.split(',')]

    test_scheduled_job()
Example #28
 def displayUpdateList(self, updateList, tablefmt=None):
     if updateList:
         delimeter = "<br/>" if tablefmt=="html" else "\n"
         subject_fmt = "<h4>%s</h4>" if tablefmt=="html" else "%s"
         # header = ["Column", "Reason", "Old", "New"]
         header = OrderedDict([
             ('col', 'Column'),
             ('reason', 'Reason'),
             ('oldLoserValue', 'Old'),
             ('oldWinnerValue', 'New'),
             ('mColTime', 'M TIME'),
             ('sColTime', 'S TIME'),
         ])
         subjects = {}
         for warnings in updateList.values():
             for warning in warnings:
                 subject = warning['subject']
                 if subject not in subjects.keys():
                     subjects[subject] = []
                 warning_fmtd = dict([\
                     (key, SanitationUtils.sanitizeForTable(val))\
                     for key, val in warning.items()\
                     if key in header
                 ])
                 for key in ['mColTime', 'sColTime']:
                     try:
                         rawTime = int(warning[key])
                         if rawTime:
                             warning_fmtd[key] = TimeUtils.wpTimeToString(rawTime)
                     except Exception, e:
                         pass
                 subjects[subject].append(warning_fmtd)
         tables = []
         for subject, warnings in subjects.items():
             antiSubject_fmtd = subject_fmt % self.opposite_src(subject)
             table = [header]+warnings
             table_fmtd = tabulate(table, headers='firstrow', \
                                   tablefmt=tablefmt )
             tables.append(delimeter.join([antiSubject_fmtd, table_fmtd]))
         return delimeter.join(tables)
Example #29
    def btnListener(self, sender):

        btnName = sender.objectName()
        data = {'JID': self.jid, 'action': btnName}
        user_icon = r"D:\CodeSave\py\ChatChat\TestProject\src\images\CustomerService.png"
        ss = '''<a>({})自己:</a><a>{}</a>'''
        if btnName == 'sendmsg':
            if self.inputWin.toPlainText() != '':
                data['msg'] = str(self.inputWin.toPlainText())
                self.inputWin.setText('')
                self.chatWin.append(
                    ss.format(TimeUtils.getTimeWithoutDay(), data['msg']))
                self._sendMsg2Friend.emit(data)
                self.insertMsg(userName=self.selfJid,
                               isSelf=1,
                               chatWith=str(self.jid),
                               messageFrom=str(self.jid),
                               messageContext=data['msg'],
                               createTime=time.time())
                return
            else:
                return
Example #30
def addGisInfoDetailField(taskJobId):
    gbi = Session.query(GisInfoDetailField).filter(
        GisInfoDetailField.taskJobId == taskJobId).first()
    if gbi != None:
        return
    urlReCityFieldList = Session.query(UrlReCityField).all()
    searchTaskJob = loadTaskById(taskJobId)
    gisBigInfo = queryGisDetail(taskJobId)
    areaList = Session.query(AreaField).all()
    # jobTemplate=TemplateDao.queryTemplateByType(searchTaskJob.type)
    area = areaList[random.randint(0, len(areaList) - 1)]
    urlReCity = urlReCityFieldList[random.randint(0,
                                                  len(urlReCityFieldList) - 1)]
    gisInfoDetailField = GisInfoDetailField()
    gisInfoDetailField.contentType = searchTaskJob.type
    gisInfoDetailField.createTtime = TimeUtils.get_time_beforeNow(
        0, '%Y-%m-%d %H:%M')
    gisInfoDetailField.detailAreaId = area.cityId
    gisInfoDetailField.detailUrl = urlReCity.url
    gisInfoDetailField.detailKv = searchTaskJob.key
    gisInfoDetailField.networkCnt = random.randint(100, 500)
    gisInfoDetailField.baiduSearchCnt = random.randint(10000, 50000)
    gisInfoDetailField.haosouSearchCnt = random.randint(10000, 50000)
    gisInfoDetailField.sougouSearchCnt = random.randint(10000, 50000)
    gisInfoDetailField.weixinSearchCnt = random.randint(10000, 50000)
    gisInfoDetailField.allCnt = random.randint(1000, 5000)
    gisInfoDetailField.suitability = random.randint(0, 100)
    gisInfoDetailField.suitabilityRange = random.randint(0, 100)
    gisInfoDetailField.genuineInfo = gisBigInfo.genuineInfo if gisBigInfo != None else "人民的名义;李路;2017-05"
    gisInfoDetailField.piracyInfo = ''
    gisInfoDetailField.piracySource = random.randint(0, 100)
    gisInfoDetailField.estimatedImpactCnt = random.randint(0, 100)
    gisInfoDetailField.transmissionRate = random.randint(100, 500)
    gisInfoDetailField.spreadScope = random.randint(100, 500)
    gisInfoDetailField.resourceSize = random.randint(1000, 10000)
    gisInfoDetailField.resourceInDate = datetime.now()
    gisInfoDetailField.taskJobId = searchTaskJob.id
    Session.add(gisInfoDetailField)
    def test_duration_timestamp(self):
        """
        Test if the duration timestamps get set properly in paid parking context.
        :return:
        """

        # Assign
        mock_now = datetime(2016, 1, 1, 1, 1, 1, 1)
        with mock.patch('datetime.datetime') as dt_mock:
            dt_mock.now.return_value = mock_now
            self.parking_event_repo.db = MockDb().with_paid_init()

            expected_parking_context_type = 'PAID'
            expected_parking_duration_in_minutes = 60

            request_json = TestUtils.build_request(
                parking_context_type=expected_parking_context_type,
                parking_duration_in_minutes=
                expected_parking_duration_in_minutes,
                parking_area_id=paid_parking_area_id,
                payment_method_type=paid_parking_payment_method_type,
                payment_receipt=paid_parking_payment_receipt)

            # Act
            result = self.parking_event_repo.store_parking_event(
                request_json, 100)

            # Assert
            _, actual_event = self.parking_event_repo.db. \
                get_single_event_key_and_value(paid_parking_area_id)

            _, lookup = self.parking_event_repo.db \
                .get_single_lookup_key_and_value(parking_disc_parking_area_id)
            actual_end_timestamp = lookup['durationEndTimestamp']
            expected_end_timestamp = TimeUtils.get_local_timestamp(
                actual_event['parkingDurationInMinutes'])
            self.assertEqual(expected_end_timestamp, actual_end_timestamp)
Example #32
    def on_message(self, conversation, message, member, source, **kwargs):
        '''
        :param conversation
        :param message the received message <aioxmpp.Message>
        :param member the member who sent the message <aioxmpp.im.conversation.AbstractConversationMember>
        '''
        # Log.info("member.conversation_jid",member.conversation_jid)
        # Log.info("member.conversation_jid",type(member.conversation_jid))
        jid = str(conversation.jid)
        win = self.conversationList[jid]['win']
        if aioxmpp.structs.LanguageTag.fromstr('en') in message.body:

            self.insertMsg(
                userName=str(self.core.jid),
                isSelf=0,
                chatWith=str(conversation.jid),
                messageFrom=jid,
                messageContext=str(
                    message.body[aioxmpp.structs.LanguageTag.fromstr('en')]),
                createTime=time.time())
            win.chatWin.append('({}){}:\n{}'.format(
                TimeUtils.getTimeWithoutDay(), jid,
                str(message.body[aioxmpp.structs.LanguageTag.fromstr('en')])))
Example #33
    def save_upload_file(self, post_streamer, user_id, user_category_info,
                         file_title):
        """
        Returns the file path and the file id
        :param post_streamer:
        :param user_id: id of the file's owner
        :param user_category_info: category info of the file's owner
        :param file_title: the file name
        :return:
        """
        def clean_file_name(file_name):
            """
            Strip disallowed characters from the file name (using a regular expression)
            """
            re_str = r"[\/\\\:\*\?\"\<\>\| _]"  # '/\:*?"<>|'
            return re.sub(re_str, "", file_name)

        # Collect the file info
        file_info = {}
        for part in post_streamer.parts:
            """
            [
                { headers:[ {params, name, value}, {params, name, value} ], tempfile, size },    # part

                { headers:[ {params, name, value}, ], tempfile, size },  # part
                { headers:[ {params, name, value}, ], tempfile, size },  # part
                { headers:[ {params, name, value}, ], tempfile, size },  # part
                { headers:[ {params, name, value}, ], tempfile, size },  # part
                { headers:[ {params, name, value}, ], tempfile, size },  # part
            ]
            0. post_streamer.parts is a list object containing six dict elements (headers, tempfile, size)
            1. size - the file size
            2. tempfile - a temporary file object (needed when re-saving the file)
            4. headers - a list [ {params, name, value}, ]
            5. of the six dicts, the first one is the one we need

            """
            try:
                file_args = {}
                part["tmpfile"].close()
                # Get the file suffix
                params = part["headers"][0].get("params", None)
                filename = params[
                    'filename']  # only the part holding the uploaded file has this key; the raised exception is used to skip the other parts
                fill_suffix = PathUtils.get_file_suffix(filename)
                file_args['id'] = TimeUtils.time_id()
                file_args['user_id'] = user_id
                file_args['user_category'] = user_category_info['category']
                file_args['file_category'] = 'attachment'
                file_args['created'] = TimeUtils.datetime_date_simple()
                file_args['size'] = part["size"]
                file_args['title'] = clean_file_name(file_title)
                if len(file_args['title']) == 0:
                    file_args['title'] = str(file_args['id'])
                # File type
                full_file_type = part["headers"][1].get("value", "text/plain")
                if '/' in full_file_type:
                    file_args['type'] = full_file_type.split('/')[0]
                elif '\\' in full_file_type:
                    file_args['type'] = full_file_type.split('\\')[0]
                elif '\\\\' in full_file_type:
                    file_args['type'] = full_file_type.split('\\\\')[0]
                # File name: id_user file_title suffix
                name_file = StringUtils.connect(file_args['user_category'],
                                                '_', file_args['user_id'], '-',
                                                file_args['id'], '-',
                                                file_args['title'],
                                                fill_suffix)
                path_folder_relative = self.make_path_folder_relative_no_static(
                )
                path_folder_absolute = PathUtils.connect(
                    path.PATH_STATIC, path_folder_relative)
                file_args['path'] = PathUtils.to_url(
                    PathUtils.connect(path_folder_relative, name_file))
                path_save = PathUtils.connect(path_folder_absolute, name_file)
                if isinstance(path_save, type('')):
                    path_save = path_save.decode('utf-8')
                # Create the folder path
                if not os.path.exists(path_folder_absolute):
                    os.makedirs(path_folder_absolute)
                # Save the file for good (this step turns the temporary file into the final file - the key step)
                os.rename(part["tmpfile"].name, path_save)
                if not os.path.exists(path_save):  # check whether the save succeeded
                    file_info = {}
                    continue
                file_info = file_args
                # break # must not break out of the loop; the raised exceptions are needed to delete the temporary files
            except Exception as e:
                # traceback.print_exc()
                part["tmpfile"].close()
                os.unlink(part["tmpfile"].name)  # delete the temporary file (there are several parts; only one is the uploaded file)
        # Write the file info to the database
        if len(file_info) > 0:
            result = self.insert_one_item(user_category_info['file_table'],
                                          **file_info)
            if result != -1:
                return {
                    '': file_info['id'],
                    'file_path': file_info['path'],
                    'file_title': file_info['title']
                }
            else:
                return {}
        else:
            return {}
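A quick illustration of what clean_file_name's regular expression strips (note that spaces and underscores are removed as well); the file name is made up:

import re

re_str = r"[\/\\\:\*\?\"\<\>\| _]"
print(re.sub(re_str, "", 'my report_v2?.pdf'))  # -> myreportv2.pdf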
Example #34
    def save_image(self, user_id, user_category, file_save):
        """
        Save an image uploaded from the rich-text editor: the image is compressed and converted to JPEG
        :param user_id: id of the image's owner
        :param user_category: category info of the image's owner; this dict provides many useful values
        :param file_save: the file to save
        :return: the path the image was saved to (including the static path)
        """
        try:
            # Get the file
            filename = os.path.splitext(
                file_save['filename'])[0]  # only the part holding the uploaded file has this key; the raised exception is used to skip the other parts
            # File type
            file_type_full = file_save['content_type']  # u'image/jpeg'
            if '/' in file_type_full:  # image
                file_type = file_type_full.split('/')[0]
            elif '\\' in file_type_full:
                file_type = file_type_full.split('\\')[0]
            elif '\\\\' in file_type_full:
                file_type = file_type_full.split('\\\\')[0]
            else:
                file_type = 'unknown'

            # Build the file name
            name_use = self.make_file_name_with_user_id(
                user_category=user_category['category'],
                user_id=user_id,
                file_category='u',
                file_suffix='.jpg')
            # Relative folder path: files/upload/2017/3/24
            path_folder_relative_no_static = self.make_path_folder_relative_no_static(
            )
            # Absolute folder path: ..../static/files/upload/2017/3/24
            path_folder_absolute = PathUtils.connect(
                path.PATH_STATIC, path_folder_relative_no_static)
            # Absolute path the file is saved to
            path_save_use = PathUtils.connect(path_folder_absolute, name_use)
            # Path stored in the database
            path_db_use = PathUtils.to_url(
                PathUtils.connect(path_folder_relative_no_static, name_use))
            # Create the save path
            if not os.path.exists(path_folder_absolute):
                os.makedirs(path_folder_absolute)

            # Build the image from the byte string and save it as jpg (format is decided by the save path's suffix)
            Image.open(StringIO.StringIO(file_save['body'])).save(
                path_save_use, format='jpeg')
            # Compress if the original file size exceeds the limit
            if os.path.getsize(
                    path_save_use) > user_category['image_size_limit']:
                self.compress_image(image_file_path=path_save_use,
                                    category_info=user_category)
            file_size = os.path.getsize(path_save_use)
            image_id = TimeUtils.time_id()
            date_time = TimeUtils.datetime_date_simple()
            param_origin = {
                'id': image_id,
                'user_id': user_id,
                'user_category': user_category['category'],
                'file_category': 'use',
                'title': name_use,
                'summary': filename,
                'type': file_type,
                'created': date_time,
                'size': file_size,
                'path': path_db_use
            }
            self.insert_one_item(user_category['file_table'], **param_origin)

            # Return the data
            return self.add_static(path_db_use)
        except Exception as e:
            traceback.print_exc()
            app_log.error(e)
            return ''
 def act_modtime(self):
     return TimeUtils.actStrptime(self.get("Edited in Act", 0))
    def rotate_log(self, log_object, log_name):
        """
        The method performs log rotation, which consists of the following operations:
        1) The method tries to get the first timestamp for the given log object (stored in the 'first_msg_ts' field);
        2) If it is None for some reason, the method uses the current timestamp with ms precision to generate the name;
        3) If a log with that name already exists, the method generates a new name by extending the old one with '_INDEX' (1,2,3,...);
        4) Once the new name is generated, the method renames the given *.log file to it (e.g. 1234567890.log);
        5) After renaming, the method adjusts the log object's info: 1 - sets the total written data size to 0; 2 - sets the first
           message timestamp to None (this is a completely new file - it does not contain any timestamps yet); 3 - updates the last
           message timestamp field with the last_data_timestamp value - it is needed by the check_rotation_needed() method on
           the next iteration of new message processing for this log object;
        6) Pushes the new name of the rotated file onto LogsArchiver's queue and signals that the file needs to be
           compressed.

        :param log_object - dict - log object from logs_map:
        :param log_name - str - the name of the currently rotated log (like 'amazon_s3_XXXXX.log'):

        :return rotation_success - boolean - signals whether the log rotation succeeded:
        """
        old_file_name = log_object['local_log_file']
        rotation_success = False
        new_file_name = ARCHIVING_BACKEND_BASE_DIRECTORY + log_object['token'] + '/'

        if not os.path.exists(new_file_name):
            os.makedirs(new_file_name)

        # Generate a new name for the rotated log from its first backend timestamp, or from the
        # current timestamp (current date-time) if the former is absent for some reason, e.g.: 212347772784.log
        if log_object['first_msg_ts'] is None:
            numeric_name_part = str(TimeUtils.get_current_time_as_timestamp_as_ms())
            new_file_name += numeric_name_part + '.log'
        else:
            numeric_name_part = str(log_object['first_msg_ts'])
            new_file_name += numeric_name_part + '.log'

        # Check whether any existing file already has the name we've just generated. If so,
        # generate a new name by appending an "_N" suffix to it (N=1,2,3...)
        if os.path.exists(new_file_name):
            generated_name = ''
            for i in range(1, MAX_FILE_NAME_INDEX + 1):
                generated_name = ARCHIVING_BACKEND_BASE_DIRECTORY + log_object['token'] + '/' + numeric_name_part + '_' + str(i) + '.log'
                if not os.path.exists(generated_name):
                    break
                if i == MAX_FILE_NAME_INDEX:
                    raise Exception('Cannot generate new rotation name: all indexes from _%d to _%d '
                                    'are already used!' % (1, MAX_FILE_NAME_INDEX))
            log.info('Rotation: %s already exists, so %s name will be used for the log rotation.' %
                     (new_file_name, generated_name))
            new_file_name = generated_name

        try:
            try:
                self.log_map_lock.acquire()
                os.rename(old_file_name, new_file_name)

                log_object['size'] = 0  # Size of written data is cleared due to the file rotation.
                log_object['first_msg_ts'] = None      # The same is for the first message's timestamp.

                self.logs_map[log_name] = log_object
                rotation_success = True

                if not self.no_logs_compressing:
                    self.logs_compressor.compress_async(new_file_name, self.compress_callback)

            except threading.ThreadError, e:
                log.error('Cannot acquire log map lock! Error %s' % e.message)
            except Exception, e:
                if hasattr(e, 'strerror'):
                    msg = e.strerror  # For IO errors and other system-related ones
                else:
                    msg = e.message
                log.error('Cannot rename %s to %s for log rotation. Error: %s' % (old_file_name, new_file_name, msg))
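The collision handling above amounts to appending an '_N' index to the generated name until an unused one is found; a standalone sketch of that naming scheme (an illustration, not the backend's actual helper):

import os

def next_rotation_name(base_dir, token, numeric_name_part, max_index=100):
    candidate = os.path.join(base_dir, token, numeric_name_part + '.log')
    index = 1
    while os.path.exists(candidate):
        if index > max_index:
            raise Exception('all rotation name indexes up to _%d are already used' % max_index)
        candidate = os.path.join(base_dir, token, '%s_%d.log' % (numeric_name_part, index))
        index += 1
    return candidate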
Example #37
 def parseMTime(self, rawMTime):
     return TimeUtils.actServerToLocalTime(TimeUtils.actStrptime(rawMTime))
Example #38
 def parseSTime(self, rawSTime):
     return TimeUtils.wpServerToLocalTime(TimeUtils.wpStrptime(rawSTime))
 def last_sale(self):
     return TimeUtils.actStrptime(self.get("Last Sale", 0))
 def wp_modtime(self):
     return TimeUtils.wpServerToLocalTime(TimeUtils.wpStrptime(self.get("Edited in Wordpress", 0)))
Example #41
import yaml
import os
import paramiko
from coldata import ColData_User
from csvparse_flat import CSVParse_User, UsrObjList
from utils import TimeUtils, SanitationUtils
from itertools import chain

srcFolder = "../source/"
inFolder = "../input/"
remoteExportFolder = "act_usr_exp/"
yamlPath = "merger_config.yaml"

importName = TimeUtils.getMsTimeStamp()
dateStamp = TimeUtils.getDateStamp()

with open(yamlPath) as stream:
    config = yaml.load(stream)

    m_ssh_user = config.get('test_m_ssh_user')
    m_ssh_pass = config.get('test_m_ssh_pass')
    m_ssh_host = config.get('test_m_ssh_host')
    m_ssh_port = config.get('test_m_ssh_port', 22)
    m_db_host = config.get('test_m_db_host', '127.0.0.1')
    m_db_user = config.get('test_m_db_user')
    m_db_pass = config.get('test_m_db_pass')
    m_db_name = config.get('test_m_db_name')
    m_command = config.get('test_m_command')

exportFilename = "act_x_test_"+importName+".csv"
remoteExportPath = os.path.join(remoteExportFolder, exportFilename)
Example #42
    hotout = LoadHotOutDoc()
    print "hotout length:", len(hotout)

    hotin = LoadHotInDoc()
    print "hotin length:", len(hotin)

    history = LoadHistory()
    print "history length:", len(history)

    newDoc = LoadNewDoc()
    print "newDoc length:", len(newDoc)

    res = {}

    nowTime = tu.GetNowUnixTime()

    for k, v in newDoc.items():

        if k in hotout:
            continue

        if k in hotin:
            continue

        if k in history:
            continue

        cTime = tu.String2UnixTime(v.create_time, "%Y-%m-%d %H:%M:%S")

        if nowTime - cTime > 24 * 60 * 60:
Example #43
    def rotate_log(self, log_object, log_name):
        """
        The method performs log rotation which consists of next operations:
        1) The method tries to get the first timestamp for the given log object (which is stored in 'first_msg_ts' field);
        2) If it is None by some reason, the method uses current timestamp with ms. precision to generate the name;
        3) If such log exists - the method tries to generate new name, expanding the old one with '_INDEX' (1,2,3,...)
        4) After new name is generated the method renames given *.log file with new generated name (e.g. 1234567890.log)
        5) After renaming the method adjusts log object's info: 1 - sets total written data size to 0; 2 - sets first
           log's timestamp to None (this is completely new file - it does not contain timestamps yet); 3 - alters last
           message timestamp field with last_data_timestamp value - it is needed by check_rotation_needed() method on
           next iteration of new message processing for this log object.
        6) Pushes new name of the rotated file into LogsArchiver's queue and signalizes that the file needs to be
           compressed.

        :param log_object - dict - log object from logs_map:
        :param log_name - str - the name of currently rotated log (like 'amazon_s3_XXXXX.log'):

        :return rotation_success - boolean - signalizes whether log rotation succeeded:
        """
        old_file_name = log_object['local_log_file']
        rotation_success = False
        new_file_name = ARCHIVING_BACKEND_BASE_DIRECTORY + log_object[
            'token'] + '/'

        if not os.path.exists(new_file_name):
            os.makedirs(new_file_name)

        # Generate new name for the rotated log from it's first backend timestamp or from the
        # current timestamp (current date-time) if it is absent by some reason, like: 212347772784.log
        if log_object['first_msg_ts'] is None:
            numeric_name_part = str(
                TimeUtils.get_current_time_as_timestamp_as_ms())
            new_file_name += numeric_name_part + '.log'
        else:
            numeric_name_part = str(log_object['first_msg_ts'])
            new_file_name += numeric_name_part + '.log'

        # Check whether a file with the newly generated name already exists. If so, generate a new
        # name by appending an "_N" suffix to it (N=1,2,3...)
        if os.path.exists(new_file_name):
            generated_name = ''
            for i in range(1, MAX_FILE_NAME_INDEX + 1):
                generated_name = (ARCHIVING_BACKEND_BASE_DIRECTORY + log_object['token'] + '/'
                                  + numeric_name_part + '_' + str(i) + '.log')
                if not os.path.exists(generated_name):
                    break
                if i == MAX_FILE_NAME_INDEX:
                    raise Exception(
                        'Cannot generate new rotation name: all indexes from _%d to _%d '
                        'are already used!' % (1, MAX_FILE_NAME_INDEX))
            log.info(
                'Rotation: %s already exists, so %s name will be used for the log rotation.'
                % (new_file_name, generated_name))
            new_file_name = generated_name

        try:
            try:
                self.log_map_lock.acquire()
                os.rename(old_file_name, new_file_name)

                log_object['size'] = 0  # Size of written data is cleared due to the file rotation.
                log_object['first_msg_ts'] = None  # The same is true for the first message's timestamp.

                self.logs_map[log_name] = log_object
                rotation_success = True

                if not self.no_logs_compressing:
                    self.logs_compressor.compress_async(
                        new_file_name, self.compress_callback)

            except threading.ThreadError, e:
                log.error('Cannot acquire log map lock! Error %s' % e.message)
            except Exception, e:
                if hasattr(e, 'strerror'):
                    msg = e.strerror  # For IO errors and other system-related ones
                else:
                    msg = e.message
                log.error(
                    'Cannot rename %s to %s for log rotation. Error: %s' %
                    (old_file_name, new_file_name, msg))
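
# A minimal standalone sketch of the "_N" suffix collision handling used in rotate_log() above;
# generate_rotation_name() is an illustrative helper name, not part of the original module, and
# max_index plays the role of MAX_FILE_NAME_INDEX.
import os

def generate_rotation_name(base_dir, token, numeric_part, max_index=1000):
    # Return a non-existing '<numeric_part>.log' path under base_dir/token,
    # appending '_1', '_2', ... when the plain name is already taken.
    candidate = os.path.join(base_dir, token, numeric_part + '.log')
    if not os.path.exists(candidate):
        return candidate
    for i in range(1, max_index + 1):
        candidate = os.path.join(base_dir, token, '%s_%d.log' % (numeric_part, i))
        if not os.path.exists(candidate):
            return candidate
    raise Exception('Cannot generate new rotation name: all indexes from _1 to _%d '
                    'are already used!' % max_index)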
Exemple #44
0
    def analyseRemote(self, parser, since=None, limit=None, filterItems=None):

        self.assertConnect()

        # srv_offset = self.dbParams.pop('srv_offset','')
        self.dbParams['port'] = self.service.local_bind_address[-1]
        cursor = pymysql.connect( **self.dbParams ).cursor()

        sm_where_clauses = []

        if since:
            since_t = TimeUtils.wpServerToLocalTime( TimeUtils.wpStrptime(since))
            assert since_t, "Time should be valid format, got %s" % since
            since_s = TimeUtils.wpTimeToString(since_t)

            sm_where_clauses.append( "tu.`time` > '%s'" % since_s )

        modtime_cols = [
            "tu.`user_id` as `user_id`",
            "MAX(tu.`time`) as `Edited in Wordpress`"
        ]

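        # For each tracked column group, take the most recent tu.`time` among rows whose
        # serialized `changed` field mentions any of that group's aliases.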
        for tracking_name, aliases in ColData_User.getWPTrackedCols().items():
            case_clauses = []
            for alias in aliases:
                case_clauses.append("LOCATE('\"%s\"', tu.`changed`) > 0" % alias)
            modtime_cols.append("MAX(CASE WHEN {case_clauses} THEN tu.`time` ELSE \"\" END) as `{tracking_name}`".format(
                case_clauses = " OR ".join(case_clauses),
                tracking_name = tracking_name
            ))

        if sm_where_clauses:
            sm_where_clause = 'WHERE ' + ' AND '.join(sm_where_clauses)
        else:
            sm_where_clause = ''

        sql_select_modtime = """\
    SELECT
        {modtime_cols}
    FROM
        {tbl_tu} tu
    {sm_where_clause}
    GROUP BY
        tu.`user_id`""".format(
            modtime_cols = ",\n\t\t".join(modtime_cols),
            tbl_tu=self.tbl_prefix+'tansync_updates',
            sm_where_clause = sm_where_clause,
        )

        # print sql_select_modtime

        if since:
            cursor.execute(sql_select_modtime)
            headers = [SanitationUtils.coerceUnicode(i[0]) for i in cursor.description]
            results = [[SanitationUtils.coerceUnicode(cell) for cell in row] for row in cursor]
            table = [headers] + results
            # print tabulate(table, headers='firstrow')
            # results = list(cursor)
            # if len(results) == 0:
            #     #nothing to analyse
            #     return
            # else:
            #     # n rows to analyse
            #     print "THERE ARE %d ITEMS" % len(results)

        wpDbMetaCols = ColData_User.getWPDBCols(meta=True)
        wpDbCoreCols = ColData_User.getWPDBCols(meta=False)

        userdata_cols = ",\n\t\t".join(filter(None,
            [
                "u.%s as `%s`" % (key, name)\
                    for key, name in wpDbCoreCols.items()
            ] + [
                "MAX(CASE WHEN um.meta_key = '%s' THEN um.meta_value ELSE \"\" END) as `%s`" % (key, name) \
                    for key, name in wpDbMetaCols.items()
            ]
        ))

        # wpCols = OrderedDict(filter( lambda (k, v): not v.get('wp',{}).get('generated'), ColData_User.getWPCols().items()))

        # assert all([
        #     'Wordpress ID' in wpCols.keys(),
        #     wpCols['Wordpress ID'].get('wp', {}).get('key') == 'ID',
        #     wpCols['Wordpress ID'].get('wp', {}).get('final')
        # ]), 'ColData should be configured correctly'

        # userdata_cols2 = ",\n\t\t".join(filter(None,[
        #     ("MAX(CASE WHEN um.meta_key = '%s' THEN um.meta_value ELSE \"\" END) as `%s`" if data['wp'].get('meta') else "u.%s as `%s`") % (data['wp']['key'], col)\
        #     for col, data in wpCols.items()
        # ]))

        # print " -> COLS1: ", userdata_cols
        # print " -> COLS2: ", userdata_cols2

        # print userdata_cols

        sql_select_user = """
    SELECT
        {usr_cols}
    FROM
        {tbl_u} u
        LEFT JOIN {tbl_um} um
        ON ( um.`user_id` = u.`ID`)
    GROUP BY
        u.`ID`""".format(
            tbl_u = self.tbl_prefix+'users',
            tbl_um = self.tbl_prefix+'usermeta',
            usr_cols = userdata_cols,
        )

        um_on_clauses = []
        um_where_clauses = []

        um_on_clauses.append('ud.`Wordpress ID` = lu.`user_id`')

        if filterItems:
            if 'cards' in filterItems:
                um_where_clauses.append( "ud.`MYOB Card ID` IN (%s)" % (','.join([
                    '"%s"' % card for card in filterItems['cards']
                ])))

        if um_on_clauses:
            um_on_clause = ' AND '.join([
                "(%s)" % clause for clause in um_on_clauses
            ])
        else:
            um_on_clause = ''

        if um_where_clauses:
            um_where_clause = 'WHERE ' + ' AND '.join([
                "(%s)" % clause for clause in um_where_clauses
            ])
        else:
            um_where_clause = ''

        # print sql_select_user

        sql_select_user_modtime = """
SELECT *
FROM
(
    {sql_ud}
) as ud
{join_type} JOIN
(
    {sql_mt}
) as lu
ON {um_on_clause}
{um_where_clause}
{limit_clause};""".format(
            sql_ud = sql_select_user,
            sql_mt = sql_select_modtime,
            join_type = "INNER" if sm_where_clause else "LEFT",
            limit_clause = "LIMIT %d" % limit if limit else "",
            um_on_clause = um_on_clause,
            um_where_clause = um_where_clause
        )

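        # For illustration only (column lists abbreviated, tbl_prefix assumed to be 'wp_'),
        # the assembled query has roughly this shape:
        #   SELECT * FROM ( SELECT u.<col> as `<name>`, ... FROM wp_users u
        #       LEFT JOIN wp_usermeta um ON ( um.`user_id` = u.`ID`) GROUP BY u.`ID` ) as ud
        #   LEFT JOIN ( SELECT tu.`user_id` as `user_id`, MAX(tu.`time`) as `Edited in Wordpress`, ...
        #       FROM wp_tansync_updates tu GROUP BY tu.`user_id` ) as lu
        #   ON (ud.`Wordpress ID` = lu.`user_id`)
        # with an INNER join instead of LEFT when a `since` filter is supplied, plus optional WHERE / LIMIT clauses.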
        if Registrar.DEBUG_CLIENT: Registrar.registerMessage(sql_select_user_modtime)

        cursor.execute(sql_select_user_modtime)

        headers = [SanitationUtils.coerceUnicode(i[0]) for i in cursor.description]

        results = [[SanitationUtils.coerceUnicode(cell) for cell in row] for row in cursor]

        rows = [headers] + results

        # print rows

        if results:
            print "there are %d results" % len(results)
            parser.analyseRows(rows)
Exemple #45
0
from email.message import Message
from email.mime.base import MIMEBase
from email.mime.multipart import MIMEMultipart
# import MySQLdb
# import pymysql
# import paramiko
from sshtunnel import SSHTunnelForwarder, check_address
import io
# import wordpress_xmlrpc
from sync_client_user import UsrSyncClient_SSH_ACT, UsrSyncClient_SQL_WP
from sync_client_user import UsrSyncClient_WC#, UsrSyncClient_JSON
from SyncUpdate import SyncUpdate, SyncUpdate_Usr
from contact_objects import FieldGroup


importName = TimeUtils.getMsTimeStamp()
start_time = time.time()

def timediff():
    return time.time() - start_time

testMode = False
# testMode = True

# good command: python source/merger.py -vv --skip-download-master --skip-download-slave --skip-update-master --skip-update-slave --skip-filter --do-sync --skip-post --livemode --limit=9000 --master-file=act_x_2016-08-01_15-02-35.csv --slave-file=act_x_2016-08-01_15-02-35.csv

### DEFAULT CONFIG ###

# paths are relative to source file

# things that need global scope
Exemple #46
0
def main():
    global testMode, inFolder, outFolder, logFolder, srcFolder, pklFolder, \
        yamlPath, repPath, mFailPath, sFailPath, logPath, zipPath

    userFile = cardFile = emailFile = sinceM = sinceS = False

    ### OVERRIDE CONFIG WITH YAML FILE ###

    with open(yamlPath) as stream:
        config = yaml.load(stream)

        if 'inFolder' in config.keys():
            inFolder = config['inFolder']
        if 'outFolder' in config.keys():
            outFolder = config['outFolder']
        if 'logFolder' in config.keys():
            logFolder = config['logFolder']

        #mandatory
        merge_mode = config.get('merge_mode', 'sync')
        MASTER_NAME = config.get('master_name', 'MASTER')
        SLAVE_NAME = config.get('slave_name', 'SLAVE')
        DEFAULT_LAST_SYNC = config.get('default_last_sync')
        master_file = config.get('master_file', '')
        slave_file = config.get('slave_file', '')
        userFile = config.get('userFile')
        cardFile = config.get('cardFile')
        emailFile = config.get('emailFile')
        sinceM = config.get('sinceM')
        sinceS = config.get('sinceS')
        download_slave = config.get('download_slave')
        download_master = config.get('download_master')
        update_slave = config.get('update_slave')
        update_master = config.get('update_master')
        do_filter = config.get('do_filter')
        do_problematic = config.get('do_problematic')
        do_post = config.get('do_post')
        do_sync = config.get('do_sync')

    ### OVERRIDE CONFIG WITH ARGPARSE ###

    parser = argparse.ArgumentParser(description = 'Merge contact records between two databases')
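    # Boolean options below are declared as mutually exclusive --do-X / --skip-X pairs with
    # default=None, so the YAML config value loaded above is only overridden when a flag is
    # explicitly passed on the command line.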
    group = parser.add_mutually_exclusive_group()
    group.add_argument("-v", "--verbosity", action="count",
                        help="increase output verbosity")
    group.add_argument("-q", "--quiet", action="store_true")
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--testmode', help='Run in test mode with test databases',
                        action='store_true', default=None)
    group.add_argument('--livemode', help='Run the script on the live databases',
                        action='store_false', dest='testmode')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--download-master', help='download the master data',
                       action="store_true", default=None)
    group.add_argument('--skip-download-master', help='use the local master file instead\
        of downloading the master data', action="store_false", dest='download_master')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--download-slave', help='download the slave data',
                       action="store_true", default=None)
    group.add_argument('--skip-download-slave', help='use the local slave file instead\
        of downloading the slave data', action="store_false", dest='download_slave')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--update-master', help='update the master database',
                       action="store_true", default=None)
    group.add_argument('--skip-update-master', help='don\'t update the master database',
                       action="store_false", dest='update_master')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--update-slave', help='update the slave database',
                       action="store_true", default=None)
    group.add_argument('--skip-update-slave', help='don\'t update the slave database',
                       action="store_false", dest='update_slave')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--do-filter', help='filter the databases',
                       action="store_true", default=None)
    group.add_argument('--skip-filter', help='don\'t filter the databases',
                       action="store_false", dest='do_filter')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--do-sync', help='sync the databases',
                       action="store_true", default=None)
    group.add_argument('--skip-sync', help='don\'t sync the databases',
                       action="store_false", dest='do_sync')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--do-problematic', help='make problematic updates to the databases',
                       action="store_true", default=None)
    group.add_argument('--skip-problematic', help='don\'t make problematic updates to the databases',
                       action="store_false", dest='do_problematic')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--do-post', help='post process the contacts',
                       action="store_true", default=None)
    group.add_argument('--skip-post', help='don\'t post process the contacts',
                       action="store_false", dest='do_post')

    parser.add_argument('--m-ssh-host', help='location of master ssh server')
    parser.add_argument('--m-ssh-port', type=int, help='location of master ssh port')
    parser.add_argument('--limit', type=int, help='global limit of objects to process')
    parser.add_argument('--master-file', help='location of master file')
    parser.add_argument('--slave-file', help='location of slave file')
    parser.add_argument('--card-file')

    group = parser.add_argument_group()
    group.add_argument('--debug-abstract', action='store_true', dest='debug_abstract')
    group.add_argument('--debug-parser', action='store_true', dest='debug_parser')
    group.add_argument('--debug-update', action='store_true', dest='debug_update')
    group.add_argument('--debug-flat', action='store_true', dest='debug_flat')
    group.add_argument('--debug-name', action='store_true', dest='debug_name')
    group.add_argument('--debug-address', action='store_true', dest='debug_address')
    group.add_argument('--debug-client', action='store_true', dest='debug_client')
    group.add_argument('--debug-utils', action='store_true', dest='debug_utils')
    group.add_argument('--debug-contact', action='store_true', dest='debug_contact')

    args = parser.parse_args()

    if args:
        print args
        if args.verbosity > 0:
            Registrar.DEBUG_PROGRESS = True
            Registrar.DEBUG_ERROR = True
        if args.verbosity > 1:
            Registrar.DEBUG_MESSAGE = True
        if args.quiet:
            Registrar.DEBUG_PROGRESS = False
            Registrar.DEBUG_ERROR = False
            Registrar.DEBUG_MESSAGE = False
        if args.testmode is not None:
            testMode = args.testmode
        if args.download_slave is not None:
            download_slave = args.download_slave
        if args.download_master is not None:
            download_master = args.download_master
        if args.update_slave is not None:
            update_slave = args.update_slave
        if args.update_master is not None:
            update_master = args.update_master
        if args.do_filter is not None:
            do_filter = args.do_filter
        if args.do_sync is not None:
            do_sync = args.do_sync
        if args.do_problematic is not None:
            do_problematic = args.do_problematic
        if args.do_post is not None:
            do_post = args.do_post
        if args.m_ssh_port:
            m_ssh_port = args.m_ssh_port
        if args.m_ssh_host:
            m_ssh_host = args.m_ssh_host
        if args.master_file is not None:
            download_master = False
            master_file = args.master_file
        if args.slave_file is not None:
            download_slave = False
            slave_file = args.slave_file
        if args.card_file is not None:
            cardFile = args.card_file
            do_filter = True

        if args.debug_abstract is not None:
            Registrar.DEBUG_ABSTRACT = args.debug_abstract
        if args.debug_parser is not None:
            Registrar.DEBUG_PARSER = args.debug_parser
        if args.debug_update is not None:
            Registrar.DEBUG_UPDATE = args.debug_update
        if args.debug_flat is not None:
            Registrar.DEBUG_FLAT = args.debug_flat
        if args.debug_name is not None:
            Registrar.DEBUG_NAME = args.debug_name
        if args.debug_address is not None:
            Registrar.DEBUG_ADDRESS = args.debug_address
        if args.debug_client is not None:
            Registrar.DEBUG_CLIENT = args.debug_client
        if args.debug_utils is not None:
            Registrar.DEBUG_UTILS = args.debug_utils
        if args.debug_contact is not None:
            Registrar.DEBUG_CONTACT = args.debug_contact

        global_limit = args.limit

    # api config

    with open(yamlPath) as stream:
        optionNamePrefix = 'test_' if testMode else ''
        config = yaml.load(stream)
        ssh_user = config.get(optionNamePrefix+'ssh_user')
        ssh_pass = config.get(optionNamePrefix+'ssh_pass')
        ssh_host = config.get(optionNamePrefix+'ssh_host')
        ssh_port = config.get(optionNamePrefix+'ssh_port', 22)
        m_ssh_user = config.get(optionNamePrefix+'m_ssh_user')
        m_ssh_pass = config.get(optionNamePrefix+'m_ssh_pass')
        m_ssh_host = config.get(optionNamePrefix+'m_ssh_host')
        m_ssh_port = config.get(optionNamePrefix+'m_ssh_port', 22)
        remote_bind_host = config.get(optionNamePrefix+'remote_bind_host', '127.0.0.1')
        remote_bind_port = config.get(optionNamePrefix+'remote_bind_port', 3306)
        db_user = config.get(optionNamePrefix+'db_user')
        db_pass = config.get(optionNamePrefix+'db_pass')
        db_name = config.get(optionNamePrefix+'db_name')
        db_charset = config.get(optionNamePrefix+'db_charset', 'utf8mb4')
        wp_srv_offset = config.get(optionNamePrefix+'wp_srv_offset', 0)
        m_db_user = config.get(optionNamePrefix+'m_db_user')
        m_db_pass = config.get(optionNamePrefix+'m_db_pass')
        m_db_name = config.get(optionNamePrefix+'m_db_name')
        m_db_host = config.get(optionNamePrefix+'m_db_host')
        m_x_cmd = config.get(optionNamePrefix+'m_x_cmd')
        m_i_cmd = config.get(optionNamePrefix+'m_i_cmd')
        tbl_prefix = config.get(optionNamePrefix+'tbl_prefix', '')
        wp_user = config.get(optionNamePrefix+'wp_user', '')
        wp_pass = config.get(optionNamePrefix+'wp_pass', '')
        store_url = config.get(optionNamePrefix+'store_url', '')
        wc_api_key = config.get(optionNamePrefix+'wc_api_key')
        wc_api_secret = config.get(optionNamePrefix+'wc_api_secret')
        remote_export_folder = config.get(optionNamePrefix+'remote_export_folder', '')


    ### DISPLAY CONFIG ###
    if Registrar.DEBUG_MESSAGE:
        if testMode:
            print "testMode enabled"
        else:
            print "testMode disabled"
        if not download_slave:
            print "no download_slave"
        if not download_master:
            print "no download_master"
        if not update_master:
            print "not updating maseter"
        if not update_slave:
            print "not updating slave"
        if not do_filter:
            print "not doing filter"
        if not do_sync:
            print "not doing sync"
        if not do_post:
            print "not doing post"

    ### PROCESS CLASS PARAMS ###

    FieldGroup.do_post = do_post
    SyncUpdate.setGlobals( MASTER_NAME, SLAVE_NAME, merge_mode, DEFAULT_LAST_SYNC)
    TimeUtils.setWpSrvOffset(wp_srv_offset)

    ### SET UP DIRECTORIES ###

    for path in (inFolder, outFolder, logFolder, srcFolder, pklFolder):
        if not os.path.exists(path):
            os.mkdir(path)

    fileSuffix = "_test" if testMode else ""
    fileSuffix += "_filter" if do_filter else ""
    m_x_filename = "act_x"+fileSuffix+"_"+importName+".csv"
    m_i_filename = "act_i"+fileSuffix+"_"+importName+".csv"
    s_x_filename = "wp_x"+fileSuffix+"_"+importName+".csv"
    remoteExportPath = os.path.join(remote_export_folder, m_x_filename)
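    # With testMode and do_filter both enabled and an importName such as "2016-08-01_15-02-35"
    # (hypothetical value), m_x_filename would be "act_x_test_filter_2016-08-01_15-02-35.csv" and
    # s_x_filename "wp_x_test_filter_2016-08-01_15-02-35.csv".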

    if download_master:
        maPath = os.path.join(inFolder, m_x_filename)
        maEncoding = "utf-8"
    else:
        # maPath = os.path.join(inFolder, "act_x_test_2016-05-03_23-01-48.csv")
        maPath = os.path.join(inFolder, master_file)
        # maPath = os.path.join(inFolder, "500-act-records-edited.csv")
        # maPath = os.path.join(inFolder, "500-act-records.csv")
        maEncoding = "utf8"
    if download_slave:
        saPath = os.path.join(inFolder, s_x_filename)
        saEncoding = "utf8"
    else:
        saPath = os.path.join(inFolder, slave_file)
        # saPath = os.path.join(inFolder, "500-wp-records-edited.csv")
        saEncoding = "utf8"

    moPath = os.path.join(outFolder, m_i_filename)
    repPath = os.path.join(outFolder, "usr_sync_report%s.html" % fileSuffix)
    WPresCsvPath = os.path.join(outFolder, "sync_report_wp%s.csv" % fileSuffix)
    masterResCsvPath = os.path.join(outFolder, "sync_report_act%s.csv" % fileSuffix)
    masterDeltaCsvPath = os.path.join(outFolder, "delta_report_act%s.csv" % fileSuffix)
    slaveDeltaCsvPath = os.path.join(outFolder, "delta_report_wp%s.csv" % fileSuffix)
    mFailPath = os.path.join(outFolder, "act_fails%s.csv" % fileSuffix)
    sFailPath = os.path.join(outFolder, "wp_fails%s.csv" % fileSuffix)
    sqlPath = os.path.join(srcFolder, "select_userdata_modtime.sql")
    # pklPath = os.path.join(pklFolder, "parser_pickle.pkl" )
    pklPath = os.path.join(pklFolder, "parser_pickle%s.pkl" % fileSuffix )
    logPath = os.path.join(logFolder, "log_%s.txt" % importName)
    zipPath = os.path.join(logFolder, "zip_%s.zip" % importName)

    ### PROCESS OTHER CONFIG ###

    assert store_url, "store url must not be blank"
    xmlrpc_uri = store_url + 'xmlrpc.php'
    json_uri = store_url + 'wp-json/wp/v2'

    actFields = ";".join(ColData_User.getACTImportCols())

    jsonConnectParams = {
        'json_uri': json_uri,
        'wp_user': wp_user,
        'wp_pass': wp_pass
    }

    wcApiParams = {
        'api_key':wc_api_key,
        'api_secret':wc_api_secret,
        'url':store_url
    }

    sqlConnectParams = {}

    actConnectParams = {
        'hostname':    m_ssh_host,
        'port':        m_ssh_port,
        'username':    m_ssh_user,
        'password':    m_ssh_pass,
    }

    actDbParams = {
        'db_x_exe':m_x_cmd,
        'db_i_exe':m_i_cmd,
        'db_name': m_db_name,
        'db_host': m_db_host,
        'db_user': m_db_user,
        'db_pass': m_db_pass,
        'fields' : actFields,
    }
    if sinceM: actDbParams['since'] = sinceM

    fsParams = {
        'importName': importName,
        'remote_export_folder': remote_export_folder,
        'inFolder': inFolder,
        'outFolder': outFolder
    }

    #########################################
    # Prepare Filter Data
    #########################################

    print debugUtils.hashify("PREPARE FILTER DATA"), timediff()

    if do_filter:
        filterFiles = {
            'users': userFile,
            'emails': emailFile,
            'cards': cardFile,
        }
        filterItems = {}
        for key, filterFile in filterFiles.items():
            if filterFile:
                try:
                    with open(os.path.join(inFolder,filterFile) ) as filterFileObj:
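                        # strip surrounding whitespace (and the trailing newline) from each filter line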
                        filterItems[key] = [\
                            re.sub(r'\s*([^\s].*[^\s])\s*(?:\n)', r'\1', line)\
                            for line in filterFileObj\
                        ]
                except IOError, e:
                    SanitationUtils.safePrint("could not open %s file [%s] from %s" % (
                        key,
                        filterFile,
                        unicode(os.getcwd())
                    ))
                    raise e
        if sinceM:
            filterItems['sinceM'] = TimeUtils.wpStrptime(sinceM)
        if sinceS:
            filterItems['sinceS'] = TimeUtils.wpStrptime(sinceS)