Beispiel #1
0
    def test_encode_datetime(self):
        """With use_utc=False each datetime keeps its own UTC offset."""
        naive = datetime(2018, 1, 12, 8, 12, 25, 12345)
        cases = [
            # naive datetime: no zone designator at all
            ('"2018-01-12T08:12:25.012"', naive),
            # UTC-aware: trailing "Z"
            ('"2018-01-12T08:12:25.012Z"', naive.replace(tzinfo=timezone.utc)),
            # pytz zones: explicit numeric offsets
            ('"2018-01-12T08:12:25.012-05:00"',
             get_timezone('US/Eastern').localize(naive)),
            ('"2018-01-12T08:12:25.012+08:00"',
             get_timezone('Asia/Shanghai').localize(naive)),
        ]
        for expected, value in cases:
            self.assertEqual(expected, json_encode(value, use_utc=False))
Beispiel #2
0
    def test_encode_time(self):
        """With use_utc=False each time value keeps its own UTC offset."""
        base = time(8, 12, 25, 12345)

        self.assertEqual('"08:12:25.012"', json_encode(base, use_utc=False))
        self.assertEqual(
            '"08:12:25.012Z"',
            json_encode(base.replace(tzinfo=timezone.utc), use_utc=False))

        # pytz tzinfo objects must be "normalized" through localize() so the
        # modern offset is used ("-05:00"/"+08:00" rather than the historical
        # LMT values "-04:56"/"+08:06").
        # see https://stackoverflow.com/questions/26264897/time-zone-field-in-isoformat
        for zone, expected in (('US/Eastern', '"08:12:25.012-05:00"'),
                               ('Asia/Shanghai', '"08:12:25.012+08:00"')):
            tzinfo = get_timezone(zone).localize(
                datetime(2018, 1, 12, 8, 12, 25, 12345)).tzinfo
            self.assertEqual(
                expected,
                json_encode(base.replace(tzinfo=tzinfo), use_utc=False))
def create_datetime_column(spec, column_options):
    """Pick DateTimeColumn or DateTime64Column from a ClickHouse type spec.

    ``spec`` looks like ``DateTime``, ``DateTime('UTC')``, ``DateTime64(3)``
    or ``DateTime64(3, 'UTC')``.
    """
    if spec.startswith('DateTime64'):
        cls = DateTime64Column
        # Strip "DateTime64(" and the closing ")" -> "scale[, 'tz']".
        inner = spec[11:-1]
        scale, sep, tz_part = inner.partition(',')
        column_options['scale'] = int(scale)
        # Re-append ")" so the timezone extraction below works unchanged.
        spec = tz_part.strip() + ')' if sep else inner
    else:
        cls = DateTimeColumn
        # Strip the 9-char "DateTime(" prefix -> "'tz')" or "".
        spec = spec[9:]

    context = column_options['context']

    tz_name = timezone = None
    offset_naive = True

    if spec and spec[-1] == ')':
        # Use column's timezone if it's specified: "'Name')" -> "Name".
        tz_name = spec[1:-2]
        offset_naive = False
    elif not context.settings.get('use_client_time_zone', False):
        try:
            local_timezone = get_localzone().zone
        except Exception:
            local_timezone = None

        # Only convert via the server zone when it differs from the local one.
        if local_timezone != context.server_info.timezone:
            tz_name = context.server_info.timezone

    if tz_name:
        timezone = get_timezone(tz_name)

    return cls(timezone=timezone, offset_naive=offset_naive, **column_options)
Beispiel #4
0
def create_datetime_column(spec, column_options):
    """Build a DateTimeColumn, resolving the timezone from spec or server."""
    context = column_options['context']

    tz_name = timezone = None
    offset_naive = True

    if spec[-1] == ')':
        # Use column's timezone if it's specified: "DateTime('Name')" -> "Name".
        tz_name = spec[10:-2]
        offset_naive = False
    elif not context.settings.get('use_client_time_zone', False):
        try:
            local_timezone = get_localzone().zone
        except Exception:
            local_timezone = None

        # Only convert via the server zone when it differs from the local one.
        if local_timezone != context.server_info.timezone:
            tz_name = context.server_info.timezone

    if tz_name:
        timezone = get_timezone(tz_name)

    return DateTimeColumn(timezone=timezone,
                          offset_naive=offset_naive,
                          **column_options)
 def process_request(self, request):
     """Activate the session's detected timezone for this request, if any."""
     tz = request.session.get('detected_timezone')
     # `request.timezone_active` is read by the template tag to detect
     # whether a timezone has been activated for this request.
     request.timezone_active = bool(tz)
     if tz:
         timezone.activate(get_timezone(tz))
     else:
         timezone.deactivate()
Beispiel #6
0
    def test_encode_datetime__use_utc(self):
        """By default aware datetimes are converted to UTC before encoding."""
        naive = datetime(2018, 1, 12, 8, 12, 25, 12345)

        # A naive datetime is emitted as-is, without a zone designator.
        self.assertEqual('"2018-01-12T08:12:25.012"', json_encode(naive))

        self.assertEqual(
            '"2018-01-12T08:12:25.012Z"',
            json_encode(naive.replace(tzinfo=timezone.utc)))

        # Zone-aware values are shifted to UTC ("Z").
        for zone, expected in (('US/Eastern', '"2018-01-12T13:12:25.012Z"'),
                               ('Asia/Shanghai', '"2018-01-12T00:12:25.012Z"')):
            localized = get_timezone(zone).localize(naive)
            self.assertEqual(expected, json_encode(localized))
Beispiel #7
0
    def __init__(self,
                 *,
                 protocol_url=None,
                 api_version=None,
                 default_resource=None,
                 casing_function=None,
                 protocol_scope_prefix=None,
                 timezone=None,
                 **kwargs):
        """ Create a new protocol object

        :param str protocol_url: the base url used to communicate with the
         server
        :param str api_version: the api version
        :param str default_resource: the default resource to use when there is
         nothing explicitly specified during the requests
        :param function casing_function: the casing transform function to be
         used on api keywords (camelcase / pascalcase)
        :param str protocol_scope_prefix: prefix url for scopes
        :param pytz.UTC or str timezone: preferred timezone, defaults to the
         system timezone
        :raises ValueError: if protocol_url or api_version are not supplied
        """
        if protocol_url is None or api_version is None:
            raise ValueError(
                'Must provide valid protocol_url and api_version values')
        # Fall back to the class-level default only for falsy non-None values.
        self.protocol_url = protocol_url or self._protocol_url
        self.protocol_scope_prefix = protocol_scope_prefix or ''
        self.api_version = api_version
        self.service_url = '{}{}/'.format(protocol_url, api_version)
        self.default_resource = default_resource or ME_RESOURCE
        # A custom casing_function overrides the default camelcase transform.
        self.use_default_casing = casing_function is None
        self.casing_function = casing_function or camelcase
        if timezone and isinstance(timezone, str):
            # Accept a zone name and resolve it to a pytz timezone object.
            timezone = get_timezone(timezone)
        try:
            self.timezone = timezone or get_localzone()  # pytz timezone
        except UnknownTimeZoneError:
            log.info(
                'Timezone not provided and the local timezone could not be found. Default to UTC.'
            )
            self.timezone = UTC  # pytz.timezone('UTC')
        self.max_top_value = 500  # Max $top parameter value

        # define any keyword that can be different in this protocol
        # for example, attachments Odata type differs between Outlook
        #  rest api and graph: (graph = #microsoft.graph.fileAttachment and
        #  outlook = #Microsoft.OutlookServices.FileAttachment')
        self.keyword_data_store = {}
Beispiel #8
0
def create_datetime_column(spec, column_options):
    """Build a DateTimeColumn, resolving the timezone from spec or server."""
    context = column_options['context']

    tz_name = timezone = None

    if spec[-1] == ')':
        # Use column's timezone if it's specified: "DateTime('Name')" -> "Name".
        tz_name = spec[10:-2]
    elif not context.settings.get('use_client_time_zone', False):
        # Otherwise fall back to the server-reported timezone.
        tz_name = context.server_info.timezone

    if tz_name:
        timezone = get_timezone(tz_name)

    return DateTimeColumn(timezone=timezone, **column_options)
Beispiel #9
0
def create_numpy_datetime_column(spec, column_options):
    """Pick the NumPy DateTime/DateTime64 column class from a type spec.

    ``spec`` looks like ``DateTime``, ``DateTime('UTC')``, ``DateTime64(3)``
    or ``DateTime64(3, 'UTC')``.
    """
    if spec.startswith('DateTime64'):
        cls = NumpyDateTime64Column
        # Strip "DateTime64(" and the closing ")" -> "scale[, 'tz']".
        inner = spec[11:-1]
        scale, sep, tz_part = inner.partition(',')
        column_options['scale'] = int(scale)
        # Re-append ")" so the timezone extraction below works unchanged.
        spec = tz_part.strip() + ')' if sep else inner
    else:
        cls = NumpyDateTimeColumn
        # Strip the 9-char "DateTime(" prefix -> "'tz')" or "".
        spec = spec[9:]

    context = column_options['context']

    tz_name = timezone = None
    offset_naive = True
    local_timezone = None

    # As Numpy do not use local timezone for converting timestamp to
    # datetime we need always detect local timezone for manual converting.
    try:
        # Newer tzlocal exposes .key; older releases expose .zone instead.
        local_timezone = get_localzone().key
    except AttributeError:
        local_timezone = get_localzone().zone
    except Exception:
        pass

    if spec and spec[-1] == ')':
        # Use column's timezone if it's specified: "'Name')" -> "Name".
        tz_name = spec[1:-2]
        offset_naive = False
    elif not context.settings.get('use_client_time_zone', False):
        if local_timezone != context.server_info.timezone:
            tz_name = context.server_info.timezone

    if tz_name:
        timezone = get_timezone(tz_name)

    return cls(timezone=timezone,
               offset_naive=offset_naive,
               local_timezone=local_timezone,
               **column_options)
def chromium_history_to_csv(user=None, curruser=False, sourcedb=None, tempdb=None, outputdir=None, timezone='local'):
    """
    Parses the specified chromium History db to extract download and url history information.
    Defaults to look for chrome's history, but can override with 'sourcedb' param to specify the
    History file for any chromium base browser such as edge, brave, vivaldi, and more

    :param user: profile owner's account name (ignored if sourcedb is given)
    :param curruser: when True, use the currently logged-in user's profile
    :param sourcedb: explicit path to a chromium "History" sqlite database
    :param tempdb: path the db is copied to before reading (the browser may
        hold a lock on the original)
    :param outputdir: directory that receives the two CSV files
    :param timezone: 'local' or an IANA timezone name used for timestamps
    :raises ValueError: if neither sourcedb, user, nor curruser is supplied
    """
    if user is None and curruser is False and sourcedb is None:
        raise ValueError("Must specify a sourcedb path, explicit user, or curruser flag")
    if "win32" in sys.platform:
        if curruser:
            user = environ['USERNAME']
        if sourcedb is None:
            # Win10/7/vista
            sourcedb = r"C:\users\{0}\AppData\Local\Google\Chrome\User Data\Default\HISTORY".format(user)
            # checking if xp, this is valid in theory not tested
            if not path.exists(sourcedb):
                sourcedb = r"C:\Documents and Settings\{0}\Local Settings\Application Data\Google\Chrome\User Data\Default\HISTORY".format(user)
        if tempdb is None:
            tempdb = r"c:\users\{0}\AppData\Local\Temp\HISTORY".format(environ['USERNAME'])
        if outputdir is None:
            outputdir = r"{0}\desktop".format(environ['USERPROFILE'])
    else:
        # assumes a nix variant
        if curruser:
            user = environ['USER']
        if sourcedb is None:
            sourcedb = r"/home/{0}/.config/google-chrome/Default/History".format(user)
            if not path.exists(sourcedb):
                sourcedb = r"/home/{0}/.config/chromium/Default/History".format(user)
        if tempdb is None:
            tempdb = r"/tmp/HISTORY"
        if outputdir is None:
            outputdir = r"/home/{0}/Desktop".format(environ['USER'])
    # Build output paths with path.join so the separator is correct on every
    # platform (the previous hard-coded "\" broke paths on the *nix branch).
    prefix = user + "_" if user is not None else ''
    url_history_csv = path.join(outputdir, prefix + "chromium_history.csv")
    download_history_csv = path.join(outputdir, prefix + "chromium_download_history.csv")
    # get the timezone
    if timezone == 'local':
        tz = get_localzone()
    else:
        tz = get_timezone(timezone)
    # making a copy, sometimes fails when browser has a handle on the file
    copyfile(sourcedb, tempdb)
    url_columns = ["url","title","visit_count","typed_count","last_visit_time","hidden"]#,"visit_time","from_visit","transition"]
    url_derived_columns = ["transition_core","transition_qualifiers","transition_description","qualifiers_description"]
    visit_columns = ["visit_time","from_visit","transition","visit_duration"]
    url_visits_columns = url_columns + visit_columns
    # there's also hash and transient columns, but don't seem to populate
    download_columns = ["current_path","target_path","start_time","received_bytes","total_bytes","state","danger_type","interrupt_reason",
        "end_time","opened","last_access_time","referrer","tab_url","tab_referrer_url","last_modified","mime_type","original_mime_type"]
    download_derived_columns = ["danger desc","interrupt desc"]
    download_all_columns = download_columns + download_derived_columns
    # can join the urls table with the keyword_search_terms table on url.id=keyword_search_terms.url_id, but doesn't really give more info
    # can join the urls table with the segments table on url.id=segments.url_id, but again not much more info
    timeline_query = "SELECT {0}, {1} FROM urls, visits WHERE urls.id = visits.url ORDER BY visits.visit_time DESC;".format(
        ", ".join(["urls."+col for col in url_columns]),", ".join(["visits."+col for col in visit_columns]))
    downloads_query = "SELECT {0} FROM downloads ORDER BY start_time DESC;".format(", ".join(download_columns))
    timeline = []
    downloads = []
    db = sqlite3.connect(tempdb)
    try:
        cursor = db.cursor()
        cursor.execute(timeline_query)
        timeline_results = cursor.fetchall()
        cursor.execute(downloads_query)
        download_results = cursor.fetchall()
    finally:
        # Always release the handle so the temp copy can be removed below.
        db.close()
    remove(tempdb)
    for result in timeline_results:
        entry = dict()
        for column, value in zip(url_visits_columns, result):
            if "time" in column and len(str(value)) == 17:
                # Chromium timestamps are microseconds since 1601-01-01, the
                # time that chromium starts counting from.
                converted_time = datetime(1601, 1, 1) + timedelta(microseconds=value)
                entry[column] = utc.localize(converted_time, is_dst=None).astimezone(tz).isoformat()
            elif "transition" in column:
                # Decode the transition bitfield into core type + qualifiers.
                qualifiers = list()
                qualifiers_description = list()
                for qmask in transition_qualifiers:
                    if ((value & QUALIFIER_MASK) & qmask) != 0:
                        qualifiers.append(transition_qualifiers[qmask])
                        qualifiers_description.append(transition_qualifier_descriptions[qmask])
                entry[column] = value
                entry[url_derived_columns[0]] = transition_types[(value & CORE_MASK)]
                entry[url_derived_columns[1]] = "**|**".join(qualifiers)
                entry[url_derived_columns[2]] = transition_type_descriptions[(value & CORE_MASK)]
                entry[url_derived_columns[3]] = "**|**".join(qualifiers_description)
            else:
                entry[column] = value
        timeline.append(entry)

    for result in download_results:
        entry = dict()
        for column, value in zip(download_columns, result):
            if "time" in column and len(str(value)) == 17:
                converted_time = datetime(1601, 1, 1) + timedelta(microseconds=value)
                entry[column] = utc.localize(converted_time, is_dst=None).astimezone(tz).isoformat()
            elif "state" in column:
                entry[column] = download_state_types[value]
            elif "danger_type" in column:
                entry[column] = download_danger_types[value]
                entry[download_derived_columns[0]] = download_danger_descriptions[value]
            elif "interrupt_reason" in column and value != 0:
                entry[column] = download_interrupt_reason_types[value]
                entry[download_derived_columns[1]] = download_interrupt_reason_descriptions[value]
            else:
                entry[column] = value
        downloads.append(entry)

    column_names = url_visits_columns + url_derived_columns
    with open(url_history_csv, "w", encoding="utf-8", newline='') as csvfile:
        writer = csv.DictWriter(csvfile, fieldnames=column_names)
        writer.writeheader()
        writer.writerows(timeline)

    with open(download_history_csv, "w", encoding="utf-8", newline='') as csvfile:
        writer = csv.DictWriter(csvfile, fieldnames=download_all_columns)
        writer.writeheader()
        writer.writerows(downloads)