Example #1
def parse_datetime(s):
    """We use dateutil.parser to parse dates. However, it has the weird default of
       substituting every missing part of a date with today.
    So, parsing the date '2000' would result in
    <this day>.<this month>.2001 - which is not what we want. """
    # Parse with two different default dates to detect missing info
    # in the string
    def1 = datetime(1970, 1, 1)
    def2 = datetime(1971, 2, 2)

    result1 = datetime_parser(s, default=def1)
    result2 = datetime_parser(s, default=def2)

    res = {}

    if result1.year != def1.year or result2.year != def2.year:
        res["year"] = "{}".format(result1.year)

    if result1.month != def1.month or result2.month != def2.month:
        res["month"] = "{}".format(result1.month)

    if result1.day != def1.day or result2.day != def2.day:
        res["day"] = "{}".format(result1.day)

    return res
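A quick sanity check of the two-default trick above (a minimal sketch, assuming datetime_parser is dateutil.parser.parse, as the imports in the later examples suggest):

from datetime import datetime
from dateutil.parser import parse as datetime_parser

# Two defaults that differ in every field: a component on which both parses
# agree, and which matches neither default, must have come from the string.
def1, def2 = datetime(1970, 1, 1), datetime(1971, 2, 2)
r1 = datetime_parser("March 2000", default=def1)
r2 = datetime_parser("March 2000", default=def2)
assert r1.year == r2.year == 2000    # year was present in the input
assert r1.month == r2.month == 3     # month was present in the input
assert (r1.day, r2.day) == (1, 2)    # day fell back to each default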
Example #2
File: App.py Project: reum/py-veracode
 def retrieve_info(self):
     attrs = self.api.get_app_info(self.id)
     if 'modified_date' in attrs:
         attrs['modified_date'] = datetime_parser(attrs['modified_date'])
     if 'policy_updated_date' in attrs:
         attrs['policy_updated_date'] = datetime_parser(
             attrs['policy_updated_date'])
     for key, value in attrs.items():
         setattr(self, key, value)
Example #3
    async def header_file_metadata(self):
        # Going with version as it's the most correct term
        # TODO Change all references of revision to version @chrisseto
        # revisions will still be accepted until necessary changes are made to OSF
        version = self.get_query_argument(
            'version', default=None) or self.get_query_argument('revision',
                                                                default=None)
        data = await self.provider.metadata(self.path, revision=version)

        # Not setting etag for the moment
        # self.set_header('Etag', data.etag)  # This may not be appropriate

        if data.size is not None:
            self.set_header('Content-Length', data.size)

        if data.modified_utc is not None:
            last_modified = datetime_parser(data.modified_utc)
            last_modified_gmt = last_modified.astimezone(pytz.timezone('GMT'))
            self.set_header(
                'Last-Modified',
                last_modified_gmt.strftime('%a, %d %b %Y %H:%M:%S %Z'))

        self.set_header('Content-Type', data.content_type
                        or 'application/octet-stream')
        self.set_header('X-Waterbutler-Metadata',
                        json.dumps(data.json_api_serialized(self.resource)))
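The Last-Modified handling above converts dateutil's aware datetime to GMT for the HTTP header; a standalone sketch of just that conversion (the timestamp value is illustrative):

import pytz
from dateutil.parser import parse as datetime_parser

modified_utc = "2017-09-20T15:16:02+00:00"  # illustrative value
last_modified = datetime_parser(modified_utc)           # aware datetime
last_modified_gmt = last_modified.astimezone(pytz.timezone('GMT'))
print(last_modified_gmt.strftime('%a, %d %b %Y %H:%M:%S %Z'))
# Wed, 20 Sep 2017 15:16:02 GMT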
Example #4
def validate_cached_data():
    file_json = read_cache()
    cached_date = datetime_parser(file_json['date']).date()
    if cached_date != datetime.date.today():
        file_json['date'] = str(datetime.date.today())
        reset_tweets_index(file_json)

    write_cache(file_json)
Example #5
def _preprocess_date(result):
    """
    Parameters
    ----------
    result : str
      実行結果.日付けらしいことを求める

    Returns
    -------
    date
      result を datetime の date として表現したもの
    """
    result = result.translate(_td1)
    mmdd = r'((\d{1,2})月((\d{1,2})日)?)?'
    eraname = r'(天平)?[^\d]{2}((\d{1,2})|元)年?'
    years = r'(\d{1,4})年?'
    mt = re.match(r'({})(\({}\))?'.format(years, eraname) + mmdd, result)
    if mt:
        result = _from_mtgrp_to_str(mt.groups(), 0, 1, 4, 6, 8, 7,
                                    9).translate(_td2)
        try:
            if result.endswith("-"):
                date = datetime_parser(result[:-1]).date()
            else:
                date = datetime_parser(result).date()
            return date
        except ParserError as e:
            print(e, file=sys.stderr)
            raise RuntimeError(
                "Cannot parse as date '{}'".format(result)) from e
    mt = re.match(r'({})(\({}\))?'.format(eraname, years) + mmdd, result)
    if mt:
        result = _from_mtgrp_to_str(mt.groups(), 0, 3, 2, 6, 8, 7, 9)
    mt = re.match(r'\(({})\){}'.format(eraname, mmdd), result)
    if mt:
        result = _from_mtgrp_to_str(mt.groups(), 0, 3, 2, 4, 6, 5, 7)
    jn = Japanera()
    fmt = "%-E%-kO年"
    fmt += "%m月%d日" if "月" in result and "日" in result else "%m月" if "月" in result else ""
    res = jn.strptime(result, fmt)
    if res:
        return res[0].date()
    else:
        raise RuntimeError("Cannot parse as date '{}' by '{}'".format(
            result, fmt))
Example #6
def to_datetime(raw: Union[str, datetime]) -> datetime:
    """
    convert string to datetime; no-op if the input is already a datetime.

    This method exists because primitive constructors like `int(...)` are idempotent which makes
    them *really useful* for attrs converters. Datetime / `dateutil.parser` is NOT one of these.
    """
    # step 0: if it's already a datetime, do nothing.
    if isinstance(raw, datetime):
        return raw
    return datetime_parser(raw)
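A sketch of the attrs-converter use the docstring alludes to (the Event class is hypothetical, for illustration only):

import attr
from datetime import datetime
from typing import Union
from dateutil.parser import parse as datetime_parser

def to_datetime(raw: Union[str, datetime]) -> datetime:
    if isinstance(raw, datetime):
        return raw
    return datetime_parser(raw)

@attr.s
class Event:
    # the converter runs on every init, so str and datetime both work
    when = attr.ib(converter=to_datetime)

assert Event("2021-01-02T03:04:05").when == Event(datetime(2021, 1, 2, 3, 4, 5)).when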
Example #7
def _cleanup_job(openshift: OpenShift, cleanup_namespace: str) -> None:
    """Cleanup resource of type job."""
    now = datetime.datetime.now(datetime.timezone.utc)

    _LOGGER.info("Cleaning old resources of type job")
    resources = openshift.ocp_client.resources.get(api_version="batch/v1", kind="Job")
    for item in resources.get(label_selector=_CLEANUP_LABEL_SELECTOR, namespace=cleanup_namespace).items:
        if item.status.succeeded != 1:
            _LOGGER.info("Skipping %r as it has not been completed successfully", item.metadata.name)
            continue

        if not item.status.completionTime:
            _LOGGER.info(
                "Skipping resource %r of type %r- no completion time found in status field",
                item.metadata.name,
                resources.kind,
            )
            continue

        completed = datetime_parser(item.status.completionTime)
        lived_for = (now - completed).total_seconds()
        ttl = _parse_ttl(item.metadata.labels.ttl)

        if lived_for > ttl:
            _LOGGER.info(
                "Deleting resource %r of type %r in namespace %r - created at %r",
                item.metadata.name,
                resources.kind,
                cleanup_namespace,
                item.metadata.creationTimestamp,
            )
            try:
                resources.delete(name=item.metadata.name, namespace=cleanup_namespace)
                _METRIC_DELETED_JOBS.labels(
                    namespace=cleanup_namespace,
                    component=item.metadata.labels.component,
                    resource="Job",
                ).inc()
            except Exception:
                _LOGGER.exception(
                    "Failed to delete resource %r of type %r in namespace %r",
                    item.metadata.name,
                    resources.kind,
                    cleanup_namespace,
                )
        else:
            _LOGGER.info(
                "Keeping resource %r of type %r in namespace %r ttl not expired yet (lived for %r, ttl is %r)",
                item.metadata.name,
                resources.kind,
                cleanup_namespace,
                lived_for,
                ttl,
            )
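_parse_ttl itself is not shown in any of these snippets. A minimal sketch of what a label-based TTL parser could look like, assuming labels such as '30m', '2h' or '1d' (the format is a guess, not the project's actual convention):

import re

_TTL_UNITS = {"s": 1, "m": 60, "h": 3600, "d": 86400}

def _parse_ttl(ttl: str) -> int:
    """Parse a TTL label like '45s', '30m', '2h', '1d' (bare numbers mean seconds)."""
    match = re.fullmatch(r"(\d+)([smhd]?)", ttl.strip())
    if not match:
        raise ValueError("Unparseable TTL: {!r}".format(ttl))
    value, unit = match.groups()
    return int(value) * _TTL_UNITS.get(unit, 1)

assert _parse_ttl("2h") == 7200
assert _parse_ttl("90") == 90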
Example #8
 def DATETIME(self, t):
     """
     A datetime expressed as https://www.w3.org/TR/NOTE-datetime. Resolves
     to a Python datetime. If the parsed datetime doesn't include timezone
     information, a timezone is added based on the current Django timezone
     configuration.
     """
     dt = datetime_parser(t.value)
     if not dt.tzinfo:
         dt = timezone.make_aware(dt)
     t.value = dt
     return t
Example #9
def _cleanup_pod(openshift: OpenShift, cleanup_namespace: str) -> None:
    """Cleanup resource of type pod."""
    now = datetime.datetime.now(datetime.timezone.utc)

    _LOGGER.info("Cleaning old resources of type pod")
    resources = openshift.ocp_client.resources.get(api_version="v1", kind="Pod")
    for item in resources.get(label_selector=_CLEANUP_LABEL_SELECTOR, namespace=cleanup_namespace).items:
        if item.status.phase != 'Succeeded':
            _LOGGER.info("Skipping %r as it has not been successful", item.metadata.name)
            continue

        ttl = _parse_ttl(item.metadata.labels.ttl)

        for container_status in item.status.containerStatuses:
            finished = datetime_parser(container_status.state.terminated.finishedAt)
            lived_for = (now - finished).total_seconds()

            if lived_for < ttl:
                _LOGGER.info(
                    "Skipping %r of type %r in namespace %r as finished containers lived"
                    "for %r and did not exceeded ttl %r",
                    item.metadata.name,
                    resources.kind,
                    cleanup_namespace,
                    lived_for,
                    ttl,
                )
                break
        else:
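            # for/else: this branch runs only when the loop above never hit
            # `break`, i.e. every finished container has outlived its ttl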
            _LOGGER.info(
                "Deleting pod %r in namespace %r, created at %r - pod should be deleted based on ttl %r",
                item.metadata.name,
                cleanup_namespace,
                item.metadata.creationTimestamp,
                ttl
            )
            try:
                resources.delete(name=item.metadata.name, namespace=cleanup_namespace)
                _METRIC_DELETED_PODS.labels(
                    namespace=cleanup_namespace,
                    component=item.metadata.labels.component,
                    resource="Pod",
                ).inc()
            except Exception:
                _LOGGER.exception(
                    "Failed to delete resource %r of type %r in namespace %r",
                    item.metadata.name,
                    resources.kind,
                    cleanup_namespace,
                )
Example #10
    def post(self):
        args = api.payload

        # create a DataFrame from the csv_url
        student_rows = get_dataframe_from_csv(args.get('csv_url'))

        # CSV Validation
        invalid_rows = check_csv(student_rows)
        if invalid_rows:
            return {
                'error': True,
                'invalid_rows': invalid_rows,
            }

        record_added_to_chanakya = 0

        # Adding each student from CSV DataFrame to chanakya
        for row in student_rows:
            student_data = {}
            stage = 'ETA'

            student_data['name'] = row.get('Name')
            student_data['gender'] = app.config['GENDER'](
                row.get('Gender').upper())
            student_data['dob'] = datetime_parser(row.get('Date of Birth'))
            student_data['religion'] = app.config['RELIGION'](
                row.get('Religion'))
            student_data['caste'] = app.config['CASTE'](row.get('Caste'))

            main_contact = row.get('Main Contact')
            alternative_contact = row.get('Alternative Contact')

            set_id = int(row.get('Set ID'))
            set_instance = QuestionSet.query.get(set_id)

            # creating the student, student_contact and an enrollment_key for the student with set_id
            student, enrollment = Student.offline_student_record(
                stage, student_data, main_contact, alternative_contact,
                set_instance)
            # get all the attempts made by the student
            attempts = get_attempts(row, enrollment)
            # store the attempts in the database
            QuestionAttempts.create_attempts(attempts, enrollment)
            # calculate the student's score
            enrollment.calculate_test_score()

            record_added_to_chanakya += 1

        return {'success': True, 'record_added': record_added_to_chanakya}
Example #11
    async def header_file_metadata(self):
        data = await self.provider.metadata(self.path, revision=self.requested_version)

        # Not setting etag for the moment
        # self.set_header('Etag', data.etag)  # This may not be appropriate

        if data.size is not None:
            self.set_header('Content-Length', data.size)

        if data.modified_utc is not None:
            last_modified = datetime_parser(data.modified_utc)
            last_modified_gmt = last_modified.astimezone(pytz.timezone('GMT'))
            self.set_header('Last-Modified', last_modified_gmt.strftime('%a, %d %b %Y %H:%M:%S %Z'))

        self.set_header('Content-Type', data.content_type or 'application/octet-stream')
        self.set_header('X-Waterbutler-Metadata', json.dumps(data.json_api_serialized(self.resource)))
Example #12
def parse(date_string, **kwargs):
    """ A wrapper around python-dateutil's parse function which ensures it always returns an aware datetime """
    from dateutil.parser import parse as datetime_parser
    from django.utils.timezone import is_aware, make_aware

    parsed = datetime_parser(date_string, **kwargs)
    # Make aware
    parsed = parsed if is_aware(parsed) else make_aware(parsed, _get_tz())
    # Ensure that we have the correct offset, while also keeping what was passed in.
    original = parsed
    parsed = localtime(parsed, tz=parsed.tzinfo).replace(
        year=original.year,
        month=original.month,
        day=original.day,
        hour=original.hour
    )
    return parsed
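A hedged illustration of the "ensure aware" idea outside Django (zoneinfo stands in for django.utils.timezone and the unshown _get_tz helper):

from dateutil.parser import parse as datetime_parser
from zoneinfo import ZoneInfo  # Python 3.9+

def parse_aware(date_string, tz="UTC", **kwargs):
    parsed = datetime_parser(date_string, **kwargs)
    if parsed.tzinfo is None:                  # naive: attach the fallback zone
        parsed = parsed.replace(tzinfo=ZoneInfo(tz))
    return parsed

assert parse_aware("2021-06-01 12:00").utcoffset() is not None  # made aware
assert parse_aware("2021-06-01 12:00+02:00").utcoffset().total_seconds() == 7200  # kept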
Example #13
def parse(date_string, **kwargs):
    """ A wrapper around python-dateutil's parse function which ensures it always returns an aware datetime """
    from dateutil.parser import parse as datetime_parser
    from django.utils.timezone import is_aware, make_aware

    parsed = datetime_parser(date_string, **kwargs)
    # Make aware
    parsed = parsed if is_aware(parsed) else make_aware(parsed, _get_tz())
    # Ensure that we have the correct offset, while also keeping what was passed in.
    original = parsed
    parsed = localtime(parsed, tz=parsed.tzinfo).replace(
        year=original.year,
        month=original.month,
        day=original.day,
        hour=original.hour
    )
    return parsed
Example #14
def _cleanup_workflows(openshift: OpenShift, cleanup_namespace: str) -> None:
    """Clean up finished Argo workflows if Argo does not clean them up."""
    now = datetime.datetime.now(datetime.timezone.utc)

    _LOGGER.info("Cleaning old Argo workflows")
    resources = openshift.ocp_client.resources.get(api_version="argoproj.io/v1alpha1", kind="Workflow")
    for item in resources.get(namespace=cleanup_namespace).items:
        if item.status.finishedAt is None:
            _LOGGER.info("Skipping %r as it is not finished yet", item.metadata.name)
            continue

        ttl = _parse_ttl(item.metadata.labels.ttl)
        finished = datetime_parser(item.status.finishedAt)
        lived_for = (now - finished).total_seconds()

        if lived_for < ttl:
            _LOGGER.info(
                "Skipping %r of type %r in namespace %r as workflow lived" "for %r and did not exceeded ttl %r",
                item.metadata.name,
                resources.kind,
                cleanup_namespace,
                lived_for,
                ttl,
            )
            continue

        _LOGGER.info(
            "Deleting workflow %r in namespace %r, created at %r",
            item.metadata.name,
            cleanup_namespace,
            item.metadata.creationTimestamp,
        )

        try:
            resources.delete(name=item.metadata.name, namespace=cleanup_namespace)
            _METRIC_DELETED_WORKFLOWS.labels(
                namespace=cleanup_namespace, component=item.metadata.labels.component, resource="Workflow",
            ).inc()
        except Exception:
            _LOGGER.exception(
                "Failed to delete resource %r of type %r in namespace %r",
                item.metadata.name,
                resources.kind,
                cleanup_namespace,
            )
Example #15
def extract_datetime(line: str, year: int, existing_time: datetime = None, fuzzy: bool = True) -> (datetime, bool):
    """Try to get a datetime object from the long line

    Return some datetime, and a boolean value telling if the value was actually extracted from the line"""

    matches = regExTimeGLog.match(line)
    did_get_good_time = False
    dt = None
    # TODO: allow type that specifies what kind of parse should be attempted
    if matches is not None:
        dtd = matches.groupdict()
        # print(year)
        # print(int(dtd["month"]))
        # print(int(dtd["day"]))
        # print(int(dtd["hour"]))
        # print(int(dtd["minute"]))
        # print(int(dtd["second"]))
        # print(int(dtd["microsecond"]))
        dt = datetime.datetime(year,
                               int(dtd["month"]),
                               int(dtd["day"]),
                               int(dtd["hour"]),
                               int(dtd["minute"]),
                               int(dtd["second"]),
                               int(dtd["microsecond"]))
    else:
        try:
            # I'd like to be less restrictive than iso8601, but, this is what we do for now.
            matches = regex_timestamp_iso8601.match(line)
            if matches is not None:
                dt = datetime_parser(line, fuzzy=fuzzy)
        except ValueError:
            dt = None

    if dt is not None:
        did_get_good_time = True
    elif existing_time is None:
        dt = datetime.datetime.now()
    else:
        dt = existing_time

    return dt, did_get_good_time
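Both regexes used above (regExTimeGLog and regex_timestamp_iso8601) live at module level and are not shown. A hypothetical stand-in for the ISO-8601 prefilter, to show why it guards the fuzzy parse:

import re
from dateutil.parser import parse as datetime_parser

# Hypothetical stand-in for `regex_timestamp_iso8601`: a cheap prefilter so
# dateutil's expensive fuzzy parse only runs on lines that plausibly start
# with an ISO-8601 timestamp.
regex_timestamp_iso8601 = re.compile(r"\d{4}-\d{2}-\d{2}[T ]\d{2}:\d{2}:\d{2}")

line = "2021-03-04T05:06:07 starting worker pool"
if regex_timestamp_iso8601.match(line):
    print(datetime_parser(line, fuzzy=True))  # 2021-03-04 05:06:07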
Example #16
File: app.py Project: fridex/cleanup-job
def _delete_old_analyzes(analyzers):
    """Delete old analyzers."""
    now = datetime.datetime.now().timestamp()
    lifetime = datetime.timedelta(
        seconds=THOTH_ANALYZER_CLEANUP_TIME).total_seconds()

    for analyzer in analyzers:
        # TODO: also delete pods where pull failed
        creation_time = datetime_parser(
            analyzer['metadata']['creationTimestamp']).timestamp()
        if creation_time + lifetime <= now:
            _LOGGER.info("Deleting pod %r", analyzer['metadata']['name'])
            try:
                _delete_pod(analyzer['metadata']['name'])
            except Exception:
                _LOGGER.exception(
                    "Failed to delete pod {!r}, error is not fatal".format(
                        analyzer['metadata']['name']))
        else:
            _LOGGER.info("Keeping pod %r, not too old yet",
                         analyzer['metadata']['name'])
Example #17
    async def header_file_metadata(self):
        data = await self.provider.metadata(self.path,
                                            revision=self.requested_version)

        # Not setting etag for the moment
        # self.set_header('Etag', data.etag)  # This may not be appropriate

        if data.size is not None:
            self.set_header('Content-Length', data.size)

        if data.modified_utc is not None:
            last_modified = datetime_parser(data.modified_utc)
            last_modified_gmt = last_modified.astimezone(pytz.timezone('GMT'))
            self.set_header(
                'Last-Modified',
                last_modified_gmt.strftime('%a, %d %b %Y %H:%M:%S %Z'))

        self.set_header('Content-Type', data.content_type
                        or 'application/octet-stream')
        self.set_header('X-Waterbutler-Metadata',
                        json.dumps(data.json_api_serialized(self.resource)))
Example #18
def _creation_based_delete(item: Any, resources: Any, cleanup_namespace: str, metric: Any) -> None:
    """Delete the given object based on creation time."""
    now = datetime.datetime.now(datetime.timezone.utc)

    created = datetime_parser(item.metadata.creationTimestamp)
    lived_for = (now - created).total_seconds()
    ttl = _parse_ttl(item.metadata.labels.ttl)

    if lived_for > ttl:
        _LOGGER.info(
            "Deleting resource %r of type %r in namespace %r - created at %r",
            item.metadata.name,
            resources.kind,
            cleanup_namespace,
            item.metadata.creationTimestamp,
        )
        try:
            resources.delete(name=item.metadata.name, namespace=cleanup_namespace)
            metric.labels(
                namespace=cleanup_namespace,
                component=item.metadata.labels.component,
                resource="BuildConfig",
            ).inc()
        except Exception:
            _LOGGER.exception(
                "Failed to delete resource %r of type %r in namespace %r",
                item.metadata.name,
                resources.kind,
                cleanup_namespace,
            )
    else:
        _LOGGER.info(
            "Keeping resource %r of type %r in namespace %r ttl not expired yet (lived for %r, ttl is %r)",
            item.metadata.name,
            resources.kind,
            cleanup_namespace,
            lived_for,
            ttl,
        )
Example #19
def stamp_log_file(fname):
    with open(fname, 'r') as f:
        lines = f.readlines()

    if lines[0][:-1] == 'compressed':
        compressed = True
    elif lines[0][:-1] == 'raw':
        compressed = False
    else:
        raise NotImplementedError("The time-stamping does not support this type of file")

    start_time = datetime_parser(lines[1][12:-1])
    sampling_period = int(lines[2][15:-1])

    log_bytes = ''.join(lines[3:])
    if compressed:
        decomp_bytes = decompress_stream(bytearray(log_bytes))[0].tobytes()
        logs = convert(decomp_bytes)
    else:
        logs = convert(log_bytes)
    accel_logs = [[l.x, l.y, l.z] for l in logs if l.name.startswith('accel')]
    time_stamps = [start_time - datetime.timedelta(milliseconds=sampling_period * i) for i in range(len(accel_logs))][::-1]

    return pd.DataFrame(accel_logs, index=pd.DatetimeIndex(time_stamps), columns=['Ax', 'Ay', 'Az'], dtype=int)
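The index construction above backdates one timestamp per sample from start_time at sampling_period millisecond spacing, then reverses so the final sample lands exactly on start_time; a compact check with illustrative values:

import datetime
import pandas as pd
from dateutil.parser import parse as datetime_parser

start_time = datetime_parser("2020-01-01 00:00:01")
sampling_period = 500  # ms; illustrative value
time_stamps = [start_time - datetime.timedelta(milliseconds=sampling_period * i)
               for i in range(3)][::-1]
print(pd.DatetimeIndex(time_stamps))
# DatetimeIndex(['2020-01-01 00:00:00', '2020-01-01 00:00:00.500000',
#                '2020-01-01 00:00:01'], dtype='datetime64[ns]', freq=None)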
Example #20
 def start(self):
     return datetime_parser(self._xml.find('start').text, ignoretz=True)
Example #21
def parse_date(x):
    if isinstance(x, datetime):
        x = x.date()
    if isinstance(x, date):
        return x
    return datetime_parser(x).date()
Example #22
def parse_datetime(x):
    if isinstance(x, datetime):
        return x
    return datetime_parser(x)
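A quick check of the two coercion helpers above - they accept either strings or already-parsed objects (assumes the parse_date/parse_datetime definitions from Examples #21 and #22):

from datetime import date, datetime
from dateutil.parser import parse as datetime_parser

assert parse_date("2020-05-06") == date(2020, 5, 6)
assert parse_date(datetime(2020, 5, 6, 7, 8)) == date(2020, 5, 6)
assert parse_datetime("2020-05-06 07:08") == datetime(2020, 5, 6, 7, 8)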
Example #23
 def load_active_crypto_orders(self):
     info = rh.get_all_open_crypto_orders()
     for i in info:
         self.order_ids[datetime_parser(
             i['created_at']).timestamp()] = i['id']
     print(self.order_ids)
Example #24
 def get_secs(t):
     return datetime_parser(t).strftime("%s")
Example #25
 def endDate(self) -> datetime:
     try:
         return datetime_parser(self._endDate)
     except Exception:
         return self._endDate
Example #26
 def startDate(self) -> datetime:
     try:
         return datetime_parser(self._startDate)
     except Exception:
         return self._startDate
Example #27
 def time(self):
     return datetime_parser(self._xml.find('time').text, ignoretz=True)
Example #28
 def timeStamp(self) -> datetime:
     return datetime_parser(self._timeStamp)
Example #29
 def _to_timestamp(self, timestamp_str):
     """Converts a timestamp string as returned by the API to
     a native datetime object and return it."""
     return datetime_parser(timestamp_str)
Example #30
 def end(self):
     return datetime_parser(self._xml.find('end').text, ignoretz=True)
Example #31
 def _to_timestamp(self, timestamp_str):
     """Converts a timestamp string as returned by the API to
     a native datetime object and return it."""
     return datetime_parser(timestamp_str)
Example #32
def _do_cleanup(cleanup_namespace: str) -> None:
    """Perform the actual cleanup."""
    openshift = OpenShift()
    now = datetime.datetime.now(datetime.timezone.utc)

    for resource_version, resource_type, creation_delete, metric in _RESOURCES:
        resources = openshift.ocp_client.resources.get(
            api_version=resource_version, kind=resource_type)
        for item in resources.get(label_selector=_CLEANUP_LABEL_SELECTOR,
                                  namespace=cleanup_namespace).items:
            if item.status.phase == "Succeeded":
                _LOGGER.debug(
                    "Checking expiration of resource %r from namespace %r of kind %r",
                    item.metadata.name,
                    cleanup_namespace,
                    resources.kind,
                )

                ttl = item.metadata.labels.ttl
                try:
                    parsed_ttl = parse_ttl(ttl) if ttl else _DEFAULT_TTL
                except Exception:
                    _LOGGER.exception(
                        "Failed to parse TTL %r for resource %r of type %r in namespace %r the object will not be "
                        "deleted",
                        ttl,
                        item.metadata.name,
                        resources.kind,
                        cleanup_namespace,
                    )
                    continue

                if creation_delete:
                    if not item.metadata.creationTimestamp:
                        _LOGGER.info(
                            "Skipping resource %r of type %r- no creation timestsamp found in metadata",
                            item.metadata.name,
                            resources.type,
                        )
                        continue
                    created_str = item.metadata.creationTimestamp
                else:
                    if not item.status.completionTime:
                        _LOGGER.info(
                            "Skipping resource %r of type %r- no completion time found in status field",
                            item.metadata.name,
                            resources.kind,
                        )
                        continue
                    created_str = item.status.completionTime

                created = datetime_parser(created_str)
                lived_for = (now - created).total_seconds()

                if lived_for > parsed_ttl:
                    _LOGGER.info(
                        "Deleting resource %r of type %r in namespace %r - created at %r",
                        item.metadata.name,
                        resources.kind,
                        cleanup_namespace,
                        created_str,
                    )
                    try:
                        resources.delete(name=item.metadata.name,
                                         namespace=cleanup_namespace)
                        metric.labels(namespace=cleanup_namespace,
                                      component=item.metadata.labels.component,
                                      resource=resource_type).inc()
                    except Exception:
                        _LOGGER.exception(
                            "Failed to delete resource %r of type %r in namespace %r",
                            item.metadata.name,
                            resources.kind,
                            cleanup_namespace,
                        )
                else:
                    _LOGGER.info(
                        "Keeping resource %r of type %r in namespace %r ttl not expired yet (lived for %r, ttl is %r)",
                        item.metadata.name,
                        resources.kind,
                        cleanup_namespace,
                        lived_for,
                        parsed_ttl,
                    )
            else:
                _LOGGER.info(
                    "Skipping resource %r- at phase %r",
                    item.metadata.name,
                    item.status.phase,
                )
Example #33
    async def _cmd_csv(self, args):
        message = args.message
        cur = self.sql.cur

        self.log.info("Start _cmd_csv command")
        self.log.info(args)

        if args.message.attachments:
            for attachment in args.message.attachments:
                response = requests.get(attachment['url'])

                data = list(csv.DictReader(io.StringIO(response.text)))
                ids = set()
                try:
                    for row in data:
                        ids.add(str(int(row['ID'])))
                        row['Date'] = datetime_parser(row['Date']).timestamp()
                        row['Weight (g)'] = float(
                            row['Weight (g)']
                        ) if row['Weight (g)'] != '' else None
                        row['Length (cm)'] = float(
                            row['Length (cm)']
                        ) if row['Length (cm)'] != '' else None
                        row['Notes'] = row[
                            'Notes'] if row['Notes'] != '' else None
                except Exception as e:
                    msg = f"Couldn't handle csv because: {e}"
                    await self.client.send_message(message.channel, msg)
                    continue

                # Get dragons for this user
                cur = self.sql.cur
                user_id = args.message.author.id
                dragon_ids = ",".join(ids)
                cmd = f"""
                    SELECT *
                    FROM dragons
                    WHERE
                        user_id=:user_id
                        AND dragon_id IN ({dragon_ids})
                """
                dragons = cur.execute(cmd, locals()).fetchall()
                if dragons is None or len(dragons) != len(ids):
                    msg = "Whoops! I can't save that CSV file. I couldn't match up you, your dragon IDs and my database. Are you sure the ID column is correct, and you own all these dragons?"
                    await self.client.send_message(message.channel, msg)
                    continue

                dragon_names = ", ".join([x['name'] for x in dragons])

                question = f"About to store {len(data)} rows for {dragon_names}. Are you sure?"
                try:
                    choice = await self.client.confirm_prompt(
                        args.message.channel,
                        question,
                        user=args.message.author,
                        timeout=60)
                except TimeoutError:
                    continue

                if choice is not True:
                    continue

                for row in data:
                    dragon_id = row['ID']
                    log_date = row['Date']
                    mass = row['Weight (g)']
                    length = row['Length (cm)']
                    note = row['Notes']

                    cmd = """
                        INSERT 
                        INTO dragon_stat_logs
                        (
                            dragon_id,
                            log_date,
                            mass,
                            length,
                            note
                        ) VALUES (
                            :dragon_id,
                            :log_date,
                            :mass,
                            :length,
                            :note
                        )
                    """
                    cur.execute(cmd, locals())
                await self.sql.commit(now=True)

        elif args.template:
            fields = ["ID", "Date", "Weight (g)", "Length (cm)", "Notes"]
            csv_file = io.StringIO()

            writer = csv.DictWriter(csv_file, fieldnames=fields)
            writer.writeheader()

            csv_file.seek(0)

            await self.client.send_file(message.channel,
                                        csv_file,
                                        filename=f"data_template.csv")

            csv_file.close()

        else:
            msg = "You can upload a file with the comment `>dragon csv <ID>`, or call `>dragon csv --template` to get a template file!"
            await self.client.send_message(message.channel, msg)

        self.log.info("Finished _cmd_csv command")
        return