Example #1
def daterange(start_date: datetime.date, end_date: datetime.date):
    if isinstance(start_date, datetime.datetime):
        start_date = start_date.date()
    if isinstance(end_date, datetime.datetime):
        end_date = end_date.date()
    for n in range(int((end_date - start_date).days)):
        yield start_date + datetime.timedelta(n)
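A quick usage sketch (assumes only the datetime import, which the snippet itself omits):

import datetime

# the range is half-open: this prints 2021-01-01 and 2021-01-02, but not the end date
for d in daterange(datetime.date(2021, 1, 1), datetime.date(2021, 1, 3)):
    print(d)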
Example #2
def get_day_minute_period_data_force_from_db(code,
                                             from_date: date,
                                             until_date: date,
                                             db_suffix='_M'):
    # normalize possible datetime inputs to plain dates
    if isinstance(from_date, datetime):
        from_date = from_date.date()
    if isinstance(until_date, datetime):
        until_date = until_date.date()
    stock_db = MongoClient(db.HOME_MONGO_ADDRESS)['stock']
    return list(stock_db[code + db_suffix].find({
        '0': {
            '$gte': time_converter.datetime_to_intdate(from_date),
            '$lte': time_converter.datetime_to_intdate(until_date)
        }
    }))
Example #3
def try_to_lock_topic_for_monitor(
    topic: Topic, frequency: MonitorRuleStatisticalInterval,
    process_date: date, principal_service: PrincipalService
) -> Tuple[Optional[MonitorJobLock], bool]:
    if isinstance(process_date, datetime):
        process_date = process_date.date()

    lock_service = get_lock_service(principal_service)
    lock_service.begin_transaction()
    try:
        lock = MonitorJobLock(
            # lockId: MonitorJobLockId = None
            tenantId=principal_service.get_tenant_id(),
            topicId=topic.topicId,
            frequency=frequency,
            processDate=process_date,
            status=MonitorJobLockStatus.READY,
            userId=principal_service.get_user_id(),
        )
        lock_service.create(lock)
        lock_service.commit_transaction()
        return lock, True
    except Exception:
        lock_service.rollback_transaction()
        return None, False
Example #4
def try_to_lock_scheduler(
    scheduler: TopicSnapshotScheduler, process_date: date,
    principal_service: PrincipalService
) -> Tuple[Optional[TopicSnapshotJobLock], bool]:
    if isinstance(process_date, datetime):
        process_date = process_date.date()

    lock_service = get_lock_service(principal_service)
    lock_service.begin_transaction()
    # noinspection PyBroadException
    try:
        lock = TopicSnapshotJobLock(
            tenantId=principal_service.get_tenant_id(),
            schedulerId=scheduler.schedulerId,
            frequency=scheduler.frequency,
            processDate=process_date,
            rowCount=0,
            status=TopicSnapshotJobLockStatus.READY,
            userId=principal_service.get_user_id(),
        )
        lock_service.create(lock)
        lock_service.commit_transaction()
        return lock, True
    except Exception:
        lock_service.rollback_transaction()
        return None, False
Example #5
def _get_season(now: date) -> Season:
    """Return winter or summer."""
    if isinstance(now, datetime):
        now = now.date()
    now = now.replace(year=YEAR)
    return next(season for season, (start, end) in SEASONS
                if start <= now <= end)
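SEASONS and YEAR are referenced but not shown in this snippet; a minimal sketch of what they might look like (the boundary dates are illustrative, not the original values):

YEAR = 2000  # a fixed leap year, so February 29 input dates remain valid after replace()
SEASONS = (
    ('summer', (date(YEAR, 4, 1), date(YEAR, 9, 30))),
    ('winter', (date(YEAR, 10, 1), date(YEAR, 12, 31))),
    ('winter', (date(YEAR, 1, 1), date(YEAR, 3, 31))),
)
# _get_season(date(2020, 7, 15)) -> 'summer'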
Example #6
    def get_identifiers(self, as_of: dt.date = None) -> dict:
        """
        Get asset identifiers

        :param as_of: As of date for query
        :return: dict of identifiers

        **Usage**

        Get asset identifiers as of a given date. Where identifiers are temporal (and can change over time), this
        function returns the identifiers as of that point in time. If no date is provided as a parameter, the
        current PricingContext is used.

        **Examples**

        Get current asset identifiers:

        >>> gs = SecurityMaster.get_asset("GS", AssetIdentifier.TICKER)
        >>> gs.get_identifiers()

        Get identifiers as of 1Jan18:

        >>> gs.get_identifiers(dt.date(2018,1,1))

        Use PricingContext to determine as of date:

        >>> with PricingContext(dt.date(2018,1,1)) as ctx:
        ...     gs.get_identifiers()

        **See also**

        :class:`AssetIdentifier`
        :func:`get_asset`

        """
        if not as_of:
            as_of = PricingContext.current.pricing_date

            if isinstance(as_of, dt.datetime):
                as_of = as_of.date()

        valid_ids = set(item.value for item in AssetIdentifier)
        xrefs = GsAssetApi.get_asset_xrefs(self.__id)
        identifiers = {}

        for xref in xrefs:
            start_date = xref.startDate
            end_date = xref.endDate

            if start_date <= as_of <= end_date:
                identifiers = {
                    k.upper(): v
                    for k, v in xref.identifiers.as_dict().items()
                    if k.upper() in valid_ids
                }

        return identifiers
Example #7
def weekdays_until(to_date: datetime.date):
    """Days from utc_now() until @to_date. Weekends are excluded; holidays aren't.
    >>> # Given now = datetime.date(2019, 5, 9)
    >>> DateUtils.weekdays_until(datetime.date(2019, 5, 13))
    2
    """
    if isinstance(to_date, datetime.datetime):
        to_date = to_date.date()
    now = DateUtils.utc_now().date()
    daygenerator = (now + datetime.timedelta(x + 1) for x in range((to_date - now).days))
    return sum(1 for day in daygenerator if day.weekday() < 5)
Example #8
def get_project_weekly_effort(project: KippoProject, current_date: datetime.date = None):
    """
    Obtain the project weekly effort
    :param project: project to collect effort entries for
    :param current_date: date up to which effort is collected (defaults to today)
    :return: (all_status_entries, search_dates)
    """
    if not current_date:
        current_date = timezone.now().date()
    elif isinstance(current_date, datetime.datetime):
        current_date = current_date.date()

    if not project.start_date or not project.target_date:
        raise ProjectDatesError(f'{project.name} required dates not set: start_date={project.start_date}, target_date={project.target_date}')

    # get project participants
    participants = set(KippoTaskStatus.objects.filter(task__project=project,
                                                      task__assignee__github_login__isnull=False,
                                                      effort_date__gte=project.start_date,
                                                      effort_date__lte=current_date).values_list('task__assignee__github_login', flat=True))
    # get latest effort status
    # -- only a single entry per date

    # prepare dates
    search_dates = []
    start_date_calendar_info = project.start_date.isocalendar()
    start_date_year, start_date_week, _ = start_date_calendar_info
    initial_week_start_date = datetime.datetime.strptime(f'{start_date_year}-{start_date_week}-{TUESDAY_WEEKDAY}', '%Y-%W-%w').date()
    current_week_start_date = initial_week_start_date

    while current_week_start_date <= project.target_date:
        search_dates.append(current_week_start_date)
        last_week_start_date = current_week_start_date
        current_week_start_date += datetime.timedelta(days=7)
        if last_week_start_date < current_date < current_week_start_date:
            # add the current date (to show the current status)
            search_dates.append(current_date)
    if project.target_date not in search_dates:
        search_dates.append(project.target_date)

    active_column_names = project.columnset.get_active_column_names()
    all_status_entries = []  # state__in=GITHUB_ACTIVE_TASK_STATES
    for current_week_start_date in search_dates:
        previous_status_entries = KippoTaskStatus.objects.filter(task__project=project,
                                                                 task__assignee__github_login__isnull=False,
                                                                 effort_date=current_week_start_date,
                                                                 state__in=active_column_names).values('task__project', 'effort_date', 'task__assignee__github_login').annotate(task_count=Count('task'), estimate_days_sum=Coalesce(Sum('estimate_days'), Value(0)))

        all_status_entries.extend(list(previous_status_entries))

    if not all_status_entries:
        raise TaskStatusError(f'No TaskStatus found for project({project.name}) in ranges: {project.start_date} to {project.target_date}')

    return all_status_entries, search_dates
Example #9
def get_day_period_data(code,
                        from_date: date,
                        until_date: date,
                        db_suffix='_D'):
    # normalize possible datetime inputs to plain dates
    if isinstance(from_date, datetime):
        from_date = from_date.date()
    if isinstance(until_date, datetime):
        until_date = until_date.date()

    if from_date > datetime.now().date():
        from_date = datetime.now().date()

    if until_date > datetime.now().date():  # Cannot exceed today
        until_date = datetime.now().date()

    stock_db = MongoClient(db.HOME_MONGO_ADDRESS)['stock']

    db_data = list(stock_db[code + db_suffix].find({
        '0': {
            '$gte': time_converter.datetime_to_intdate(from_date),
            '$lte': time_converter.datetime_to_intdate(until_date)
        }
    }))
    #print('DB LEN', len(db_data))

    days = [time_converter.intdate_to_datetime(d['0']).date() for d in db_data]
    days = list(dict.fromkeys(days))
    #print('DAYS:', days)
    working_days = get_working_days(from_date, until_date)
    #print('WORKING DAYS:', working_days)
    empties = _insert_day_data(stock_db, code, days, working_days, db_suffix)
    if len(empties) > 0:
        db_data = list(stock_db[code + db_suffix].find({
            '0': {
                '$gte': time_converter.datetime_to_intdate(from_date),
                '$lte': time_converter.datetime_to_intdate(until_date)
            }
        }))
    #print(db_data)
    return db_data
Example #10
    def date_to(self, date_to: date):
        """Sets the date_to of this PullRequestMetricsRequest.

        The date up to which to measure the metrics.

        :param date_to: The date_to of this PullRequestMetricsRequest.
        """
        if date_to is None:
            raise ValueError("Invalid value for `date_to`, must not be `None`")

        if isinstance(date_to, datetime):
            date_to = date_to.date()
        self._date_to = date_to
Example #11
    def date_from(self, date_from: date):
        """Sets the date_from of this PullRequestMetricsRequest.

        The date from when to start measuring the metrics.

        :param date_from: The date_from of this PullRequestMetricsRequest.
        """
        if date_from is None:
            raise ValueError("Invalid value for `date_from`, must not be `None`")

        if isinstance(date_from, datetime):
            date_from = date_from.date()
        self._date_from = date_from
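Both setters repeat the same normalization. If the pattern spreads further, a small shared helper (hypothetical, not part of the generated client) keeps it in one place:

from datetime import date, datetime
from typing import Optional

def as_date(value: Optional[date], name: str) -> date:
    """Reject None and truncate datetime instances to their date part."""
    if value is None:
        raise ValueError(f"Invalid value for `{name}`, must not be `None`")
    if isinstance(value, datetime):
        value = value.date()
    return value

# e.g. self._date_to = as_date(date_to, 'date_to')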
Example #12
    def get_constituents(
        self,
        as_of: dt.date = None,
        position_type: PositionType = PositionType.CLOSE
    ) -> Tuple[PositionSet, ...]:
        """
        Get asset constituents

        :param as_of: As of date for query
        :param position_type: position type (e.g. open or close) for the query
        :return: Tuple of PositionSet objects

        **Usage**

        Get index constituents as of a given date. If no date is provided as a parameter, will use the current
        PricingContext.

        **Examples**

        Get current index constituents (defaults to close):

        >>> import datetime as dt
        >>>
        >>> gs = SecurityMaster.get_asset('GSTHHVIP', AssetIdentifier.TICKER)
        >>> gs.get_constituents()

        Get constituents as of market open on 3Jan18:

        >>> gs.get_constituents(dt.date(2018,1,3), PositionType.OPEN)

        Use PricingContext to determine as of date:

        >>> with PricingContext(dt.date(2018,1,1)) as ctx:
        ...     gs.get_constituents()

        **See also**

        :class:`AssetIdentifier`
        :func:`get_asset`

        """
        if not as_of:
            as_of = PricingContext.current.pricing_date

            if isinstance(as_of, dt.datetime):
                as_of = as_of.date()

        return GsAssetApi.get_asset_positions_for_date(self.__id, as_of,
                                                       position_type.value)
Example #13
def week_start(
    d: date,
    starts_on: Day = Day.SUN,
) -> date:
    """Return the first day of the week for the passed date.

	Args:
		d: When to get the week start for.
		starts_on: The day of the week that the week starts on.

	Returns:
		A the first day of the week containing d.
	"""
    if isinstance(d, datetime):
        d = d.date()
    days = (d.weekday() + 7 - starts_on) % 7
    d -= timedelta(days=days)
    return d
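Day is not shown here; assuming it is an IntEnum aligned with date.weekday() (MON=0 ... SUN=6), a worked call:

from datetime import date, datetime, timedelta
from enum import IntEnum

class Day(IntEnum):  # assumed definition, matching date.weekday() numbering
    MON, TUE, WED, THU, FRI, SAT, SUN = range(7)

# 2020-01-01 is a Wednesday (weekday 2); (2 + 7 - 6) % 7 == 3,
# so the week start is three days earlier
print(week_start(date(2020, 1, 1)))           # 2019-12-29 (Sunday)
print(week_start(date(2020, 1, 1), Day.MON))  # 2019-12-30 (Monday)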
Example #14
def get_next_date_by_dow(dow: str,
                         when: date = None,
                         direction: str = 'next'):
    """
    Takes a date object and determines when the next $dow is.
    E.G.: 2020-01-02 is a Thursday. The next Monday would be on 2020-01-06

    :param dow: The string representation of the next day of the week
    :param when: the 'when' relative to which the next dow is calculated
    :param direction: the 'direction' to search. Next or Previous
    :return:
    """
    log = logging.getLogger(__name__)

    # 'when' defaults to None so the current date is computed at call time;
    # a datetime.now().date() default argument would be frozen at import time
    if when is None:
        when = datetime.now().date()

    # Check if the user has supplied a special 'relative' string like 'tomorrow'
    _relative = get_next_date_by_relative(dow, when)
    if _relative is not False:
        return _relative

    # Function does not care about H:m:s ... only date
    if isinstance(when, datetime):
        log.debug("Truncating when: {}....".format(when))
        when = when.date()
        log.debug("... to: {}".format(when))

    # No relative, so try to do the usual weekday stuff
    log.debug(
        "attempting to figure out the '{}' dow://{} relative to `{}`".format(
            direction, dow, when))
    # Figure out how many days from now are needed to get to the next/previous dow
    # Note:  Monday == 0 ... Sunday == 6.
    if direction == 'next':
        _delta = ((get_dow_by_string(dow) + when.weekday()) % 7) + 1
        log.debug("_delta to '{}' '{}' is {} ".format(direction, dow, _delta))

        # Now that we know how many days to add, make a time delta obj and add that to when
        return when + timedelta(days=_delta)

    if direction == 'previous':
        _delta = ((when.weekday() - get_dow_by_string(dow)) % 7)
        # Now that we know how many days to add, make a time delta obj and add that to when
        return when - timedelta(days=_delta)
Example #15
def fetch_market_data(contract: Contract,
                      date: datetime.date,
                      kind: str,
                      tz_name: str,
                      ib=None) -> List:
    if isinstance(date, datetime.datetime):
        date = date.date()
    assert kind in ("TRADES", "BID_ASK")
    data: List = []

    while True:
        start_time = _determine_next_timestamp(
            date=date, timestamps=[d.time for d in data], tz_name=tz_name)
        logger.info(f"Using start_time: {start_time}")

        ticks = _request_historical_ticks(
            ib=ib,
            contract=contract,
            start_time=start_time.strftime("%Y%m%d %H:%M:%S %Z"),
            what=kind,
        )

        ticks = [t for t in ticks if t not in data]

        if not ticks or ticks[0].time < start_time:
            break

        logger.debug(
            f"Received {len(ticks)} ticks between {ticks[0].time} and {ticks[-1].time}"
        )

        last_timestamp = pd.Timestamp(ticks[-1].time)
        last_date = last_timestamp.astimezone(tz_name).date()

        if last_date != date:
            # ticks may spill into the next date; keep only ticks from the requested date
            data.extend([
                t for t in ticks
                if pd.Timestamp(t.time).astimezone(tz_name).date() == date
            ])
            break
        else:
            data.extend(ticks)
    return data
Example #16
def to_start_of_day(process_date: date):
    if isinstance(process_date, datetime):
        return process_date.date()
    else:
        return process_date
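Despite its name, the function returns a date rather than a midnight datetime; a quick check:

from datetime import date, datetime

print(to_start_of_day(datetime(2022, 3, 1, 14, 30)))  # 2022-03-01
print(to_start_of_day(date(2022, 3, 1)))              # 2022-03-01, returned unchanged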
Example #17
def get_engineer_project_load(schedule_start_date: datetime.date = None) -> Dict[KippoUser, List[KippoTask]]:
    """
    Schedule tasks to determine engineer work load
    :param schedule_start_date: date scheduling starts from (defaults to today)
    :return: A dictionary of KippoUsers with assigned tasks, where each task has its scheduled QluTask attached as task.qlu_task
    """
    if not schedule_start_date:
        schedule_start_date = timezone.now().date()
    elif isinstance(schedule_start_date, datetime.datetime):
        schedule_start_date = schedule_start_date.date()

    engineer_project_load = {}
    telos_tasks = {}
    for developer in KippoUser.objects.filter(is_developer=True, is_active=True):

        # get active taskstatus
        active_taskstatus = KippoTaskStatus.objects.filter(task__assignee=developer,
                                                           task__is_closed=False,
                                                           task__project__is_closed=False,
                                                           task__assignee__github_login__isnull=False,
                                                           task__project__target_date__gt=schedule_start_date,
                                                           state__in=settings.GITHUB_ACTIVE_TASK_STATES).exclude(task__assignee__github_login=settings.UNASSIGNED_USER_GITHUB_LOGIN).order_by('task__project__target_date')

        # get related projects and tasks
        qlu_tasks = []
        qlu_milestones = []
        related_projects = []
        added_ids = []
        default_minimum = 1
        default_suggested = 3
        maximum_multiplier = 1.7

        for status in active_taskstatus:
            # create qlu estimates and tasks

            # - create estimates for task
            minimum_estimate = int(status.minimum_estimate_days) if status.minimum_estimate_days else default_minimum
            suggested_estimate = int(status.estimate_days) if status.estimate_days else default_suggested
            maximum_estimate = status.maximum_estimate_days
            if not maximum_estimate:
                maximum_estimate = int(round(suggested_estimate * maximum_multiplier, 0))
            qestimates = QluTaskEstimates(minimum_estimate,
                                          suggested_estimate,
                                          maximum_estimate)

            # QluTask Fields: (id: Any, absolute_priority, depends_on, estimates, assignee, project_id, milestone_id)
            related_milestone = status.task.milestone
            if related_milestone:
                milestone_id = related_milestone.id
            else:
                # treat the parent project as a milestone to get the task start/end
                milestone_id = f'p-{status.task.project.id}'  # matches below in milestone creation
                qlu_milestone = QluMilestone(milestone_id,
                                             status.task.project.start_date,
                                             status.task.project.target_date)
                qlu_milestones.append(qlu_milestone)

            telos_tasks[status.task.id] = status.task
            qtask = QluTask(
                status.task.id,
                absolute_priority=0,
                estimates=qestimates,
                assignee=developer.github_login,
                project_id=status.task.project.id,
                milestone_id=milestone_id,
            )
            qlu_tasks.append(qtask)

            # get project
            project = status.task.project
            if project.id not in added_ids:
                related_projects.append(project)
                added_ids.append(project.id)

        # get related milestones
        for project in related_projects:
            project_milestones = project.active_milestones()
            if not project_milestones:
                # create a single milestone covering the range of the project
                m = KippoMilestone()  # dummy holder for processing
                m.id = f'p-{project.id}'
                m.start_date = project.start_date
                m.target_date = project.target_date
                project_milestones = [m]

            for milestone in project_milestones:
                qlu_milestone = QluMilestone(milestone.id,
                                             milestone.start_date,
                                             milestone.target_date)
                qlu_milestones.append(qlu_milestone)

        if qlu_tasks:
            holidays = {developer.github_login: list(developer.personal_holiday_dates())}
            scheduler = QluTaskScheduler(milestones=qlu_milestones,
                                         personal_holidays=holidays,
                                         start_date=schedule_start_date)
            scheduled_results = scheduler.schedule(qlu_tasks)
            engineer_project_load[developer] = []
            for qlu_task in scheduled_results.tasks():
                telos_task_id = qlu_task.id
                telos_task = telos_tasks[telos_task_id]
                # attach qlu_task to telos task
                telos_task.qlu_task = qlu_task
                engineer_project_load[developer].append(telos_task)
        else:
            engineer_project_load[developer] = []
    return engineer_project_load
Example #18
def cumulative_daily_totals_by_group(data,
                                     group_id_field,
                                     value_field,
                                     date_field,
                                     date_format='%Y%m%d',
                                     start_date: datetime.date = None,
                                     end_date: datetime.date = None,
                                     none_value=0):
    """
    For each group found, for each date, calculates the sum of all previous dates for that group.
    :param data: A list of records in the form [{group_field: group_id, value_field: value, date_field: date}, ...]
    :type data: list
    :param group_id_field: The name of the field containing the group ids used to link group records.  For example, 'postcode'
    :type group_id_field: str
    :param value_field: The name of the field containing the values to be averaged.
    :type value_field: str
    :param date_field: The name of the field containing the date values
    :type date_field: str
    :param date_format: If the date values in the data are date strings, set this parameter to the date format string (eg: '%Y%m%d').
    Default is '%Y%m%d'
    :type date_format: str
    :param start_date: The optional earliest date for which values will be calculated.  If not defined, the first date found in each group will
    be used as the start date.
    :type start_date: datetime.date
    :param end_date: The optional last date for which values will be calculated.  If not defined, the current date will be used.
    :type end_date: datetime.date
    :param none_value: The value to be used if no record is found for a specific date/region, or if the value in a data record is None.
    The default value is 0
    :type none_value: float
    :return: {yyyymmdd_group: avg}
    :rtype:
    """
    # calculate start and end dates
    if not start_date:
        start_date = datetime.date.min

    if end_date:
        if isinstance(end_date, datetime.datetime):
            end_date = end_date.date()
    else:
        end_date = datetime.datetime.now().date()

    # force None to 0.  Cannot add Nones to int or float
    if none_value is None:
        none_value = 0

    # extract relevant data and separate into groups
    group_data = {}
    for item in data:
        date_value = datetime_utils.to_date(item[date_field], date_format)

        if start_date <= date_value <= end_date:
            group = item[group_id_field]

            stat_value = item[value_field]
            if stat_value is None:
                stat_value = none_value

            # separate items into groups
            group_values = group_data.get(group, None)
            if not group_values:
                group_values = {}
                group_data[group] = group_values
            group_values[date_value] = group_values.get(date_value, 0) + stat_value

    # calculate cumulative daily totals
    result = {}
    for group, group_values in group_data.items():
        # if the start date was defined, use that date, otherwise use the first date in the group.
        if start_date > datetime.date.min:
            group_date = start_date
        else:
            group_date = min(group_values.keys())

        step = datetime.timedelta(days=1)

        total = 0
        while group_date <= end_date:
            total += group_values.get(group_date, none_value)
            key = '{}_{}'.format(group_date.strftime('%Y%m%d'), group)
            result[key] = total

            group_date += step

    return result
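A usage sketch; datetime_utils.to_date is a project helper, assumed here to parse strings with the given format:

data = [
    {'postcode': 'AB1', 'cases': 2, 'day': '20200301'},
    {'postcode': 'AB1', 'cases': 3, 'day': '20200302'},
    {'postcode': 'CD2', 'cases': 1, 'day': '20200302'},
]
totals = cumulative_daily_totals_by_group(
    data, group_id_field='postcode', value_field='cases', date_field='day',
    start_date=datetime.date(2020, 3, 1), end_date=datetime.date(2020, 3, 2))
# totals == {'20200301_AB1': 2, '20200302_AB1': 5,
#            '20200301_CD2': 0, '20200302_CD2': 1}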
Example #19
def construct_key(k_date: dt.date, k_time: dt.time, detector_number: str) -> KeyLv0:
    # plain date/time objects have no .date()/.time() methods; combine them directly
    event_datetime = dt.datetime.combine(k_date, k_time)
    return KeyLv0(event_datetime, detector_number)
Example #20
def cli(src: str,
        targetdir: str,
        begin_date: datetime.date,
        end_date: datetime.date,
        roi: Union[str, List[float]],
        region: str,
        sgrid: bool,
        force_doy: bool,
        filter_product: str,
        filter_vampc: str,
        target_srs: str,
        co: Tuple[str],
        clip_valid: bool,
        round_int: int,
        gdal_kwarg: Union[Tuple[str], Dict[str, Union[str, int, float]]],
        overwrite: bool,
        last_smoothed: str) -> List:
    """Creates GeoTiff Mosaics from HDF5 files.

    The input can be either raw or smoothed HDF5 files. With the latter,
    the S-grid can also be mosaiced using the `--sgrid` flag.
    If no ROI is passed, then the full extent of the input files will be mosaiced, otherwise
    the GeoTiffs will be clipped to the ROI after warping.
    By default, the MODIS data will be warped to WGS1984 (EPSG:4326), but a custom spatial reference
    can be passed in with `--target-srs`, in which case the target resolution has to be defined manually too. Optionally,
    `--target-srs` can be set to `None` to use the product's native projection.
    If required, the output data can be clipped to the valid data range of the input data using the `--clip-valid` flag.
    The output data can also be rounded: floats (e.g. sgrid) to the defined precision, integers to the defined
    exponent of 10 (round_int is multiplied by -1 and passed to np.round).
    Specific creation options can be passed to gdalwarp and gdal_translate using the `--co` flag. The flag can be used multiple times;
    each input needs to be in the GDAL format for creation options, e.g. `KEY=VALUE`.
    Additional options can be passed to gdal.Translate (and, with restrictions, to warp) using `--gdal-kwarg`,
    e.g. `--gdal-kwarg xRes=10 --gdal-kwarg yRes=10`. The additional options can be provided either as a tuple of `KEY=VALUE` strings or as a key-value dictionary.
    The keywords are sensitive to how GDAL expects them,
    as they are passed directly to gdal.TranslateOptions. For details, please check the documentation of gdal.TranslateOptions.

    Args:
        src (str): Input directory (or file).
        targetdir (str): Target directory.
        begin_date (datetime.date): Start date for tiffs.
        end_date (datetime.date): End date for tiffs.
        roi (str): ROI for clipping. Passing ROI as a list[float] is also supported.
        region (str): Region for filename.
        sgrid (bool): Extract sgrid instead of data.
        force_doy (bool): Force DOY in filename.
        filter_product (str): Filter input by product code.
        filter_vampc (str): Filter input by VAM parameter code.
        target_srs (str): Target spatial reference (in format GDAL can process) or "None".
        co (Tuple[str]): Creation options passed to gdal.Translate.
        clip_valid (bool): Clip data to valid range.
        round_int (int): Round integer.
        gdal_kwarg (Tuple[str]): translateOptions to the internal call to gdal::translate();
                                 the Tuple of strings (item formatting: "key=value") is parsed into a dict.
                                 Alternatively, passing a dict instead of a Tuple[str] is also supported.
        overwrite (bool): Overwrite existing Tiffs.
        last_smoothed (str): Rawdate (MODIS time step) that is checked to be the last in series at time of smoothing.

    """

    src_input = Path(src)

    if not src_input.exists():
        msg = "src_dir does not exist."
        log.error(msg)
        raise ValueError(msg)

    if src_input.is_dir():
        files = list(src_input.glob("*.h5"))
    else:
        files = [src_input]

    if filter_product is not None:
        product = filter_product.upper()
        files = [x for x in files if product in x.name]

    if filter_vampc:
        vampc = filter_vampc.upper()
        files = [x for x in files if vampc in x.name]

    if not files:
        msg = "No files found to process! Please check src and/or adjust filters!"
        log.error(msg)
        raise ValueError(msg)

    groups = [REGEX_PATTERNS["tile"].sub("*", x.name) for x in files]
    group_check = {".".join(x.split(".")[:-2]) for x in groups}
    if len(group_check) > 1:
        raise ValueError("Multiple product groups in input. Please filter or use separate directories!")

    groups = list(set(groups))

    if roi is not None:
        if not isinstance(roi, list):
            roi = [float(x) for x in roi.split(',')]
        if len(roi) != 4:
            raise ValueError("ROI for clip needs to be bounding box in format xmin,ymin,xmax,ymax")

        roi[1], roi[3] = roi[3], roi[1]


    if targetdir is None:
        if src_input.is_dir():
            targetdir = src_input
        else:
            targetdir = src_input.parent
    else:
        targetdir = Path(targetdir)

    if not targetdir.exists():
        targetdir.mkdir()

    if not targetdir.is_dir():
        msg = "Target directory needs to be a valid path!"
        log.error(msg)
        raise ValueError(msg)

    if begin_date:
        begin_date = begin_date.date()

    if end_date:
        end_date = end_date.date()

    if sgrid:
        dataset = "sgrid"
        clip_valid = False
    else:
        dataset = "data"

    if round_int is not None:
        round_int = round_int * -1

    if target_srs.lower() == "none":
        target_srs = None

    gdal_kwargs = {}
    if gdal_kwarg:
        if not isinstance(gdal_kwarg, dict):
            gdal_kwargs.update(
                {key:value for x in gdal_kwarg for key, value in [x.split("=")]}
            )
        else:
            gdal_kwargs = gdal_kwarg
    
    if last_smoothed is not None:
        last_smoothed = last_smoothed.strftime("%Y%j")
    
    click.echo("\nSTARTING modis_window.py!")

    mosaics = []
    for group in groups:
        log.debug("Processing group %s", group)

        group_pattern = re.compile(group)
        group_files = [str(x) for x in files if group_pattern.match(x.name)]

        mosaic = ModisMosaic(group_files)

        mosaics.extend(
            mosaic.generate_mosaics(
                dataset=dataset,
                targetdir=targetdir,
                target_srs=target_srs,
                aoi=roi,
                overwrite=overwrite,
                force_doy=force_doy,
                prefix=region,
                start=begin_date,
                stop=end_date,
                clip_valid=clip_valid,
                round_int=round_int,
                last_smoothed=last_smoothed,
                creationOptions=list(co),
                **gdal_kwargs,
                )
        )

    click.echo("\nCOMPLETED modis_window.py!")
    return mosaics