Example #1
def GetMetaDataForList(commit_range,
                       git_dir=None,
                       count=None,
                       series=None,
                       allow_overwrite=False):
    """Reads out patch series metadata from the commits

    This does a 'git log' on the relevant commits and pulls out the tags we
    are interested in.

    Args:
        commit_range: Range of commits to count (e.g. 'HEAD..base')
        git_dir: Path to git repository (None to use default)
        count: Number of commits to list, or None for no limit
        series: Series object to add information into. By default a new series
            is started.
        allow_overwrite: Allow tags to overwrite an existing tag
    Returns:
        A Series object containing information about the commits.
    """
    if not series:
        series = Series()
    series.allow_overwrite = allow_overwrite
    params = gitutil.LogCmd(commit_range,
                            reverse=True,
                            count=count,
                            git_dir=git_dir)
    stdout = command.RunPipe([params], capture=True).stdout
    ps = PatchStream(series, is_log=True)
    for line in stdout.splitlines():
        ps.ProcessLine(line)
    ps.Finalize()
    return series
Example #2
def GetMetaDataForList(commit_range, git_dir=None, count=None,
                       series=None, allow_overwrite=False):
    """Reads out patch series metadata from the commits

    This does a 'git log' on the relevant commits and pulls out the tags we
    are interested in.

    Args:
        commit_range: Range of commits to count (e.g. 'HEAD..base')
        git_dir: Path to git repository (None to use default)
        count: Number of commits to list, or None for no limit
        series: Series object to add information into. By default a new series
            is started.
        allow_overwrite: Allow tags to overwrite an existing tag
    Returns:
        A Series object containing information about the commits.
    """
    if not series:
        series = Series()
    series.allow_overwrite = allow_overwrite
    params = gitutil.LogCmd(commit_range, reverse=True, count=count,
                            git_dir=git_dir)
    stdout = command.RunPipe([params], capture=True).stdout
    ps = PatchStream(series, is_log=True)
    for line in stdout.splitlines():
        ps.ProcessLine(line)
    ps.Finalize()
    return series
Example #3
    def get_series(self, url):
        domain = get_tld(url)
        scrap_info = self.scrap_info[domain]['series']['css_path']
        series = Series(scrap_info)
        series.scrap(url)

        return series.get()
Example #4
    def _tvdbGetInfo(self):

        try:
            _shows = self.db.search(self.series.titleBase, "en")
        except (TVDBAttributeError, TVDBIndexError, TVDBValueError,
                error.BadData):
            _an_error = traceback.format_exc()
            log.debug(
                traceback.format_exception_only(type(_an_error),
                                                _an_error)[-1])
            raise SeriesNotFound

        if len(_shows) == 0: raise SeriesNotFound
        if len(_shows) == 1:
            if matching(self.series.title.lower(),
                        self.decode(_shows[0].SeriesName).lower(),
                        factor=85):
                _shows[0].update()
                _series = Series(tvdb=_shows[0])

                self.series.update(_series)
                self.series.source = 'tvdb'
                self.series.tvdb_info = _series
                return
            else:
                raise SeriesNotFound

        _rankings = {'Continuing': {}, 'Ended': {}, 'Other': {}}
        for _show in _shows:
            _title_suffix = self._check_suffix.match(
                self.decode(_show.SeriesName))
            if _title_suffix:
                _score = matching(self.series.titleBase.lower(),
                                  _title_suffix.group('SeriesName').lower())
            else:
                _score = matching(self.series.titleBase.lower(),
                                  self.decode(_show.SeriesName).lower())
            if _score < 90:
                continue

            _show.update()
            _series = Series(tvdb=_show)
            if _score in _rankings[_series.status]:
                _rankings[_series.status][_score][_series.title] = _series
            else:
                _rankings[_series.status][_score] = {_series.title: _series}

        if not _rankings: raise SeriesNotFound

        self._reviewShowData(_rankings, 'tvdb')

        return
Example #5
 def __init__(self, match_conf, dir_path):
     self.series1 = []
     for s1 in match_conf.series1.series:
         fn1 = os.path.join(dir_path, s1.name + '.new')
         if os.path.isfile(fn1):
             self.series1.append(
                 Series.read(fn1, name=s1.name + '-tuned', col1=1, col2=2))
     match_file = os.path.join(dir_path, match_conf.matchfile)
     if os.path.isfile(match_file):
         self.match = Series.read(match_file,
                                  name=os.path.basename(dir_path) + '-rel',
                                  col1=1,
                                  col2=3)
Example #6
    def __init__(self, rtnDict=True):
        log.trace('SeriesInfo.__init__')

        super(SeriesInfo, self).__init__()

        seriesinfo_group = self.options.parser.add_argument_group(
            "Episode Detail Options", description=None)
        seriesinfo_group.add_argument("--sn",
                                      "--name",
                                      type=str,
                                      dest='series_name')
        seriesinfo_group.add_argument("--season", type=int, dest='season')
        seriesinfo_group.add_argument("--epno",
                                      type=int,
                                      action='append',
                                      dest='epno')
        seriesinfo_group.add_argument("--tvdb",
                                      dest="processes",
                                      action="append_const",
                                      const='tvdb',
                                      help="Information to come from TVDB")
        seriesinfo_group.add_argument("--tvrage",
                                      dest="processes",
                                      action="append_const",
                                      const='tvrage',
                                      help="Information to come from TVRage")
        seriesinfo_group.add_argument(
            "--MyTrakt",
            dest="processes",
            action="append_const",
            const='MyTrakt',
            help="Information to come from MyTrakt.tv")
        seriesinfo_group.add_argument(
            "--series-only",
            "--so",
            dest="get_episodes",
            action="store_false",
            default=False,
            help="Information to come from MyTrakt.tv")

        self.db = api.TVDB("959D8E76B796A1FB")

        self.trakt_user = None

        self._check_suffix = re.compile(
            '^(?P<SeriesName>.*)[ \._\-][\(](?P<Suffix>(?:19|20)\d{2}|[a-zA-Z]*)[\)].*$',
            re.I)

        self.last_series = Series()
        self.series = Series()
        self.rtnDict = rtnDict
Example #7
	def auto(self, dir):
		#List all dirs
		dirs = listDirs(os.path.join(settings.MEDIA_DIR, dir))
		
		for d in dirs[:]:
			series = Series()
			series.auto(d)
			
			# Check if auto found any files
			if len(series.videos) != 0:
				self.series.append(series)
			
			else:
				dirs.remove(d)
Example #8
    def cost(self, load_data=None, start_at=None, end_at=None, step_count=None):
        """calculate the cost of energy based on the provided tariff

        R script produces one output file:
        timestamp, previous-interval-cost, cumulative-previous-interval-cost

        [tariff.R command]
        ./tariff.R
            --loadFile=LOAD_FILE
            --tariffFile=TARIFF_FILE
            --outputTimestampFile=OUTPUT_TIMES_FILE
            --demandResponseFile=DEMAND_RESPONSE_DATES
            --outputFile=OUTPUT_FILE
        """
        if load_data is None: load_data = self.training_load_series
        
        if not isinstance(load_data, Series):
            raise Exception("load_data argument must be a Series object")
        if not isinstance(self.tariff, Tariff):
            raise Exception("cannot calculate cost - no tariff provided")

        output_times = self._build_output_time_series(start_at, end_at,
                                                      step_size=900,
                                                      step_count=step_count)

        # ----- write temporary files ----- #
        load_tmp            = load_data.write_to_tempfile(exclude=False)
        tariff_tmp          = self.tariff.write_tariff_to_tempfile()
        output_times_tmp    = output_times.write_to_tempfile()
        output_tmp          = tempfile.NamedTemporaryFile()

        # ----- build command ----- #
        cmd = path.join(self.model_dir, 'tariff.R')
        cmd += " --loadFile=%s"             % load_tmp.name
        cmd += " --tariffFile=%s"           % tariff_tmp.name
        cmd += " --outputTimestampFile=%s"  % output_times_tmp.name
        cmd += " --outputFile=%s"           % output_tmp.name

        if len(self.tariff.dr_periods) > 0:
            dr_periods_tmp = self.tariff.write_dr_periods_to_tempfile()
            cmd += " --demandResponseFile=%s" % dr_periods_tmp.name

        self._run_script(cmd)
        
        # ----- process results ----- #
        cost_series             = Series(output_tmp.name, self.timezone, data_column=1)
        cumulative_cost_series  = Series(output_tmp.name, self.timezone, data_column=2)

        return cost_series, cumulative_cost_series
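The docstring above spells out the flag interface of tariff.R. As a standalone sketch of the same tempfile-plus-command-string pattern, using made-up data and only printing the command rather than invoking the R script (which is specific to that project's model directory):

import tempfile

# Write a throwaway input file, roughly what write_to_tempfile() is presumed to do.
load_tmp = tempfile.NamedTemporaryFile(mode='w', suffix='.csv', delete=False)
load_tmp.write("2013-01-01 00:00:00,1.25\n2013-01-01 00:15:00,1.31\n")
load_tmp.flush()
output_tmp = tempfile.NamedTemporaryFile()

# Compose the shell command by appending one flag at a time, as above.
cmd = "./tariff.R"
cmd += " --loadFile=%s" % load_tmp.name
cmd += " --outputFile=%s" % output_tmp.name
print(cmd)   # in the example this string is handed to self._run_script(cmd)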
Example #9
    def std(self,
            verbose=True,
            decode=True,
            passes=None,
            num_threads=1,
            apply_experimental_transforms=False):
        """ Eager operation to compute the standard deviation of the values in each column

        Parameters
        ----------
        verbose, decode, passes, num_threads, apply_experimental_transforms
            see LazyResult

        Returns
        -------
        Series

        """
        index = []
        data = []
        for column_name in self:
            index.append(column_name)
            # get as series
            series = self[str(column_name)]
            # apply the operation
            data.append(series.std().evaluate(verbose, decode, passes,
                                              num_threads,
                                              apply_experimental_transforms))

        return Series(np.array(data), np.dtype(np.float64),
                      Index(np.array(index).astype(np.str), np.dtype(np.str)))
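For comparison, the same eager column-by-column reduction can be mirrored with plain pandas. This is only a rough analogue: the example above targets a lazy, Weld-backed Series/Index API, not pandas.

import pandas as pd

df = pd.DataFrame({'a': [1.0, 2.0, 3.0], 'b': [4.0, 6.0, 8.0]})
index, data = [], []
for column_name in df:                    # iterates over column names, as above
    index.append(column_name)
    data.append(df[column_name].std())    # eager per-column reduction
print(pd.Series(data, index=index))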
Example #10
    def _aggregate(self,
                   operation,
                   verbose=True,
                   decode=True,
                   passes=None,
                   num_threads=1,
                   apply_experimental_transforms=False):
        assert isinstance(operation, (str, unicode))

        index = []
        data = []
        for column_name in self:
            index.append(column_name)
            # get as series
            series = self[str(column_name)]
            # apply the operation
            data.append(
                LazyResult(
                    weld_aggregate(series.expr, operation,
                                   series.weld_type), series.weld_type,
                    0).evaluate(verbose, decode, passes, num_threads,
                                apply_experimental_transforms))

        return Series(
            np.array(data).astype(np.float64), np.dtype(np.float64),
            Index(np.array(index).astype(np.str), np.dtype(np.str)))
Example #11
 def read_data(self, role, parameter, filename, base_dir=None):
     """Read a data series.
     
     Read a data series (record or target curve) into Scoter.
     
     Args:
         role: 0 for record, 1 for target
         parameter: 0 for d18O, 1 for RPI
         filename: path to data file
             If a filename of "" is supplied, read_data will ignore
             it and return with no error.
         base_dir: base directory used to resolve filename if it
             is a relative path
     """
     assert (0 <= role <= 1)
     assert (0 <= parameter <= 1)
     param_name = ("d18o", "rpi")[parameter]
     if os.path.isabs(filename):
         full_path = filename
     else:
         if base_dir is None:
             return
         full_path = os.path.join(base_dir, filename)
     if full_path != "" and os.path.isfile(full_path):
         logger.debug("Reading file: %d %d %s" %
                      (role, parameter, full_path))
         self.filenames[role][parameter] = full_path
         self.series[role][parameter] = Series.read(full_path,
                                                    parameter=param_name)
Example #12
 def __getitem__(self, item):
     if isinstance(item, pd.Series):
         return self.data_df.__getitem__(item).index
     elif isinstance(item, list):
         return StyleFrame(self.data_df.__getitem__(item))
     else:
         return Series(self.data_df.__getitem__(item))
Example #13
	def getShow(self):
		_series = Series(title=self.args.series_name)
		_series = self.getShowInfo(_series)

		for key, value in _series.__dict__.iteritems():
			log.info('Key: {}   Value: {}'.format(key, value))
		return
Example #14
def GetMetaDataForList(commit_range, git_dir=None, count=None,
                       series=None):
    """Reads out patch series metadata from the commits

    This does a 'git log' on the relevant commits and pulls out the tags we
    are interested in.

    Args:
        commit_range: Range of commits to count (e.g. 'HEAD..base')
        git_dir: Path to git repository (None to use default)
        count: Number of commits to list, or None for no limit
        series: Series object to add information into. By default a new series
            is started.
    Returns:
        A Series object containing information about the commits.
    """
    if series is None:
        series = Series()
    params = ['git', 'log', '--no-color', '--reverse', '--no-decorate',
              commit_range]
    if count is not None:
        params[2:2] = ['-n%d' % count]
    if git_dir:
        params[1:1] = ['--git-dir', git_dir]
    pipe = [params]
    stdout = command.RunPipe(pipe, capture=True).stdout
    ps = PatchStream(series, is_log=True)
    for line in stdout.splitlines():
        ps.ProcessLine(line)
    ps.Finalize()
    return series
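The two slice assignments above splice optional arguments into fixed positions of the argv list; a standalone trace of how they compose (repository path and count are made up):

params = ['git', 'log', '--no-color', '--reverse', '--no-decorate', 'HEAD~3..HEAD']
count, git_dir = 2, '/tmp/repo/.git'
if count is not None:
    params[2:2] = ['-n%d' % count]          # lands right after 'log'
if git_dir:
    params[1:1] = ['--git-dir', git_dir]    # lands right after 'git'
print(params)
# ['git', '--git-dir', '/tmp/repo/.git', 'log', '-n2', '--no-color',
#  '--reverse', '--no-decorate', 'HEAD~3..HEAD']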
Example #15
    def diff(self, start_at=None, end_at=None, step_size=900, step_count=None):
        """calculate the difference between baseline and actual

        R script produces two output files:
        (1) diff:       timestamp,  kw_diff,    cumulative_kwh_diff
        (2) baseline:   timestamp,  kw_base,    cumulative_kwh_base

        [diff.R command]
        ./diff.R
            --loadFile=LOAD_FILE
            --baselineFile=BASELINE_LOAD_FILE
            --outputTimesFile=OUTPUT_TIMES_FILE
            --outputFile=OUTPUT_DIFF_FILE
            --predictedBaselineOutputFile=OUTPUT_BASE_FILE
        """
        if self.baseline_series is None: self.baseline()
        
        output_times = self._build_output_time_series(start_at, end_at,
                                                      step_size, step_count)

        # ----- write temporary files ----- #
        load_tmp            = self.training_load_series.write_to_tempfile(exclude=False)
        baseline_tmp        = self.baseline_series.write_to_tempfile()
        output_times_tmp    = output_times.write_to_tempfile()
        output_diff_tmp     = tempfile.NamedTemporaryFile()
        output_base_tmp     = tempfile.NamedTemporaryFile()
        
        # ----- build command ----- #
        cmd = path.join(self.model_dir, 'diff.R')
        cmd += " --loadFile=%s"                     % load_tmp.name
        cmd += " --baselineFile=%s"                 % baseline_tmp.name
        cmd += " --outputTimesFile=%s"              % output_times_tmp.name
        cmd += " --outputFile=%s"                   % output_diff_tmp.name
        cmd += " --predictedBaselineOutputFile=%s"  % output_base_tmp.name
        
        # ----- run script ----- #
        self._run_script(cmd)

        # ----- process results ----- #
        kw_diff = Series(output_diff_tmp.name, self.timezone, data_column=1)
        kw_base = Series(output_base_tmp.name, self.timezone, data_column=1)

        cumulative_kwh_diff = Series(output_diff_tmp.name, self.timezone, data_column=2)
        cumulative_kwh_base = Series(output_base_tmp.name, self.timezone, data_column=2)

        return kw_diff, kw_base, cumulative_kwh_diff, cumulative_kwh_base
Example #16
    def add_series(self):
        data_mgr = DatabaseManager(Config().database_name, None)
        series_args = {}
        for i in range(self.add_series_table.rowCount()):
            try:
                if self.add_series_table.item(i, 1).background() == Qt.red:
                    return
            except AttributeError:
                pass

            curr_heading = self.add_series_table.item(i, 0).text()
            try:
                curr_text = self.add_series_table.item(i, 1).text()
            except AttributeError:  # is_completed
                curr_text = (self.add_series_table.cellWidget(i, 1)
                             .currentText())

            if curr_heading == "Name":
                series_args['name'] = curr_text
                if series_args['name'] in ["", "Unknown"]:
                    self.add_series_table.item(i, 1).setBackground(Qt.red)
                    return
            elif curr_heading == "Alt. Names":
                series_args['alt_names'] = curr_text
            elif curr_heading == "Author":
                series_args['author'] = curr_text
            elif curr_heading == "Volumes Owned":
                if curr_text in ["None", "0", ""]:
                    series_args['volumes_owned'] = generate_volumes_owned("")
                else:
                    series_args['volumes_owned'] = generate_volumes_owned(
                        curr_text)
            elif curr_heading == "Publisher":
                series_args['publisher'] = curr_text
            elif curr_heading == "Completed":
                status = curr_text
                series_args['is_completed'] = 1 if status == "Yes" else 0

        new_series = Series(**series_args)

        if new_series.add_series_to_database(data_mgr):
            cur = data_mgr.query("SELECT rowid FROM series WHERE name='%s'"
                                 % series_args['name'].replace("'", "''"))
            self.added = cur.fetchone()[0]
            self.close()
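The rowid lookup above escapes quotes by hand and interpolates the name into the SQL text. DatabaseManager's query signature isn't shown, so here is the same lookup sketched with plain sqlite3 placeholders, which avoids the manual escaping:

import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute("CREATE TABLE series (name TEXT)")
conn.execute("INSERT INTO series (name) VALUES (?)", ("O'Brien's Saga",))
cur = conn.execute("SELECT rowid FROM series WHERE name=?", ("O'Brien's Saga",))
print(cur.fetchone()[0])   # 1 -- the apostrophe needs no special handling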
Example #17
 def new_series(self, path, fields):
     if path not in self.series:
         # New segment
         self.series[path] = Series(self._generate_serial(), fields)
         # Unlike segments, for which the register is modified during commits,
         # creation and deletion of series are immediately recorded into the register
         register.record_series(path, self.series[path])
     else:
         raise KeyError('Series already exists')
Example #18
    def _getInfoFromProviders(self, processOrder):

        if self.last_series.title == self.series.title:
            self.series.copyShow(self.last_series)
            return
        else:
            self.last_series = Series(title=self.series.title)

        options = {
            'tvdb': self._tvdbGetInfo,
            'MyTrakt': self._traktGetInfo,
            'tvrage': self._tvrageGetInfo
        }

        try:
            for service in processOrder:
                try:
                    options[service]()
                    if self.series.keysFound:
                        #						if not self.series.tvdb_id and 'tvdb' in processOrder:
                        #							self.series.title = re.sub(' and ', ' & ', self.series.title)
                        #							options['tvdb']()
                        raise GetOutOfLoop
                except SeriesNotFound:
                    sys.exc_clear()
                except GetOutOfLoop:
                    raise GetOutOfLoop
                except:
                    _an_error = traceback.format_exc()
                    log.debug(
                        traceback.format_exception_only(
                            type(_an_error), _an_error)[-1])
                    raise SeriesNotFound
            if self.series.keysFound:
                raise GetOutOfLoop
            self.last_request = {'LastRequestName': ''}
            raise SeriesNotFound('ALL: Unable to locate series: {}'.format(
                self.series.title))
        except GetOutOfLoop:
            sys.exc_clear()

        self.last_series.copyShow(self.series)

        return
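The control flow above walks a list of providers and uses a sentinel exception to bail out as soon as one succeeds. A stripped-down, self-contained sketch of that pattern (the names below are illustrative, not the project's):

class GetOutOfLoop(Exception):
    pass

class NotFound(Exception):
    pass

def lookup(title, providers):
    """Try providers in order; stop at the first one that finds the title."""
    result = None
    try:
        for fetch in providers:
            try:
                result = fetch(title)
                if result is not None:
                    raise GetOutOfLoop
            except NotFound:
                continue              # fall through to the next provider
        raise NotFound(title)         # every provider came up empty
    except GetOutOfLoop:
        return result

print(lookup('Foo', [lambda t: None, lambda t: {'title': t}]))   # {'title': 'Foo'}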
Example #19
    def __init__(self, rtnDict=True):
        log.trace("SeriesInfo.__init__")

        super(SeriesInfo, self).__init__()

        seriesinfo_group = self.options.parser.add_argument_group("Episode Detail Options", description=None)
        seriesinfo_group.add_argument("--sn", "--name", type=str, dest="series_name")
        seriesinfo_group.add_argument("--season", type=int, dest="season")
        seriesinfo_group.add_argument("--epno", type=int, action="append", dest="epno")
        seriesinfo_group.add_argument(
            "--tvdb", dest="processes", action="append_const", const="tvdb", help="Information to come from TVDB"
        )
        seriesinfo_group.add_argument(
            "--tvrage", dest="processes", action="append_const", const="tvrage", help="Information to come from TVRage"
        )
        seriesinfo_group.add_argument(
            "--MyTrakt",
            dest="processes",
            action="append_const",
            const="MyTrakt",
            help="Information to come from MyTrakt.tv",
        )
        seriesinfo_group.add_argument(
            "--series-only",
            "--so",
            dest="get_episodes",
            action="store_false",
            default=False,
            help="Information to come from MyTrakt.tv",
        )

        self.db = api.TVDB("959D8E76B796A1FB")

        self.trakt_user = None

        self._check_suffix = re.compile(
            "^(?P<SeriesName>.*)[ \._\-][\(](?P<Suffix>(?:19|20)\d{2}|[a-zA-Z]*)[\)].*$", re.I
        )

        self.last_series = Series()
        self.series = Series()
        self.rtnDict = rtnDict
Example #20
    def parse(self):
        series_tmp = []
        for serie in os.listdir(self.path):
            if not (serie.startswith(".")):
                series_tmp.append(serie)

        series = []
        for serie in sorted(series_tmp):
            series.append(Series(self.path + "/" + serie, serie))

        return series
Example #21
def get_series_actors(series: Series,
                      actors_from_original_webpage: List[Actor],
                      show_all=True) -> List[Actor]:
    limit = 20
    uri = series.wikidata_uri
    if uri is None:
        wikidata_actors = {}
    else:
        wikidata_actors = get_wikidata_actors(uri)
    sorted_actors = []

    for actor_with_name_only in actors_from_original_webpage:
        if actor_with_name_only.name in wikidata_actors.keys():
            actor = wikidata_actors[actor_with_name_only.name]
            actor_with_name_only.improve(actor)
        elif wikidata_uri := series.get_actor_wikidata_uri(
                actor_name=actor_with_name_only.name):
            actor = query_wikidata_for_actor_info(wikidata_uri)
            actor_with_name_only.improve(actor)
        sorted_actors.append(actor_with_name_only)
Example #22
 def _get_series(self, data):
     """returns a series built from the data arg
     - if the data arg is None: return None
     - if the data arg is a Series: return the Series
     - if the data arg is a string: attempt to build Series from file path
     - if the data arg is a List: attempt to build Series from list
     """
     if isinstance(data, Series) or data is None:
         return data
     else:
         return Series(data, self.timezone, self.temp_units)
Example #23
def GetMetaDataForTest(text):
    """Process metadata from a file containing a git log. Used for tests

    Args:
        text: Text of the git log output to process
    """
    series = Series()
    ps = PatchStream(series, is_log=True)
    for line in text.splitlines():
        ps.ProcessLine(line)
    ps.Finalize()
    return series
Example #24
   def __init__(self):

      self.series = Series()
      self.author = Author()
      self.book = Book()
      self.whenRead = WhenRead()

      #establish list of fields the book method is purely responsible for
      # updating
      self.bookOnlyFields = ['book_id', 'title', 'notes', 'published',
                             'owner_status_id', 'read_status_id', 'type_id', 
                             'series_num']
Example #25
    def apply(self, series, subseries=0):
        """Warp one of the series using the other as a template.

        :param series: the index of the ``Bseries`` to warp, 0 or 1
        :type series: ``int``
        :param subseries: for multi-series ('tandem') ``Bseries``, the index
        of the actual ``Series`` within the ``Bseries``
        :return: the series, warped to fit the other series
        :rtype: ``Series``
        :raise ValueError: when ``series`` is not 0 or 1
        """

        if series not in (0, 1):
            raise ValueError("series must be 0 or 1.")

        warpee = series
        target = 1 - series
        warpee_bs = self.series[warpee]
        target_bs = self.series[target]

        # Make a copy of the warpee's data. We'll keep the
        # y values but go through it overwriting the x values
        # with their warped counterparts.
        new_data = self.series[warpee].series[subseries].data.copy()

        pos_w, pos_t = 0, 0  # warpee, target
        for run_n in range(len(self.runs[0])):
            len_w = self.runs[warpee][run_n]
            len_t = self.runs[target][run_n]

            # get the endpoints of the warpee and target runs
            w0, w1 = warpee_bs.get_xrange(pos_w, pos_w + len_w)
            t0, t1 = target_bs.get_xrange(pos_t, pos_t + len_t)
            wx0 = warpee_bs.series[subseries].data[0][w0]
            wx1 = warpee_bs.series[subseries].data[0][w1 - 1]
            tx0 = target_bs.series[subseries].data[0][t0]
            tx1 = target_bs.series[subseries].data[0][t1 - 1]

            # calculate scaling between warpee-x and target-x
            scale = (tx1 - tx0) / (wx1 - wx0)

            # apply the mapping to the warpee data within the run
            w0 = warpee_bs.get_block_start(pos_w)
            w1 = warpee_bs.get_block_start(pos_w + len_w)
            for i in xrange(w0, w1):
                new_data[0][i] = (new_data[0][i] - wx0) * scale + tx0

            # update block positions
            pos_w += len_w
            pos_t += len_t

        return Series(new_data, warpee_bs.series[subseries].name + "-wp")
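The core of the warp is the per-run linear rescaling applied to the x values. A tiny numeric sketch of that mapping, with made-up endpoints:

wx0, wx1 = 2.0, 6.0     # warpee run endpoints on the x axis
tx0, tx1 = 10.0, 18.0   # corresponding target run endpoints
scale = (tx1 - tx0) / (wx1 - wx0)             # 2.0
xs = [2.0, 3.0, 4.5, 6.0]
print([(x - wx0) * scale + tx0 for x in xs])
# [10.0, 12.0, 15.0, 18.0] -- the run's endpoints land exactly on the target's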
Example #26
    def add_series(self, i, j, k, h, n):
        self.video_lib.append(
            Series(title=i, year=j, gener=k, season=h, episode=n))
        with open('series_lib.csv', 'a', newline='') as csvfile:
            fieldnames = ['Title', 'Year', 'Type', 'Seasons', 'Episode']
            writer = csv.DictWriter(csvfile, fieldnames=fieldnames)

            writer.writerow({
                'Title': i,
                'Year': j,
                'Type': k,
                'Seasons': h,
                'Episode': n
            })
Example #27
	def _processFile(self, pathname):

		_file_details = self.parser.getFileDetails(pathname)
		for key, value in _file_details.iteritems():
			log.info('Key: {}   Value: {}'.format(key, value))

		redirect_response = raw_input('Press any key to continue:  ')
		_series = Series(**_file_details)
		_series = self.getShowInfo(_series)

		for key, value in _series.__dict__.iteritems():
			log.info('Key: {}   Value: {}'.format(key, value))

		return
Example #28
def create_new_series(file):
    from tvdb_client import ApiV2Client
    api_client = ApiV2Client(TVDB_LOGIN['username'], TVDB_LOGIN['api_key'], TVDB_LOGIN['account_identifier'])
    api_client.login()
    show = api_client.get_series(file.tvdb_id)
    premiere = ''
    if 'data' in show:
        premiere = show['data']['firstAired']
    print(get_base_path(file))
    print(get_base_path(file).rsplit(os.sep, 1))
    base_path = get_base_path(file).rsplit(os.sep, 1)[0]
    SHOWS.update({file.series_name: Series(series_name=file.series_name, status=file.status, tvdb_id=file.tvdb_id,
                                           name_needed=file.name_needed, location=base_path, premiere=premiere)})
    file.report['info'].append('Series created')
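A small aside on the base_path line above: splitting once on os.sep from the right is effectively os.path.dirname for a normalized path, which may read more clearly:

import os

p = os.path.join('shows', 'Some Show', 'Season 01')
print(p.rsplit(os.sep, 1)[0])   # shows/Some Show
print(os.path.dirname(p))       # shows/Some Show (equivalent here)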
Example #29
    def _tvrageGetInfo(self):

        _shows = feeds.search(self.series.titleBase)
        if not _shows: raise SeriesNotFound
        if len(_shows) == 1:
            _series = Series(tvrage=etree_to_dict(_shows[0])['show'])
            if matching(self.series.title.lower(),
                        _series.title.lower(),
                        factor=85):
                _series = Series(tvrage=_shows[0])
                self.series.update(_series)
                self.series.source = 'tvrage'
                self.series.tvrage_info = _series
                return
            else:
                raise SeriesNotFound

        _rankings = {}
        for _show in _shows:
            _series = Series(tvrage=etree_to_dict(_show)['show'])
            _score = matching(self.series.title.lower(),
                              self.decode(_series.titleBase.lower()))
            if _score < 85:
                continue

            if _score in _rankings.setdefault(_series.status, {}):
                _rankings[_series.status][_score][_series.title] = _series
            else:
                _rankings[_series.status][_score] = {_series.title: _series}

        if not _rankings:
            raise SeriesNotFound

        self._reviewShowData(_rankings, 'tvrage')

        return
Example #30
    def _element_wise_operation(self, other, operation):
        if not isinstance(other, (str, unicode, int, long, float, bool)):
            raise TypeError('can only compare with scalars')

        assert isinstance(operation, (str, unicode))

        new_data = {}
        for column_name in self:
            # get as series
            series = self[str(column_name)]
            # apply the operation
            new_data[column_name] = Series(
                weld_element_wise_op(series.expr, other, operation,
                                     series.weld_type), series.dtype,
                self.index, series.name)

        return DataFrame(new_data, self.index)
Example #31
    def set_series(self, config):
        """ Set series from config

            Args:
                config (configparser.SectionProxy): dictionary of series,
                    containing their parameters.
        """
        for name, section in config.items():
            if name == 'DEFAULT':
                continue

            self.series.append(Series(
                name,
                directory_local_prefix=self.directory_local,
                directory_server_prefix=self.directory_server,
                **section
                ))
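One reason for the explicit 'DEFAULT' check above: configparser's items() yields the DEFAULT section alongside the named ones, and its values are inherited by every section. A standalone illustration:

import configparser

config = configparser.ConfigParser()
config.read_string("[DEFAULT]\nquality = 720p\n\n[show_a]\ndirectory = a\n")
for name, section in config.items():
    if name == 'DEFAULT':         # items() yields DEFAULT as well
        continue
    print(name, dict(section))    # show_a inherits quality from DEFAULT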
Example #32
 def add_dataset(self, dataset):
     try:
         if self.dicom_dataset.StudyInstanceUID == dataset.StudyInstanceUID:
             for x in self.series:
                 try:
                     x.add_dataset(dataset)
                     logger.debug("Part of this series")
                     break
                 except Exception as e:
                     logger.debug("Not part of this series")
             else:
                 self.series.append(Series(dicom_dataset=dataset))
         else:
             raise KeyError("Not the same StudyInstanceUIDs")
     except Exception as e:
         logger.debug("trouble adding series to study", exc_info=e)
         raise KeyError("Not the same StudyInstanceUIDs")
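add_dataset leans on Python's for/else: the else branch (creating a brand-new Series) runs only when the loop over existing series finishes without hitting break. A minimal illustration of the idiom:

def place(value, buckets, fits):
    """Append value to the first bucket it fits in, else open a new bucket."""
    for bucket in buckets:
        if fits(value, bucket):
            bucket.append(value)
            break
    else:                          # no break: nothing matched
        buckets.append([value])

buckets = [[1, 2]]
place(10, buckets, lambda v, b: abs(v - b[-1]) <= 3)
print(buckets)                     # [[1, 2], [10]] -- 10 didn't fit, new bucket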
Example #33
def update_all_air_dates(show: Series):
    if not show.tvdb_id:
        return
    episodes = []
    for i in range(1, 100):
        eps = api_client.get_series_episodes(show.tvdb_id,
                                             episode_number=None,
                                             page=i)
        if 'code' in eps:
            break
        episodes.extend(eps['data'])
    for e_meta in episodes:
        e: Episode
        e = show.get_episode_by_sxe(e_meta['airedSeason'],
                                    e_meta['airedEpisodeNumber'])
        if not e:
            continue
        e.air_date = e_meta['firstAired']
Example #34
def GetMetaData(start, count):
    """Reads out patch series metadata from the commits

    This does a 'git log' on the relevant commits and pulls out the tags we
    are interested in.

    Args:
        start: Commit to start from: 0=HEAD, 1=next one, etc.
        count: Number of commits to list
    """
    pipe = [['git', 'log', '--reverse', 'HEAD~%d' % start, '-n%d' % count]]
    stdout = command.RunPipe(pipe, capture=True)
    series = Series()
    ps = PatchStream(series, is_log=True)
    for line in stdout.splitlines():
        ps.ProcessLine(line)
    ps.Finalize()
    return series
Example #35
def entry_to_series(entry):
    """
    entry_to_series()
    Takes a single row from a database query and converts it
    into a series.
    """
    if not entry:
        return None

    series = Series(
        name=str(entry[SI.NAME]),  # Series Name
        volumes_owned=str(entry[SI.VOL_OWNED]),  # Volumes Owned
        is_completed=entry[SI.IS_COMPLETED],  # Is Completed
        next_volume=entry[SI.NEXT_VOLUME],  # Next Volume
        publisher=str(entry[SI.PUBLISHER]),  # Publisher
        author=str(entry[SI.AUTHOR]),  # Author
        alt_names=str(entry[SI.ALT_NAMES]),  # Alternate Names
        rowid=entry[SI.ROWID])  # Row ID in db
    return series
Example #36
    def _getInfoFromProviders(self, processOrder):

        if self.last_series.title == self.series.title:
            self.series.copyShow(self.last_series)
            return
        else:
            self.last_series = Series(title=self.series.title)

        options = {"tvdb": self._tvdbGetInfo, "MyTrakt": self._traktGetInfo, "tvrage": self._tvrageGetInfo}

        try:
            for service in processOrder:
                try:
                    options[service]()
                    if self.series.keysFound:
                        # 						if not self.series.tvdb_id and 'tvdb' in processOrder:
                        # 							self.series.title = re.sub(' and ', ' & ', self.series.title)
                        # 							options['tvdb']()
                        raise GetOutOfLoop
                except SeriesNotFound:
                    sys.exc_clear()
                except GetOutOfLoop:
                    raise GetOutOfLoop
                except:
                    _an_error = traceback.format_exc()
                    log.debug(traceback.format_exception_only(type(_an_error), _an_error)[-1])
                    raise SeriesNotFound
            if self.series.keysFound:
                raise GetOutOfLoop
            self.last_request = {"LastRequestName": ""}
            raise SeriesNotFound("ALL: Unable to locate series: {}".format(self.series.title))
        except GetOutOfLoop:
            sys.exc_clear()

        self.last_series.copyShow(self.series)

        return
Example #37
 def __init__(self):
     super(MySeries, self).__init__()
     self.series = Series()
     self.initUI()
Example #38
    def getShowInfo(self, request, processOrder=["tvdb", "tvrage"], epdetail=True):
        log.trace("getShowInfo: Input Parm: {}".format(request))

        if not epdetail:
            self.args.get_episodes = False

        try:
            if self.args.processes is not None:
                processOrder = self.args.processes
            elif type(processOrder) == list:
                _s = set(["tvdb", "MyTrakt", "tvrage"])
                _diff = [_x for _x in processOrder if _x not in _s]
                if _diff:
                    raise InvalidArgumentValue("processOrder must be: {}".format("tvdb, MyTrakt, tvrage"))
        except:
            raise InvalidArgumentType("processOrder must be list, received: {}".format(type(processOrder)))

        if type(request) == dict:
            if "SeriesName" in request and request["SeriesName"] is not None:
                if self.args.series_name is not None:
                    request["SeriesName"] = self.args.series_name
                if self.args.season is not None:
                    request["SeasonNum"] = self.args.season
                if self.args.epno is not None:
                    request["EpisodeNums"] = self.args.epno
                self.series = Series(**request)
            elif self.args.series_name is not None:
                self.series.title = self.args.series_name
                if self.args.season is not None:
                    self.series.season = self.args.season
                if self.args.epno is not None:
                    self.series.episodeNums = self.args.epno
            else:
                error_msg = 'getDetails: Request Missing "SeriesName" Key: {!s}'.format(request)
                log.trace(error_msg)
                raise DictKeyError(error_msg)
        else:
            error_msg = "getDetails: Invalid object type passed, must be DICT, received: {}".format(type(request))
            log.trace(error_msg)
            raise InvalidArgumentType(error_msg)

        self._checkForAlias()
        # 		if  hasattr(self.series, 'episodenums'):
        # 			self._adjEpisodeNums()

        # Valid Request: Locate Show IDs
        try:
            self._getInfoFromProviders(processOrder)
        except SeriesNotFound:
            if self.series.titleSuffix:
                self.series.title = self.series.titleBase
                self._getInfoFromProviders(processOrder)
        except KeyboardInterrupt:
            sys.exit(8)

        if self.args.get_episodes:
            ep_get = {"tvdb_id": self._tvdbEpisodeInfo, "tvrage_id": self._tvrageEpisideInfo}

            try:
                if self.series.tvdb_id:
                    service = "tvdb_id"
                elif self.series.tvrage_id:
                    service = "tvrage_id"
                else:
                    raise SeriesNotFound
                ep_get[service]()
            except KeyboardInterrupt:
                sys.exit(8)

        if self.rtnDict:
            return self.series.getDict()
        else:
            return self.series
Example #39
class SeriesInfo(Library):
    def __init__(self, rtnDict=True):
        log.trace("SeriesInfo.__init__")

        super(SeriesInfo, self).__init__()

        seriesinfo_group = self.options.parser.add_argument_group("Episode Detail Options", description=None)
        seriesinfo_group.add_argument("--sn", "--name", type=str, dest="series_name")
        seriesinfo_group.add_argument("--season", type=int, dest="season")
        seriesinfo_group.add_argument("--epno", type=int, action="append", dest="epno")
        seriesinfo_group.add_argument(
            "--tvdb", dest="processes", action="append_const", const="tvdb", help="Information to come from TVDB"
        )
        seriesinfo_group.add_argument(
            "--tvrage", dest="processes", action="append_const", const="tvrage", help="Information to come from TVRage"
        )
        seriesinfo_group.add_argument(
            "--MyTrakt",
            dest="processes",
            action="append_const",
            const="MyTrakt",
            help="Information to come from MyTrakt.tv",
        )
        seriesinfo_group.add_argument(
            "--series-only",
            "--so",
            dest="get_episodes",
            action="store_false",
            default=False,
            help="Information to come from MyTrakt.tv",
        )

        self.db = api.TVDB("959D8E76B796A1FB")

        self.trakt_user = None

        self._check_suffix = re.compile(
            "^(?P<SeriesName>.*)[ \._\-][\(](?P<Suffix>(?:19|20)\d{2}|[a-zA-Z]*)[\)].*$", re.I
        )

        self.last_series = Series()
        self.series = Series()
        self.rtnDict = rtnDict

    def getShowInfo(self, request, processOrder=["tvdb", "tvrage"], epdetail=True):
        log.trace("getShowInfo: Input Parm: {}".format(request))

        if not epdetail:
            self.args.get_episodes = False

        try:
            if self.args.processes is not None:
                processOrder = self.args.processes
            elif type(processOrder) == list:
                _s = set(["tvdb", "MyTrakt", "tvrage"])
                _diff = [_x for _x in processOrder if _x not in _s]
                if _diff:
                    raise InvalidArgumentValue("processOrder must be: {}".format("tvdb, MyTrakt, tvrage"))
        except:
            raise InvalidArgumentType("processOrder must be list, received: {}".format(type(processOrder)))

        if type(request) == dict:
            if "SeriesName" in request and request["SeriesName"] is not None:
                if self.args.series_name is not None:
                    request["SeriesName"] = self.args.series_name
                if self.args.season is not None:
                    request["SeasonNum"] = self.args.season
                if self.args.epno is not None:
                    request["EpisodeNums"] = self.args.epno
                self.series = Series(**request)
            elif self.args.series_name is not None:
                self.series.title = self.args.series_name
                if self.args.season is not None:
                    self.series.season = self.args.season
                if self.args.epno is not None:
                    self.series.episodeNums = self.args.epno
            else:
                error_msg = 'getDetails: Request Missing "SeriesName" Key: {!s}'.format(request)
                log.trace(error_msg)
                raise DictKeyError(error_msg)
        else:
            error_msg = "getDetails: Invalid object type passed, must be DICT, received: {}".format(type(request))
            log.trace(error_msg)
            raise InvalidArgumentType(error_msg)

        self._checkForAlias()
        # 		if  hasattr(self.series, 'episodenums'):
        # 			self._adjEpisodeNums()

        # Valid Request: Locate Show IDs
        try:
            self._getInfoFromProviders(processOrder)
        except SeriesNotFound:
            if self.series.titleSuffix:
                self.series.title = self.series.titleBase
                self._getInfoFromProviders(processOrder)
        except KeyboardInterrupt:
            sys.exit(8)

        if self.args.get_episodes:
            ep_get = {"tvdb_id": self._tvdbEpisodeInfo, "tvrage_id": self._tvrageEpisideInfo}

            try:
                if self.series.tvdb_id:
                    service = "tvdb_id"
                elif self.series.tvrage_id:
                    service = "tvrage_id"
                else:
                    raise SeriesNotFound
                ep_get[service]()
            except KeyboardInterrupt:
                sys.exit(8)

        if self.rtnDict:
            return self.series.getDict()
        else:
            return self.series

    def _getInfoFromProviders(self, processOrder):

        if self.last_series.title == self.series.title:
            self.series.copyShow(self.last_series)
            return
        else:
            self.last_series = Series(title=self.series.title)

        options = {"tvdb": self._tvdbGetInfo, "MyTrakt": self._traktGetInfo, "tvrage": self._tvrageGetInfo}

        try:
            for service in processOrder:
                try:
                    options[service]()
                    if self.series.keysFound:
                        # 						if not self.series.tvdb_id and 'tvdb' in processOrder:
                        # 							self.series.title = re.sub(' and ', ' & ', self.series.title)
                        # 							options['tvdb']()
                        raise GetOutOfLoop
                except SeriesNotFound:
                    sys.exc_clear()
                except GetOutOfLoop:
                    raise GetOutOfLoop
                except:
                    _an_error = traceback.format_exc()
                    log.debug(traceback.format_exception_only(type(_an_error), _an_error)[-1])
                    raise SeriesNotFound
            if self.series.keysFound:
                raise GetOutOfLoop
            self.last_request = {"LastRequestName": ""}
            raise SeriesNotFound("ALL: Unable to locate series: {}".format(self.series.title))
        except GetOutOfLoop:
            sys.exc_clear()

        self.last_series.copyShow(self.series)

        return

    def _traktGetInfo(self):

        try:
            if not self.trakt_user:
                MyTrakt.api_key = self.settings.TraktAPIKey
                MyTrakt.authenticate(self.settings.TraktUserID, self.settings.TraktPassWord)
                self.trakt_user = User(self.settings.TraktUserID)
        except:
            raise SeriesNotFound("MyTrakt: Unable to connect to MyTrakt service: {}".format(self.settings.TraktUserID))

        show = TVShow(self.series.title)
        if not show.tvdb_id:
            raise SeriesNotFound("MyTrakt: Unable to locate series: {}".format(self.series.title))

        _title = self.decode(show.title)
        if not matching(self.series.title.lower(), _title.lower(), factor=85):
            raise SeriesNotFound("MyTrakt: Unable to locate series: {}".format(self.series.title))

        if not self.series.source:
            self.series.source = "MyTrakt"
            self.series.title = show.title

        if show.tvdb_id and self.series.tvdb_id is None:
            self.series.tvdb_id = show.tvdb_id

        if hasattr(show, "tvrage_id") and show.tvrage_id:
            if self.series.tvrage_id is None:
                self.series.tvrage_id = show.tvrage_id

        if hasattr(show, "imdb_id") and show.imdb_id:
            if self.series.imdb_id is None:
                self.series.imdb_id = self.decode(show.imdb_id)

        # 		if show.status and 'status' not in results:
        # 			results['status'] = series.status

        return

    def _tvdbGetInfo(self):

        try:
            _shows = self.db.search(self.series.titleBase, "en")
        except (TVDBAttributeError, TVDBIndexError, TVDBValueError, error.BadData):
            _an_error = traceback.format_exc()
            log.debug(traceback.format_exception_only(type(_an_error), _an_error)[-1])
            raise SeriesNotFound

        if len(_shows) == 0:
            raise SeriesNotFound
        if len(_shows) == 1:
            if matching(self.series.title.lower(), self.decode(_shows[0].SeriesName).lower(), factor=85):
                _shows[0].update()
                _series = Series(tvdb=_shows[0])

                self.series.update(_series)
                self.series.source = "tvdb"
                self.series.tvdb_info = _series
                return
            else:
                raise SeriesNotFound

        _rankings = {"Continuing": {}, "Ended": {}, "Other": {}}
        for _show in _shows:
            _title_suffix = self._check_suffix.match(self.decode(_show.SeriesName))
            if _title_suffix:
                _score = matching(self.series.titleBase.lower(), _title_suffix.group("SeriesName").lower())
            else:
                _score = matching(self.series.titleBase.lower(), self.decode(_show.SeriesName).lower())
            if _score < 90:
                continue

            _show.update()
            _series = Series(tvdb=_show)
            if _score in _rankings[_series.status]:
                _rankings[_series.status][_score][_series.title] = _series
            else:
                _rankings[_series.status][_score] = {_series.title: _series}

        if not _rankings:
            raise SeriesNotFound

        self._reviewShowData(_rankings, "tvdb")

        return

    def _tvrageGetInfo(self):

        _shows = feeds.search(self.series.titleBase)
        if not _shows:
            raise SeriesNotFound
        if len(_shows) == 1:
            _series = Series(tvrage=etree_to_dict(_shows[0])["show"])
            if matching(self.series.title.lower(), _series.title.lower(), factor=85):
                _series = Series(tvrage=_shows[0])
                self.series.update(_series)
                self.series.source = "tvrage"
                self.series.tvrage_info = _series
                return
            else:
                raise SeriesNotFound

        _rankings = {}
        for _show in _shows:
            _series = Series(tvrage=etree_to_dict(_show)["show"])
            _score = matching(self.series.title.lower(), self.decode(_series.titleBase.lower()))
            if _score < 85:
                continue

            if _score in _rankings.setdefault(_series.status, {}):
                _rankings[_series.status][_score][_series.title] = _series
            else:
                _rankings[_series.status][_score] = {_series.title: _series}

        if not _rankings:
            raise SeriesNotFound

        self._reviewShowData(_rankings, "tvrage")

        return

    def _reviewShowData(self, _rankings, source):

        _check_order = ["Continuing", "Ended", "Other"]
        _show_status = {"Continuing": self._activeShows, "Ended": self._notActiveShows, "Other": self._notActiveShows}

        for _status in _check_order:
            if len(_rankings[_status]) > 0:
                found = _show_status[_status](_rankings[_status], source)
                if found:
                    return

        raise SeriesNotFound

    def _activeShows(self, _list, source):
        _check_order = ["Country", "Year", None]
        for key in sorted(_list, reverse=True):
            for _check in _check_order:
                for _series in _list[key].itervalues():
                    if _series.titleType != _check:
                        continue
                    if _check == "Country":
                        if self.series.titleType == "Country":
                            if self.series.country != _series.country:
                                continue
                        elif _series.country != "US":
                            continue
                    if _check == "Year":
                        if self.series.titleType == "Year":
                            if self.series.titleSuffix != _series.titleSuffix:
                                continue
                    if matching(_series.titleBase, self.series.titleBase, factor=98):
                        self.series.update(_series)
                        self.series.source = source
                        if source == "tvdb":
                            self.series.tvdb_info = _series
                        elif source == "tvrage":
                            self.series.tvrage_info = _series
                        return True
        return False

    def _notActiveShows(self, _list, source):
        _check_order = ["Match", "Country", "Year", None]
        for key in sorted(_list, reverse=True):
            for _check in _check_order:
                for _series in _list[key].itervalues():
                    if _check == "Match":
                        if _series.titleType == self.series.titleType:
                            if matching(_series.title, self.series.title, factor=98):
                                self.series.update(_series)
                                self.series.source = source
                                if source == "tvdb":
                                    self.series.tvdb_info = _series
                                elif source == "tvrage":
                                    self.series.tvrage_info = _series
                                return True
                    elif _series.titleType == _check:
                        if matching(_series.titleBase, self.series.titleBase, factor=98):
                            self.series.update(_series)
                            self.series.source = source
                            if source == "tvdb":
                                self.series.tvdb_info = _series
                            elif source == "tvrage":
                                self.series.tvrage_info = _series
                            return True
        return False

    def _checkForAlias(self):
        # Check for Alias
        alias_name = difflib.get_close_matches(self.series.title, self.settings.SeriesAliasList, 1, cutoff=0.9)
        if len(alias_name) > 0:
            self.series.title = self.settings.SeriesAliasList[alias_name[0]].rstrip()
        return

    def _adjEpisodeNums(self):
        for _entry in self.settings.EpisodeAdjList:
            if _entry["SeriesName"] == self.series.title and self.series.season:
                if _entry["SeasonNum"] == self.series.season:
                    if _entry["Begin"] <= self.series.episodeNums[0] and _entry["End"] >= self.series.episodeNums[0]:
                        self.series.season = self.series.season + _entry["AdjSeason"]
                        self.series.episodeNums[0] = self.series.episodeNums[0] + _entry["AdjEpisode"]
                        return
        return

    def _tvdbEpisodeInfo(self):
        log.trace("_tvdbEpisodeInfo: Retrieving Episodes - %s ID: %s" % (self.series.title, self.series.tvdb_id))

        _err_msg_1 = "TVDB: No Episode Data Found - {SeriesName}, ID: {tvdb_id}"
        _err_msg_2 = "TVDB: Connection Issues Retrieving Series and Episode Info - {SeriesName}, ID: {tvdb_id}"

        try:
            self.series.episodeData = []
            _series = self.db.get_series(self.series.tvdb_id, "en")
            _seasons = self._tvdbBuildTVSeason(_series)
            self.series.seasons = _seasons
            if self.series.season is not None:
                _season = _series[self.series.season]
                if self.series.episodeNums:
                    for epno in self.series.episodeNums:
                        _episode = _season[epno]
                        self.series.addEpisode(_season, _episode)
                else:
                    for _episode in _season:
                        self.series.addEpisode(_season, _episode)
            else:
                for _season in _series:
                    log.debug("Season: {}".format(_season.season_number))
                    for _episode in _season:
                        self.series.addEpisode(_season, _episode)
        except TVDBIndexError, message:
            _an_error = traceback.format_exc()
            log.debug(traceback.format_exception_only(type(_an_error), _an_error)[-1])
            log.debug(_err_msg_1.format(**self.series.getDict()))
            raise EpisodeNotFound(_err_msg_1.format(**self.series.getDict()))
        except IOError, message:
            _an_error = traceback.format_exc()
            log.debug(traceback.format_exception_only(type(_an_error), _an_error)[-1])
            log.debug(_err_msg_2.format(**self.series.getDict()))
            raise EpisodeNotFound(_err_msg_2.format(**self.series.getDict()))
Example #40
class Loadshape(object):
    
    def __init__(self, load_data, temp_data=None, forecast_temp_data=None,
                 timezone=None, temp_units='F', sq_ft=None,
                 tariff=None, log_level=logging.INFO):
        """load_data, temp_data, and forecast_temp_data may be:
                - List of Tuples containing timestamps and values
                - filename of a csv containing timestamps and values
                - Series object
        """
        logging.basicConfig(level=log_level)
        self.logger = logging.getLogger(__name__)

        if timezone is None: self.logger.warn("Assuming timezone is OS default")

        self.timezone   = utils.get_timezone(timezone)
        self.model_dir  = path.join(path.dirname(path.abspath(__file__)), 'r')
        self.temp_units = temp_units
        self.sq_ft      = sq_ft
        self.tariff     = tariff

        self.training_load_series           = self._get_series(load_data)
        self.training_temperature_series    = self._get_series(temp_data)
        self.forecast_temperature_series    = self._get_series(forecast_temp_data)
        
        self._stderr = None
        self._stdout = None

        self._reset_derivative_data()

    # ----- derivative data generators ----- #
    # baseline:             generates the baseline_series for the input data
    # diff:                 generates the diff_series (actual - baseline)
    # event_performance:    uses the diff for a specific time interval to compute
    #                       various performance statistics
    #
    def baseline(self, start_at=None, end_at=None,
                 weighting_days=14, modeling_interval=900, step_size=900):
        """baseline load shape generator: compiles necessary temporary files and
        shells out to R script:
        - training power data: timestamps and kW
        - training temperature data: timestamps and outdoor air temp [optional]
        - prediction times: timestamps only, prediction made for these times
        - prediction temperature data: timestamps and outdoor air temp [optional]
        
        Note: prediction temperature data is optional, but if training temperature
        data is provided but does not include temperatures for the requested
        prediction period, the model will ignore the temperature data. In order
        to get temperature adjusted predictions, temperature data must be available
        for both the training data and the prediction period.
        
        baseline.R
            --loadFile=LOAD_FILE
            --temperatureFile=TRAINING_TEMPERATURE_FILE
            --timeStampFile=PREDICTION_TIME_STAMPS_FILE
            --predictTemperatureFile=PREDICTION_TEMPERATURE_FILE
            --outputBaselineFile=OUTPUT_BASELINE_FILE
            --errorStatisticsFile=ERROR_STATISTICS_FILE
            --fahrenheit=BOOLEAN
            --timescaleDays=TIMESCALEDAYS
            --intervalMinutes=INTERVALMINUTES
        """
        self._reset_derivative_data()
    
        output_times = self._build_output_time_series(start_at, end_at, step_size)
        
        # ----- write temporary files ----- #
        baseline_tmp    = tempfile.NamedTemporaryFile()
        error_stats_tmp = tempfile.NamedTemporaryFile()
        power_tmp       = self.training_load_series.write_to_tempfile()
        prediction_tmp  = output_times.write_to_tempfile()

        # ----- build command ----- #
        cmd = path.join(self.model_dir, 'baseline.R')
        cmd += " --loadFile=%s"                 % power_tmp.name
        cmd += " --timeStampFile=%s"            % prediction_tmp.name
        cmd += " --outputBaselineFile=%s"       % baseline_tmp.name
        cmd += " --errorStatisticsFile=%s"      % error_stats_tmp.name
        cmd += " --timescaleDays=%s"            % weighting_days
        cmd += " --intervalMinutes=%s"          % (modeling_interval / 60)

        # ----- add in available temperature data ----- #
        if self.training_temperature_series != None:
            t_temp_tmp = self.training_temperature_series.write_to_tempfile()
            cmd += " --temperatureFile=%s" % t_temp_tmp.name
            f_flag = str(self.training_temperature_series.is_farenheit()).upper()
            cmd += " --fahrenheit=%s" % f_flag
            
            if self.forecast_temperature_series != None:
                ptemp_temp = self.forecast_temperature_series.write_to_tempfile()
                cmd += " --predictTemperatureFile=%s" % ptemp_temp.name

        # ----- run script ----- #
        self._run_script(cmd)

        # ----- process results ----- #
        self.baseline_series = Series(baseline_tmp.name, self.timezone)
        self.error_stats = self._read_error_stats(error_stats_tmp.name)
        
        return self.baseline_series

    def cost(self, load_data=None, start_at=None, end_at=None, step_count=None):
        """calculate the cost of energy based on the provided tariff

        R script produces one output file:
        timestamp, previous-interval-cost, cumulative-previous-interval-cost

        [tariff.R command]
        ./tariff.R
            --loadFile=LOAD_FILE
            --tariffFile=TARIFF_FILE
            --outputTimestampFile=OUTPUT_TIMES_FILE
            --demandResponseFile=DEMAND_RESPONSE_DATES
            --outputFile=OUTPUT_FILE
        """
        if load_data == None: load_data = self.training_load_series
        
        if not isinstance(load_data, Series):
            raise Exception("load_data argument must be a Series object")
        if not isinstance(self.tariff, Tariff):
            raise Exception("cannot calculate cost - no tariff provided")

        output_times = self._build_output_time_series(start_at, end_at,
                                                      step_size=900,
                                                      step_count=step_count)

        # ----- write temporary files ----- #
        load_tmp            = load_data.write_to_tempfile(exclude=False)
        tariff_tmp          = self.tariff.write_tariff_to_tempfile()
        output_times_tmp    = output_times.write_to_tempfile()
        output_tmp          = tempfile.NamedTemporaryFile()

        # ----- build command ----- #
        cmd = path.join(self.model_dir, 'tariff.R')
        cmd += " --loadFile=%s"             % load_tmp.name
        cmd += " --tariffFile=%s"           % tariff_tmp.name
        cmd += " --outputTimestampFile=%s"  % output_times_tmp.name
        cmd += " --outputFile=%s"           % output_tmp.name

        if len(self.tariff.dr_periods) > 0:
            dr_periods_tmp = self.tariff.write_dr_periods_to_tempfile()
            cmd += " --demandResponseFile=%s" % dr_periods_tmp.name

        self._run_script(cmd)
        
        # ----- process results ----- #
        cost_series             = Series(output_tmp.name, self.timezone, data_column=1)
        cumulative_cost_series  = Series(output_tmp.name, self.timezone, data_column=2)

        return cost_series, cumulative_cost_series
            
    def diff(self, start_at=None, end_at=None, step_size=900, step_count=None):
        """calculate the difference between baseline and actual

        R script produces two output files:
        (1) diff:       timestamp,  kw_diff,    cumulative_kwh_diff
        (2) baseline:   timestamp,  kw_base,    cumulative_kwh_base

        [diff.R command]
        ./diff.R
            --loadFile=LOAD_FILE
            --baselineFile=BASELINE_LOAD_FILE
            --outputTimesFile=OUTPUT_TIMES_FILE
            --outputFile=OUTPUT_DIFF_FILE
            --predictedBaselineOutputFile=OUTPUT_BASE_FILE
        """
        if self.baseline_series == None: self.baseline()
        
        output_times = self._build_output_time_series(start_at, end_at,
                                                      step_size, step_count)

        # ----- write temporary files ----- #
        load_tmp            = self.training_load_series.write_to_tempfile(exclude=False)
        baseline_tmp        = self.baseline_series.write_to_tempfile()
        output_times_tmp    = output_times.write_to_tempfile()
        output_diff_tmp     = tempfile.NamedTemporaryFile()
        output_base_tmp     = tempfile.NamedTemporaryFile()
        
        # ----- build command ----- #
        cmd = path.join(self.model_dir, 'diff.R')
        cmd += " --loadFile=%s"                     % load_tmp.name
        cmd += " --baselineFile=%s"                 % baseline_tmp.name
        cmd += " --outputTimesFile=%s"              % output_times_tmp.name
        cmd += " --outputFile=%s"                   % output_diff_tmp.name
        cmd += " --predictedBaselineOutputFile=%s"  % output_base_tmp.name
        
        # ----- run script ----- #
        self._run_script(cmd)

        # ----- process results ----- #
        kw_diff = Series(output_diff_tmp.name, self.timezone, data_column=1)
        kw_base = Series(output_base_tmp.name, self.timezone, data_column=1)

        cumulative_kwh_diff = Series(output_diff_tmp.name, self.timezone, data_column=2)
        cumulative_kwh_base = Series(output_base_tmp.name, self.timezone, data_column=2)

        return kw_diff, kw_base, cumulative_kwh_diff, cumulative_kwh_base
        
    def event_performance(self, start_at=None, end_at=None):
        """calcualte the event performance for a specific period of time
        returned performance metrics:
            - avg_kw_shed:              (average kW diff)
            - avg_percent_kw_shed       (average kW diff / average kW baseline)
            - kwh_reduction             (cumulative delta kWh)
            - percent_kwh_reduction     (cumulative delta kWh / cumulative kWh baseline)
            - total_savings ($)
            - total_percent_savings     (%)
            - avg_w_sq_ft_shed          (average kW shed * 1000 / sq_ft)
        """
        # get diff values for period by diffing over a single interval
        diff_data = self.diff(start_at, end_at, step_count=1)
        kw_diff_series = diff_data[0]
        kw_base_series = diff_data[1]
        cumulative_kwh_diff_series = diff_data[2]
        cumulative_kwh_base_series = diff_data[3]

        # extract data from diff series
        ep = {}
        ep["avg_kw_shed"]           = kw_diff_series.values()[-1] * -1
        avg_kw_base                 = kw_base_series.values()[-1]
        ep["avg_percent_kw_shed"]   = (ep["avg_kw_shed"] / avg_kw_base) * 100
        ep["kwh_reduction"]         = cumulative_kwh_diff_series.values()[-1] * -1
        kwh_base                    = cumulative_kwh_base_series.values()[-1]
        ep["percent_kwh_reduction"] = (ep["kwh_reduction"] / kwh_base) * 100

        # add in W per square feet if square footage was provided
        if self.sq_ft:
            ep["avg_w_sq_ft_shed"]  = (ep["avg_kw_shed"] * 1000) / self.sq_ft

        # calculate $ savings if tariff provided
        if self.tariff != None:
            load_cost, load_cumulative_cost = self.cost(load_data=self.training_load_series,
                                                        start_at=start_at,
                                                        end_at=end_at,
                                                        step_count=1)

            base_cost, base_cumulative_cost = self.cost(load_data=self.baseline_series,
                                                        start_at=start_at,
                                                        end_at=end_at,
                                                        step_count=1)

            total_load_cost = load_cumulative_cost.values()[-1]
            total_base_cost = base_cumulative_cost.values()[-1]

            ep["total_savings"] = total_base_cost - total_load_cost
            ep["total_percent_savings"] = (ep["total_savings"] / total_base_cost) * 100

        # round values to something reasonable
        for key, val in ep.iteritems():
            if isinstance(val, float): ep[key] = round(val, 2)

        return ep
    
    def cumulative_sum(self, start_at=None, end_at=None, step_size=900):
        """return accumulated sum of differences bewetween baseline and actual
        energy. Returns a series.
        """
        if self.baseline_series == None: self.baseline()

        diff_data = self.diff(start_at, end_at, step_size)
        cumulative_kwh_diff_series = diff_data[2]
        return cumulative_kwh_diff_series    

    def _run_script(self, command):
        self.logger.info("Running R script...")

        p = Popen(command, shell=True, stdout=PIPE, stderr=PIPE)
        stdout, stderr = p.communicate()

        self._stdout = stdout
        self._stderr = stderr

        if stderr:
            self.logger.error(" --- R script error: --- ")
            for l in stderr.splitlines(): print " --> %s" % l

        if stdout:
            self.logger.info(" --- R script info: --- ")
            for l in stdout.splitlines(): print " --> %s" % l

        return True

    def actual_data(self, start_at, end_at, exclude=False, step_size=None):
        return self.training_load_series.data(start_at=start_at, end_at=end_at, exclude=exclude, step_size=step_size)

    def baseline_data(self, start_at, end_at, exclude=False, step_size=None):
        return self.baseline_series.data(start_at=start_at, end_at=end_at, exclude=exclude, step_size=step_size)

    def add_exclusion(self, start_at, end_at):
        """proxy add_exclusion to series"""
        self.training_load_series.add_exclusion(start_at, end_at)

    def add_named_exclusion(self, exclusion_name):
        """proxy add_named_exclusion to series"""
        self.training_load_series.add_named_exclusion(exclusion_name)

    def clear_exclusions(self):
        """proxy clear_exclusion to series"""
        self.training_load_series.clear_exclusions()

    def set_tariff(self, tariff):
        """add or replace tariff"""
        self.tariff = tariff

    def _get_series(self, data):
        """returns a series built from the data arg
        - if the data arg is None: return None
        - if the data arg is a Series: return the Series
        - if the data arg is a string: attempt to build Series from file path
        - if the data arg is a List: attempt to build Series from list
        """
        if isinstance(data, Series) or data is None:
            return data
        else:
            return Series(data, self.timezone, self.temp_units)

    def _build_output_time_series(self, start_at=None, end_at=None,
                                  step_size=900, step_count=None):
        """assemble prediction series:
        - this is the series of timestamps for which baseline values will be calculated
        - the prediction series is stored in a Series object to take advantage of some of the Series features
        - default start_at/end is training_load_series.start_at/end_at
        - default prediction step is 900s
        - step_count will trump step_size
        """
        if start_at == None: start_at = self.training_load_series.start_at()
        if end_at == None: end_at = self.training_load_series.end_at()
        
        start_at = utils.read_timestamp(start_at, self.timezone)
        end_at = utils.read_timestamp(end_at, self.timezone)

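        # step_count overrides step_size: e.g. a 24 h window (86400 s) with
        # step_count=4 yields one prediction timestamp every 21600 s (6 h).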
        if step_count != None:
            duration = end_at - start_at
            step_size = int(float(duration) / step_count)

        p_data = range(start_at, end_at+1, step_size)
        p_data = [(v, 0) for v in p_data]
        
        return Series(p_data, self.timezone)

    def _read_error_stats(self, error_stats_file):
        """read error stats file and return values"""
        error_stats = {}
        
        with open(error_stats_file, 'r') as f:
            for ent in csv.reader(f):
                if ent: error_stats[ent[0].lower()] = float(ent[1])
                
        return error_stats

    def _reset_derivative_data(self):
        self.baseline_series                = None
        self.error_stats                    = None
        self.base_cost_series               = None
        self.load_cost_series               = None
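# ------------------------------------------------------------------------------
# Minimal end-to-end usage sketch for the Loadshape class above (not part of
# the original example). The import path is an assumption, the numbers are made
# up, and baseline()/diff()/event_performance() shell out to the bundled R
# scripts, so this only runs where those scripts are installed.
from loadshape import Loadshape

# (epoch-seconds, kW) tuples, per the constructor docstring; a CSV path or a
# Series object would also be accepted
load_data = [(1379487600, 5.2), (1379488500, 5.6), (1379489400, 5.4)]

ls = Loadshape(load_data, timezone="America/Los_Angeles", sq_ft=5000)
ls.baseline(weighting_days=14)                    # fit and predict the baseline
kw_diff, kw_base, kwh_diff, kwh_base = ls.diff()  # actual minus baseline
print(ls.event_performance())                     # full training window by default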
Example #42
0
class DetailProcessor():

   def __init__(self):

      self.series = Series()
      self.author = Author()
      self.book = Book()
      self.whenRead = WhenRead()

      #establish list of fields the book method is purely responsible for
      # updating
      self.bookOnlyFields = ['book_id', 'title', 'notes', 'published',
                             'owner_status_id', 'read_status_id', 'type_id', 
                             'series_num']

   def processForm(self, formDict):

      message = 'Record Updated'
      book_id = formDict['book_id']
      #if the record is new, first call the add-new-book method, receive a new
      # book_id, append it to the dictionary,
      # then send the dictionary to the update methods
      if formDict['activity'] == 'submit_new':
         book_id = self.book.addBook()
         formDict['book_id'] = book_id
         message = 'Record Added'

      bookDict = {}
      #create a special dictionary of fields the book method is responsible for
      # updating itself.
      for field in self.bookOnlyFields:
         bookDict[field] = formDict[field]

      #run the updateSeries method, which will add a series to the DB if
      # necessary, and append the new series id to the bookDict
      seriesUpdate = self.series.updateSeries(formDict)
      bookDict['series_id'] = seriesUpdate

      bookUpdate = self.book.updateBook(bookDict)
      authorUpdate = self.author.updateAuthor(formDict)

      if formDict['when_read'] != '':
         dateUpdate = self.whenRead.updateWhenRead(formDict)

      #message = self.buildMessage() # insert all update return values
      return message, book_id

   def buildMessage(self, updated, added=None):
      '''accepts dict of fields updated and their new values
      returns properly formatted string for message display'''

      updates = ''
      adds = ''
      
      if updated:
         updates = 'Updated: <br> '
         for item in updated:
            if item in self.columns:
               d_name = self.columns[item][0]['display']
               updates += '%s changed to:  %s <br>'\
                   %(d_name, updated[item])
            else:
               updates += '%s was %s <br>' %(item, updated[item])

      if added:
         adds = 'Added: <br> '
         for item in added:
            adds += '%s: %s ' %(item, added[item])

      message = 'For this record the following fields were <br> %s %s'\
          %(updates, adds)

      if not added and not updated:
         message = 'Message: No fields changed, no updates made'
        
      return message
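# ------------------------------------------------------------------------------
# Illustrative sketch only (not part of the original class): the rough shape of
# the dictionary processForm() expects, based on the fields referenced above.
# The Series/Author/Book/WhenRead helpers hit a database, so the call is left
# commented out and every value here is made up.
form_dict = {
    'activity': 'submit_new',   # 'submit_new' triggers addBook() first
    'book_id': '',              # filled in by addBook() for new records
    'title': 'Example Title',
    'notes': '',
    'published': '1999',
    'owner_status_id': 1,
    'read_status_id': 2,
    'type_id': 1,
    'series_num': 1,
    'when_read': '2020-05-01',
    # ...plus whatever fields updateSeries()/updateAuthor() expect
}
# message, book_id = DetailProcessor().processForm(form_dict)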
Example #43
0
class MySeries(QtGui.QWidget):
    
    def __init__(self):
        super(MySeries, self).__init__()
        self.series = Series()
        self.initUI()
        
    def initUI(self):
          
        # original position parameters for the widgets
        x = 8
        y = 0
        
        # window geometry parameters
        ox = 300
        oy = 300
        l = 250
        h = 180
        
        # series label
        self.label_serie = QtGui.QLabel('', self)
        self.label_serie.setText("Nom de la série :".decode('utf-8'))
        self.label_serie.move(x+2, y+10)
        
        # series combobox
        combo = QtGui.QComboBox(self)
        for item in self.series.get_series_list():
            combo.addItem(item)
        combo.move(x, y+25)
        combo.activated[str].connect(self.on_series_activated)
        
        # episode label
        self.label_episode = QtGui.QLabel('', self)
        self.label_episode.setText("Choix de l'épisode :".decode('utf-8'))
        self.label_episode.move(x+2, y+60)
        
        # episode combobox
        self.combo_episodes = QtGui.QComboBox(self)
        self.combo_episodes.move(x, y+75)
        self.combo_episodes.activated[str].connect(self.on_episodes_activated)  
        if self.combo_episodes.currentText() == '':
            self.on_series_activated(combo.currentText())
        
        # OK button
        okb = QtGui.QPushButton('OK', self)
        okb.move(x-4, 120)
        okb.clicked[bool].connect(self.call_vlc)
        
        # set the window geometry and title
        self.setGeometry(ox, oy, l, h)
        self.setWindowTitle('My TV Shows')
        #self.adjustSize()
        self.show()

    def call_vlc(self, pressed):
        #if source.text() == "OK":
        episode_url = self.series.get_episode_url(self.combo_episodes.currentText())
        self.kill_process('VLC')
        self.kill_process('Subtitles')
        
        # file name
        pos = episode_url.rfind('/')
        filename = episode_url[pos+1:]
        
        # subtitle file name
        pos = filename.rfind('.')
        subtitle = filename[:pos] + '.srt'
        
        #print os.path.exists()
        
        # download the subtitle
        processus = subprocess.Popen('/Applications/Subtitles.app/Contents/MacOS/Subtitles ' + self.series.DOWNLOADS + '/' + filename,stdout=subprocess.PIPE,stderr=subprocess.PIPE,shell=True)
        subtitle_file = self.series.DOWNLOADS + '/' + subtitle
        
        # stream the video and display the subtitle
        processus = subprocess.Popen('/Applications/VLC.app/Contents/MacOS/VLC --no-media-library --no-playlist-tree --play-and-exit --sub-file=' + subtitle_file + ' ' + episode_url, stdout=subprocess.PIPE,stderr=subprocess.PIPE,shell=True)
    
    # static method to kill a process by name
    @staticmethod
    def kill_process(name):
        processus = subprocess.Popen(['ps', '-A'], stdout=subprocess.PIPE)
        out, err = processus.communicate()
        for line in out.splitlines():
            if name in line:
                pid = int(line.split(None, 1)[0])
                os.kill(pid, signal.SIGKILL)
    
    def on_series_activated(self, text):
        
        # clear all items from the combobox
        self.combo_episodes.clear()
        
        # fill the combobox with the episode file names for this series
        for item in self.series.get_episodes_list(text):
            self.combo_episodes.addItem(item)
        
        # resize the combobox to fit its contents
        self.combo_episodes.setSizeAdjustPolicy(QtGui.QComboBox.AdjustToContents)
        self.combo_episodes.adjustSize()

        # resize the window
        self.adjustSize()
        
    def on_episodes_activated(self, text):         
        print "Episode : " + text
Example #44
0
#!/Python34/python
import cgi

from connectors import mongodbConnector
from series import Series
import helper


print("Content-type: text/html; charset=utf8")
print()

form = cgi.FieldStorage()

# create series object with the values from html form
entry = Series()
entry.name = form.getvalue("series_choice")
entry.season = form.getvalue("season")
entry.episodenumber = form.getvalue("episodenumber")
entry.generalepisodenumber = form.getvalue("generalepisodenumber")
entry.dvdnumber = form.getvalue("dvdnumber")
entry.dvdepisodenumber = form.getvalue("dvdepisodenumber")
entry.episodename_de = form.getvalue("episodename_de")
entry.episodename_en = form.getvalue("episodename_en")
entry.actors = form.getvalue("actors")
entry.content = form.getvalue("content")
entry.director = form.getvalue("director")

mongodbConnector.createInfo(entry)

print()
print("<p>")
Example #45
0
import cairoplot                 # required for the cairoplot.*_plot calls below
from series import Series        # cairoplot's Series helper (assumed import path)

# Scatter, dot-line, and function plotting
test_scatter_plot = 1
test_dot_line_plot = 1
test_function_plot = 1
# Bar plotting
test_vertical_bar_plot = 1
test_horizontal_bar_plot = 1
# Pie plotting
test_pie_plot = 1
test_donut_plot = 1
# Others
test_gantt_chart = 1
test_themes = 1


if test_scatter_plot:
    #Default data
    data = Series([ (-2,10), (0,0), (0,15), (1,5), (2,0), (3,-10), (3,5) ])
    cairoplot.scatter_plot ( 'scatter_1_default_series.png', data = data, width = 500, height = 500, border = 20, axis = True, grid = True )
    
    #lists of coordinates x,y
    data = Series([[[1,2,3,4,5],[1,1,1,1,1]]])
    cairoplot.scatter_plot ( 'scatter_2_lists_series.png', data = data, width = 500, height = 500, border = 20, axis = True, grid = True )
    
    #lists of coordinates x,y,z
    data = Series([[[0.5,1,2,3,4,5],[0.5,1,1,1,1,1],[10,6,10,20,10,6]]])
    colors = [ (0,0,0,0.25), (1,0,0,0.75) ]
    cairoplot.scatter_plot ( 'scatter_3_lists_series.png', data = data, width = 500, height = 500, border = 20, axis = True, discrete = True,
                             grid = True, circle_colors = colors )    
    
    data = Series([(-1, -16, 12), (-12, 17, 11), (-4, 6, 5), (4, -20, 12), (13, -3, 21), (7, 14, 20), (-11, -2, 18), (19, 7, 18), (-10, -19, 15),
                  (-17, -2, 6), (-9, 4, 10), (14, 11, 16), (13, -11, 18), (20, 20, 16), (7, -8, 15), (-16, 17, 16), (16, 9, 9), (-3, -13, 25),
                  (-20, -6, 17), (-10, -10, 12), (-7, 17, 25), (10, -10, 13), (10, 13, 20), (17, 6, 15), (18, -11, 14), (18, -12, 11), (-9, 11, 14),