Beispiel #1
0
 def json2dataset(self, jsonFile):
     """Build ``self.dataset`` from a list of JSON event objects.

     Only events whose ``type`` is ``'kill'`` are kept.  Each kept event
     becomes a dict with keys ``event_type`` (``'kill'``, suffixed with
     ``'_hs'`` for headshots and ``'_pnt'`` for penetrating shots),
     ``date`` (converted via ``utils.convertDate``), ``actor``,
     ``victim`` and ``event_id`` (the event's index in *jsonFile*).

     Side effects: initialises/updates ``self.minDate`` and
     ``self.maxDate`` to span the processed events, appends unseen actor
     ids to ``self.actors`` and stores the result in ``self.dataset``.

     Note: assumes *jsonFile* is non-empty (element 0 seeds the dates).
     """
     result_array = []
     # Seed min/max dates from the first event.
     self.minDate = utils.convertDate(jsonFile[0]["date"])
     self.maxDate = utils.convertDate(jsonFile[0]["date"])
     # BUGFIX: the original iterated range(0, len(jsonFile) - 1), which
     # silently skipped the last event; iterate over every event.
     for i, jobj in enumerate(jsonFile):
         if jobj["type"] == 'kill':
             jdata = jobj["data"]
             event_type = 'kill'
             event_actor = jdata["actor"]["playerId"]
             if event_actor not in self.actors:
                 self.actors.append(event_actor)
             event_victim = jdata["victim"]["playerId"]
             if jdata["headshot"] == True:
                 event_type = event_type + '_hs'
             if jdata["penetrated"] == True:
                 event_type = event_type + '_pnt'
             result_dict = {}
             result_dict["event_type"] = event_type
             result_dict["date"] = utils.convertDate(jobj["date"]) #use python datetime format for dates
             # Keep the session's min/max dates up to date.
             if result_dict["date"] < self.minDate:
                 self.minDate = result_dict["date"]
             elif result_dict["date"] > self.maxDate:
                 self.maxDate = result_dict["date"]
             result_dict["actor"] = event_actor
             result_dict["victim"] = event_victim
             result_dict["event_id"] = i
             result_array.append(result_dict)
     self.dataset = result_array
Beispiel #2
0
 def loadDict(self, data):
     """Populate this object's fields from a previously saved dict."""
     self.name = data['name']
     self.roles.extend(data['roles'])
     # '0.0.0.0' is the placeholder for "no IP recorded" -- skip it.
     if data['ip'] != '0.0.0.0':
         self.ip.append(data['ip'])
     self.lastUsed = utils.convertDate(data['last used'])
     self.registered = utils.convertDate(data['registered'])
     token = data['token']
     if token:
         self.token = token
    def stockScrape(self,
                    stockCode,
                    source="YAHOOFINANCE",
                    minDate=DEFAULT_STARTDATE):
        """Download daily historical prices for *stockCode* and store them.

        Parameters
        ----------
        stockCode : internal ticker identifier, translated to a provider
            code via ``self.getYahooCode`` / ``self.getGoogleCode``.
        source : ``"YAHOOFINANCE"`` (default) or ``"GOOGLEFINANCE"``.
        minDate : earliest date to download, parsed by ``utils.convertDate``.

        The result (columns renamed to ``self.HISTORICAL_COLUMNS``) is
        written to the ``self.HISTORICAL_TABLE_NAME`` table via
        ``self.addToDatabase``.

        Raises
        ------
        ValueError
            If *source* is not a supported provider.  (The original code
            fell through to a NameError at ``pd.concat`` in that case.)
        """
        sdate = utils.convertDate(minDate)  # start date

        if source == "YAHOOFINANCE":
            stock = yf.Ticker(self.getYahooCode(stockCode))
            # FIX: local variable was misspelled "dowloaded_data".
            downloaded_data = stock.history(interval="1d", start=sdate)

            # Reshape the provider output into Date / Ticker / Price parts.
            Dates = downloaded_data.index.to_frame().reset_index(drop=True)
            Price = downloaded_data['Close'].reset_index(drop=True)
            Ticker = pd.DataFrame([stockCode] * len(downloaded_data['Close']),
                                  columns=['Ticker'])

        elif source == "GOOGLEFINANCE":
            googleticker = self.getGoogleCode(stockCode)
            data = self.googledatatable.copy()

            # End date: today at midnight.
            edate = datetime.now().replace(hour=0, minute=0, second=0,
                                           microsecond=0)

            data.DATE = pd.to_datetime(data.DATE, format='%d/%m/%Y')
            dates_mask = ((data.DATE >= sdate) & (data.DATE <= edate))
            data = data.loc[dates_mask, [self.DATE, googleticker]]

            # Drop missing values, including textual 'NA' placeholders.
            data = data.dropna()
            data = data[(data[googleticker] != 'NA')]

            Dates = pd.DataFrame(data.DATE,
                                 columns=['DATE']).reset_index(drop=True)
            Ticker = pd.DataFrame([stockCode] * len(data.DATE),
                                  columns=['Ticker'])
            Price = pd.DataFrame(data[googleticker],
                                 columns=[googleticker]).reset_index(drop=True)

        else:
            # FIX: unknown sources previously crashed with NameError below.
            raise ValueError("Unsupported data source: {}".format(source))

        stockDataFrame = pd.concat([Dates, Ticker, Price], axis=1)
        stockDataFrame.columns = self.HISTORICAL_COLUMNS
        # NOTE: removed the original "stockDataFrame.ignore_index = True",
        # which only set a stray attribute on the DataFrame (a no-op) --
        # ignore_index is a pd.concat keyword, not a DataFrame attribute.

        # Add to SQL database
        self.addToDatabase(stockDataFrame, self.HISTORICAL_TABLE_NAME)
Beispiel #4
0
    def fastSync(self):
        """Pull incremental changes from the server's sync queue.

        Returns True when the changes were retrieved and triaged, False
        when a full sync is required (server retention too short) or the
        update request failed.
        """
        last_sync = settings('LastIncrementalSync')
        if not last_sync:
            last_sync = "2010-01-01T00:00:00Z"

        last_sync_time = utils.convertDate(last_sync)
        log.info("Last sync run: %s" % last_sync_time)

        # get server RetentionDateTime
        try:
            result = self.doUtils(
                "{server}/emby/Emby.Kodi.SyncQueue/GetServerDateTime?format=json"
            )
            retention_time = result['RetentionDateTime']
        except Exception as error:
            log.error(error)
            retention_time = "2010-01-01T00:00:00Z"

        retention_time = utils.convertDate(retention_time)
        log.info("RetentionDateTime: %s" % retention_time)

        # Queue no longer reaches back to our last sync point -> full sync.
        if retention_time > last_sync_time:
            log.info(
                "Fast sync server retention insufficient, fall back to full sync"
            )
            return False

        params = {'LastUpdateDT': last_sync}
        if settings('enableMusic') != "true":
            params['filter'] = "music"

        try:
            result = self.doUtils(
                "{server}/emby/Emby.Kodi.SyncQueue/{UserId}/GetItems?format=json",
                parameters=params)
            processlist = {
                'added': result['ItemsAdded'],
                'update': result['ItemsUpdated'],
                'userdata': result['UserDataChanged'],
                'remove': result['ItemsRemoved'],
            }
        except Exception as error:  # To be reviewed to only catch specific errors.
            log.error(error)
            log.error("Failed to retrieve latest updates using fast sync.")
            xbmcgui.Dialog().ok(lang(29999), lang(33095))
            return False

        log.info("Fast sync changes: %s" % result)
        for action, items in processlist.items():
            self.triage_items(action, items)
        return True
    def fastSync(self):
        """Incrementally fetch and triage server-side changes.

        Returns False (forcing a full sync) when the server's sync-queue
        retention does not cover the last run, or when the update request
        fails; returns True after the changes have been triaged.
        """
        anchor = settings('LastIncrementalSync') or "2010-01-01T00:00:00Z"
        anchor_time = utils.convertDate(anchor)
        log.info("Last sync run: %s" % anchor_time)

        # get server RetentionDateTime
        try:
            reply = self.doUtils("{server}/emby/Emby.Kodi.SyncQueue/GetServerDateTime?format=json")
            retention = reply['RetentionDateTime']
        except Exception as error:
            log.error(error)
            retention = "2010-01-01T00:00:00Z"

        retention = utils.convertDate(retention)
        log.info("RetentionDateTime: %s" % retention)

        if retention > anchor_time:
            # Queue does not reach back far enough -- force a full sync.
            log.info("Fast sync server retention insufficient, fall back to full sync")
            return False

        params = {'LastUpdateDT': anchor}
        if settings('enableMusic') != "true":
            params['filter'] = "music"
        url = "{server}/emby/Emby.Kodi.SyncQueue/{UserId}/GetItems?format=json"

        keymap = (('added', 'ItemsAdded'),
                  ('update', 'ItemsUpdated'),
                  ('userdata', 'UserDataChanged'),
                  ('remove', 'ItemsRemoved'))
        try:
            reply = self.doUtils(url, parameters=params)
            processlist = {action: reply[key] for action, key in keymap}
        except Exception as error: # To be reviewed to only catch specific errors.
            log.error(error)
            log.error("Failed to retrieve latest updates using fast sync.")
            xbmcgui.Dialog().ok(lang(29999), lang(33095))
            return False
        else:
            log.info("Fast sync changes: %s" % reply)
            for action in processlist:
                self.triage_items(action, processlist[action])
            return True
Beispiel #6
0
def getHistoricalData(ticker, daterange):
    """Download historical data for *ticker* as a pandas DataFrame.

    Parameters:
        ticker: ticker symbol understood by the IEX API.
        daterange: two-element sequence ``[<start date>, <end date>]``
            where each element is accepted by ``utils.convertDate`` and
            unpacked as a ``(year, month, day)`` tuple.
            (The original docstring said int YYYYMMDD -- presumably that
            is what convertDate accepts; confirm against utils.)

    Returns:
        pandas.DataFrame of daily historical data between the two dates.
    """
    startYear, startMonth, startDay = utils.convertDate(daterange[0])
    endYear, endMonth, endDay = utils.convertDate(daterange[1])

    start = datetime(startYear, startMonth, startDay)
    end = datetime(endYear, endMonth, endDay)

    # FIX: removed a leftover "dataframe.head()" call whose result was
    # discarded (a debugging no-op).
    return iex.get_historical_data(ticker, start=start, end=end,
                                   output_format='pandas')
Beispiel #7
0
    def saveLastSync(self):
        """Persist the timestamp of this sync run.

        Prefers the server clock (fast-sync plugin); falls back to the
        local UTC clock.  A small overlap is subtracted so items changed
        while the sync ran are picked up next time.
        """
        overlap = 2  # minutes of overlap between consecutive syncs

        try: # datetime fails when used more than once, TypeError
            if not self.isFastSync:
                raise Exception("Fast sync server plugin is not enabled.")
            result = self.doUtils("{server}/emby/Emby.Kodi.SyncQueue/GetServerDateTime?format=json")
            server_time = utils.convertDate(result['ServerDateTime'])

        except Exception as e:
            # Plugin missing or server errored -- use the client clock.
            log.debug("An exception occurred: %s" % e)
            lastSync = (datetime.utcnow() - timedelta(minutes=overlap)).strftime('%Y-%m-%dT%H:%M:%SZ')
            log.info("New sync time: client time -%s min: %s" % (overlap, lastSync))

        else:
            lastSync = (server_time - timedelta(minutes=overlap)).strftime('%Y-%m-%dT%H:%M:%SZ')
            log.info("New sync time: server time -%s min: %s" % (overlap, lastSync))

        finally:
            settings('LastIncrementalSync', value=lastSync)
    def saveLastSync(self):
        """Record the completion time of this sync in the settings store.

        Uses the server clock when the fast-sync plugin is available,
        otherwise the local UTC clock, minus a small overlap window.
        """
        overlap = 2  # minutes subtracted to create an overlap window
        fmt = '%Y-%m-%dT%H:%M:%SZ'

        try: # datetime fails when used more than once, TypeError
            if self.isFastSync:
                reply = self.doUtils("{server}/emby/Emby.Kodi.SyncQueue/GetServerDateTime?format=json")
                server_time = utils.convertDate(reply['ServerDateTime'])
            else:
                raise Exception("Fast sync server plugin is not enabled.")

        except Exception as e:
            # If the server plugin is not installed or an error happened.
            log.debug("An exception occurred: %s" % e)
            lastSync = (datetime.utcnow() - timedelta(minutes=overlap)).strftime(fmt)
            log.info("New sync time: client time -%s min: %s" % (overlap, lastSync))

        else:
            lastSync = (server_time - timedelta(minutes=overlap)).strftime(fmt)
            log.info("New sync time: server time -%s min: %s" % (overlap, lastSync))

        finally:
            settings('LastIncrementalSync', value=lastSync)
Beispiel #9
0
    def updatePosts(self, posts):
        # Merge a batch of raw post dicts into self.posts, skipping posts
        # already stored.  New posts are appended (then the list is sorted
        # and saved); authors not yet in self.users are created.  The batch
        # must be date-ordered; a newest-first batch is reversed first, and
        # an unsorted batch is rejected with an error message.
        # (Python 2 code: print statements.)
        if not posts:
            return
        # Batch arrived newest-first -- flip it to oldest-first.
        if utils.convertDate(posts[0]['date']) > utils.convertDate(
                posts[-1]['date']):
            posts.reverse()
        # Sanity check: refuse batches that are not sorted by date.
        for i in range(len(posts) - 1):
            if utils.convertDate(posts[i]['date']) > utils.convertDate(
                    posts[i + 1]['date']):
                print 'ERROR: database.updatePosts incoming posts not in order'
                print '-', utils.convertDate(
                    posts[i]['date']), utils.convertDate(posts[i + 1]['date'])
                return

        newPostCount = 0
        newUserCount = 0

        # Index existing posts by date so duplicate detection only has to
        # compare posts sharing the same date.
        dateLookup = {}

        for post in self.posts:
            if post.date in dateLookup:
                dateLookup[post.date].append(post)
            else:
                dateLookup[post.date] = [post]

        for data in posts:
            newPost = Post(data)
            isNew = True

            # Duplicate check relies on Post equality ("in" uses ==);
            # presumably content-based -- confirm in the Post class.
            if newPost.date in dateLookup:
                if newPost in dateLookup[newPost.date]:
                    isNew = False
                dateLookup[newPost.date].append(newPost)
            else:
                dateLookup[newPost.date] = [newPost]

            if isNew:
                # Create the author on first sight, then record the IP.
                if newPost.name not in self.users:
                    user = User()
                    user.name = newPost.name
                    self.users[user.name] = user
                    newUserCount += 1
                self.users[newPost.name].addIp(newPost.ip)

                self.posts.append(newPost)
                newPostCount += 1

        # Persist only the stores that actually changed.
        if newPostCount > 0:
            print newPostCount, 'posts inserted'
            self.posts.sort()
            self.savePosts()
        if newUserCount > 0:
            print newUserCount, 'users inserted'
            self.saveUsers()
Beispiel #10
0
    def __init__(self, data):
        """Build a message object from one raw row of 12 indexable fields.

        NOTE(review): field meanings below are taken from the original
        inline comments; confirm against the upstream message format.
        """
        self.id = int(data[0])  # Message id
        self.time = int(data[1])  # Unix timestamp
        self.date = utils.convertDate(data[2].split(',')[0])  # Date string (part before first comma)
        self.name = data[3]  # Author's name
        self.level = self.levels[int(data[4]) - 1]  # Privileges (1-based index into self.levels)
        self.exturl = data[5]  # Email/url
        self.content = BeautifulSoup(data[6],
                                     "html.parser").text  # Message text (HTML stripped)
        self.imgurl = data[7]  # Image url
        self.badFlags = int(data[8])  # Message type flags (bitmask)
        self.flaghtml = data[10]
        self.userid = data[9]  # Author's id
        self.localId = data[11]

        # Derived flags: bits 16 and 32 of badFlags, plus sentinel ids
        # (-1 marks a sticky message, 0 a temporary one).
        self.isRedirected = bool(self.badFlags & 16)
        self.isPrivate = bool(self.badFlags & 32)
        self.isSticky = self.id == -1
        self.isTemp = self.id == 0
    def updateStockData(self, stockCode, source="YAHOOFINANCE"):
        """Fetch historical data for *stockCode*, full or incremental.

        If the database has no rows for the ticker yet, downloads the full
        history via ``self.stockScrape``; otherwise downloads only dates
        after the newest stored row.

        NOTE(review): SQL is built with str.format -- safe only while
        stockCode comes from trusted internal code; prefer parameterized
        queries if it can ever be user-supplied.
        """
        # Reads database
        sqlQuery = """SELECT {} FROM {} WHERE {} = '{}'; """ \
        .format(self.TICKER, self.HISTORICAL_TABLE_NAME, self.TICKER, stockCode)

        #print(sqlQuery)
        stockData = self.readDatabase(sqlQuery)

        # Checks whether any previous data has been added for the particular stock code
        # if not then run initialStockScrape to get all past data
        if stockData.empty:
            print('Running stockScrape() on {} using {}. --First run.'.format(
                stockCode, source))
            self.stockScrape(stockCode, source)
        else:
            #access database to get latestDate
            print('Running stockScrape() on {} using {}. --Updating data.'.
                  format(stockCode, source))
            # Performs SQL query to get the latest stock data date in database
            sqlQuery = """SELECT {}, max({}) AS Date FROM {} WHERE {} = '{}' GROUP BY {}""" \
            .format(self.TICKER, self.DATE, self.HISTORICAL_TABLE_NAME, self.TICKER, stockCode, self.TICKER)

            y = self.readDatabase(sqlQuery)
            minDate = y.Date[
                0]  # minDate is the earliest data of data that the program needs to download
            # Increment date by 1 day
            minDate = utils.incrementDate(minDate)

            # End of range: today at midnight, so comparisons ignore time-of-day.
            today = datetime.now()
            today = today.replace(hour=0, minute=0, second=0,
                                  microsecond=0)  # end date

            if utils.convertDate(minDate) < today:
                # Updates stock data
                self.stockScrape(stockCode, source, minDate)
            else:
                # Data are already up to date
                print(
                    "Data for {} are already up to date. Module: updateStockData."
                    .format(stockCode))
Beispiel #12
0
 def loadDict(self, data):
     """Restore this post's fields from a previously saved dict."""
     self.date = utils.convertDate(data['date'])
     for field in ('name', 'content', 'ip', 'email'):
         setattr(self, field, data[field])
def find_frame(event, start_date, vid):
    """Return the frame index in *vid* corresponding to *event*'s timestamp."""
    event_date = utils.convertDate(event['date'])
    fps = vid.get(cv2.CAP_PROP_FPS)
    return calculate_frames(get_time_delta(start_date, event_date), fps)