def main_Funtion():
    print("Content-type:text/html\r\n\r\n")
    page = r"/var/www/html/downloadpage.html"
    file = open(page, 'r')
    print(file.read())
    functions.combing_files(path, newfinal)
    functions.top_row(newfinal)
    functions.drop_duplicates(newfinal, "last")
    functions.end_time(newfinal)
    functions.new_resolved_cols(newfinal)
    functions.res_row(newfinal)
    functions.correct_dept(newfinal)
    functions.end_state(newfinal)
    functions.format_date(newfinal)
    functions.drop_duplicates(newfinal, "last")
    functions.time_diff(path, newfinal)
    functions.aggregate(newfinal)
    functions.timetaken(newfinal)
    functions.database_operations()
    # functions.testing(path, newfinal)
    shutil.rmtree(r"/var/www/html/uploads/compress")
    print('</body></html>')
    return
def add_meet(meet_id):
    print('Fetching TFRRS information for meet {}'.format(meet_id))
    tfrrs_html = urllib.urlopen('http://www.tfrrs.org/results/{}.html'.format(meet_id))
    soup = BeautifulSoup.BeautifulSoup(''.join(tfrrs_html))
    name = soup.find('div', {'class': 'title'}).find('h2').text
    date = soup.find('span', text='Date:').parent.parent.text
    date = date.rpartition(':')[2]
    date = brf.format_date(date)
    location = soup.find('span', text='Location:').parent.parent.text
    location = location.rpartition(' - ')[2]
    city, sep, state = location.partition(', ')
    post_params = json.dumps({
        'tfrrs_id': meet_id,
        'name': name,
        'date': date,
        'city': city,
        'state': state,
    })
    print('Attempting to add meet {} to BuzzRuns'.format(meet_id))
    response, content = httplib.Http().request(
        base_url + "/meets.json", 'POST', post_params,
        headers={'Content-type': 'application/json'})
    print response.status, response.reason
    return response.status in set([201, 302])
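# A minimal invocation sketch for add_meet (Python 2), assuming base_url and the
# module-level imports (urllib, BeautifulSoup, json, and an httplib2-style Http
# client bound to the name httplib) are already configured elsewhere; the meet
# id below is a placeholder, not a real TFRRS identifier.
if __name__ == '__main__':
    import sys
    meet_id = sys.argv[1] if len(sys.argv) > 1 else '12345'
    if add_meet(meet_id):
        print('Meet {} accepted by BuzzRuns'.format(meet_id))
    else:
        print('Failed to add meet {}'.format(meet_id))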
def main_Funtion():
    functions.combing_files(path, newfinal)
    functions.top_row(newfinal)
    functions.drop_duplicates(newfinal, "last")
    functions.end_time(newfinal)
    functions.new_resolved_cols(newfinal)
    functions.res_row(newfinal)
    functions.correct_dept(newfinal)
    functions.end_state(newfinal)
    functions.format_date(newfinal)
    functions.drop_duplicates(newfinal, "last")
    functions.time_diff(path, newfinal)
    functions.aggregate(newfinal)
    functions.timetaken(newfinal)
    functions.SLA_Counter(newfinal)
    # functions.testing(path, newfinal)
    return
def __init__(self, *args, **kwargs):
    # set our passed info
    self.m_addon = kwargs["addon"]
    self.encoding = kwargs.get("encoding", "UTF-8")
    self.headers = kwargs.get("headers", {})
    # add user-agent and If-Modified-Since header
    self.headers.update({
        "User-Agent": kwargs["useragent"],
        "If-Modified-Since": format_date(kwargs["sourcedate"], modified=True)
    })
def main_Funtion(path, newfinal):
    '''we are calling the functions that were written'''
    for filenameList in os.listdir(path):
        if os.path.isdir(os.path.join(path, filenameList)):
            path = os.path.join(path, filenameList)
    functions.combing_files(path, newfinal)
    functions.top_row(newfinal)
    functions.drop_duplicates(newfinal, "last")
    functions.end_time(newfinal)
    functions.new_resolved_cols(newfinal)
    functions.res_row(newfinal)
    functions.correct_dept(newfinal)
    functions.end_state(newfinal)
    functions.format_date(newfinal)
    functions.drop_duplicates(newfinal, "last")
    functions.time_diff(path, newfinal)
    functions.aggregate(newfinal)
    functions.timetaken(newfinal)
    functions.database_operations()
    functions.SLA_Counter(newfinal)
    # functions.testing(path, newfinal)
    return
def test_coordinates(request):
    print "Test Coordinates"
    drop_offs = None
    results_flag = False
    number_dropoffs = None
    cluster_list = None
    clusters = None
    current_lat = '40.730610'
    current_long = '-73.935242'
    if request.method == 'POST':
        results_flag = True
        # Updates the lat and long
        current_lat = request.POST.get('pick_up_lat', '40.730610')
        current_long = request.POST.get('pick_up_lon', '-73.935242')
        pickup_date = request.POST.get('pickup_date', '01/01/2015')
        pickup_date_init, pickup_date_end = format_date(pickup_date)
        drop_offs = get_dropoffs_df_from_db(current_lat, current_long,
                                            pickup_date_init, pickup_date_end)
        number_dropoffs = len(drop_offs)
        print "Number of dropoffs", len(drop_offs)
        dropoffs_df = pd.DataFrame(drop_offs)
        clusters, cluster_list = get_cluster_listthing(dropoffs_df)
        print clusters
        print "======="
        print cluster_list
    context = RequestContext(request, {
        'drop_offs': drop_offs,
        'current_lat': Decimal(current_lat),
        'current_long': Decimal(current_long),
        'results_flag': results_flag,
        'number_dropoffs': number_dropoffs,
        'clusters': clusters,
        'cluster_list': cluster_list,
    })
    return render(request, 'taxis/coordinates_test.html', context)
def get_data(self, date, longitud, latitude):
    """
    Connects to the database and runs a query filtered by a pick-up location
    and a date. Converts the queryset to a dataframe and saves it into the
    instance variable.

    :param date: A specific date
    :param longitud: The pick-up longitude coordinate
    :param latitude: The pick-up latitude coordinate
    :return:
    """
    date_init, date_end = format_date(date)
    cursor = connection.cursor()
    cursor.execute(
        'SELECT *, '
        '(3959 * acos (cos ( radians(%s) )'
        '* cos( radians( pickup_latitude ) )'
        '* cos( radians( pickup_longitude ) '
        '- radians( %s ) ) '
        '+ sin ( radians( %s) )'
        '* sin( radians( pickup_latitude ) )'
        ')'
        ') AS distance '
        'FROM taxis_taxipickups '
        'HAVING distance < 0.0621371 '
        'AND pickup_datetime BETWEEN CAST(%s AS DATETIME) AND CAST(%s as DATETIME) '
        'ORDER BY pickup_datetime',
        [latitude, longitud, latitude, date_init, date_end])
    try:
        self.taxi_dataframe = pd.DataFrame(dictfetchall(cursor))
        self.taxi_dataframe['pickup_datetime'] = pd.to_datetime(
            self.taxi_dataframe['pickup_datetime'])
    except KeyError:
        raise KeyError(
            "No data for this date or this location. Please select another one")
    except LookupError as lk_error:
        raise LookupError(lk_error)
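# The SQL above computes a great-circle distance with the spherical law of
# cosines: 3959 is the Earth's radius in miles, so "HAVING distance < 0.0621371"
# keeps pick-ups within roughly 0.062 miles (about 100 meters) of the given
# point. A pure-Python sketch of the same formula, for reference only; the
# query itself remains the source of truth.
import math

def spherical_distance_miles(lat1, lon1, lat2, lon2):
    """Great-circle distance in miles via the spherical law of cosines."""
    lat1, lon1, lat2, lon2 = map(math.radians, (lat1, lon1, lat2, lon2))
    cos_angle = (math.cos(lat1) * math.cos(lat2) * math.cos(lon2 - lon1)
                 + math.sin(lat1) * math.sin(lat2))
    # clamp to guard against floating-point overshoot for identical points
    return 3959 * math.acos(max(-1.0, min(1.0, cos_angle)))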
def create_nfo_file(self):
    # set movie info
    nfoSource = self.nfo_file_format().format(
        title=self._escape_text(self.movie["title"]),
        mpaa=self.movie["mpaa"],
        studio=self._escape_text(self.movie["studio"]),
        releasedate=format_date(self.movie["releasedate"], reverse=True),
        copyright=self.movie["copyright"],
        director=self._escape_text(self.movie["director"]),
        writer=self._escape_text(self.movie["writer"]),
        plot=self._escape_text(self.movie["plot"]),
        cast=self._escape_text(self.movie["cast"]),
        genre=self._escape_text(self.movie["genre"]),
        poster=self.movie["thumbnail"].split("|")[0],
        trailertitle=self._escape_text(self.trailer[1][1]),
        quality=self.trailer[1][2],
        runtime=self.trailer[1][3],
        postdate=self.trailer[1][4],
        size=self.trailer[1][5],
        url=self.trailer[1][6],
    )
    # save NFO source to file
    return self._save_nfo_file(nfoSource)
def _get_trailers(self):
    # check if this is an all HD list
    hd = (
        xbmc.getInfoLabel("Container.Property(PluginCategory)") ==
        self.m_addon.getLocalizedString(30118)
    )
    # check if this is a downloaded list
    downloaded = (
        xbmc.getInfoLabel("Container.Property(PluginCategory)") in [
            self.m_addon.getLocalizedString(30121),
            self.m_addon.getLocalizedString(30122)
        ]
    )
    # fetch trailers
    records = self.m_database.get_trailers(
        idMovie=self.m_addon.params["play"], hd=hd, downloaded=downloaded
    )
    # set movie info
    self._set_movie_details(records[0])
    # our quality index
    quality = ["Standard", "480p", "720p", "1080p"]
    # we use a dict to eliminate duplicates
    urls = {}
    # iterate thru and find best trailer URL
    for record in records:
        # if better quality or non existent add trailer,
        # formatted with post date for later sorting
        if (not urls.has_key(record[1]) or
                (quality.index(record[2]) > quality.index(urls[record[1]][1][2]))):
            urls.update({record[1]: [record[4], list(record[:14])]})
    # we only need the values
    self.m_movie["trailers"] = urls.values()
    # sort so trailers are in order of post date
    self.m_movie["trailers"].sort()
    # if multiple trailers
    if (len(self.m_movie["trailers"]) > 1):
        # set choice to play all
        choice = len(self.m_movie["trailers"]) + 1
        # ask
        if (self.m_addon.getSetting("trailer.multiple") == 0):
            # set trailer titles and color
            choices = [
                "[COLOR {color}]{downloaded}{trailer} {min:0d}:{sec:02d} {posted} ({quality})[/COLOR]".format(
                    color=["unwatched", "watched"][trailer[1][7] > 0],
                    downloaded=["", "* "][trailer[1][10] is not None],
                    trailer=trailer[1][1],
                    min=divmod(trailer[1][3], 60)[0],
                    sec=divmod(trailer[1][3], 60)[1],
                    quality=trailer[1][2],
                    posted=format_date(date=trailer[1][4], pretty=False, short=True)
                )
                for trailer in self.m_movie["trailers"]
            ]
            # add random and play all choices
            choices += [
                "[ {option} ]".format(option=self.m_addon.getLocalizedString(30232)),
                "[ {option} ]".format(option=self.m_addon.getLocalizedString(30233))
            ]
            # get user choice
            choice = xbmcgui.Dialog().select(self.m_movie["title"], choices)
        # random trailer
        if (self.m_addon.getSetting("trailer.multiple") == 1 or
                choice == len(self.m_movie["trailers"])):
            import random
            # iterate thru and eliminate watched trailers
            tlrs = [tlr for tlr in self.m_movie["trailers"] if tlr[1][7] < 1]
            # if there are unwatched trailers set them, otherwise use all
            if (tlrs):
                self.m_movie["trailers"] = tlrs
            # get random trailer
            choice = random.randint(0, len(self.m_movie["trailers"]) - 1)
        # choose trailer
        if (choice == -1):
            self.m_movie["trailers"] = []
        elif (choice < len(self.m_movie["trailers"])):
            self.m_movie["trailers"] = [self.m_movie["trailers"][choice]]
    # iterate thru and check if trailer exists
    for count, trailer in enumerate(self.m_movie["trailers"]):
        # if file was downloaded use it
        self.m_movie["trailers"][count][1][6] = trailer[1][10] or trailer[1][6]
        # set download path if not already downloaded
        self.m_movie["trailers"][count][1][10] = (
            trailer[1][10] or self._get_legal_filepath(
                self.m_movie["title"], trailer[1][6]
            )
        )
def google_map(request):
    error_message = None
    drop_offs = None
    results_flag = False
    number_dropoffs = None
    pickup_distribution = OrderedDict()
    rate_summary = OrderedDict()
    distance_summary = OrderedDict()
    current_lat = '40.730610'
    current_long = '-73.935242'
    if request.method == 'POST':
        results_flag = True
        # Updates the lat and long
        current_lat = request.POST.get('pick_up_lat', '40.730610')
        current_long = request.POST.get('pick_up_lon', '-73.935242')
        pickup_date = request.POST.get('pickup_date', '01/01/2015')
        pickup_date_init, pickup_date_end = format_date(pickup_date)
        drop_offs = get_dropoffs_df_from_db(current_lat, current_long,
                                            pickup_date_init, pickup_date_end)
        number_dropoffs = len(drop_offs)
        print "Number of dropoffs", len(drop_offs)
        dropoffs_df = pd.DataFrame(drop_offs)
        try:
            hour_range = pd.date_range('00:00:00', periods=24, freq='H')
            for hour in hour_range:
                hour_string = hour.strftime("%H:%M")
                pickup_distribution[hour_string] = 0
            dropoffs_df['pickup_datetime'] = pd.to_datetime(dropoffs_df['pickup_datetime'])
            times = pd.DatetimeIndex(dropoffs_df.pickup_datetime)
            hour_groups = dropoffs_df.groupby([times.hour]).size()
            for hg in hour_groups.index:
                # Leading zeros
                hour_string = str(hg).zfill(2) + ':00'
                pickup_distribution[hour_string] = int(hour_groups[hg])
            # Get the descriptive summary
            rate_sum_statistics = dropoffs_df['total_amount'].describe()
            rate_summary['Mean'] = rate_sum_statistics['mean']
            rate_summary['Std Dev'] = rate_sum_statistics['std']
            rate_summary['25%'] = rate_sum_statistics['25%']
            rate_summary['50%'] = rate_sum_statistics['50%']
            rate_summary['75%'] = rate_sum_statistics['75%']
            rate_summary['Max'] = rate_sum_statistics['max']
            distance_sum_statistics = dropoffs_df['trip_distance'].describe()
            distance_summary['Mean'] = distance_sum_statistics['mean']
            distance_summary['Std Dev'] = distance_sum_statistics['std']
            distance_summary['25%'] = distance_sum_statistics['25%']
            distance_summary['50%'] = distance_sum_statistics['50%']
            distance_summary['75%'] = distance_sum_statistics['75%']
            distance_summary['Max'] = distance_sum_statistics['max']
        except LookupError:
            print "Error"
            results_flag = False
            error_message = ('There is no data for this day or this location. '
                             'Please try another combination')
    context = RequestContext(request, {
        'drop_offs': drop_offs,
        'current_lat': Decimal(current_lat),
        'current_long': Decimal(current_long),
        'results_flag': results_flag,
        'number_dropoffs': number_dropoffs,
        'pickup_distribution': pickup_distribution,
        'rate_summary': rate_summary,
        'distance_summary': distance_summary,
        'error': error_message,
    })
    return render(request, 'taxis/google_map.html', context)
    logFile.write(f'\nSample Point: {samplePoint}')
else:
    if data_map[reportName]['Sample Point'] != '':
        samplePoint = functions.format_name(
            sheet[data_map[reportName]['Sample Point']].value)
        logFile.write(f'\nSample Point: {samplePoint}')
    else:
        samplePoint = ''
        logFile.write(f'\nSample Point Missing')

# Grab sample date and format for renaming
# If sample date is empty, check date received, then date completed
if isinstance(sheet[data_map[reportName]['Sample Date']].value, datetime.date):
    sampleDate = functions.format_date(
        sheet[data_map[reportName]['Sample Date']].value)
    logFile.write(f'\nSample Date: {sampleDate}')
elif isinstance(sheet[data_map[reportName]['Date Received']].value, datetime.date):
    sampleDate = functions.format_date(
        sheet[data_map[reportName]['Date Received']].value)
    logFile.write(f'\nDate Received: {sampleDate}')
elif isinstance(sheet[data_map[reportName]['Date Completed']].value, datetime.date):
    sampleDate = functions.format_date(
        sheet[data_map[reportName]['Date Completed']].value)
    logFile.write(f'\nDate Completed: {sampleDate}')
else:
    sampleDate = ''
    logFile.write(f'\nThe Sample Date is Missing')
            data_dict['Details'] = ds[0]
            data_dict['Sources'] = ds[1]
        else:
            data_dict['Details'] = ''
            data_dict['Sources'] = ''
        data_dict['Case'] = format_str(tabs[0])
        data_dict['Patient'] = format_str(tabs[1])
        data_dict['Age'] = format_str(tabs[2])
        data_dict['Gender'] = format_str(tabs[3])
        data_dict['Nationality'] = format_str(tabs[4])
        data_dict['Status'] = format_str(tabs[5])
        data_dict['Infection_Source'] = format_str(tabs[6])
        data_dict['Country_of_Origin'] = format_str(tabs[7])
        data_dict['Symptomatic_to_Confirmation'] = format_str(tabs[8])
        data_dict['Days_to_Recover'] = format_str(tabs[9])
        data_dict['Symptomatic_At'] = format_date(tabs[10])  # date
        data_dict['Confirmed_At'] = format_date(tabs[11])  # date
        data_dict['Recovered_At'] = format_date(tabs[12])  # date
        data_dict['Displayed_Symptoms'] = format_str(tabs[13])
        data_dict['URL'] = format_url(tabs[0])
        csv_writer.writerow(data_dict)
        if (args.rows != 0) and (i >= args.rows):
            break
    except Exception as e:
        print(e)
def format_source_date(source_date, hours=0):
    # return properly formatted date
    return format_date(source_date, reverse=True, modified=True, hours=hours)
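# The format_date helper itself is not shown in these snippets. As an
# illustration only, here is one plausible reading of the keyword flags used in
# them (modified=True for an HTTP If-Modified-Since style date, reverse=True to
# flip between YYYY-MM-DD and DD-MM-YYYY, hours for a time offset); the real
# project-local implementation may differ, so this stand-in uses its own name.
import datetime

def format_date_sketch(value, reverse=False, modified=False, hours=0):
    """Hypothetical stand-in for the project's format_date helper."""
    if isinstance(value, datetime.datetime):
        date = value
    else:
        # assume an ISO-style "YYYY-MM-DD" prefix when not already a datetime
        date = datetime.datetime.strptime(str(value)[:10], "%Y-%m-%d")
    date += datetime.timedelta(hours=hours)
    if modified:
        # RFC 1123 style date as used by If-Modified-Since headers
        return date.strftime("%a, %d %b %Y %H:%M:%S GMT")
    if reverse:
        return date.strftime("%d-%m-%Y")
    return date.strftime("%Y-%m-%d")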
def add_movies(self, records, category):
    # get our media item
    dirItem = DirectoryItem()
    # FIXME: queue and now playing menu items
    # universal context menu items for trailer lists
    # dirItem.addContextMenuItem( xbmc.getLocalizedString( 13347 ), "XBMC.Action(Queue)" )
    dirItem.addContextMenuItem(self.m_addon.getLocalizedString(30903), u"XBMC.Action(Info)")
    # dirItem.addContextMenuItem( xbmc.getLocalizedString( 13350 ), "XBMC.ActivateWindow(10028)" )
    dirItem.addContextMenuItem(
        xbmc.getLocalizedString(24020),
        u"XBMC.RunScript({addon},task='configure')".format(
            addon=self.m_addon.getAddonInfo("Id")
        )
    )
    # print ["PSPSPSPSPSPSPSPS", self.m_addon.params]
    dirItem.addContextMenuItem(
        xbmc.getLocalizedString(137),
        u"XBMC.RunScript({addon},search={search}&category={category})".format(
            addon=self.m_addon.getAddonInfo("Id"),
            search=quote_plus(repr(self.m_addon.params.get("search", u""))),
            category=quote_plus(repr([u"", category][self.m_addon.getSetting("search.current.list")]))
        )
    )
    # initialize our movies list
    movies = []
    # video and audio stream details used by some skins to display flagging
    stream_details = {
        "video": {
            "Standard": {"codec": "h264", "aspect": 1.78, "width": 720, "height": 480},
            "480p": {"codec": "h264", "aspect": 1.78, "width": 720, "height": 480},
            "720p": {"codec": "h264", "aspect": 1.78, "width": 1280, "height": 720},
            "1080p": {"codec": "h264", "aspect": 1.78, "width": 1920, "height": 1080}
        },
        "audio": {
            "Standard": {"codec": "aac", "language": "en", "channels": 2},
            "480p": {"codec": "aac", "language": "en", "channels": 2},
            "720p": {"codec": "aac", "language": "en", "channels": 2},
            "1080p": {"codec": "aac", "language": "en", "channels": 2}
        }
    }
    # iterate thru and set trailer
    for movie in records:
        # set downloaded based on all trailers available
        downloaded = (
            sum([tlr in movie[27] for tlr in movie[25].split(",")
                 if (movie[27] is not None)]) == len(movie[25].split(","))
        )
        # set playcount based on all trailers available
        playcount = int(
            sum([tlr in movie[26] for tlr in movie[25].split(",")
                 if (movie[26] is not None)]) == len(movie[25].split(","))
        )
        # set URL and play parameter
        dirItem.url = u"{url}?play={id}".format(
            url=self.m_addon.params["path"], id=movie[0]
        )
        # Play Trailer
        context = [(
            self.m_addon.getLocalizedString(30902),
            u"XBMC.RunPlugin({url})".format(url=dirItem.url),
        )]
        # Download & Play
        if (self.m_addon.getSetting("trailer.play.mode") == 0 and
                sum([tlr in movie[27] for tlr in movie[25].split(",")
                     if (movie[27] is not None)]) < len(movie[24].split(","))):
            context += [(
                self.m_addon.getLocalizedString(30901),
                u"XBMC.RunPlugin({url}&download=True)".format(url=dirItem.url),
            )]
        # Check Showtimes
        context += [(
            self.m_addon.getLocalizedString(30900),
            u"XBMC.RunScript({id},showtimes={title})".format(
                id=self.m_addon.getAddonInfo("Id"),
                title=quote_plus(repr(movie[1].split(" | ")[0])),
            )
        )]
        # overlay
        overlay = [
            xbmcgui.ICON_OVERLAY_UNWATCHED,
            xbmcgui.ICON_OVERLAY_WATCHED
        ][playcount]
        # tagline
        tagline = [
            "",
            "In Theaters {date}".format(date=format_date(movie[4]))
        ][movie[4] != ""]
        # year
        year = int(movie[4][:4] or 0)
        # format duration, sum of all available trailers
        # duration = "{0:1.0f}:{1:02.0f}".format(*divmod(movie[23], 60))  # movie[15]
        # set proper date for sorting, we use downloaded date for downloaded lists
        # and release date for in theaters and coming soon lists
        if (category.startswith("downloaded")):
            date = movie[21].split(" ")[0]
        elif (category.startswith("intheaters:") or category.startswith("comingsoon:")):
            date = movie[4]
        else:
            date = movie[16]
        # set our listitem
        dirItem.listitem = xbmcgui.ListItem(
            movie[1].split(" | ")[-1],
            iconImage="DefaultVideo.png",
            thumbnailImage="{url}|User-Agent={ua}".format(
                url=movie[11], ua=quote_plus(movie[30])
            )
        )
        # set info
        dirItem.listitem.setInfo("video", {
            "Count": movie[0],
            "PlayCount": playcount,
            "SortTitle": movie[1].split(" | ")[0],
            "Title": u"{downloaded}{title}{trailers}".format(
                downloaded=[u"", u"* "][downloaded],
                title=movie[1].split(" | ")[-1],
                trailers=["", " - ({trailers})".format(
                    trailers=len(movie[24].split(","))
                )][len(movie[24].split(",")) > 1]
            ),
            "MPAA": movie[2],
            "Studio": movie[3],
            "Date": "{day}-{month}-{year}".format(
                day=date[8:], month=date[5:7], year=date[:4]
            ),
            "Year": year,
            "Director": movie[6],
            "Writer": movie[7],
            "Plot": movie[8] or "No plot available",
            "Cast": movie[9].split(" / "),
            "Genre": movie[10],
            "TagLine": tagline,
            "Size": movie[17],
            "lastplayed": movie[20] or "",
            "Overlay": overlay
        })
        # set stream details
        stream_details["video"][movie[14]]["duration"] = movie[23]
        dirItem.listitem.addStreamInfo("video", stream_details["video"][movie[14]])
        dirItem.listitem.addStreamInfo("audio", stream_details["audio"][movie[14]])
        # set additional properties
        dirItem.listitem.setProperty("ReleaseDate", format_date(movie[4]))
        dirItem.listitem.setProperty("Copyright", movie[5])
        dirItem.listitem.setProperty("AvailableTrailers", movie[24].replace(",", " / "))
        # dirItem.listitem.setProperty("fanart_image", self.m_addon.getSetting("fanart")[0])
        # dirItem.listitem.setProperty("Poster", movie[11])
        # used for video info dialog to search on trailers not database
        # dirItem.listitem.setProperty("Addon.Actor", movie[9].split(" / ")[0])
        # dirItem.listitem.setProperty("Addon.OnClick", "XBMC.RunScript(%s,search=%s)" % (self.m_addon.getAddonInfo("Id"), quote_plus(movie[9].split(" / ")[0].encode("UTF-8")),))
        # add context menu items to listitem with replaceItems=True so only ours show
        dirItem.listitem.addContextMenuItems(dirItem.context + context, replaceItems=True)
        # add the movie to our movie list
        movies += [(dirItem.url, dirItem.listitem, dirItem.isFolder,)]
    # add movies to our media list
    return self.media_window.addItems(movies)