def track(request):
    """Handle the track-subscription form.

    On a valid POST, builds a feed for the submitted username, persists a
    Feed plus one Track row per feed entry, and redirects.  On GET (or an
    invalid POST) the form is rendered.
    """
    if request.method == 'POST':
        form = UserForm(request.POST)
        if form.is_valid():
            user = form.cleaned_data['username']
            # build_feed(user, 5, 2) — presumably page-size/depth args;
            # TODO confirm against build_feed's signature.
            raw_feed = build_feed(user, 5, 2)
            f = Feed(username=user)
            f.save()
            # Renamed loop variable: the original reused 'track', which
            # shadowed this very view function.
            for entry in raw_feed:
                # Fall back to a placeholder image when the entry has no
                # artwork URL.
                art_url = 'http://i.imgur.com/BNBFGfg.jpg'
                if entry[5] is not None:  # identity check, not != None
                    art_url = entry[5]
                t = Track(id=entry[0],
                          date=entry[1],
                          title=entry[2],
                          artist=entry[3],
                          uri=entry[4],
                          art=art_url)
                t.save()
                f.tracks.add(t)
            return HttpResponseRedirect('track.html')
    else:
        form = UserForm()
    # Invalid POSTs fall through here with the bound form (shows errors).
    return render(request, 'subs/track.html', {'form': form})
def add_to_db(audio_files): for audio_file in audio_files: audio_file_id3 = eyed3.load(audio_file) # If the artist, album or track doesn't exist in the database, create # table(s) for them. try: if not Artist.objects.filter(name=audio_file_id3.tag.artist).exists(): artist = Artist(name=audio_file_id3.tag.artist) artist.save() if not Album.objects.filter(title=audio_file_id3.tag.album).exists(): album = Album(title=audio_file_id3.tag.album, \ artist=artist) album.save() if not Track.objects.filter(title=audio_file_id3.tag.title).exists(): track = Track(title=audio_file_id3.tag.title, \ album=album, \ artist=artist, \ fspath=audio_file, \ media_url=MEDIA_URL + audio_file.split(MEDIA_ROOT)[1]) track.save() print 'Added to DB: ' + audio_file_id3.tag.title except Exception as e: print 'Error: ' + e
def add_record(release):
    """Create (or fetch) a Record row from a discogs release object.

    Returns the existing Record when one with the same discogs_id is
    already stored; otherwise creates the Record together with its
    Artist and Track rows and returns it.
    """
    from models import Record, Artist, Track
    release_discogs_id = release.id
    try:
        # Check if we already have this album
        existing = Record.objects.get(discogs_id=release_discogs_id)
        return existing
    except Record.DoesNotExist:
        # Process record
        record_title = release.title
        # Discogs titles may look like "Artist - Title"; keep the part
        # after the first '- ' in that case.
        if (len(record_title.split('- '))>1):
            record_title = record_title.split('- ')[1]
        record = Record(discogs_id = release_discogs_id,
                        title = record_title,
                        year = release.year,
                        thumb = release.thumb,
                        notes = release.notes)
        record.save()
        # Process artists
        # NOTE(review): Artist rows are created unconditionally — the
        # same artist on a second release produces a duplicate row.
        for release_artist in release.artists:
            artist = Artist(discogs_id=release_artist.id,
                            name=release_artist.name)
            artist.save()
            record.artists.add(artist)
        # Process tracklist
        for release_track in release.tracklist:
            track = Track()
            track.position = release_track.position
            track.title = release_track.title
            track.duration = release_track.duration
            track.save()
            record.tracklist.add(track)
        record.save()
        return record
def track_bid(request, pk):
    """Record a click on a bid link, then redirect the client to it.

    Bug fixed: `if request.user:` is always truthy in Django (an
    AnonymousUser instance is an object, hence truthy), so anonymous
    clicks were attributed too.  The authenticated check is the correct
    guard (property since Django 1.10 — adjust to a call on older
    versions).
    """
    bid = get_object_or_404(Bid, pk=pk)
    t = Track()
    t.bid = bid
    if request.user.is_authenticated:
        t.user = request.user
    t.save()
    return HttpResponseRedirect(bid.link)
def test_track(self):
    """Creating ten Track rows should leave a count of exactly ten."""
    total = 10
    for idx in xrange(total):
        Track(band='lalal',
              release='lolo',
              name='Track: %s' % (idx,),
              fp_track_code='random: %s' % (idx,),
              year='1980',
              youtube_code='aakaka').save()
    self.assertEqual(Track.objects.count(), total)
def start(request):
    """Begin a reporting session: create a Track row, remember it in the
    session, and monkey-patch the logging module so subsequent log calls
    are captured as sub-requests.
    """
    request.first_request = True
    track = Track()
    track.save()
    print track
    # The session stores which Track collects this session's events.
    request.session["is_reporting"] = track.id
    # NOTE(review): these assignments mutate the *global* logging module
    # for the whole process, not just this request, and stack if start()
    # runs more than once — confirm that is intended.
    logging.debug = patch_function(logging.debug, TypeSubRequest.LOG)
    logging.info = patch_function(logging.info, TypeSubRequest.LOG)
    logging.critical = patch_function(logging.critical, TypeSubRequest.LOG)
    logging.error = patch_function(logging.error, TypeSubRequest.LOG)
    logging.exception = patch_function(logging.exception, TypeSubRequest.LOG)
def upload(request):
    """Receive a FLAC upload, transcode it to MP3, push the MP3 to the
    user's VK audio library, and return a jQuery-File-Upload response.

    The assumption here is that jQuery File Upload has been configured to
    send files one at a time; if multiple files can be uploaded
    simultaneously, upload_receive may return a list.

    Fixes: 'file' shadowed a builtin; the os.system call interpolated a
    user-controlled filename into a shell string (injectable) — replaced
    with the argument-list subprocess form; the MP3 handle passed to
    requests.post was never closed.
    """
    import subprocess

    uploaded = upload_receive(request)
    instance = Track(flac=uploaded, user=request.user)
    instance.save()

    basename = os.path.basename(instance.flac.file.name)
    filename = "%s%s%s" % (settings.MEDIA_ROOT, 'mp3/', basename)
    mp3_filename = "%s%s" % (filename, '.mp3')
    # List form avoids shell interpolation of user-controlled paths.
    subprocess.call(['track2track', '-V', 'quiet', '-t', 'mp3',
                     '-o', mp3_filename, instance.flac.path])
    instance.mp3 = "mp3/%s" % os.path.basename(mp3_filename)
    instance.save()

    # Upload the MP3 into the user's VK audio library.
    us = UserSocialAuth.objects.get(user=request.user)
    token = us.extra_data.get("access_token")
    vk = vkontakte.API(token=token)
    server = vk.audio.getUploadServer()
    url = server.get("upload_url")
    with open(instance.mp3.path, 'rb') as mp3_fh:
        r = requests.post(url, files={'file': mp3_fh})
    _json = json.loads(r.text)
    vk.audio.save(server=_json.get('server'),
                  hash=_json.get('hash'),
                  audio=_json.get('audio'))

    file_dict = {
        'name': basename,
        'size': instance.flac.file.size,
        # The assumption is that file_field is a FileField that saves to
        # the 'media' directory.
        'url': settings.MEDIA_URL + 'flac/' + basename,
        'thumbnail_url': settings.MEDIA_URL + 'flac/' + basename,
        'delete_url': reverse('jfu_delete', kwargs={'pk': instance.pk}),
        'delete_type': 'POST',
        'r': r.text,
    }
    return UploadResponse(request, file_dict)
def test_scrape_track_cassandra_exists(self):
    """scrape_track must not add anything when the track already exists."""
    yt_url = u'https://www.youtube.com/watch?v=Ckom3gf57Yw'
    mocked_search = MagicMock(return_value=yt_url)
    with patch('web.utils.YouTubeExtractor.search_youtube_links',
               mocked_search):
        # Pre-seed the model that scrape_track would otherwise create.
        Track(band='lalal', release='lolo', name='Track',
              fp_track_code='random', youtube_code='Ckom3gf57Yw').save()
        task = scrape_track.delay('year - release - artist - Lalalala',
                                  'tmp')
        name, folder = task.get()
        self.assertEqual(name, False)
        self.assertEqual(folder, False)
def generate_fingerprint_from_list(results, file_list): # TODO: os.system is thread safe?? # TODO: How to test this? codes_file = '/tmp/allcodes_%s.json' % (random.randint(1, 10000)) command = '/home/vagrant/echoprint-codegen/echoprint-codegen -s 10 30 < %s > %s' % (file_list, codes_file) os.system(command) # Create the Track models with open(codes_file, 'r') as data_file: data = json.load(data_file) for fingerprint in data: # check fp doesn't exist in database code_string = fingerprint.get('code') if code_string: response = fp.best_match_for_query(code_string) if not response.match(): label = [v for v in results if v[1] == fingerprint['metadata']['filename']][0][0] youtube_code = fingerprint['metadata']['filename'].replace('.mp3', '').replace('/tmp/', '') year = label.split('-')[0].strip() release = label.split('-')[1].strip() artist = label.split('-')[2].strip() title = label.split('-')[3].strip() fingerprint['metadata']['artist'] = artist fingerprint['metadata']['title'] = title # Track creation Track.sync() track = Track(band=artist, release=release, name=title, year=year, youtube_code=youtube_code) track.save() # Remove all - (due to limitation in fingerprint-server track_id match) fingerprint['metadata']['track_id'] = track.echoprint_id else: # remove duplicate element data.remove(fingerprint) print "This file is duplicated" # Overwrite with artist and title with open(codes_file, 'w') as data_file: data_file.write(json.dumps(data)) # Fastingest invoke => post all into echo-fingerprint codes, _ = parse_json_dump(codes_file) fp.ingest(codes) FileHandler.delete_file(codes_file) return True
def save_audio_post(request): """ Saves a Track """ form = TrackForm(request.POST, request.FILES) if form.is_valid(): the_track_file = request.FILES["file"] meta = metadata_for_filelike(the_track_file) newdoc = Track(docfile = request.FILES["file"]) try: newdoc.title = meta['title'] except: newdoc.title = request.FILES['file'].name try: newdoc.album = meta['album'] except: newdoc.album = 'unspecified' try: newdoc.author = meta['author'] except: newdoc.author = 'unspecified' try: newdoc.duration = meta['duration'] except: newdoc.duration = 'unspecified' try: newdoc.music_genre = meta['music_genre'] except: newdoc.music_genre = 'unspecified' newdoc.meta_data = meta if meta['mime_type'] == 'audio/mpeg': newdoc.body = "Audio track: "+newdoc.title+" was uploaded." if request.POST["body"] != "": newdoc.body = newdoc.body+ request.POST["body"] newdoc.save() else: print form.errors
def webhook_lidarr():
    """Bottle endpoint for Lidarr webhooks.

    Logs every imported track and persists it as a Track row; 'Test'
    events are acknowledged without side effects.

    Bug fixed: the original subscripted request.json['eventType'] before
    the `if not request.json` guard, so an empty payload raised instead
    of returning the intended 500 body.
    """
    try:
        # Validate the payload before indexing into it.
        if not request.json:
            error = {
                'error': 'Request JSON not correct',
                'code': 10,
            }
            return HTTPResponse(status=500, body=error)
        if request.json['eventType'] == 'Test':
            aprint('Received TEST webhook', 'WEBHOOK.MAIN')
            return HTTPResponse(status=200)
        webhook_request = request.json
        artist = webhook_request['artist']['name']
        tracks = webhook_request['tracks']
    except Exception as e:
        error = {
            'error': 'Request JSON not correct',
            'code': 10,
            'stack_trace': str(e)
        }
        return HTTPResponse(status=500, body=error)
    for track in tracks:
        track_data = {
            'ARTIST': artist,
            'TITLE': track['title'],
            'TRACK_NUMBER': track['trackNumber'],
            'QUALITY': track['quality']
        }
        msg = '{ARTIST} - {TITLE} ({TRACK_NUMBER}) | {QUALITY}'.format(
            ARTIST=track_data['ARTIST'],
            TITLE=track_data['TITLE'],
            TRACK_NUMBER=track_data['TRACK_NUMBER'],
            QUALITY=track_data['QUALITY'])
        new_track = Track(artist=track_data['ARTIST'],
                          title=track_data['TITLE'],
                          tracknumber=track_data['TRACK_NUMBER'],
                          quality=track_data['QUALITY'],
                          timestamp=datetime.datetime.now(current_tz))
        new_track.save()
        aprint(msg, 'WEBHOOK.MUSIC')
    return HTTPResponse(status=200)
def track_save(request):
    """Create or update a Track from POSTed form data, set its genres,
    and redirect to the track detail page."""
    genres = Genre.objects.filter(id__in=request.POST.getlist('genre'))
    track_id = request.POST.get('track_id')
    if track_id is not None:
        track = Track.objects.get(id=track_id)
        track.track_name = request.POST.get('track_name')
        track.rating = request.POST.get('rating')
        track.save()
    else:
        track = Track(
            track_name=request.POST.get('track_name'),
            rating=request.POST.get('rating'),
        )
        track.save()
    # Direct assignment to a many-to-many manager was deprecated and then
    # removed in modern Django; .set() replaces the relation in both the
    # update and the create branch (and drops the redundant extra save).
    track.genres.set(genres)
    return redirect('music.views.track_detail', track_id=track.id)
def index():
    """Render the tracking dashboard; on a valid submission persist a new
    Track entry and kick off the async weight-average recalculation."""
    form = TrackForm()
    if form.validate_on_submit():
        author_ref = g.user.to_dbref()
        entry = Track(weight=form.weight.data,
                      happy=form.happy.data,
                      diet=form.diet.data,
                      exercise=form.exercise.data,
                      floss=form.floss.data,
                      meditation=form.meditation.data,
                      note=form.note.data,
                      timestamp=datetime.utcnow(),
                      author=author_ref)
        entry.save()
        flash('Your post is now live!')
        calculate_weightAvg_async(author_ref)
        return redirect(url_for('index'))
    posts = Track.objects(author=g.user)
    analysis = Analysis.objects(author=g.user).first()
    return render_template("index.html",
                           title='Home',
                           form=form,
                           posts=posts,
                           analysis=analysis)
def get_data(self, **response_kwargs):
    """Persist a fresh Track and return its dict representation."""
    new_track = Track()
    new_track.save()
    return new_track.dict()
def update_library(tree, dry_run=False, inudesu=False):
    """Sync the Track table against an iTunes-style library tree.

    Returns a list of human-readable change descriptions.  New tracks are
    created hidden (unless inudesu); tracks missing from the tree are
    hidden.  With dry_run=True nothing is written.

    Bug fixed: 'composer' was part of the change check but was never
    written back, so any composer difference re-flagged the track as
    changed on every run; the duplicated msec assignment is also gone.
    """
    changes = []
    alltracks = Track.objects.filter(inudesu=inudesu)
    tracks_kept = []

    for tid in tree['Tracks']:
        changed = False
        new = False
        t = tree['Tracks'][tid]
        added = make_aware(t['Date Added'], utc)

        if 'Album' not in t:
            t['Album'] = ''  # to prevent future KeyErrors

        try:
            db_track = Track.objects.get(id=t['Persistent ID'])
        except Track.DoesNotExist:
            # we need to make a new track
            new = True
            db_track = Track()
        else:
            if ((db_track.id3_title != t['Name']) or
                    (db_track.id3_artist != t['Artist']) or
                    (db_track.id3_album != t['Album']) or
                    (db_track.msec != t['Total Time']) or
                    (db_track.composer != t.get('Composer')) or
                    (db_track.added != added)):
                # we need to update an existing track
                changed = True
                pre_change = unicode(db_track)

        if new or changed:
            db_track.id = t['Persistent ID']
            db_track.id3_title = t['Name']
            db_track.id3_artist = t['Artist']
            db_track.id3_album = t['Album']
            db_track.msec = t['Total Time']
            db_track.composer = t.get('Composer')
            db_track.added = added
            db_track.inudesu = inudesu

        if new:
            # Fresh tracks start hidden until vetted, except inudesu ones.
            if not inudesu:
                db_track.hidden = True
            else:
                db_track.hidden = False
            changes.append('new:\n%s' % unicode(db_track))

        if changed:
            changes.append('change:\n%s' % pre_change)
            changes.append('to:\n%s' % unicode(db_track))

        if (new or changed) and (not dry_run):
            db_track.save()

        tracks_kept.append(db_track)

    # Anything still visible that the tree no longer contains gets hidden.
    for track in [tr for tr in alltracks
                  if tr not in tracks_kept and not tr.hidden]:
        changes.append('hide:\n%s' % unicode(track))
        if not dry_run:
            track.hidden = True
            track.save()

    return changes
def loadTrackFromFile(in_file=in_file, track_id=track_id, verbose=True, trackPointsOnly=trackPointsOnly):
    """Import a GPS log file into a Track plus TrackPoint rows.

    With trackPointsOnly=True only TrackPoint rows are appended to the
    track identified by track_id; otherwise a new Track is created from
    the first valid fix.

    NOTE(review): the default arguments capture the *module-level*
    in_file / track_id / trackPointsOnly values at definition time.
    """
    infile = os.path.abspath(os.path.join(os.path.dirname(__file__), in_file))
    ext = os.path.basename(infile)[-3:]
    # Column layout depends on the source format: ".log" files are
    # space-delimited; everything else is ";"-delimited with shifted
    # lat/lon/alt/brg columns.
    if ext == "log":
        delimiter = " "
        col_px = 0
        col_utc = 1
        col_date = 1
        col_lat = 2
        col_lon = 3
        col_spd = 5
        col_alt = 4
        col_brg = 6
    else:
        delimiter = ";"
        col_px = 0
        col_utc = 1
        col_date = 1
        col_lat = 3
        col_lon = 4
        col_spd = 5
        col_alt = 6
        col_brg = 8
    logReader = csv.reader(open(infile, "rb"), delimiter=delimiter, quotechar='"')
    i = 0
    hasFirst = False
    trackSaved = False
    points = []
    # Rows whose timestamp is "none" or the Unix epoch are invalid fixes.
    pattern1 = "none"
    pattern2 = "1970-01-01T00:00:00.0Z"
    for row in logReader:
        if not re.search(pattern1, row[col_utc]) and not re.search(pattern2, row[2]):
            utc = row[col_utc]
            lat = row[col_lat]
            lon = row[col_lon]
            alt = row[col_alt]
            spd = row[col_spd]
            brg = row[col_brg]
            if ext == "log":
                # Pixel position continues across tiles of tile_width px;
                # i counts completed tiles.
                pxx = int(row[col_px])
                px = i * tile_width + pxx
                if pxx > tile_width - 30:
                    i += 1
                hasFirst = True
            else:
                if i > 0:  # ignore first line
                    pxx = int(row[col_px]) - import_offset
                    px = pxx
                    # px = (pxx - tile_offset) * tile_width
                    hasFirst = True
                else:
                    i += 1
            # Create the Track itself on the first valid fix (skipped
            # when we are only appending points to an existing track).
            if hasFirst and not trackSaved and not trackPointsOnly:
                t = time.strftime("%Y-%m-%d %H:%M:%S", time.strptime(utc.strip(), "%Y-%m-%dT%H:%M:%S.%fZ"))
                print t
                track = Track(
                    name=name,
                    time=t,
                    river_id=river_id,
                    title=name,
                    camera=camera,
                    offset=tile_offset,
                    width=total_width,
                    maxResolution=MaxResolution,
                    numZoomLevels=zoomlevels,
                    data_path=data_path,
                    height=tile_width,
                )
                track.save()
                trackSaved = True
                track_id = track.id
            if hasFirst:
                t = time.strftime("%Y-%m-%d %H:%M:%S", time.strptime(utc.strip(), "%Y-%m-%dT%H:%M:%S.%fZ"))
                pnt = Point(float(lon), float(lat))
                if len(points) > 0:
                    # Skip consecutive duplicate fixes.
                    if not pnt == points[len(points) - 1]:
                        trackpoint = TrackPoint(
                            track_id=track_id, px=px, time=t, geom=pnt, speed=spd, altitude=alt, heading=brg
                        )
                        trackpoint.save()
                        points.append(pnt)
                    else:
                        print "discard duplicate gps fix"
                else:
                    trackpoint = TrackPoint(
                        track_id=track_id, px=px, time=t, geom=pnt, speed=spd, altitude=alt, heading=brg
                    )
                    trackpoint.save()
                    points.append(pnt)
        else:
            print "discard non-valid gps log for at %s, fix: %s" % (row[1], row[2])
    # Build the track geometry from all accepted points.
    linestring = LineString(points)
    if not trackPointsOnly:
        # NOTE(review): if no valid row was seen, 'track' and 'brg' are
        # unbound here and this raises NameError — confirm inputs always
        # contain at least one valid fix.
        track.geom = linestring
        if brg > 0:
            track.direction = brg
        track.save()
        length = Track.objects.length().get(id=track.id).length.km
        track.length = length
        track.save()
    track = Track.objects.get(id=track_id)
    length = Track.objects.length().get(id=track_id).length.km
    track.geom = linestring
    track.save()
    print "imported %s trackpoints, total length: %s " % (len(points), length)
def add():
    """Flask route: show the upload form (GET) or ingest an uploaded .gpx
    file (POST): store the file, extract metadata via the gpx library,
    and create Track, Statistic and Tag rows.  On processing errors any
    partially created artifacts are removed again.
    """
    tags = Tag.select(Tag.value).distinct()  #pylint: disable=E1111
    if request.method == "POST":
        # Validate uploaded file
        if 'gpx-file' not in request.files:
            flash("No file uploaded", "error")
            return redirect(request.url)
        gpx_file = request.files['gpx-file']
        if gpx_file.filename == '':
            flash("No file selected", "error")
            return redirect(request.url)
        if not allowed_file(gpx_file.filename):
            flash("Only .gpx files supported!", "error")
            return redirect(request.url)
        # Store gpx file in filesystem
        gpx_filename = secure_filename(gpx_file.filename)
        gpx_filename = "%s_%s.gpx" % (
            gpx_filename[:-4], int(
                datetime.now().timestamp()))  # add timestamp to filename
        gpx_fspath = os.path.join(UPLOAD_BASE_DIR, UPLOAD_DIR, gpx_filename)
        os.makedirs(os.path.dirname(gpx_fspath), exist_ok=True)
        gpx_file.save(gpx_fspath)
        try:
            # Use gpx library to extract meta information from gpx file
            gpx = Gpx(gpx_fspath, True)
            gpx_metadata = gpx.process(
                force=True)  # TODO: improve gpx lib and set force to False
            # Read form values: tags and name
            track_name = request.form.get(
                "name") or "Unnamend activity on %s" % gpx_metadata["date"]
            tags = request.form.getlist('tag-select')
            new_tags = request.form.get("new-tags").replace(" ", "")
            if new_tags != "":
                tags += new_tags.split(",")
            tags.append(gpx_metadata["date"][:4])  # implicit add of the year
            tags = set(tags)  # Remove duplicate tags
            # Create DB ORM objects
            new_track = Track(name=track_name,
                              date=gpx_metadata["date"],
                              path=os.path.join(UPLOAD_DIR, gpx_filename))
            # Read statistics
            new_track_stats = Statistic(
                track=new_track,
                distance_m=gpx_metadata["total_distance"],
                duration_s=gpx_metadata["duration"],
                duration_total_s=gpx_metadata["total_duration"],
                max_speed=gpx_metadata["max_speed"],
                avg_speed=gpx_metadata["avg_speed"],
                elevation_up_m=gpx_metadata["total_ascent"],
                elevation_down_m=gpx_metadata["total_descent"])
        except Exception as e:
            flash("Error during gpx file processing: %s" % e, "error")
            # Clean up
            # The locals() checks guard against NameError when the failure
            # happened before the corresponding object was created.
            if 'new_track' in locals():
                new_track.delete_instance()
            if 'new_track_stats' in locals():
                new_track_stats.delete_instance()
            os.remove(
                os.path.join(os.path.dirname(os.path.realpath(__file__)),
                             UPLOAD_BASE_DIR, UPLOAD_DIR, gpx_filename))
            return redirect(request.url)
        # Store objects in DB
        new_track.save()
        new_track_stats.save()
        for tag in tags:
            my_tag = Tag(track=new_track, value=tag)
            my_tag.save()
        flash("Track '%s' added sucessfully." % track_name, "info")
        return redirect(url_for("show"))
    else:
        return render_template("add.html", tags=tags)
def save(extractor, metadataId):
    """Persist the extractor's metadata entry identified by metadataId as
    a Track, using the entry's popped 'fname' value as the file name."""
    meta = extractor.metadata[metadataId]
    track_fname = meta.pop('fname')
    Track(track_fname, meta).save()
def webhook():
    """Telegram webhook handler for a G2A price-tracking bot.

    Implements a small per-user state machine (states include 'track',
    'track_name', 'delete', ...) persisted on the User row: 'track'
    expects a G2A link, 'track_name' the name to store it under, and
    'delete' the name of the tracker to remove.  Replies are sent via
    bot.send_message; a blocked bot (Unauthorized) deletes the user.
    """
    from models import User, Track
    update = telegram.update.Update.de_json(request.get_json(force=True), bot)
    if update.effective_user is None:
        # TODO: I want exit automatically from channel
        return 'OK'
    command = update.message.text
    chat_id = update.message.chat_id
    chat_username = update.message.from_user.username
    # Default reply keyboard offering the four main commands.
    standard_keyboard = ReplyKeyboardMarkup(keyboard=[[
        KeyboardButton(text='track'),
        KeyboardButton(text='list')
    ], [KeyboardButton(text='delete'),
        KeyboardButton(text='info')]],
        resize_keyboard=True)
    new_message = 'Non ho capito, usa info per avere informazioni.'
    # keyboard = ReplyKeyboardRemove()
    keyboard = None
    disable_web_page_preview = False
    # Look up (or lazily create) the User row for this chat.
    user = User.query.filter_by(chat_id=chat_id).first()
    if not user:
        user = User(chat_id=chat_id, state='creation',
                    chat_username=chat_username)
        user.save()
    if not command:
        new_message = 'tipo di messaggio non supportato'
    elif command == 'cancel':
        # Abort whatever multi-step flow the user is currently in.
        if user.state == 'delete':
            new_message = 'eliminazione annullata'
        elif user.state == 'track':
            new_message = 'track annullata'
        elif user.state == 'track_name':
            # Drop the temporary tracker created in the 'track' step.
            track = Track.query.filter_by(user=user, name='temp').first()
            track.delete()
            new_message = 'track annullata'
        keyboard = standard_keyboard
        user.state = 'cancel'
    elif user.state == 'track':
        # Expecting a G2A product link; strip any query string.
        if command[:20] == 'https://www.g2a.com/':
            if command.find('?') != -1:
                command = command[:command.find('?')]
            url = command
            # NOTE(review): assumes the game id is always the last 14
            # characters of the URL — confirm.
            game_id = url[-14:]
            response = requests.get(url=G2A_URL + game_id, cookies=COOKIES)
            data = response.json()
            if len(data):
                new_price = float(data['lowest_price'])
                # Stored under the placeholder name 'temp' until the user
                # supplies a real name in the next step.
                track = Track(name='temp',
                              game_id=game_id,
                              game_link=url + '?mkey=' + G2A_KEY,
                              game_price=new_price,
                              game_median_price=new_price,
                              user=user)
                track.save()
                user.state = 'track_name'
                new_message = 'Con che nome lo vuoi salvare?'
            else:
                new_message = 'Link non valido'
        else:
            new_message = 'Link non valido'
    elif user.state == 'track_name':
        track = Track.query.filter_by(name=command, user=user).first()
        if track:
            # Name already taken by another tracker of this user.
            new_message = 'Nome non valido'
        else:
            track = Track.query.filter_by(name='temp', user=user).first()
            # TODO: if track is None?
            track.name = command
            user.state = 'track_finish'
            track.save()
            new_message = 'Track salvato'
            keyboard = standard_keyboard
    elif user.state == 'delete':
        track = Track.query.filter_by(user=user, name=command).first()
        if track:
            track.delete()
            user.state = 'delete_finish'
            new_message = 'Track Eliminato'
            keyboard = standard_keyboard
        else:
            new_message = 'Nome non valido'
    elif command == 'track':
        user.state = 'track'
        new_message = 'Inserisci il link di g2a'
        keyboard = ReplyKeyboardMarkup(
            keyboard=[[KeyboardButton(text='cancel')]],
            resize_keyboard=True)
    elif command == 'delete':
        # Offer one keyboard button per tracked game, plus cancel.
        tracks = Track.query.filter_by(user=user).all()
        if len(tracks):
            command_list = []
            for track in tracks:
                command_list.append([KeyboardButton(text=track.name)])
            command_list.append([KeyboardButton(text='cancel')])
            user.state = 'delete'
            new_message = 'Cosa vuoi eliminare?'
            keyboard = ReplyKeyboardMarkup(keyboard=command_list,
                                           resize_keyboard=True)
        else:
            new_message = 'Nessun gioco in lista'
            user.state = 'delete_empty'
    elif command == 'list':
        tracks = Track.query.filter_by(user=user).all()
        if len(tracks):
            new_message = 'Lista dei Track: \n\n'
            for track in tracks:
                new_message += 'Name: <a href="' + track.game_link + '">' + track.name + '</a>\n'
                new_message += 'Price: ' + str(track.game_price) + '\n'
                new_message += '\n'
            user.state = 'list'
            disable_web_page_preview = True
        else:
            new_message = 'Nessun gioco in lista'
            user.state = 'list_empty'
    elif command == 'info' or command == '/start':
        user.state = 'info'
        new_message = 'Benvenuto nel bot \n\n'
        new_message += 'Comandi: \n'
        new_message += 'track - aggiunge il link del gioco alla tua lista \n'
        new_message += 'list - mostra i giochi della tua lista \n'
        new_message += 'delete - cancella elementi dalla tua lista \n\n'
        new_message += 'Se hai problemi o vuoi segnalarmi bug scrivimi a @matteo_baldelli \n'
        new_message += '<a href="https://www.paypal.me/MatteoBaldelli">Donate</a> \n'
        new_message += '<a href="https://telegram.me/storebot?start=g2apricebot">Rate me</a>'
        keyboard = standard_keyboard
        disable_web_page_preview = True
    else:
        user.state = 'error'
        keyboard = standard_keyboard
    user.save()
    try:
        bot.send_message(
            chat_id,
            new_message,
            parse_mode='HTML',
            disable_web_page_preview=disable_web_page_preview,
            reply_markup=keyboard,
        )
    except Unauthorized:
        # The user blocked the bot: forget them.
        user.delete()
    return 'OK'
def loadTrackFromFile(in_file=in_file, track_id=track_id, verbose=True, trackPointsOnly=trackPointsOnly):
    """Import a GPS log file into a Track plus TrackPoint rows.

    Duplicate of the earlier loadTrackFromFile in this file — consider
    consolidating.  With trackPointsOnly=True only TrackPoint rows are
    appended to the track identified by track_id; otherwise a new Track
    is created from the first valid fix.

    NOTE(review): the default arguments capture the *module-level*
    in_file / track_id / trackPointsOnly values at definition time.
    """
    infile = os.path.abspath(os.path.join(os.path.dirname(__file__), in_file))
    ext = os.path.basename(infile)[-3:]
    # Column layout depends on the source format: ".log" files are
    # space-delimited; everything else is ";"-delimited with shifted
    # lat/lon/alt/brg columns.
    if ext == "log":
        delimiter = " "
        col_px = 0
        col_utc = 1
        col_date = 1
        col_lat = 2
        col_lon = 3
        col_spd = 5
        col_alt = 4
        col_brg = 6
    else:
        delimiter = ";"
        col_px = 0
        col_utc = 1
        col_date = 1
        col_lat = 3
        col_lon = 4
        col_spd = 5
        col_alt = 6
        col_brg = 8
    logReader = csv.reader(open(infile, 'rb'), delimiter=delimiter,
                           quotechar='"')
    i = 0
    hasFirst = False
    trackSaved = False
    points = []
    # Rows whose timestamp is "none" or the Unix epoch are invalid fixes.
    pattern1 = "none"
    pattern2 = "1970-01-01T00:00:00.0Z"
    for row in logReader:
        if not re.search(pattern1, row[col_utc]) and not re.search(
                pattern2, row[2]):
            utc = row[col_utc]
            lat = row[col_lat]
            lon = row[col_lon]
            alt = row[col_alt]
            spd = row[col_spd]
            brg = row[col_brg]
            if ext == "log":
                # Pixel position continues across tiles of tile_width px;
                # i counts completed tiles.
                pxx = int(row[col_px])
                px = i * tile_width + pxx
                if pxx > tile_width - 30:
                    i += 1
                hasFirst = True
            else:
                if i > 0:  #ignore first line
                    pxx = int(row[col_px]) - import_offset
                    px = pxx
                    #px = (pxx - tile_offset) * tile_width
                    hasFirst = True
                else:
                    i += 1
            # Create the Track itself on the first valid fix (skipped
            # when we are only appending points to an existing track).
            if hasFirst and not trackSaved and not trackPointsOnly:
                t = time.strftime(
                    "%Y-%m-%d %H:%M:%S",
                    time.strptime(utc.strip(), "%Y-%m-%dT%H:%M:%S.%fZ"))
                print t
                track = Track(name=name,
                              time=t,
                              river_id=river_id,
                              title=name,
                              camera=camera,
                              offset=tile_offset,
                              width=total_width,
                              maxResolution=MaxResolution,
                              numZoomLevels=zoomlevels,
                              data_path=data_path,
                              height=tile_width)
                track.save()
                trackSaved = True
                track_id = track.id
            if hasFirst:
                t = time.strftime(
                    "%Y-%m-%d %H:%M:%S",
                    time.strptime(utc.strip(), "%Y-%m-%dT%H:%M:%S.%fZ"))
                pnt = Point(float(lon), float(lat))
                if len(points) > 0:
                    # Skip consecutive duplicate fixes.
                    if not pnt == points[len(points) - 1]:
                        trackpoint = TrackPoint(track_id=track_id,
                                                px=px,
                                                time=t,
                                                geom=pnt,
                                                speed=spd,
                                                altitude=alt,
                                                heading=brg)
                        trackpoint.save()
                        points.append(pnt)
                    else:
                        print "discard duplicate gps fix"
                else:
                    trackpoint = TrackPoint(track_id=track_id,
                                            px=px,
                                            time=t,
                                            geom=pnt,
                                            speed=spd,
                                            altitude=alt,
                                            heading=brg)
                    trackpoint.save()
                    points.append(pnt)
        else:
            print "discard non-valid gps log for at %s, fix: %s" % (row[1], row[2])
    # Build the track geometry from all accepted points.
    linestring = LineString(points)
    if not trackPointsOnly:
        # NOTE(review): if no valid row was seen, 'track' and 'brg' are
        # unbound here and this raises NameError — confirm inputs always
        # contain at least one valid fix.
        track.geom = linestring
        if brg > 0:
            track.direction = brg
        track.save()
        length = Track.objects.length().get(id=track.id).length.km
        track.length = length
        track.save()
    track = Track.objects.get(id=track_id)
    length = Track.objects.length().get(id=track_id).length.km
    track.geom = linestring
    track.save()
    print "imported %s trackpoints, total length: %s " % (len(points), length)
def update_library(tree, dry_run=False, inudesu=False):
    """Synchronise Track rows with an iTunes-style 'Tracks' tree.

    Returns a list of change dicts {'type': 'new'|'change'|'hide',
    'item': <unicode repr>, ...}; 'change' entries also carry per-field
    alterations.  New tracks start hidden unless inudesu is set; tracks
    no longer present in the tree get hidden.  With dry_run=True nothing
    is written to the database.
    """
    changes = []
    alltracks = Track.objects.filter(inudesu=inudesu)
    tracks_kept = []
    for tid in tree['Tracks']:
        changed = False
        new = False
        t = tree['Tracks'][tid]
        added = make_aware(t['Date Added'], utc)
        if 'Album' not in t:
            t['Album'] = ''  # to prevent future KeyErrors
        try:
            db_track = Track.objects.get(id=t['Persistent ID'])
        except Track.DoesNotExist:
            # we need to make a new track
            new = True
            db_track = Track()
        else:
            # Diff stored fields against incoming metadata dict-wise so
            # individual field alterations can be reported below.
            db_dict = {
                'title': db_track.id3_title,
                'artist': db_track.id3_artist,
                'album': db_track.id3_album,
                'msec': db_track.msec,
                'composer': db_track.composer,
                'added': db_track.added,
            }
            track_dict = {
                'title': t['Name'],
                'artist': t['Artist'],
                'album': t['Album'],
                'msec': t['Total Time'],
                'composer': t.get('Composer', ''),
                'added': added,
            }
            if db_dict != track_dict:
                # we need to update an existing track
                changed = True
                field_alterations = [{
                    'field': k,
                    'from': db_dict[k],
                    'to': track_dict[k],
                } for k in db_dict.keys() if db_dict[k] != track_dict[k]]
        if new or changed:
            db_track.id = t['Persistent ID']
            db_track.id3_title = t['Name']
            db_track.id3_artist = t['Artist']
            db_track.id3_album = t['Album']
            db_track.msec = t['Total Time']
            db_track.composer = t.get('Composer', '')
            db_track.added = added
            db_track.inudesu = inudesu
        if new:
            # Fresh tracks start hidden until vetted, except inudesu ones.
            if not inudesu:
                db_track.hidden = True
            else:
                db_track.hidden = False
            changes.append({
                'type': 'new',
                'item': unicode(db_track),
            })
        if changed:
            changes.append({
                'type': 'change',
                'item': unicode(db_track),
                'changes': field_alterations,
            })
        if (new or changed) and (not dry_run):
            db_track.save()
        tracks_kept.append(db_track)
    # Anything still visible that the tree no longer contains gets hidden.
    for track in [tr for tr in alltracks
                  if tr not in tracks_kept and not tr.hidden]:
        changes.append({
            'type': 'hide',
            'item': unicode(track),
        })
        if not dry_run:
            track.hidden = True
            track.save()
    return changes
def save_and_quit(extractor):
    """Persist every metadata entry of the extractor as a Track (using
    each entry's popped 'fname' as the file name), then stop the loop."""
    for entry in extractor.metadata:
        entry_fname = entry.pop('fname')
        Track(entry_fname, entry).save()
    loop.quit()