def create_split_mnist(self, path, save_location):
    log('Creating the splits for Split-MNIST.')
    mnist_train_path = os.path.join(path, 'processed', 'mnist_train.pt')
    mnist_test_path = os.path.join(path, 'processed', 'mnist_test.pt')
    mnist_val_path = os.path.join(path, 'processed', 'mnist_val.pt')
    x_tr, y_tr = torch.load(mnist_train_path)
    x_te, y_te = torch.load(mnist_test_path)
    x_val, y_val = torch.load(mnist_val_path)
    class_per_task = int(10 / cfg.continual.n_tasks)
    train_tasks = []
    test_tasks = []
    val_tasks = []
    for t in range(cfg.continual.n_tasks):
        c1 = t * class_per_task
        c2 = (t + 1) * class_per_task
        i_tr = ((y_tr >= c1) & (y_tr < c2)).nonzero().view(-1)
        i_te = ((y_te >= c1) & (y_te < c2)).nonzero().view(-1)
        i_val = ((y_val >= c1) & (y_val < c2)).nonzero().view(-1)
        train_tasks.append([x_tr[i_tr].clone(), y_tr[i_tr].clone()])
        test_tasks.append([x_te[i_te].clone(), y_te[i_te].clone()])
        val_tasks.append([x_val[i_val].clone(), y_val[i_val].clone()])
    torch.save([train_tasks, test_tasks, val_tasks], save_location)
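# A minimal usage sketch (not part of the original code): how the file written by
# create_split_mnist() above could be loaded back. The path 'splits.pt' and the
# variable names are illustrative assumptions only.
import torch

train_tasks, test_tasks, val_tasks = torch.load('splits.pt')
x0, y0 = train_tasks[0]  # image and label tensors for the first task
print(x0.shape, y0.shape)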
def reject(request):
    res = {'code': 0, 'msg': 'success', 'data': {}}
    params = request.POST.dict()
    required = {
        'id': {'required': True},
        'user_id': {'required': True}
    }
    check_res = check(required, params)
    if check_res is None or check_res['code'] != 0:
        return JsonResponse(check_res)
    try:
        dynamic = Dynamic.objects.get(id=params['id'])
        if int(params['user_id']) != dynamic.user_id:
            res = {'code': -3, 'msg': '当前用户无权限', 'data': dynamic.format()}  # current user has no permission
            return JsonResponse(res)
        if dynamic.state == 2:
            dynamic.state = 1
            dynamic.save()
        else:
            res = {'code': -3, 'msg': '状态不可拒绝', 'data': dynamic.format()}  # state cannot be rejected
            return JsonResponse(res)
    except Exception as e:
        res = {'code': -2, 'msg': e.__str__(), 'data': []}
        utils.log('ERROR', 'dynamic reject', res['msg'], data=params)
    return JsonResponse(res)
def _ProcessMessages(self):
    # nothing to do if we are not connected to an Emby server
    if not self._connected:
        return

    while True:
        try:
            message = self._websocket.recv()
            if message is None:
                break

            messageObj = json.loads(message)
            if not messageObj:
                log('invalid JSON message ({}) from {} received: {}'.format(
                    len(message), mediaProvider2str(self._mediaProvider), message), xbmc.LOGWARNING)
                continue

            self._ProcessMessage(messageObj)
        except websocket.WebSocketTimeoutException:
            break
        except Exception as error:
            log('unknown exception when receiving data from {}: {}'.format(
                mediaProvider2str(self._mediaProvider), error.args[0]), xbmc.LOGWARNING)
            break
def channel(channel_id: str):
    """Load stream for the required channel id"""
    log('Loading channel {}'.format(channel_id), LogLevel.INFO)
    stream = get_provider().get_stream_info(channel_id)
    if not stream:
        ok_dialog(localize(30900))
        return

    is_helper = inputstreamhelper.Helper(stream['manifest_type'], drm=stream['drm'])
    if not is_helper.check_inputstream():
        ok_dialog(localize(30901))
        return

    listitem = xbmcgui.ListItem(path=stream['path'])
    listitem.setMimeType(stream['mime_type'])
    listitem.setProperty('inputstream', 'inputstream.adaptive')
    listitem.setProperty('inputstream.adaptive.manifest_type', stream['manifest_type'])
    listitem.setProperty('inputstream.adaptive.manifest_update_parameter', 'full')
    listitem.setProperty('inputstream.adaptive.license_type', stream['license_type'])
    listitem.setProperty('inputstream.adaptive.license_key', stream['license_key'])
    xbmcplugin.setResolvedUrl(plugin.handle, True, listitem=listitem)
def checkLogin(self):
    if self.finished:
        return not self.expired

    url = Url.append(constants.URL_EMBY_CONNECT_BASE, constants.URL_EMBY_CONNECT_PIN)
    url = Url.addOptions(url, {
        constants.URL_QUERY_DEVICE_ID: self.deviceId,
        constants.URL_QUERY_PIN: self.pin,
    })

    resultObj = Request.GetAsJson(url)
    if not resultObj or \
       constants.PROPERTY_EMBY_CONNECT_PIN_IS_CONFIRMED not in resultObj or \
       constants.PROPERTY_EMBY_CONNECT_PIN_IS_EXPIRED not in resultObj:
        log('failed to check status of PIN {} at {}: {}'.format(self.pin, url, resultObj), xbmc.LOGWARNING)
        self.finished = True
        self.expired = True
        return False

    self.finished = resultObj.get(constants.PROPERTY_EMBY_CONNECT_PIN_IS_CONFIRMED)
    self.expired = resultObj.get(constants.PROPERTY_EMBY_CONNECT_PIN_IS_EXPIRED)
    if self.expired:
        self.finished = True

    return self.finished
def updateURL(self, ip, port):
    if self.__ip != ip or self.__port != port:
        log("updateURL required:" + str(ip))
        self.__url = "ws://" + ip + ":" + str(port)
        del self.__ws
        self.connect()
def insert(request):
    res = {'code': 0, 'msg': 'success', 'data': []}
    params = request.POST.dict()
    stu_id = params['stu_id']
    params.pop('stu_id')
    params['ctime'] = datetime.datetime.strptime(params['ctime'], "%Y-%m-%d %H:%M:%S")
    try:
        user, created = User.objects.update_or_create(stu_id=stu_id, defaults=params)
        # save the avatar locally
        rpc_res = rpc(fc='upload/avatar', data={
            'avatar': user.avatar,
            'user_id': user.id
        })
        if rpc_res['code'] is not None:
            if rpc_res['code'] == 0:
                user.avatar = rpc_res['data']['avatar']
                user.save()
            else:
                log('ERROR', 'user login', 'failed to save avatar', data=user.avatar)
        res['data'] = user.format()
    except Exception as e:
        res = {'code': -2, 'msg': e.__str__(), 'data': []}
        log('ERROR', '@user insert', e.__str__())
    return JsonResponse(res)
async def on_message(self, message):
    async with self.lock:
        user = message.author
        if (message.channel.id == self.channel.id) and (not user.bot) and (user not in self.players):
            log(f'NEW PLAYER: {user.name}#{user.discriminator}', indent=1)
            self.players.append(user)
            await self.refresh_message()
async def evaluate_ab_guesses(self):
    killed_players = []
    log(f'Evaluating player guesses for aberration: "{self.current_ab}"')
    for user, guess_string in self.ab_guesses.items():
        if AbsGame.AbsGameSession.is_guess_correct(guess_string, self.current_ab, self.selected_abs):
            log(f'CORRECT: {user.name}#{user.discriminator} guessed "{guess_string}".', indent=1)
        else:
            log(f'INCORRECT: {user.name}#{user.discriminator} guessed "{guess_string}".', indent=1)
            killed_players.append(user)
    for user in self.players_alive:
        if user not in self.ab_guesses:
            log(f'TOO SLOW: {user.name}#{user.discriminator} did not make a guess in time!', indent=1)
            killed_players.append(user)
    killed_players = [user for user in killed_players if user not in self.players_dead]
    self.players_alive = [user for user in self.players_alive if user not in killed_players]
    if killed_players:
        self.players_dead.extend(killed_players)
        log(f'The following players have died: {AbsGame.AbsGameSession.get_players_string(killed_players)}')
        embed = AbsGame.AbsGameSession.create_game_embed(
            title_singular=TEXT_GAME_DEATH_TITLE_SINGULAR,
            title_plural_format=TEXT_GAME_DEATH_TITLE_PLURAL_FORMAT,
            subtitle=TEXT_GAME_DEATH_SUBTITLE,
            players_label=TEXT_GAME_DEATH_LABEL,
            players=killed_players)
        await self.channel.send(embed=embed)
def linkEmbyConnect(handle, _):
    # retrieve the media provider
    mediaProvider = xbmcmediaimport.getProvider(handle)
    if not mediaProvider:
        log('cannot retrieve media provider', xbmc.LOGERROR)
        return

    # get the media provider settings
    providerSettings = mediaProvider.prepareSettings()
    if not providerSettings:
        return

    # make sure we have a valid device ID
    deviceId = providerSettings.getString(emby.constants.SETTING_PROVIDER_DEVICEID)
    if not deviceId:
        deviceId = Request.GenerateDeviceId()
        providerSettings.setString(emby.constants.SETTING_PROVIDER_DEVICEID, deviceId)

    embyConnect = linkToEmbyConnect(deviceId)
    if not embyConnect:
        return

    # make sure the configured Emby server is still accessible
    serverUrl = ProviderSettings.GetUrl(providerSettings)
    matchingServer = None
    serverId = Server.GetServerId(mediaProvider.getIdentifier())

    # get all connected servers
    servers = EmbyConnect.GetServers(embyConnect.accessToken, embyConnect.userId)
    if not servers:
        log('no servers available for Emby Connect user id {}'.format(embyConnect.userId), xbmc.LOGWARNING)
        return

    for server in servers:
        if server.systemId == serverId:
            matchingServer = server
            break

    if not matchingServer:
        log('no Emby server matching {} found'.format(serverUrl), xbmc.LOGWARNING)
        xbmcgui.Dialog().ok(localise(32038), localise(32061))
        return

    # change the settings
    providerSettings.setString(emby.constants.SETTING_PROVIDER_EMBY_CONNECT_USER_ID, embyConnect.userId)
    providerSettings.setString(emby.constants.SETTING_PROVIDER_EMBY_CONNECT_ACCESS_KEY, matchingServer.accessKey)

    success = False
    try:
        success = Server(mediaProvider).Authenticate(force=True)
    except:
        pass

    if success:
        xbmcgui.Dialog().ok(localise(32038), localise(32062))
        log('successfully linked to Emby Connect server {} ({}) {}'.format(matchingServer.name, serverId, serverUrl))
    else:
        xbmcgui.Dialog().ok(localise(32038), localise(32061))
        log('failed to link to Emby Connect server {} ({}) {}'.format(matchingServer.name, serverId, serverUrl), xbmc.LOGWARNING)
def importItemsGenerator(handle, embyServer, url, mediaType, viewId, embyMediaType=None, viewName=None,
                         raw=False, showProgress=True, allowDirectPlay=True):
    totalCount = 0
    startIndex = 0
    while True:
        if shouldCancel(handle, progress=startIndex, total=totalCount, showProgress=showProgress):
            return None

        try:
            # retrieve all items matching the current media type
            totalCount, importedItemsCount, importedItems = \
                importItemsChunked(handle, embyServer, url, mediaType, viewId, startIndex, ITEM_REQUEST_LIMIT,
                                   embyMediaType=embyMediaType, viewName=viewName, raw=raw,
                                   showProgress=showProgress, allowDirectPlay=allowDirectPlay)
            yield importedItems

            # check if we have retrieved all available items
            startIndex += importedItemsCount
            if startIndex >= totalCount:
                break
        except RuntimeError as e:
            log(str(e), xbmc.LOGERROR)
            return None
def exchange(self):
    if not self.pin:
        return None
    if not self.finished or self.expired:
        return None
    if self._authenticationResult:
        return self._authenticationResult

    url = Url.append(constants.URL_EMBY_CONNECT_BASE, constants.URL_EMBY_CONNECT_PIN,
                     constants.URL_EMBY_CONNECT_PIN_AUTHENTICATE)
    body = {
        constants.URL_QUERY_DEVICE_ID: self.deviceId,
        constants.URL_QUERY_PIN: self.pin,
    }

    resultObj = Request.PostAsJson(url, json=body)
    if not resultObj or \
       constants.PROPERTY_EMBY_CONNECT_PIN_USER_ID not in resultObj or \
       constants.PROPERTY_EMBY_CONNECT_PIN_ACCESS_TOKEN not in resultObj:
        log('failed to authenticate with PIN {} at {}: {}'.format(self.pin, url, resultObj))
        return None

    self._authenticationResult = EmbyConnect.AuthenticationResult(
        accessToken=resultObj.get(constants.PROPERTY_EMBY_CONNECT_PIN_ACCESS_TOKEN),
        userId=resultObj.get(constants.PROPERTY_EMBY_CONNECT_PIN_USER_ID))
    return self._authenticationResult
def apply(request):
    res = {'code': 0, 'msg': 'success', 'data': {}}
    params = request.POST.dict()
    required = {
        'id': {'required': True},
        'user_id': {'required': True}
    }
    check_res = check(required, params)
    if check_res is None or check_res['code'] != 0:
        return JsonResponse(check_res)
    try:
        dynamic = Dynamic.objects.get(id=params['id'])
        if dynamic.state == 1:
            dynamic.state = 2
            dynamic.belongsTo = params['user_id']
            res['data']['user_info'] = dynamic.format()['user_info']
            dynamic.save()
        else:
            res = {'code': -3, 'msg': '他人正在申领', 'data': dynamic.format()}  # someone else is already claiming it
            return JsonResponse(res)
        # send a message to confirm
    except Exception as e:
        res = {'code': -2, 'msg': e.__str__(), 'data': []}
        utils.log('ERROR', 'dynamic apply', res['msg'], data=params)
    return JsonResponse(res)
def startapp(app_name):
    """ Create django app """
    # create django app and move to apps
    local("python manage.py startapp {0}".format(app_name))
    local("mv {0} {1}/apps/".format(app_name, project_name))
    # make management command directory
    local("mkdir {0}/apps/{1}/management".format(project_name, app_name))
    local("mkdir {0}/apps/{1}/management/commands".format(project_name, app_name))
    local("touch {0}/apps/{1}/management/__init__.py".format(project_name, app_name))
    local("touch {0}/apps/{1}/management/commands/__init__.py".format(project_name, app_name))
    log("\nHEADS UP! Make sure you add '{0}.apps.{1}' ".format(project_name, app_name))
    log("to INSTALLED_APPS in settings/common.py")
def Exchange(baseUrl, accessKey, userId, deviceId=None):
    if not baseUrl:
        raise ValueError('invalid baseUrl')
    if not accessKey:
        raise ValueError('invalid accessKey')
    if not userId:
        raise ValueError('invalid userId')

    exchangeUrl = server.Server.BuildConnectExchangeUrl(baseUrl, userId)
    headers = Request.PrepareApiCallHeaders(deviceId=deviceId)
    headers.update({
        constants.EMBY_CONNECT_TOKEN_HEADER: accessKey,
    })

    resultObj = Request.GetAsJson(exchangeUrl, headers=headers)
    if not resultObj or \
       constants.PROPERTY_EMBY_CONNECT_EXCHANGE_LOCAL_USER_ID not in resultObj or \
       constants.PROPERTY_EMBY_CONNECT_EXCHANGE_ACCESS_TOKEN not in resultObj:
        log('invalid response from {}: {}'.format(exchangeUrl, resultObj))
        return None

    return EmbyConnect.AuthenticationResult(
        accessToken=resultObj.get(constants.PROPERTY_EMBY_CONNECT_EXCHANGE_ACCESS_TOKEN),
        userId=resultObj.get(constants.PROPERTY_EMBY_CONNECT_EXCHANGE_LOCAL_USER_ID)
    )
async def get_permission_config_for_server(self, server_id: int, permission: Permission) -> PermissionConfig:
    async with self.cache_lock:
        if server_id not in self.cache:
            self.cache[server_id] = {}
        if permission.id not in self.cache[server_id]:
            async with connect(self.db) as connection:
                query = 'SELECT * FROM permissions WHERE server_id=? AND permission_id=?'
                async with connection.execute(query, (server_id, permission.id)) as cursor:
                    row = await cursor.fetchone()
                    if row:
                        server = self.bot.get_guild(server_id)
                        whitelisted_channel_ids = set()
                        for channel_id in loads(row[3]):
                            if self.is_available_channel(server, channel_id):
                                whitelisted_channel_ids.add(channel_id)
                            else:
                                log(f'WARNING: Channel {channel_id} in "{server.name}" is no longer available.')
                        permission_config = PermissionConfig.get_config(
                            is_enabled=row[2],
                            whitelisted_channel_ids=frozenset(whitelisted_channel_ids))
                    else:
                        permission_config = PermissionConfig.get_default_config_for_permission(permission)
                    self.cache[server_id][permission.id] = permission_config
        return self.cache[server_id][permission.id]
def getTrailer(embyServer, itemId, itemObj, allowDirectPlay=True):
    # prefer local trailers if direct play is allowed
    if allowDirectPlay and itemObj.get(constants.PROPERTY_ITEM_LOCAL_TRAILER_COUNT, 0):
        localTrailers = Library.GetLocalTrailers(embyServer, itemId)
        if not localTrailers:
            log('failed to retrieve local trailers for item with ID {}'.format(itemId))
        else:
            localTrailerUrl = Api.getPlaybackUrl(embyServer, itemId, localTrailers[0],
                                                 allowDirectPlay=allowDirectPlay)
            if localTrailerUrl:
                return localTrailerUrl

    # otherwise use the first remote trailer
    if constants.PROPERTY_ITEM_REMOTE_TRAILERS in itemObj:
        remoteTrailers = itemObj.get(constants.PROPERTY_ITEM_REMOTE_TRAILERS)
        if remoteTrailers:
            return remoteTrailers[0].get(constants.PROPERTY_ITEM_REMOTE_TRAILERS_URL, None)

    return None
def show_parsed_vod_episode_links(type, channel, show, episode):
    try:
        links = []
        episode_links = vaderClass.getWebVodMediaLinks(type, channel, show, episode)
        for item in episode_links:
            media_list = item['media_url']
            source_url = item['source_url']
            source_name = item['source_name']
            numLinks = len(media_list)
            itemNum = 1
            for media_url in media_list:
                appendString = str(itemNum) + '/' + str(numLinks)
                netloc = urlparse.urlparse(media_url)[1]
                if 'tvlogy.to' not in netloc:
                    addDirectoryItem(
                        plugin.handle,
                        plugin.url_for(playExternalLink, link=urllib.quote_plus(media_url)),
                        ListItem(source_name + ' : ' + netloc + ' : ' + appendString),
                        True)
                itemNum = itemNum + 1
    except Exception as e:
        utils.log("Error listing streams \n{0}\n{1}".format(e, traceback.format_exc()))
        pass
    endOfDirectory(plugin.handle)
def discoverProviderLocally(handle, options):
    baseUrl = xbmcgui.Dialog().input(localise(32050), 'http://')
    if not baseUrl:
        return None

    log('trying to discover an Emby server at {}...'.format(baseUrl))
    try:
        serverInfo = emby.api.server.Server.GetInfo(baseUrl)
        if not serverInfo:
            return None
    except:
        return None

    providerId = Server.BuildProviderId(serverInfo.id)
    providerIconUrl = Server.BuildIconUrl(baseUrl)

    provider = xbmcmediaimport.MediaProvider(
        providerId, baseUrl, serverInfo.name, providerIconUrl, emby.constants.SUPPORTED_MEDIA_TYPES)
    provider.setIconUrl(kodi.Api.downloadIcon(provider))

    # store local authentication in settings
    providerSettings = provider.prepareSettings()
    if not providerSettings:
        return None

    providerSettings.setString(
        emby.constants.SETTING_PROVIDER_AUTHENTICATION,
        emby.constants.SETTING_PROVIDER_AUTHENTICATION_OPTION_LOCAL)
    providerSettings.save()

    log('Local Emby server {} successfully discovered at {}'.format(mediaProvider2str(provider), baseUrl))
    return provider
def _ProcessMessages(self):
    """Trigger processing of messages from the media providers websocket"""
    # nothing to do if we are not connected to a Plex server
    if not self._connected:
        return

    while True:
        try:
            message = self._websocket.recv()
            if not message:
                break

            messageObj = json.loads(message)
            if not messageObj:
                log((
                    f"invalid JSON message ({len(message)}) from {mediaProvider2str(self._mediaProvider)} "
                    f"received: {message}"), xbmc.LOGWARNING)
                continue

            self._ProcessMessage(messageObj)
        except websocket.WebSocketTimeoutException:
            break
        except Exception as e:
            # TODO(Montellese): remove workaround for Kodi Python 3 issue
            if e.args and e.args[0] == 'timed out':
                break

            log(
                f"unknown exception when receiving data from {mediaProvider2str(self._mediaProvider)}: "
                f"{e.args[0]}", xbmc.LOGWARNING)
            break
def play_movie(category_id, name):
    streams = vaderClass.get_category_id_vod(category_id, sort=True)
    utils.log('Trying to play ' + name)
    for stream in streams:
        if stream['name'].lower() == name.lower():
            chanName = stream['name']
            stream_id = stream['stream_id']
            icon = stream['stream_icon']
            container_extension = stream['container_extension']
            chanUrl = vaderClass.build_stream_url(stream_id, extension=container_extension, base='movie')
            info = {}
            info['title'] = name
            listitem = xbmcgui.ListItem(path=chanUrl, iconImage=icon)
            listitem.setInfo("video", info)
            listitem.setPath(chanUrl)
            win = xbmcgui.Window(10000)
            win.setProperty('vader.playing', 'True')
            xbmcplugin.setResolvedUrl(int(sys.argv[1]), True, listitem)
            break
    return True
def show_vod_recent(type):
    streams = vaderClass.get_recent_vod(type)
    for stream in streams:
        utils.log(str(stream))
        chanName = stream['name']
        stream_id = stream['stream_id']
        icon = stream['stream_icon']
        container_extension = stream['container_extension']
        chanUrl = vaderClass.build_stream_url(stream_id, extension=container_extension, base='movie')
        title = chanName
        title = str(title.encode('utf-8').decode('ascii', 'ignore'))
        plugintools.add_playable(title=title, url=chanUrl, thumbnail=icon, plot='',
                                 isPlayable=True, folder=False)
    endOfDirectory(plugin.handle)
def fromString(data):
    ServerPropertyId = 'Id'
    ServerPropertyName = 'Name'
    ServerPropertyAddress = 'Address'

    if data is None:
        return None

    data = ensure_str(data)
    obj = json.loads(data)
    if ServerPropertyId not in obj or ServerPropertyName not in obj or ServerPropertyAddress not in obj:
        log('invalid discovery message received: {}'.format(str(data)), xbmc.LOGWARNING)
        return None

    server = Server.Discovery()
    server.id = obj[ServerPropertyId]
    server.name = obj[ServerPropertyName]
    server.address = obj[ServerPropertyAddress]
    server.registered = False
    server.lastseen = time.time()

    if not server.id or not server.name or not server.address:
        return None

    return server
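# Illustrative sketch only (not from the original code): the kind of discovery payload
# fromString() above expects -- a JSON object carrying the 'Id', 'Name' and 'Address'
# properties. The field values below are made-up assumptions.
sample = '{"Id": "abc123", "Name": "Living Room Emby", "Address": "http://192.168.1.10:8096"}'
discovered = fromString(sample)  # returns a Server.Discovery instance or None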
def loadProviderSettings(handle, options):
    # retrieve the media provider
    mediaProvider = xbmcmediaimport.getProvider(handle)
    if not mediaProvider:
        log('cannot retrieve media provider', xbmc.LOGERROR)
        return

    settings = mediaProvider.getSettings()
    if not settings:
        log('cannot retrieve media provider settings', xbmc.LOGERROR)
        return

    # make sure we have a device identifier
    if not settings.getString(emby.constants.SETTING_PROVIDER_DEVICEID):
        settings.setString(emby.constants.SETTING_PROVIDER_DEVICEID, str(uuid.uuid4()))

    settings.registerActionCallback(emby.constants.SETTING_PROVIDER_LINK_EMBY_CONNECT, 'linkembyconnect')
    settings.registerActionCallback(emby.constants.SETTING_PROVIDER_TEST_AUTHENTICATION, 'testauthentication')

    # register a setting options filler for the list of users
    settings.registerOptionsFillerCallback(emby.constants.SETTING_PROVIDER_USER, 'settingoptionsfillerusers')

    settings.setLoaded()
def evaluate_classification_model(weight, observed_tasks, verbose=True, mode='Test'):
    if cfg.is_cifar_10:
        classification_model = getattr(lib.baselines.common, cfg.model)(nclasses=10).to(cfg.device)
    elif cfg.is_cifar_100 or cfg.is_mini_imagenet:
        classification_model = getattr(lib.baselines.common, cfg.model)(nclasses=100).to(cfg.device)
    else:
        classification_model = getattr(models.classifiers, cfg.model)().to(cfg.device)
    if cfg.verbose:
        log(classification_model)
    new_state_dict = construct_state_dict_from_weights(classification_model, weight)
    classification_model.load_state_dict(new_state_dict)
    classification_model.eval()
    accuracy = lib.train.test(classification_model, observed_tasks, verbose=verbose, mode=mode)
    return accuracy, classification_model
def create_permuted_mnist(self, path, save_location):
    log('Creating %d permutations of MNIST.' % cfg.continual.n_tasks)
    mnist_train_path = os.path.join(path, 'processed', 'mnist_train.pt')
    mnist_test_path = os.path.join(path, 'processed', 'mnist_test.pt')
    mnist_val_path = os.path.join(path, 'processed', 'mnist_val.pt')
    x_tr, y_tr = torch.load(mnist_train_path)
    x_te, y_te = torch.load(mnist_test_path)
    x_val, y_val = torch.load(mnist_val_path)
    x_tr = x_tr.view(-1, 28 * 28)
    x_te = x_te.view(-1, 28 * 28)
    x_val = x_val.view(-1, 28 * 28)
    train_tasks = []
    test_tasks = []
    val_tasks = []
    for task in range(cfg.continual.n_tasks):
        perm = torch.randperm(28 * 28)
        train_tasks.append([x_tr.index_select(1, perm).view(-1, 28, 28), y_tr])
        test_tasks.append([x_te.index_select(1, perm).view(-1, 28, 28), y_te])
        val_tasks.append([x_val.index_select(1, perm).view(-1, 28, 28), y_val])
    torch.save([train_tasks, test_tasks, val_tasks], save_location)
def create_rotated_mnist(self, path, save_location):
    log('Creating %d rotations of MNIST.' % cfg.continual.n_tasks)
    mnist_train_path = os.path.join(path, 'processed', 'mnist_train.pt')
    mnist_test_path = os.path.join(path, 'processed', 'mnist_test.pt')
    mnist_val_path = os.path.join(path, 'processed', 'mnist_val.pt')
    x_tr, y_tr = torch.load(mnist_train_path)
    x_te, y_te = torch.load(mnist_test_path)
    x_val, y_val = torch.load(mnist_val_path)
    train_tasks = []
    test_tasks = []
    val_tasks = []
    min_rotation = 0.
    max_rotation = 180.
    for task in range(cfg.continual.n_tasks):
        min_rot = 1.0 * task / cfg.continual.n_tasks * (max_rotation - min_rotation) + min_rotation
        max_rot = 1.0 * (task + 1) / cfg.continual.n_tasks * (max_rotation - min_rotation) + min_rotation
        rot = random.random() * (max_rot - min_rot) + min_rot
        log('Rotating by %s degrees' % str(rot))
        train_tasks.append([self.rotate_dataset(x_tr, rot), y_tr])
        test_tasks.append([self.rotate_dataset(x_te, rot), y_te])
        val_tasks.append([self.rotate_dataset(x_val, rot), y_val])
    torch.save([train_tasks, test_tasks, val_tasks], save_location)
def update(request):
    res = {'code': 0, 'msg': 'success', 'data': {}}
    params = request.POST.dict()
    required = {
        'id': {'required': True},
        'user_id': {'required': False},
        'type': {'required': False},
        'category': {'required': False},
        'title': {'required': False},
        'desc': {'required': False},
        'images': {'required': False},
        'location': {'required': False},
        'meta': {'required': False},
    }
    check_res = check(required, params)
    if check_res is None or check_res['code'] != 0:
        return JsonResponse(check_res)
    id = params['id']
    params.pop('id')
    try:
        Dynamic.objects.filter(id=id).update(**params)
    except Exception as e:
        res = {'code': -2, 'msg': e.__str__(), 'data': []}
        utils.log('ERROR', 'dynamic update', res['msg'], data=params)
    return JsonResponse(res)
def Authenticate(username, password):
    if not username:
        raise ValueError('invalid username')
    if not password:
        raise ValueError('invalid password')

    url = Url.append(constants.URL_EMBY_CONNECT_BASE, constants.URL_EMBY_CONNECT_AUTHENTICATE)
    headers = EmbyConnect._getApplicationHeader()
    body = {
        constants.PROPERTY_EMBY_CONNECT_AUTHENTICATION_NAME_OR_EMAIL: username,
        # hashlib.md5() needs bytes; send the hex digest instead of the hash object
        constants.PROPERTY_EMBY_CONNECT_AUTHENTICATION_PASSWORD: hashlib.md5(password.encode('utf-8')).hexdigest(),  # nosec
    }

    resultObj = Request.PostAsJson(url, headers=headers, json=body)
    if not resultObj or \
       constants.PROPERTY_EMBY_CONNECT_AUTHENTICATION_ACCESS_TOKEN not in resultObj or \
       constants.PROPERTY_EMBY_CONNECT_AUTHENTICATION_USER not in resultObj:
        log('invalid response from {}: {}'.format(url, resultObj))
        return None

    userObj = resultObj.get(constants.PROPERTY_EMBY_CONNECT_AUTHENTICATION_USER)
    if constants.PROPERTY_EMBY_CONNECT_AUTHENTICATION_USER_ID not in userObj:
        log('invalid response from {}: {}'.format(url, resultObj))
        return None

    return EmbyConnect.AuthenticationResult(
        accessToken=resultObj.get(constants.PROPERTY_EMBY_CONNECT_AUTHENTICATION_ACCESS_TOKEN),
        userId=userObj.get(constants.PROPERTY_EMBY_CONNECT_AUTHENTICATION_USER_ID)
    )
def _media_listing(media_type):
    log('Using JSON for retrieving %s info' % media_type)
    Medialist = []
    if media_type == 'movie':
        json_query = xbmc.executeJSONRPC('{"jsonrpc": "2.0", "method": "VideoLibrary.GetMovies", "params": {"properties": ["file", "imdbnumber", "year", "trailer", "streamdetails", "art"], "sort": { "method": "label" } }, "id": 1}')
        json_query = unicode(json_query, 'utf-8', errors='ignore')
        jsonobject = simplejson.loads(json_query)
        if jsonobject.has_key('result') and jsonobject['result'].has_key('movies'):
            for item in jsonobject['result']['movies']:
                # disctype = media_disctype(item.get('file','').encode('utf-8').lower(),
                #                           item['streamdetails']['video'])
                Medialist.append({'dbid': item.get('movieid',''),
                                  'id': item.get('imdbnumber',''),
                                  'name': item.get('label',''),
                                  'year': item.get('year',''),
                                  'file': item.get('file',''),
                                  'path': media_path(item.get('file','')),
                                  'trailer': item.get('trailer',''),
                                  # 'disctype': disctype,
                                  'art': item.get('art',''),
                                  'mediatype': media_type})
    else:
        log('No JSON results found')
    return Medialist
def _handleRequestAsJson(result, requestType):
    if not isinstance(result, requests.Response):
        raise ValueError('invalid result: {}'.format(result))
    if not requestType:
        raise ValueError('invalid requestType')

    if not Request._handleRequest(result, requestType):
        return None

    if not result.content:
        return None

    try:
        resultObj = result.json()
        if resultObj:
            return resultObj

        log('invalid response from {} {}'.format(requestType, result.url), xbmc.LOGERROR)
    except ValueError as err:
        log('response from {} {} is not a JSON object: {}'.format(requestType, result.url, err),
            xbmc.LOGERROR)

    return None
def _authenticate(self):
    if not self.Authenticate():
        log('user authentication failed on media provider {}'.format(self._id))
        return False

    return True
def gui_imagelist(image_list, art_type):
    log('- Retrieving image list for GUI')
    filteredlist = []
    # retrieve list
    for artwork in image_list:
        if art_type in artwork['art_type']:
            filteredlist.append(artwork)
    return filteredlist
def _copyfile(self, sourcepath, targetpath, media_name=''):
    targetdir = os.path.dirname(targetpath).encode("utf-8")
    if not self._exists(targetdir):
        if not self._mkdir(targetdir):
            raise CreateDirectoryError(targetdir)
    if not self._copy(sourcepath, targetpath):
        raise CopyError(targetpath)
    else:
        log("[%s] Copied successfully: %s" % (media_name, targetpath))
def log_success():
    """ Print giant ascii art and open web browser with a song. Troll responsibly. """
    log(ART, "green")
    log("Your application is deployed. Time to celebrate.")
    song = random.choice(list(SONGS.values()))
    webbrowser.open(song, new=True)
def _delete_file_in_dirs(self, filename, targetdirs, reason, media_name=''):
    isdeleted = False
    for targetdir in targetdirs:
        path = os.path.join(targetdir, filename)
        if self._exists(path):
            self._delete(path)
            log("[%s] Deleted (%s): %s" % (media_name, reason, path))
            isdeleted = True
    if not isdeleted:
        log("[%s] Ignoring (%s): %s" % (media_name, reason, filename))
def createdb():
    """ Creates local database for project """
    log("creating database")
    local('createdb {0}'.format(project_name))
    if settings.USE_POSTGIS:
        local('echo "CREATE EXTENSION postgis;" | psql {0}'.format(project_name))
def __init__(self):
    log("Setting up fileops")
    self._exists = lambda path: xbmcvfs.exists(path)
    self._rmdir = lambda path: xbmcvfs.rmdir(path)
    self._mkdir = lambda path: xbmcvfs.mkdir(path)
    self._delete = lambda path: xbmcvfs.delete(path)
    self.downloadcount = 0
    if not self._exists(tempdir):
        if not self._exists(__addonprofile__):
            if not self._mkdir(__addonprofile__):
                raise CreateDirectoryError(__addonprofile__)
        if not self._mkdir(tempdir):
            raise CreateDirectoryError(tempdir)
def choice_type(enabled_type_list, startup, artype_list):
    # Send the image type list to the selection dialog
    select = xbmcgui.Dialog().select(__addonname__ + ': ' + __localize__(32015), enabled_type_list)
    # When nothing is selected from the dialog
    if select == -1:
        log('### Canceled by user')
        return False
    # If some selection was made
    else:
        # Check what artwork type has been chosen and parse the image restraints
        for item in artype_list:
            if enabled_type_list[select] == __localize__(item['gui_string']) and startup['mediatype'] == item['media_type']:
                return item
        else:
            return False
def destroy():
    """ destroys the database and django project. Be careful! """
    log("You are about to mothball this entire project.\n", "red")
    log("Please type the project name to destroy it: ", "red")
    log("'opd_use_of_force'\n")
    answer = raw_input("> ")
    if (answer == 'opd_use_of_force'):
        dropdb()
        local('cd .. && rm -rf {0}'.format(project_name))
        log("opd_use_of_force is no more. See you later!\n", "green")
    else:
        log("You didn't type 'opd_use_of_force' correctly. Exiting.")
def dumpdata(app_name):
    """ Dump data of an app in JSON format and store in the fixtures directory """
    if app_name != '':
        fixtures_dir = os.path.join(settings.ROOT_DIR, app_name, 'fixtures')
        if not os.path.exists(fixtures_dir):
            os.makedirs(fixtures_dir)
        local("python manage.py dumpdata {0} > {1}/{2}.json".format(app_name, fixtures_dir, app_name))
    else:
        log("please specify an app name", "red")
def check():
    setting = get()
    settings_faulty = True
    while settings_faulty:
        settings_faulty = True
        check_movie = check_tvshow = check_musicvideo = check_centralize = True
        # re-check settings after possible change
        setting = get()
        # Check if faulty setting in movie section
        if setting.get('movie_enable'):
            if not setting.get('movie_poster') and not setting.get('movie_fanart') and not setting.get('movie_extrafanart') and not setting.get('movie_extrathumbs') and not setting.get('movie_logo') and not setting.get('movie_clearart') and not setting.get('movie_discart') and not setting.get('movie_landscape') and not setting.get('movie_banner'):
                check_movie = False
                log('Setting check: No subsetting of movies enabled')
            else:
                check_movie = True
        # Check if faulty setting in tvshow section
        if setting.get('tvshow_enable'):
            if not setting.get('tvshow_poster') and not setting.get('tvshow_seasonposter') and not setting.get('tvshow_fanart') and not setting.get('tvshow_extrafanart') and not setting.get('tvshow_clearart') and not setting.get('tvshow_characterart') and not setting.get('tvshow_logo') and not setting.get('tvshow_showbanner') and not setting.get('tvshow_seasonbanner') and not setting.get('tvshow_landscape') and not setting.get('tvshow_seasonlandscape'):
                check_tvshow = False
                log('Setting check: No subsetting of tv shows enabled')
            else:
                check_tvshow = True
        # Check if faulty setting in musicvideo section
        if setting.get('musicvideo_enable'):
            if not setting.get('musicvideo_poster') and not setting.get('musicvideo_fanart') and not setting.get('musicvideo_extrafanart') and not setting.get('musicvideo_extrathumbs') and not setting.get('musicvideo_logo') and not setting.get('musicvideo_clearart') and not setting.get('musicvideo_discart'):
                check_musicvideo = False
                log('Setting check: No subsetting of musicvideo enabled')
            else:
                check_musicvideo = True
        # Check if faulty setting in centralize section
        if setting.get('centralize_enable'):
            if setting.get('centralfolder_movies') == '' and setting.get('centralfolder_tvshows') == '':
                check_centralize = False
                log('Setting check: No centralized folder chosen')
            else:
                check_centralize = True
        # Compare all setting checks
        if check_movie and check_tvshow and check_musicvideo and check_centralize:
            settings_faulty = False
        else:
            settings_faulty = True
        # Faulty setting found
        if settings_faulty:
            log('Faulty setting combination found')
            # when faulty setting detected ask to open the settings window
            if dialog_msg('yesno',
                          line1=__localize__(32003),
                          line2=__localize__(32004),
                          background=False,
                          nolabel=__localize__(32026),
                          yeslabel=__localize__(32025)):
                __addon__.openSettings()
            # if not, cancel the script
            else:
                return False
        else:
            return True
def loaddata(app_name):
    """ Load the data of an app in JSON format from the fixtures directory """
    if app_name != '':
        fixtures_dir = os.path.join(settings.ROOT_DIR, app_name, 'fixtures', "{0}.json".format(app_name))
        local("python manage.py loaddata {0}".format(fixtures_dir))
    else:
        log("please specify an app name", "red")
def publish(bucket='staging', dryrun='False'):
    """
    usage: fab publish:dryrun=[False | True], bucket=['staging' | 'production']

    DEFAULT: Compress, build and deploy project to staging bucket on Amazon S3.
    Pass dryrun=True to skip publishing the assets to S3.
    """
    should_we_publish = True if dryrun == 'False' else False
    reset()
    compress()
    build()
    settings.USE_GRUNT and grunt_build()
    if should_we_publish:
        log('\nPublishing ...\n')
        deploy_to_s3(bucket)
    else:
        log('\nBuild is complete but no assets were published to AWS S3\n')
def erase_current_cache(self, filename):
    try:
        cached_thumb = self.get_cached_thumb(filename)
        log("Cache file %s" % cached_thumb)
        if xbmcvfs.exists(cached_thumb.replace("png", "dds").replace("jpg", "dds")):
            xbmcvfs.delete(cached_thumb.replace("png", "dds").replace("jpg", "dds"))
        copy = xbmcvfs.copy(filename, cached_thumb)
        if copy:
            log("Cache successful")
        else:
            log("Failed to copy to cached thumb")
    except:
        print_exc()
        log("Cache erasing error")
def cleanup():
    if xbmcvfs.exists(tempdir):
        dialog_msg('update', percentage=100, line1=__localize__(32005), background=__addon__.getSetting('background'))
        log('Cleaning up temp files')
        for x in os.listdir(tempdir):
            tempfile = os.path.join(tempdir, x)
            xbmcvfs.delete(tempfile)
            if xbmcvfs.exists(tempfile):
                log('Error deleting temp file: %s' % tempfile, xbmc.LOGERROR)
        xbmcvfs.rmdir(tempdir)
        if xbmcvfs.exists(tempdir):
            log('Error deleting temp directory: %s' % tempdir, xbmc.LOGERROR)
        else:
            log('Deleted temp directory: %s' % tempdir)
def handle(self, *args, **options):
    data = os.path.join(settings.BASE_DIR, "data")
    files = list(all_files(data, "*.csv"))
    for filepath in files:
        log("Opening file {}\n".format(filepath), "cyan")
        log(" Loading data ...\n")
        copy = CopyMapping(
            Incident,
            filepath,
            dict(
                date="IncidentDate",
                year="Year",
                raw_location="Location",
                address="Edited Street Address",
                city_and_state="City and State",
                full_address="Edited Full Address",
                latitude="Latitude",
                longitude="Longitude",
                accuracy_score="Accuracy Score",
                accuracy_type="Accuracy Type",
                number="Number",
                street="Street",
                city="City",
                state="State",
                county="County",
                zipcode="Zip",
            ),
        )
        copy.save()
        log(" Data loaded!\n", "green")
def onClick(self, controlID):
    log('# GUI control: %s' % controlID)
    if controlID == 6 or controlID == 3:
        num = self.img_list.getSelectedPosition()
        log('# GUI position: %s' % num)
        self.selected_id = self.img_list.getSelectedItem().getLabel2()
        log('# GUI selected image ID: %s' % self.selected_id)
        self.close()
def privmsg(self, hostmask, channel, msg):
    user = hostmask.split("!", 1)[0]
    alias = self.factory.alias.resolve(user)
    channel = channel if channel != self.nickname else user
    if self.factory.config.mad and "rhe" in msg.lower():
        shutup = "SHUT UP {}".format(user.upper())
        luck = random.randint(0, 9)
        if luck == 0:
            self.kickban(channel, user, shutup)
        elif luck < 3:
            self.kick(channel, user, shutup)
        else:
            self.msg(channel, shutup)
        return
    if not msg.startswith("."):
        # not a trigger command
        if alias not in self.factory.config.markov_banned:
            self.factory.markov.learn(alias, msg, channel)
        return  # do nothing
    command, sep, rest = msg.lstrip(".").partition(" ")
    command, msg, reverse = command.lower(), filter(lambda x: x, rest.split(" ")), False
    if command not in self.factory.pluginmanager.plugins and command[:2] == "un" and command[2:] in self.factory.pluginmanager.plugins:
        command = command[2:]
        reverse = True
        if not self.factory.pluginmanager.plugins[command]["reversible"]:
            return
    permissions = self.getPermissions(alias)
    if command in self.factory.pluginmanager.plugins and self.factory.pluginmanager.plugins[command]["access"] in permissions:
        log(user, channel, command, msg, reverse)
        if not self.factory.pluginmanager.plugins[command]["reversible"]:
            self.factory.pluginmanager.plugins[command]["command"](self, user, channel, msg)
        else:
            self.factory.pluginmanager.plugins[command]["command"](self, user, channel, msg, reverse)
    else:
        alias = self.factory.alias.resolve(command)
        if alias in self.factory.markov.users and "markov" in self.factory.pluginmanager.plugins:
            msg = [alias] + msg
            self.factory.pluginmanager.plugins["markov"]["command"](self, user, channel, msg)
def bootstrap():
    """ DEFAULT: Run commands to set up a new project """
    try:
        local("pip install -r requirements/base.txt")
        local("pip install -r requirements/python2.txt")
        createdb()  # create postgis database
        local("python manage.py migrate")
        log("Success! Now run `fab rs` to start the development server", "green")
    except Exception, e:
        log("Uh oh! Something went wrong. Double check your settings. Error:", "red")
        raise e
def autostart():
    xbmcaddon.Addon().setSetting(id="files_overwrite", value='false')
    tempdir = os.path.join(__addonprofile__, 'temp')
    service_runtime = str(setting.get('service_runtime') + ':00')
    log('## Service - Run at startup: %s' % setting.get('service_startup'), xbmc.LOGNOTICE)
    log('## Service - Delayed startup: %s minutes' % setting.get('service_startupdelay'), xbmc.LOGNOTICE)
    log('## Service - Run as service: %s' % setting.get('service_enable'), xbmc.LOGNOTICE)
    log('## Service - Time: %s' % service_runtime, xbmc.LOGNOTICE)
    log("##########........................")
    # Check if tempdir exists and remove it
    if xbmcvfs.exists(tempdir):
        xbmcvfs.rmdir(tempdir)
        log('Removing temp folder from previous aborted run.')
        xbmc.sleep(5000)
    # Run script when enabled and check on existence of tempdir.
    # This because it is possible that script was running even when we previously deleted it.
    # Could happen when switching profiles and service gets triggered again
    if setting.get('service_startup') and not xbmcvfs.exists(tempdir):
        xbmc.executebuiltin('XBMC.AlarmClock(ArtworkDownloader,XBMC.RunScript(script.artwork.downloader,silent=true),00:%s:15,silent)' % setting.get('service_startupdelay'))
    if setting.get('service_enable'):
        while (not xbmc.abortRequested):
            xbmc.sleep(5000)
            if not(time.strftime('%H:%M') == service_runtime):
                pass
            else:
                if not xbmcvfs.exists(tempdir):
                    log('Time is %s:%s, Scheduled run starting' % (time.strftime('%H'), time.strftime('%M')))
                    xbmc.executebuiltin('XBMC.RunScript(script.artwork.downloader,silent=true)')
                    # Because we now use the commoncache module the script is run so fast it is possible it is started twice
                    # within the one minute window. So keep looping until it goes out of that window
                    while (not xbmc.abortRequested and time.strftime('%H:%M') == service_runtime):
                        xbmc.sleep(5000)
                else:
                    log('Addon already running, scheduled run aborted', xbmc.LOGNOTICE)
def _media_listing(media_type):
    log('Using JSON for retrieving %s info' % media_type)
    Medialist = []
    if media_type == 'tvshow':
        json_query = xbmc.executeJSONRPC('{"jsonrpc": "2.0", "method": "VideoLibrary.GetTVShows", "params": {"properties": ["file", "imdbnumber", "art"], "sort": { "method": "label" } }, "id": 1}')
        json_query = unicode(json_query, 'utf-8', errors='ignore')
        jsonobject = simplejson.loads(json_query)
        if jsonobject.has_key('result') and jsonobject['result'].has_key('tvshows'):
            for item in jsonobject['result']['tvshows']:
                # Search for season information
                json_query_season = xbmc.executeJSONRPC('{"jsonrpc": "2.0", "method": "VideoLibrary.GetSeasons", "params": {"properties": ["season", "art"], "sort": { "method": "label" }, "tvshowid":%s }, "id": 1}' % item.get('tvshowid',''))
                jsonobject_season = simplejson.loads(json_query_season)
                # Get start/end and total seasons
                if jsonobject_season['result'].has_key('limits'):
                    season_limit = jsonobject_season['result']['limits']
                # Get the season numbers
                seasons_list = {}
                if jsonobject_season['result'].has_key('seasons'):
                    seasons = jsonobject_season['result']['seasons']
                    for season in seasons:
                        seasons_list[season.get('season')] = season.get('seasonid')
                Medialist.append({'id': item.get('imdbnumber',''),
                                  'dbid': item.get('tvshowid',''),
                                  'name': item.get('label',''),
                                  'base_name': media_path(item.get('file','')),
                                  'path': media_path(item.get('file','')),
                                  'seasontotal': season_limit.get('total',''),
                                  'seasonstart': season_limit.get('start',''),
                                  'seasonend': season_limit.get('end',''),
                                  'seasons': seasons_list,
                                  'disctype': 'n/a',
                                  'art': item.get('art',''),
                                  'mediatype': media_type})
    elif media_type == 'movie':
        json_query = xbmc.executeJSONRPC('{"jsonrpc": "2.0", "method": "VideoLibrary.GetMovies", "params": {"properties": ["file", "imdbnumber", "year", "trailer", "streamdetails", "art"], "sort": { "method": "label" } }, "id": 1}')
        json_query = unicode(json_query, 'utf-8', errors='ignore')
        jsonobject = simplejson.loads(json_query)
        if jsonobject.has_key('result') and jsonobject['result'].has_key('movies'):
            for item in jsonobject['result']['movies']:
                imdbnumber = item.get('imdbnumber','')
                if imdbnumber in ['', 'tt0000000', '0']:
                    from lib.provider import tmdb  # import on behalf of searching when there's no ID
                    log('No valid ID found, trying to search themoviedb.org for matching title.')
                    imdbnumber = tmdb._search_movie(item.get('label',''), item.get('year',''))
                disctype = media_disctype(item.get('file','').encode('utf-8').lower(), item['streamdetails']['video'])
                Medialist.append({'dbid': item.get('movieid',''),
                                  'id': imdbnumber,
                                  'name': item.get('label',''),
                                  'year': item.get('year',''),
                                  'file': item.get('file',''),
                                  'base_name': base_name(item.get('file','')),
                                  'path': media_path(item.get('file','')),
                                  'trailer': item.get('trailer',''),
                                  'disctype': disctype,
                                  'art': item.get('art',''),
                                  'mediatype': media_type})
    elif media_type == 'musicvideo':
        json_query = xbmc.executeJSONRPC('{"jsonrpc": "2.0", "method": "VideoLibrary.GetMusicVideos", "params": {"properties": ["file", "artist", "album", "track", "runtime", "year", "genre", "art"], "sort": { "method": "album" } }, "id": 1}')
        json_query = unicode(json_query, 'utf-8', errors='ignore')
        jsonobject = simplejson.loads(json_query)
        if jsonobject.has_key('result') and jsonobject['result'].has_key('musicvideos'):
            for item in jsonobject['result']['musicvideos']:
                Medialist.append({'dbid': item.get('musicvideoid',''),
                                  'id': '',
                                  'name': item.get('label',''),
                                  'artist': item.get('artist',''),
                                  'album': item.get('album',''),
                                  'track': item.get('track',''),
                                  'runtime': item.get('runtime',''),
                                  'year': item.get('year',''),
                                  'base_name': base_name(item.get('file','')),
                                  'path': media_path(item.get('file','')),
                                  'disctype': 'n/a',
                                  'art': item.get('art',''),
                                  'mediatype': media_type})
    else:
        log('No JSON results found')
    return Medialist
def s3deploy():
    """Deploy build directory to S3 using aws cli"""
    # using aws cli since boto is busted with buckets that have periods (.) in the name
    local('cd {} && aws s3 cp --recursive --acl public-read build/ s3://{}/{}'.format(
        settings.BASE_DIR, AWS_BUCKET_NAME, VERBOSE_APP_NAME))
    log('Deployed! visit http://{}/{}/\n'.format(AWS_BUCKET_NAME, VERBOSE_APP_NAME), 'green')
def _get_or_create_panel(self, window, name):
    try:
        return window.get_output_panel(name)
    except AttributeError:
        log("Couldn't get output panel.")
        return window.create_output_panel(name)
def execute(args, message='', path=None, cwd=None):
    # This is needed for Windows... not sure why. See:
    # https://github.com/surjikal/sublime-coffee-compile/issues/13
    if path:
        log('Path:')
        log("\n".join(path))
        path = os.pathsep.join(path)
        if PLATFORM_IS_WINDOWS:
            log('Platform is Windows!')
            os.environ['PATH'] = path
            path = None

    env = {'PATH': path} if path else None

    log('Env:')
    log(env)
    log('Args:')
    log(args)

    process = subprocess.Popen(args,
                               stdin=subprocess.PIPE,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.STDOUT,
                               startupinfo=_get_startupinfo(),
                               env=env,
                               cwd=cwd)
    output, error = process.communicate(message)
    if output:
        output = output.decode('utf8')
        output = output.strip()
    return (output, error)
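# Hypothetical usage of execute() above (argument values are illustrative assumptions,
# not taken from the original code): compile a CoffeeScript snippet piped in on stdin.
output, error = execute(
    ['coffee', '--compile', '--stdio'],   # assumed location/flags of the coffee binary
    message='console.log "hello"',
    path=['/usr/local/bin'])
log(output)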
def _media_unique(media_type, dbid):
    log('Using JSON for retrieving %s info' % media_type)
    Medialist = []
    if media_type == 'tvshow':
        json_query = xbmc.executeJSONRPC('{"jsonrpc": "2.0", "method": "VideoLibrary.GetTVShowDetails", "params": {"properties": ["file", "imdbnumber", "art"], "tvshowid":%s}, "id": 1}' % dbid)
        json_query = unicode(json_query, 'utf-8', errors='ignore')
        jsonobject = simplejson.loads(json_query)
        if jsonobject.has_key('result') and jsonobject['result'].has_key('tvshowdetails'):
            item = jsonobject['result']['tvshowdetails']
            # Search for season information
            json_query_season = xbmc.executeJSONRPC('{"jsonrpc": "2.0", "method": "VideoLibrary.GetSeasons", "params": {"properties": ["season", "art"], "sort": { "method": "label" }, "tvshowid":%s }, "id": 1}' % item.get('tvshowid',''))
            jsonobject_season = simplejson.loads(json_query_season)
            # Get start/end and total seasons
            if jsonobject_season['result'].has_key('limits'):
                season_limit = jsonobject_season['result']['limits']
            # Get the season numbers
            seasons_list = []
            if jsonobject_season['result'].has_key('seasons'):
                seasons = jsonobject_season['result']['seasons']
                for season in seasons:
                    seasons_list.append(season.get('season'))
            Medialist.append({'id': item.get('imdbnumber',''),
                              'dbid': item.get('tvshowid',''),
                              'name': item.get('label',''),
                              'path': media_path(item.get('file','')),
                              'seasontotal': season_limit.get('total',''),
                              'seasonstart': season_limit.get('start',''),
                              'seasonend': season_limit.get('end',''),
                              'seasons': seasons_list,
                              'art': item.get('art',''),
                              'mediatype': media_type})
    elif media_type == 'movie':
        json_query = xbmc.executeJSONRPC('{"jsonrpc": "2.0", "method": "VideoLibrary.GetMovieDetails", "params": {"properties": ["file", "imdbnumber", "year", "trailer", "streamdetails", "art"], "movieid":%s }, "id": 1}' % dbid)
        json_query = unicode(json_query, 'utf-8', errors='ignore')
        jsonobject = simplejson.loads(json_query)
        if jsonobject.has_key('result') and jsonobject['result'].has_key('moviedetails'):
            item = jsonobject['result']['moviedetails']
            disctype = media_disctype(item.get('file','').encode('utf-8').lower(), item['streamdetails']['video'])
            streamdetails = item['streamdetails']['video']
            Medialist.append({'dbid': item.get('movieid',''),
                              'id': item.get('imdbnumber',''),
                              'name': item.get('label',''),
                              'year': item.get('year',''),
                              'file': item.get('file',''),
                              'path': media_path(item.get('file','')),
                              'trailer': item.get('trailer',''),
                              'disctype': disctype,
                              'art': item.get('art',''),
                              'mediatype': media_type})
    elif media_type == 'musicvideo':
        # GetMusicVideoDetails takes a "musicvideoid" parameter, not "movieid"
        json_query = xbmc.executeJSONRPC('{"jsonrpc": "2.0", "method": "VideoLibrary.GetMusicVideoDetails", "params": {"properties": ["file", "artist", "album", "track", "runtime", "year", "genre", "art"], "musicvideoid":%s }, "id": 1}' % dbid)
        json_query = unicode(json_query, 'utf-8', errors='ignore')
        jsonobject = simplejson.loads(json_query)
        if jsonobject.has_key('result') and jsonobject['result'].has_key('musicvideodetails'):
            item = jsonobject['result']['musicvideodetails']
            Medialist.append({'dbid': item.get('musicvideoid',''),
                              'id': '',
                              'name': item.get('label',''),
                              'artist': item.get('artist',''),
                              'album': item.get('album',''),
                              'track': item.get('track',''),
                              'runtime': item.get('runtime',''),
                              'year': item.get('year',''),
                              'path': media_path(item.get('file','')),
                              'art': item.get('art',''),
                              'mediatype': media_type})
    else:
        log('No JSON results found')
    return Medialist
def deploy_to_s3(bucket='staging'):
    """
    Deploy `build` directory to specified S3 bucket. Defaults to 'staging'.
    """
    # See: https://gist.github.com/SavvyGuard/6115006
    def percent_cb(complete, total):
        sys.stdout.write('.')
        sys.stdout.flush()

    # max size in bytes for uploading in parts. between 1 and 5 GB recommended
    MAX_SIZE = 20 * 1000 * 1000
    # size of parts when uploading in parts
    PART_SIZE = 6 * 1000 * 1000

    # paths
    dest_dir = VERBOSE_APP_NAME
    app_directory = BUILD_DIR
    source_dir = settings.STATIC_ROOT
    upload_file_names = []
    app_directory_file_names = []

    # Grab files
    for dir_, _, files in os.walk(source_dir):
        for filename in files:
            relative_directory = os.path.relpath(dir_, source_dir)
            relative_file = os.path.join(relative_directory, filename)
            upload_file_names.append(relative_file)

    for (app_directory, dirname, filename) in os.walk(app_directory):
        app_directory_file_names.extend(filename)

    # Upload static media
    for filename in upload_file_names:
        source_path = os.path.join(settings.STATIC_ROOT, filename)
        dest_path = os.path.join(dest_dir, filename)
        log("\n Uploading {0} to bucket {1}\n".format(source_path, AWS_MEDIA_BUCKET_NAME))
        filesize = os.path.getsize(source_path)
        if filesize > MAX_SIZE:
            log(" Large file. Running multipart upload")
            # start a multipart upload on the media bucket; the bucket object itself
            # has no upload_part_from_file method
            mp = s3_media_bucket.initiate_multipart_upload(dest_path)
            fp = open(source_path, 'rb')
            fp_num = 0
            while (fp.tell() < filesize):
                fp_num += 1
                log("\n uploading part %i" % fp_num)
                mp.upload_part_from_file(fp, fp_num, cb=percent_cb, num_cb=10, size=PART_SIZE)
            mp.complete_upload()
        else:
            log("\n Running upload\n")
            k = Key(s3_media_bucket)
            k.key = dest_path
            k.set_contents_from_filename(source_path, cb=percent_cb, num_cb=10)
            k.make_public()

    # Upload build files
    for filename in app_directory_file_names:
        source_path = os.path.join(BUILD_DIR, filename)
        dest_path = os.path.join(dest_dir, filename)
        if bucket == 'staging':
            k = Key(s3_staging_bucket)
        elif bucket == 'production':
            k = Key(s3_bucket)
        else:
            log("Specify `staging` or `production`. Exiting ..", "red")
            sys.exit()
        k.key = dest_path
        k.set_contents_from_filename(source_path, cb=percent_cb, num_cb=10)
        k.make_public()

    # Celebrate when pushed to production
    log_success() if bucket == 'production' else ''
import os
import sys

from lib.utils import log
from .other import log_success

try:
    import boto
    from boto.s3.key import Key
except ImportError:
    log("Remember to install boto before deploying", "red")

from fabric.api import *
from fabric.contrib import django

django.settings_module("opd_use_of_force.settings")
from django.conf import settings
from opd_use_of_force.settings.production import (
    AWS_BUCKET_NAME,
    AWS_MEDIA_BUCKET_NAME,
    AWS_STAGING_BUCKET_NAME,
    VERBOSE_APP_NAME,
    BUILD_DIR
)

"""
Deployment Tasks
================
"""
        try:
            tempfile = open(temppath, "wb")
            response = urllib2.urlopen(item['url'])
            tempfile.write(response.read())
            tempfile.close()
            response.close()
        except HTTPError, e:
            if e.code == 404:
                raise HTTP404Error(item['url'])
            else:
                raise DownloadError(str(e))
        except URLError:
            raise HTTPTimeout(item['url'])
        except socket.timeout, e:
            raise HTTPTimeout(item['url'])
        except Exception, e:
            log(str(e), xbmc.LOGNOTICE)
        else:
            log("[%s] Downloaded: %s" % (item['media_name'], item['filename']))
            self.downloadcount += 1
            for targetdir in item['targetdirs']:
                # targetpath = os.path.join(urllib.url2pathname(targetdir).replace('|',':'), filename)
                targetpath = os.path.join(targetdir, item['filename'])
                self._copyfile(temppath, targetpath, item['media_name'])