def __init__(self, server, device=None, name=None):
    """Bind an astrometry wrapper to an INDI device on *server*.

    Args:
        server: INDI server wrapper; provides ``client`` and its device list.
        device: optional raw INDI device to wrap directly.
        name: optional device name to look up on the client.

    Raises:
        NotFoundError: if no usable astrometry device can be resolved.
    """
    self.server = server
    self.client = server.client
    # Default so the NotFoundError check below works even when neither
    # *device* nor *name* is given (original raised AttributeError there).
    self.device = None
    if device:
        self.device = Device(self.client, logger, device)
    elif name:
        # BUG FIX: the original assigned the whole filtered *list* to
        # self.device; take the first matching device instead.
        matches = [c for c in self.client.devices() if c.name == name]
        self.device = matches[0] if matches else None
    if not self.device:
        raise NotFoundError('Astrometry device not found: {}'.format(name))
    self.event_listener = AstrometryEventListener(self.device)
def _exception_for_response(self, response):
    """Map an HTTP error *response* to the matching API exception instance."""
    status = response.status_code
    reason = response.reason
    if status == 404:
        return NotFoundError(reason)
    if status == 400 and "OAuthException" in response.text:
        return InvalidAccessTokenError(reason)
    if status == 401:
        return UnauthorizedError(reason)
    if status == 429:
        return RateLimitExceededError(reason)
    # Anything else falls back to a generic response error.
    return ResponseError("%d error: %s" % (status, reason,))
def __init__(self, client, logger, device=None, filter_wheel=None):
    """Bind this wrapper to an existing *device* or *filter_wheel*.

    One of *device* / *filter_wheel* is expected; the missing counterpart
    is resolved from the one provided.

    Raises:
        NotFoundError: if *device* has no matching filter wheel on the client.
    """
    self.client = client
    self.logger = logger
    if device:
        self.device = device
        matches = [
            w for w in self.client.filter_wheels() if w.name == device.name
        ]
        if not matches:
            raise NotFoundError('FilterWheel {} not found'.format(device.name))
        self.filter_wheel = matches[0]
    elif filter_wheel:
        self.filter_wheel = filter_wheel
        self.device = Device(client, logger, name=filter_wheel.name)
def create(session: Session, steam_id: int) -> "Dota":
    """Create a Dota record for *steam_id* after verifying a public OpenDota profile.

    Raises:
        AlreadyExistingError: if a Dota row for *steam_id* already exists.
        NotFoundError: if the player never played Dota or hides match history.
    """
    existing = session.query(Dota).get(steam_id)
    if existing is not None:
        raise AlreadyExistingError(repr(existing))
    response = requests.get(
        f"https://api.opendota.com/api/players/{Steam.to_steam_id_3(steam_id)}"
    )
    response.raise_for_status()
    # OpenDota omits the "profile" key for unknown/private players.
    if "profile" not in response.json():
        raise NotFoundError(
            "The specified user has never played Dota or has a private match history"
        )
    record = Dota(steam_id=str(steam_id))
    record.update()
    return record
def import_event_json(zip_path):
    """ Imports and creates event from json zip

    Extracts the zip into a temp folder, creates the event from
    ``event.json``, then creates each dependent service listed in
    IMPORT_SERIES. Any service failure deletes the partially-created
    event before re-raising.
    """
    global CUR_ID
    path = 'static/temp/import_event'
    # delete existing files
    if os.path.isdir(path):
        shutil.rmtree(path, ignore_errors=True)
    # extract files from zip
    with zipfile.ZipFile(zip_path, "r") as z:
        z.extractall('static/temp/import_event')
    # create event
    try:
        data = json.loads(open(path + '/event.json', 'r').read())
        # Strip ids so the DAO assigns fresh ones on create.
        _, data = _trim_id(data)
        data = _delete_fields(('event', EventDAO), data)
        new_event = EventDAO.create(data, 'dont')[0]
    except BaseError as e:
        raise make_error('event', er=e)
    except Exception:
        raise make_error('event')
    # create other services
    try:
        service_ids = {}
        for item in IMPORT_SERIES:
            data = open(path + '/%s.json' % item[0], 'r').read()
            dic = json.loads(data)
            # Remember old->new id mapping so later services can remap refs.
            changed_ids = create_service_from_json(dic, item, new_event.id,
                                                   service_ids)
            service_ids[item[0]] = changed_ids.copy()
            # Reset the module-level failure marker after each service.
            CUR_ID = None
    except BaseError as e:
        # Roll back the half-imported event before reporting the error.
        EventDAO.delete(new_event.id)
        raise make_error(item[0], er=e, id_=CUR_ID)
    except IOError:
        EventDAO.delete(new_event.id)
        raise NotFoundError('File %s.json missing in event zip' % item[0])
    except ValueError:
        EventDAO.delete(new_event.id)
        raise make_error(item[0], er=ServerError('Invalid json'))
    except Exception:
        EventDAO.delete(new_event.id)
        raise make_error(item[0], id_=CUR_ID)
    # return
    return new_event
def create(session: Session, royal_username, telegram_user: TelegramUser):
    """Link *telegram_user* to the Royal identified by *royal_username*.

    Raises:
        AlreadyExistingError: if the telegram id, or the Royal, is already linked.
        NotFoundError: if no Royal with that username exists.
    """
    existing = session.query(Telegram).filter_by(
        telegram_id=telegram_user.id).first()
    if existing is not None:
        raise AlreadyExistingError(repr(existing))
    royal = session.query(Royal).filter(
        Royal.username == royal_username).first()
    if royal is None:
        raise NotFoundError("No Royal exists with that username")
    linked = session.query(Telegram).filter(
        Telegram.royal_id == royal.id).first()
    if linked is not None:
        raise AlreadyExistingError(repr(linked))
    return Telegram(royal=royal,
                    telegram_id=telegram_user.id,
                    first_name=telegram_user.first_name,
                    last_name=telegram_user.last_name,
                    username=telegram_user.username)
def _exception_for_response(self, response):
    """Translate an HTTP error *response* into the appropriate exception object."""
    code = response.status_code
    if code == 404:
        return NotFoundError(response.reason)
    if code == 400 and 'OAuthException' in response.text:
        return InvalidAccessTokenError(response.reason)
    if code == 401:
        return UnauthorizedError(response.reason)
    if code == 403:
        return ForbiddenError(response.reason)
    if code == 429:
        return RateLimitExceededError(response.reason)
    # Unmapped status codes fall back to a generic error with the body attached.
    return ResponseError(u'{} error: {}\nresponse: {}'.format(
        code, response.reason, response.text,))
def __init__(self, settings, client, logger, device=None, camera=None):
    """Wrap an INDI camera, resolving the device/camera counterpart.

    One of *device* / *camera* is expected; the missing counterpart is
    resolved from the one provided.

    Raises:
        NotFoundError: if *device* has no matching camera on the client.
    """
    self.settings = settings
    self.client = client
    self.logger = logger
    self.images_db = camera_images_db
    if device:
        self.device = device
        matches = [c for c in self.client.cameras() if c.name == device.name]
        if not matches:
            raise NotFoundError('Camera {} not found'.format(device.name))
        self.camera = matches[0]
    elif camera:
        self.camera = camera
        self.device = Device(client, logger, name=camera.name)
def create(session: Session, royal_username, discord_user: DiscordUser):
    """Link *discord_user* to the Royal identified by *royal_username*.

    Raises:
        AlreadyExistingError: if the discord id, or the Royal, is already linked.
        NotFoundError: if no Royal with that username exists.
    """
    existing = session.query(Discord).filter(
        Discord.discord_id == discord_user.id).first()
    if existing is not None:
        raise AlreadyExistingError(repr(existing))
    royal = session.query(Royal).filter(
        Royal.username == royal_username).first()
    if royal is None:
        raise NotFoundError("No Royal exists with that username")
    linked = session.query(Discord).filter(
        Discord.royal_id == royal.id).first()
    if linked is not None:
        raise AlreadyExistingError(repr(linked))
    return Discord(royal=royal,
                   discord_id=discord_user.id,
                   name=discord_user.name,
                   discriminator=discord_user.discriminator,
                   avatar_hex=discord_user.avatar)
def move_sequence_job(sequence_id, sequence_job_id, json):
    """Move a sequence job one position up or down within its sequence.

    Out-of-range moves (first job up, last job down) are silently ignored.

    Raises:
        NotFoundError: if the job id is not part of the sequence.
    """
    app.logger.info('moving sequence job {}: direction: {}'.format(
        sequence_job_id, json['direction']))
    with controller.sequences.lookup_edit(sequence_id) as sequence:
        positions = [
            pos for pos, job in enumerate(sequence.sequence_jobs)
            if job.id == sequence_job_id
        ]
        if not positions:
            raise NotFoundError(
                'Sequence job {} not found in sequence {}'.format(
                    sequence_job_id, sequence_id))
        current = positions[0]
        target = current - 1 if json['direction'] == 'up' else current + 1
        if 0 <= target < len(sequence.sequence_jobs):
            sequence.sequence_jobs.insert(
                target, sequence.sequence_jobs.pop(current))
        return sequence.to_map()
def preprocess_runtime(state, args):
    """Pre process well known runtime includes.

    Resolves and LITE-parses the well-known runtime headers, unless the
    compiler is building the run-time itself (``args.rt``).

    Args:
        state: compiler state; provides ``inclpath`` and is threaded through
            the header parser.
        args: parsed CLI arguments; a truthy ``rt`` skips preprocessing.

    Returns:
        The *state* object (unchanged reference).

    Raises:
        NotFoundError: if a well-known include file cannot be resolved.
    """
    # If not skipping run-time (used for compiler run-time)
    # preprocess well known headers
    if not args.rt:
        includes = ['foidlrt', 'langcore']
        inc_files = []
        for name in includes:
            header = _resolve_header(name, state.inclpath)
            if not header:
                raise NotFoundError(
                    "Unable to resolve include file {}".format(name))
            inc_files.append(header)
        # FIX: use a plain loop — the original list comprehension was run
        # purely for its side effects and its result list discarded.
        for header in inc_files:
            _IHDR_PARSE((header, state, ParseLevel.LITE))
    return state
def simbad_lookup(self, entry_name):
    """Query SIMBAD for *entry_name* and return decorated catalog entries.

    Raises:
        NotFoundError: if SIMBAD has no object with that name.
    """
    def sanitize_name(name):
        # Split on spaces, dropping empty tokens from repeated blanks.
        tokens = [t for t in name.split(' ') if t]
        category = tokens[0] if len(tokens) > 1 else 'N/A'
        if tokens[0].startswith('MCG'):
            category = 'MCG'
        if category == 'NAME':
            # 'NAME' prefixed identifiers carry the display name after it.
            tokens = tokens[1:]
        return category, ' '.join(tokens)

    simbad_objects = Simbad.query_object(entry_name)
    if not simbad_objects:
        raise NotFoundError(
            'Object with name {} not found in SIMBAD'.format(entry_name))
    results = []
    for entry in simbad_objects:
        coordinates = SkyCoord(ra=entry['RA_d'] * u.deg,
                               dec=entry['DEC_d'] * u.deg,
                               equinox='J2000')
        object_id = entry['MAIN_ID'].decode()
        catalog, object_name = sanitize_name(object_id)
        # Aliases come pipe-separated; skip the main identifier itself.
        aliases = [
            sanitize_name(alias)
            for alias in entry['IDS'].decode().split('|')
            if alias != object_id
        ]
        object_names = [{
            'catalog': alias_catalog,
            'name': alias_name
        } for alias_catalog, alias_name in aliases]
        results.append(
            self.__decorate_entry({
                'raj2000': coordinates.ra.deg,
                'dej2000': coordinates.dec.deg,
                'displayName': object_name,
                'objectNames': object_names,
                'catalog': catalog,
                'id': object_id,
            }))
    return results
def create(session: Session, royal_id: int, steam_id: str):
    """Create a Steam record for *royal_id* from the Steam Web API profile.

    Raises:
        AlreadyExistingError: if *steam_id* is already registered.
        NotFoundError: if the Steam API returns no player for *steam_id*.
    """
    s = session.query(Steam).get(steam_id)
    if s is not None:
        raise AlreadyExistingError(repr(s))
    r = requests.get(
        f"https://api.steampowered.com/ISteamUser/GetPlayerSummaries/v0002/"
        f"?key={config['Steam']['api_key']}&steamids={steam_id}")
    r.raise_for_status()
    j = r.json()
    # BUG FIX: an unknown steam id yields {"response": {"players": []}},
    # so the original `len(j) == 0` check could never trigger and the
    # players[0] access below raised IndexError instead of NotFoundError.
    players = j["response"]["players"]
    if not players:
        raise NotFoundError("The steam_id doesn't match any steam account")
    player = players[0]
    s = Steam(
        royal_id=royal_id,
        steam_id=steam_id,
        persona_name=player["personaname"],
        # Extract the avatar hash from the CDN URL.
        avatar_hex=re.search(
            r"https://steamcdn-a\.akamaihd\.net/steamcommunity/public/images/avatars/../"
            r"(.+).jpg", player["avatar"]).group(1))
    return s
def compute_include(self):
    """Resolve every header named in the bundle's include AST to a file path.

    Returns:
        list of resolved header file paths, in declaration order.

    Raises:
        NotFoundError: if any include cannot be found on the include paths.
    """
    include = self.bundle.ast.include

    def locate(fname):
        # Search each include path in order; first hit wins.
        for directory in self.bundle.inc_paths:
            candidate = util.file_exists(directory, fname, 'defs')
            if candidate:
                return candidate
        return None

    resolved = []
    for node in include.value:
        path = locate(node.value)
        if not path:
            raise NotFoundError("Unable to resolve include file {}".format(
                node.value))
        resolved.append(path)
    return resolved
def GetProduct(self, model):
    """Returns a copy of a product detail.

    Args:
        model: string, The sku / model of the product being retrieved.

    Returns:
        LazadaProduct, The product being searched.

    Raises:
        NotFoundError: The sku / model of the product is not in Lazada.
        MultipleResultsError: The sku / model is not unique in Lazada.
    """
    matches = [product for product in self._products if product.model == model]
    if not matches:
        raise NotFoundError('Not found in Lazada: %s' % model)
    if len(matches) > 1:
        raise MultipleResultsError('Multiple results in Lazada: %s' % model)
    # Deep-copy so callers cannot mutate the cached product.
    return copy.deepcopy(matches[0])
def _GetInventorySystemCacheItem(self, system, model):
    """Retrieves a single InventorySystemCacheItem.

    Args:
        system: string, The system code.
        model: string, The sku / model of the product being searched.

    Returns:
        InventorySystemCacheItem, The product being searched.

    Raises:
        NotFoundError: The sku / model of the product is not in the cached
            system database.
    """
    cursor = self._db_client.cursor()
    cursor.execute(
        """
        SELECT model, system, stocks, last_sync_batch_id
        FROM inventory_system_cache
        WHERE model=? AND system=?
        """, (model, system,))
    row = cursor.fetchone()
    if row is None:
        raise NotFoundError(
            'InventorySystemCacheItem not found: %s in %s' % (model, system,))
    model_value, system_value, stocks, batch_id = row
    return InventorySystemCacheItem(model=model_value,
                                    system=system_value,
                                    stocks=stocks,
                                    last_sync_batch_id=batch_id)
def solve_field(self, options):
    """Plate-solve an image through the INDI astrometry driver.

    *options* must contain either a data-uri formatted 'fileBuffer' or a
    'filePath'; with 'syncTelescope' set, the telescope named by
    options['telescope'] is synced to the solved coordinates.

    Returns:
        dict with 'status': 'OK' and the driver's 'solution' property map.

    Raises:
        BadRequestError: if neither fileBuffer nor filePath is provided.
        NotFoundError: if the telescope to sync cannot be found.
        FailedMethodError: if the driver reports a failed solve.
    """
    data = None
    fits_file = None
    # Log the options, replacing the (potentially huge) image payload.
    logger.debug('Solve field options: {}'.format([
        '{}: {}'.format(key, '<blob>' if key == 'fileBuffer' else value)
        for key, value in options.items()
    ]))
    if 'fileBuffer' in options:
        # Strip everything up to the data-uri separator, then base64-decode.
        data = base64.b64decode(
            options['fileBuffer']
            [options['fileBuffer'].find(Astrometry.DATAURL_SEPARATOR) +
             len(Astrometry.DATAURL_SEPARATOR):])
    elif 'filePath' in options and os.path.isfile(options['filePath']):
        with open(options['filePath'], 'rb') as f:
            data = f.read()
    else:
        raise BadRequestError(
            'You must pass either a fileBuffer object (data-uri formatted) or a filePath argument'
        )
    fits_file = fits.open(BytesIO(data))
    # Image resolution (pixels) is needed to convert pixel scale into
    # field width/height below.
    resolution = fits_file[0].data.shape
    self.__set_enabled(True)
    try:
        controller.controller.indi_server.event_listener.add(
            'astrometry', self.event_listener)
        self.__set_astrometry_options(options)
        # Wait for solver completion on a worker thread while this thread
        # uploads the image blob to the driver.
        wait_for_solver_thread = threading.Thread(
            target=self.event_listener.wait_for_solver,
            args=(self.__solver_status(), ))
        wait_for_solver_thread.start()
        self.__upload_blob(data)
        logger.debug('Waiting for solver to finish')
        wait_for_solver_thread.join()
        if self.event_listener.error:
            raise self.event_listener.error
        final_status = self.__solver_status()
        if final_status == 'OK':
            solution_property = self.device.get_property(
                'ASTROMETRY_RESULTS').to_map()
            solution_values = dict([(v['name'], v['value'])
                                    for v in solution_property['values']])
            # Derive field width/height in degrees: pixels * arcsec/px / 3600.
            solution_property['values'].append({
                'label': 'Field width',
                'name': 'ASTROMETRY_RESULTS_WIDTH',
                'value': resolution[1] *
                solution_values['ASTROMETRY_RESULTS_PIXSCALE'] / 3600.
            })
            solution_property['values'].append({
                'label': 'Field height',
                'name': 'ASTROMETRY_RESULTS_HEIGHT',
                'value': resolution[0] *
                solution_values['ASTROMETRY_RESULTS_PIXSCALE'] / 3600.
            })
            if options['syncTelescope']:
                logger.debug(solution_values)
                telescope = [
                    t for t in self.server.telescopes()
                    if t.id == options['telescope']
                ]
                if not telescope:
                    raise NotFoundError(
                        'Unable to find telescope {}'.format(telescope))
                telescope = telescope[0]
                # Driver reports RA in degrees; INDI telescopes expect hours.
                telescope_coordinates = {
                    'ra':
                    solution_values['ASTROMETRY_RESULTS_RA'] * (24. / 360.),
                    'dec': solution_values['ASTROMETRY_RESULTS_DE']
                }
                telescope.sync(telescope_coordinates)
            return {'status': 'OK', 'solution': solution_property}
        else:
            raise FailedMethodError(
                'Plate solving failed, check astrometry driver log')
    finally:
        # Always detach the listener and disable the solver device.
        controller.controller.indi_server.event_listener.remove(
            'astrometry')
        self.__set_enabled(False)
def import_event_json(task_handle, zip_path):
    """ Imports and creates event from json zip

    Task-driven variant: reports progress through *task_handle*, queues
    media uploads while creating the event and its services, then runs
    the upload queue and records the imported version data. Any service
    failure deletes the partially-created event before re-raising.

    NOTE(review): the `print traceback.format_exc()` statement below is
    Python 2 syntax — this module targets Python 2.
    """
    global CUR_ID, UPLOAD_QUEUE
    UPLOAD_QUEUE = []
    update_state(task_handle, 'Started')
    with app.app_context():
        path = app.config['BASE_DIR'] + '/static/uploads/import_event'
        # delete existing files
        if os.path.isdir(path):
            shutil.rmtree(path, ignore_errors=True)
        # extract files from zip
        with zipfile.ZipFile(zip_path, "r") as z:
            z.extractall(path)
        # create event
        try:
            update_state(task_handle, 'Importing event core')
            data = json.loads(open(path + '/event', 'r').read())
            # Strip ids so the DAO assigns fresh ones on create.
            _, data = _trim_id(data)
            srv = ('event', EventDAO)
            data = _delete_fields(srv, data)
            new_event = EventDAO.create(data, 'dont')[0]
            version_data = data.get('version', {})
            write_file(path + '/social_links',
                       json.dumps(data.get('social_links',
                                           [])))  # save social_links
            _upload_media_queue(srv, new_event)
        except BaseError as e:
            raise make_error('event', er=e)
        except Exception as e:
            raise make_error('event', er=e)
        # create other services
        try:
            service_ids = {}
            for item in IMPORT_SERIES:
                # Flag the DAO as importing so per-item hooks can adjust.
                item[1].is_importing = True
                data = open(path + '/%s' % item[0], 'r').read()
                dic = json.loads(data)
                # Remember old->new id mapping so later services can remap refs.
                changed_ids = create_service_from_json(
                    task_handle, dic, item, new_event.id, service_ids)
                service_ids[item[0]] = changed_ids.copy()
                # Reset the module-level failure marker after each service.
                CUR_ID = None
                item[1].is_importing = False
        except BaseError as e:
            # Roll back the half-imported event before reporting the error.
            EventDAO.delete(new_event.id)
            raise make_error(item[0], er=e, id_=CUR_ID)
        except IOError:
            EventDAO.delete(new_event.id)
            raise NotFoundError('File %s missing in event zip' % item[0])
        except ValueError:
            EventDAO.delete(new_event.id)
            raise make_error(item[0], er=ServerError('Invalid json'))
        except Exception:
            print traceback.format_exc()
            EventDAO.delete(new_event.id)
            raise make_error(item[0], id_=CUR_ID)
        # run uploads
        _upload_media(task_handle, new_event.id, path)
        # set version
        VersionUpdater(False, new_event.id, '').set(version_data)
        # return
        return new_event
def lookup(self, image_id, **kwargs):
    """Fetch image *image_id* from this database's redis-backed store.

    Raises:
        NotFoundError: if no image with that id exists.
    """
    stored = redis_client.lookup(image_id, self.name, 'images')
    if stored:
        return Image.from_map(stored, **kwargs)
    raise NotFoundError('Image with id {} not found in {} database'.format(image_id, self.name))
def guide(name, json):
    """Issue a guide pulse (direction/duration) on the named guider device.

    Raises:
        NotFoundError: if no connected guider matches *name*.
    """
    matching = [g for g in controller.indi_server.guiders() if g.id == name]
    if not matching:
        raise NotFoundError('Guider {} not found'.format(name))
    return matching[0].guide(json['direction'], json['duration'])
def image_is_ready(type, image):
    """Report whether *image* in the *type* database is ready; 404 otherwise."""
    found = get_image_database(type).lookup(image, file_required=False)
    if found.is_ready():
        return {'ready': True}
    raise NotFoundError('Image with type {} and id {} not found'.format(
        type, image))
def lookup_camera(id):
    """Return the connected camera with the given *id*.

    Raises:
        NotFoundError: if no connected camera matches.
    """
    for camera in controller.indi_server.cameras():
        if camera.id == id:
            return camera
    raise NotFoundError('Camera {} not found'.format(id))
def __wait_for_solution(self, options, resolution, fits_file_path, temp_path):
    """Run the local solve-field process and package its solution.

    Blocks until the solver finishes, builds an INDI-style property map
    from the raw solution values and optionally syncs the selected
    telescope. All failures are caught and reported as a status dict.

    Returns:
        dict with 'status': 'solved' and a 'solution' property map, or
        'status': 'error' with the stringified exception.
    """
    try:
        solved, solution = self.__run_solve_field(options, fits_file_path,
                                                  temp_path)
        if solved:
            # Re-shape raw solver values into an INDI-like property map.
            solution_property = {'values': []}
            solution_property['values'].append({
                'label': 'Right ascension',
                'name': 'ASTROMETRY_RESULTS_RA',
                'value': solution['ASTROMETRY_RESULTS_RA']
            })
            solution_property['values'].append({
                'label': 'Declination',
                'name': 'ASTROMETRY_RESULTS_DE',
                'value': solution['ASTROMETRY_RESULTS_DE']
            })
            solution_property['values'].append({
                'label': 'Pixel scale',
                'name': 'ASTROMETRY_RESULTS_PIXSCALE',
                'value': solution['ASTROMETRY_RESULTS_PIXSCALE']
            })
            # Field width/height in degrees: pixels * arcsec/px / 3600.
            solution_property['values'].append({
                'label': 'Field width',
                'name': 'ASTROMETRY_RESULTS_WIDTH',
                'value': resolution[1] *
                solution['ASTROMETRY_RESULTS_PIXSCALE'] / 3600.
            })
            solution_property['values'].append({
                'label': 'Field height',
                'name': 'ASTROMETRY_RESULTS_HEIGHT',
                'value': resolution[0] *
                solution['ASTROMETRY_RESULTS_PIXSCALE'] / 3600.
            })
            # Orientation is optional in the solver output.
            if solution['ASTROMETRY_RESULTS_ORIENTATION'] is not None:
                solution_property['values'].append({
                    'label': 'Field rotation (degrees E of N)',
                    'name': 'ASTROMETRY_RESULTS_ORIENTATION',
                    'value': solution['ASTROMETRY_RESULTS_ORIENTATION']
                })
            if options.get('syncTelescope'):
                telescope = [
                    t for t in self.server.telescopes()
                    if t.id == options['telescope']
                ]
                if not telescope:
                    raise NotFoundError(
                        'Unable to find telescope {}'.format(telescope))
                telescope = telescope[0]
                # Solver reports RA in degrees; telescope expects hours.
                telescope_coordinates = {
                    'ra': solution['ASTROMETRY_RESULTS_RA'] * (24. / 360.),
                    'dec': solution['ASTROMETRY_RESULTS_DE']
                }
                telescope.sync(telescope_coordinates)
            return {
                'status': 'solved',
                'solution': solution_property,
            }
        else:
            raise FailedMethodError(
                'Plate solving failed, check astrometry driver log')
    except Exception as e:
        # Report (not propagate) failures: caller reads the status dict.
        logger.warning('Error running platesolver with options {}'.format(
            self.__platesolving_options_log(options)),
                       exc_info=e)
        self.__set_status('error')
        return {
            'status': 'error',
            'error': str(e),
        }
    finally:
        # Always clean up the scratch directory and release the thread slot.
        shutil.rmtree(temp_path, True)
        self.solver_thread = None
        if not options.get('internalSkipIdle', False):
            self.__set_status('idle')
def get_telescope(self, id):
    """Return the connected telescope whose id matches *id*.

    Raises:
        NotFoundError: if no such telescope is connected.
    """
    for telescope in self.telescopes():
        if telescope.id == id:
            return telescope
    raise NotFoundError('Telescope {} not found'.format(id))
def get_camera(self, id):
    """Return the connected camera whose id matches *id*.

    Raises:
        NotFoundError: if no such camera is connected.
    """
    for camera in self.cameras():
        if camera.id == id:
            return camera
    raise NotFoundError('Camera {} not found'.format(id))
def job(self, sequence_job_id):
    """Return the sequence job with the given id.

    Raises:
        NotFoundError: when no job in this sequence matches.
    """
    for candidate in self.sequence_jobs:
        if candidate.id == sequence_job_id:
            return candidate
    raise NotFoundError()
def run(self, server, root_directory, event_listener, logger, on_update=None):
    """Execute every pending job in this sequence.

    Resolves the configured camera (required) and filter wheel (optional)
    by id on *server*, creates the output directory, then runs each
    to-do job in order, tracking progress in ``self.status`` and
    notifying *on_update* on state changes.

    Raises:
        NotFoundError: if the configured camera or filter wheel is missing.
    """
    camera = [c for c in server.cameras() if c.id == self.camera]
    if not camera:
        raise NotFoundError('Camera with id {} not found'.format(
            self.camera))
    camera = camera[0]
    filter_wheel = None
    if self.filter_wheel:
        filter_wheel = [
            f for f in server.filter_wheels() if f.id == self.filter_wheel
        ]
        if not filter_wheel:
            raise NotFoundError('Filter wheel with id {} not found'.format(
                self.filter_wheel))
        filter_wheel = filter_wheel[0]
    sequence_root_path = os.path.join(root_directory, self.upload_path)
    logger.info(
        'Starting sequence with camera: {}={} and filter_wheel: {}={}'.
        format(self.camera, camera.device.name, self.filter_wheel,
               filter_wheel.device.name if filter_wheel else 'N/A'))
    self.status = 'starting'
    #self.reset()
    on_update()
    try:
        os.makedirs(sequence_root_path, exist_ok=True)
        self.status = 'running'
        self.stopped = False
        for index, sequence_job in enumerate(self.sequence_jobs):
            # A concurrent stop() flips self.stopped; bail out between jobs.
            if self.stopped:
                return
            # Skip jobs already completed; run the rest in declared order.
            if self.is_todo(sequence_job):
                self.running_sequence_job = sequence_job
                sequence_job.run(server, {
                    'camera': camera,
                    'filter_wheel': filter_wheel
                },
                                 sequence_root_path,
                                 logger,
                                 event_listener,
                                 on_update,
                                 index=index)
        if not self.stopped:
            self.status = 'finished'
    except StopSequence as e:
        # Cooperative stop requested from inside a job.
        logger.info('Stopping sequence: {}'.format(e.message))
        self.stop(on_update=on_update)
    except Exception as e:
        logger.exception('error running sequence')
        self.status = 'error'
        raise e
    finally:
        # Always notify listeners and clear the running-job marker.
        on_update()
        self.running_sequence_job = None
def get(self, todo_id):
    """Return the single todo identified by *todo_id*; 404 when absent."""
    try:
        return {todo_id: todos[todo_id]}
    except KeyError:
        raise NotFoundError()