def runTest(self):
    """Seed GPU field buffers with random host data and verify GetFields
    reads the requested slice back bit-exactly."""
    nx, ny, nz, str_f, pt0, pt1 = self.args
    slidx = common.slice_index_two_points(pt0, pt1)
    str_fs = common.convert_to_tuple(str_f)

    # instance
    gpu_devices = common_gpu.gpu_device_list(print_info=False)
    context = cl.Context(gpu_devices)
    device = gpu_devices[0]
    fields = Fields(context, device, nx, ny, nz, '')
    getf = GetFields(fields, str_f, pt0, pt1)

    # host allocations: random reference data, copied into each GPU buffer
    eh_dict = {}
    for sf in str_fs:
        eh_dict[sf] = np.random.rand(*fields.ns).astype(fields.dtype)
        cl.enqueue_copy(fields.queue, fields.get_buf(sf), eh_dict[sf])

    # verify: wait for the asynchronous readback, then compare exactly.
    # BUG FIX: the loop variable used to shadow the unpacked `str_f`, and
    # `original`/`copy` were computed but ignored while the assertion
    # redundantly re-sliced and re-fetched the same values.
    getf.get_event().wait()
    for sf in str_fs:
        original = eh_dict[sf][slidx]
        copy = getf.get_fields(sf)
        self.assertEqual(np.abs(original - copy).max(), 0, self.args)
def init_method(init_node):
    """Scan an ``__init__`` body for conditional field assignments and
    collect them into a Fields container."""
    # TODO: This method currently only detects field assignments.
    # It needs to also support field deletions eventually.
    collected = Fields()
    for stmt in init_node.body:
        if Resolver.is_filter_conditional(stmt):
            for inner in stmt.body:
                if Resolver.is_assignment_to_field(inner):
                    guard_name = Resolver.resolve(stmt.test)
                    field = Resolver.drf_field_assignment(inner)
                    # TODO: Sometimes fields are assigned to temporary
                    # variables before being assigned to the actual field.
                    if field:
                        collected.add_representation(field['field_name'],
                                                     guard_name, field)
    return collected
def runTest(self):
    """Seed GPU field buffers with random host data and verify GetFields
    reads the requested slice back bit-exactly."""
    nx, ny, nz, str_f, pt0, pt1 = self.args
    slidx = common.slice_index_two_points(pt0, pt1)
    str_fs = common.convert_to_tuple(str_f)

    # instance
    gpu_devices = common_gpu.gpu_device_list(print_info=False)
    context = cl.Context(gpu_devices)
    device = gpu_devices[0]
    fields = Fields(context, device, nx, ny, nz, '')
    getf = GetFields(fields, str_f, pt0, pt1)

    # host allocations: random reference data, copied into each GPU buffer
    eh_dict = {}
    for sf in str_fs:
        eh_dict[sf] = np.random.rand(*fields.ns).astype(fields.dtype)
        cl.enqueue_copy(fields.queue, fields.get_buf(sf), eh_dict[sf])

    # verify: wait for the asynchronous readback, then compare exactly.
    # BUG FIX: the loop variable used to shadow the unpacked `str_f`, and
    # `original`/`copy` were computed but ignored while the assertion
    # redundantly re-sliced and re-fetched the same values.
    getf.get_event().wait()
    for sf in str_fs:
        original = eh_dict[sf][slidx]
        copy = getf.get_fields(sf)
        self.assertEqual(np.abs(original - copy).max(), 0, self.args)
def set(self, pos, player):
    """Place *player* at 1-based board position *pos*.

    Raises ValueError when the cell is occupied or the player is unknown.
    """
    idx = pos - 1
    # guard clause: reject before mutating (short-circuit keeps the
    # player-set lookup from running when the cell is already taken)
    if not (self.board[idx] == Fields.EMPTY.value
            and player in Fields.players()):
        raise ValueError('cannot set {:s} to {:d}'.format(player, pos))
    self.board[idx] = player
def __init__(self, input):
    """Wire up geometry, materials, fields and the hydro/radiation solvers."""
    # Store input parameters and check them
    self.input = input
    self.input.checkInputs()

    # Geometry selection by input type; anything other than slab or
    # cylindrical falls back to spherical (same as the original chain).
    geometry_by_name = {
        'slab': SlabGeometry,
        'cylindrical': CylindricalGeometry,
    }
    geometry_cls = geometry_by_name.get(input.geometry, SphericalGeometry)
    self.geo = geometry_cls(self)

    # Material handler (after geometry), then the field variables
    self.mat = Materials(self)
    self.fields = Fields(self)

    # Time step history
    self.timeSteps = []

    # Radiation and hydro problems
    self.hydro = LagrangianHydro(self)
    self.radPredictor = LagrangianRadiationPredictor(self)
    self.radCorrector = LagrangianRadiationCorrector(self)
def __init__(self, observing_programs=None, rp=None, fields=None,
             block_programs=True):
    """Set up queue state.

    observing_programs: list of ObservingPrograms (fresh list if omitted).
    rp: RequestPool to use (new empty pool if omitted).
    fields: field catalog (default Fields() if omitted).
    block_programs: whether a given observing block is restricted to
        fields from a single program.
    """
    # BUG FIX: the default used to be a mutable `[]`, silently shared by
    # every instance constructed without an explicit list.
    if observing_programs is None:
        observing_programs = []
    # list of ObservingPrograms
    self.observing_programs = observing_programs

    # block on which the queue parameters were calculated
    self.queue_block = None

    # the queue itself
    self.queue = pd.DataFrame()

    # should we only consider fields from one program in a given
    # observing block?
    self.block_programs = block_programs

    if rp is None:
        # initialize an empty RequestPool
        self.rp = RequestPool()
    else:
        self.rp = rp

    if fields is None:
        self.fields = Fields()
    else:
        self.fields = fields

    self.Sky = SkyBrightness()
def runTest(self):
    """Push a value (scalar or array) through SetFields and verify every
    GPU field buffer matches an identically-updated host mirror."""
    # args optionally carry src_is_array as a 7th element
    if len(self.args) == 6:
        nx, ny, nz, str_f, pt0, pt1 = self.args
        src_is_array = False
    elif len(self.args) == 7:
        nx, ny, nz, str_f, pt0, pt1, src_is_array = self.args
    else:
        # BUG FIX: previously fell through and died later with a NameError;
        # fail loudly and immediately instead.
        raise ValueError('expected 6 or 7 args, got {}'.format(len(self.args)))

    slidx = common.slice_index_two_points(pt0, pt1)
    str_fs = common.convert_to_tuple(str_f)

    # instance
    gpu_devices = common_gpu.gpu_device_list(print_info=False)
    context = cl.Context(gpu_devices)
    device = gpu_devices[0]
    fields = Fields(context, device, nx, ny, nz, '')
    setf = SetFields(fields, str_f, pt0, pt1, src_is_array)

    # generate random source
    if src_is_array:
        shape = list(common.shape_two_points(pt0, pt1))
        shape[0] *= len(str_fs)  # one stacked slab per requested field
        value = np.random.rand(*shape).astype(fields.dtype)
        split_value = np.split(value, len(str_fs))
        split_value_dict = dict(zip(str_fs, split_value))
    else:
        value = np.random.ranf()

    # host allocations (zeroed mirrors of the GPU buffers)
    eh_dict = {}
    for sf in str_fs:
        eh_dict[sf] = np.zeros(fields.ns, dtype=fields.dtype)
    gpu_eh = np.zeros(fields.ns, dtype=fields.dtype)

    # verify: apply the same update to the host mirror, run the GPU path,
    # then read each buffer back and compare exactly
    for sf in str_fs:
        if src_is_array:
            eh_dict[sf][slidx] = split_value_dict[sf]
        else:
            eh_dict[sf][slidx] = value

    setf.set_fields(value)

    for sf in str_fs:
        cl.enqueue_copy(fields.queue, gpu_eh, fields.get_buf(sf))
        self.assertEqual(np.abs(eh_dict[sf] - gpu_eh).max(), 0, self.args)
def runTest(self):
    """Push a value (scalar or array) through SetFields and verify every
    GPU field buffer matches an identically-updated host mirror."""
    # args optionally carry src_is_array as a 7th element
    if len(self.args) == 6:
        nx, ny, nz, str_f, pt0, pt1 = self.args
        src_is_array = False
    elif len(self.args) == 7:
        nx, ny, nz, str_f, pt0, pt1, src_is_array = self.args
    else:
        # BUG FIX: previously fell through and died later with a NameError;
        # fail loudly and immediately instead.
        raise ValueError('expected 6 or 7 args, got {}'.format(len(self.args)))

    slidx = common.slice_index_two_points(pt0, pt1)
    str_fs = common.convert_to_tuple(str_f)

    # instance
    gpu_devices = common_gpu.gpu_device_list(print_info=False)
    context = cl.Context(gpu_devices)
    device = gpu_devices[0]
    fields = Fields(context, device, nx, ny, nz, '')
    setf = SetFields(fields, str_f, pt0, pt1, src_is_array)

    # generate random source
    if src_is_array:
        shape = list(common.shape_two_points(pt0, pt1))
        shape[0] *= len(str_fs)  # one stacked slab per requested field
        value = np.random.rand(*shape).astype(fields.dtype)
        split_value = np.split(value, len(str_fs))
        split_value_dict = dict(zip(str_fs, split_value))
    else:
        value = np.random.ranf()

    # host allocations (zeroed mirrors of the GPU buffers)
    eh_dict = {}
    for sf in str_fs:
        eh_dict[sf] = np.zeros(fields.ns, dtype=fields.dtype)
    gpu_eh = np.zeros(fields.ns, dtype=fields.dtype)

    # verify: apply the same update to the host mirror, run the GPU path,
    # then read each buffer back and compare exactly
    for sf in str_fs:
        if src_is_array:
            eh_dict[sf][slidx] = split_value_dict[sf]
        else:
            eh_dict[sf][slidx] = value

    setf.set_fields(value)

    for sf in str_fs:
        cl.enqueue_copy(fields.queue, gpu_eh, fields.get_buf(sf))
        self.assertEqual(np.abs(eh_dict[sf] - gpu_eh).max(), 0, self.args)
def build_observing_programs(self):
    """Construct one ObservingProgram per entry in the configuration."""
    catalog = Fields()
    programs = []
    for spec in self.config['observing_programs']:
        selected_ids = catalog.select_field_ids(**spec['field_selections'])
        # name the unit-bearing cadence parameters before the long call
        program_id = PROGRAM_NAME_TO_ID[spec['program_name']]
        internight_gap = spec['internight_gap_days'] * u.day
        intranight_gap = spec['intranight_gap_min'] * u.min
        intranight_half_width = spec['intranight_half_width_min'] * u.min
        programs.append(
            ObservingProgram(program_id,
                             spec['subprogram_name'],
                             spec['observing_fraction'],
                             selected_ids,
                             spec['filter_ids'],
                             internight_gap,
                             spec['n_visits_per_night'],
                             intranight_gap,
                             intranight_half_width,
                             nightly_priority=spec['nightly_priority'],
                             filter_choice=spec['filter_choice']))
    return programs
def set_fields(self, conf_fields=None):
    """Build the field objects from configuration and attach them to self.

    conf_fields: field configuration; defaults to ``self.conf.fields``.
    """
    conf_fields = get_default(conf_fields, self.conf.fields)

    fields = Fields.from_conf(conf_fields)
    fields.read_interpolants(self.eldesc_dir)
    fields.setup_approximations(self.domain)
    # (removed long-dead commented-out debug prints/pause here)
    fields.setup_global_base()
    fields.setup_coors()

    self.fields = fields
def __init__(self, board=None):
    """Create a board: nine empty '-' cells by default, or validate *board*."""
    if board is None:
        self.board = ['-'] * 9
        return
    # guard clauses: check the size first, then each cell's legality
    count = len(board)
    if count != 9:
        raise ValueError('9 fields required, has {:d}'.format(count))
    for cell in board:
        if cell not in Fields.all():
            raise ValueError('illegal field value "{:s}"'.format(cell))
    self.board = board
def drf_meta_fields(meta_node):
    """Extract a Fields collection from a serializer ``Meta`` class body.

    Reads the ``fields`` and ``read_only_fields`` assignments. Supports
    tuple/list/set literals and ``+``-concatenation of such literals.
    """

    def resolve_fields(fields_node, read_only=False):
        # Convert a container-literal AST node into a list of Field objects.
        fields = []
        known_types = [ast.Tuple, ast.List, ast.Set]
        if isinstance(fields_node, ast.BinOp):
            # only `+` concatenation of field containers is supported
            assert isinstance(fields_node.op, ast.Add)
            # if either is Attribute, resolve_fields returns [] which is fine
            # since it will be handled by the logic in bases
            return resolve_fields(fields_node.left) + resolve_fields(
                fields_node.right)
        if any(isinstance(fields_node, t) for t in known_types):
            for field_node in fields_node.elts:
                field = Field(field_name=Resolver.resolve(field_node),
                              read_only=read_only)
                fields.append(field)
        # non-container nodes fall through to an empty list
        return fields

    fields = Fields()
    for node in meta_node.body:
        # only plain assignments inside Meta are interesting
        if not isinstance(node, ast.Assign):
            continue
        lhs, rhs = Resolver.resolve(node)
        if lhs == 'fields':
            fields.extend(resolve_fields(rhs))
        elif lhs == 'read_only_fields':
            # same names, but flagged read-only
            fields.extend(resolve_fields(rhs, read_only=True))
    return fields
def __init__(self, observing_programs=None, rp=None, fields=None):
    """Set up queue state.

    observing_programs: list of ObservingPrograms (fresh list if omitted).
    rp: RequestPool to use (new empty pool if omitted).
    fields: field catalog (default Fields() if omitted).
    """
    # BUG FIX: the default used to be a mutable `[]`, silently shared by
    # every instance constructed without an explicit list.
    if observing_programs is None:
        observing_programs = []
    # list of ObservingPrograms
    self.observing_programs = observing_programs

    if rp is None:
        # initialize an empty RequestPool
        self.rp = RequestPool()
    else:
        self.rp = rp

    if fields is None:
        self.fields = Fields()
    else:
        self.fields = fields
def test_max_length(app):
    """Boundary test: submit very long first/last names (both bracketed with
    literal Start/END markers so truncation would be visible)."""
    app.populating_fields(
        Fields(
            firstname=
            "Start khgkudhfuhgkdfhog sldnvgldjnrgjndjlfngbjdnlbjngfnkjngfbkjnkjg73ryehwifhrkghdf"
            "sfhvbksbfkuksvbnfkskbfndkndbknkdunfknkdbjfkdurotiguw48t938w4u98tugef948ug9e8r9hu8rth"
            "dufhbogudofbuijdotibjoeitjboinjtrnojhtboiejr8ot3 oiwrjgo3ij4oe59hjr0e59jh950hjr9t50h riej"
            "eirojgboeirjboijetoibjojgtrnoibjrgoijrjntoietoigjot83u4r5o38409 iwvngoierngoienrooijgierjg"
            "ijbgoiejtbiojrtiobjrjtonirjtnoijroijtnoirjtniojoryitnjoityjnoirjtnjrtptjprjthpbeojpteeEND",
            lastname=
            "Start sfnlbdnflbjidofijtb slkrjgbe djfnbldnlfkblkdgmnlbknijroiobdjgtijcvjbnkjdngkjbngfkbjn"
            "dfkbmlkngflbjkgfnnlkfgklmflkgmnlkfmgnmkllfkmgobn ksjdngvkjsnkdvbf fjnvglbdjfnbljnclgbkn"
            "djfnbjdngbljndlgknblkdnglkbmldkmnlkndflkbnlgjnlbngfbkdjgkbjkf dlkfbhjldjitglhdjgfblknldkg"
            "df;kmblkdgmblkmdglknmlfmdfhbndljtnguheotoy8e5roigj jrglidjflhigdlj ldjgldjigjpdifjgnl djvnfds"
            "lsnfvblksnlfsm fdsjbnlsijrojifwoeurjoqw83u4982398ru49i7ytei547yt985ue9y804e58tguoerghero"
        ))
    app.submit_data()
def test_with_enter(app):
    """Submit whitespace-only first and last names."""
    data = Fields(firstname=" ", lastname=" ")
    app.populating_fields(data)
    app.submit_data()
def test_empty_login(app):
    """Submit completely empty first and last names."""
    data = Fields(firstname="", lastname="")
    app.populating_fields(data)
    app.submit_data()
def test_login(app):
    """Happy-path submission with ordinary name values."""
    data = Fields(firstname="Oksana", lastname="Prokopek")
    app.populating_fields(data)
    app.submit_data()
def parse(self, myxml):
    """Scrape listing fields out of a raw eBay XML string with regexes.

    Returns the populated ``fielddic`` on success, or falls through with
    no return value (None) when *myxml* is empty or extraction fails.
    NOTE: Python 2 module (uses print statements); most lookups take the
    first match and will raise IndexError if the tag is absent — only a
    few fields have fallbacks.
    """
    expfileds = Fields()

    # one pre-compiled pattern per field of interest
    patternCad = re.compile(r"<CategoryID>(.*?)</CategoryID>")
    patternCae = re.compile(r"<CategoryName>(.*?)</CategoryName>")
    patternCoy = re.compile(r'<Country>(.*?)</Country>')
    patternCuy = re.compile(r"<Currency>(.*?)</Currency>")
    patternCue = re.compile(r'>(\d+\.\d+)</CurrentPrice>')
    patternFee = re.compile(r'<FeedbackScore>(.*?)</FeedbackScore>')
    patternFer = re.compile(
        r"<FeedbackRatingStar>(.*?)</FeedbackRatingStar>")
    patternGal = re.compile(r"<GalleryURL>(.*?)</GalleryURL>")
    patternHit = re.compile(r"<HitCount>(.*?)</HitCount>")
    patternHir = re.compile(r'<HitCounter>(.*?)</HitCounter>')
    patternItd = re.compile(r"<ItemID>(.*?)</ItemID>")
    patternLon = re.compile(r"<Location>(.*?)</Location>")
    patternQud = re.compile(r"<QuantitySold>(.*?)</QuantitySold>")
    patternQue = re.compile(
        r"<QuantitySoldByPickupInStore>(.*?)</QuantitySoldByPickupInStore>"
    )
    patternSht = re.compile(r'>(\d+\.\d+)</ShippingServiceCost>')
    patternShe = re.compile(r"<ShippingService>(.*?)</ShippingService>")
    patternSku = re.compile(r"<SKU>(.*?)</SKU>")
    patternSte = re.compile(r"<StartTime>(.*?)</StartTime>")
    patternStr = re.compile(r"<StoreOwner>(.*?)</StoreOwner>")
    patternStl = re.compile(r"<StoreURL>(.*?)</StoreURL>")
    patternTie = re.compile(r'<Title>(.*?)</Title>')
    patternUsd = re.compile(r'<UserID>(.*?)</UserID>')
    patternUse = re.compile(r'<Site>(.*?)</Site>')
    patternVil = re.compile(r'<ViewItemURL>(.*?)</ViewItemURL>')
    patternLin = re.compile(r'<ListingDuration>(.*?)</ListingDuration>')
    patternPrg = re.compile(r'<PrivateListing>(.*?)</PrivateListing>')
    patternLis = re.compile(r'<ListingStatus>(.*?)</ListingStatus>')

    xml = myxml
    if xml:
        try:
            expfileds.fielddic['categoryid'] = re.findall(patternCad,
                                                          xml)[0]
            # SKU is optional and truncated to 11 characters when present
            try:
                expfileds.fielddic['sku'] = re.findall(patternSku,
                                                       xml)[0][:11]
            except:
                expfileds.fielddic['sku'] = 'Nosku'
            expfileds.fielddic['categoryname'] = re.findall(
                patternCae, xml)[0]
            expfileds.fielddic['country'] = re.findall(patternCoy, xml)[0]
            expfileds.fielddic['currency'] = re.findall(patternCuy, xml)[0]
            # price may be missing; dump the XML for diagnosis if so
            try:
                expfileds.fielddic['currentprice'] = re.findall(
                    patternCue, xml)[0]
            except IndexError:
                print xml
            expfileds.fielddic['feedbackscore'] = re.findall(
                patternFee, xml)[0]
            expfileds.fielddic['feedbackstar'] = re.findall(
                patternFer, xml)[0]
            expfileds.fielddic['galleryurl'] = re.findall(patternGal,
                                                          xml)[0]
            # NOTE: 'starttime' is assigned twice (here and again below)
            expfileds.fielddic['starttime'] = re.findall(patternSte,
                                                         xml)[0]
            expfileds.fielddic['hitcount'] = re.findall(patternHit, xml)[0]
            expfileds.fielddic['hitcounter'] = re.findall(patternHir,
                                                          xml)[0]
            expfileds.fielddic['itemid'] = re.findall(patternItd, xml)[0]
            expfileds.fielddic['location'] = re.findall(patternLon, xml)[0]
            expfileds.fielddic['quantitysold'] = re.findall(
                patternQud, xml)[0]
            expfileds.fielddic['quantitysoldinstore'] = re.findall(
                patternQue, xml)[0]
            # shipping cost may be missing; dump the XML for diagnosis
            try:
                expfileds.fielddic['shippingcost'] = re.findall(
                    patternSht, xml)[0]
            except IndexError:
                print xml
            expfileds.fielddic['shippingservice'] = re.findall(
                patternShe, xml)[0]
            expfileds.fielddic['starttime'] = re.findall(patternSte,
                                                         xml)[0]
            expfileds.fielddic['storeowner'] = re.findall(patternStr,
                                                          xml)[0]
            # store URL is optional
            try:
                expfileds.fielddic['storeurl'] = re.findall(
                    patternStl, xml)[0]
            except:
                expfileds.fielddic['storeurl'] = 'Nostoreurl'
            expfileds.fielddic['title'] = re.findall(patternTie, xml)[0]
            expfileds.fielddic['userid'] = re.findall(patternUsd, xml)[0]
            expfileds.fielddic['usersite'] = re.findall(patternUse, xml)[0]
            expfileds.fielddic['viewitemurl'] = re.findall(
                patternVil, xml)[0]
            expfileds.fielddic['listduration'] = re.findall(
                patternLin, xml)[0]
            expfileds.fielddic['privatelisting'] = re.findall(
                patternPrg, xml)[0]
            expfileds.fielddic['listingstatus'] = re.findall(
                patternLis, xml)[0]
            return expfileds.fielddic
        except Exception as e:
            # any other missing tag aborts the whole record; the bare print
            # emits an empty line only (the exception itself is discarded)
            print
class RadPydro:
    """Driver for Lagrangian radiation-hydrodynamics problems.

    Owns the geometry, materials, field variables and the hydro/radiation
    solvers, advances them with a predictor/corrector scheme, and keeps a
    running energy-conservation ledger.
    """

    def __init__(self, input):
        # Store input parameters and check them
        self.input = input
        self.input.checkInputs()

        # Define geometry based on geometry input type
        # (anything other than slab/cylindrical falls back to spherical)
        if input.geometry == 'slab':
            self.geo = SlabGeometry(self)
        elif input.geometry == 'cylindrical':
            self.geo = CylindricalGeometry(self)
        else:
            self.geo = SphericalGeometry(self)

        # Initialize material handler now that geometry is initialized
        self.mat = Materials(self)

        # Initialize field variables
        self.fields = Fields(self)

        # Time parameters
        self.timeSteps = []
        self.time = 0.
        self.timeStep_num = 0
        self.Tf = input.Tf

        # Initialize hydro problem
        self.hydro = LagrangianHydro(self)

        # Initialize radiation problem (if used)
        self.radPredictor = LagrangianRadiationPredictor(self)
        self.radCorrector = LagrangianRadiationCorrector(self)

        # Init storage for energies in conservation check
        self.kinetic_energy = []
        self.internal_energy = []
        self.radiation_energy = []
        self.radiation_leakage = []
        self.work_energy = []
        self.total_energy = []

        # Compute initial energies from the initial conditions.
        # Velocities (u_IC, m_half) live on the N+1 edges; e/E/rho on the
        # N cell centers, hence the `i < N` guard.
        kinetic = 0
        internal = 0
        radiation = 0
        for i in range(self.geo.N + 1):
            kinetic += 1 / 2 * self.mat.m_half[i] * self.fields.u_IC[i]**2
            if i < self.geo.N:
                internal += self.mat.m[i] * self.fields.e_IC[i]
                radiation += self.mat.m[i] * self.fields.E_IC[i] \
                    / self.fields.rho_IC[i]
        total = kinetic + internal + radiation

        self.kinetic_energy.append(kinetic)
        self.internal_energy.append(internal)
        self.radiation_energy.append(radiation)
        self.radiation_leakage.append(0)
        self.work_energy.append(0)
        self.total_energy.append(total)
        self.total_radiation_leakage = 0
        self.total_work_energy = 0

    def computeTimeStep(self):
        """Append the next time step size to self.timeSteps.

        The step is the minimum of the input cap and three limiters:
        relative radiation-energy change, material speed, and sound speed.
        """
        dr = self.geo.dr
        u = self.fields.u
        F_c = self.input.CoFactor
        relEFactor = self.input.relEFactor
        c_s = (self.mat.gamma * self.fields.P / self.fields.rho)**(1 / 2)

        E_k = (self.fields.E + self.fields.E_old) / 2
        dE_k = np.zeros(self.geo.N)
        if len(self.timeSteps) == 0:
            # first step: no previous dt, so use E_k itself (dt_E becomes
            # exactly relEFactor)
            dE_k = E_k
        else:
            dE_k = abs(
                (self.fields.E - self.fields.E_old) / self.timeSteps[-1])

        # BUG FIX: the loop used to rebind `u_center` to a scalar on every
        # iteration (u_center = ...), so only the LAST edge pair ever
        # limited dt_u. Fill element i of the array instead.
        u_center = np.zeros(self.geo.N)
        for i in range(self.geo.N):
            u_center[i] = abs((u[i] + u[i + 1]) / 2)

        dt_E = min(relEFactor * E_k / dE_k)
        dt_u = min(dr * F_c / u_center)
        dt_cs = min(dr * F_c / c_s)

        self.timeSteps.append(min(self.input.maxTimeStep, dt_E, dt_u,
                                  dt_cs))

    def run(self):
        """Dispatch to the solver loop selected by input.running_mode."""
        if self.input.running_mode == 'hydro':
            self.runHydro()
        elif self.input.running_mode == 'rad':
            self.runRad()
        elif self.input.running_mode == 'radhydro':
            self.runRadHydro()

    def runHydro(self):
        """Advance the pure-hydro problem to T_final."""
        while self.time < self.input.T_final:
            # Compute time step size for this time step
            self.computeTimeStep()

            # Update time and time step number
            self.time += self.timeSteps[-1]
            self.timeStep_num += 1
            print('=========================================================')
            print('Starting time step %i, time = %.3e' \
                % (self.timeStep_num, self.time))
            print(
                '=========================================================\n')

            # Add artificial viscosity for this time step
            self.fields.addArtificialViscosity()

            # Predictor step
            self.hydro.recomputeVelocity(True)
            self.geo.moveMesh(True)
            self.hydro.recomputeDensity(True)
            self.hydro.recomputeInternalEnergy(True)
            self.fields.recomputeTemperature(True)
            self.fields.recomputePressure(True)

            # Corrector step
            self.hydro.recomputeVelocity(False)
            self.geo.moveMesh(False)
            self.hydro.recomputeDensity(False)
            self.hydro.recomputeInternalEnergy(False)
            self.fields.recomputeTemperature(False)
            self.fields.recomputePressure(False)

            # Energy conservation check
            energy_diff = self.recomputeEnergyConservation()
            print('Energy conservation check: ', energy_diff, '\n')

            # Copy to old containers for next time step
            self.fields.stepFields()
            self.geo.stepGeometry()

    def runRad(self):
        """Advance the pure-radiation problem to T_final."""
        while self.time < self.input.T_final:
            # Compute time step size for this time step
            self.computeTimeStep()

            # Update time and time step number
            self.time += self.timeSteps[-1]
            self.timeStep_num += 1
            print('=========================================================')
            print('Starting time step %i, time = %.3e' \
                % (self.timeStep_num, self.time))
            print(
                '=========================================================\n')

            # Predictor step
            self.radPredictor.recomputeRadiationEnergy()
            self.radPredictor.recomputeInternalEnergy()
            self.fields.recomputeTemperature(True)
            self.fields.recomputePressure(True)

            # Corrector step
            self.radCorrector.recomputeRadiationEnergy()
            self.radCorrector.recomputeInternalEnergy()
            self.fields.recomputeTemperature(False)
            self.fields.recomputePressure(False)

            # Energy conservation check
            energy_diff = self.recomputeEnergyConservation()
            print('Energy conservation check: ', energy_diff, '\n')

            # Copy to old containers for next time step
            self.fields.stepFields()

    def runRadHydro(self):
        """Advance the coupled radiation-hydro problem to T_final."""
        while self.time < self.input.T_final:
            # Compute time step size for this time step
            self.computeTimeStep()

            # Update time and time step number
            self.time += self.timeSteps[-1]
            self.timeStep_num += 1
            print('=========================================================')
            print('Starting time step %i, time = %.3e' \
                % (self.timeStep_num, self.time))
            print(
                '=========================================================\n')

            # Add artificial viscosity for this time step
            self.fields.addArtificialViscosity()

            # Predictor step
            self.hydro.recomputeVelocity(True)
            self.geo.moveMesh(True)
            self.hydro.recomputeDensity(True)
            self.radPredictor.recomputeRadiationEnergy()
            self.radPredictor.recomputeInternalEnergy()
            self.fields.recomputeTemperature(True)
            self.fields.recomputePressure(True)

            # Corrector step
            self.hydro.recomputeVelocity(False)
            self.geo.moveMesh(False)
            self.hydro.recomputeDensity(False)
            self.radCorrector.recomputeRadiationEnergy()
            self.radCorrector.recomputeInternalEnergy()
            self.fields.recomputeTemperature(False)
            self.fields.recomputePressure(False)

            # Energy conservation check
            energy_diff = self.recomputeEnergyConservation()
            print('Energy conservation check: ', energy_diff, '\n')

            # Copy to old containers for next time step
            self.fields.stepFields()
            self.geo.stepGeometry()

    def recomputeEnergyConservation(self):
        """Update the energy ledger for this step and return the balance.

        Returns dKE + dIE + dRE + accumulated work + accumulated leakage,
        which should be ~0 when energy is conserved.
        """
        kinetic_energy = self.kinetic_energy
        internal_energy = self.internal_energy
        radiation_energy = self.radiation_energy
        radiation_leakage = self.radiation_leakage
        work_energy = self.work_energy
        total_energy = self.total_energy

        c = self.input.c
        dt = self.timeSteps[-1]
        m = self.mat.m
        m_half = self.mat.m_half
        u = self.fields.u
        e = self.fields.e
        E = self.fields.E
        rho = self.fields.rho

        # Time-centered (k) and predictor-centered (pk) averages
        A_k = (self.geo.A + self.geo.A_old) / 2
        A_pk = (self.geo.A_p + self.geo.A_old) / 2
        dr_k = (self.geo.dr + self.geo.dr_old) / 2
        dr_pk = (self.geo.dr_p + self.geo.dr_old) / 2
        E_k = (self.fields.E + self.fields.E_old) / 2
        E_pk = (self.fields.E_p + self.fields.E_old) / 2
        T_pk = (self.fields.T_p + self.fields.T_old) / 2
        rho_k = (self.fields.rho + self.fields.rho_old) / 2
        rho_pk = (self.fields.rho_p + self.fields.rho_old) / 2
        u_k = (self.fields.u + self.fields.u_old) / 2
        P_pk = (self.fields.P_p + self.fields.P_old) / 2

        # Recomputing kappa_t at the cell edges and cell centers
        self.mat.recomputeKappa_t(T_pk)
        kappa_t_pk_edge = self.mat.kappa_t
        self.mat.recomputeKappa_a(T_pk)
        kappa_t_pk_center = self.mat.kappa_a + self.mat.kappa_s

        # Setting up boundary parameters for the radiation terms
        # in the momentum equation.
        # BUG FIX: these comparisons used `is 'source'` (identity on a
        # string literal, which is implementation-dependent); use `==`.
        if self.input.rad_L == 'source':
            E_bL_k = self.fields.E_bL
            E_bL_pk = self.fields.E_bL
        else:
            E_bL_k = E_k[0]
            E_bL_pk = E_pk[0]
        if self.input.rad_R == 'source':
            E_bR_k = self.fields.E_bR
            E_bR_pk = self.fields.E_bR
        else:
            E_bR_k = E_k[-1]
            E_bR_pk = E_pk[-1]

        # Compute the boundary radiation energies in the momentum eqn
        coeff_E_L = 3 * rho_pk[0] * dr_pk[0] * kappa_t_pk_center[0]
        coeff_E_R = 3 * rho_pk[-1] * dr_pk[-1] * kappa_t_pk_center[-1]
        E_L = (coeff_E_L * E_bL_pk + 4 * E_pk[0]) / (coeff_E_L + 4)
        E_R = (coeff_E_R * E_bR_pk + 4 * E_pk[-1]) / (coeff_E_R + 4)

        # Compute radiation flux at boundaries
        coeff_F_L = -2 * c / (3 * rho_k[0] * dr_k[0] * kappa_t_pk_edge[0]
                              + 4)
        coeff_F_R = -2 * c / (3 * rho_k[-1] * dr_k[-1]
                              * kappa_t_pk_edge[-1] + 4)
        F_L = coeff_F_L * (E_k[0] - E_bL_k)
        F_R = coeff_F_R * (E_bR_k - E_k[-1])

        # Setting up boundary parameters for the pressure boundary values
        if self.input.hydro_L == 'P':
            P_bL_pk = self.fields.P_L
        else:
            P_bL_pk = P_pk[0] + 1 / 3 * (E_pk[0] - E_L)
        if self.input.hydro_R == 'P':
            P_bR_pk = self.fields.P_R
        else:
            P_bR_pk = P_pk[-1] + 1 / 3 * (E_pk[-1] - E_R)

        # Compute kinetic, internal, and radiation energies for this
        # timestep (edge-centered kinetic, cell-centered internal/rad)
        kinetic, internal, radiation = 0, 0, 0
        for i in range(self.geo.N + 1):
            kinetic += 1 / 2 * m_half[i] * u[i]**2
            if i < self.geo.N:
                internal += m[i] * e[i]
                radiation += m[i] * E[i] / rho[i]

        # Compute radiation leakage
        leakage = (A_k[-1] * F_R - A_k[0] * F_L) * dt

        # Compute compressive work (radiation pressure + material pressure)
        work = (A_pk[-1] * 1 / 3 * E_R * u_k[-1] -
                A_pk[0] * 1 / 3 * E_L * u_k[0]) * dt
        work += (A_pk[-1] * P_bR_pk * u_k[-1] -
                 A_pk[0] * P_bL_pk * u_k[0]) * dt

        # Compute total energy
        total = kinetic + internal + radiation + leakage + work

        # Compute energy final - initial energies
        dKE = kinetic - kinetic_energy[0]
        dIE = internal - internal_energy[0]
        dRE = radiation - radiation_energy[0]

        # Compute energy losses from pressure work, drift, and leakage
        total_work = self.total_work_energy + work
        total_leak = self.total_radiation_leakage + leakage

        # Update loss terms from pressure work, drift, and leakage
        self.total_work_energy += work
        self.total_radiation_leakage += leakage

        # Append to storage
        kinetic_energy.append(kinetic)
        internal_energy.append(internal)
        radiation_energy.append(radiation)
        radiation_leakage.append(leakage)
        work_energy.append(work)
        total_energy.append(total)

        return dKE + dIE + dRE + total_work + total_leak
def find_serializer_fields(self, serializer_name):
    """Resolve the complete field set for *serializer_name* (memoized).

    Precedence: own class vars > Meta fields > inherited fields; dynamic
    fields declared in __init__ trump or augment whatever was found.
    Raises if the serializer is flagged dynamic but has no __init__.
    """
    nodes = self.serializer_registry.nodes
    if serializer_name in self.memo_dict:
        return self.memo_dict[serializer_name]

    class_node = nodes[serializer_name]
    fields = Fields()
    init_node = None

    # Look at own class variables first, this trumps everything else
    for node in class_node.body:
        if self.is_class_var(node):
            # explicit class var trumps Meta
            fields.add(Resolver.class_var_drf_field(node), overwrite=True)
        elif self.is_meta(node):
            fields.extend(Resolver.drf_meta_fields(node))
        elif self.is_init_method(node):
            init_node = node

    # add fields from bases, in left to right order. The bases of the base
    # trumps the neighbour of the base if there's overlap.
    for base in class_node.bases:
        base = Resolver.resolve(base)
        if base == 'object':
            continue
        if base not in nodes:
            # TODO: ??? (base defined outside the registry is skipped)
            continue
        base_class_vars = self.find_serializer_fields(base)
        fields.extend(base_class_vars)

    # dynamic fields trump or augment existing fields
    if serializer_name in self.dynamic_fields:
        if not init_node:
            msg = ('Did not find __init__ in {} but view specifies dynamic'
                   ' fields.').format(serializer_name)
            raise Exception(msg)
        # BUG FIX: this used to pass `node` — the stale variable left over
        # from the class-body loop above — instead of the recorded
        # __init__ node.
        dynamic_fields = Resolver.init_method(init_node)
        for field in dynamic_fields:
            if field not in fields:
                fields.add(field)
                continue
            previous_field = fields[field['field_name']]
            augmented_field = self.augment_field(previous_field, field)
            fields.add(augmented_field, overwrite=True)

    self.memo_dict[serializer_name] = fields
    return fields
def find_serializer_fields(self, serializer_name):
    """Resolve the complete field set for *serializer_name* (memoized).

    Precedence: own class vars > Meta fields > inherited fields; dynamic
    fields declared in __init__ (on this class or a direct base) trump or
    augment whatever was found. NOTE: Python 2 (uses ``iteritems``).
    """
    nodes = self.serializer_registry.nodes
    if serializer_name in self.memo_dict:
        return self.memo_dict[serializer_name]

    class_node = nodes[serializer_name]
    fields = Fields()
    init_node = None

    # Look at own class variables first, this trumps everything else
    for node in class_node.body:
        if self.is_class_var(node):
            # explicit class var trumps Meta
            fields.add(Resolver.drf_field_assignment(node), overwrite=True)
        elif self.is_meta(node):
            fields.extend(Resolver.drf_meta_fields(node))
        elif self.is_init_method(node):
            init_node = node

    # add fields from bases, in left to right order. The bases of the base
    # trumps the neighbour of the base if there's overlap.
    for base in class_node.bases:
        base = Resolver.resolve(base)
        if base == 'object':
            continue
        if base not in nodes:
            # TODO: ??? (base defined outside the registry is skipped)
            continue
        base_class_vars = self.find_serializer_fields(base)
        fields.extend(base_class_vars)

    # Check for dynamic fields that were inherited from direct ancestors.
    # TODO: Find a better way to support inheritance
    # (base nodes may be ast.Attribute (.attr) or ast.Name (.id))
    parent_in_dynamic_fields = any(
        getattr(parent_class, 'attr', None) in self.dynamic_fields
        or getattr(parent_class, 'id', None) in self.dynamic_fields
        for parent_class in class_node.bases)

    # dynamic fields trump or augment existing fields
    if serializer_name in self.dynamic_fields or parent_in_dynamic_fields:
        if init_node:
            dynamic_fields = Resolver.init_method(init_node)
            for field_name, field in dynamic_fields.iteritems():
                if field_name not in fields:
                    fields.add(field)
                    continue
                # merge the dynamic declaration into the existing field
                previous_field = fields[field_name]
                augmented_field = self.augment_field(previous_field, field)
                fields.add(augmented_field, overwrite=True)

    self.memo_dict[serializer_name] = fields
    return fields
def parse(self, sellerid, timefrom, timeto):
    """Fetch the seller's listings in [timefrom, timeto] and yield one
    scraped field dict per item.

    Generator; yields nothing (after printing a notice) when the GetList
    call fails or returns an empty set. NOTE: Python 2 module (print
    statements); most lookups take the first regex match and will raise
    IndexError if the tag is absent.
    """
    getlist = GetList()
    myset = getlist.get_list(sellerid,
                             starttimefrom=timefrom,
                             starttimeto=timeto)

    # one pre-compiled pattern per field of interest
    patternCad = re.compile(r"<CategoryID>(.*?)</CategoryID>")
    patternCae = re.compile(r"<CategoryName>(.*?)</CategoryName>")
    patternCoy = re.compile(r'<Country>(.*?)</Country>')
    patternCuy = re.compile(r"<Currency>(.*?)</Currency>")
    patternCue = re.compile(r'>(\d+\.\d+)</CurrentPrice>')
    patternFee = re.compile(r'<FeedbackScore>(.*?)</FeedbackScore>')
    patternFer = re.compile(
        r"<FeedbackRatingStar>(.*?)</FeedbackRatingStar>")
    patternGal = re.compile(r"<GalleryURL>(.*?)</GalleryURL>")
    patternHit = re.compile(r"<HitCount>(.*?)</HitCount>")
    patternHir = re.compile(r'<HitCounter>(.*?)</HitCounter>')
    patternItd = re.compile(r"<ItemID>(.*?)</ItemID>")
    patternLon = re.compile(r"<Location>(.*?)</Location>")
    patternQud = re.compile(r"<QuantitySold>(.*?)</QuantitySold>")
    patternQue = re.compile(
        r"<QuantitySoldByPickupInStore>(.*?)</QuantitySoldByPickupInStore>"
    )
    patternSht = re.compile(r'>(\d+\.\d+)</ShippingServiceCost>')
    patternShe = re.compile(r"<ShippingService>(.*?)</ShippingService>")
    patternSku = re.compile(r"<SKU>(.*?)</SKU>")
    patternSte = re.compile(r"<StartTime>(.*?)</StartTime>")
    patternStr = re.compile(r"<StoreOwner>(.*?)</StoreOwner>")
    patternStl = re.compile(r"<StoreURL>(.*?)</StoreURL>")
    patternTie = re.compile(r'<Title>(.*?)</Title>')
    patternUsd = re.compile(r'<UserID>(.*?)</UserID>')
    patternUse = re.compile(r'<Site>(.*?)</Site>')
    patternVil = re.compile(r'<ViewItemURL>(.*?)</ViewItemURL>')
    patternLin = re.compile(r'<ListingDuration>(.*?)</ListingDuration>')
    patternPrg = re.compile(r'<PrivateListing>(.*?)</PrivateListing>')

    if isinstance(myset, type(None)):
        # the GetList call failed outright
        print 'So I get nothing from Getlist Call'
        # failedlist=[]
        # failedlist.append(sellerid)
        # failedlist.append(timefrom)
        # failedlist.append(timeto)
        # self.failedqueue.put(failedlist)
        # print 'And this request have been put into the failed queue'
    elif len(myset) == 0:
        print 'No listings occur between %s and %s' % (timefrom, timeto)
    else:
        for xml in self.get_xml(myset):
            expfileds = Fields()
            expfileds.fielddic['categoryid'] = re.findall(patternCad,
                                                          xml)[0]
            # SKU truncated to 11 characters (no fallback in this variant)
            expfileds.fielddic['sku'] = re.findall(patternSku,
                                                   xml)[0][:11]
            expfileds.fielddic['categoryname'] = re.findall(
                patternCae, xml)[0]
            expfileds.fielddic['country'] = re.findall(patternCoy,
                                                       xml)[0]
            expfileds.fielddic['currency'] = re.findall(patternCuy,
                                                        xml)[0]
            # price may be missing; dump the XML for diagnosis if so
            try:
                expfileds.fielddic['currentprice'] = re.findall(
                    patternCue, xml)[0]
            except IndexError:
                print xml
            expfileds.fielddic['feedbackscore'] = re.findall(
                patternFee, xml)[0]
            expfileds.fielddic['feedbackstar'] = re.findall(
                patternFer, xml)[0]
            expfileds.fielddic['galleryurl'] = re.findall(patternGal,
                                                          xml)[0]
            expfileds.fielddic['hitcount'] = re.findall(patternHit,
                                                        xml)[0]
            expfileds.fielddic['hitcounter'] = re.findall(patternHir,
                                                          xml)[0]
            expfileds.fielddic['itemid'] = re.findall(patternItd,
                                                      xml)[0]
            expfileds.fielddic['location'] = re.findall(patternLon,
                                                        xml)[0]
            expfileds.fielddic['quantitysold'] = re.findall(
                patternQud, xml)[0]
            expfileds.fielddic['quantitysoldinstore'] = re.findall(
                patternQue, xml)[0]
            # shipping cost may be missing; dump the XML for diagnosis
            try:
                expfileds.fielddic['shippingcost'] = re.findall(
                    patternSht, xml)[0]
            except IndexError:
                print xml
            expfileds.fielddic['shippingservice'] = re.findall(
                patternShe, xml)[0]
            expfileds.fielddic['starttime'] = re.findall(patternSte,
                                                         xml)[0]
            expfileds.fielddic['storeowner'] = re.findall(patternStr,
                                                          xml)[0]
            expfileds.fielddic['storeurl'] = re.findall(patternStl,
                                                        xml)[0]
            expfileds.fielddic['title'] = re.findall(patternTie, xml)[0]
            expfileds.fielddic['userid'] = re.findall(patternUsd,
                                                      xml)[0]
            expfileds.fielddic['usersite'] = re.findall(patternUse,
                                                        xml)[0]
            expfileds.fielddic['viewitemurl'] = re.findall(
                patternVil, xml)[0]
            expfileds.fielddic['listduration'] = re.findall(
                patternLin, xml)[0]
            expfileds.fielddic['privatelisting'] = re.findall(
                patternPrg, xml)[0]
            yield expfileds.fielddic
class QueueManager(object):
    """Manages an observation queue fed by one or more ObservingPrograms.

    Subclasses implement ``_next_obs`` and ``_update_queue``.
    """

    def __init__(self, observing_programs=None, rp=None, fields=None,
                 block_programs=True):
        """Set up queue state.

        observing_programs: list of ObservingPrograms (fresh list if
            omitted).
        rp: RequestPool to use (new empty pool if omitted).
        fields: field catalog (default Fields() if omitted).
        block_programs: whether a given observing block is restricted to
            fields from a single program.
        """
        # BUG FIX: the default used to be a mutable `[]`, silently shared
        # by every instance constructed without an explicit list.
        if observing_programs is None:
            observing_programs = []
        # list of ObservingPrograms
        self.observing_programs = observing_programs

        # block on which the queue parameters were calculated
        self.queue_block = None

        # the queue itself
        self.queue = pd.DataFrame()

        # should we only consider fields from one program in a given
        # observing block?
        self.block_programs = block_programs

        if rp is None:
            # initialize an empty RequestPool
            self.rp = RequestPool()
        else:
            self.rp = rp

        if fields is None:
            self.fields = Fields()
        else:
            self.fields = fields

        self.Sky = SkyBrightness()

    def add_observing_program(self, observing_program):
        """Register an additional ObservingProgram."""
        self.observing_programs.append(observing_program)

    def assign_nightly_requests(self, current_state):
        """Rebuild tonight's request pool from every observing program."""
        # clear previous request pool
        self.rp.clear_all_requests()
        # reset the first observation of the night counters
        self.fields.clear_first_obs()

        for program in self.observing_programs:
            request_sets = program.assign_nightly_requests(
                current_state['current_time'], self.fields,
                block_programs=self.block_programs)
            for rs in request_sets:
                self.rp.add_requests(rs['program_id'], rs['field_ids'],
                                     rs['filter_id'], rs['cadence_func'],
                                     rs['cadence_pars'],
                                     rs['request_number_tonight'],
                                     rs['total_requests_tonight'],
                                     priority=rs['priority'])

        # sanity check: a night with zero requests indicates a config error
        assert (len(self.rp.pool) > 0)

    def next_obs(self, current_state):
        """Given current state, return the parameters for the next request"""
        # don't store the telescope state locally!
        # define functions that actually do the work in subclasses
        return self._next_obs(current_state)

    def update_queue(self, current_state, **kwargs):
        """Recalculate queue"""
        # define functions that actually do the work in subclasses
        return self._update_queue(current_state)

    def remove_requests(self, request_id):
        """Remove a request from both the queue and the request pool"""
        self.queue = self.queue.drop(request_id)
        self.rp.remove_requests(request_id)
def init_method(init_node):
    """Return a Fields container for *init_node*.

    TODO: inspect init_node; at present a default-constructed Fields
    instance is returned unchanged.
    """
    return Fields()
def test_enter_link(app):
    """Fill both name fields with the same URL and submit the form."""
    url = "http://www.montypython.com/"
    app.populating_fields(Fields(firstname=url, lastname=url))
    app.submit_data()
def test_spec_char(app):
    """Fill both name fields with special characters and submit the form."""
    noise = "d426373!@#$%^&*()_+}{:?><"
    app.populating_fields(Fields(firstname=noise, lastname=noise))
    app.submit_data()
def __init__(self, input):
    """Assemble the full simulation from an input specification.

    Builds geometry, materials, fields, the hydro solver and the
    radiation predictor/corrector, then seeds the energy-conservation
    bookkeeping with the initial kinetic, internal and radiation
    energies.

    Parameters:
        input: problem specification object; must provide checkInputs(),
            a ``geometry`` string and a final time ``Tf``.  (The name
            shadows the ``input`` builtin but is kept for interface
            compatibility.)
    """
    # Store input parameters and check them
    self.input = input
    self.input.checkInputs()

    # Define geometry based on geometry input type; anything other than
    # 'slab' or 'cylindrical' falls through to spherical (presumably
    # checkInputs has already validated the choice -- TODO confirm).
    if input.geometry == 'slab':
        self.geo = SlabGeometry(self)
    elif input.geometry == 'cylindrical':
        self.geo = CylindricalGeometry(self)
    else:
        self.geo = SphericalGeometry(self)

    # Initialize material handler now that geometry is initialized
    self.mat = Materials(self)

    # Initialize field variables
    self.fields = Fields(self)

    # Time parameters
    self.timeSteps = []
    self.time = 0.
    self.timeStep_num = 0
    self.Tf = input.Tf

    # Initialize hydro problem
    self.hydro = LagrangianHydro(self)

    # Initialize radiation problem (if used)
    self.radPredictor = LagrangianRadiationPredictor(self)
    self.radCorrector = LagrangianRadiationCorrector(self)

    # Init storage for energies in conservation check
    self.kinetic_energy = []
    self.internal_energy = []
    self.radiation_energy = []
    self.radiation_leakage = []
    self.work_energy = []
    self.total_energy = []

    # Compute initial energies.  Kinetic energy is accumulated over the
    # N+1 entries of u_IC/m_half; the i < N guard keeps the internal and
    # radiation terms within the N entries of the cell arrays.
    kinetic = 0
    internal = 0
    radiation = 0
    for i in range(self.geo.N + 1):
        # FIX: use 0.5 instead of 1 / 2 -- under Python 2 integer
        # division "1 / 2" is 0, zeroing the kinetic energy entirely.
        # Under Python 3 the two are identical.
        kinetic += 0.5 * self.mat.m_half[i] * self.fields.u_IC[i]**2
        if i < self.geo.N:
            internal += self.mat.m[i] * self.fields.e_IC[i]
            radiation += self.mat.m[i] * self.fields.E_IC[i] / self.fields.rho_IC[i]
    total = kinetic + internal + radiation

    self.kinetic_energy.append(kinetic)
    self.internal_energy.append(internal)
    self.radiation_energy.append(radiation)
    self.radiation_leakage.append(0)
    self.work_energy.append(0)
    self.total_energy.append(total)
    self.total_radiation_leakage = 0
    self.total_work_energy = 0
def parse(self, sellerid, starttime):
    """Yield one field dictionary per listing for *sellerid*.

    Fetches the seller's item set, pulls each listing's raw XML via
    ``self.get_xml``, and extracts one value per element of interest
    with pre-compiled regular expressions.

    NOTE(review): every unguarded ``[0]`` below raises IndexError (and
    kills the generator) when the element is missing from the XML --
    unchanged from the original behavior.
    """
    myset = ItemBySeller(sellerid, starttime).get_item()
    # One pre-compiled pattern per XML element we extract.
    patternCad = re.compile(r"<CategoryID>(.*?)</CategoryID>")
    patternCae = re.compile(r"<CategoryName>(.*?)</CategoryName>")
    patternCoy = re.compile(r'<Country>(.*?)</Country>')
    patternCuy = re.compile(r"<Currency>(.*?)</Currency>")
    patternCue = re.compile(r'<CurrentPrice currencyID="USD">(.*?)</CurrentPrice>')
    patternFee = re.compile(r'<FeedbackScore>(.*?)</FeedbackScore>')
    patternFer = re.compile(r"<FeedbackRatingStar>(.*?)</FeedbackRatingStar>")
    patternGal = re.compile(r"<GalleryURL>(.*?)</GalleryURL>")
    patternHit = re.compile(r"<HitCount>(.*?)</HitCount>")
    patternHir = re.compile(r'<HitCounter>(.*?)</HitCounter>')
    patternItd = re.compile(r"<ItemID>(.*?)</ItemID>")
    patternLon = re.compile(r"<Location>(.*?)</Location>")
    patternQud = re.compile(r"<QuantitySold>(.*?)</QuantitySold>")
    patternQue = re.compile(r"<QuantitySoldByPickupInStore>(.*?)</QuantitySoldByPickupInStore>")
    patternSht = re.compile(r'<ShippingServiceCost currencyID="USD">(.*?)</ShippingServiceCost>')
    patternShe = re.compile(r"<ShippingService>(.*?)</ShippingService>")
    patternSku = re.compile(r"<SKU>(.*?)</SKU>")
    patternSte = re.compile(r"<StartTime>(.*?)</StartTime>")
    patternStr = re.compile(r"<StoreOwner>(.*?)</StoreOwner>")
    patternStl = re.compile(r"<StoreURL>(.*?)</StoreURL>")
    patternTie = re.compile(r'<Title>(.*?)</Title>')
    patternUsd = re.compile(r'<UserID>(.*?)</UserID>')
    patternUse = re.compile(r'<Site>(.*?)</Site>')
    patternVil = re.compile(r'<ViewItemURL>(.*?)</ViewItemURL>')
    # FIX: non-greedy group, consistent with every other pattern here and
    # with the sibling parsers; the original greedy (.*) would span to
    # the last closing tag if a response contained several
    # ListingDuration elements.
    patternLin = re.compile(r'<ListingDuration>(.*?)</ListingDuration>')
    for xml in self.get_xml(myset):
        expfileds = Fields()
        fd = expfileds.fielddic
        fd['categoryid'] = re.findall(patternCad, xml)[0]
        fd['categoryname'] = re.findall(patternCae, xml)[0]
        fd['country'] = re.findall(patternCoy, xml)[0]
        fd['currency'] = re.findall(patternCuy, xml)[0]
        fd['currentprice'] = re.findall(patternCue, xml)[0]
        fd['feedbackscore'] = re.findall(patternFee, xml)[0]
        fd['feedbackstar'] = re.findall(patternFer, xml)[0]
        fd['galleryurl'] = re.findall(patternGal, xml)[0]
        fd['hitcount'] = re.findall(patternHit, xml)[0]
        fd['hitcounter'] = re.findall(patternHir, xml)[0]
        fd['itemid'] = re.findall(patternItd, xml)[0]
        fd['location'] = re.findall(patternLon, xml)[0]
        fd['quantitysold'] = re.findall(patternQud, xml)[0]
        fd['quantitysoldinstore'] = re.findall(patternQue, xml)[0]
        fd['shippingcost'] = re.findall(patternSht, xml)[0]
        fd['shippingservice'] = re.findall(patternShe, xml)[0]
        # FIX: every other field stores the first matched string, but the
        # original stored the raw list from re.findall for 'sku' and
        # 'listduration' (the sibling parsers in this file store strings
        # for both).  Take the first match, with an empty-string fallback
        # so listings lacking the element don't kill the generator.
        sku_matches = re.findall(patternSku, xml)
        fd['sku'] = sku_matches[0] if sku_matches else ''
        fd['starttime'] = re.findall(patternSte, xml)[0]
        fd['storeowner'] = re.findall(patternStr, xml)[0]
        fd['storeurl'] = re.findall(patternStl, xml)[0]
        fd['title'] = re.findall(patternTie, xml)[0]
        fd['userid'] = re.findall(patternUsd, xml)[0]
        fd['usersite'] = re.findall(patternUse, xml)[0]
        fd['viewitemurl'] = re.findall(patternVil, xml)[0]
        dur_matches = re.findall(patternLin, xml)
        fd['listduration'] = dur_matches[0] if dur_matches else ''
        yield expfileds.fielddic
def sublist_with_all_of_same(self, lists):
    """Return the first player for whom some sublist is entirely theirs.

    Players are checked in ``Fields.players()`` order; the first player
    with at least one fully-owned sublist is returned, or None when no
    player qualifies.
    """
    for candidate in Fields.players():
        if any(self.all_of_same(group, candidate) for group in lists):
            return candidate
    return None
def parse(self,sellerid,timefrom,timeto):
    """Yield one field dictionary per listing of *sellerid* started
    between *timefrom* and *timeto*.

    Nothing is yielded when the GetList call fails (returns None) or
    when the returned set is empty.
    """
    getlist=GetList()
    myset=getlist.get_list(sellerid,starttimefrom=timefrom,starttimeto=timeto)
    # One pre-compiled pattern per XML element extracted below.  The two
    # money patterns anchor on the numeric text before the closing tag,
    # so values without a decimal point would not match.
    patternCad=re.compile(r"<CategoryID>(.*?)</CategoryID>")
    patternCae=re.compile(r"<CategoryName>(.*?)</CategoryName>")
    patternCoy=re.compile(r'<Country>(.*?)</Country>')
    patternCuy=re.compile(r"<Currency>(.*?)</Currency>")
    patternCue=re.compile(r'>(\d+\.\d+)</CurrentPrice>')
    patternFee=re.compile(r'<FeedbackScore>(.*?)</FeedbackScore>')
    patternFer=re.compile(r"<FeedbackRatingStar>(.*?)</FeedbackRatingStar>")
    patternGal=re.compile(r"<GalleryURL>(.*?)</GalleryURL>")
    patternHit=re.compile(r"<HitCount>(.*?)</HitCount>")
    patternHir=re.compile(r'<HitCounter>(.*?)</HitCounter>')
    patternItd=re.compile(r"<ItemID>(.*?)</ItemID>")
    patternLon=re.compile(r"<Location>(.*?)</Location>")
    patternQud=re.compile(r"<QuantitySold>(.*?)</QuantitySold>")
    patternQue=re.compile(r"<QuantitySoldByPickupInStore>(.*?)</QuantitySoldByPickupInStore>")
    patternSht=re.compile(r'>(\d+\.\d+)</ShippingServiceCost>')
    patternShe=re.compile(r"<ShippingService>(.*?)</ShippingService>")
    patternSku=re.compile(r"<SKU>(.*?)</SKU>")
    patternSte=re.compile(r"<StartTime>(.*?)</StartTime>")
    patternStr=re.compile(r"<StoreOwner>(.*?)</StoreOwner>")
    patternStl=re.compile(r"<StoreURL>(.*?)</StoreURL>")
    patternTie=re.compile(r'<Title>(.*?)</Title>')
    patternUsd=re.compile(r'<UserID>(.*?)</UserID>')
    patternUse=re.compile(r'<Site>(.*?)</Site>')
    patternVil=re.compile(r'<ViewItemURL>(.*?)</ViewItemURL>')
    patternLin=re.compile(r'<ListingDuration>(.*?)</ListingDuration>')
    patternPrg=re.compile(r'<PrivateListing>(.*?)</PrivateListing>')
    if isinstance(myset,type(None)):
        # The GetList call itself failed; report and yield nothing.
        print 'So I get nothing from Getlist Call'
        # failedlist=[]
        # failedlist.append(sellerid)
        # failedlist.append(timefrom)
        # failedlist.append(timeto)
        # self.failedqueue.put(failedlist)
        # print 'And this request have been put into the failed queue'
    elif len(myset)==0:
        # Call succeeded but no listings fall in the requested window.
        print 'No listings occur between %s and %s' % (timefrom,timeto)
    else:
        for xml in self.get_xml(myset):
            expfileds=Fields()
            # NOTE(review): each unguarded [0] raises IndexError (and
            # kills the generator) when the element is missing.
            expfileds.fielddic['categoryid']=re.findall(patternCad,xml)[0]
            # SKU is truncated to its first 11 characters.
            expfileds.fielddic['sku']=re.findall(patternSku,xml)[0][:11]
            expfileds.fielddic['categoryname']=re.findall(patternCae,xml)[0]
            expfileds.fielddic['country']=re.findall(patternCoy,xml)[0]
            expfileds.fielddic['currency']=re.findall(patternCuy,xml)[0]
            try:
                expfileds.fielddic['currentprice']=re.findall(patternCue,xml)[0]
            except IndexError:
                # NOTE(review): only logs the XML; 'currentprice' is then
                # absent from the yielded dict -- confirm downstream
                # consumers tolerate the missing key.
                print xml
            expfileds.fielddic['feedbackscore']=re.findall(patternFee,xml)[0]
            expfileds.fielddic['feedbackstar']=re.findall(patternFer,xml)[0]
            expfileds.fielddic['galleryurl']=re.findall(patternGal,xml)[0]
            expfileds.fielddic['hitcount']=re.findall(patternHit,xml)[0]
            expfileds.fielddic['hitcounter']=re.findall(patternHir,xml)[0]
            expfileds.fielddic['itemid']=re.findall(patternItd,xml)[0]
            expfileds.fielddic['location']=re.findall(patternLon,xml)[0]
            expfileds.fielddic['quantitysold']=re.findall(patternQud,xml)[0]
            expfileds.fielddic['quantitysoldinstore']=re.findall(patternQue,xml)[0]
            try:
                expfileds.fielddic['shippingcost']=re.findall(patternSht,xml)[0]
            except IndexError:
                # Same best-effort handling as 'currentprice': log only.
                print xml
            expfileds.fielddic['shippingservice']=re.findall(patternShe,xml)[0]
            expfileds.fielddic['starttime']=re.findall(patternSte,xml)[0]
            expfileds.fielddic['storeowner']=re.findall(patternStr,xml)[0]
            expfileds.fielddic['storeurl']=re.findall(patternStl,xml)[0]
            expfileds.fielddic['title']=re.findall(patternTie,xml)[0]
            expfileds.fielddic['userid']=re.findall(patternUsd,xml)[0]
            expfileds.fielddic['usersite']=re.findall(patternUse,xml)[0]
            expfileds.fielddic['viewitemurl']=re.findall(patternVil,xml)[0]
            expfileds.fielddic['listduration']=re.findall(patternLin,xml)[0]
            expfileds.fielddic['privatelisting']=re.findall(patternPrg,xml)[0]
            yield expfileds.fielddic
def parse(self,myxml):
    """Parse a single listing's raw XML into a field dictionary.

    Most fields fall back to a placeholder value when their pattern does
    not match; the remainder are wrapped by the outer try, which prints
    a message and implicitly returns None on any failure.  None is also
    returned when *myxml* is falsy.
    """
    expfileds=Fields()
    # One pre-compiled pattern per XML element.  The two money patterns
    # anchor on the numeric text before the closing tag.
    patternCad=re.compile(r"<CategoryID>(.*?)</CategoryID>")
    patternCae=re.compile(r"<CategoryName>(.*?)</CategoryName>")
    patternCoy=re.compile(r'<Country>(.*?)</Country>')
    patternCuy=re.compile(r"<Currency>(.*?)</Currency>")
    patternCue=re.compile(r'>(\d+\.\d+)</CurrentPrice>')
    patternFee=re.compile(r'<FeedbackScore>(.*?)</FeedbackScore>')
    patternFer=re.compile(r"<FeedbackRatingStar>(.*?)</FeedbackRatingStar>")
    patternGal=re.compile(r"<GalleryURL>(.*?)</GalleryURL>")
    patternHit=re.compile(r"<HitCount>(.*?)</HitCount>")
    patternHir=re.compile(r'<HitCounter>(.*?)</HitCounter>')
    patternItd=re.compile(r"<ItemID>(.*?)</ItemID>")
    patternLon=re.compile(r"<Location>(.*?)</Location>")
    patternQud=re.compile(r"<QuantitySold>(.*?)</QuantitySold>")
    patternQue=re.compile(r"<QuantitySoldByPickupInStore>(.*?)</QuantitySoldByPickupInStore>")
    patternSht=re.compile(r'>(\d+\.\d+)</ShippingServiceCost>')
    patternShe=re.compile(r"<ShippingService>(.*?)</ShippingService>")
    patternSku=re.compile(r"<SKU>(.*?)</SKU>")
    patternSte=re.compile(r"<StartTime>(.*?)</StartTime>")
    patternStr=re.compile(r"<StoreOwner>(.*?)</StoreOwner>")
    patternStl=re.compile(r"<StoreURL>(.*?)</StoreURL>")
    patternTie=re.compile(r'<Title>(.*?)</Title>')
    patternUsd=re.compile(r'<UserID>(.*?)</UserID>')
    patternUse=re.compile(r'<Site>(.*?)</Site>')
    patternVil=re.compile(r'<ViewItemURL>(.*?)</ViewItemURL>')
    patternLin=re.compile(r'<ListingDuration>(.*?)</ListingDuration>')
    patternPrg=re.compile(r'<PrivateListing>(.*?)</PrivateListing>')
    patternLis=re.compile(r'<ListingStatus>(.*?)</ListingStatus>')
    xml=myxml
    if xml:
        try:
            # Per-field fallbacks.  NOTE(review): the placeholder strings
            # below (including the misspellings 'unkown'/'unknow') are
            # live runtime values -- do not "fix" the spelling without
            # checking downstream comparisons.
            try:
                expfileds.fielddic['categoryid']=re.findall(patternCad,xml)[0]
            except Exception as e:
                print e
                expfileds.fielddic['categoryid']="000000"
            try:
                # SKU is truncated to its first 11 characters.
                expfileds.fielddic['sku']=re.findall(patternSku,xml)[0][:11]
            except Exception as e:
                print e
                expfileds.fielddic['sku']='Nosku'
            try:
                expfileds.fielddic['categoryname']=re.findall(patternCae,xml)[0]
            except Exception as e:
                print e
                expfileds.fielddic['categoryname']="ebay"
            try:
                expfileds.fielddic['country']=re.findall(patternCoy,xml)[0]
            except Exception as e:
                print e
                expfileds.fielddic['country']="Universe"
            try:
                expfileds.fielddic['currency']=re.findall(patternCuy,xml)[0]
            except Exception as e:
                print e
                expfileds.fielddic['currency']="unkown"
            try:
                expfileds.fielddic['currentprice']=re.findall(patternCue,xml)[0]
            except Exception as e:
                print e
                expfileds.fielddic['currentprice']='0'
            try:
                expfileds.fielddic['feedbackscore']=re.findall(patternFee,xml)[0]
            except Exception as e:
                print e
                expfileds.fielddic['feedbackscore']='0'
            try:
                expfileds.fielddic['feedbackstar']=re.findall(patternFer,xml)[0]
            except Exception as e:
                print e
                expfileds.fielddic['feedbackstar']="unknow"
            try:
                expfileds.fielddic['galleryurl']=re.findall(patternGal,xml)[0]
            except Exception as e:
                print e
                expfileds.fielddic['galleryurl']="unknow"
            # This group shares one handler: any failure abandons the
            # remaining fields of the group (no fallback values are set).
            try:
                expfileds.fielddic['starttime']=re.findall(patternSte,xml)[0]
                expfileds.fielddic['hitcount']=re.findall(patternHit,xml)[0]
                expfileds.fielddic['hitcounter']=re.findall(patternHir,xml)[0]
                expfileds.fielddic['itemid']=re.findall(patternItd,xml)[0]
                expfileds.fielddic['location']=re.findall(patternLon,xml)[0]
                expfileds.fielddic['quantitysold']=re.findall(patternQud,xml)[0]
                expfileds.fielddic['quantitysoldinstore']=re.findall(patternQue,xml)[0]
            except Exception as e:
                print e
            try:
                expfileds.fielddic['shippingcost']=re.findall(patternSht,xml)[0]
            except IndexError:
                # print xml
                expfileds.fielddic['shippingcost']="0"
            # NOTE(review): the remaining unguarded [0] extractions fall
            # through to the outer except when their element is missing,
            # discarding the entire partially-built dictionary.
            expfileds.fielddic['shippingservice']=re.findall(patternShe,xml)[0]
            # NOTE(review): 'starttime' was already set in the grouped
            # try above; this second, unguarded assignment is duplicated.
            expfileds.fielddic['starttime']=re.findall(patternSte,xml)[0]
            expfileds.fielddic['storeowner']=re.findall(patternStr,xml)[0]
            try:
                expfileds.fielddic['storeurl']=re.findall(patternStl,xml)[0]
            except:
                expfileds.fielddic['storeurl']='Nostoreurl'
            expfileds.fielddic['title']=re.findall(patternTie,xml)[0]
            expfileds.fielddic['userid']=re.findall(patternUsd,xml)[0]
            expfileds.fielddic['usersite']=re.findall(patternUse,xml)[0]
            expfileds.fielddic['viewitemurl']=re.findall(patternVil,xml)[0]
            expfileds.fielddic['listduration']=re.findall(patternLin,xml)[0]
            expfileds.fielddic['privatelisting']=re.findall(patternPrg,xml)[0]
            expfileds.fielddic['listingstatus']=re.findall(patternLis,xml)[0]
            return expfileds.fielddic
        except Exception as e:
            # Any unguarded failure above lands here; the caller then
            # receives None.
            print "can't parse xml"
def fields(self) -> Fields:
    """Construct and return a brand-new Fields instance."""
    fresh = Fields()
    return fresh