import logging

from config import ConfigSection
from evelink.account import Account
from evelink.api import API
from evelink.cache.sqlite import SqliteCache
from evelink.char import Char
from evelink.eve import EVE

_config = ConfigSection('eveapi')
_log = logging.getLogger('sound.srp.be.eveapi')


def get_key_config(key_name):
    key_config = ConfigSection('apikey:%s' % key_name)
    key_id = int(key_config.get_option('key_id'))
    v_code = key_config.get_option('v_code')
    entity_id = key_config.get_option('id')
    if entity_id is not None:
        entity_id = int(entity_id)
    return key_id, v_code, entity_id


def get_api_key(key):
    if isinstance(key, API):
        return key
    key_id, v_code, entity_id = get_key_config(key)
    return API(api_key=(key_id, v_code),
               cache=SqliteCache(_config.get_option('cache_location')))


def get_characters(key):
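A minimal usage sketch for the helpers above; the key name 'MYCORP' and its [apikey:MYCORP] config section are illustrative assumptions, not part of the module:

from eveapi import get_api_key, get_key_config

key_id, v_code, entity_id = get_key_config('MYCORP')  # reads the [apikey:MYCORP] section
api = get_api_key('MYCORP')  # wraps the key in an evelink API client with a sqlite cache
api = get_api_key(api)       # an existing API object passes through unchanged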
def __setitem__(self, option, value):
    ConfigSection.__setitem__(self, option, value)
    if option == 'secret':
        reload()
import logging
import sys

from config import ConfigSection
from datastore import Character, Corporation, KillMail, Payment, PaymentDetail, Control
from datetime import datetime, timedelta

import eveapi

_config = ConfigSection('paymentconsolidator')
_log = logging.getLogger('sound.srp.be.paymentconsolidator')


def process(kill, payment):
    if kill.srp_amount is None:
        return False
    if kill.payments is None:
        kill.payments = []
    check = sum([p.amount for p in kill.payments])
    if check == kill.srp_amount:
        return False
    _log.info('Processing kill %d, payment %d, srp_amount %d, paid_amount %d' %
              (kill.kill_id, payment.payment_id, kill.srp_amount, kill.paid_amount or 0))
    current = kill.srp_amount - check
    detail = None
    for det in kill.payments:
        if det.payment_id == payment.payment_id:
            detail = det
    if current != 0:
        payment.payment_amount += current
        if detail is None:
            detail = PaymentDetail(payment.payment_id, kill.kill_id, current)
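A worked example of the consolidation arithmetic in process(); the SimpleNamespace stubs stand in for the real datastore models and the amounts are illustrative:

from types import SimpleNamespace

payment = SimpleNamespace(payment_id=7, payment_amount=40000000)
detail = SimpleNamespace(payment_id=7, amount=40000000)
kill = SimpleNamespace(kill_id=1, srp_amount=100000000,
                       paid_amount=40000000, payments=[detail])
# check = 40M already recorded as details, so current = 100M - 40M = 60M:
# process() bumps payment.payment_amount to 100M, and because a detail for
# payment 7 already exists, no new PaymentDetail is created.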
def main():
    from config import ConfigSectionKey, ConfigSection
    # Note: json, Queue, and ObjectstoreNotification are imported at module
    # top (not shown in this excerpt).

    # load cloud specific values
    config = ConfigSectionKey('conf', 'local')
    # config.load('connect.ini')
    config.load_file('../conf/azure.ini')
    cloud = config('cloud')

    # load cloud test specific values
    cloud_config = ConfigSection('conf', 'local')
    cloud_config.load_file('../conf/cloud.cfg')
    # test_queue_message = cloud_config('test_queue_message_1')

    """objectstore logic"""
    # objectstore = Objectstore(cloud.account_id, cloud)
    # objectstore.put('C:/test/test.txt', 'test.txt')
    # objectstore.get('../tmp/readwrite_downloaded.txt', 'readwrite.txt')
    # objectstore.get('C:/udp-app-master/dev/tests/working', 'test.txt')
    # objectstore.delete('test.txt')
    # file_check = filecmp.cmp(f1='C:/test/test.txt', f2='C:/test/get/test.txt', shallow=False)
    # print(file_check)

    """Queue logic"""
    queue = Queue(cloud.account_id, cloud)
    # queue_names = queue._list_queue_names()
    # print(queue_names)

    # IMPORTANT: The Queue delete method parameter should now be an
    # ObjectstoreNotification object instead of just the message_id of the object.
    # queue.delete(notification)

    # queue.encode_function = QueueMessageFormat.text_base64encode
    queue_message = '{"Message":"Hello World"}'
    # encoded_message = str(base64.b64encode(queue_message.encode('utf-8')))
    queue_message = {
        "topic": "test_queue_message",
        "subject": "/This/Is/A/Test/Message/TestMessage",
        "eventType": "test_queue_message",
        "eventTime": "",
        "id": "",
        "data": {
            "api": "",
            "clientRequestId": "",
            "requestId": "",
            "eTag": "",
            "contentType": "",
            "contentLength": 0,
            "blobType": "",
            "url": "",
            "sequencer": "",
            "storageDiagnostics": {
                "batchId": ""
            }
        },
        "dataVersion": "",
        "metadataVersion": "1"
    }
    json_queue_message = json.dumps(queue_message)
    # response = queue.get()
    # notification = ObjectstoreNotification(response)
    queue.put(json_queue_message)
    response = queue.get()
    notification = ObjectstoreNotification(response)
    queue.delete(notification)
    """
import googledatastore
import logging
import calendar
import sha

from googledatastore.helper import *
from datetime import datetime

from config import ConfigSection

_config = ConfigSection('datastore')
_log = logging.getLogger('sound.be.datastore')

googledatastore.set_options(dataset=_config.get_option('dataset'))


def _date_to_timestamp(date):
    return long(calendar.timegm(date.utctimetuple()) * 1000000L) + date.microsecond


class _BaseEntity(object):
    """
    Base class for all entities. Handles the conversion between the simple
    model objects defined in this module and google cloud datastore entity
    objects.
    """

    def _sub_entities(self):
        """
        Get a dictionary of sub entities of the current entity.

        The keys of the dictionary are the field names in the current entity.
        The values are the model class used by that field.

        By default this is an empty dictionary unless overridden in a subclass.
        """
        return dict()
import logging

from beaker.cache import CacheManager
from beaker.util import parse_cache_config_options
from config import ConfigSection
from sqlalchemy import *
from sqlalchemy.orm import create_session
from sqlalchemy.ext.declarative import declarative_base

_config = ConfigSection('staticdata')
_log = logging.getLogger('sound.srp.be.staticdata')

Base = declarative_base()
engine = create_engine(_config.get_option('connection_string'))
metadata = MetaData(bind=engine)
session = create_session(bind=engine)
cache = CacheManager(**parse_cache_config_options(
    {'cache.type': _config.get_option('cache_type')}))


class InvType(Base):
    __table__ = Table('invTypes', metadata, autoload=True)

    @staticmethod
    @cache.cache('InvType.by_id')
    def by_id(type_id):
        _log.debug('Get InvType by id: %d' % type_id)
        return session.query(InvType).filter_by(typeID=type_id).first()

    @staticmethod
    @cache.cache('InvType.by_name')
    def by_name(type_name):
        _log.debug('Get InvType by name: %s' % type_name)
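A usage sketch for the cached lookups above; 'Rifter' is an illustrative type name from the EVE static data:

ship = InvType.by_name('Rifter')
same = InvType.by_id(ship.typeID)  # repeat lookups for the same key are served from the beaker cache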
# Note: logger, ConfigSection, expand, quote, Object, and tableschema are
# module-level imports/helpers defined outside this excerpt.
class Database:

    def __init__(self, platform, conn):
        self.platform = platform
        self.conn = conn
        self.cursor = self.conn.cursor()
        self.sql = ConfigSection('../conf', '../local')
        self.sql.load(f'{platform}.cfg')
        # self.sql.dump()

        # TODO: This should come in another way
        if platform == 'postgresql':
            self.queryparm = '%s'
        else:
            self.queryparm = '?'

    @staticmethod
    def log(command_name, sql):
        single_line_sql = sql.replace('\n', r'\n')
        logger.debug(f'sql({command_name}): {single_line_sql}')

    # ConfigSection instances are callable, so self.sql(command_name) below
    # returns the SQL template registered under that command name.
    # def sql(self, command):
    #     return self.sql_config.sections[command]

    def is_null(self, sql_command):
        # noinspection PyUnusedLocal
        # Note: command_name used in embedded f-string.
        command_name = 'is_null'
        self.log(command_name, sql_command)
        self.cursor.execute(sql_command)
        row = self.cursor.fetchone()
        if row:
            # print(f'not_null(row[0]) = {row[0]}')
            return row[0] is None
        else:
            # print(f'not_null() - no row')
            return True

    def execute(self, command_name, value=None):
        # noinspection PyUnusedLocal
        queryparm = self.queryparm
        sql_template = self.sql(command_name)
        sql_command = expand(sql_template)
        if value is None:
            cursor = self.cursor.execute(sql_command)
        else:
            cursor = self.cursor.execute(sql_command, value)
        self.log(command_name, sql_command)
        return cursor

    # noinspection PyUnusedLocal
    def timestamp_literal(self, timestamp_value):
        # Note: timestamp_str used in embedded f-string.
        timestamp_str = f'{timestamp_value:%Y-%m-%d %H:%M:%S}'
        command_name = 'timestamp_literal'
        sql_template = self.sql(command_name)
        # we evaluate the expression in Python vs via the database engine
        return expand(sql_template)

    # noinspection PyUnusedLocal
    def current_timestamp(self, timezone=None):
        command_name = 'current_timestamp'
        sql_template = self.sql(command_name)
        sql_command = expand(sql_template)
        self.cursor.execute(sql_command)
        return self.cursor.fetchone()[0]

    def current_rowversion(self, table_name):
        # Based on MS RowVersion CDC.
        raise NotImplementedError(
            f'MS RowVersion CDC not supported yet ({table_name})')

    # noinspection PyUnusedLocal
    def current_sequence(self, table_name):
        # Note: Based on Siriusware proprietary CDC vs MS RowVersion CDC.
        command_name = 'current_sequence'
        sql_template = self.sql(command_name)
        sql_command = expand(sql_template)
        self.log(command_name, sql_command)
        self.cursor.execute(sql_command)  # execute before fetching the sequence value
        return self.cursor.fetchone()[0]

    # noinspection PyUnusedLocal
    def does_database_exist(self, database_name):
        command_name = 'does_database_exist'
        sql_template = self.sql(command_name)
        sql_command = expand(sql_template)
        self.log(command_name, sql_command)
        return not self.is_null(sql_command)

    def create_database(self, database_name):
        command_name = 'create_database'
        if not self.does_database_exist(database_name):
            autocommit = self.conn.autocommit
            self.conn.autocommit = True
            sql_template = self.sql(command_name)
            sql_command = expand(sql_template)
            self.log(command_name, sql_command)
            self.cursor.execute(sql_command)
            self.conn.autocommit = autocommit

    # noinspection PyUnusedLocal
    # Note: database_name used in embedded f-strings.
    def use_database(self, database_name):
        command_name = 'use_database'
        sql_template = self.sql(command_name)
        sql_command = expand(sql_template)
        self.log(command_name, sql_command)
        self.cursor.execute(sql_command)

    # noinspection PyUnusedLocal
    # Note: schema_name used in embedded f-string.
    def does_schema_exist(self, schema_name):
        command_name = 'does_schema_exist'
        sql_template = self.sql(command_name)
        sql_command = expand(sql_template)
        self.log(command_name, sql_command)
        return not self.is_null(sql_command)

    def create_schema(self, schema_name):
        command_name = 'create_schema'
        if not self.does_schema_exist(schema_name):
            autocommit = self.conn.autocommit
            self.conn.autocommit = True
            sql_template = self.sql(command_name)
            sql_command = expand(sql_template)
            self.log(command_name, sql_command)
            self.cursor.execute(sql_command)
            self.conn.autocommit = autocommit

    # noinspection PyUnusedLocal
    # Note: schema_name, table_name used in embedded f-strings.
    # Note: Treats views as tables.
    def does_table_exist(self, schema_name, table_name):
        command_name = 'does_table_exist'
        sql_template = self.sql(command_name)
        sql_command = expand(sql_template)
        self.log(command_name, sql_command)
        return not self.is_null(sql_command)

    def select_table_schema(self, schema_name, table_name):
        command_name = 'select_table_schema'
        if not self.does_table_exist(schema_name, table_name):
            # print(f'Table does not exist: {schema_name}.{table_name}')
            return None
        else:
            sql_template = self.sql(command_name)
            sql_command = expand(sql_template)
            self.log(command_name, sql_command)
            self.cursor.execute(sql_command)

            # 2018-05-29 - make sure pickled table schema is not tied to database client
            rows = self.cursor.fetchall()
            column_names = [column[0] for column in self.cursor.description]
            columns = []
            for row in rows:
                column = Object()
                columns.append(column)
                for column_name in column_names:
                    value = getattr(row, column_name)
                    setattr(column, column_name, value)

            # return Table(table_name, self.cursor.fetchall())
            return tableschema.TableSchema(table_name, columns)

    def select_table_pk(self, schema_name, table_name):
        """Returns a comma delimited string of sorted pk column names or '' if no pk is defined."""
        command_name = 'select_table_pk'
        if not self.does_table_exist(schema_name, table_name):
            # print(f'Table does not exist: {schema_name}.{table_name}')
            return None
        else:
            sql_template = self.sql(command_name)
            sql_command = expand(sql_template)
            self.log(command_name, sql_command)
            self.cursor.execute(sql_command)
            rows = self.cursor.fetchall()
            if not rows:
                pk_columns = ''
            else:
                pk_columns = sorted([row[0] for row in rows])
                pk_columns = ', '.join(pk_columns)
            return pk_columns

    def create_table_from_table_schema(self, schema_name, table_name, table, extended_definitions=None):
        command_name = 'create_table_from_table_schema'
        if not self.does_table_exist(schema_name, table_name):
            autocommit = self.conn.autocommit
            self.conn.autocommit = True
            # noinspection PyUnusedLocal
            # Note: column_definitions used in embedded f-strings.
            column_definitions = table.column_definitions(extended_definitions)
            sql_template = self.sql(command_name)
            sql_command = expand(sql_template)
            # print(f'create_table_from_table_schema:\n{sql_command}\n')
            self.log(command_name, sql_command)
            self.cursor.execute(sql_command)
            self.conn.autocommit = autocommit

    # TODO: Replace schema_name, table_name with [command_name].
    def create_named_table(self, schema_name, table_name):
        command_name = f'create_named_table_{schema_name}_{table_name}'
        if not self.does_table_exist(schema_name, table_name):
            autocommit = self.conn.autocommit
            self.conn.autocommit = True
            sql_template = self.sql(command_name)
            sql_command = expand(sql_template)
            self.log(command_name, sql_command)
            self.cursor.execute(sql_command)
            self.conn.autocommit = autocommit

    def drop_table(self, schema_name, table_name):
        command_name = 'drop_table'
        if self.does_table_exist(schema_name, table_name):
            autocommit = self.conn.autocommit
            self.conn.autocommit = True
            sql_template = self.sql(command_name)
            sql_command = expand(sql_template)
            self.log(command_name, sql_command)
            self.cursor.execute(sql_command)
            self.conn.autocommit = autocommit

    # applies to session vs global temp tables
    def drop_temp_table(self, table_name):
        command_name = 'drop_temp_table'
        # strip optional leading #'s from table name since our SQL template includes #
        # FIX: This means we strip ##global_temp as well as #local_temp
        # noinspection PyUnusedLocal
        table_name = table_name.strip('#')
        autocommit = self.conn.autocommit
        self.conn.autocommit = True
        sql_template = self.sql(command_name)
        sql_command = expand(sql_template)
        self.log(command_name, sql_command)
        self.cursor.execute(sql_command)
        self.conn.autocommit = autocommit

    # noinspection PyUnusedLocal
    # Note: schema_name, table_name used in embedded f-strings.
    def insert_into_table(self, schema_name, table_name, **column_names_values):
        command_name = 'insert_into_table'
        column_names = ', '.join(quote(column_names_values.keys()))
        column_placeholders = ', '.join([self.queryparm] * len(column_names_values))
        column_values = column_names_values.values()
        autocommit = self.conn.autocommit
        self.conn.autocommit = True
        sql_template = self.sql(command_name)
        sql_command = expand(sql_template)
        self.log(command_name, sql_command)
        self.cursor.execute(sql_command, *column_values)
        self.conn.autocommit = autocommit

    # noinspection PyUnusedLocal
    # Note: schema_name, table_name used in embedded f-strings.
    def bulk_insert_into_table(self, schema_name, table_name, table_schema, rows, extended_definitions=None):
        command_name = 'insert_into_table'
        # insert extended column definitions into schema
        if extended_definitions:
            table_schema.column_definitions(extended_definitions)
        column_names = ', '.join(quote(table_schema.columns.keys()))
        # print(f'column_names: {column_names}')
        column_placeholders = ', '.join([self.queryparm] * len(table_schema.columns))
        autocommit = self.conn.autocommit
        self.conn.autocommit = False
        sql_template = self.sql(command_name)
        sql_command = expand(sql_template)
        self.log(command_name, sql_command)
        self.cursor.fast_executemany = True
        row_count = self.cursor.executemany(sql_command, rows)
        self.cursor.commit()
        self.conn.autocommit = autocommit
        return row_count

    # noinspection PyUnusedLocal
    # Note: schema_name, table_name used in embedded f-strings.
    def capture_select(self, schema_name, table_name, column_names, last_timestamp=None, current_timestamp=None):
        command_name = 'capture_select'
        column_names = ', '.join(quote(column_names))
        autocommit = self.conn.autocommit
        if self.platform == 'mssql':
            self.conn.autocommit = True
        sql_template = self.sql(command_name)
        # print(f'\ncapture_select.sql_template:\n{sql_template}\n')
        sql_command = expand(sql_template)
        self.log(command_name, sql_command)
        self.cursor.execute(sql_command)
        if self.platform == 'mssql':
            self.conn.autocommit = autocommit
        return self.cursor

    # noinspection PyUnusedLocal
    # Note: schema_name, table_name used in embedded f-strings.
    def delete_where(self, schema_name, table_name, value):
        command_name = 'delete_where'
        sql_template = self.sql(command_name)
        sql_command = expand(sql_template)
        self.log(command_name, sql_command)
        self.cursor.execute(sql_command)

    # FUTURE:
    # insert into
    # update
    # merge

    '''
    [get_pk]
    select {pk_column_name} as pk
    __from {schema_name}.{table_name}
    __where {nk_column_name} = {nk_column_value};

    [insert_pk]
    insert into {schema_name}.{table_name}
    __{column_names}
    __values
    __{column_values};
    '''

    # Future: cache pk requests in a local session dict; pk's will never change once issued
    # noinspection PyUnusedLocal
    # Note: schema_name, table_name used in embedded f-strings.
    def get_pk(self, schema_name, table_name, pk_column_name, nk_column_name, **key_values):
        command_name = 'get_pk'
        pk_conditions = list()
        for key, value in key_values.items():
            pk_conditions.append(f'{key}={value}')
        pk_conditions = ' and '.join(pk_conditions)
        autocommit = self.conn.autocommit
        self.conn.autocommit = True
        sql_template = self.sql(command_name)
        sql_command = expand(sql_template)
        self.log(command_name, sql_command)
        self.cursor.execute(sql_command)
        self.conn.autocommit = autocommit
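A hedged usage sketch for the Database wrapper; the pyodbc DSN, database, schema, and table names below are illustrative assumptions, and each call requires a matching SQL template in the platform's .cfg file:

import pyodbc

conn = pyodbc.connect('DSN=udp_target')  # hypothetical DSN
db = Database('mssql', conn)             # loads ../conf/mssql.cfg SQL templates
db.create_database('udp_stage')          # no-op if the database already exists
db.use_database('udp_stage')
schema = db.select_table_schema('dbo', 'customers')  # None if the table is missing
print(db.current_timestamp())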
import json
import logging

import market
import valuer
import web

from config import ConfigSection
from datastore import KillMail, Victim, Attacker, Item, GroupedItems, ShipClass
from datetime import datetime
from staticdata import InvType, InvFlag, MapSolarSystem

_config = ConfigSection('crest')
_log = logging.getLogger('sound.srp.be.crest')

alliance_id = int(_config.get_option('alliance_id'))


def get_killmail(kill):
    url = None
    if type(kill) is str:
        url = kill
    elif type(kill) is KillMail:
        url = kill.crest_url
    elif type(kill) is int:
        kill = KillMail(kill)
        url = kill.crest_url
    if url is None:
        return None
    data = web.fetch_url(url)
    return json.loads(data)


def get_ship_class(ship):
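A usage note for get_killmail(): it accepts a CREST URL string, a KillMail object, or a kill id (the id below is illustrative, and the lookup returns None unless the stored kill has a crest_url):

km = get_killmail(12345)  # loads KillMail(12345) and fetches its crest_url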
from evelink.cache.sqlite import SqliteCache
from evelink.corp import Corp
from evelink.map import Map
from collections import defaultdict, namedtuple
from datetime import datetime, timedelta

import logging
import sys

from config import ConfigSection
from datastore import Corporation, Tower, Reactor, Reactant, Silo
from eveapi import get_api_key, get_key_config
from staticdata import InvType, InvGroup, InvTypeReaction, InvControlTowerResource, DgmAttributeTypes, DgmTypeAttributes, MapDenormalize

one_hour = timedelta(hours=1)

_config = ConfigSection('posimporter')
_log = logging.getLogger('sound.posmon.be.main')

multiplierAttribute = DgmAttributeTypes.by_name('moonMiningAmount')


def find(predicate, collection):
    if not collection:
        return None, None
    for idx, elem in enumerate(collection):
        if predicate(elem):
            return idx, elem
    return None, None


def get_api_keys():
    api_keys = dict()
    for key in _config.get_option('keys').split(','):
        api_keys[key] = get_api_key(key)
    return api_keys
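The find() helper returns the first matching (index, element) pair, or (None, None) when nothing matches; a quick self-contained example:

idx, elem = find(lambda x: x > 2, [1, 2, 3, 4])  # -> (2, 3)
idx, elem = find(lambda x: x > 9, [1, 2, 3, 4])  # -> (None, None)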
import logging
import urllib2
import zlib

from config import ConfigSection

_config = ConfigSection('web')
_log = logging.getLogger('sound.srp.be.web')


def fetch_url(path):
    _log.debug('Fetching url: ' + path)
    resp = None
    try:
        req = urllib2.Request(path)
        req.add_header('Accept-Encoding', 'gzip')
        req.add_header('User-agent', _config.get_option('user_agent'))
        resp = urllib2.urlopen(req)
    except urllib2.HTTPError as e:
        _log.error('HTTPError(%d): %s' % (e.code, e.reason))
        raise e
    except urllib2.URLError as e:
        _log.error('URLError: %s' % e.reason)
        raise e
    try:
        content = resp.read()
        if resp.info().get('Content-Encoding') == 'gzip':
            _log.debug('Decompressing response.')
            content = zlib.decompress(content, 32 + zlib.MAX_WBITS)
        return content
    finally:
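A usage sketch (this module is Python 2, hence urllib2); the URL is illustrative, and gzip-encoded responses are decompressed transparently:

content = fetch_url('https://zkillboard.com/api/killID/12345/')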
import itertools
import json
import logging

import market
import sha
import valuer
import web

from config import ConfigSection
from datastore import KillMail, Victim, Attacker, Item, GroupedItems, ShipClass, Character
from datetime import datetime, timedelta
from staticdata import MapSolarSystem, InvType, InvFlag

_config = ConfigSection('zkillboard')
_log = logging.getLogger('sound.srp.be.zkillboard')

alliance_id = int(_config.get_option('alliance_id'))


def get_kill(kill_id):
    _log.info('Getting killmail by id: %d.' % kill_id)
    url = 'https://zkillboard.com/api/killID/%d/' % kill_id
    content = web.fetch_url(url)
    kills = json.loads(content)
    return kills[0] if kills and len(kills) > 0 else None


def get_crest_hash(kill_id):
    _log.info('Getting killmail by id: %d.' % kill_id)
    url = 'https://zkillboard.com/api/killID/%d/no-items/' % kill_id
    content = web.fetch_url(url)
    kills = json.loads(content)
    km = kills[0] if kills and len(kills) > 0 else None
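For illustration, get_kill() fetches the full killmail while get_crest_hash() uses the lighter no-items variant of the same endpoint (the kill id is illustrative):

kill = get_kill(12345)  # full killmail dict from zKillboard, or None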
import sys
import logging

from datetime import datetime, timedelta

from config import ConfigSection
from datastore import KillMail, LossMailAttributes
from staticdata import InvType, InvGroup

_config = ConfigSection('classifier')
_log = logging.getLogger('sound.srp.be.classifier')

exploration_ships = [
    InvType.by_name('Heron').typeID,
    InvType.by_name('Imicus').typeID,
    InvType.by_name('Magnate').typeID,
    InvType.by_name('Probe').typeID,
    InvType.by_name('Astero').typeID
] + [t.typeID for t in InvGroup.by_name('Covert Ops').types]

bait_ships = [
    InvType.by_name('Procurer').typeID,
    InvType.by_name('Sigil').typeID,
    InvType.by_name('Badger').typeID,
    InvType.by_name('Wreathe').typeID,
    InvType.by_name('Nereus').typeID
]

industry_ship_groups = [
    InvGroup.by_name('Mining Frigate'),
    InvGroup.by_name('Mining Barge'),
    InvGroup.by_name('Expedition Frigate'),
    InvGroup.by_name('Exhumer'),
import logging

from config import ConfigSection
from datastore import Payment
from datetime import datetime, timedelta
from evelink.api import API, APIError
from evelink.cache.shelf import ShelveCache
from evelink.corp import Corp
from eveapi import get_api_key

_config = ConfigSection('paymentverifier')
_log = logging.getLogger('sound.srp.be.paymentverifier')


def get_wallets():
    for wallet in _config.get_option('wallets').split(','):
        parts = wallet.split('-')
        ticker = parts[0]
        division = int(parts[1])
        yield (ticker, division)


def get_journal_entries():
    lookBackDays = int(_config.get_option('look_back_days'))
    startTime = datetime.now() - timedelta(lookBackDays)
    for ticker, division in get_wallets():
        key = get_api_key(ticker)
        corpApi = Corp(key)
        finished = False
        journal = corpApi.wallet_journal(account=division).result
        while not finished:
            for entry in journal:
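For illustration, the 'wallets' option is a comma-separated list of TICKER-division pairs; the values below are assumptions, not real configuration:

# wallets = SOUND-1000,ALT-1001
# get_wallets() then yields ('SOUND', 1000) and ('ALT', 1001).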
import crest
import itertools
import logging

import market
import valuer

from config import ConfigSection
from datastore import KillMail, Victim, Attacker, Item, GroupedItems, ShipClass, Character
from datetime import datetime, timedelta
from eveapi import get_api_key
from evelink.corp import Corp
from staticdata import MapSolarSystem, InvType, InvFlag

_config = ConfigSection('xmlkillimporter')
_log = logging.getLogger('sound.srp.be.xmlkillimporter')

alliance_id = int(_config.get_option('alliance_id'))


def get_api_keys():
    return _config.get_option('keys').split(',')


def get_new_kills(all_kills):
    _log.info('Filtering out kills that are already in the datastore.')
    ids = set(all_kills.keys())
    old_ids = set(map((lambda k: k.kill_id), KillMail.load_multi(ids)))
    new_ids = ids.difference(old_ids)
    return [all_kills[id] for id in new_ids]


def get_ship_class(ship):
    _log.debug('Getting ship class for %s.' % ship.typeName)
    if ship.category.categoryName != 'Ship':
        return 'Other'
    groupName = ship.group.groupName
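A quick illustration of the set difference in get_new_kills(); the ids and stored kills are hypothetical:

# all_kills = {101: km101, 102: km102}; datastore already holds kill 101
# -> old_ids = {101}, new_ids = {102}, and only km102 is returned.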
import logging
import web
import xml.etree.ElementTree as ET

from beaker.cache import CacheManager
from beaker.util import parse_cache_config_options
from config import ConfigSection
from staticdata import MapSolarSystem, MapRegion, InvType

_config = ConfigSection('market')
_log = logging.getLogger('sound.srp.be.market')

jita_id = MapSolarSystem.by_name('Jita').solarSystemID
forge_id = MapRegion.by_name('The Forge').regionID

cache = CacheManager(**parse_cache_config_options(
    {'cache.type': _config.get_option('cache_type')}))


@cache.cache('get_jita_price')
def get_jita_price(type_id):
    _log.debug('Getting Jita sell price for type %d.' % type_id)
    url = 'http://api.eve-central.com/api/quicklook?typeid=%d&usesystem=%d' % (
        type_id, jita_id)
    response = web.fetch_url(url)
    root = ET.fromstring(response)
    sell_orders = root.find('quicklook').find('sell_orders')
    price = None
    for order in sell_orders.findall('order'):
        p = float(order.find('price').text)
        if price is None or p < price:
            price = p
    if price is None:
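A usage sketch for the cached price lookup; 'Tritanium' is an illustrative type name from the static data:

trit = InvType.by_name('Tritanium')
price = get_jita_price(trit.typeID)  # lowest Jita sell order, cached by beaker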