def run(self):
    port = serial.Serial(self.serial_port,
                         baudrate=self.serial_baudrate,
                         timeout=self.serial_timeout,
                         parity=self.serial_parity,
                         stopbits=serial.STOPBITS_TWO,
                         bytesize=serial.SEVENBITS)
    # database creation
    conn = DatabaseConnector.connectSQLiteDB('meshDB.db')
    # MessageManager creation
    self.messageManager = MessageManager(conn)
    # reading loop
    print("Python version detected: " + str(sys.version_info.major))
    while True:
        print("--- Listening...")
        rline = self.readlineCR(port)
        currentDate = datetime.datetime.utcnow()
        try:
            print("Parsing line: " + rline)
            message = self.messageManager.parse_line(currentDate, rline)
            # CSV writing
            #self.df.loc[self.row_iter] = [datetime.datetime.utcnow(), node_number, sensor_type, float(sensor_value)]
            #self.df.to_csv('./test.csv', index=False)
            # DB writing
            self.messageManager.postMessage(message)
        except ValueError:
            print("ValueError Exception")
            print(str(len(rline)) + " " + rline)
        except Exception as e:
            # str(e) avoids a TypeError when concatenating the exception
            print("Error: " + str(e))
            conn.rollback()
            # raise e
        print("--- Received: " + rline + "\n")
    # closing of the database (unreachable while the loop above runs forever)
    conn.close()
class DatabaseDataManager:
    def __init__(self, logger_name):
        self.connector = DatabaseConnector(logger_name)
        self.insert_statement = ""
        self.update_statement = ""
        self.logger = logging.getLogger(logger_name)
        # logger.info("Database Data Executor created")

    def insert(self, table, values, commit=True):
        if isinstance(values, (list, tuple)):
            for single_value_set in values:
                self.connector.create_insert(table, single_value_set)
                self.connector.execute_insert(False)
            if commit:
                self.connector.commit_execute()
        else:
            self.connector.create_insert(table, values)
            self.connector.execute_insert()

    def select(self, table, cols=(), where_clause=(), single=False):
        self.connector.create_select(table, cols, where_clause)
        return self.connector.execute_select(single)

    def update(self, table, clauses=(), commit=True):
        for clause in clauses:
            self.connector.create_update(table, clause['COL_VALS'], clause['WHERE_CLAUSE'])
            self.connector.execute_update(False)
        if commit:
            self.connector.commit_execute()

    def delete(self, table, where_clause, commit=True):
        # NOTE: unlike insert/update, the commit flag is not honored here
        self.connector.create_delete(table, where_clause)
        return self.connector.execute_delete()

    def commit(self):
        self.connector.commit_execute()
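# Hypothetical usage sketch of DatabaseDataManager above. The table name, the
# value tuples, and the exact shapes expected for COL_VALS/WHERE_CLAUSE are
# assumptions — they depend on the DatabaseConnector implementation, which is
# not shown in this section.
manager = DatabaseDataManager("mesh_logger")
# batched insert: each tuple is one row, committed once at the end
manager.insert("measurements", [(1, "temperature", 21.5), (2, "humidity", 48.0)])
rows = manager.select("measurements", cols=("node_id", "value"))
manager.update("measurements", clauses=({"COL_VALS": {"value": 22.0},
                                         "WHERE_CLAUSE": {"node_id": 1}},))
manager.delete("measurements", {"node_id": 2})
manager.commit()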
class Alpha(discord.Client):
    accountProperties = DatabaseConnector(mode="account")

    def prepare(self):
        """Prepares all required objects and fetches Alpha settings"""
        self.logging = error_reporting.Client()

    async def on_ready(self):
        """Initiates all Discord dependent functions and flags the bot as ready to process requests"""
        self.alphaGuild = client.get_guild(414498292655980583)
        self.proRoles = [
            discord.utils.get(self.alphaGuild.roles, id=484387309303758848),  # Alpha Pro role
            discord.utils.get(self.alphaGuild.roles, id=647824289923334155)   # Registered Alpha Account role
        ]
        print("[Startup]: Alpha Manager is online")

    async def on_member_join(self, member):
        """Scans each member joining the Alpha community guild for spam

        Parameters
        ----------
        member : discord.Member
            Member object passed by discord.py
        """
        try:
            await self.update_alpha_guild_roles(only=member.id)
        except asyncio.CancelledError:
            pass
        except Exception:
            print(traceback.format_exc())
            if os.environ["PRODUCTION_MODE"]:
                self.logging.report_exception()

    async def update_alpha_guild_roles(self, only=None):
        """Updates Alpha community guild roles"""
        try:
            if not await self.accountProperties.check_status():
                return
            for member in self.alphaGuild.members:
                if only is not None and only != member.id:
                    continue
                await asyncio.sleep(0.1)
                accountId = await self.accountProperties.match(member.id)
                if accountId is not None:
                    properties = await self.accountProperties.get(accountId)
                    if self.proRoles[1] not in member.roles:
                        await member.add_roles(self.proRoles[1])
                    if properties["customer"]["personalSubscription"].get("plan", "free") != "free":
                        if self.proRoles[0] not in member.roles:
                            await member.add_roles(self.proRoles[0])
                    elif self.proRoles[0] in member.roles:
                        await member.remove_roles(self.proRoles[0])
                elif self.proRoles[0] in member.roles or self.proRoles[1] in member.roles:
                    await member.remove_roles(self.proRoles[0], self.proRoles[1])
        except asyncio.CancelledError:
            pass
        except Exception:
            print(traceback.format_exc())
            if os.environ["PRODUCTION_MODE"]:
                self.logging.report_exception()

    # -------------------------
    # Job queue
    # -------------------------

    async def job_queue(self):
        """Executes scheduled jobs as long as Alpha Bot is online"""
        while True:
            try:
                await asyncio.sleep(Utils.seconds_until_cycle())
                t = datetime.datetime.now().astimezone(pytz.utc)
                timeframes = Utils.get_accepted_timeframes(t)
                if "5m" in timeframes:
                    await self.update_alpha_guild_roles()
            except asyncio.CancelledError:
                return
            except Exception:
                print(traceback.format_exc())
                if os.environ["PRODUCTION_MODE"]:
                    self.logging.report_exception()
# LD1coefficient = 0.0001  # Costs an extra 1/2 second for size 100 batches from 1600 to 200, 0 for off
networkName = "treeNetworkSmall"
setNames = ["gray-40x40-tree"]
imageHeights = 40
imageWidths = 40
networkShape = [1600, 50, 50]
learningRate = 0.5
momentum = 0.9
numberItterations = 10000
batchSize = 10
LD1coefficient = 0.0001  # Costs an extra 1/2 second for size 100 batches from 1600 to 200, 0 for off
startOver = True

dbc = DatabaseConnector()
trainingSets = [dbc.getTrainingSetId(setNames[0])]
cases = dbc.getTrainingCases(setNames[0])
cases = np.array(cases)

rbm = DeepRBM(networkShape)
if startOver != True:
    n = dbc.getNetwork(networkName)
    if n:
        rbm.setWeights(n.weights)

print "Number of training cases"
print cases.shape

# The original call was cut off after learningRate; the trailing arguments
# below are an assumption based on the settings defined above.
rbm = trainRBMs(rbm, cases, batchSize, numberItterations, learningRate,
                momentum, LD1coefficient)
class TopicSpider(scrapy.Spider):
    name = 'TopicSpider'

    # website specifics
    USERNAME = ""
    PASSWORD = ""
    WEBSITE_URL = ""

    # phpBB3 prosilver specifics
    LOGIN_PAGE_URL = ""
    LOGIN_PAGE = 'ucp.php?mode=login'
    VIEWTOPIC_PAGE = "viewtopic.php"
    VIEWFORUM_PAGE = "viewforum.php"
    INDEX_PAGE = "index.php"
    URL_FORUM_ARGUMENT = "f"
    URL_TOPIC_ARGUMENT = "t"
    databaseConnector = DatabaseConnector()

    # Page selectors
    CURRENT_PAGE_SELECTOR = ".pagination > a > strong ::text"
    NEXT_PAGE_SELECTOR = ".display-options a.right-box::attr(href)"

    # POST selectors
    POST_AUTHORS_SELECTOR = ".post > .inner > .postbody > .author > strong > a ::text"
    POST_DATES_SELECTOR = ".post > .inner > .postbody > .author"
    POST_CONTENTS_SELECTOR = '.post > .inner > .postbody > .content'
    POST_TITLES_SELECTOR = '.post > .inner > .postbody > h3 > a ::text'
    POST_URLS_SELECTOR = '.post > .inner > .postbody > .author > a ::attr(href)'
    POST_IDS_SELECTOR = '.post > .inner > .postbody > h3 > a ::attr(href)'

    # TOPIC selectors
    TOPIC_URL_SELECTOR = '#page-body > h2 > a ::attr(href)'

    def __init__(self):
        settings = get_project_settings()
        self.USERNAME = settings.get('WEBSITE_USERNAME')
        self.PASSWORD = settings.get('WEBSITE_PASSWORD')
        self.WEBSITE_URL = settings.get('WEBSITE_URL')
        self.LOGIN_PAGE_URL = self.WEBSITE_URL + self.LOGIN_PAGE

    def getForumIdFromUrl(self, url):
        return url.replace(self.WEBSITE_URL + self.VIEWTOPIC_PAGE + "?", "").split('&')[0].replace(self.URL_FORUM_ARGUMENT + '=', '')

    def getTopicIdFromUrl(self, url):
        return url.replace(self.WEBSITE_URL + self.VIEWTOPIC_PAGE + "?", "").split('&')[1].replace(self.URL_TOPIC_ARGUMENT + '=', '')

    def start_requests(self):
        yield scrapy.Request(url=self.LOGIN_PAGE_URL, callback=self.login)

    def login(self, response):
        return scrapy.FormRequest.from_response(
            response,
            formdata={'username': self.USERNAME, 'password': self.PASSWORD,
                      'login': '******', 'redirect': self.INDEX_PAGE},
            callback=self.afterLogin)

    def afterLogin(self, response):
        if "Logout" in response.body:
            logging.getLogger().info("Successfully logged in. \nLet's start crawling!")
            # crawl each topic stored in the database
            self.databaseConnector.open()
            topics = self.databaseConnector.selectAllTopics()
            for topic in topics:
                yield scrapy.Request(url=topic['url'], callback=self.parse)
        else:
            logging.getLogger().info("Login failed.")
            return

    def parse(self, response):
        topicUrl = response.request.url
        forumId = self.getForumIdFromUrl(topicUrl)
        topicId = self.getTopicIdFromUrl(topicUrl)
        # parse authors and contents
        authors = response.css(self.POST_AUTHORS_SELECTOR).extract()
        dates = response.css(self.POST_DATES_SELECTOR).xpath('text()[position()=2]')
        contents = response.css(self.POST_CONTENTS_SELECTOR)
        titles = response.css(self.POST_TITLES_SELECTOR)
        urls = response.css(self.POST_URLS_SELECTOR)
        ids = response.css(self.POST_IDS_SELECTOR)
        lastPost = len(authors)
        i = 0
        # iterate through posts and submit to DB
        while i < lastPost:
            author = authors[i]
            date = dates[i].extract()[3:]
            content = ""
            nodes = contents[i].xpath('node()')
            for node in nodes:
                content += node.extract()
            title = titles[i].extract()
            url = self.WEBSITE_URL + urls[i].extract()[2:]
            id = ids[i].extract()[2:]
            postItem = Post(id=id, title=title, date=date, author=author,
                            content=content, url=url, forumId=forumId, topicId=topicId)
            logging.getLogger().info("Committing post ID: " + postItem['id'])
            yield postItem
            i += 1
        # follow to topic's next page or quit
        nextPageRelativeUrl = response.css(self.NEXT_PAGE_SELECTOR).extract()
        if nextPageRelativeUrl:
            nextPageRelativeUrl = nextPageRelativeUrl[0]
            nextPageRelativeUrl = nextPageRelativeUrl[2:]
            nextPageAbsoluteUrl = self.WEBSITE_URL + nextPageRelativeUrl
            time.sleep(randint(1, 3))
            yield scrapy.Request(nextPageAbsoluteUrl, callback=self.parse, dont_filter=True)
        else:
            logging.getLogger().info("Finished crawling Topic " + topicId)
            return
class Alpha(discord.AutoShardedClient):
    accountProperties = DatabaseConnector(mode="account")

    def prepare(self):
        self.logging = ErrorReportingClient(service="discord_manager")

    async def on_ready(self):
        t = datetime.now().astimezone(utc)
        self.alphaGuild = client.get_guild(414498292655980583)
        self.proRoles = [
            discord.utils.get(self.alphaGuild.roles, id=484387309303758848),  # Alpha Pro role
            discord.utils.get(self.alphaGuild.roles, id=593768473277104148),  # Ichibot role
            discord.utils.get(self.alphaGuild.roles, id=647824289923334155),  # Registered role
            discord.utils.get(self.alphaGuild.roles, id=601524236464553984)   # Beta tester role
        ]
        await self.update_system_status(t)
        await self.update_static_messages()
        print("[Startup]: Alpha Manager is online")

    async def update_static_messages(self):
        if not environ["PRODUCTION_MODE"]:
            return
        try:
            faqAndRulesChannel = client.get_channel(601160698310950914)
            guildRulesMessage = await faqAndRulesChannel.fetch_message(850729258049601556)
            termsOfServiceMessage = await faqAndRulesChannel.fetch_message(850729261216301086)
            faqMessage = await faqAndRulesChannel.fetch_message(850731156391329793)
            if guildRulesMessage is not None:
                await guildRulesMessage.edit(content=None, embed=discord.Embed(
                    title="All members of this official Alpha community must follow the community rules. Failure to do so will result in a warning, kick, or ban, based on our sole discretion.",
                    description="[Community rules](https://www.alphabotsystem.com/community-rules) (last modified on January 31, 2020).",
                    color=0x673AB7), suppress=False)
            if termsOfServiceMessage is not None:
                await termsOfServiceMessage.edit(content=None, embed=discord.Embed(
                    title="By using Alpha branded services you agree to our Terms of Service and Privacy Policy. You can read them on our website.",
                    description="[Terms of Service](https://www.alphabotsystem.com/terms-of-service) (last modified on March 6, 2020)\n[Privacy Policy](https://www.alphabotsystem.com/privacy-policy) (last modified on January 31, 2020).",
                    color=0x673AB7), suppress=False)
            if faqMessage is not None:
                await faqMessage.edit(content=None, embed=discord.Embed(
                    title="If you have any questions, refer to our FAQ section, guide, or ask for help in support channels.",
                    description="[Frequently Asked Questions](https://www.alphabotsystem.com/faq)\n[Feature overview with examples](https://www.alphabotsystem.com/guide)\nFor other questions, use <#574196284215525386>.",
                    color=0x673AB7), suppress=False)

            ichibotChannel = client.get_channel(825460988660023326)
            howtoMessage = await ichibotChannel.fetch_message(850764390290030603)
            if howtoMessage is not None:
                await howtoMessage.edit(content=None, embed=discord.Embed(
                    title="Best-in-class order execution client. Trade cryptocurrencies via Ichibot right in Discord.",
                    description="[Sign up for a free account on our website](https://www.alphabotsystem.com/sign-up). If you already signed up, [sign in](https://www.alphabotsystem.com/sign-in), connect your account with your Discord profile, and add an API key. All Ichibot commands are prefixed with `x`. \nLearn more about Ichibot on their [GitLab page](https://www.alphabotsystem.com/guide/ichibot).",
                    color=0x673AB7), suppress=False)
        except CancelledError:
            pass
        except Exception:
            print(format_exc())
            if environ["PRODUCTION_MODE"]:
                self.logging.report_exception()

    async def on_member_join(self, member):
        await self.update_alpha_guild_roles(only=member.id)

    async def update_alpha_guild_roles(self, only=None):
        try:
            if not await self.accountProperties.check_status():
                return
            accounts = await self.accountProperties.keys()
            matches = {value: key for key, value in accounts.items()}
            for member in self.alphaGuild.members:
                if only is not None and only != member.id:
                    continue
                accountId = matches.get(str(member.id))
                if accountId is not None:
                    await sleep(0.4)
                    properties = await self.accountProperties.get(accountId)
                    if properties is None:
                        continue
                    if self.proRoles[2] not in member.roles:
                        try:
                            await member.add_roles(self.proRoles[2])
                        except:
                            pass
                    if len(properties["apiKeys"].keys()) != 0:
                        if self.proRoles[1] not in member.roles:
                            try:
                                await member.add_roles(self.proRoles[1])
                            except:
                                pass
                    elif self.proRoles[1] in member.roles:
                        try:
                            await member.remove_roles(self.proRoles[1])
                        except:
                            pass
                    if properties["customer"]["personalSubscription"].get("plan", "free") != "free":
                        if self.proRoles[0] not in member.roles:
                            await member.add_roles(self.proRoles[0])
                    elif self.proRoles[0] in member.roles:
                        try:
                            await member.remove_roles(self.proRoles[0])
                        except:
                            pass
                elif self.proRoles[0] in member.roles or self.proRoles[2] in member.roles:
                    await sleep(0.4)
                    try:
                        await member.remove_roles(self.proRoles[0], self.proRoles[2])
                    except:
                        pass
        except CancelledError:
            pass
        except Exception:
            print(format_exc())
            if environ["PRODUCTION_MODE"]:
                self.logging.report_exception()

    # -------------------------
    # Job queue
    # -------------------------

    async def job_queue(self):
        while True:
            try:
                await sleep(Utils.seconds_until_cycle())
                t = datetime.now().astimezone(utc)
                timeframes = Utils.get_accepted_timeframes(t)
                if "1m" in timeframes:
                    await self.update_system_status(t)
                if "15m" in timeframes:
                    client.loop.create_task(self.update_alpha_guild_roles())
            except CancelledError:
                return
            except Exception:
                print(format_exc())
                if environ["PRODUCTION_MODE"]:
                    self.logging.report_exception()

    async def update_system_status(self, t):
        try:
            statistics = await database.document("discord/statistics").get()
            statistics = statistics.to_dict()["{}-{:02d}".format(t.year, t.month)]
            t2 = t + timedelta(minutes=5)
            if t2.month != t.month:
                await database.document("discord/statistics").set({"{}-{:02d}".format(t2.year, t2.month): statistics}, merge=True)
            numOfCharts = ":chart_with_upwards_trend: {:,} charts requested".format(statistics["c"] + statistics["hmap"])
            numOfAlerts = ":bell: {:,} alerts set".format(statistics["alerts"])
            numOfPrices = ":money_with_wings: {:,} prices & details pulled".format(statistics["d"] + statistics["p"] + statistics["v"] + statistics["mcap"] + statistics["mk"] + statistics["convert"])
            numOfTrades = ":dart: {:,} trades executed".format(statistics["paper"] + statistics["x"])
            numOfGuilds = ":heart: Used in {:,} Discord communities".format(statistics["servers"])
            statisticsEmbed = discord.Embed(title="{}\n{}\n{}\n{}\n{}".format(numOfCharts, numOfAlerts, numOfPrices, numOfTrades, numOfGuilds), color=0x673AB7)
            if environ["PRODUCTION_MODE"]:
                statusChannel = client.get_channel(560884869899485233)
                statsMessage = await statusChannel.fetch_message(850729112321392640)
                if statsMessage is not None:
                    await statsMessage.edit(content=None, embed=statisticsEmbed, suppress=False)
        except CancelledError:
            pass
        except Exception:
            print(format_exc())
            if environ["PRODUCTION_MODE"]:
                self.logging.report_exception()

    # -------------------------
    # Message handling
    # -------------------------

    async def on_message(self, message):
        try:
            if message.author.id != 361916376069439490:
                return
            if message.clean_content.lower().startswith("beta "):
                parameters = message.clean_content.split(" ")[1:]
                if len(parameters) == 2:
                    ch = client.get_channel(int(parameters[0]))
                    me = await ch.fetch_message(int(parameters[1]))
                    reactions = me.reactions
                    for reaction in reactions:
                        async for user in reaction.users():
                            try:
                                if self.proRoles[3] not in user.roles:
                                    await user.add_roles(self.proRoles[3])
                            except:
                                pass
                    await message.delete()
        except CancelledError:
            pass
        except Exception:
            print(format_exc())
            if environ["PRODUCTION_MODE"]:
                self.logging.report_exception()
class AlertsServer(object):
    accountProperties = DatabaseConnector(mode="account")
    registeredAccounts = {}
    zmqContext = Context.instance()

    # -------------------------
    # Startup
    # -------------------------

    def __init__(self):
        self.isServiceAvailable = True
        signal(SIGINT, self.exit_gracefully)
        signal(SIGTERM, self.exit_gracefully)
        self.logging = ErrorReportingClient(service="alerts")
        self.cache = {}

    def exit_gracefully(self):
        print("[Startup]: Alerts Server handler is exiting")
        self.isServiceAvailable = False

    # -------------------------
    # Job queue
    # -------------------------

    def run(self):
        while self.isServiceAvailable:
            try:
                sleep(Utils.seconds_until_cycle())
                t = datetime.now().astimezone(utc)
                timeframes = Utils.get_accepted_timeframes(t)
                if "1m" in timeframes:
                    self.update_accounts()
                    self.process_price_alerts()
            except (KeyboardInterrupt, SystemExit):
                return
            except Exception:
                print(format_exc())
                if environ["PRODUCTION_MODE"]:
                    self.logging.report_exception()

    def update_accounts(self):
        try:
            self.registeredAccounts = self.accountProperties.keys()
        except (KeyboardInterrupt, SystemExit):
            pass
        except Exception:
            print(format_exc())
            if environ["PRODUCTION_MODE"]:
                self.logging.report_exception()

    # -------------------------
    # Price Alerts
    # -------------------------

    def process_price_alerts(self):
        try:
            self.cache = {}
            users = database.document("details/marketAlerts").collections()
            with ThreadPoolExecutor(max_workers=20) as pool:
                for user in users:
                    accountId = user.id
                    authorId = accountId if accountId.isdigit() else self.registeredAccounts.get(accountId)
                    if authorId is None:
                        continue
                    for alert in user.stream():
                        pool.submit(self.check_price_alert, authorId, accountId, alert.reference, alert.to_dict())
        except (KeyboardInterrupt, SystemExit):
            pass
        except Exception:
            print(format_exc())
            if environ["PRODUCTION_MODE"]:
                self.logging.report_exception()

    def check_price_alert(self, authorId, accountId, reference, alert):
        socket = AlertsServer.zmqContext.socket(REQ)
        socket.connect("tcp://candle-server:6900")
        socket.setsockopt(LINGER, 3)
        poller = Poller()
        poller.register(socket, POLLIN)
        try:
            currentPlatform = alert["request"].get("currentPlatform")
            currentRequest = alert["request"].get(currentPlatform)
            ticker = currentRequest.get("ticker")
            hashName = hash(dumps(ticker, option=OPT_SORT_KEYS))

            if alert["timestamp"] < time() - 86400 * 30.5 * 3:
                if environ["PRODUCTION_MODE"]:
                    database.document("discord/properties/messages/{}".format(str(uuid4()))).set({
                        "title": "Price alert for {} ({}) at {} {} expired.".format(ticker.get("base"), currentPlatform if ticker.get("exchange") is None else ticker.get("exchange").get("name"), alert.get("levelText", alert["level"]), ticker.get("quote")),
                        "subtitle": "Price Alerts",
                        "description": "Price alerts automatically cancel after 3 months. \nIf you'd like to keep your alert, you'll have to schedule it again.",
                        "color": 6765239,
                        "user": authorId,
                        "channel": alert["channel"]
                    })
                    reference.delete()
                else:
                    print("{}: price alert for {} ({}) at {} {} expired".format(accountId, ticker.get("base"), currentPlatform if ticker.get("exchange") is None else ticker.get("exchange").get("name"), alert.get("levelText", alert["level"]), ticker.get("quote")))
            else:
                if hashName in self.cache:
                    payload = self.cache.get(hashName)
                else:
                    alert["request"]["timestamp"] = time()
                    alert["request"]["authorId"] = authorId
                    socket.send_multipart([b"alerts", b"candle", dumps(alert["request"])])
                    responses = poller.poll(30 * 1000)
                    if len(responses) != 0:
                        [payload, responseText] = socket.recv_multipart()
                        payload = loads(payload)
                        responseText = responseText.decode()
                        if not bool(payload):
                            if responseText != "":
                                print("Alert request error:", responseText)
                                if environ["PRODUCTION_MODE"]:
                                    self.logging.report(responseText)
                            return
                        self.cache[hashName] = payload
                    else:
                        raise Exception("time out")

                for candle in reversed(payload["candles"]):
                    if candle[0] < alert["timestamp"]:
                        break
                    if (alert["placement"] == "below" and candle[3] is not None and candle[3] <= alert["level"]) or (alert["placement"] == "above" and candle[2] is not None and alert["level"] <= candle[2]):
                        if environ["PRODUCTION_MODE"]:
                            database.document("discord/properties/messages/{}".format(str(uuid4()))).set({
                                "title": "Price of {} ({}) hit {} {}.".format(ticker.get("base"), payload.get("platform") if ticker.get("exchange") is None else ticker.get("exchange").get("name"), alert.get("levelText", alert["level"]), ticker.get("quote")),
                                "description": alert.get("triggerMessage"),
                                "subtitle": "Price Alerts",
                                "color": 6765239,
                                "user": None if {"id": "public", "value": "public"} in currentRequest.get("preferences") else authorId,
                                "channel": alert["channel"]
                            })
                            reference.delete()
                        else:
                            print("{}: price of {} ({}) hit {} {}".format(accountId, ticker.get("base"), payload.get("platform") if ticker.get("exchange") is None else ticker.get("exchange").get("name"), alert.get("levelText", alert["level"]), ticker.get("quote")))
                        break
        except (KeyboardInterrupt, SystemExit):
            pass
        except Exception:
            print(format_exc())
            if environ["PRODUCTION_MODE"]:
                self.logging.report_exception(user=f"{accountId}, {authorId}")
        socket.close()
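# Minimal sketch of the REQ/Poller round trip used by check_price_alert above:
# send one multipart request to the candle server, wait up to 30 s for a
# reply, and treat silence as a timeout. The endpoint and frame layout are
# taken from the snippet; the helper itself is illustrative only.
from zmq import Context, Poller, LINGER, POLLIN, REQ

def request_candles(requestPayload, timeout=30 * 1000):
    socket = Context.instance().socket(REQ)
    socket.connect("tcp://candle-server:6900")
    socket.setsockopt(LINGER, 3)
    poller = Poller()
    poller.register(socket, POLLIN)
    try:
        socket.send_multipart([b"alerts", b"candle", requestPayload])
        if poller.poll(timeout):
            payload, responseText = socket.recv_multipart()
            return payload, responseText.decode()
        raise Exception("time out")
    finally:
        socket.close()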
class TrainFlowManager:
    '''This class collects the data from the database, synthesizes it into the form the
    recommendation engines can use, and then builds the recommendation models.'''

    def __init__(self):
        self.db_connector = DatabaseConnector()
        if cfg.load_data_from_file:
            # TODO : Correct the datatypes here!
            self.user_data = pd.read_csv(cfg.user_data_filename, sep=',', encoding='utf-8')
            self.user_data.drop(self.user_data.columns[[0]], axis=1, inplace=True)
            #print self.user_data.dtypes
            self.user_orig_data = pd.read_csv(cfg.user_orig_data_filename, sep=',', encoding='utf-8')
            self.user_orig_data.drop(self.user_orig_data.columns[[0]], axis=1, inplace=True)
            #print self.user_orig_data.dtypes
            '''self.repo_data = pd.read_csv(cfg.repo_data_filename, sep=',', encoding='utf-8',
                dtype={"repo_id":"object", "owner_id":"object", "is_private":"object", "is_forked":"object",
                       "cont_count":"object", "language":"object", "days_from_creation":"object",
                       "days_from_updation":"object", "days_from_push":"object", "size":"object",
                       "watcher_count":"object", "stargazer_count":"object", "has_wiki":"object",
                       "fork_count":"object", "open_issues":"object", "sub_count":"object",
                       "readme":"object", "description":"object"})'''
            self.repo_data = pd.read_csv(cfg.repo_data_filename, sep=',', encoding='utf-8',
                                         dtype={"repo_id": "int64", "owner_id": "int64",
                                                "is_private": "bool", "is_forked": "bool",
                                                "cont_count": "int64", "language": "string",
                                                "days_from_creation": "int64", "days_from_updation": "int64",
                                                "days_from_push": "int64", "size": "int64",
                                                "watcher_count": "int64", "stargazer_count": "int64",
                                                "has_wiki": "bool", "fork_count": "int64",
                                                "open_issues": "int64", "sub_count": "int64",
                                                "readme": "string", "description": "string"})
            self.repo_data.drop(self.repo_data.columns[[0]], axis=1, inplace=True)
            # Replace NaNs
            self.repo_data['language'].fillna(' ', inplace=True)
            self.repo_data['readme'].fillna(' ', inplace=True)
            self.repo_data['description'].fillna(' ', inplace=True)
            #print self.repo_data.dtypes
            # Repo data is a must for converting the dtypes. Do it above! Or cast all of them as object?
            self.repo_orig_data = pd.read_csv(cfg.repo_orig_data_filename, sep=',', encoding='utf-8')
            self.repo_orig_data.drop(self.repo_orig_data.columns[[0]], axis=1, inplace=True)
            #print self.repo_orig_data.dtypes
            # Replace NaNs
            self.repo_orig_data['language'].fillna(' ', inplace=True)
            self.repo_orig_data['readme'].fillna(' ', inplace=True)
            self.repo_orig_data['description'].fillna(' ', inplace=True)
            self.user_repo_association = pd.read_csv(cfg.user_repo_association_filename, sep=',', encoding='utf-8')
            self.user_repo_association.drop(self.user_repo_association.columns[[0]], axis=1, inplace=True)
            self.user_repo_association = self.user_repo_association[
                self.user_repo_association['rating'] <= cfg.rating_matrix_removal_limit]
            self.user_repo_association['rating'] = self.user_repo_association['rating'].apply(rescale)
            # TODO : DropNA?
            # Print the shapes.
            print "user_data.shape" + str(self.user_data.shape)
            print "user_orig_data.shape" + str(self.user_orig_data.shape)
            print "repo_data.shape" + str(self.repo_data.shape)
            print "repo_orig_data.shape" + str(self.repo_orig_data.shape)
            print "user_repo_association.shape" + str(self.user_repo_association.shape)
            #print self.user_repo_association.dtypes
            # Load from Pickle.
            '''self.user_data = pd.read_pickle(cfg.user_data_filename_pkl)
            self.user_orig_data = pd.read_pickle(cfg.user_orig_data_filename_pkl)
            self.repo_data = pd.read_pickle(cfg.repo_data_filename_pkl)
            self.repo_orig_data = pd.read_pickle(cfg.repo_orig_data_filename_pkl)
            self.user_repo_association = pd.read_pickle(cfg.user_repo_association_filename_pkl)'''
        else:
            self.user_orig_data = self.db_connector.get_user_data(limit=cfg.train_users_limit)
            self.repo_orig_data = self.db_connector.get_repo_data(limit=cfg.train_repos_limit)
            self.user_data = pd.DataFrame(columns=[
                "user_id", "location", "repo_count", "followers_count", "folowee_count",
                "days_from_creation", "days_from_update", "interest_q", "tech_q",
                "languages_q", "positions_q", "status_q"])
            self.repo_data = pd.DataFrame(columns=[
                "repo_id", "owner_id", "is_private", "is_forked", "cont_count", "language",
                "days_from_creation", "days_from_updation", "days_from_push", "size",
                "watcher_count", "stargazer_count", "has_wiki", "fork_count", "open_issues",
                "sub_count", "readme", "description"])
            # TODO : Could we keep the description also for matching?
            self.user_repo_association = pd.DataFrame(columns=["user_id", "repo_id", "rating"])
            self.bio_analyzer = BiographyAnalyzer(cfg.interests_tolerance, cfg.tech_tolerance,
                                                  cfg.languages_tolerance, cfg.position_tolerance,
                                                  cfg.student_status_tolerance)
            self.commit_log_analyzer = CommitLogAnalyzer()
            self.create_datasets()
            self.user_repo_association = self.user_repo_association[
                self.user_repo_association['rating'] <= cfg.rating_matrix_removal_limit]
            self.user_repo_association['rating'] = self.user_repo_association['rating'].apply(rescale)

    # This API exposes the internal data structures of this class.
    def get_data_structures(self):
        return self.user_orig_data, self.repo_orig_data, self.user_data, self.repo_data

    # This API pulls the data and populates the local data structures.
    def create_datasets(self):
        self.create_user_data()
        self.create_repo_data()
        self.synthesize_user_repo_association()
        return

    def __none_checker_int(self, input):
        return input if input is not None else 0

    def __none_checker_string(self, input):
        return input if input is not None else ""

    def __get_date_diff(self, input_date):
        curr_date = datetime.datetime.today()
        if input_date is None:
            return curr_date
        #print "The input type of the date is =" + str(type(input_date))
        #parsed_date = dateparser.parse(str(input_date))
        parsed_date = input_date.to_pydatetime()
        diff_in_days = (curr_date - parsed_date).days
        return diff_in_days

    def create_user_data(self):
        for index, row in self.user_orig_data.iterrows():
            print row['user_id'], row['name']
            self.user_data.set_value(index, 'user_id', row['user_id'])
            self.user_data.set_value(index, 'location', self.__none_checker_string(row['location']))
            self.user_data.set_value(index, 'repo_count', self.__none_checker_int(row['repo_count']))
            self.user_data.set_value(index, 'followers_count', self.__none_checker_int(row['followers_count']))
            self.user_data.set_value(index, 'folowee_count', self.__none_checker_int(row['followees_count']))
            # take care of dates here
            self.user_data.set_value(index, 'days_from_creation', self.__get_date_diff(row['created_at']))
            self.user_data.set_value(index, 'days_from_update', self.__get_date_diff(row['updated_at']))
            # Synthesize the info from the bio. Not very accurate.
            '''"user_id", "location", "repo_count", "followers_count", "folowee_count",
            "days_from_creation", "days_from_update", "interest_q", "tech_q", "languages_q",
            "positions_q", "status_q"'''
            curr_bio_text = row['bio']
            if curr_bio_text is None or curr_bio_text == "":
                curr_bio_text = cfg.default_bio_text
            [interest_q, tech_q, languages_q, positions_q, status_q] = self.bio_analyzer.process_bio(curr_bio_text)
            # Add the data to the user data.
            self.user_data.set_value(index, 'interest_q', interest_q)
            self.user_data.set_value(index, 'tech_q', tech_q)
            self.user_data.set_value(index, 'languages_q', languages_q)
            self.user_data.set_value(index, 'positions_q', positions_q)
            self.user_data.set_value(index, 'status_q', status_q)

    # Main public API which returns a GraphLab model for the user-item rating model.
    def train_for_user_item_association(self):
        train_data = gl.SFrame(self.user_repo_association)
        # Train Model
        # factorization_recommender???? TODO : item_similarity_recommender
        self.item_sim_model = gl.factorization_recommender.create(
            train_data, user_id='user_id', item_id='repo_id', target='rating', verbose=True)
        #print self.item_sim_model.evaluate(train_data)
        return self.item_sim_model

    # This API trains a model for item similarity.
    def train_for_item_content_similarity(self):
        '''sliced_columns = ["repo_id", "owner_id", "is_private", "is_forked", "cont_count",
        "language", "days_from_creation", "days_from_updation", "days_from_push", "size",
        "watcher_count", "stargazer_count", "has_wiki", "fork_count", "open_issues", "sub_count"]'''
        sliced_columns = ["owner_id", "repo_id", "is_forked", "cont_count", "language", "size", "has_wiki"]
        sliced_repo_data = self.repo_data[sliced_columns]
        sliced_repo_data.rename(index=str, columns={"owner_id": "user_id"}, inplace=True)  # TODO: Rename owner_id to user_id
        #print sliced_repo_data.dtypes
        #print sliced_repo_data.isnull()
        train_data = gl.SFrame(sliced_repo_data)
        train_data_observation = gl.SFrame(self.user_repo_association)
        self.item_content_model = gl.recommender.item_content_recommender.create(
            item_data=train_data, item_id='repo_id', observation_data=train_data_observation,
            user_id='user_id', target='rating', verbose=True)
        '''self.item_content_model = gl.recommender.item_content_recommender.create(
            item_data=train_data, item_id='repo_id', user_id='user_id', verbose=True)'''
        # Evaluate Model on the training dataset
        #print self.item_content_model.evaluate(train_data_observation)
        return self.item_content_model

    def create_repo_data(self):
        '''["repo_id", "owner_id", "is_private", "is_forked", "cont_count", "language",
        "days_from_creation", "days_from_updation", "days_from_push", "size", "watcher_count",
        "stargazer_count", "has_wiki", "fork_count", "open_issues", "sub_count", "readme", "description"]'''
        for index, row in self.repo_orig_data.iterrows():
            print row['repo_id'], row['repo_name']
            self.repo_data.set_value(index, 'repo_id', row['repo_id'])
            self.repo_data.set_value(index, 'owner_id', row['owner_id'])
            self.repo_data.set_value(index, 'is_private', row['is_private'])
            self.repo_data.set_value(index, 'is_forked', row['is_forked'])
            self.repo_data.set_value(index, 'cont_count', self.__none_checker_int(row['contributor_count']))
            self.repo_data.set_value(index, 'language', self.__none_checker_string(row['language']))
            # Dates
            self.repo_data.set_value(index, 'days_from_creation', self.__get_date_diff(row['created_at']))
            self.repo_data.set_value(index, 'days_from_updation', self.__get_date_diff(row['updated_at']))
            self.repo_data.set_value(index, 'days_from_push', self.__get_date_diff(row['pushed_at']))
            self.repo_data.set_value(index, 'size', self.__none_checker_int(row['size']))
            self.repo_data.set_value(index, 'watcher_count', self.__none_checker_int(row['watcher_count']))
            self.repo_data.set_value(index, 'stargazer_count', self.__none_checker_int(row['stargazer_count']))
            self.repo_data.set_value(index, 'has_wiki', row['has_wiki'])
            forks_count_total = self.__none_checker_int(row['forks_count']) + self.__none_checker_int(row['forks'])
            open_issues_count_total = self.__none_checker_int(row['open_issues_count']) + self.__none_checker_int(row['open_issues'])
            self.repo_data.set_value(index, 'fork_count', forks_count_total)
            self.repo_data.set_value(index, 'open_issues', open_issues_count_total)
            self.repo_data.set_value(index, 'sub_count', self.__none_checker_int(row['subscribers_count']))
            # Capture the description and readme for the repo. "readme", "description"
            self.repo_data.set_value(index, 'readme', self.__none_checker_string(row['readme']))
            self.repo_data.set_value(index, 'description', self.__none_checker_string(row['readme']))  # NOTE: copies the readme into 'description' until the line below is enabled
            # TODO : Enable the below line
            #self.repo_data.set_value(index, 'description', self.__none_checker_string(row['description']))
        #print self.repo_data.dtypes

    def __map_bool_to_int(self, input_bool):
        return 1 if input_bool == True else 0

    def synthesize_user_repo_association(self):
        print "Synthesizing User Repo Association."
        # This API works out each repository's importance and the user's association with it,
        # to finally allocate one rating per repository.
        # The rating depends on many factors, weighted as configured:
        # Sentiments :: length, structural_integrity_score, topic_relevance_score, positivity_score, spelling_integrity_score
        # "is_forked", "cont_count", "days_from_push", "size", "watcher_count", "stargazer_count",
        # "has_wiki", "fork_count", "open_issues", "sub_count", no_of_commits.
        # Total 16 factors. We use a linear combination of them, distributed by the weights set
        # in the configuration manager. The weights are divided by 100 for normalisation.
        association_processing_limit = cfg.association_processing_limit
        for index, row in self.repo_data.iterrows():
            if association_processing_limit <= 0:
                break
            try:
                '''user_id", "repo_id", "rating'''
                print "Synthesizing info for repo = " + str(row['repo_id']) + " and owner = " + str(row['owner_id'])
                curr_user_id = row['owner_id']
                curr_repo_id = row['repo_id']
                self.user_repo_association.set_value(index, 'user_id', curr_user_id)
                self.user_repo_association.set_value(index, 'repo_id', curr_repo_id)
                # Synthesize the rating as a linear combination of the values, depending on
                # whether each factor is directly or inversely proportional.
                # First collect all the commit logs for this repo / repo-user combination.
                curr_commits = []
                if cfg.is_commits_from_repo_only:
                    curr_commits = self.db_connector.get_commits_for_repo(curr_repo_id)
                else:
                    curr_commits = self.db_connector.get_commits_for_user_repo(curr_user_id, curr_repo_id)
                # Capture the best description text.
                if row['readme'] != "":
                    best_description = row['readme']
                elif row['description'] != "":
                    best_description = row['description']
                else:
                    best_description = cfg.default_description
                [length, structural_integrity_score, topic_relevance_score, positivity_score, spelling_integrity_score] \
                    = self.commit_log_analyzer.process_batch_logs(curr_commits, best_description)
                no_of_commits = len(curr_commits)
                # Sentiments :: length, structural_integrity_score, topic_relevance_score, positivity_score, spelling_integrity_score
                # "is_forked", "cont_count", "days_from_push", "size", "watcher_count", "stargazer_count",
                # "has_wiki", "fork_count", "open_issues", "sub_count", no_of_commits.
                a1 = length * float(cfg.average_commit_length_weight) / 100
                a2 = structural_integrity_score * float(cfg.structural_integrity_score_weight) / 100
                a3 = topic_relevance_score * float(cfg.topic_relevance_score_weight) / 100
                a4 = positivity_score * float(cfg.topic_relevance_score_weight) / 100  # NOTE: reuses the topic relevance weight; likely a copy-paste slip
                a5 = spelling_integrity_score * float(cfg.spelling_integrity_score_weight) / 100
                a6 = no_of_commits * float(cfg.no_of_commits_weight) / 100
                a7 = float(cfg.is_forked_weight) / (100 * (1 + self.__map_bool_to_int(row['is_forked'])))
                a8 = row['cont_count'] * float(cfg.cont_count_weight) / 100
                a9 = float(cfg.days_from_push_weight) / (100 * (1 + row['days_from_push']))
                a10 = row['size'] * float(cfg.repo_size_weight) / 100
                a11 = row['watcher_count'] * float(cfg.watcher_count_weight) / 100
                a12 = row['stargazer_count'] * float(cfg.stargazer_count_weight) / 100
                a13 = self.__map_bool_to_int(row['has_wiki']) * float(cfg.has_wiki_weight) / 100
                a14 = row['fork_count'] * float(cfg.fork_count_weight) / 100
                a15 = row['open_issues'] * float(cfg.open_issues_weight) / 100
                a16 = row['sub_count'] * float(cfg.sub_count_weight) / 100
                cumulative_score = a1 + a2 + a3 + a4 + a5 + a6 + a7 + a8 + a9 + a10 + a11 + a12 + a13 + a14 + a15 + a16
                # Insert the cumulative score into the third column.
                self.user_repo_association.set_value(index, 'rating', cumulative_score)
                association_processing_limit -= 1
            except Exception as e:
                error = "Error in synthesizing association data. The error is = " + str(e) + ". Other info :: Row Data = " + str(row)
                print error
                log_mgr.add_log_to_file(error)
# networkName = "faceNetworkNoLD" networkName = "treeNetwork" # networkName = "cupNetwork" # networkName = "cupNetworkSmall" # networkName = "treeNetworkSmall" layerToObserve = 1 binarize = False samplesForAverage = 1 canvasWidth = 900 canvasHeight = 600 dbc = DatabaseConnector() network = dbc.getNetwork(networkName) rbm = DeepRBM(network.model) rbm.setWeights(network.weights) root = Tk() root.geometry(str(canvasWidth) + "x" + str(canvasHeight)) canvas = Canvas(root, width=canvasWidth, height=canvasHeight) canvas.pack() references = [] for i in range(network.model[layerToObserve]): print "Sampling neuron " + str(i)
class PaperTraderServer(object):
    accountProperties = DatabaseConnector(mode="account")
    registeredAccounts = {}
    zmqContext = Context.instance()

    # -------------------------
    # Startup
    # -------------------------

    def __init__(self):
        self.isServiceAvailable = True
        signal(SIGINT, self.exit_gracefully)
        signal(SIGTERM, self.exit_gracefully)
        self.logging = ErrorReportingClient(service="paper_trader")
        self.cache = {}

    def exit_gracefully(self):
        print("[Startup]: Paper Trader Server handler is exiting")
        self.isServiceAvailable = False

    # -------------------------
    # Job queue
    # -------------------------

    def run(self):
        while self.isServiceAvailable:
            try:
                sleep(Utils.seconds_until_cycle())
                t = datetime.now().astimezone(utc)
                timeframes = Utils.get_accepted_timeframes(t)
                if "1m" in timeframes:
                    self.update_accounts()
                    self.process_paper_limit_orders()
            except (KeyboardInterrupt, SystemExit):
                return
            except Exception:
                print(format_exc())
                if environ["PRODUCTION_MODE"]:
                    self.logging.report_exception()

    def update_accounts(self):
        try:
            self.registeredAccounts = self.accountProperties.keys()
        except (KeyboardInterrupt, SystemExit):
            pass
        except Exception:
            print(format_exc())
            if environ["PRODUCTION_MODE"]:
                self.logging.report_exception()

    # -------------------------
    # Paper trading
    # -------------------------

    def process_paper_limit_orders(self):
        try:
            self.cache = {}
            users = database.document("details/openPaperOrders").collections()
            with ThreadPoolExecutor(max_workers=20) as pool:
                for user in users:
                    accountId = user.id
                    authorId = self.registeredAccounts.get(accountId, accountId)
                    if authorId is None:
                        continue
                    for order in user.stream():
                        pool.submit(self.check_paper_order, authorId, accountId, order.reference, order.id, order.to_dict())
        except (KeyboardInterrupt, SystemExit):
            pass
        except Exception:
            print(format_exc())
            if environ["PRODUCTION_MODE"]:
                self.logging.report_exception()

    def check_paper_order(self, authorId, accountId, reference, orderId, order):
        socket = PaperTraderServer.zmqContext.socket(REQ)
        socket.connect("tcp://candle-server:6900")
        socket.setsockopt(LINGER, 3)
        poller = Poller()
        poller.register(socket, POLLIN)
        try:
            currentPlatform = order["request"].get("currentPlatform")
            currentRequest = order["request"].get(currentPlatform)
            ticker = currentRequest.get("ticker")
            hashName = hash(dumps(ticker, option=OPT_SORT_KEYS))

            if order["timestamp"] < time() - 86400 * 30.5 * 3:
                if environ["PRODUCTION_MODE"]:
                    database.document("discord/properties/messages/{}".format(str(uuid4()))).set({
                        "title": "Paper {} order of {} {} at {} {} expired.".format(order["orderType"].replace("-", " "), order["amountText"], ticker.get("base"), order["price"], ticker.get("quote")),
                        "subtitle": "Alpha Paper Trader",
                        "description": "Paper orders automatically cancel after 3 months. \nIf you'd like to keep your order, you'll have to set it again.",
                        "color": 6765239,
                        "user": authorId,
                        "channel": order["channel"]
                    })
                    reference.delete()
                else:
                    # accountId and the quote ticker were missing from the original format call
                    print("{}: paper {} order of {} {} at {} {} expired".format(accountId, order["orderType"].replace("-", " "), order["amountText"], ticker.get("base"), order["price"], ticker.get("quote")))
            else:
                if hashName in self.cache:
                    payload = self.cache.get(hashName)
                else:
                    order["request"]["timestamp"] = time()
                    order["request"]["authorId"] = authorId
                    socket.send_multipart([b"papertrader", b"candle", dumps(order["request"])])
                    responses = poller.poll(30 * 1000)
                    if len(responses) != 0:
                        [payload, responseText] = socket.recv_multipart()
                        payload = loads(payload)
                        responseText = responseText.decode()
                        if not bool(payload):
                            if responseText != "":
                                print("Paper order request error:", responseText)
                                if environ["PRODUCTION_MODE"]:
                                    self.logging.report(responseText)
                            return
                        self.cache[hashName] = payload
                    else:
                        raise Exception("time out")

                accountProperties = self.accountProperties.get(accountId)
                for candle in reversed(payload["candles"]):
                    if candle[0] < order["timestamp"] / 1000:
                        break
                    if (order["placement"] == "below" and candle[3] is not None and candle[3] <= order["price"]) or (order["placement"] == "above" and candle[2] is not None and order["price"] <= candle[2]):
                        if environ["PRODUCTION_MODE"]:
                            base = ticker.get("base")
                            quote = ticker.get("quote")
                            stablecoins = ["USD", "USDT", "USDC", "DAI", "HUSD", "TUSD", "PAX", "USDK", "USDN", "BUSD", "GUSD", "USDS"]
                            if base in stablecoins:
                                baseBalance = accountProperties["paperTrader"]["balance"]
                                base = "USD"
                            else:
                                baseBalance = accountProperties["paperTrader"]["balance"][currentPlatform]
                            if quote in stablecoins:
                                quoteBalance = accountProperties["paperTrader"]["balance"]
                                quote = "USD"
                            else:
                                quoteBalance = accountProperties["paperTrader"]["balance"][currentPlatform]
                            execAmount = order["amount"]
                            if order["orderType"] == "buy":
                                baseBalance[base] = baseBalance.get(base, 0) + execAmount
                            elif order["orderType"] == "sell":
                                quoteBalance[quote] = quoteBalance.get(quote, 0) + execAmount * order["price"]
                            elif order["orderType"] == "stop-buy":
                                execAmount = min(abs(quoteBalance.get(quote, 0)), order["price"] * execAmount) / order["price"]
                                baseBalance[base] = baseBalance.get(base, 0) + execAmount
                                quoteBalance[quote] = quoteBalance.get(quote, 0) - order["price"] * execAmount
                            elif order["orderType"] == "stop-sell":
                                execAmount = min(abs(baseBalance.get(base, 0)), execAmount)
                                baseBalance[base] = baseBalance.get(base, 0) - execAmount
                                quoteBalance[quote] = quoteBalance.get(quote, 0) + execAmount * order["price"]
                            order["status"] = "filled"
                            database.document("details/paperOrderHistory/{}/{}".format(accountId, orderId)).set(order)
                            database.document("accounts/{}".format(accountId)).set({"paperTrader": accountProperties["paperTrader"]}, merge=True)
                            database.document("discord/properties/messages/{}".format(str(uuid4()))).set({
                                "title": "Paper {} order of {} {} at {} {} was successfully executed.".format(order["orderType"].replace("-", " "), order["amountText"], ticker.get("base"), order["price"], ticker.get("quote")),
                                "subtitle": "Alpha Paper Trader",
                                "color": 6765239,
                                "user": authorId
                            })
                            reference.delete()
                        else:
                            print("{}: paper {} order of {} {} at {} {} was successfully executed".format(accountId, order["orderType"].replace("-", " "), order["amountText"], ticker.get("base"), order["price"], ticker.get("quote")))
                        break
        except (KeyboardInterrupt, SystemExit):
            pass
        except Exception:
            print(format_exc())
            if environ["PRODUCTION_MODE"]:
                self.logging.report_exception(user=f"{accountId}, {authorId}")
        socket.close()  # mirrors the alerts server; missing in the original snippet
from Tkinter import *
from PIL import Image, ImageTk
from DatabaseConnector import DatabaseConnector
from DeepRBM import DeepRBM
import numpy as np

networkName = "treeNetwork"

dbc = DatabaseConnector()
network = dbc.getNetwork(networkName)
rbm = DeepRBM(network.model)
rbm.setWeights(network.weights)

samplesForAverage = 10
binarize = True
imageWidth = network.imageWidth
imageHeight = network.imageHeight
input = np.random.randn(samplesForAverage, network.model[0])
flag = True
refreshRate = 1


def updateImage():
    global picture
    global flag
    global input
    global samplesForAverage
def __init__(self, app_id):
    self.__waf = WolframAlphaFetch(app_id)
    self.__dbc = DatabaseConnector.getInstance()
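# Hypothetical sketch of the getInstance() accessor used above — a classic
# lazily-created singleton. The real DatabaseConnector implementation is not
# shown in this section; this only illustrates the assumed pattern.
class DatabaseConnector(object):
    __instance = None

    @classmethod
    def getInstance(cls):
        if cls.__instance is None:
            cls.__instance = cls()
        return cls.__instance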
class Alpha(discord.Client):
    isBotReady = False
    clientId = None
    clientName = None
    timeOffset = 0
    lastPing = 0
    exponentialBakcoff = 0
    guildProperties = DatabaseConnector(mode="guild")
    tickerId = None
    exchange = None
    platform = None
    isFree = False

    def prepare(self):
        """Prepares all required objects and fetches Alpha settings"""
        Processor.clientId = b"discord_satellite"
        self.logging = error_reporting.Client()
        self.timeOffset = randint(0, 30)
        self.priceText = None
        time.sleep(self.timeOffset)
        self.clientName = str(uuid.uuid4())
        self.get_assigned_id()
        print("[Startup]: received task: {}/{}/{}".format(self.platform, self.exchange, self.tickerId))
        print("[Startup]: parser initialization complete")

    async def on_ready(self):
        """Initiates all Discord dependent functions and flags the bot as ready to process requests"""
        self.isBotReady = True
        print("[Startup]: Alpha Satellite is online")

    def get_assigned_id(self):
        try:
            currentSelectedId = self.clientId
            tasks = database.collection("dataserver/configuration/satellites").get()
            assignments = {doc.id: doc.to_dict() for doc in tasks}
            if self.clientId is None or assignments[self.clientId]["uuid"] != self.clientName:
                for clientId in assignments:
                    if currentSelectedId is None or assignments[clientId]["ping"] < assignments[currentSelectedId]["ping"]:
                        currentSelectedId = clientId
            if os.environ["PRODUCTION_MODE"] and time.time() > self.lastPing:
                database.document("dataserver/configuration/satellites/{}".format(currentSelectedId)).set({
                    "ping": int(time.time()),
                    "uuid": self.clientName
                }, merge=True)
                self.lastPing = time.time() + 1 * 1.1 ** self.exponentialBakcoff
                self.exponentialBakcoff += 1
            if self.clientId is None or not self.isBotReady:
                self.clientId = currentSelectedId
                self.platform, self.exchange, self.tickerId = assignments[self.clientId]["task"]
                self.isFree = self.tickerId in ["BTCUSD", "ETHUSD"] and self.platform == "CoinGecko"
            elif self.clientId != currentSelectedId and os.environ["PRODUCTION_MODE"]:
                self.isBotReady = False
                self.clientId = currentSelectedId
                self.platform, self.exchange, self.tickerId = assignments[self.clientId]["task"]
                self.isFree = self.tickerId in ["BTCUSD", "ETHUSD"] and self.platform == "CoinGecko"
                print("[Shutdown]: Task mismatch, shutting down")
                raise KeyboardInterrupt
        except Exception:
            print(traceback.format_exc())
            if os.environ["PRODUCTION_MODE"]:
                self.logging.report_exception()

    async def on_guild_join(self, guild):
        """Updates guild count on the guild_join event and leaves all guilds flagged as banned

        Parameters
        ----------
        guild : discord.Guild
            Guild object passed by discord.py
        """
        try:
            properties = await self.guildProperties.get(guild.id)
            if properties is None:
                return
            elif not self.isFree and not properties["addons"]["satellites"]["enabled"]:
                try:
                    await guild.me.edit(nick="Disabled")
                except:
                    return
            elif self.isFree or properties["addons"]["satellites"]["connection"] is not None:
                try:
                    await guild.me.edit(nick=self.priceText)
                except:
                    return
            else:
                try:
                    await guild.me.edit(nick="Alpha Pro required")
                except:
                    return
        except Exception:
            print(traceback.format_exc())
            if os.environ["PRODUCTION_MODE"]:
                self.logging.report_exception()

    async def job_queue(self):
        """Updates Alpha Bot user status with latest prices"""
        while True:
            try:
                await asyncio.sleep(Utils.seconds_until_cycle())
                if not await self.guildProperties.check_status():
                    continue
                t = datetime.datetime.now().astimezone(pytz.utc)
                timeframes = Utils.get_accepted_timeframes(t)
                isPremium = self.tickerId in ["EURUSD", "GBPUSD", "AUDJPY", "AUDUSD", "EURJPY", "GBPJPY", "NZDJPY", "NZDUSD", "CADUSD", "JPYUSD", "ZARUSD"]
                refreshRate = "5m" if len(client.guilds) > 1 and (not isPremium or len(client.guilds) > 15) else "15m"
                if "1m" in timeframes:
                    self.get_assigned_id()
                if refreshRate in timeframes:
                    await asyncio.sleep(self.timeOffset)
                    try:
                        outputMessage, request = Processor.process_quote_arguments(client.user.id, [] if self.exchange is None else [self.exchange], tickerId=self.tickerId, platformQueue=[self.platform])
                    except:
                        continue
                    if outputMessage is not None:
                        print(outputMessage)
                        if os.environ["PRODUCTION_MODE"]:
                            self.logging.report(outputMessage)
                        continue
                    try:
                        payload, quoteText = await Processor.execute_data_server_request("quote", request, timeout=30)
                    except:
                        continue
                    if payload is None or payload["quotePrice"] is None:
                        print("Requested price for `{}` is not available".format(request.get_ticker().name) if quoteText is None else quoteText)
                        continue
                    self.priceText = "{} {}".format(payload["quotePrice"], payload["quoteTicker"])
                    changeText = "" if payload["change"] is None else "{:+.2f} % | ".format(payload["change"])
                    tickerText = "{} | ".format(request.get_ticker().id) if request.get_exchange() is None else "{} on {} | ".format(request.get_ticker().id, request.get_exchange().name)
                    statusText = "{}{}alphabotsystem.com".format(changeText, tickerText)
                    status = discord.Status.online if payload["change"] is None or payload["change"] >= 0 else discord.Status.dnd
                    for guild in client.guilds:
                        properties = await self.guildProperties.get(guild.id)
                        if properties is None:
                            continue
                        elif not self.isFree and not properties["addons"]["satellites"]["enabled"]:
                            try:
                                await guild.me.edit(nick="Disabled")
                            except:
                                continue
                        elif self.isFree or properties["addons"]["satellites"]["connection"] is not None:
                            try:
                                await guild.me.edit(nick=self.priceText)
                            except:
                                continue
                        else:
                            try:
                                await guild.me.edit(nick="Alpha Pro required")
                            except:
                                continue
                    try:
                        await client.change_presence(status=status, activity=discord.Activity(type=discord.ActivityType.watching, name=statusText))
                    except:
                        pass
            except asyncio.CancelledError:
                return
            except Exception:
                print(traceback.format_exc())
                if os.environ["PRODUCTION_MODE"]:
                    self.logging.report_exception()
# Trains a deep RBM on the "gray-40x40-tree" image set.
# Module paths below are assumed from the surrounding project layout.
import numpy as np
from DatabaseConnector import DatabaseConnector
from DeepRBM import DeepRBM

networkName = "treeNetworkSmall"
setNames = ["gray-40x40-tree"]
imageHeights = 40
imageWidths = 40
networkShape = [1600, 50, 50]
learningRate = 0.5
momentum = 0.9
numberIterations = 10000
batchSize = 10
LD1coefficient = 0.0001  # Costs an extra ~1/2 second for size-100 batches from 1600 to 200; 0 for off
startOver = True

dbc = DatabaseConnector()
trainingSets = [dbc.getTrainingSetId(setNames[0])]
cases = dbc.getTrainingCases(setNames[0])
cases = np.array(cases)

rbm = DeepRBM(networkShape)
if not startOver:
    # Resume from previously saved weights if the network exists
    n = dbc.getNetwork(networkName)
    if n:
        rbm.setWeights(n.weights)

print("Number of training cases")
print(cases.shape)
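# A sketch of what LD1coefficient above typically controls: an L1 weight-decay
# term added to each gradient step, which drives small weights toward zero and
# sparsifies the network. DeepRBM's actual update rule is not shown in this
# document, so this standalone numpy update is an assumption.
def update_weights(weights, gradient, velocity, learningRate=0.5, momentum=0.9, LD1coefficient=0.0001):
    # Momentum step with an L1 penalty; LD1coefficient = 0 turns the decay off.
    velocity = momentum * velocity + learningRate * (gradient - LD1coefficient * np.sign(weights))
    return weights + velocity, velocity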
class CronJobs(object):
    accountProperties = DatabaseConnector(mode="account")
    zmqContext = zmq.Context.instance()

    # -------------------------
    # Startup
    # -------------------------

    def __init__(self):
        self.isServiceAvailable = True
        signal.signal(signal.SIGINT, self.exit_gracefully)
        signal.signal(signal.SIGTERM, self.exit_gracefully)
        self.logging = error_reporting.Client()

    def exit_gracefully(self, signum=None, frame=None):
        # Signal handlers are invoked with (signum, frame)
        print("[Startup]: Cron Job handler is exiting")
        self.isServiceAvailable = False

    # -------------------------
    # Job queue
    # -------------------------

    def run(self):
        while self.isServiceAvailable:
            try:
                time.sleep(Utils.seconds_until_cycle())
                t = datetime.datetime.now().astimezone(pytz.utc)
                timeframes = Utils.get_accepted_timeframes(t)

                if "1m" in timeframes:
                    self.process_price_alerts()
                    # self.update_paper_limit_orders()
            except (KeyboardInterrupt, SystemExit):
                return
            except Exception:
                print(traceback.format_exc())
                if os.environ["PRODUCTION_MODE"]: self.logging.report_exception()

    def job_queue(self):
        while True:
            try:
                time.sleep(Utils.seconds_until_cycle())
                t = datetime.datetime.now().astimezone(pytz.utc)
                timeframes = Utils.get_accepted_timeframes(t)

                if "4H" in timeframes:
                    self.update_popular_tickers()
            except (KeyboardInterrupt, SystemExit):
                return
            except Exception:
                print(traceback.format_exc())
                if os.environ["PRODUCTION_MODE"]: self.logging.report_exception()

    # -------------------------
    # Price Alerts
    # -------------------------

    def process_price_alerts(self):
        """Sends out price alert notifications"""
        try:
            with ThreadPoolExecutor(max_workers=20) as pool:
                accounts = pool.submit(asyncio.run, self.accountProperties.keys()).result()
                users = database.document("details/marketAlerts").collections()
                for user in users:
                    accountId = user.id
                    authorId = pool.submit(asyncio.run, self.accountProperties.match(accountId)).result() if accountId in accounts else accountId
                    if authorId is None: continue
                    for alert in user.stream():
                        pool.submit(self.check_price_alert, authorId, accountId, alert.reference, alert.to_dict())
        except (KeyboardInterrupt, SystemExit):
            pass
        except Exception:
            print(traceback.format_exc())
            if os.environ["PRODUCTION_MODE"]: self.logging.report_exception()

    def check_price_alert(self, authorId, accountId, reference, alert):
        socket = CronJobs.zmqContext.socket(zmq.REQ)
        socket.connect("tcp://candle-server:6900")
        socket.setsockopt(zmq.LINGER, 3)
        poller = zmq.Poller()
        poller.register(socket, zmq.POLLIN)

        try:
            alertRequest = pickle.loads(zlib.decompress(alert["request"]))
            alertRequest.timestamp = time.time()
            ticker = alertRequest.get_ticker()
            exchange = alertRequest.get_exchange()

            if alertRequest.currentPlatform == "CCXT":
                levelText = Utils.format_price(exchange.properties, ticker.symbol, alert["level"])
            elif alertRequest.currentPlatform == "IEXC" or alertRequest.currentPlatform == "Quandl":
                levelText = "{:,.5f}".format(alert["level"])
            else:
                levelText = "{:,.0f}".format(alert["level"])

            if alert["timestamp"] < time.time() - 86400 * 30.5 * 6:
                # Alerts older than roughly six months expire
                if os.environ["PRODUCTION_MODE"]:
                    database.document("discord/properties/messages/{}".format(str(uuid.uuid4()))).set({
                        "title": "Price alert for {} ({}) at {} {} expired.".format(ticker.base, alertRequest.currentPlatform if exchange is None else exchange.name, levelText, ticker.quote),
                        "subtitle": "Alpha Price Alerts",
                        "description": "Price alerts automatically cancel after 6 months. If you'd like to keep your alert, you'll have to schedule it again.",
                        "color": 6765239,
                        "user": authorId,
                        "channel": alert["channel"]
                    })
                    reference.delete()
                else:
                    print("{}: price alert for {} ({}) at {} {} expired.".format(accountId, ticker.base, alertRequest.currentPlatform if exchange is None else exchange.name, levelText, ticker.quote))
            else:
                socket.send_multipart([b"cronjob", b"candle", zlib.compress(pickle.dumps(alertRequest, -1))])
                responses = poller.poll(30 * 1000)

                if len(responses) != 0:
                    response = socket.recv()
                    payload, responseText = pickle.loads(zlib.decompress(response))

                    if payload is None:
                        if responseText is not None:
                            print("Alert request error", responseText)
                            if os.environ["PRODUCTION_MODE"]: self.logging.report(responseText)
                        return

                    alertRequest.set_current(platform=payload["platform"])
                    for candle in reversed(payload["candles"]):
                        if candle[0] < alert["timestamp"]: break
                        if (candle[3] <= alert["level"] and alert["placement"] == "below") or (alert["level"] <= candle[2] and alert["placement"] == "above"):
                            if os.environ["PRODUCTION_MODE"]:
                                database.document("discord/properties/messages/{}".format(str(uuid.uuid4()))).set({
                                    "title": "Price of {} ({}) hit {} {}.".format(ticker.base, alertRequest.currentPlatform if exchange is None else exchange.name, levelText, ticker.quote),
                                    "subtitle": "Alpha Price Alerts",
                                    "description": None,
                                    "color": 6765239,
                                    "user": authorId,
                                    "channel": alert["channel"]
                                })
                                reference.delete()
                            else:
                                print("{}: price of {} ({}) hit {} {}.".format(accountId, ticker.base, alertRequest.currentPlatform if exchange is None else exchange.name, levelText, ticker.quote))
                            break
        except (KeyboardInterrupt, SystemExit):
            pass
        except Exception:
            print(traceback.format_exc())
            if os.environ["PRODUCTION_MODE"]: self.logging.report_exception()
        socket.close()

    # -------------------------
    # Paper trading
    # -------------------------

    def update_paper_limit_orders(self):
        """Processes paper limit orders"""
        socket = CronJobs.zmqContext.socket(zmq.REQ)
        socket.connect("tcp://candle-server:6900")
        socket.setsockopt(zmq.LINGER, 3)
        poller = zmq.Poller()
        poller.register(socket, zmq.POLLIN)

        try:
            for accountId in self.accountProperties:
                if "customer" in self.accountProperties[accountId]:
                    for exchange in self.accountProperties[accountId]["paperTrader"]:
                        if exchange in ["globalLastReset", "globalResetCount"]: continue
                        paper = self.accountProperties[accountId]["paperTrader"][exchange]

                        for order in list(paper["openOrders"]):
                            paperRequest = pickle.loads(zlib.decompress(order["request"]))
                            ticker = paperRequest.get_ticker()
                            # NOTE: rebinds the loop's exchange key to the request's exchange object
                            exchange = paperRequest.get_exchange()

                            if paperRequest.currentPlatform == "CCXT":
                                levelText = Utils.format_price(exchange.properties, ticker.symbol, order["price"])
                            elif paperRequest.currentPlatform == "IEXC" or paperRequest.currentPlatform == "Quandl":
                                levelText = "{:,.5f}".format(order["price"])
                            else:
                                levelText = "{:,.0f}".format(order["price"])

                            socket.send_multipart([b"cronjob", b"candle", order["request"]])
                            responses = poller.poll(5 * 1000)

                            if len(responses) != 0:
                                response = socket.recv()
                                payload, responseText = pickle.loads(zlib.decompress(response))

                                if payload is None:
                                    if responseText is not None:
                                        print("Paper order request error", responseText)
                                        if os.environ["PRODUCTION_MODE"]: self.logging.report(responseText)
                                    return

                                for candle in reversed(payload["candles"]):
                                    if candle[0] < order["timestamp"] / 1000: break
                                    if candle[3] < order["price"] < candle[2]:
                                        baseOrder = paper["balance"][ticker.base]
                                        quoteOrder = paper["balance"][ticker.quote]
                                        execAmount = order["amount"]
                                        isPricePercent, isLimitOrder, reduceOnly = order["parameters"]

                                        if reduceOnly and ((order["orderType"] == "buy" and baseOrder["amount"] >= 0) or (order["orderType"] == "sell" and baseOrder["amount"] <= 0)):
                                            order["status"] = "canceled"
                                            paper["openOrders"].remove(order)

                                        if exchange.id == "bitmex":
                                            # "leverage" is assumed to be defined elsewhere in the original module
                                            averageEntry = (baseOrder["entry"] * baseOrder["amount"] + order["price"] * execAmount) / (baseOrder["amount"] + execAmount) if baseOrder["amount"] + execAmount != 0 else 0
                                            quoteValue = (abs(execAmount) * (-1 if reduceOnly else 1)) / (averageEntry if averageEntry != 0 else baseOrder["entry"]) / leverage
                                            roi = ((order["price"] - baseOrder["entry"]) * 0.000001 if ticker.symbol == "ETH/USD" else (1 / baseOrder["entry"] - 1 / order["price"])) * baseOrder["amount"] if baseOrder["entry"] != 0 else 0
                                            orderFee = execAmount * exchange.properties.markets[ticker.symbol]["maker" if isLimitOrder else "taker"]

                                            if order["orderType"] == "buy" or order["orderType"] == "sell":
                                                baseOrder["entry"] = averageEntry
                                                baseOrder["amount"] += execAmount
                                            elif order["orderType"] == "stop-buy" or order["orderType"] == "stop-sell":
                                                quoteOrder["amount"] += round(roi - (quoteValue + abs(orderFee) / order["price"]), 8)
                                                baseOrder["entry"] = averageEntry
                                                baseOrder["amount"] += execAmount
                                        else:
                                            if order["orderType"] == "buy":
                                                if reduceOnly: execAmount = min(abs(quoteOrder["amount"]), order["price"] * execAmount) / order["price"]
                                                orderFee = execAmount * exchange.properties.markets[ticker.symbol]["maker"]
                                                baseOrder["amount"] += execAmount - orderFee
                                            elif order["orderType"] == "sell":
                                                if reduceOnly: execAmount = min(abs(baseOrder["amount"]), execAmount)
                                                orderFee = execAmount * exchange.properties.markets[ticker.symbol]["maker"]
                                                quoteOrder["amount"] += (execAmount - orderFee) * order["price"]
                                            elif order["orderType"] == "stop-buy":
                                                if reduceOnly: execAmount = min(abs(quoteOrder["amount"]), order["price"] * execAmount) / order["price"]
                                                orderFee = execAmount * exchange.properties.markets[ticker.symbol]["taker"]
                                                baseOrder["amount"] += execAmount - orderFee
                                                quoteOrder["amount"] -= order["price"] * execAmount
                                            elif order["orderType"] == "stop-sell":
                                                if reduceOnly: execAmount = min(abs(baseOrder["amount"]), execAmount)
                                                orderFee = execAmount * exchange.properties.markets[ticker.symbol]["taker"]
                                                baseOrder["amount"] -= execAmount
                                                quoteOrder["amount"] += (execAmount - orderFee) * order["price"]

                                        paper["openOrders"].remove(order)
                                        order["status"] = "filled"
                                        paper["history"].append(order)
                                        database.document("accounts/{}".format(accountId)).set({"paperTrader": {exchange.id: paper}}, merge=True)

                                        if self.server.accountProperties[accountId]["oauth"]["discord"].get("userId") is not None:
                                            database.document("discord/properties/messages/{}".format(str(uuid.uuid4()))).set({
                                                "title": "Paper {} order of {} {} on {} at {} was successfully executed.".format(order["orderType"].replace("-", " "), Utils.format_amount(exchange.properties, ticker.symbol, order["amount"]), order["base"], exchange.name, order["price"]),
                                                "subtitle": "Alpha Paper Trader",
                                                "description": None,
                                                "color": 6765239,
                                                "user": self.server.accountProperties[accountId]["oauth"]["discord"]["userId"],
                                                "channel": "611107823111372810"
                                            })
        except (KeyboardInterrupt, SystemExit):
            pass
        except Exception:
            print(traceback.format_exc())
            if os.environ["PRODUCTION_MODE"]: self.logging.report_exception()

    # -------------------------
    # Data updates
    # -------------------------

    def update_popular_tickers(self):
        if not os.environ["PRODUCTION_MODE"]: return
        try:
            processingTimestamp = time.time()
            platforms = ["TradingLite", "TradingView", "Bookmap", "GoCharting", "Alpha Flow", "CoinGecko", "CCXT", "IEXC", "Quandl", "Alpha Paper Trader"]
            dataset1d = []
            dataset8d = []
            topTickerMap = {"traditional": {}, "crypto": {}}
            risingTickerMap = {"traditional": {}, "crypto": {}}

            for platform in platforms:
                requests1d = database.collection("dataserver/statistics/{}".format(platform)).where("timestamp", ">=", processingTimestamp - 86400 * 1).get()
                requests8d = database.collection("dataserver/statistics/{}".format(platform)).where("timestamp", "<", processingTimestamp - 86400 * 1).where("timestamp", ">=", processingTimestamp - 86400 * 8).get()
                requests31d = database.collection("dataserver/statistics/{}".format(platform)).where("timestamp", "<", processingTimestamp - 86400 * 31).get()

                # Requests older than 31 days are pruned
                for e in requests31d:
                    database.document("dataserver/statistics/{}/{}".format(platform, e.id)).delete()

                # Normalize BTC/ETH aliases and skip aggregate tickers
                for e in requests1d:
                    request = e.to_dict()
                    if request["ticker"]["base"] in ["BTC", "BTC.D", "BTC1!", "BLX", "XBT", "XBTUSD", "BTCUSD", "BTCUSDT"]:
                        request["ticker"]["base"] = "BTC" if request["ticker"]["bias"] == "crypto" else "BTCUSD"
                    if request["ticker"]["base"] in ["ETH", "ETH.D", "ETHUSD", "ETHUSDT"]:
                        request["ticker"]["base"] = "ETH" if request["ticker"]["bias"] == "crypto" else "ETHUSD"
                    if request["ticker"]["base"] in ["TOTAL2", "TOTAL", "OPTIONS"] or any([e in request["ticker"]["base"] for e in ["LONGS", "SHORTS"]]):
                        continue
                    dataset1d.append(request)
                for e in requests8d:
                    request = e.to_dict()
                    if request["ticker"]["base"] in ["BTC", "BTC.D", "BTC1!", "BLX", "XBT", "XBTUSD", "BTCUSD", "BTCUSDT"]:
                        request["ticker"]["base"] = "BTC" if request["ticker"]["bias"] == "crypto" else "BTCUSD"
                    if request["ticker"]["base"] in ["ETH", "ETH.D", "ETHUSD", "ETHUSDT"]:
                        request["ticker"]["base"] = "ETH" if request["ticker"]["bias"] == "crypto" else "ETHUSD"
                    if request["ticker"]["base"] in ["TOTAL2", "TOTAL", "OPTIONS"] or any([e in request["ticker"]["base"] for e in ["LONGS", "SHORTS"]]):
                        continue
                    dataset8d.append(request)

            for request in dataset1d:
                topTickerMap[request["ticker"]["bias"]][request["ticker"]["base"]] = topTickerMap[request["ticker"]["bias"]].get(request["ticker"]["base"], 0) + 1
            for request in dataset8d:
                risingTickerMap[request["ticker"]["bias"]][request["ticker"]["base"]] = risingTickerMap[request["ticker"]["bias"]].get(request["ticker"]["base"], 0) + 1

            sortedTopTickerMap = {
                "traditional": sorted(topTickerMap["traditional"].items(), key=lambda item: item[1]),
                "crypto": sorted(topTickerMap["crypto"].items(), key=lambda item: item[1])
            }
            sortedRisingTickerMap = {
                "traditional": sorted([(base, topTickerMap["traditional"].get(base, 0) / (score / 7)) for base, score in risingTickerMap["traditional"].items() if score >= 7], key=lambda item: item[1]),
                "crypto": sorted([(base, topTickerMap["crypto"].get(base, 0) / (score / 7)) for base, score in risingTickerMap["crypto"].items() if score >= 7], key=lambda item: item[1])
            }

            maxScoreTopTraditional = sortedTopTickerMap["traditional"][-1][1]
            maxScoreTopCrypto = sortedTopTickerMap["crypto"][-1][1]

            topTraditionalTickers = [{"id": k, "rank": v / maxScoreTopTraditional * 100} for k, v in sortedTopTickerMap["traditional"][-20:]]
            topCryptoTickers = [{"id": k, "rank": v / maxScoreTopCrypto * 100} for k, v in sortedTopTickerMap["crypto"][-20:]]
            risingTraditionalTickers = [{"id": k, "rank": v} for k, v in sortedRisingTickerMap["traditional"][-20:]]
            risingCryptoTickers = [{"id": k, "rank": v} for k, v in sortedRisingTickerMap["crypto"][-20:]]

            database.document("dataserver/statistics").set({
                "top": {"traditional": topTraditionalTickers, "crypto": topCryptoTickers},
                "rising": {"traditional": risingTraditionalTickers, "crypto": risingCryptoTickers}
            })
        except (KeyboardInterrupt, SystemExit):
            pass
        except Exception:
            print(traceback.format_exc())
            if os.environ["PRODUCTION_MODE"]: self.logging.report_exception()
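# A worked example of the "rising" ranking computed in update_popular_tickers
# above: a ticker's score is its 1-day request count divided by its average
# daily request count over the preceding week (dataset1d vs dataset8d), with a
# minimum of 7 weekly requests. The counts below are made up for illustration.
count1d = {"SOL": 40, "ADA": 10}   # topTickerMap counts (last 24 hours)
count8d = {"SOL": 70, "ADA": 70}   # risingTickerMap counts (days 1 through 8)

rising = sorted([(base, count1d.get(base, 0) / (score / 7))
                 for base, score in count8d.items() if score >= 7],
                key=lambda item: item[1])
print(rising)  # [('ADA', 1.0), ('SOL', 4.0)] -> SOL's daily interest quadrupled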
from XBeeReciever import XBeeReciever
from DatabaseConnector import DatabaseConnector
import serial
import struct
import binascii
import time

PORT = 'COM4'
BAUD_RATE = 9600
HOST = '127.0.0.1'
USER = '******'
PASSWORD = '******'
DB = 'homemaster'

xbee_reciever = XBeeReciever(PORT, BAUD_RATE)
db = DatabaseConnector(USER, PASSWORD, HOST, DB)

# Continuously read and print packets
while True:
    try:
        response_data = xbee_reciever.get_response()
        data = response_data['rf_data']
        data_length = response_data['data_length']
        source_address = response_data['source_address']

        if data_length == 22:
            temperature = xbee_reciever.get_temperature(data)
            hummidity = xbee_reciever.get_hummidity(data)
            date_and_time = time.strftime("%Y-%m-%d %H:%M:%S")
            query = ("INSERT INTO sensorsdata "
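# The INSERT statement above is cut off in this excerpt. As a general
# illustration only, a complete parameterized insert might look like the sketch
# below; the sensorsdata column names and the mysql-connector backend are
# assumptions, since DatabaseConnector's internals are not shown here.
import mysql.connector

conn = mysql.connector.connect(user=USER, password=PASSWORD, host=HOST, database=DB)
cursor = conn.cursor()
cursor.execute(
    "INSERT INTO sensorsdata (source_address, temperature, hummidity, recorded_at) "
    "VALUES (%s, %s, %s, %s)",
    (str(source_address), temperature, hummidity, date_and_time))
conn.commit()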
class Alpha(discord.AutoShardedClient):
    isBotReady = False
    updatingNickname = False
    timeOffset = 0
    accountProperties = DatabaseConnector(mode="account")
    guildProperties = DatabaseConnector(mode="guild")
    tickerId = None
    exchange = None
    platform = None
    isFree = False

    def prepare(self):
        Processor.clientId = b"discord_satellite"
        self.logging = ErrorReportingClient(service="satellites")
        self.timeOffset = randint(0, 600) / 10.0
        self.priceText = None

    async def on_ready(self):
        self.platform, self.exchange, self.tickerId = constants.configuration[client.user.id]
        self.isFree = self.platform == "CoinGecko" and self.exchange is None and self.tickerId in ["BTCUSD", "ETHUSD"]
        self.isBotReady = True
        print("[Startup]: Alpha Satellite is online")

    async def on_guild_remove(self, guild):
        try:
            guildProperties = await self.guildProperties.get(guild.id)
            if guildProperties is None: return

            if str(client.user.id) in guildProperties["addons"]["satellites"].get("added", []):
                await database.document("discord/properties/guilds/{}".format(guild.id)).set({
                    "addons": {"satellites": {"added": ArrayRemove([str(client.user.id)])}}
                }, merge=True)
        except Exception:
            print(format_exc())
            if environ["PRODUCTION_MODE"]: self.logging.report_exception(user=str(guild.id))

    async def job_queue(self):
        while True:
            try:
                await sleep(Utils.seconds_until_cycle())
                t = datetime.now().astimezone(utc)
                timeframes = Utils.get_accepted_timeframes(t)

                isPremium = self.tickerId in ["EURUSD", "GBPUSD", "AUDJPY", "AUDUSD", "EURJPY", "GBPJPY", "NZDJPY", "NZDUSD", "CADUSD", "JPYUSD", "ZARUSD"]
                if len(client.guilds) == 1:
                    refreshRate = "8H"
                elif isPremium and len(client.guilds) < 15:
                    refreshRate = "1H"
                else:
                    refreshRate = "5m"

                if refreshRate in timeframes and not self.updatingNickname:
                    client.loop.create_task(self.update_nicknames())
                if "1H" in timeframes:
                    client.loop.create_task(self.update_properties())
            except CancelledError:
                return
            except Exception:
                print(format_exc())
                if environ["PRODUCTION_MODE"]: self.logging.report_exception()

    async def update_properties(self):
        try:
            satelliteRef = database.document("dataserver/configuration/satellites/{}".format(client.user.id))
            properties = await satelliteRef.get()
            properties = properties.to_dict()

            guildIds = [str(e.id) for e in client.guilds]
            for guildId in properties.get("servers", []):
                if guildId not in guildIds:
                    await database.document("discord/properties/guilds/{}".format(guildId)).set({
                        "addons": {"satellites": {"added": ArrayRemove([guildId])}}
                    }, merge=True)

            await satelliteRef.set({"count": len(guildIds), "servers": guildIds})
        except CancelledError:
            return
        except Exception:
            print(format_exc())
            if environ["PRODUCTION_MODE"]: self.logging.report_exception()

    async def update_nicknames(self):
        try:
            self.updatingNickname = True
            await sleep(self.timeOffset)

            outputMessage, request = await Processor.process_quote_arguments(MessageRequest(), [] if self.exchange is None else [self.exchange], tickerId=self.tickerId, platformQueue=[self.platform])
            if outputMessage is not None:
                print(outputMessage)
                return

            try:
                payload, quoteText = await Processor.process_request("quote", client.user.id, request)
            except:
                return
            if payload is None or "quotePrice" not in payload:
                print("Something went wrong when fetching the price:", quoteText)
                return

            currentRequest = request.get(payload.get("platform"))
            ticker = currentRequest.get("ticker")

            self.priceText = payload["quotePrice"]
            changeText = "{} | ".format(payload["change"]) if "change" in payload else ""
            tickerText = "{} | ".format(ticker.get("id")) if not bool(ticker.get("exchange")) else "{} on {} | ".format(ticker.get("id"), ticker.get("exchange").get("name"))
            statusText = "{}{}alphabotsystem.com".format(changeText, tickerText)
            status = discord.Status.dnd if payload.get("messageColor") == "red" else discord.Status.online

            for guild in client.guilds:
                if not guild.me.guild_permissions.change_nickname:
                    continue
                if self.isFree:
                    await self.update_nickname(guild, self.priceText)
                else:
                    guildProperties = await self.guildProperties.get(guild.id)
                    if guildProperties is None:
                        await sleep(0.5)
                        continue

                    connection = guildProperties.get("addons", {}).get("satellites", {}).get("connection", guildProperties.get("settings", {}).get("setup", {}).get("connection"))
                    accountProperties = await self.accountProperties.get(connection)
                    if accountProperties is None:
                        await sleep(0.5)
                        continue

                    if accountProperties.get("customer", {}).get("personalSubscription", {}).get("subscription") is not None:
                        if not guildProperties["addons"]["satellites"]["enabled"]:
                            await database.document("discord/properties/guilds/{}".format(guild.id)).set({
                                "addons": {"satellites": {"enabled": True, "connection": connection}}
                            }, merge=True)
                        if str(client.user.id) not in guildProperties["addons"]["satellites"].get("added", []):
                            await database.document("discord/properties/guilds/{}".format(guild.id)).set({
                                "addons": {"satellites": {"added": ArrayUnion([str(client.user.id)])}}
                            }, merge=True)
                        await self.update_nickname(guild, self.priceText)
                    else:
                        await self.update_nickname(guild, "Alpha Pro required")

            try:
                await client.change_presence(status=status, activity=discord.Activity(type=discord.ActivityType.watching, name=statusText))
            except: pass
        except CancelledError:
            return
        except Exception:
            print(format_exc())
            if environ["PRODUCTION_MODE"]: self.logging.report_exception()
        finally:
            self.updatingNickname = False

    async def update_nickname(self, guild, nickname):
        # Only hit the API when the nickname actually changed
        if guild.me.nick != nickname:
            try: await guild.me.edit(nick=nickname)
            except: pass
        else:
            await sleep(0.5)
import sys
import datetime

from Message import Message
from MessageManager import MessageManager
from DatabaseConnector import DatabaseConnector

if __name__ == '__main__':
    if len(sys.argv) > 1:
        # e.g. /dev/ttyACM0
        A = Server(sys.argv[1])
        A.run()
    else:
        print("Missing serial port pathname")

    # database creation
    conn = DatabaseConnector.connectSQLiteDB('meshDB.db')

    # MessageManager creation
    messageManager = MessageManager(conn)

    # DB reading
    messageManager.getAllMessages()

    # CSV writing
    df = messageManager.getAllMessagesintoPandaDataframe()
    df.to_csv('csvFile.csv', index=False)

    # DB update
    newMessage = Message('3', datetime.datetime.utcnow(), '0000', '0000', '0000')
    messageManager.putMessage(newMessage)
class ForumSpider(scrapy.Spider):
    name = 'ForumSpider'

    # website specifics
    USERNAME = ""
    PASSWORD = ""
    WEBSITE_URL = ""

    # phpBB3 prosilver specifics
    LOGIN_PAGE_URL = ""
    LOGIN_PAGE = 'ucp.php?mode=login'
    VIEWTOPIC_PAGE = "viewtopic.php"
    VIEWFORUM_PAGE = "viewforum.php"
    INDEX_PAGE = "index.php"
    URL_FORUM_ARGUMENT = "f"
    URL_TOPIC_ARGUMENT = "t"
    URL_VIEW_PRINT_ARGUMENT = 'view=print'

    databaseConnector = DatabaseConnector()

    # selectors
    CURRENT_PAGE_SELECTOR = ".pagination > a > strong ::text"
    NEXT_PAGE_SELECTOR = ".display-options a.right-box::attr(href)"
    FORUMS_SELECTOR = ".forabg .forumtitle"
    FORUM_TITLE_SELECTOR = "::text"
    FORUM_HREF_SELECTOR = "::attr(href)"
    TOPICS_SELECTOR = ".forumbg .topictitle"
    TOPIC_TITLE_SELECTOR = "::text"
    TOPIC_HREF_SELECTOR = "::attr(href)"

    FORUM_URLS = []

    def __init__(self):
        settings = get_project_settings()
        self.USERNAME = settings.get('WEBSITE_USERNAME')
        self.PASSWORD = settings.get('WEBSITE_PASSWORD')
        self.WEBSITE_URL = settings.get('WEBSITE_URL')
        self.LOGIN_PAGE_URL = self.WEBSITE_URL + self.LOGIN_PAGE

        # forum urls to crawl
        self.FORUM_URLS = [self.WEBSITE_URL + self.VIEWFORUM_PAGE + "?" + self.URL_FORUM_ARGUMENT + "=1"]

    def start_requests(self):
        yield scrapy.Request(url=self.LOGIN_PAGE_URL, callback=self.login)

    def login(self, response):
        return scrapy.FormRequest.from_response(response,
                                                formdata={
                                                    'username': self.USERNAME,
                                                    'password': self.PASSWORD,
                                                    'login': '******',
                                                    'redirect': self.INDEX_PAGE
                                                },
                                                callback=self.afterLogin)

    def afterLogin(self, response):
        # response.body is bytes, so compare against a bytes literal
        if b"Logout" in response.body:
            logging.getLogger().info("Successfully logged in. Let's start crawling!")
            # crawl each forum
            for forumUrl in self.FORUM_URLS:
                yield scrapy.Request(url=forumUrl, callback=self.parse)
        else:
            logging.getLogger().info("Login failed.")
            return

    def getTopicIdFromUrl(self, url, forumId):
        return url.replace(self.WEBSITE_URL + self.VIEWTOPIC_PAGE + "?" + self.URL_FORUM_ARGUMENT + "=" + forumId + "&" + self.URL_TOPIC_ARGUMENT + "=", "")

    def getForumIdFromUrl(self, url):
        return url.replace(self.WEBSITE_URL + self.VIEWFORUM_PAGE + "?" + self.URL_FORUM_ARGUMENT + "=", "")

    def skipCallback(self, url):
        pass

    def parseTopicPage(self, response):
        currentForumId = response.meta["forumId"]
        topicItemId = response.meta["id"]
        topicItemTitle = response.meta["title"]
        topicItemUrl = response.meta["url"]
        topicItemContentHtml = response.body
        topicItem = Topic(id=topicItemId,
                          forumId=currentForumId,
                          title=topicItemTitle,
                          content=topicItemContentHtml,
                          url=topicItemUrl)
        logging.getLogger().info("Sending to pipeline topic ID: " + topicItem['id'])
        yield topicItem

    def parse(self, response):
        forumUrl = response.request.url
        currentForumId = forumUrl[36:].split('&')[0].replace(self.URL_FORUM_ARGUMENT + '=', '')
        currentPage = response.css(self.CURRENT_PAGE_SELECTOR)

        # save the whole page
        # crawl the page for sub-forums, but only on the first page
        forums = response.css(self.FORUMS_SELECTOR)
        if currentPage:
            currentPage = currentPage.extract()[0]
        if currentPage == '1' or not currentPage:
            if forums:
                for forum in forums:
                    forumItemTitle = forum.css(self.FORUM_TITLE_SELECTOR).extract()[0]
                    forumItemUrl = self.WEBSITE_URL + (forum.css(self.FORUM_HREF_SELECTOR).extract()[0])[2:]
                    forumItemId = self.getForumIdFromUrl(forumItemUrl)
                    forumItem = Forum(id=forumItemId, title=forumItemTitle, url=forumItemUrl)
                    logging.getLogger().info("Sending to pipeline forum ID: " + forumItem['id'])
                    yield forumItem
                    yield scrapy.Request(forumItemUrl, callback=self.parse)

        # crawl the page for topics, in case they exist
        topics = response.css(self.TOPICS_SELECTOR)
        if topics:
            for topic in topics:
                topicItemTitle = topic.css(self.TOPIC_TITLE_SELECTOR).extract()[0]
                topicItemUrl = self.WEBSITE_URL + (topic.css(self.TOPIC_HREF_SELECTOR).extract()[0])[2:]
                topicItemId = self.getTopicIdFromUrl(topicItemUrl, currentForumId)
                # responseTopicPage = (yield scrapy.Request(topicItemUrl + '&' + self.URL_VIEW_PRINT_ARGUMENT, callback=self.parseTopicPage, dont_filter=True))
                yield scrapy.Request(topicItemUrl + '&' + self.URL_VIEW_PRINT_ARGUMENT,
                                     callback=self.parseTopicPage,
                                     meta={
                                         "id": topicItemId,
                                         "title": topicItemTitle,
                                         "url": topicItemUrl,
                                         "forumId": currentForumId
                                     },
                                     dont_filter=True)

        # follow to the topic's next page or quit
        nextPageRelativeUrl = response.css(self.NEXT_PAGE_SELECTOR).extract()
        if nextPageRelativeUrl:
            nextPageRelativeUrl = nextPageRelativeUrl[0][2:]
            nextPageAbsoluteUrl = self.WEBSITE_URL + nextPageRelativeUrl
            time.sleep(randint(1, 3))
            yield scrapy.Request(nextPageAbsoluteUrl, callback=self.parse, dont_filter=True)
        else:
            logging.getLogger().info("Finished crawling Forum " + currentForumId)
            return
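# The spider reads its credentials from the Scrapy project settings; the keys
# below match the settings.get() calls in __init__ above, while the values and
# the settings.py placement are illustrative assumptions.
WEBSITE_USERNAME = "crawler"
WEBSITE_PASSWORD = "********"
WEBSITE_URL = "https://forum.example.com/"
# Run with: scrapy crawl ForumSpider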