def startagent(port = myport):
    """Start the agent's Twisted reactor, optionally daemonized and SSL-wrapped.

    NOTE(review): the ``port`` parameter is never used below -- the module
    global ``myport`` is referenced instead, so passing a different port has
    no effect.  Confirm whether that is intentional.
    """
    if agentdaemonize:
        print "Daemonizing"
        from hqdaemon import daemonize
        # Detach from the terminal; both stdout and stderr are redirected to
        # /tmp/daemon.log
        daemonize('/dev/null','/tmp/daemon.log','/tmp/daemon.log')
    if sys.platform.find("win32") != -1:
        # Raise the Windows C runtime's default stdio-handle cap (512) so the
        # agent can keep more files/sockets open at once
        import win32file
        win32file._setmaxstdio(2048)
    if use_ssl:
        print "Agent is started on port ", myport, " SSL is enabled "
        try:
            myContextFactory = ssl.DefaultOpenSSLContextFactory(agent_ssl_key, agent_ssl_cert)
            ctx = myContextFactory.getContext()
            # ctx.set_verify( SSL.VERIFY_PEER, verifyCallback)
            # ctx.set_options( SSL.OP_ALL )
            # Since we have self-signed certs we have to explicitly
            # tell the server to trust them.
            ctx.load_verify_locations(agent_ssl_pem)
        except Exception, e:
            print "Failed to start, SSL keys are specified but do not exist ", e
            sys.exit(1)
        reactor.listenSSL(myport, ExecuteActionFactory(), myContextFactory)
    # NOTE(review): when use_ssl is false nothing is listening before the
    # reactor starts here -- presumably a non-SSL listener is registered
    # elsewhere; confirm against the rest of the module.
    reactor.run(installSignalHandlers=False)
def set_max_open_files_limits(min_soft=3072, min_hard=4096):
    """Ensure the process's max-open-files limits meet the given minimums.

    Reads the current soft/hard limits (Windows: the CRT stdio cap via
    ``win32file``; POSIX: ``RLIMIT_NOFILE``), raises whichever is below its
    required minimum, and returns the resulting ``(soft, hard)`` pair.

    :param min_soft: minimum acceptable soft limit
    :param min_hard: minimum acceptable hard limit
    :return: tuple of (soft, hard) limits now in effect
    :raises SystemExit: if raising the limits fails
    """
    # Get current limits
    if salt.utils.platform.is_windows():
        import win32file

        prev_hard = win32file._getmaxstdio()
        # Windows has no soft/hard split; 512 is the CRT's default stdio cap
        prev_soft = 512
    else:
        import resource

        prev_soft, prev_hard = resource.getrlimit(resource.RLIMIT_NOFILE)

    # Check minimum required limits
    set_limits = False
    if prev_soft < min_soft:
        soft = min_soft
        set_limits = True
    else:
        soft = prev_soft

    if prev_hard < min_hard:
        hard = min_hard
        set_limits = True
    else:
        hard = prev_hard

    # Increase limits
    if set_limits:
        log.debug(
            " * Max open files settings is too low (soft: %s, hard: %s) for running the tests. "
            "Trying to raise the limits to soft: %s, hard: %s",
            prev_soft,
            prev_hard,
            soft,
            hard,
        )
        try:
            if salt.utils.platform.is_windows():
                # The Windows CRT refuses values above 2048 for _setmaxstdio
                hard = 2048 if hard > 2048 else hard
                win32file._setmaxstdio(hard)
            else:
                resource.setrlimit(resource.RLIMIT_NOFILE, (soft, hard))
        except Exception as err:  # pylint: disable=broad-except
            log.error(
                # Fixed: 'ulimit -n' sets max open files; '-u' (as previously
                # printed) sets max user processes and would not help here.
                "Failed to raise the max open files settings -> %s. Please issue the following command "
                "on your console: 'ulimit -n %s'",
                err,
                soft,
            )
            # Fixed: raise SystemExit directly instead of the site-injected
            # exit() builtin, which is not guaranteed to exist.
            raise SystemExit(1)
    return soft, hard
def set_filehandle_limits(self, limits='integration'):
    '''
    Raise the soft and hard open-file-handle limits to the thresholds that
    the requested test category ('integration' or 'unit') demands.
    '''
    on_windows = salt.utils.platform.is_windows()

    # Current limits (Windows exposes only the CRT stdio cap; 512 is its
    # default soft value)
    if on_windows:
        import win32file
        cur_hard = win32file._getmaxstdio()
        cur_soft = 512
    else:
        cur_soft, cur_hard = resource.getrlimit(resource.RLIMIT_NOFILE)

    # Required thresholds for this test category
    required = MAX_OPEN_FILES[limits]
    soft = max(cur_soft, required['soft_limit'])
    hard = max(cur_hard, required['hard_limit'])

    # Nothing to do when both limits already satisfy the thresholds
    if soft == cur_soft and hard == cur_hard:
        return

    print(' * Max open files settings is too low (soft: {0}, hard: {1}) '
          'for running the tests'.format(cur_soft, cur_hard))
    print(' * Trying to raise the limits to soft: '
          '{0}, hard: {1}'.format(soft, hard))
    try:
        if on_windows:
            # The Windows CRT caps _setmaxstdio at 2048
            hard = min(hard, 2048)
            win32file._setmaxstdio(hard)
        else:
            resource.setrlimit(resource.RLIMIT_NOFILE, (soft, hard))
    except Exception as err:
        print('ERROR: Failed to raise the max open files settings -> '
              '{0}'.format(err))
        print('Please issue the following command on your console:')
        print(' ulimit -n {0}'.format(soft))
        self.exit()
    finally:
        print('~' * getattr(self.options, 'output_columns', PNUM))
def set_filehandle_limits(self, limits="integration"):
    """
    Set soft and hard limits on open file handles at the thresholds required
    by the given test category ("integration" or "unit").
    """

    def _raise_to(current, minimum):
        # Return (new_value, whether_it_had_to_grow)
        return (minimum, True) if current < minimum else (current, False)

    # Current limits; Windows only exposes the CRT stdio cap, whose default
    # soft value is 512
    if salt.utils.platform.is_windows():
        import win32file

        prev_hard = win32file._getmaxstdio()
        prev_soft = 512
    else:
        prev_soft, prev_hard = resource.getrlimit(resource.RLIMIT_NOFILE)

    required = MAX_OPEN_FILES[limits]
    soft, grew_soft = _raise_to(prev_soft, required["soft_limit"])
    hard, grew_hard = _raise_to(prev_hard, required["hard_limit"])

    if not (grew_soft or grew_hard):
        # Both limits are already high enough
        return

    print(" * Max open files settings is too low (soft: {}, hard: {}) "
          "for running the tests".format(prev_soft, prev_hard))
    print(" * Trying to raise the limits to soft: "
          "{}, hard: {}".format(soft, hard))
    try:
        if salt.utils.platform.is_windows():
            # The Windows CRT caps _setmaxstdio at 2048
            hard = min(hard, 2048)
            win32file._setmaxstdio(hard)
        else:
            resource.setrlimit(resource.RLIMIT_NOFILE, (soft, hard))
    except Exception as err:  # pylint: disable=broad-except
        print("ERROR: Failed to raise the max open files settings -> "
              "{}".format(err))
        print("Please issue the following command on your console:")
        print(" ulimit -n {}".format(soft))
        self.exit()
    finally:
        print("~" * getattr(self.options, "output_columns", PNUM))
def start_requests(self):
    """Build the initial scrapy requests: 10 result pages per category read
    from candidate_category_54.txt, each writing to its own output file."""
    # One output file stays open per category for the async callbacks below,
    # so raise the Windows CRT stdio-handle cap first.
    win32file._setmaxstdio(2048)
    infile = open('candidate_category_54.txt','rb')
    requests = []
    for row in infile:
        # First comma-separated field is the category name (bytes -> unicode)
        category = row.split(',')[0].decode('utf-8')
        # NOTE(review): outfile is intentionally left open -- the callback
        # writes to it later, asynchronously; it is never explicitly closed.
        outfile = open('54/raw/'+category,'wb')
        # outfile = os.fdopen('54/raw/'+category,'w')
        for page in range(10):
            # Baidu Baike search results, paged 10 hits at a time
            request_url = u"http://baike.baidu.com/search?word="+category+"&pn="+str(page*10)
            print request_url
            # Default-argument binding (outfile=outfile, category=category)
            # freezes the current loop values in the lambda, avoiding the
            # late-binding-closure pitfall.
            requests.append(scrapy.FormRequest(request_url,callback=lambda response,outfile=outfile,category=category:self.parseCustom(response,outfile,category)))
    return requests
def main(): os.environ.setdefault("DJANGO_SETTINGS_MODULE", "all_settings") # Load django models. This is needed to populate the DB before using it. django.setup() if platform.system() == 'Windows': try: import win32file win32file._setmaxstdio(2048) except ImportError: print "Cannot find package 'win32file'. Installing it is "\ "recommended before running the UT " \ "(you can do so using 'pip install pypiwin32')" from django.core.management import execute_from_command_line execute_from_command_line(sys.argv)
def start_requests(self):
    """Build the initial scrapy requests: 10 result pages per category read
    from candidate_category_54.txt, each writing to its own output file."""
    # One output file stays open per category for the async callbacks below,
    # so raise the Windows CRT stdio-handle cap first.
    win32file._setmaxstdio(2048)
    infile = open('candidate_category_54.txt', 'rb')
    requests = []
    for row in infile:
        # First comma-separated field is the category name (bytes -> unicode)
        category = row.split(',')[0].decode('utf-8')
        # NOTE(review): outfile is intentionally left open -- the callback
        # writes to it later, asynchronously; it is never explicitly closed.
        outfile = open('54/raw/' + category, 'wb')
        # outfile = os.fdopen('54/raw/'+category,'w')
        for page in range(10):
            # Baidu Baike search results, paged 10 hits at a time
            request_url = u"http://baike.baidu.com/search?word=" + category + "&pn=" + str(
                page * 10)
            print request_url
            # Default-argument binding (outfile=outfile, category=category)
            # freezes the current loop values in the lambda, avoiding the
            # late-binding-closure pitfall.
            requests.append(
                scrapy.FormRequest(
                    request_url,
                    callback=lambda response, outfile=outfile, category=category: self.parseCustom(
                        response, outfile, category)))
    return requests
def setMaxfilesopened(limit):
    """Raise the process's max-open-files limit to ``limit`` if it is lower.

    Returns True when the limit was raised, False when the attempt failed.
    NOTE(review): when the current limit already satisfies ``limit`` the
    function falls through and implicitly returns None (falsy) -- callers
    treating the return value as "success" should confirm that is intended.
    """
    try:
        if sys.platform == "win32":
            import win32file
            # Windows CRT stdio cap (default 512)
            maxstdio = win32file._getmaxstdio()
            if maxstdio < limit:
                logging.debug("Current maxstdio: %s, changing to %s..." % (maxstdio, limit))
                win32file._setmaxstdio(limit)
                return True
        else:
            import resource
            soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
            if soft < limit:
                logging.debug("Current RLIMIT_NOFILE: %s (max: %s), changing to %s..." % (soft, hard, limit))
                # NOTE(review): raising the soft limit above `hard` raises
                # ValueError, which lands in the except below
                resource.setrlimit(resource.RLIMIT_NOFILE, (limit, hard))
                return True
    except Exception, err:
        logging.error("Failed to modify max files open limit: %s" % err)
        return False
def _setup_process(opts):
    '''
    This function is supposed to set up the newly spawned process.

    On Windows only: if the opts dict carries a ``max_open_files`` value,
    apply it to the CRT stdio cap via win32file._setmaxstdio, clamping to
    the 8192 ceiling the C runtime enforces.  A no-op elsewhere.

    :param opts: configuration dict (may be None/empty)
    '''
    if not salt.utils.platform.is_windows():
        return
    if opts:
        max_open_files = opts.get('max_open_files')
        if max_open_files:
            try:
                if max_open_files > 8192:
                    max_open_files = 8192
                    # Fixed typo: 'ajusted' -> 'adjusted'
                    log.warning('max_open_files adjusted to 8192, since that is maximum in C runtime.')
                import win32file
                # _setmaxstdio returns the new limit on success
                count = win32file._setmaxstdio(max_open_files)  # pylint: disable=W0212
                if count != max_open_files:
                    log.error('Failed to set \'max_open_files\' on the process')
            except ImportError:
                # pywin32 not installed; the limit simply stays at the default
                log.error('Failed to set \'max_open_files\' on the process')
# pylint: disable=wildcard-import,unused-wildcard-import import platform from rotest.common import core_log from rotest.common.django_utils.settings import * if platform.system() == 'Windows': # pragma: no cover try: import win32file core_log.debug("Setting 2048 as the file descriptors limit") win32file._setmaxstdio(2048) # pylint: disable=protected-access except ImportError: import warnings warnings.warn("Cannot find package 'win32file'. " "You must install it using 'pip install pypiwin32'")
import os
import sys

# FIXME: Search and replace these from the tests if pyarrow wheel is available
PY39 = sys.version_info >= (3, 9, 0)
PYARROW_NOT_AVAILABLE = "pyarrow not available yet for Python3.9"

# Increasing fd ulimit for tests: on Windows the CRT caps stdio handles at
# 512 by default, which the test suite can exceed.
if os.name == "nt":
    import subprocess

    import win32file  # pylint: disable=import-error

    win32file._setmaxstdio(2048)

    # Workaround for two bugs:
    #
    # 1) gitpython-developers/GitPython#546
    #    GitPython leaves git cat-file --batch/--batch-check processes that are
    #    not cleaned up correctly, so Popen._active list has their defunct
    #    process handles, that it is not able to cleanup because of bug 2)
    #
    # 2) https://bugs.python.org/issue37380
    #    subprocess.Popen._internal_poll on windows is getting
    #
    #        OSError: [WinError 6] The handle is invalid
    #
    #    exception, which it doesn't ignore and so Popen is not able to cleanup
    #    old processes and that prevents it from creating any new processes at
    #    all, which results in our tests failing whenever they try to use Popen.
class MatchSpider(scrapy.Spider):
    """Scrapy spider that crawls enetscores.com football data, drilling down
    country -> league -> season -> stage -> match, and emitting one Match
    item per game (optionally enriched with per-event detail)."""

    #increase maximum open files
    if platform.system() == 'Windows':
        import win32file
        win32file._setmaxstdio(4000)

    name = "match"

    def __init__(self):
        # NOTE(review): scrapy.Spider.__init__ is not called here (no
        # super().__init__()); confirm the base class tolerates that.
        # Filters: only combinations present in these lists are crawled.
        self.seasons_selected = [
            '2015/2016', '2016/2017', '2017/2018', '2018/2019', '2019/2020'
        ]
        self.countries_selected = ['England'
                                   ]  #, 'Germany', 'Spain', 'Italy', 'France']
        self.leagues_selected = [
            'Premier League', 'Primera Division', '1. Bundesliga', 'Serie A',
            'Ligue 1'
        ]
        #allowed_domains = ["reuters.mx-api.enetscores.com", 'json.mx-api.enetscores.com']
        self.start_urls = [
            "http://ronaldopt.mx-api.enetscores.com/page/xhr/standings/"
        ]
        # Empty list means "all stages" / "all matches" in the parsers below
        self.stages = []
        self.matches = []
        # When True, a second request per match fetches event-level stats
        self.detailed_stats = True

    #COUNTRY
    def parse(self, response):
        """Entry point: pick the selected countries from the dropdown and
        request each country's standings page."""
        countries_dirty = response.xpath(
            '//li[@class="mx-dropdown-option"]/text()').extract()
        # Strip whitespace and drop empty entries
        countries = list(filter(None, [x.strip() for x in countries_dirty]))
        for country in countries:
            if country in self.countries_selected:
                href = response.xpath('//li[text()[contains(.,"' + country +
                                      '")]]/@data-snippetparams').re_first(
                                          '"params":"(.+)"')
                url = self.start_urls[0] + href
                yield scrapy.Request(url,
                                     callback=self.parseLeague,
                                     meta={'country': country})

    #LEAGUE
    def parseLeague(self, response):
        """Pick the selected leagues for a country and request each league's
        standings page."""
        country = response.meta['country']
        leagues = response.xpath(
            '//div[@class="mx-dropdown-container mx-flexbox mx-float-left mx-template-dropdown"]/div/ul/li/text()'
        ).extract()
        for league in leagues:
            if league in self.leagues_selected:
                href = response.xpath('//li[text()[contains(.,"' + league +
                                      '")]]/@data-snippetparams').re_first(
                                          '"params":"(.+)"')
                url = 'http://ronaldopt.mx-api.enetscores.com/page/xhr/standings/' + href
                yield scrapy.Request(url,
                                     callback=self.parseSeason,
                                     meta={
                                         'country': country,
                                         'league': league
                                     })

    #SEASON
    def parseSeason(self, response):
        """Pick the selected seasons for a league and request each season's
        standings page."""
        country = response.meta['country']
        league = response.meta['league']
        seasons = response.xpath(
            '//div[@class="mx-dropdown-container mx-flexbox mx-float-left mx-tournament-dropdown"]/div/ul/li/text()'
        ).extract()
        for season in seasons:
            if season in self.seasons_selected:
                href = response.xpath('//li[text()[contains(.,"' + season +
                                      '")]]/@data-snippetparams').re_first(
                                          '"params":"(.+)"')
                url = 'http://ronaldopt.mx-api.enetscores.com/page/xhr/standings/' + href
                yield scrapy.Request(url,
                                     callback=self.parseMatches,
                                     meta={
                                         'country': country,
                                         'league': league,
                                         'season': season
                                     })

    #OPEN SEASON
    def parseMatches(self, response):
        """Resolve the season's stage-results base URL and kick off paging by
        requesting stage 1."""
        country = response.meta['country']
        league = response.meta['league']
        season = response.meta['season']
        href = response.xpath(
            '//div[contains(@class,"mx-matches-finished-betting_extended")]/@data-params'
        ).re_first('params":"(.+)/')
        url = 'http://ronaldopt.mx-api.enetscores.com/page/xhr/stage_results/' + href
        first_stage_url = url + '/1'
        yield scrapy.Request(first_stage_url,
                             callback=self.parseStage,
                             meta={
                                 'href': href,
                                 'country': country,
                                 'league': league,
                                 'season': season
                             })

    #LOOP STAGES
    def parseStage(self, response):
        """From stage 1's pager, determine the total number of stages and
        request every stage page (or only the configured self.stages)."""
        country = response.meta['country']
        league = response.meta['league']
        season = response.meta['season']
        href = response.meta['href']
        url = 'http://ronaldopt.mx-api.enetscores.com/page/xhr/stage_results/' + href
        totalPages = response.xpath(
            '//span[contains(@class,"mx-pager-next")]/@data-params').re_first(
                'total_pages": "([0-9]+)"')
        # Empty self.stages means "crawl every stage the pager reports"
        if not self.stages and totalPages is not None:
            iterateStages = range(1, int(totalPages) + 1)
        else:
            iterateStages = self.stages
        for stage in iterateStages:
            full_stage_url = url + '/' + str(stage)
            # dont_filter: the same URL pattern repeats across stages
            yield scrapy.Request(full_stage_url,
                                 callback=self.parseAllMatchesInStage,
                                 dont_filter=True,
                                 meta={
                                     'stage': stage,
                                     'country': country,
                                     'league': league,
                                     'season': season
                                 })

    #MATCHES IN STAGE
    def parseAllMatchesInStage(self, response):
        """Collect every match id on a stage page and request each match's
        centre page, seeding a Match item with the crawl context."""
        country = response.meta['country']
        league = response.meta['league']
        season = response.meta['season']
        stage = response.meta['stage']
        matchesDataEventList = response.xpath(
            '//a[contains(@class, "mx-link")]/@data-event').extract()
        # dateList = response.xpath('//span[@class="mx-time-startdatetime mx-break-small"]/text()').extract()
        matchList = list()
        # Non-empty self.matches restricts the crawl to those 1-based indices
        if len(self.matches) >= 1:
            for match in self.matches:
                matchList.append(matchesDataEventList[match - 1])
        else:
            matchList = list(matchesDataEventList)
        counter = 0
        for matchId in matchList:
            match = Match()
            match["matchId"] = matchId
            match["country"] = country
            match["league"] = league
            match["season"] = season
            #date = dateList[counter]
            #match["date"] = dateList[0]
            url = 'http://ronaldopt.mx-api.enetscores.com/page/xhr/match_center/' + matchId + '/'
            counter += 1
            yield scrapy.Request(url,
                                 callback=self.parseMatchGeneralStats,
                                 meta={'match': match})

    #MATCH GENERAL STATS
    def parseMatchGeneralStats(self, response):
        """Scrape team names/ids, score and date from the match centre page,
        then request the lineup page."""
        match = response.meta['match']
        stage = response.xpath(
            '//span[@class="mx-stage-name"]/text()').re_first('\s([0-9]+)')
        match["stage"] = stage
        # The \t/\n regexes trim the markup's tab/newline padding around names
        fullTeamNameHome = response.xpath(
            '//div[contains(@class, "mx-team-home-name mx-break-small")]/a/text()'
        ).re('\t+([^\n]+[^\t]+)\n+\t+')
        fullTeamNameAway = response.xpath(
            '//div[@class="mx-team-away-name mx-break-small"]/a/text()').re(
                '\t+([^\n]+[^\t]+)\n+\t+')
        teamIdHome = response.xpath(
            '//div[@class="mx-team-home-name mx-break-small"]/a/@data-team'
        ).extract()
        teamIdAway = response.xpath(
            '//div[@class="mx-team-away-name mx-break-small"]/a/@data-team'
        ).extract()
        awayAcronym = response.xpath(
            '//div[@class="mx-team-away-name mx-show-small"]/a/text()').re(
                '\t+([^\n]+[^\t]+)\n+\t+')
        homeAcronym = response.xpath(
            '//div[@class="mx-team-home-name mx-show-small"]/a/text()').re(
                '\t+([^\n]+[^\t]+)\n+\t+')
        homeTeamGoal = response.xpath(
            '//div[@class="mx-res-home mx-js-res-home"]/@data-res'
        ).extract_first()
        awayTeamGoal = response.xpath(
            '//div[@class="mx-res-away mx-js-res-away"]/@data-res'
        ).extract_first()
        # NOTE(review): brittle absolute path -- breaks if the page layout
        # changes at all
        date = response.xpath(
            '//html/body/div/div/div/div[3]/div/div[3]/div[1]/div[2]/@data-startdate_utc'
        ).extract()
        match['homeTeamFullName'] = fullTeamNameHome
        match['awayTeamFullName'] = fullTeamNameAway
        match['homeTeamAcronym'] = homeAcronym
        match['awayTeamAcronym'] = awayAcronym
        match['homeTeamId'] = teamIdHome
        match['awayTeamId'] = teamIdAway
        match['homeTeamGoal'] = homeTeamGoal
        match['awayTeamGoal'] = awayTeamGoal
        match["date"] = date
        matchId = match['matchId']
        url = 'http://ronaldopt.mx-api.enetscores.com/page/xhr/event_gamecenter/' + matchId + '%2Fv2_lineup/'
        yield scrapy.Request(url,
                             callback=self.parseSquad,
                             meta={'match': match})

    #MATCH SQUADS
    def parseSquad(self, response):
        """Scrape both lineups (first 11 entries = home, rest = away) with
        pitch coordinates; then either fetch event detail or emit the item."""
        match = response.meta['match']
        players = response.xpath(
            '//div[@class="mx-lineup-incident-name"]/text()').extract()
        playersId = response.xpath('//a/@data-player').extract()
        subsId = response.xpath(
            '//div[@class="mx-lineup-container mx-float-left"]//div[@class="mx-collapsable-content"]//a/@data-player'
        ).extract()
        # Starters = all player ids minus the substitutes
        titularPlayerId = [x for x in playersId if x not in subsId]
        # player x y pitch position
        playersX = response.xpath(
            '//div[contains(@class,"mx-lineup-pos")]/@class').re(
                'mx-pos-row-([0-9]+)\s')
        playersY = response.xpath(
            '//div[contains(@class,"mx-lineup-pos")]/@class').re(
                'mx-pos-col-([0-9]+)\s')
        # NOTE(review): playersPos is computed but never used below
        playersPos = response.xpath(
            '//div[contains(@class,"mx-lineup-pos")]/@class').re(
                'mx-pos-([0-9]+)\s')
        match['homePlayers'] = players[:11]
        match['homePlayersId'] = titularPlayerId[:11]
        match['homePlayersX'] = playersX[:11]
        match['homePlayersY'] = playersY[:11]
        match['awayPlayers'] = players[11:]
        match['awayPlayersId'] = titularPlayerId[11:22]
        match['awayPlayersX'] = playersX[11:]
        match['awayPlayersY'] = playersY[11:]
        matchId = match['matchId']
        if self.detailed_stats:
            url = 'http://json.mx-api.enetscores.com/live_data/actionzones/' + matchId + '/0?_=1'
            yield scrapy.Request(url,
                                 callback=self.parseMatchEvents,
                                 meta={'match': match})
        else:
            yield match

    #MATCH EVENTS
    def parseMatchEvents(self, response):
        """Parse the JSON actionzones feed and attach per-event lists (goals,
        shots, cards, ...) to the Match item before yielding it."""
        #matchId = match['matchId']
        #url = 'http://json.mx-api.enetscores.com/live_data/actionzones/' + matchId + '/0?_=1'
        match = response.meta['match']
        jsonresponse = json.loads(response.body_as_unicode())
        try:
            goal = [s for s in jsonresponse["i"] if s['type'] == 'goal']
            shoton = [s for s in jsonresponse["i"] if s['type'] == 'shoton']
            shotoff = [s for s in jsonresponse["i"] if s['type'] == 'shotoff']
            foulcommit = [
                s for s in jsonresponse["i"] if s['type'] == 'foulcommit'
            ]
            card = [s for s in jsonresponse["i"] if s['type'] == 'card']
            corner = [s for s in jsonresponse["i"] if s['type'] == 'corner']
            subtypes = [s for s in jsonresponse["i"] if 'subtype' in s]
            cross = [s for s in subtypes if s['subtype'] == 'cross']
            possession = [s for s in subtypes if s['subtype'] == 'possession']
            match['goal'] = goal
            match['shoton'] = shoton
            match['shotoff'] = shotoff
            #match['foulcommit'] = foulcommit
            match['card'] = card
            match['cross'] = cross
            match['corner'] = corner
            match['possession'] = possession
        except:
            # NOTE(review): bare except -- any malformed/missing "i" payload
            # is swallowed and the item is emitted without event detail
            e = sys.exc_info()[0]
            print('No Match Events: ' + str(e))
        yield match
print('connected... %s (count = %s)' % (request.sid,client_count)) @socketio.on('disconnect') def test_disconnect(): global client_count if client_count is None: client_count = 0 client_count -=1 print('Client disconnected (count=%s)', (request.sid,client_count)) if __name__ == '__main__': usage = """\ """ import win32file win32file._setmaxstdio(2048) parser = OptionParser(usage=usage) parser.add_option( "-l", "--loglevel", dest="loglevel", default=None, type='choice', choices=[ 'debug', 'warning', 'info', 'error', 'critical'],
def test_max_open_files(self):
    """Drive check_max_open_files() through rising numbers of accepted minion
    keys and assert that the expected log message (none/INFO/WARNING/CRITICAL)
    is emitted at each threshold relative to a temporarily lowered limit."""
    with TstSuiteLoggingHandler() as handler:
        logmsg_dbg = "DEBUG:This salt-master instance has accepted {0} minion keys."
        logmsg_chk = (
            "{0}:The number of accepted minion keys({1}) should be lower "
            "than 1/4 of the max open files soft setting({2}). According "
            "to the system's hard limit, there's still a margin of {3} "
            "to raise the salt's max_open_files setting. Please consider "
            "raising this value."
        )
        logmsg_crash = (
            "{0}:The number of accepted minion keys({1}) should be lower "
            "than 1/4 of the max open files soft setting({2}). "
            "salt-master will crash pretty soon! According to the "
            "system's hard limit, there's still a margin of {3} to "
            "raise the salt's max_open_files setting. Please consider "
            "raising this value."
        )
        # Windows has no soft/hard distinction, so the "margin" sentence is
        # absent from the expected message there
        if sys.platform.startswith("win"):
            logmsg_crash = (
                "{0}:The number of accepted minion keys({1}) should be lower "
                "than 1/4 of the max open files soft setting({2}). "
                "salt-master will crash pretty soon! Please consider "
                "raising this value."
            )
        if sys.platform.startswith("win"):
            # Check the Windows API for more detail on this
            # http://msdn.microsoft.com/en-us/library/xt874334(v=vs.71).aspx
            # and the python binding http://timgolden.me.uk/pywin32-docs/win32file.html
            mof_s = mof_h = win32file._getmaxstdio()
        else:
            mof_s, mof_h = resource.getrlimit(resource.RLIMIT_NOFILE)
        tempdir = tempfile.mkdtemp(prefix="fake-keys")
        keys_dir = os.path.join(tempdir, "minions")
        os.makedirs(keys_dir)

        # Lower the limit so the thresholds below are reachable quickly
        mof_test = 256

        if sys.platform.startswith("win"):
            win32file._setmaxstdio(mof_test)
        else:
            resource.setrlimit(resource.RLIMIT_NOFILE, (mof_test, mof_h))

        try:
            prev = 0
            # Each (newmax, level) pair crosses the next 1/4-of-soft-limit
            # threshold; level is the log level expected at that point
            for newmax, level in (
                (24, None),
                (66, "INFO"),
                (127, "WARNING"),
                (196, "CRITICAL"),
            ):
                # Create fake accepted-minion key files up to newmax
                for n in range(prev, newmax):
                    kpath = os.path.join(keys_dir, str(n))
                    with salt.utils.files.fopen(kpath, "w") as fp_:
                        fp_.write(str(n))

                opts = {"max_open_files": newmax, "pki_dir": tempdir}
                check_max_open_files(opts)

                if level is None:
                    # No log message is triggered, only the DEBUG one which
                    # tells us how many minion keys were accepted.
                    self.assertEqual([logmsg_dbg.format(newmax)], handler.messages)
                else:
                    self.assertIn(logmsg_dbg.format(newmax), handler.messages)
                    self.assertIn(
                        logmsg_chk.format(
                            level,
                            newmax,
                            mof_test,
                            mof_test - newmax
                            if sys.platform.startswith("win")
                            else mof_h - newmax,
                        ),
                        handler.messages,
                    )
                handler.clear()
                prev = newmax

            # Final step: key count equal to the soft limit -> crash warning
            newmax = mof_test
            for n in range(prev, newmax):
                kpath = os.path.join(keys_dir, str(n))
                with salt.utils.files.fopen(kpath, "w") as fp_:
                    fp_.write(str(n))

            opts = {"max_open_files": newmax, "pki_dir": tempdir}
            check_max_open_files(opts)
            self.assertIn(logmsg_dbg.format(newmax), handler.messages)
            self.assertIn(
                logmsg_crash.format(
                    "CRITICAL",
                    newmax,
                    mof_test,
                    mof_test - newmax
                    if sys.platform.startswith("win")
                    else mof_h - newmax,
                ),
                handler.messages,
            )
            handler.clear()
        except OSError as err:
            if err.errno == 24:
                # Too many open files
                self.skipTest("We've hit the max open files setting")
            raise
        finally:
            # Always restore the original limits and remove the fake keys
            if sys.platform.startswith("win"):
                win32file._setmaxstdio(mof_h)
            else:
                resource.setrlimit(resource.RLIMIT_NOFILE, (mof_s, mof_h))
            shutil.rmtree(tempdir)
def create_mosaic(in_files, out_file):
    """
    Creates mosaic from in_files.

    rasterio.merge keeps every input dataset open simultaneously, which can
    exceed the per-process open-file limit, so the limit is raised
    temporarily and restored afterwards.

    :param in_files: list of paths to input files
    :param out_file: path to output mosaic
    :return: resolved path to output file
    """
    raised_from = None  # original limit, if we had to raise it

    if os.name == 'posix':
        import resource
        soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
        if len(in_files) >= soft:
            # Fixed: never exceed the hard limit -- setrlimit() raises
            # ValueError for (soft > hard)
            new_limit = min(len(in_files) * 2, hard)
            resource.setrlimit(resource.RLIMIT_NOFILE, (new_limit, hard))
            raised_from = soft
            # Fixed: it is the *soft* limit that changes here, not the hard one
            logger.debug(f'Soft limit changed from: {soft} to {new_limit}')
            logger.debug(f'Hard limit: {hard}')
    elif os.name == 'nt':
        import win32file
        soft = win32file._getmaxstdio()
        if len(in_files) >= soft:
            new_limit = len(in_files) * 2
            win32file._setmaxstdio(new_limit)
            raised_from = soft
            logger.debug(f'Limit changed from {soft} to {new_limit}')

    try:
        file_objs = []
        try:
            for in_file in in_files:
                file_objs.append(rasterio.open(in_file))
            mosaic, out_trans = rasterio.merge.merge(file_objs)
            # Base the output metadata on the last input, as before
            out_meta = file_objs[-1].meta.copy()
        finally:
            # Fixed: the input datasets were previously never closed, leaking
            # one file descriptor per input -- the very problem the limit
            # raise above works around
            for src in file_objs:
                src.close()

        out_meta.update({
            "driver": "GTiff",
            "height": mosaic.shape[1],
            "width": mosaic.shape[2],
            "transform": out_trans
        })
        with rasterio.open(out_file, "w", **out_meta) as dest:
            dest.write(mosaic)
            logger.debug(
                f'Mosaic created at {out_file} with resolution: {dest.res}')
    finally:
        # Fixed: actually restore the original limit (the old "reset" branch
        # re-applied the raised value instead of putting the old one back)
        if raised_from is not None:
            if os.name == 'posix':
                import resource
                _, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
                resource.setrlimit(resource.RLIMIT_NOFILE, (raised_from, hard))
            elif os.name == 'nt':
                import win32file
                win32file._setmaxstdio(raised_from)
            logger.debug(f'Limit restored to {raised_from}')

    return Path(out_file).resolve()
def test_max_open_files(self):
    """Drive check_max_open_files() through rising numbers of accepted minion
    keys and assert that the expected log message (none/INFO/WARNING/CRITICAL)
    is emitted at each threshold relative to a temporarily lowered limit."""
    with TstSuiteLoggingHandler() as handler:
        logmsg_dbg = (
            'DEBUG:This salt-master instance has accepted {0} minion keys.'
        )
        logmsg_chk = (
            '{0}:The number of accepted minion keys({1}) should be lower '
            'than 1/4 of the max open files soft setting({2}). According '
            'to the system\'s hard limit, there\'s still a margin of {3} '
            'to raise the salt\'s max_open_files setting. Please consider '
            'raising this value.')
        logmsg_crash = (
            '{0}:The number of accepted minion keys({1}) should be lower '
            'than 1/4 of the max open files soft setting({2}). '
            'salt-master will crash pretty soon! According to the '
            'system\'s hard limit, there\'s still a margin of {3} to '
            'raise the salt\'s max_open_files setting. Please consider '
            'raising this value.')
        # Windows has no soft/hard distinction, so the "margin" sentence is
        # absent from the expected message there
        if sys.platform.startswith('win'):
            logmsg_crash = (
                '{0}:The number of accepted minion keys({1}) should be lower '
                'than 1/4 of the max open files soft setting({2}). '
                'salt-master will crash pretty soon! Please consider '
                'raising this value.')

        if sys.platform.startswith('win'):
            # Check the Windows API for more detail on this
            # http://msdn.microsoft.com/en-us/library/xt874334(v=vs.71).aspx
            # and the python binding http://timgolden.me.uk/pywin32-docs/win32file.html
            mof_s = mof_h = win32file._getmaxstdio()
        else:
            mof_s, mof_h = resource.getrlimit(resource.RLIMIT_NOFILE)
        tempdir = tempfile.mkdtemp(prefix='fake-keys')
        keys_dir = os.path.join(tempdir, 'minions')
        os.makedirs(keys_dir)

        # Lower the limit so the thresholds below are reachable quickly
        mof_test = 256

        if sys.platform.startswith('win'):
            win32file._setmaxstdio(mof_test)
        else:
            resource.setrlimit(resource.RLIMIT_NOFILE, (mof_test, mof_h))

        try:
            prev = 0
            # Each (newmax, level) pair crosses the next 1/4-of-soft-limit
            # threshold; level is the log level expected at that point
            for newmax, level in ((24, None),
                                  (66, 'INFO'),
                                  (127, 'WARNING'),
                                  (196, 'CRITICAL')):
                # Create fake accepted-minion key files up to newmax
                for n in range(prev, newmax):
                    kpath = os.path.join(keys_dir, six.text_type(n))
                    with salt.utils.files.fopen(kpath, 'w') as fp_:
                        fp_.write(str(n))  # future lint: disable=blacklisted-function

                opts = {'max_open_files': newmax, 'pki_dir': tempdir}
                check_max_open_files(opts)

                if level is None:
                    # No log message is triggered, only the DEBUG one which
                    # tells us how many minion keys were accepted.
                    self.assertEqual([logmsg_dbg.format(newmax)], handler.messages)
                else:
                    self.assertIn(logmsg_dbg.format(newmax), handler.messages)
                    self.assertIn(
                        logmsg_chk.format(
                            level,
                            newmax,
                            mof_test,
                            mof_test - newmax if sys.platform.startswith('win') else mof_h - newmax,
                        ),
                        handler.messages)
                handler.clear()
                prev = newmax

            # Final step: key count equal to the soft limit -> crash warning
            newmax = mof_test
            for n in range(prev, newmax):
                kpath = os.path.join(keys_dir, six.text_type(n))
                with salt.utils.files.fopen(kpath, 'w') as fp_:
                    fp_.write(str(n))  # future lint: disable=blacklisted-function

            opts = {'max_open_files': newmax, 'pki_dir': tempdir}
            check_max_open_files(opts)
            self.assertIn(logmsg_dbg.format(newmax), handler.messages)
            self.assertIn(
                logmsg_crash.format(
                    'CRITICAL',
                    newmax,
                    mof_test,
                    mof_test - newmax if sys.platform.startswith('win') else mof_h - newmax,
                ),
                handler.messages)
            handler.clear()
        except IOError as err:
            if err.errno == 24:
                # Too many open files
                self.skipTest('We\'ve hit the max open files setting')
            raise
        finally:
            # Always restore the original limits and remove the fake keys
            if sys.platform.startswith('win'):
                win32file._setmaxstdio(mof_h)
            else:
                resource.setrlimit(resource.RLIMIT_NOFILE, (mof_s, mof_h))
            shutil.rmtree(tempdir)
from PyPDF3 import PdfFileWriter, PdfFileReader, PdfFileMerger
import os
import tqdm
import sys
from collections import OrderedDict
import win32file
import fitz

# Raise the Windows CRT stdio-handle cap so many PDFs can be open at once
win32file._setmaxstdio(4096)
i = 0
print(win32file._getmaxstdio())
# Deep PDF merges recurse heavily; raise the interpreter recursion limit
sys.setrecursionlimit(30000)
#with open('allPDFs.txt') as f:
#    pdflines = f.readlines()
# Collect every .pdf filename under the current working directory
pdffiles = [
    os.path.join(name) for root, dirs, files in os.walk(os.getcwd())
    for name in files if name.endswith((".pdf"))
]


#get page number
def getPageNr(arg1):
    # Extract the trailing page number from a filename shaped like
    # "<title> - Page <n>.pdf".
    # NOTE(review): the function continues beyond this view (no return
    # statement is visible here) -- presumably it returns pgNr.
    stro = str(arg1)
    stro = stro.replace('.pdf', '')
    listR = stro.split(' - ')
    listR[len(listR) - 1] = listR[len(listR) - 1].replace('-', '')
    listR[len(listR) - 1] = listR[len(listR) - 1].replace('Page ', '')
    pgNr = int(listR[len(listR) - 1])
from __future__ import absolute_import, division, print_function, unicode_literals

import tensorflow as tf
from tensorflow.keras import datasets, layers, models
import matplotlib.pyplot as plt
from DataGrabbing.DataFormatting import generateStructures
from ImageFormatting.ImageFormatter import cropImageByColorDetection
import win32file
import numpy as np
import gc

#PREPARATION
gc.collect()
# Raise the Windows CRT's stdio file-handle cap (default 512) -- this is a
# file-descriptor limit, not a RAM limit
win32file._setmaxstdio(2048)
print(tf.version)  #CHECK TFLOW IS WORKING
# CLASS_NAMES = ["0", "1", "2", "3", "4", "5", "6", "7"]
# Shared augmentation pipeline: random horizontal flips only
DATA_GEN = tf.keras.preprocessing.image.ImageDataGenerator(
    horizontal_flip=True)


def twoD2threeD(array):
    #Converts a 2d flat array into a 3d array
    # NOTE(review): assumes `array` is a PIL-style image (has .getdata()) of
    # exactly 256x256 RGB pixels -- confirm against the callers
    return np.reshape(list(array.getdata()), (256, 256, 3)).tolist()


def nparray2list(array):
    #CONVERT 3D NUMPY ARRAY INTO A LIST
    return [np.array(i).tolist() for i in array]


def singleTFlowImage(