def __init__(self): """Initialize the target The build_flags dictionnary is used to set attributes of runtime_build.gpr""" TargetConfiguration.__init__(self) ArchSupport.__init__(self) self.config_files = {} self.runtimes = {} self.rts_options = RTSProfiles(self) self.build_flags = { 'source_dirs': None, 'common_flags': [ '-fcallgraph-info=su,da', '-ffunction-sections', '-fdata-sections' ], 'common_gnarl_flags': [], 'asm_flags': [], 'c_flags': ['-DIN_RTS', '-Dinhibit_libc'] } readme = self.readme_file if readme: self.config_files.update({'README': readfile(readme)}) for profile in self.system_ads: # Set the scenario variable values for the base profile rts = FilesHolder() self.runtimes[profile] = rts if 'ravenscar' not in profile: rts.rts_vars = self.rts_options.zfp_scenarios(math_lib=False) elif 'full' in profile: rts.rts_vars = self.rts_options.full_scenarios(math_lib=True) else: rts.rts_vars = self.rts_options.sfp_scenarios(math_lib=False) # By default, system.ads files are searched for in # bb-runtimes/src/system. # This works fine in general, however, for custom runtimes, we may # need to change the location of this file for various reasons # so if we detect a slash in the base name, this means that we # lookup the file as any other regular source file. system_ads = self.system_ads[profile] if '/' in system_ads: rts.add_source_alias('gnat', 'system.ads', system_ads) else: rts.add_source_alias('gnat', 'system.ads', 'src/system/%s' % system_ads) rts.build_flags = copy.deepcopy(self.build_flags) rts.config_files = {} # Update the runtimes objects according to target specifications self.amend_rts(profile, rts) # Check that dependencies are met self.rts_options.check_deps(rts.rts_vars) assert len(self.runtimes) > 0, "No runtime defined"
def amend_rts(self, rts_profile, conf):
    super(PPC6XXTarget, self).amend_rts(rts_profile, conf)
    if rts_profile == 'ravenscar-full':
        conf.config_files.update(
            {'link-zcx.spec': readfile('powerpc/prep/link-zcx.spec')})
        conf.rts_xml = conf.rts_xml.replace(
            '"-nostartfiles"',
            ('"-u", "_Unwind_Find_FDE", "-Wl,--eh-frame-hdr",\n'
             ' "--specs=${RUNTIME_DIR(ada)}/link-zcx.spec"'))
def dump_runtime_xml(self, rts_name, rts):
    cnt = readfile('pikeos/runtime.xml')
    if self.pikeos_version == 'pikeos3':
        cnt = cnt.replace('@version@', 'pikeos-3.4')
    else:
        cnt = cnt.replace('@version@', 'pikeos-4.1')
    cnt = cnt.replace('@target@', self.pikeos_target)
    return cnt
def amend_rts(self, rts_profile, conf):
    super(PPCBookETarget, self).amend_rts(rts_profile, conf)
    # Disable shrink-wrap-separate when building the runtime, as it
    # prevents the frame from being properly built and thus prevents gdb
    # from unwinding the runtime (see R220-013).
    conf.build_flags['common_flags'] += ['-fno-shrink-wrap-separate']
    if rts_profile == 'ravenscar-full':
        conf.config_files.update(
            {'link-zcx.spec': readfile('powerpc/prep/link-zcx.spec')})
def __init__(self, mem_routines, small_mem):
    """Initialize the target

    :param mem_routines: True for adding memory functions (memcpy..)
    :param small_mem: True when targeting a board with minimal memory

    The build_flags dictionary is used to set attributes of
    runtime_build.gpr"""
    TargetConfiguration.__init__(self)
    BSP.__init__(self)
    self._mem_routines = mem_routines
    self._small_mem = small_mem
    self.config_files = {}
    self.runtimes = {}
    self.rts_options = RTSOptions(self)

    self.build_flags = {
        'source_dirs': None,
        'common_flags': [
            '-fcallgraph-info=su,da',
            '-ffunction-sections',
            '-fdata-sections'
        ],
        'asm_flags': [],
        'c_flags': ['-DIN_RTS', '-Dinhibit_libc']
    }

    readme = self.readme_file
    if readme:
        self.config_files.update({'README': readfile(readme)})

    for profile in self.system_ads:
        rts = FilesHolder()
        self.runtimes[profile] = rts
        if 'ravenscar' not in profile:
            rts.rts_vars = self.rts_options.zfp_scenarios(
                self._mem_routines, math_lib=False)
        elif 'full' in profile:
            rts.rts_vars = self.rts_options.full_scenarios(
                self._mem_routines, math_lib=True,
                small_mem=self._small_mem)
        else:
            rts.rts_vars = self.rts_options.sfp_scenarios(
                self._mem_routines, math_lib=False,
                small_mem=self._small_mem)
        rts.add_sources(
            'arch',
            {'system.ads': 'src/system/%s' % self.system_ads[profile]})
        rts.build_flags = copy.deepcopy(self.build_flags)
        rts.config_files = {}

    assert len(self.runtimes) > 0, "No runtime defined"
def amend_rts(self, rts_profile, conf):
    super(LeonTarget, self).amend_rts(rts_profile, conf)
    conf.rts_xml = conf.rts_xml.replace(' "-nolibc",', '')
    if rts_profile == 'ravenscar-full':
        # Use leon-zcx.specs to link with -lc.
        conf.config_files.update(
            {'link-zcx.spec': readfile('sparc/leon/leon-zcx.specs')})
        conf.rts_xml = conf.rts_xml.replace(
            '"-nostartfiles",',
            '"--specs=${RUNTIME_DIR(ada)}/link-zcx.spec",')
def __init__(self): """Initialize the target The build_flags dictionnary is used to set attributes of runtime_build.gpr""" TargetConfiguration.__init__(self) ArchSupport.__init__(self) self.config_files = {} self.runtimes = {} self.rts_options = RTSProfiles(self) self.build_flags = { 'source_dirs': None, 'common_flags': [ '-fcallgraph-info=su,da', '-ffunction-sections', '-fdata-sections' ], 'asm_flags': [], 'c_flags': ['-DIN_RTS', '-Dinhibit_libc'] } readme = self.readme_file if readme: self.config_files.update({'README': readfile(readme)}) for profile in self.system_ads: # Set the scenario variable values for the base profile rts = FilesHolder() self.runtimes[profile] = rts if 'ravenscar' not in profile: rts.rts_vars = self.rts_options.zfp_scenarios(math_lib=False) elif 'full' in profile: rts.rts_vars = self.rts_options.full_scenarios(math_lib=True) else: rts.rts_vars = self.rts_options.sfp_scenarios(math_lib=False) rts.add_sources( 'arch', {'system.ads': 'src/system/%s' % self.system_ads[profile]}) rts.build_flags = copy.deepcopy(self.build_flags) rts.config_files = {} # Update the runtimes objects according to target specifications self.amend_rts(profile, rts) # Check that dependencies are met self.rts_options.check_deps(rts.rts_vars) assert len(self.runtimes) > 0, "No runtime defined"
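The constructors above rely on only a few hooks that each concrete target supplies: the system_ads mapping, the readme_file attribute and the amend_rts callback. The following is a minimal, hypothetical sketch of such a subclass, assuming the base class whose __init__ is shown above is named Target; the class name, profile names, file names and the extra flag are illustrative placeholders and do not come from the original sources.

# Hypothetical sketch only -- 'Target' stands for the base class whose
# __init__ is shown above; every concrete name below is a placeholder.
class ExampleTarget(Target):

    @property
    def name(self):
        return 'example'

    @property
    def readme_file(self):
        # No README for this target: __init__ then skips the entry.
        return None

    @property
    def system_ads(self):
        # One system.ads variant per runtime profile (placeholder names).
        return {'zfp': 'system-xi-example.ads',
                'ravenscar-sfp': 'system-xi-example-sfp.ads'}

    def amend_rts(self, rts_profile, conf):
        # Target-specific adjustments, in the spirit of the amend_rts
        # overrides shown elsewhere in this section (flag is illustrative).
        if rts_profile == 'ravenscar-sfp':
            conf.build_flags['common_flags'] += ['-fexample-flag']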
def install(self, destination, rts_descriptor=None):
    # Build target directories
    destination = os.path.abspath(destination)
    if not os.path.exists(destination):
        os.mkdir(destination)

    # Retrieve runtime sources
    runtime_sources = self._find_rts_sources(destination, rts_descriptor)

    projects = []

    for rts_base_name, rts_obj in self.tgt.runtimes.items():
        if self.tgt.is_native or self.tgt.is_pikeos:
            rtsname = 'rts-%s' % rts_base_name
        else:
            rtsname = '%s-%s' % (rts_base_name, self.tgt.name)
        rts_path = os.path.join(destination, rtsname)

        if os.path.exists(rts_path):
            if not self.overwrite:
                print("ERROR: a runtime already exists in")
                print(" %s" % rts_path)
                print("remove the runtime, use a different installation"
                      " path or use --force to overwrite")
                sys.exit(1)
            else:
                # remove everything there
                print("WARNING: replacing a previously existing runtime")
                print(" %s" % rts_path)
                shutil.rmtree(rts_path)

        scenario_vars = rts_obj.rts_vars
        if 'ravenscar' in rts_base_name:
            libs = ('gnat', 'gnarl')
        else:
            libs = ('gnat', )

        # Amend the scenario variables with the default values
        for lib in libs:
            for scenario, vals in runtime_sources.scenarios(lib).items():
                if scenario not in scenario_vars:
                    scenario_vars[scenario] = vals[0]

        # Placeholder for user-defined sources
        user_libs = ['%s_user' % d for d in libs]
        for lib in user_libs:
            dest = os.path.join(rts_path, lib)
            os.makedirs(dest)

        # GNARL extra directory for ravenscar-full:
        # With ravenscar-full, we can't split libgnat and libgnarl
        # properly, as we don't have the same soft-link mechanism as the
        # native runtime. This means that atomic operations from libgnat
        # need to call the libgnarl functions, making the two libraries
        # inter-dependent. To avoid linking headaches, we thus combine
        # the two in a single library, keeping libgnarl as an empty
        # library (gnatlink will still try to link with it when tasking
        # is used, so we need to have one available).
        if rts_base_name == 'ravenscar-full':
            dest = os.path.join(rts_path, 'gnarl_empty')
            os.makedirs(dest)
            with open(os.path.join(dest, 'empty.c'), 'w') as fp:
                fp.write('\n')

        # Now copy the full set of sources to use for the runtime
        langs = {}
        for lib in libs:
            langs[lib] = ['Ada']
            dest = os.path.join(rts_path, lib)
            # Install sources from the shared rts sources
            dirs = self._get_rts_dirs(runtime_sources.sources(lib),
                                      scenario_vars)
            install_files(dirs, dest)
            # and install sources from the BSP
            for pair in self.tgt.get_sources(lib):
                pair.install(dest)
            if lib in rts_obj.dirs:
                for pair in rts_obj.dirs[lib]:
                    pair.install(dest)

            # Check the list of languages used there, to produce the
            # proper _build.gpr project.
            for fname in os.listdir(dest):
                _, ext = os.path.splitext(fname)
                if 'C' not in langs[lib] and (ext == '.c' or ext == '.h'):
                    langs[lib].append('C')
                if 'Asm' not in langs[lib] and ext == '.s':
                    langs[lib].append('Asm')
                if 'Asm_Cpp' not in langs[lib] and ext == '.S':
                    langs[lib].append('Asm_Cpp')

        # Copy the ld scripts
        if len(self.tgt.ld_scripts) > 0:
            dest = os.path.join(rts_path, 'ld')
            if not os.path.isdir(dest):
                os.makedirs(dest)
            for script in self.tgt.ld_scripts:
                script.install(dest)
        # Add user-defined placeholder for ld scripts
        dest = os.path.join(rts_path, 'ld_user')
        if not os.path.isdir(dest):
            os.makedirs(dest)

        # Install target and run-time specific configuration files
        for name, content in self.tgt.config_files.items():
            with open(os.path.join(rts_path, name), 'w') as fp:
                fp.write(content)
        for name, content in rts_obj.config_files.items():
            with open(os.path.join(rts_path, name), 'w') as fp:
                fp.write(content)
        with open(os.path.join(rts_path, 'runtime.xml'), 'w') as fp:
            fp.write(self.tgt.dump_runtime_xml(rts_base_name, rts_obj))
        with open(os.path.join(rts_path, 'ada_source_path'), 'w') as fp:
            # Make sure the user-defined sources come first to preempt
            # default sources when needed
            fp.write('%s\n' % '\n'.join(list(user_libs) + list(libs)))
        with open(os.path.join(rts_path, 'ada_object_path'), 'w') as fp:
            fp.write('adalib\n')

        # And generate the project files used to build the rts
        build_flags = {}
        for f in ['common_flags', 'asm_flags', 'c_flags']:
            build_flags[f] = '",\n "'.join(rts_obj.build_flags[f])
        cnt = readfile(getdatafilepath('target_options.gpr'))
        # Format
        cnt = cnt.format(**build_flags)
        # Write
        with open(os.path.join(rts_path, 'target_options.gpr'), 'w') as fp:
            fp.write(cnt)

        runtime_build = os.path.join(rts_path, "runtime_build.gpr")
        runtime_build_tmpl = getdatafilepath('runtime_build.gpr.in')
        with open(runtime_build_tmpl, 'r') as fp:
            template = fp.read()
        with open(runtime_build, 'w') as fp:
            if self.is_native:
                target_directive = ''
            else:
                target_directive = \
                    'for Target use "%s";' % self.tgt.target
            source_dirs = ['gnat_user', 'gnat']
            languages = langs['gnat']
            if rts_base_name == 'ravenscar-full':
                # ravenscar-full: combine libgnat and libgnarl
                source_dirs.extend(['gnarl_user', 'gnarl'])
                for lang in langs['gnarl']:
                    if lang not in languages:
                        languages.append(lang)
            fp.write(
                template.format(target_directive=target_directive,
                                source_dirs='", "'.join(source_dirs),
                                languages='", "'.join(languages)))

        if 'gnarl' in libs:
            ravenscar_build = os.path.join(rts_path, "ravenscar_build.gpr")
            ravenscar_build_tmpl = getdatafilepath(
                "ravenscar_build.gpr.in")
            with open(ravenscar_build_tmpl, 'r') as fp:
                template = fp.read()
            if rts_base_name != 'ravenscar-full':
                source_dirs = ['gnarl_user', 'gnarl']
                languages = langs['gnarl']
            else:
                # see above: libgnarl and libgnat are merged in
                # ravenscar-full, and libgnarl remains there as an empty
                # lib
                source_dirs = ['gnarl_empty']
                languages = ['C']
            with open(ravenscar_build, 'w') as fp:
                fp.write(
                    template.format(source_dirs='", "'.join(source_dirs),
                                    languages='", "'.join(languages)))
            projects.append(ravenscar_build)
        else:
            projects.append(runtime_build)

        # Finally install extra sources and projects if requested by the
        # target
        extra = self.tgt.other_sources(rts_base_name)
        if extra is not None:
            for subdir, src_list in extra.items():
                dest = os.path.join(rts_path, subdir)
                if not os.path.exists(dest):
                    os.makedirs(dest)
                install_files(src_list, dest)
        extra_prjs = self.tgt.other_projects(rts_base_name)
        if extra_prjs is not None:
            projects += [os.path.join(rts_path, prj)
                         for prj in extra_prjs]

    return projects
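For illustration, here is a hedged sketch of how the project list returned by install() above could be consumed. The installer variable stands for an instance of whatever class defines install() (the class itself is not shown here), and the gprbuild invocation is only one plausible way to build the generated runtime_build.gpr / ravenscar_build.gpr projects.

# Hypothetical usage sketch (not from the original sources).
import subprocess

# 'installer' is assumed to be an instance of the class that defines
# install() above, already configured with its target ('tgt') and the
# 'overwrite' flag that install() checks.
projects = installer.install('/path/to/runtimes')
for prj in projects:
    # Build each generated project; '-P' selects the project file and
    # '-j0' lets gprbuild use all available cores.
    subprocess.check_call(['gprbuild', '-P', prj, '-j0'])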
def payload(sp, ep):
    filepath = "./keyword5.csv"
    keywords_list = S.readfile(filepath)
    #keywords_list = [['0','Representative Steve Kestell']]
    #dates_list = ['2013-09-04','2013-09-05','2013-09-06','2013-09-07']
    #dates_list = ['2013-09-04','2013-09-05']
    #dates_list = S.date_list()
    filepath1 = "./date_range.csv"
    dates_list = S.readfile(filepath1)
    tweet_info = csv.writer(
        open("./tweet_ids_terri_remaining" + sp + "_" + ep + ".csv", 'ab+'))
    spoint = int(sp)
    epoint = int(ep)
    for k1 in range(spoint, epoint):
        print keywords_list[k1][1]
        #k2 = keywords_list[k1][1].strip("@")
        k2 = keywords_list[k1][1]
        print k2
        for d1 in range(len(dates_list)):
            k4 = k2 + "-" + dates_list[d1][0]
            #k2 = "\""+k2+"\""
            print k4
            start_date = dates_list[d1][0]
            start_epoch = int(
                (datetime.datetime.strptime(start_date, "%Y-%m-%d") -
                 datetime.datetime(1970, 1, 1)).total_seconds())
            end_date = dates_list[d1][1]
            end_epoch = start_epoch - 5000
            #k2 = "Representative Steve Kestell "
            #q = "Eagles since:" + start_date + " until:" + end_date
            q = k2 + " since:" + start_date + " until:" + end_date
            print q
            scroll_cursor = "TWEET-t%s-t%s" % (start_epoch, end_epoch)
            scroll_cursor = ""
            has_more_items = True
            count = 0
            tweetHTML = ""
            totalTweets = 0
            while count < 300:
                falseCounter = 0
                #url = 'https://twitter.com/i/search/timeline?q=%s&src=typd&include_available_features=1&include_entities=1&last_note_ts=35&scroll_cursor=%s' % (q, scroll_cursor)
                url = 'https://twitter.com/i/search/timeline?f=realtime&q=%s&src=typd&include_available_features=1&include_entities=1&last_note_ts=35&scroll_cursor=%s' % (q, scroll_cursor)
                headers = {
                    "cookie": '''external_referer="sQr0xRwtlCwrmSiFmo9Ms3PWH0SmSUXaGIME+lo2c9Bx3f5tDT8nfQ==|1"; guest_id=v1%3A139954374768358805; auth_token=22566a46611f84d2d1879036ade92cf1a74524f3; secure_session=true; twll=l%3D1399543820; remember_checked=1; remember_checked_on=1; ad_partner=; lang=en; twid=u%3D2483484974%7C3ZOg8IXYJYQtGds69V4htE8n4hc%3D; ssExp4974=1399543821; pid="v3:1399543833876948256857997"; webn=2483484974; external_referer=sQr0xRwtlCwrmSiFmo9Ms3PWH0SmSUXaGIME%2Blo2c9Bx3f5tDT8nfQ%3D%3D%7C1; __utma=43838368.1948717281.1399543745.1399543745.1399543745.1; __utmb=43838368.15.10.1399543745; __utmc=43838368; __utmz=43838368.1399543745.1.1.utmcsr=(direct)|utmccn=(direct)|utmcmd=(none); _twitter_sess=BAh7DjoMY3NyZl9pZCIlYWNiYWQ0MjY5MzRkN2EzOTgyOTFlNzFhYzE5MmU4%250AM2Y6CXVzZXJsKwcu%252BQaUOhhpbl9lbWFpbF92YWxpZGF0aW9uVDoHaWQiJWFj%250ANDcxOGYzZjYyNzcyZWQxNGViOGEzN2IyZThjYjlkOhFmaXJzdF9pbnZpdGVU%250AOhVpbl9uZXdfdXNlcl9mbG93VCIKZmxhc2hJQzonQWN0aW9uQ29udHJvbGxl%250Acjo6Rmxhc2g6OkZsYXNoSGFzaHsABjoKQHVzZWR7ADoQc3RheV9zZWN1cmVU%250AOg9jcmVhdGVkX2F0bCsIZtRO20UB--1f324b7ee6038b2af80f049ecb00c76ca2b61d6b'''
                }
                #print response
                try:
                    r = requests.get(url, headers=headers)
                    response = r.json()
                    has_more_items = response["has_more_items"]
                    scroll_cursor = response["scroll_cursor"]
                    focused_refresh_interval = response[
                        "focused_refresh_interval"]
                    # Append New Tweets
                    tweetHTML += response["items_html"]
                    # Count the number of Tweets
                    soup = bs("<ol>" + response["items_html"].encode("utf-8")
                              + "</ol>")
                    newTweets = len(
                        soup.find("ol").find_all("li", recursive=False))
                    totalTweets += newTweets
                    #print l.get("data-item-id")
                    tid = soup.findAll("li")
                    for l in tid:
                        info = []
                        tid = l.get("data-item-id")
                        #print tid
                        if tid is None:
                            pass
                        else:
                            info.append(keywords_list[k1][0])
                            k3 = '@' + keywords_list[k1][1]
                            #print k3
                            info.append(k3)
                            info.append(start_date)
                            info.append(str(tid))
                            tweet_info.writerow(info)
                            #dbHandler.insert_tid(info)
                    # Sleep if false returned for has_more_items
                    if not has_more_items:
                        falseCounter += 1
                    # End loop if count of tweets is 0
                    if newTweets == 0:
                        break
                    #print "%s. %s, %s, %s tweets\t\t%s total" % (count+1, scroll_cursor, has_more_items, newTweets, totalTweets)
                    count += 1
                except Exception, e:
                    pass
                    #raise e
                    # break
            #print "<ol>" + tweetHTML.encode('utf-8') + "</ol>"
            print "filename", k4
            html_twitter = open("./Dump/" + k4 + '.html', 'wb+')
            html_twitter.write(tweetHTML.encode('utf8', 'replace'))
            print "[%s requests] [%s tweets]" % (count, totalTweets)
def dump_runtime_xml(self, rts_name, rts):
    return readfile('pikeos/runtime42.xml')
def dump_runtime_xml(self, rts_name, rts):
    return readfile('vx7r2cert/runtime.xml')
def install(self, destination, prefix):
    # Build target directories
    destination = os.path.abspath(destination)
    if not os.path.exists(destination):
        os.mkdir(destination)

    installed_files = []

    gnarl_dirs = []
    gnarl_langs = []
    gnat_dirs = []
    gnat_langs = []
    script_files = []

    # Install the bsp
    base = destination
    base_bsp = os.path.join(base, self.tgt.rel_path)

    if 'README' in self.tgt.config_files:
        cnt = self.tgt.config_files['README']
        readme_fname = os.path.join(
            destination, 'README-%s.txt' % self.tgt.name)
        with open(readme_fname, 'w') as fp:
            fp.write(cnt)

    scripts = []
    self.tgt.install_ld_scripts(destination, scripts, installed_files)

    for d in scripts:
        full = os.path.join(base, d)
        rel = os.path.join('..', os.path.relpath(full, base_bsp))
        script_files.append(rel)

    # Install source files for the BSP/RTSs
    bsp_gnat = []
    bsp_gnarl = []

    self.tgt.install_libgnat(destination, bsp_gnat, installed_files)

    has_ravenscar = False
    for rts in self.tgt.runtimes:
        if 'ravenscar' in rts:
            has_ravenscar = True
            break
    if has_ravenscar:
        # install ravenscar support
        self.tgt.install_libgnarl(destination, bsp_gnarl, installed_files)

    for d in bsp_gnat:
        full = os.path.join(base, d)
        rel = os.path.join('..', os.path.relpath(full, base_bsp))
        # gnat_dirs is used to generate libgnat.gpr, so relative to the
        # bsp directory
        gnat_dirs.append(rel)
        if 'C' not in gnat_langs and self.tgt.has_c(d):
            gnat_langs.append('C')
        if 'Asm' not in gnat_langs and self.tgt.has_asm(d):
            gnat_langs.append('Asm')
        if 'Asm_Cpp' not in gnat_langs and self.tgt.has_asm_cpp(d):
            gnat_langs.append('Asm_Cpp')

    for d in bsp_gnarl:
        full = os.path.join(base, d)
        rel = os.path.join('..', os.path.relpath(full, base_bsp))
        gnarl_dirs.append(rel)
        if 'C' not in gnarl_langs and self.tgt.has_c(d):
            gnarl_langs.append('C')
        if 'Asm' not in gnarl_langs and self.tgt.has_asm(d):
            gnarl_langs.append('Asm')
        if 'Asm_Cpp' not in gnarl_langs and self.tgt.has_asm_cpp(d):
            gnarl_langs.append('Asm_Cpp')

    # Now install the rts-specific sources
    for rts_name, rts_obj in self.tgt.runtimes.items():
        base_rts = os.path.join(base_bsp, rts_name)
        rts_gnat = [d for d in gnat_dirs]
        rts_gnarl = [d for d in gnarl_dirs]
        rts_gnat_langs = [l for l in gnat_langs]
        rts_gnarl_langs = [l for l in gnarl_langs]

        if prefix is not None:
            if prefix.endswith('/'):
                install_prefix = prefix
            else:
                install_prefix = prefix + '/'
        elif self.tgt.target is not None:
            if self.tgt.is_pikeos:
                install_prefix = 'lib/gcc/%s/%s/' % (
                    self.tgt.target, FilesHolder.gcc_version())
            else:
                install_prefix = self.tgt.target + '/lib/gnat/'
        else:
            install_prefix = 'lib/gnat/'
        if self.tgt.is_pikeos or self.tgt.target is None:
            install_prefix += 'rts-%s' % rts_name
        else:
            install_prefix += '%s-%s' % (rts_name, self.tgt.name)

        if not os.path.exists(base_rts):
            os.makedirs(base_rts)

        for d in ['obj', 'adalib']:
            path = os.path.join(base_rts, d)
            if not os.path.exists(path):
                os.mkdir(path)

        for dirname, l in rts_obj.dirs.items():
            if l is None or len(l) == 0:
                continue

            if 'gnarl' in dirname:
                if dirname not in gnarl_dirs:
                    rts_gnarl.append(dirname)
                if 'C' not in rts_gnarl_langs and \
                        dirname in rts_obj.c_srcs:
                    rts_gnarl_langs.append('C')
                if 'Asm' not in rts_gnarl_langs and \
                        dirname in rts_obj.asm_srcs:
                    rts_gnarl_langs.append('Asm')
                if 'Asm_Cpp' not in rts_gnarl_langs and \
                        dirname in rts_obj.asm_cpp_srcs:
                    rts_gnarl_langs.append('Asm_Cpp')
            else:
                if dirname not in gnat_dirs:
                    rts_gnat.append(dirname)
                if 'C' not in rts_gnat_langs and \
                        dirname in rts_obj.c_srcs:
                    rts_gnat_langs.append('C')
                if 'Asm' not in rts_gnat_langs and \
                        dirname in rts_obj.asm_srcs:
                    rts_gnat_langs.append('Asm')
                if 'Asm_Cpp' not in rts_gnat_langs and \
                        dirname in rts_obj.asm_cpp_srcs:
                    rts_gnat_langs.append('Asm_Cpp')

            full = os.path.join(base_rts, dirname)
            if not os.path.exists(full):
                os.makedirs(full)

            for srcname, pair in l.items():
                self.tgt._copy_pair(srcname, pair, full)

        # user-defined sources
        rts_gnat.append('user_srcs')
        path = os.path.join(base_rts, 'user_srcs')
        if not os.path.exists(path):
            os.mkdir(path)

        # Generate ada_source_path, used for the rts bootstrap
        with open(os.path.join(base_rts, 'ada_source_path'), 'w') as fp:
            for d in sorted(rts_gnat + rts_gnarl):
                fp.write(d + '\n')

        # Generate ada_object_path
        with open(os.path.join(base_rts, 'ada_object_path'), 'w') as fp:
            fp.write('adalib\n')

        # Write config files
        for name, content in self.tgt.config_files.iteritems():
            with open(os.path.join(base_rts, name), 'w') as fp:
                fp.write(content)

        with open(os.path.join(base_rts, 'runtime.xml'), 'w') as fp:
            fp.write(self.tgt.dump_runtime_xml(rts_name, rts_obj))

        # and now install the rts project with the proper scenario values
        self.dump_rts_project_file(
            rts_name, rts_obj.rts_vars, destination, install_prefix)

        inst_files = ['runtime.xml']
        support_dir = os.path.relpath(
            os.path.join(destination, 'support'), base_rts)
        inst_files.append(os.path.join(support_dir, 'ada_source_path'))
        inst_files.append(os.path.join(support_dir, 'ada_object_path'))

        for name, content in rts_obj.config_files.iteritems():
            inst_files.append(name)
            with open(os.path.join(base_rts, name), 'w') as fp:
                fp.write(content)

        if len(script_files) > 0:
            link_sources = '"%s"' % '",\n "'.join(script_files)
        else:
            link_sources = ''

        build_flags = {
            'link_sources': link_sources,
            'rts_files': '",\n "'.join(inst_files)
        }
        cnt = readfile(datapath('install.gpr'))
        # Format
        cnt = cnt.format(**build_flags)
        # Write
        with open(os.path.join(base_rts, 'install.gpr'), 'w') as fp:
            fp.write(cnt)

        # and the potentially runtime specific target_options.gpr project
        build_flags = {}
        for f in ['common_flags', 'asm_flags', 'c_flags']:
            build_flags[f] = '",\n "'.join(rts_obj.build_flags[f])
        cnt = readfile(datapath('target_options.gpr'))
        # Format
        cnt = cnt.format(**build_flags)
        # Write
        with open(os.path.join(base_rts, 'target_options.gpr'), 'w') as fp:
            fp.write(cnt)

        # Set source_dirs and languages
        prj_values = {}
        prj_values['gnat_source_dirs'] = '"%s"' % (
            '",\n "'.join(sorted(rts_gnat)), )
        if len(rts_gnarl) == 0:
            prj_values['gnarl_source_dirs'] = ''
        else:
            prj_values['gnarl_source_dirs'] = '"%s"' % (
                '",\n "'.join(sorted(rts_gnarl)), )
        prj_values['gnat_langs'] = '", "'.join(["Ada"] + rts_gnat_langs)
        prj_values['gnarl_langs'] = '", "'.join(["Ada"] + rts_gnarl_langs)
        all_langs = []
        for l in rts_gnat_langs + rts_gnarl_langs:
            if l not in all_langs:
                all_langs.append(l)
        prj_values['all_langs'] = '", "'.join(all_langs)

        if 'ravenscar' not in rts_name:
            projects = ('libgnat', )
        elif 'full' in rts_name:
            projects = ('libgnat_full', 'libgnarl_full')
        else:
            projects = ('libgnat', 'libgnarl')

        for fname in projects:
            cnt = readfile(datapath('%s.gpr' % fname))
            # Format
            cnt = cnt.format(**prj_values)
            # Write
            if '_full' in fname:
                dest = fname.replace('_full', '')
                empty_c = os.path.join(base_rts, 'empty.c')
                with open(empty_c, 'w') as fp:
                    fp.write('')
            else:
                dest = fname
            with open(os.path.join(base_rts, '%s.gpr' % dest), 'w') as fp:
                fp.write(cnt)
def amend_rts(self, rts_profile, conf):
    conf.rts_xml = readfile('riscv/spike/runtime.xml')
def dump_runtime_xml(self, rts_name, rts):
    return readfile('visium/mcm/runtime.xml')
def amend_rts(self, rts_profile, conf):
    super(DFBBTarget, self).amend_rts(rts_profile, conf)
    if rts_profile == 'ravenscar-full':
        conf.config_files.update(
            {'link-zcx.spec': readfile('riscv/link-zcx.spec')})
def dump_runtime_xml(self, rts_name, rts):
    return readfile('riscv/spike/runtime.xml')
except Exception, e:
    print traceback.format_exc()
    return 1
    # following = twitter.get_friends_ids(screen_name = screenName,count = 5000)
    # jsonFollowing = json.dumps(following)
    # print jsonFollowing
    # return 1
#

if __name__ == '__main__':
    try:
        #file_path = "D:\'twitter'\'twitter_tokens.csv"
        file_path = 'D:/twitter/_tokens.csv'
        #print file_path
        file_path1 = 'D:/twitter/official_handles/_ids_official_handles_final_0_32.csv'
        twee_id_list = s.readfile(file_path1)
        end = 14600
        start = 0
        t_token = s.readfile(file_path)
        dbList = []
        while True:
            i = 1
            s1 = strftime("%H:%M:%S", gmtime())
            for i in range(1, len(t_token)):
                tid_list = []
                #print time.time()
                #for i in range(1,2):
                CONSUMER_TOKEN = t_token[i][2]
                CONSUMER_SECRET = t_token[i][3]
                Access_token = t_token[i][4]
                Access_token_secret = t_token[i][5]
def amend_rts(self, rts_profile, conf):
    conf.rts_vars['Has_libc'] = 'yes'
    conf.rts_xml = readfile('visium/mcm/runtime.xml')
    conf.build_flags['common_flags'] += ['-muser-mode']
def amend_rts(self, rts_profile, conf):
    super(LeonTarget, self).amend_rts(rts_profile, conf)
    if rts_profile == 'ravenscar-full':
        # Use leon-zcx.specs to link with -lc.
        conf.config_files.update(
            {'link-zcx.spec': readfile('sparc/leon/leon-zcx.specs')})
def amend_rts(self, rts_profile, conf):
    super(PikeOS, self).amend_rts(rts_profile, conf)
    conf.rts_xml = readfile('pikeos/runtime.xml')
    if rts_profile == 'ravenscar-full':
        # Register ZCX frames (for pikeos-cert-app.c)
        conf.build_flags['c_flags'] += ['-DUSE_ZCX']