def __init__(self, path=None, arabic=True):
    """Set up the book scraper: language flag, managers, and id list."""
    self.arabic = arabic
    # Managers for page browsing and for writing scraped output.
    self.br = Browser()
    if path:
        self.wr = Writer(path)
    else:
        self.wr = Writer()
    # Ids of the books scraped so far.
    self._books_ids = []
class SignatureWriter:
    """Thin wrapper that records signature benchmark rows to CSV."""

    def __init__(self):
        # One column per signature scheme, plus the test-vector column.
        header = ['VECTORS', 'RSA-PSS', 'DSA', 'ECDSA']
        self.writer = Writer('signature_output.csv', header)

    def write(self, row):
        """Append one result row to the CSV."""
        self.writer.write(row)
def __init__(self, config):
    """Initialize the Stage1st crawler from a config dict.

    'thread_id_list' may be a string path to a text file of thread ids
    (one leading numeric token per line), or any other truthy value to
    crawl the full forum id range; anything else aborts with a usage hint.
    """
    self.config = config
    self.merge_mode = self.config['merge_mode']
    self.flag = 0
    self.slice_num = self.config['slice_num']
    try:
        # isinstance is the idiomatic type test (type(u"") keeps
        # Python 2/3 text-type compatibility).
        if isinstance(config['thread_id_list'], type(u"")):
            # A path such as "thread_id.txt"; resolve relative paths
            # against this source file's directory.
            thread_id_list = config['thread_id_list']
            if not os.path.isabs(thread_id_list):
                thread_id_list = os.path.split(
                    os.path.realpath(__file__))[0] + os.sep + thread_id_list
            # BUG FIX: open the resolved path; the original opened the
            # raw config value, discarding the absolute-path resolution.
            with open(thread_id_list, 'rb') as f:
                lines = f.read().splitlines()
            lines = [line.decode('utf-8') for line in lines]
            # Keep only lines whose first space-separated token is numeric.
            config['thread_id_list'] = [
                line.split(' ')[0] for line in lines
                if len(line.split(' ')) > 0 and line.split(' ')[0].isdigit()
            ]
        elif config['thread_id_list']:
            # Any other truthy value means "crawl the whole forum".
            self.config['thread_id_list'] = range(774061, 1792000)
        else:
            raise Exception
    except Exception:
        print(
            '如果想输入帖子id,请到thread_id.txt输入。如果想把整个S1爬下来,请把config.json中thread_id_list的值改为true。'
        )
        sys.exit()
    self.parser = Stage1stParser(self.config)
    self.session = Stage1stParser(self.config).loginSession()
    self.writer = Writer(self.config)
def create_netcdf(self):
    """Write global attributes, dimensions and variables into self.ncFile.

    For the 'EGO' naming authority the EGO-specific dimensions are added
    and the glider characteristics / deployment metadata are written via
    the EGO writers; otherwise the generic Writer is used.
    """
    self.globalAttributes.write_attributes(self.ncFile)
    self.dimensions.write_dimensions(self.ncFile)
    if self.naming_authority == 'EGO':
        self.dimensionsEgo.write_dimensions(self.ncFile)
        # Glider characteristics block.
        self.variables = self.ego_standard_metadata.get_glider_characteristics_variables(
        )
        writer = writer_ego_standard(
            self.dimensions.get_metadata_dimension())
        writer.write(self.variables,
                     self.metadataData['glider_characteristics'],
                     self.ncFile)
        #self.writer.write_variables_data(self.metadataData['glider_characteristics'], self.variables, self.version)
        # Glider deployment block (fresh writer over the same dimensions).
        self.variables = self.ego_standard_metadata.get_glider_deployment_variables(
        )
        writer = writer_ego_standard(
            self.dimensions.get_metadata_dimension())
        writer.write(self.variables,
                     self.metadataData['glider_deployment'], self.ncFile)
        # Finally the measured-variable data via the EGO data writer.
        self.variables = self.metadata.get_variables()
        self.writer_ego = writer_ego(self.data_ego, self.dimensions,
                                     self.ncFile)
        self.writer_ego.write_variables_data(
            self.metadataData['variables'], self.variables, self.version)
    else:
        # Non-EGO: a single generic writer handles all variable data.
        self.variables = self.metadata.get_variables()
        self.writer = Writer(self.data, self.dimensions, self.ncFile)
        self.writer.write_variables_data(self.metadataData['variables'],
                                         self.variables, self.version)
def migrationMode(matrix, settings, loadIntoDB, clinicalNotes):
    """Harmonize the annotation matrix and persist the migrated data."""
    print("Migration mode!")
    harmonized = Harmonizer.harmonize(
        matrix, settings["harmonisation"]["usagi_output"], clinicalNotes)
    # CSV output is always produced; the database load is optional.
    Writer.writeMigratedDataCSV(harmonized, settings["tables"])
    if loadIntoDB:
        Writer.writeMigratedDataDB(settings["database"], settings["tables"])
    print("Done!")
def __init__(self, parser, output_path=""):
    """Bind the parser, open the generated .py file, and apply renames."""
    Writer.__init__(self)
    self.parser = parser
    # Output file is named after the parsed model.
    target = os.path.join(output_path, self.parser.parsedModel.name + ".py")
    self.out_file = open(target, "w")
    self.rename()
def convert_to_enum(self, cls, use_type='string'):
    """Run the base enum conversion, then add PHP string/serialize helpers."""
    Writer.convert_to_enum(self, cls, use_type)

    def add_function(name, operations=(), args=()):
        # Build a Function with the given body/args and attach it to cls.
        fn = Function()
        fn.name = name
        for op in operations:
            fn.operations.append(op)
        for arg in args:
            fn.args.append(arg)
        cls.functions.append(fn)

    add_function('__toString', ['return $this->_value;'])
    add_function('str', ['return (string)$this;'])
    # The backing member holds the enum's string value after conversion.
    for member in cls.members:
        if member.name == '_value':
            member.type = 'string'
    add_function('set', ['$this->_value = $value;'], [['value', '']])
    add_function('serialize')
    add_function('deserialize')
class DecryptionWriter:
    """Thin wrapper that records decryption benchmark rows to CSV."""

    def __init__(self):
        # One column per cipher/scheme, plus the test-vector column.
        header = ['VECTORS', 'AES-EBC', 'AES-CBC', 'RSA_OAEP']
        self.writer = Writer('decryption_output.csv', header)

    def write(self, row):
        """Append one result row to the CSV."""
        self.writer.write(row)
class HashingWriter:
    """Thin wrapper that records hashing benchmark rows to CSV."""

    def __init__(self):
        # One column per hash family, plus the test-vector column.
        header = ['VECTORS', 'SHA-1', 'SHA-2', 'SHA-3']
        self.writer = Writer('hashing_output.csv', header)

    def write(self, row):
        """Append one result row to the CSV."""
        self.writer.write(row)
class VerifyWriter:
    """Thin wrapper that records signature-verification rows to CSV."""

    def __init__(self):
        # One column per verification scheme, plus the test-vector column.
        header = ['VECTORS', 'RSA_PSS', 'ECDSA', 'DSA']
        self.writer = Writer('verify_output.csv', header)

    def write(self, row):
        """Append one result row to the CSV."""
        self.writer.write(row)
def __init__(self, sbmlFileName, modelName="", inputPath="", outputPath=""):
    """Delegate SBML parsing to the base Writer, then open the .py output."""
    Writer.__init__(self, sbmlFileName, modelName, inputPath, outputPath)
    # The generated module is named after the parsed model.
    out_name = self.parsedModel.name + ".py"
    self.out_file = open(os.path.join(outputPath, out_name), "w")
def process(self):
    """Match variables between the new and old columns of Sheet1.

    Columns A/B hold the new variable/label, C/D the old ones. For each
    new entry, an old entry matches when labels are equal and the base
    variable names (first '_' token with 'xxx' removed) agree. Exactly one
    match marks the row as updated; otherwise the new values are kept.
    Writes the result via Writer and returns the final dict keyed by row.
    """
    wb = openpyxl.load_workbook(self.file)
    # wb.get_sheet_by_name() is deprecated and removed in openpyxl 3.x;
    # index the workbook directly instead.
    sheet = wb['Sheet1']
    data_new_file = {}
    data_old_file = {}
    data_final = {}
    for row in range(1, sheet.max_row + 1):
        current_row = str(row)
        new_var = sheet['A' + current_row].value
        old_var = sheet['C' + current_row].value
        new_label = sheet['B' + current_row].value
        old_label = sheet['D' + current_row].value
        if new_var and new_label:
            data_new_file[row] = {'var': new_var, 'label': new_label}
        if old_var and old_label:
            data_old_file[row] = {'var': old_var, 'label': old_label}
    for item in data_new_file:
        label = data_new_file[item]['label']
        new_variable = data_new_file[item]['var'].split('_')[0]
        new_variable_s = new_variable.replace('xxx', '')
        tmp_old_item = []
        for row in data_old_file:
            old_variable = data_old_file[row]['var'].split('_')[0]
            old_variable_s = old_variable.replace('xxx', '')
            if (label == data_old_file[row]['label']
                    and old_variable_s == new_variable_s):
                # NOTE(review): this guards on the *new* row key existing
                # in the old dict — preserved as-is, but confirm intent.
                if item in data_old_file:
                    tmp_old_item.append({
                        'updated': 1,
                        'var': data_old_file[row]['var'],
                        'label': data_old_file[row]['label']
                    })
        if len(tmp_old_item) == 1:
            # Unambiguous match: carry the old variable forward.
            data_final[item] = tmp_old_item[0]
        else:
            # No match or ambiguous: keep the new values, flag not-updated.
            data_final[item] = {
                'updated': 0,
                'var': data_new_file[item]['var'],
                'label': data_new_file[item]['label']
            }
    w = Writer(data_final, data_old_file, self.file)
    w.create_file()
    return data_final
def __init__(self, path=None, lang="ar", edition_reviews=False):
    """Set up the reviews scraper for the given language."""
    # Language of reviews to be scraped.
    self._lang = lang
    # Writing and browsing managers.
    if path:
        self.wr = Writer(path)
    else:
        self.wr = Writer()
    self.br = Browser(edition_reviews)
    # Worker threads spawned during scraping.
    self._threads = []
    # Counter for reviews in other languages than the requested one.
    self._invalid = None
def ticker_wrapper(ticker: str, last_open: datetime, resultswriter: Writer,
                   debug_level=0):
    """Compute the MagicFormula row for *ticker* and write it out.

    Failures are reported through the MagicFormula debug writer instead of
    propagating, so one bad ticker does not abort a batch run.
    """
    magic_object = MagicFormula(ticker, last_open, debug_level)
    try:
        row = str(magic_object)
        print(row)
        resultswriter.write_row(row)
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate; the trailing space in the original message
        # indicates the ticker was meant to be appended.
        magic_object.debug_writer(
            0, "*** CRITICAL FAILURE: Failure on ticker " + ticker)
def annotationMode(settings, read):
    """Annotate clinical notes (or reuse saved annotations) and build the matrix."""
    print("Annotation mode!")
    clinicalNotes = DatasetReader.readClinicalNotes(
        settings["dataset"]["directory"], settings["dataset"]["name"])
    if read:
        # Reuse previously saved Neji annotations.
        nejiAnnotations = Annotator.readNejiAnnotations(
            settings["dataset"]["neji_annotations"])
    else:
        # Annotate from scratch and cache the result on disk.
        nejiAnnotations = Annotator.annotate(clinicalNotes)
        Writer.writeAnnotations(nejiAnnotations,
                                settings["dataset"]["neji_annotations"])
    annotations = Annotator.postProcessing(clinicalNotes, nejiAnnotations,
                                           settings["post_vocabularies"])
    matrix = Writer.writeMatrix(annotations,
                                settings["dataset"]["matrix_location"])
    print("Done!")
    return matrix, clinicalNotes
def run(self):
    """Load the pickled AST, run optional optimizations, infer types, and
    code-generate EzPC output to Util.Config.outputFileName."""
    with open(Util.Config.astFile, "rb") as ff:
        ast = pickle.load(ff)
    if not (Util.Config.disableAllOpti):
        if not (Util.Config.disableRMO):
            print("Performing Relu-maxpool optimization...")
            ReluMaxpoolOpti.ReluMaxpoolOpti().visit(ast)
            print("Relu-maxpool optimization done.")
        if not (Util.Config.disableLivenessOpti):
            print("Performing Garbage collection...")
            mtdAST = MtdAST()
            GC = GarbageCollector.GarbageCollector(ast)
            GC.run([mtdAST])
            print("Garbage collection done.")
    # Perform type inference and annotate nodes with type information
    InferType().visit(ast)
    # if Util.Config.printASTBool :
    # AST printing is hard-disabled here; flip to the config flag above
    # to re-enable debug dumps.
    if False:
        PrintAST().visit(ast)
        print("\n")
        sys.stdout.flush()
    IRUtil.init()
    compiler = IRBuilderCSF()
    res = compiler.visit(ast)
    # Post-passes over the IR: scale fixup, then name fixup.
    res = self.fixOuputScale(res, compiler)
    res = self.fixNames(res, compiler)
    Util.write_debug_info(compiler.name_mapping)
    # Insert a generic start_computation and end_computation function call after all input IR statements.
    res = self.insertStartEndFunctionCalls(res)
    writer = Writer(Util.Config.outputFileName)
    # Map the user-requested debug variable to its EzPC name, if present.
    debugVarEzPCName = (compiler.name_mapping[Util.Config.debugVar] if
                        (Util.Config.debugVar in compiler.name_mapping) else
                        None)
    if Util.forEzPC():
        codegen = EzPCCodegen(writer, compiler.globalDecls, debugVarEzPCName)
    else:
        # Only the EzPC backend is supported in this build.
        assert False
    codegen.printAll(*res)
    writer.close()
def write_object(self, object_, flags):
    """Emit a member declaration (HPP) or its static definition (CPP).

    Appends the generated text to the dict produced by the base
    Writer.write_object under the *flags* key and returns that dict.
    """
    out = Writer.write_object(self, object_, flags)
    if flags == FLAG_HPP:
        # Header: plain declaration terminated with ';'.
        out[flags] += self.build_type_str(object_) + ';\n'
    if flags == FLAG_CPP:
        if object_.is_static:
            # Static members need an out-of-class definition; require an
            # initializer unless the enclosing class is an enum.
            if object_.initial_value is None and self._current_class.type != 'enum':
                Error.exit(Error.STATIS_MEMBER_SHOULD_HAVE_INITIALISATION,
                           self._current_class.name, object_.name)
            if self._current_class.type == 'enum':
                pattern = '{4}{0} {2}::{1}'
            else:
                pattern = '{4}{0} {2}::{1} = {3}'
            pattern += ';\n'
            modifier = ''
            if object_.is_const:
                modifier = 'const '
            out[flags] += pattern.format(convert_type(object_.type),
                                         object_.name,
                                         self._current_class.name,
                                         object_.initial_value, modifier)
    # (Removed a dead trailing `pass` from the original.)
    return out
def __init__(self, parameter, liveView, pieView):
    """Wire up the sentiment analyzer, writer, and both views."""
    self.sentimentAnalyzer = SentimentAnalyzer()
    self.writer = Writer()
    # Search term this obtainer tracks.
    self.parameter = parameter
    print('Creating token')
    self.liveView = liveView
    self.pieView = pieView
def start():
    """Demo: write two sheets into inout/Testy.ods."""
    sheets = OrderedDict()
    sheets['Sheet -'] = [[1, 5, 9], [2, 'f**k', 0]]
    writer = Writer('inout/Testy.ods')
    writer.setData(sheets)
    # A second sheet added explicitly rather than via setData.
    grid = [[1, 2, '3'], [4, 5, 6], ['7', '8', '9']]
    writer.addSheet('Hi there', grid)
    writer.write('ods')
def run(self):
    """Load the pickled AST, run optional optimizations and liveness
    analysis, infer types, and code-generate EzPC output."""
    with open(Util.Config.astFile, 'rb') as ff:
        ast = pickle.load(ff)
    if not (Util.Config.disableAllOpti):
        if not (Util.Config.disableRMO):
            print("Performing Relu-maxpool optimization...")
            # Perform optimizations on the AST
            ReluMaxpoolOpti.ReluMaxpoolOpti().visit(ast)
        if not (Util.Config.disableLivenessOpti):
            print("Performing Liveness Optimization...")
            # Perform liveness analysis optimization on the AST
            mtdAST = MtdAST()
            LivenessOpti.LivenessAnalysis().visit(ast)
            LivenessOpti.LivenessOpti().visit(ast, [mtdAST, 0, {}])
    if Util.Config.printASTBool:
        # Optional debug dump of the (optimized) AST.
        PrintAST().visit(ast)
        sys.stdout.flush()
    # Perform type inference
    InferType().visit(ast)
    IRUtil.init()
    compiler = IRBuilderCSF()
    res = compiler.visit(ast)
    Util.write_debug_info(compiler.name_mapping)
    # Insert a generic start_computation and end_computation function call after all input IR statements.
    res = self.insertStartEndFunctionCalls(res)
    writer = Writer(Util.Config.outputFileName)
    # Map the user-requested debug variable to its EzPC name, if present.
    debugVarEzPCName = compiler.name_mapping[Util.Config.debugVar] if (
        Util.Config.debugVar in compiler.name_mapping) else None
    if Util.forEzPC():
        codegen = EzPCCodegen(writer, compiler.decls, debugVarEzPCName)
    else:
        # Only the EzPC backend is supported in this build.
        assert False
    codegen.printAll(*res)
    writer.close()
def main():
    """Launch the readers/writers animation demo."""
    book = Book()
    gui = Gui()
    # GuiRunner(gui, "animation").start()
    # The threaded GUI runner worked on Linux Mint but errored for other
    # members on Windows, so the GUI runs in the main thread instead.
    gui.change_state("W", 7, gui.nowhere, gui.scheduling)
    Writer(book, 7, gui).start()
    # Three reader threads, then two more writer threads.
    for reader_id in range(3):
        gui.change_state("R", reader_id, gui.nowhere, gui.scheduling)
        Reader(book, reader_id, gui).start()
    for writer_id in range(2):
        gui.change_state("W", writer_id, gui.nowhere, gui.scheduling)
        Writer(book, writer_id, gui).start()
    gui.animation(50, 50, 5)
def _write_class_hpp(self, class_):
    """Assemble the full .hpp text for *class_* into out[FLAG_HPP].

    Builds the inheritance list, member/function bodies, includes and
    forward declarations, then splices everything into a namespace +
    include-guard template. '__begin__'/'__end__' stand in for braces so
    str.format does not clash with literal '{'/'}'.
    """
    out = Writer.write_class(self, class_, FLAG_HPP)
    self._current_class = class_
    # "public Base1, public Base2" inheritance clause.
    behaviors = list()
    for c in class_.behaviors:
        behaviors.append('public ' + c.name)
    behaviors = ', '.join(behaviors)
    objects = self.write_objects(class_.members, FLAG_HPP)
    functions = self.write_functions(class_.functions, FLAG_HPP)
    constructor = _create_constructor_function_hpp(class_)
    destructor = _create_destructor_function_hpp(class_)
    includes, forward_declarations, forward_declarations_out = self._find_includes(
        class_, FLAG_HPP)
    # Deduplicate and sort include lines and forward declarations.
    includes = list(set(includes.split('\n')))
    includes.sort()
    includes = '\n'.join(includes)
    forward_declarations = list(set(forward_declarations.split('\n')))
    forward_declarations.sort()
    forward_declarations = '\n'.join(forward_declarations)
    self._current_class = None
    pattern = ''
    if len(class_.behaviors) > 0:
        pattern += '{0} {1} : {2}'
    else:
        pattern += '{0} {1}'
    # Only emit the functions/objects slots when non-empty.
    if functions[FLAG_HPP].strip() == '':
        f = ''
    else:
        f = '\n{4}'
    if objects[FLAG_HPP].strip() == '':
        o = ''
    else:
        o = '{3}'
    # Enums place members before the public section; classes after it.
    if class_.type != 'enum':
        pattern += '\n__begin__\npublic:\n{5}{6}' + f + o + '__end__;\n\n'
    else:
        pattern += '\n__begin__{5}' + o + 'public:' + f + '__end__;\n\n'
    # Wrap in the project namespace, then in the include guard.
    pattern = '{3}\nnamespace {0}\n__begin__{2}\n\n{1}__end__//namespace {0}'.\
        format(_get_namespace(), pattern, forward_declarations,
               forward_declarations_out)
    pattern = '#ifndef __mg_{0}_h__\n#define __mg_{0}_h__\n{2}\n\n{1}\n\n#endif //#ifndef __{0}_h__'.\
        format(class_.name, pattern, includes)
    out[FLAG_HPP] += pattern.format('class', class_.name, behaviors,
                                    objects[FLAG_HPP], functions[FLAG_HPP],
                                    constructor, destructor)
    # Restore the real braces.
    out[FLAG_HPP] = re.sub('__begin__', '{', out[FLAG_HPP])
    out[FLAG_HPP] = re.sub('__end__', '}', out[FLAG_HPP])
    return out
def distribute_test_suite(node_dict, test_suite, data): """SSH into the remote instance and transfer data with SCP.""" # compress project-dir project = shutil.make_archive(expanduser('~/tmp/project'), 'gztar', root_dir=data['project']) writer = Writer(data, test_suite.content) #distribute test suite among instances for node, bundle in zip(node_dict.iteritems(), test_suite): config, cl = writer.generate_input(bundle) ip = node[1] key_file = data['ssh-key'] user = data['username'] client = paramiko.SSHClient() client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) client.connect(hostname=ip, username=user, key_filename=key_file) with SCPClient(client.get_transport()) as scp: scp.put(config, "/home/" + user + "/tmp/config.tar.gz") scp.put(cl, "/home/" + user + "/tmp/params.tar.gz") scp.put(project, "/home/" + user + "/tmp/project.tar.gz") client.close()
def start_writer(self):
    """Spawn and return the thread that maintains the downstream
    connection to the ground station."""
    thread = Writer('127.0.0.1', 9000, self.broadcastQueue)
    thread.setName('ISRU Writer')
    thread.start()
    return thread
def daily_news(self):
    """Fetch today's articles, compose tweets for them, publish each one,
    and return the tweets."""
    # Gather all articles.
    paper_bundle = Paper_Boy().get_the_paper()
    # Attach the statistics.
    Analyzer(paper_bundle).fill_stats()
    # Compose the tweets.
    tweets = Writer(paper_bundle).write_all_tweets()
    # Publish them one by one.
    publisher = Publisher()
    for tweet in tweets:
        publisher.post_tweet(tweet)
    return tweets
def main():
    """CLI entry point: argv[1]=input file, argv[2]=output file."""
    input_file = sys.argv[1]
    output_file = sys.argv[2]
    # Preprocess, run LDA, then write the topics as JSON.
    text, ids = preProcess(input_file)
    vector = lda(text)
    writer = Writer(output_file, vector, ids)
    writer.getTopics()
    writer.writeJson()
    print('Done!')
def __init__(self):
    """Constructor: configure the ICD-10 scraping endpoints and the
    anatomical-area / laterality vocabularies."""
    # Root of the site being scraped.
    self.base = "http://www.icd10data.com";
    # Output sink for parsed codes.
    self.writer = Writer("temp.txt");
    # Laterality keywords searched for in code descriptions.
    self.direction = Set(["right","left"]);
    # Known anatomical-area phrases matched against descriptions.
    self.areas = [u'finger(s)', u'leg',u'thigh', u'femur', u'thumb', u'jaw',
        u'pelvic region and thigh', u'initial encounter for fracture',
        u'humerus', 'joint', u'foot', u'mid-cervical region',
        u"angle's class ii", 'shoulder', u'ankle and toes',
        u'occipito-atlanto-axial region', u'bone', u'ulna and radius',
        u'ring finger', u'thoracolumbar region', u'tibia and fibula',
        u'vertebrae', u'ankle and joints of foot', u'arm',
        u'thoracic region', u'lumbar region', u'distal tibia', u'finger',
        u'ulna', u'subsequent encounter for fracture with malunion',
        'head region', u'little finger', u"angle's class iii",
        u'with tophus (tophi)', u'fibula', u'central', u'proximal tibia',
        u'radius and ulna',u'radius', u'upper arm',
        u'organ involvement unspecified', u'bone plate', u'upper arms',
        u'high cervical region', u'excluding foot', u'distal femur',
        u'middle finger', u'distal humerus',
        u'subsequent encounter for fracture with nonunion', u'ankle',
        u'joints of hand', u'multiple sites in spine', u'sequela',
        u'proximal femur', u'index finger', u'distal radius', u'ear',
        u'organ or system involvement unspecified', u'sequela of fracture',
        u'without tophus (tophi)', u'with other organ involvement',
        u'with respiratory involvement', 'elbow', u'lumbosacral region',
        u'hip', u'forearm',
        u'thoracolumbar and lumbosacral intervertebral disc disorder',
        u'pelvis', u'toe(s)', u'proximal humerus', u'tibia',
        u'with myopathy',
        u'subsequent encounter for fracture with routine healing',
        u'ankle and joints of foot', u'hand', u'finger joints', u'wrist',
        u'overuse and pressure other site', u'ankle and foot', u'knee',
        u'cervicothoracic region', u"angle's class i", u'cervical region',
        'vertebra', u'upper limb', u'sacral and sacrococcygeal region',
        u'lower leg'];
    # Longest (most-words) phrases first so the most specific area wins.
    self.areas.sort(key=lambda x: len(x.split(" ")),reverse=True);
def analyze(self, audioFilename):
    """Run SonicApi melody and beat analysis on the file, derive metadata,
    and write each result set as its own table."""
    sonic = SonicApi(audioFilename)
    writer = Writer(audioFilename)
    # Remote analyses via SonicApi.
    melody = sonic.analyzeMelody()
    beat = sonic.analyzeBeat()
    # Derive metadata (BPM, key, etc.) from the raw results.
    meta = self.extractMetadata(beat, melody)
    # TODO: Aubio-based analyses (notes, pitch, onset, TSS) are not
    # wired in yet.
    for table_data, table_name in ((beat['click_marks'], 'beat'),
                                   (melody['melody_result'], 'melody'),
                                   (meta, 'meta')):
        writer.writeTable(table_data, table_name)
def save_generated_classes(self, out_directory):
    """Save classes via the base Writer, then emit the factory and the
    visitor-acceptor glue code."""
    Writer.save_generated_classes(self, out_directory)
    self.createFactory()
    self.createVisitorAcceptors()
def __init__(self, parser, serialize_format):
    """Initialize the base Writer and reset the class being generated."""
    Writer.__init__(self, parser, serialize_format)
    # No class is being written until generation starts.
    self.current_class = None
def update_display(display, payload):
    """Render a thermostat status screen on *display* from a JSON payload.

    payload carries heating_state, msg, inside_temp and outside_temp;
    None selects a built-in example command for testing. Returns True so
    the caller's dispatch loop keeps running. MicroPython environment —
    assumes a framebuf-style display driver; TODO confirm.
    """
    def calculate_width(font, text):
        # Sum per-glyph widths to center text manually.
        w = 0
        for c in text:
            glyph, char_height, char_width = font.get_ch(c)
            w += char_width
        return w
    try:
        command = {}
        if payload is None:
            # An example command for testing
            command = {
                "heating_state": True,
                "msg": "auto",
                "inside_temp": 23.456,
                "outside_temp": -9.876
            }
        else:
            try:
                import ujson as json
                command = json.loads(payload)
            except (OSError, ValueError):
                import kiota.Util as Util
                Util.log(update_display,
                         "Can't parse payload: '{}'".format(payload))
        display.fill(0)
        # Invert the header bar when the heating is on.
        ink = 1
        heating_state = command["heating_state"]
        if heating_state:
            ink = 0
            display.fill_rect(0, 0, 128, 14, 1)
        if command["msg"] is not None:
            display.text(str(command["msg"]), 0, 1, ink)
        if command["heating_state"]:
            display.text("ON", 104, 1, ink)
        else:
            display.text("OFF", 104, 1, ink)
        # Large temperature digits use a custom font via the Writer helper.
        import KameronRegularNumbers25 as font
        from Writer import Writer
        writer = Writer(display, font)
        writer.set_clip(True, True)
        inside_temp = "--"
        try:
            # Fall back to "--" when the value is missing or non-numeric.
            inside_temp = str(int(round(float(command["inside_temp"]))))
        except:
            pass
        # Center inside temp in the left half (x 0..63).
        writer.set_textpos(23,
                           int((64 - calculate_width(font, inside_temp)) / 2))
        display.fill_rect(0, 15, 64, 41, 0)
        writer.printstring(inside_temp)
        outside_temp = "--"
        try:
            outside_temp = str(int(round(float(command["outside_temp"]))))
        except:
            pass
        # Center outside temp in the right half (x 64..127).
        writer.set_textpos(
            23, 64 + int((64 - calculate_width(font, outside_temp)) / 2))
        display.fill_rect(64, 15, 64, 41, 0)
        writer.printstring(outside_temp)
        display.text("inside", 0, 56)
        display.text("outside", 72, 56)
        display.show()
    except Exception as e:
        # Last-resort: show an error marker rather than crash the loop.
        display.text("ERROR", 0, 0)
        display.show()
        import sys
        sys.print_exception(e)
    return True
def __init__(self, sbmlFileName, modelName="", inputPath="", outputPath=""):
    """Delegate SBML parsing to the base Writer, then open the .cu output."""
    Writer.__init__(self, sbmlFileName, modelName, inputPath, outputPath)
    # The generated CUDA source is named after the parsed model.
    cu_name = self.parsedModel.name + ".cu"
    self.out_file = open(os.path.join(outputPath, cu_name), "w")
def __init__(self):
    """Open the CSV used to record decryption benchmark results."""
    # One column per cipher/scheme, plus the test-vector column.
    header = ['VECTORS', 'AES-EBC', 'AES-CBC', 'RSA_OAEP']
    self.writer = Writer('decryption_output.csv', header)
class Parser:
    """Scraper for icd10data.com musculoskeletal (M00-M99) codes.

    Walks the chapter index, follows sub-lists, extracts each code's
    description, infers anatomical area and laterality, and stores rows
    via the Writer.
    """

    def __init__(self):
        """Constructor: configure endpoints and matching vocabularies."""
        # Root of the site being scraped.
        self.base = "http://www.icd10data.com";
        # Output sink for parsed codes.
        self.writer = Writer("temp.txt");
        # Laterality keywords searched for in code descriptions.
        self.direction = Set(["right","left"]);
        # Known anatomical-area phrases matched against descriptions.
        self.areas = [u'finger(s)', u'leg',u'thigh', u'femur', u'thumb',
            u'jaw', u'pelvic region and thigh',
            u'initial encounter for fracture', u'humerus', 'joint', u'foot',
            u'mid-cervical region', u"angle's class ii", 'shoulder',
            u'ankle and toes', u'occipito-atlanto-axial region', u'bone',
            u'ulna and radius', u'ring finger', u'thoracolumbar region',
            u'tibia and fibula', u'vertebrae', u'ankle and joints of foot',
            u'arm', u'thoracic region', u'lumbar region', u'distal tibia',
            u'finger', u'ulna',
            u'subsequent encounter for fracture with malunion',
            'head region', u'little finger', u"angle's class iii",
            u'with tophus (tophi)', u'fibula', u'central',
            u'proximal tibia', u'radius and ulna',u'radius', u'upper arm',
            u'organ involvement unspecified', u'bone plate', u'upper arms',
            u'high cervical region', u'excluding foot', u'distal femur',
            u'middle finger', u'distal humerus',
            u'subsequent encounter for fracture with nonunion', u'ankle',
            u'joints of hand', u'multiple sites in spine', u'sequela',
            u'proximal femur', u'index finger', u'distal radius', u'ear',
            u'organ or system involvement unspecified',
            u'sequela of fracture', u'without tophus (tophi)',
            u'with other organ involvement',
            u'with respiratory involvement', 'elbow',
            u'lumbosacral region', u'hip', u'forearm',
            u'thoracolumbar and lumbosacral intervertebral disc disorder',
            u'pelvis', u'toe(s)', u'proximal humerus', u'tibia',
            u'with myopathy',
            u'subsequent encounter for fracture with routine healing',
            u'ankle and joints of foot', u'hand', u'finger joints',
            u'wrist', u'overuse and pressure other site',
            u'ankle and foot', u'knee', u'cervicothoracic region',
            u"angle's class i", u'cervical region', 'vertebra',
            u'upper limb', u'sacral and sacrococcygeal region',
            u'lower leg'];
        # Longest (most-words) phrases first so the most specific area wins.
        self.areas.sort(key=lambda x: len(x.split(" ")),reverse=True);

    def getmainlist(self):
        """Fetch the M00-M99 chapter index and process every sub-list."""
        response = urllib2.urlopen("http://www.icd10data.com/ICD10CM/Codes/M00-M99");
        self.htmlparser = etree.HTMLParser()
        tree = etree.parse(response, self.htmlparser);
        # Links to the per-range pages.
        self.hreflist = tree.xpath("/html/body/div[2]/div/div[4]/ul/li/a/@href");
        self.hreflist = self.hreflist;  # NOTE(review): self-assignment, no-op
        self.getsublist(self.hreflist);
        self.writer.close();

    def getsublist(self,hreflist):
        """Follow each range page and dispatch every code link found."""
        for href in hreflist:
            response = urllib2.urlopen(self.base+href);
            soup = BeautifulSoup(response.read(),"lxml");
            lists = soup.select("ul li span a");
            for l in lists:
                self.selectcode(l.attrs["href"]);

    def selectcode(self,link):
        """Parse one code page: extract code, description, area and side,
        then insert the row via the writer."""
        response = urllib2.urlopen(self.base+link);
        # NOTE(review): page bytes are re-encoded via gbk — confirm the
        # site actually serves GBK before changing this.
        soup = BeautifulSoup(response.read().decode("gbk").encode("utf-8"),"html.parser");
        # Green-triangle bullets mark billable codes on these pages.
        greenimgs = soup.select('img[src="/images/bullet_triangle_green.png"]');
        for greenimg in greenimgs:
            sibilings = greenimg.parent.findChildren("span");
            code = sibilings[0].a.text;
            description = sibilings[1].text;
            side = "NULL";
            area = "NULL";
            area,side = self.setarea_side(description);
            # Re-fetch the full description when it was truncated.
            description = self.setdescription(sibilings[1],sibilings[0].a);
            self.writer.insert(code, description, 10, side, area, 0);

    def setdescription(self,description_obj,link_obj):
        """Return the full description, following the code's own page when
        the listing shows an ellipsis-truncated text."""
        if (description_obj.text.find(u"\u2026\u2026")!=-1):
            response = urllib2.urlopen(self.base+link_obj.attrs["href"]);
            soup = BeautifulSoup(response.read().decode("gbk").encode("utf-8"),"html.parser");
            description = soup.select("div div div h2")[0].text;
            return description;
        return description_obj.text;

    def setarea_side(self,description):
        """Extract (area, side) from a description using the configured
        vocabularies; bare 'joint' with no side is treated as no area."""
        area = "NULL";
        side = "NULL";
        desc= description;
        # First match wins for laterality; strip it from the text.
        for direction in self.direction:
            if (desc.find(direction)!=-1):
                side = direction;
                desc = desc.replace(direction+" ","");
                break;
        # Areas are pre-sorted longest-first, so the first hit is the
        # most specific phrase.
        for pos in self.areas:
            if (desc.find(pos)!=-1):
                area = pos;
                break;
        if (area=="joint" and side=="NULL"):
            area = "NULL"
        return area, side;
# NOTE(review): these calls continue a tally function whose `def` line lies
# before this chunk — monthly counts of problems solved. TODO confirm the
# enclosing function and its indentation.
W.writeTableRow("March 30, 2017:", "**129**")
W.writeTableRow("February 28, 2017:", "**102**")
W.writeTableRow("January 30, 2017:", "**62**")
W.writeTableRow("December 30, 2016:", "**40**")
W.writeTableRow("November 30, 2016:", "**20**")
W.writeTableRow("October 30, 2016:", "**5** ")

def pushToGit():
    """Commit and push the generated files.

    NOTE(review): each os.system call runs in its own shell, so the `cd`
    does not affect the git commands that follow — confirm intent.
    """
    os.system("cd ~/personal/LEETCodePractice/")
    os.system("git add .")
    os.system("git commit -m \"added files\" ")
    os.system("git push")

# Module-level handles, populated inside the main guard below.
S = None
W = None

if __name__ == "__main__":
    S = Scraper()
    W = Writer()
    # Build the README sections, then clean up and push.
    writeHeader()
    writeQsSolved()
    writeLog()
    W.cleanup()
    S.cleanup()
    pushToGit()
class TweetObtainer(StreamListener):
    """Tweepy stream listener that runs sentiment analysis on live tweets
    and forwards the results to the views and a file writer."""

    # Collaborators and per-session state, bound in __init__/init_stream.
    writer = None
    sentimentAnalyzer = None
    tokens = ''
    parameter = ''
    liveView = None
    pieView = None
    currentNumber = 0  # tweets written this session (capped at 10000)
    stream = None

    def __init__(self, parameter, liveView, pieView):
        """Wire up the analyzer, writer and both views."""
        self.sentimentAnalyzer = SentimentAnalyzer()
        self.writer = Writer()
        self.parameter = parameter
        print('Creating token')
        self.liveView = liveView
        self.pieView = pieView

    def init_stream(self):
        """Choose the file that raw analyzed tweets are appended to."""
        self.writer.setSaveFile('StreamedTweets.txt')

    def start(self):
        """Authenticate against Twitter and start the filtered stream
        (blocks; tweets arrive via on_data)."""
        print("Setting up tweetobtainer")
        #TwitterAPI authorization
        auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
        auth.set_access_token(access_token, access_secret)
        self.stream = Stream(auth, self)
        # Track the configured term, English tweets only.
        self.stream.filter(track=[self.parameter], languages=['en'])

    # Translation of the Dutch note below: "Called every time a tweet
    # arrives. Forwards the fetched tweet to the analysis and writes
    # analysis+tweet to a file if fewer than 10,000 have been fetched
    # this session. Sleeps for 1 second so there is enough time to
    # process the tweet."
    '''
    Wordt elke keer als er een tweet binnenkomt aangeroepen
    Stuurt de opgehaalde tweet door naar de analyse en schrijft de
    analyse+tweet weg in een bestand als er minder dan 10.000 zijn
    opgehaald deze sessie.
    Slaapt voor 1 seconde zodat er genoeg tijd is om de tweet te verwerken.
    '''
    def on_data(self, data):
        text = json.loads(data)
        #Use only the text field of obtained JSON String
        if 'text' in text:
            text = text['text']
            tweet = self.sentimentAnalyzer.preprocess(text)
            print(tweet)
            sentiment = self.sentimentAnalyzer.analyse(tweet)
            # Persist only up to the per-session cap.
            if self.currentNumber <= 10000:
                self.writer.write(sentiment + text)
                self.currentNumber += 1
            self.liveView.update(sentiment)
            self.pieView.update()
            time.sleep(1)
        return True

    def on_error(self, status_code):
        """Log stream errors; returning True keeps the stream alive."""
        print('Got an error with status code: ' + str(status_code))
        return True  # To continue listening

    def on_timeout(self):
        """Log timeouts; returning True keeps the stream alive."""
        print('Timeout...')
        return True  # To continue listening

    def stop_stream(self):
        """Disconnect the underlying Tweepy stream."""
        self.stream.disconnect()