def getSettingsFromDe(self, link='', just_check=False):
    """Load proxy settings from KDE's kioslaverc into the settings widgets.

    Returns True when a proxy is configured (and, unless just_check is
    set, fills the http/https/ftp host and port widgets); False otherwise.
    """
    rc_path = path.join(Pds.config_path, 'share/config/kioslaverc')
    rc = Pds.parse(rc_path, force=True)
    proxy_type = rc.value('Proxy Settings/ProxyType').toString()
    if not proxy_type or int(proxy_type) <= 0:
        return False
    if just_check:
        return True
    # Identical host/port extraction for each of the three proxy kinds.
    for key, host_widget, port_widget in (
            ('httpProxy', self.settings.httpProxy, self.settings.httpProxyPort),
            ('httpsProxy', self.settings.httpsProxy, self.settings.httpsProxyPort),
            ('ftpProxy', self.settings.ftpProxy, self.settings.ftpProxyPort)):
        parsed = parse_proxy(rc.value('Proxy Settings/%s' % key).toString())
        host_widget.setText(parsed['host'])
        port_widget.setValue(int(parsed['port']))
    return True
def configure_part_1(gen_timer, config):
    """Write basic device configuration: client MAC (par "00") and
    SLIP channel (par "03")."""
    mac_value = parse_mac(config.value('client_mac'))
    channel_value = binascii.hexlify(
        bytearray([config.value('slip_channel')]))
    steps = [
        set_conf_par(gen_timer(), config, "00", mac_value),
        set_conf_par(gen_timer(), config, "03", channel_value),
    ]
    return protocol.Protocol(steps)
def read_matrix(filename, atox=float, comment_char=None, return_numpy=None):
    """Read a numeric matrix (one row per line) from filename.

    comment_char and return_numpy fall back to the 'read_matrix' config
    section when passed as None; rows are produced by read_rows.
    """
    if comment_char is None:
        comment_char = config.value('read_matrix', 'comment_char')
    if return_numpy is None:
        return_numpy = config.value('read_matrix', 'return_numpy')
    rows = read_rows(filename, atox=atox,
                     comment_char=comment_char, return_numpy=0)
    if not return_numpy:
        return rows
    import numpy
    return numpy.array(rows)
def read_matrix(filename, atox=float, comment_char=None, return_numpy=None):
    """Read a matrix of values from filename via read_rows.

    Unspecified comment_char / return_numpy are looked up in the
    'read_matrix' section of the config.
    """
    if comment_char is None:
        comment_char = config.value('read_matrix', 'comment_char')
    if return_numpy is None:
        return_numpy = config.value('read_matrix', 'return_numpy')
    matrix = read_rows(filename, atox=atox, comment_char=comment_char,
                       return_numpy=0)
    if return_numpy:
        import numpy
        matrix = numpy.array(matrix)
    return matrix
def __init__(self):
    """Set up per-request state: CGI form, config values, cookies and
    the defaults every page handler expects."""
    self.logging_in_status = 'unknown'
    self.form = cgi.FieldStorage()
    # Title of the HTML page.
    self.title = 'Reverse Gossip'
    # Name of the server.
    # If they set "single", this is the only server
    # we allow access to.
    self.server = config.value('general', 'single')
    # `is not None` instead of `!= None` (identity test for None).
    self.single = (self.server is not None)
    # What they want us to do.
    self.verb = ''
    # Some more parameters for the verb.
    self.params = []
    # URL prefix for static content
    # FIXME: This should go away
    self.static_prefix = config.value('web', 'static-prefix')
    # Previous, next, etc links. Keys should be as defined
    # in sec.6.12 of the HTML spec.
    self.headlinks = {'Contents': ('Index', '', ''),
                      'Appendix': ('Message of the day', 'motd', '')}
    # Cookie time:
    # (`in` replaces dict.has_key, which was removed in Python 3.)
    if 'HTTP_COOKIE' in os.environ:
        # They've sent us some cookies; better read them.
        self.incoming_cookies = Cookie.SimpleCookie(os.environ['HTTP_COOKIE'])
    else:
        # No cookies. Start with a blank sheet.
        self.incoming_cookies = Cookie.SimpleCookie()
    # Set up a cookie list ready for sending new ones.
    self.outgoing_cookies = Cookie.SimpleCookie()
    # We don't know who you are or what you plan to do.
    self.user = None
    # No connection yet.
    self.connection = None
    # The collated index
    self.collater = None
    # You can turn chrome off entirely if you like.
    # (This is done automatically if the MIME type doesn't contain "html".)
    self.no_chrome = False
def lwm2m_reset(timer, config):
    """Reset the LWM2M stack ("e13f"), retrying on timeout until the
    LWM2M OK response arrives."""
    reset_sequence = protocol.Protocol([
        emb6.command("e13f"),
        expect_lwm2m_ok,
    ])
    return protocol.OnTimeoutRetry(reset_sequence, timer,
                                   config.value('timeout'))
def ping(timer, config):
    """Ping the device ("10"), retrying on timeout until OK arrives."""
    ping_sequence = protocol.Protocol([
        emb6.command("10"),
        expect_ok,
    ])
    return protocol.OnTimeoutRetry(ping_sequence, timer,
                                   config.value('timeout'))
def configure_part_2(gen_timer, config, resources):
    """Configure LWM2M host ip ("02"), port ("03") and client name
    ("04"), then create every object and resource from the resource
    list."""
    port = config.value('host_port')
    # One create_object per distinct object id (set dedupes).
    objects = {resource.obj_id for resource in resources.list()}
    steps = [
        set_lwm2m_conf_par(gen_timer(), config, "02",
                           parse_ip(config.value('host_ip'))),
        set_lwm2m_conf_par(gen_timer(), config, "03",
                           binascii.hexlify(bytearray([port//256, port%256]))),
        set_lwm2m_conf_par(gen_timer(), config, "04",
                           binascii.hexlify(config.value('client_name'))),
    ]
    for obj in objects:
        steps.append(create_object(gen_timer(), config, obj, 0))
    for resource in resources.list():
        steps.append(create_resource(gen_timer(), config, resource.obj_id, 0,
                                     resource.res_id, resource.type,
                                     resource.size, 0))
    return protocol.Protocol(steps)
def stop(timer, config):
    """Send the stop command, then wait for OK and both stop responses
    ("4130" and "e14130"); retried on timeout."""
    steps = [
        send_stop_command,
        expect_ok,
        emb6.expect_response("4130"),
        emb6.expect_response("e14130"),
    ]
    return protocol.OnTimeoutRetry(protocol.Protocol(steps), timer,
                                   config.value('timeout'))
def set_conf_par(timer, config, conf_par, value):
    """Write configuration parameter conf_par ("20..."), then read it
    back ("21...") and verify the echoed value ("22...")."""
    write_step = emb6.command("20" + conf_par + value)
    readback_step = emb6.command("21" + conf_par)
    verify_step = emb6.expect_response("22" + conf_par + value,
                                       lambda m: m[:2] == "22")
    return protocol.OnTimeoutRetry(
        protocol.Protocol([write_step, expect_ok, readback_step, verify_step]),
        timer, config.value('timeout'))
def start_part_2a(gen_timer, error, config):
    """Start LWM2M."""
    start_sequence = protocol.Protocol([
        emb6.command("e131"),
        emb6.expect_response("e14131"),
    ])
    retried = protocol.OnTimeoutRetry(start_sequence, gen_timer(),
                                      config.value('timeout'))
    # Surface a clear message if all retries are exhausted.
    return protocol.ReportError(retried,
                                lambda: error("LWM2M start failed."))
def get_statuses(api, tag, limit):
    """Stream statuses matching `tag` using the credentials stored under
    the ['twitter', api] config section, reconnecting on stream errors,
    and return the listener's statuses once its limit is reached."""
    auth = tweepy.OAuthHandler(
        config.value(['twitter', api, 'consumer_key']),
        config.value(['twitter', api, 'consumer_secret']))
    auth.set_access_token(
        config.value(['twitter', api, 'access_token']),
        config.value(['twitter', api, 'access_token_secret']))
    listener = StdOutListener()
    listener.api = tweepy.API(auth)
    listener.limit = limit
    # There are different kinds of streams: public stream, user stream,
    # multi-user streams. In this example follow #programming tag.
    # For more details refer to https://dev.twitter.com/docs/streaming-apis
    while True:
        try:
            stream = tweepy.Stream(auth, listener)
            stream.filter(track=[tag])
            return listener.statuses
        # The previous bare `except:` also caught KeyboardInterrupt and
        # SystemExit (making the loop unkillable) and discarded the error.
        # Catch only real errors and report what went wrong.
        except Exception as exc:
            print('Stream error, reconnecting: %s' % exc)
def getSettingsFromKde(self, toggled):
    """When toggled, load proxy host/port values from KDE's kioslaverc
    into the settings widgets; otherwise clear them."""
    if not toggled:
        self.clear()
        return
    cf = path.join(Pds.config_path, 'share/config/kioslaverc')
    config = Pds.parse(cf, force=True)
    proxyType = config.value('Proxy Settings/ProxyType').toString()
    if proxyType and int(proxyType) > 0:
        # BUG FIX: the http value was written into the https widgets and
        # the https value into the http widgets; each entry now fills
        # its own widgets (matching getSettingsFromDe).
        http = str(config.value('Proxy Settings/httpProxy').toString()).rsplit(':', 1)
        self.settings.httpProxy.setText(http[0])
        self.settings.httpProxyPort.setValue(int(http[1]))
        https = str(config.value('Proxy Settings/httpsProxy').toString()).rsplit(':', 1)
        self.settings.httpsProxy.setText(https[0])
        self.settings.httpsProxyPort.setValue(int(https[1]))
        ftp = str(config.value('Proxy Settings/ftpProxy').toString()).rsplit(':', 1)
        self.settings.ftpProxy.setText(ftp[0])
        self.settings.ftpProxyPort.setValue(int(ftp[1]))
def create_resource(timer, config, obj, inst, res, type_, size, mod):
    """Create resource res on object obj / instance inst ("e170") and
    wait for the matching success response ("e172" ... status 0)."""
    request_payload = binascii.hexlify(bytearray(
        [obj//256, obj%256, inst, res//256, res%256, type_, size, mod]))
    reply_payload = binascii.hexlify(bytearray(
        [obj//256, obj%256, inst, res//256, res%256, 0]))
    return protocol.OnTimeoutRetry(
        protocol.Protocol([
            emb6.command("e170" + request_payload),
            emb6.expect_response("e172" + reply_payload),
        ]),
        timer, config.value('timeout'))
def create_object(timer, config, obj, inst):
    """Create object obj / instance inst ("e160"); expect the "e162"
    reply carrying the same ids plus status "00"."""
    ids = binascii.hexlify(bytearray([obj//256, obj%256, inst]))
    return protocol.OnTimeoutRetry(
        protocol.Protocol([
            emb6.command("e160" + ids),
            emb6.expect_response("e162" + ids + "00"),
        ]),
        timer, config.value('timeout'))
def read_rows(filename, atox=float, comment_char=None, return_numpy=None):
    """Read whitespace-separated rows of values from filename.

    Blank lines are skipped and lines starting with comment_char are
    ignored; each remaining line becomes a list (or numpy array when
    return_numpy is true) of values converted with atox.  Defaults for
    comment_char / return_numpy come from the 'read_rows' config section.
    """
    if comment_char is None:
        comment_char = config.value('read_rows', 'comment_char')
    if return_numpy is None:
        return_numpy = config.value('read_rows', 'return_numpy')
    if return_numpy:
        import numpy
    rows = []
    # `with` guarantees the handle is closed even if parsing raises;
    # the old open()/close() pair leaked it on exceptions.
    with open(filename) as f:
        for line in f:
            # str methods replace the Python-2-only string.strip /
            # string.split module functions (removed in Python 3).
            line = line.strip()
            if not line:
                continue  # skip blank lines
            if line[0] == comment_char[0]:
                continue  # skip comment lines
            # Explicit list keeps the row a list on Python 3 too,
            # where map() is lazy.
            row = [atox(token) for token in line.split()]
            if return_numpy:
                row = numpy.array(row)
            rows.append(row)
    return rows
def read_rows(filename, atox=float, comment_char=None, return_numpy=None):
    """Read rows of whitespace-separated values from filename.

    Skips blank lines and lines whose first character is comment_char;
    converts every remaining field with atox.  When return_numpy is
    true each row is a numpy array, otherwise a plain list.  None-valued
    options fall back to the 'read_rows' config section.
    """
    if comment_char is None:
        comment_char = config.value('read_rows', 'comment_char')
    if return_numpy is None:
        return_numpy = config.value('read_rows', 'return_numpy')
    if return_numpy:
        import numpy
    result = []
    # `with` closes the file even on error (the open/close pair leaked
    # it on exceptions); str methods replace the Python-2-only `string`
    # module helpers, and the list comprehension keeps each row a real
    # list on Python 3, where map() is lazy.
    with open(filename) as handle:
        for raw in handle:
            stripped = raw.strip()
            if not stripped:
                continue
            if stripped[0] == comment_char[0]:
                continue
            parsed = [atox(field) for field in stripped.split()]
            if return_numpy:
                parsed = numpy.array(parsed)
            result.append(parsed)
    return result
def set_resource(gen_timer, config, resource, inst, value):
    """Write `value` into the given resource/instance ("e182") and wait
    for the LWM2M OK response; retried on timeout."""
    obj = resource.obj_id
    res = resource.res_id
    address = binascii.hexlify(bytearray(
        [obj//256, obj%256, inst, res//256, res%256]))
    write_command = emb6.command("e182" + address + any_data(resource, value))
    return protocol.OnTimeoutRetry(
        protocol.Protocol([write_command, expect_lwm2m_ok]),
        gen_timer(), config.value('timeout'))
def read_column(num, filename, atox=float,
                comment_char=None, return_numpy=None):
    """Read column `num` (0-based) from a whitespace-separated file.

    Blank lines are skipped and lines starting with comment_char are
    ignored; the num-th field of every remaining line is converted with
    atox.  None-valued options fall back to the 'read_column' config
    section.  Returns a list, or a numpy array when return_numpy is true.
    """
    if comment_char is None:
        comment_char = config.value('read_column', 'comment_char')
    if return_numpy is None:
        return_numpy = config.value('read_column', 'return_numpy')
    values = []
    # `with` closes the file even on errors (open/close leaked on
    # exceptions); str methods replace the Python-2-only `string`
    # module helpers removed in Python 3.
    with open(filename) as f:
        for line in f:
            line = line.strip()
            if not line:
                continue  # get rid of ''
            if line[0] == comment_char[0]:
                continue
            values.append(line.split()[num])
    # Explicit list keeps the return type a list on Python 3, where
    # map() is lazy.
    values = [atox(v) for v in values]
    if return_numpy:
        import numpy
        return numpy.array(values)
    return values
def read_column(num, filename, atox=float,
                comment_char=None, return_numpy=None):
    """Extract one column of values from a whitespace-separated file.

    `num` is the 0-based column index.  Blank and comment lines are
    skipped; each remaining field is converted with atox.  Options left
    as None come from the 'read_column' config section; return_numpy
    selects a numpy array instead of a list.
    """
    if comment_char is None:
        comment_char = config.value('read_column', 'comment_char')
    if return_numpy is None:
        return_numpy = config.value('read_column', 'return_numpy')
    column = []
    # `with` guarantees the handle is released on errors (the previous
    # open()/close() pair leaked it); str methods replace the
    # Python-2-only `string` module functions, and the comprehension
    # returns a real list on Python 3, where map() is lazy.
    with open(filename) as handle:
        for raw in handle:
            stripped = raw.strip()
            if not stripped:
                continue  # get rid of ''
            if stripped[0] == comment_char[0]:
                continue
            column.append(stripped.split()[num])
    column = [atox(entry) for entry in column]
    if return_numpy:
        import numpy
        return numpy.array(column)
    return column
def getSettingsFromDe(self, link='', just_check=False):
    """Read proxy configuration from KDE's kioslaverc.

    Returns True when a proxy type > 0 is configured; unless just_check
    is set, also copies host/port values into the settings widgets.
    """
    kioslaverc = path.join(Pds.config_path, 'share/config/kioslaverc')
    parsed_conf = Pds.parse(kioslaverc, force=True)
    mode = parsed_conf.value('Proxy Settings/ProxyType').toString()
    if mode and int(mode) > 0:
        if just_check:
            return True
        http = parse_proxy(parsed_conf.value('Proxy Settings/httpProxy').toString())
        self.settings.httpProxy.setText(http['host'])
        self.settings.httpProxyPort.setValue(int(http['port']))
        https = parse_proxy(parsed_conf.value('Proxy Settings/httpsProxy').toString())
        self.settings.httpsProxy.setText(https['host'])
        self.settings.httpsProxyPort.setValue(int(https['port']))
        ftp = parse_proxy(parsed_conf.value('Proxy Settings/ftpProxy').toString())
        self.settings.ftpProxy.setText(ftp['host'])
        self.settings.ftpProxyPort.setValue(int(ftp['port']))
        return True
    return False
def update_instance(gen_timer, config, obj, inst, resources_and_values):
    """Write several resource values of one object instance at once
    ("e192"), then wait for the LWM2M OK response.

    Preconditions:
    - resources_and_values is a list of pairs (resource, value).
    - each value matches the type of the respective resource.
    - all resources have obj as obj_id.
    """
    chunks = []
    for resource, value in resources_and_values:
        res = resource.res_id
        value_data = any_data(resource, value)
        # Per resource: res id (2 bytes), value length in bytes
        # (hex string, so len/2), then the encoded value.
        chunks.append(binascii.hexlify(bytearray(
            [res//256, res%256, len(value_data)//2])))
        chunks.append(value_data)
    header = binascii.hexlify(bytearray(
        [obj//256, obj%256, inst, len(resources_and_values)]))
    return protocol.OnTimeoutRetry(
        protocol.Protocol([
            emb6.command("e192" + header + "".join(chunks)),
            expect_lwm2m_ok,
        ]),
        gen_timer(), config.value('timeout'))
def host():
    """Return the configured Elasticsearch host."""
    key_path = ['elasticsearch', 'host']
    return config.value(key_path)
def port():
    """Return the configured Elasticsearch port."""
    key_path = ['elasticsearch', 'port']
    return config.value(key_path)
#Parametros: api_tw + tag + lang_ml + index + type #Ej: python src/stream.py alpha obama en twitter stream if (len(sys.argv) == 6): api = sys.argv[1] tag = sys.argv[2] lang = sys.argv[3] index_es = sys.argv[4] type_es = sys.argv[5] else: raise Exception( 'Error en cantidad de parametros ingresados!!!: api+tag+lang+index+type' ) access_token = config.value(['twitter', api, 'access_token']) access_token_secret = config.value(['twitter', api, 'access_token_secret']) consumer_key = config.value(['twitter', api, 'consumer_key']) consumer_secret = config.value(['twitter', api, 'consumer_secret']) ext = 'ext' dune = 'dune' api = ext ml_token = config.value(['monkeylearn', api, lang, 'token']) ml_module = config.value(['monkeylearn', api, lang, 'module']) ml = MonkeyLearn(ml_token) ml_module_id = ml_module class StreamHTMLParser(HTMLParser):
import engine
import config
import sys
#import logging
import time
import threading
from datetime import datetime

# Parameters: api_tw
# Example: python src/scrapy.py alpha

#logging.basicConfig(filename='indexer.log',level=logging.INFO)

INDEX = 'scrapy'
# NOTE(review): `social` (used below) is not imported in this chunk —
# confirm it is brought into scope elsewhere in the file.
accounts = config.value(['twitter','accounts'])
timestamp_start = str(datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S'))
start = 'Start: ' + timestamp_start
print start
if (len(sys.argv)==2):
    api_param = sys.argv[1]
else:
    raise Exception('Error en cantidad de parametros ingresados!!!')
api = social.api(api_param)
# Index every configured account.
for account in accounts:
    print 'Indexing ' + account
    user = social.GetUser(api,account)
def accessTokenSecret(app):
    """Return the access-token secret configured for twitter app `app`."""
    lookup_key = ['twitter', app, 'access_token_secret']
    return config.value(lookup_key)
import sys
from pymongo import MongoClient
import datetime

# NOTE(review): `twitter` and `config` are used below but not imported in
# this chunk — confirm they are in scope (imported elsewhere in the file).

client = MongoClient('localhost', 27017)
db = client.local
collection = db.twitter_monitor  # previously-seen followers

if (len(sys.argv) == 3):
    app = sys.argv[1]       # twitter app/config section name
    username = sys.argv[2]  # account name parameter
else:
    raise Exception('Error. Parameters must be 3.')

api = twitter.Api(
    consumer_key=config.value(['twitter', app, 'consumer_key']),
    consumer_secret=config.value(['twitter', app, 'consumer_secret']),
    access_token_key=config.value(['twitter', app, 'access_token']),
    access_token_secret=config.value(['twitter', app, 'access_token_secret']))

followers_in_twitter = api.GetFollowers(screen_name='margostino')
followers_in_database = collection.find()
new_followers = []
unfollows = []
current_followers = []
# Compare the live follower list against the stored one.
for follower in followers_in_twitter:
    # NOTE(review): `!= None` should be `is not None`.
    exists = collection.find_one({"id": follower.id}) != None
    current_followers.append(follower.id)
    if not exists:
def accessToken(app):
    """Return the access token configured for twitter app `app`."""
    lookup_key = ['twitter', app, 'access_token']
    return config.value(lookup_key)
def consumerKey(app):
    """Return the consumer key configured for twitter app `app`."""
    lookup_key = ['twitter', app, 'consumer_key']
    return config.value(lookup_key)
def consumerSecret(app):
    """Return the consumer secret configured for twitter app `app`."""
    lookup_key = ['twitter', app, 'consumer_secret']
    return config.value(lookup_key)
from monkeylearn import MonkeyLearn #Parametros: api_tw + tag + lang_ml + index + type #Ej: python src/stream.py alpha obama en twitter stream if (len(sys.argv)==6): api = sys.argv[1] tag = sys.argv[2] lang = sys.argv[3] index_es = sys.argv[4] type_es = sys.argv[5] else: raise Exception('Error en cantidad de parametros ingresados!!!: api+tag+lang+index+type') access_token = config.value(['twitter', api, 'access_token']) access_token_secret = config.value(['twitter', api, 'access_token_secret']) consumer_key = config.value(['twitter', api, 'consumer_key']) consumer_secret = config.value(['twitter', api, 'consumer_secret']) ext = 'ext' dune = 'dune' api = ext ml_token = config.value(['monkeylearn', api, lang, 'token']) ml_module = config.value(['monkeylearn', api, lang, 'module']) ml = MonkeyLearn(ml_token) ml_module_id = ml_module class StreamHTMLParser(HTMLParser): def handle_data(self, data):
import engine
import config
import sys
#import logging
import time
import threading
from datetime import datetime

# Parameters: api_tw
# Example: python src/scrapy.py alpha

#logging.basicConfig(filename='indexer.log',level=logging.INFO)

INDEX = 'scrapy'
# NOTE(review): `social` (used below) is not imported in this chunk —
# confirm it is brought into scope elsewhere in the file.
accounts = config.value(['twitter','accounts'])
timestamp_start = str(datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S'))
start = 'Start: ' + timestamp_start
print(start)
if (len(sys.argv)==2):
    api_param = sys.argv[1]
else:
    raise Exception('Error en cantidad de parametros ingresados!!!')
api = social.api(api_param)
# Index every configured account.
for account in accounts:
    print('Indexing ' + account)
    user = social.GetUser(api,account)
def username():
    """Return the configured twitter username."""
    section, key = 'twitter', 'username'
    return config.value(section, key)