def write_body(file, limit):
    canceled = False
    todo = done = 0
    filename = os.path.join(os.path.dirname(__file__), "whatsnew.dat")
    for feed in Feed.iter(filename):
        todo += 1
        try:
            ok, result = Feed.read(feed, limit)
            if not ok:
                Qtrac.report(result, True)
            elif result is not None:
                Qtrac.report("read {} at {}".format(feed.title, feed.url))
                for item in result:
                    file.write(item)
                done += 1
        except KeyboardInterrupt:
            Qtrac.report("canceling...")
            canceled = True
            break
    return todo, done, canceled
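Feed.iter() and Qtrac.report() come from the surrounding project and are not
shown on this page. As a rough sketch, Feed.iter() presumably parses
whatsnew.dat into Feed(title, url) namedtuples; the version below assumes
title/URL pairs on alternating lines, which may differ from the real parser.

import collections

Feed = collections.namedtuple("Feed", "title url")

def iter(filename):  # hypothetical stand-in for Feed.iter()
    with open(filename, "rt", encoding="utf-8") as file:
        title = None
        for line in file:
            line = line.strip()
            if not line or line.startswith("#"):
                continue  # skip blank lines and comments
            if title is None:
                title = line
            else:
                yield Feed(title, line)
                title = None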
Example #2
def worker(limit, jobs, results):
    while True:
        try:
            feed = jobs.get()
            ok, result = Feed.read(feed, limit)
            if not ok:
                Qtrac.report(result, True)
            elif result is not None:
                Qtrac.report("read {}".format(result[0][4:-6]))
                results.put(result)
        finally:
            jobs.task_done()
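The wiring of worker() is not shown here. A minimal sketch, assuming plain
queue.Queue instances and daemon threads (the original may use
multiprocessing queues instead):

import queue
import threading

def start_workers(limit, concurrency):
    jobs = queue.Queue()     # feeds waiting to be read
    results = queue.Queue()  # output produced by the workers
    for _ in range(concurrency):
        thread = threading.Thread(target=worker,  # worker() as defined above
                                  args=(limit, jobs, results))
        thread.daemon = True  # workers must not keep the process alive
        thread.start()
    return jobs, results

# Typical use, together with add_jobs() from the later examples:
#   jobs, results = start_workers(limit, 4)
#   todo = add_jobs(filename, jobs)
#   jobs.join()  # blocks until every queued feed has been task_done()'d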
Example #4
def reader(receiver, sink, limit, me):
    while True:
        feed, who = (yield)
        if who == me:
            ok, result = Feed.read(feed, limit)
            if not ok:
                Qtrac.report(result, True)
                result = None
            else:
                Qtrac.report("read {} at {}".format(feed.title, feed.url))
            sink.send(result)
        elif receiver is not None:
            receiver.send((feed, who))
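reader() is a coroutine, so it must be primed with one next() call before it
can accept send(); the original presumably applies a decorator equivalent to
this conventional sketch:

import functools

def coroutine(function):
    @functools.wraps(function)
    def wrapper(*args, **kwargs):
        generator = function(*args, **kwargs)
        next(generator)  # advance to the first (yield) so send() works
        return generator
    return wrapper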
Example #6

    def setUp(self):
        CD = ContractDeployer.Contract_Deployer("http://127.0.0.1:8545")
        w3 = CD.web3Connection()
        self.w3 = w3
        self.NULL_ENCODING = b'\x00' * 32

        w3.eth.defaultAccount = w3.eth.accounts[0]
        self.account = w3.eth.defaultAccount

        w3.eth.defaultAccount = w3.eth.accounts[1]
        self.account1 = w3.eth.defaultAccount

        w3.eth.defaultAccount = w3.eth.accounts[2]
        self.account2 = w3.eth.defaultAccount

        w3.eth.defaultAccount = w3.eth.accounts[3]
        self.account3 = w3.eth.defaultAccount

        w3.eth.defaultAccount = w3.eth.accounts[4]
        self.account4 = w3.eth.defaultAccount

        w3.eth.defaultAccount = w3.eth.accounts[5]
        self.account5 = w3.eth.defaultAccount

        self.metadata = b'Test'

        #First deploy the Instance Registry Contract.
        self.Erasure_Posts_Contract = CD.deployContract(
            "../contracts/Erasure_Posts.sol", "Erasure_Posts",
            "Erasure_Posts.sol", self.account)
        self.Erasure_Posts = Erasure_Posts.Erasure_Posts(
            self.Erasure_Posts_Contract, self.account)
        self.Registry = self.Erasure_Posts.Registry()

        #Deploy the contract template that needs to be used.
        self.Feed_contract = CD.deployContract("../contracts/Feed.sol", "Feed",
                                               "Feed.sol", self.account)
        self.Feed = Feed.Feed(self.Feed_contract, self.account)

        # Use the constructor-aware deployer to deploy the factory contract
        # manually with its constructor parameters.
        self.Feed_Factory_Contract = CD.deployContractConstructor(
            "../contracts/Feed_Factory.sol", "Feed_Factory",
            "Feed_Factory.sol", self.account,
            self.Erasure_Posts_Contract.address, self.Feed_contract.address)

        # Wrap the factory contract and register it with the instance registry.
        self.Feed_Factory = Feed_Factory.Feed_Factory(
            self.Feed_Factory_Contract, self.account)
        self.Factory = self.Feed_Factory.Factory()
        self.Registry.addFactory(self.Feed_Factory_Contract.address,
                                 self.metadata)
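Contract_Deployer's internals are not shown; its web3Connection() presumably
amounts to something like the sketch below, using web3.py's older camelCase
API (which the defaultAccount assignments above imply):

from web3 import Web3

def web3Connection(url="http://127.0.0.1:8545"):  # hypothetical equivalent
    w3 = Web3(Web3.HTTPProvider(url))
    assert w3.isConnected(), "no Ethereum node listening at " + url
    return w3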
Example #7
def testNext(self):
    uq = UpdateQueue()
    f1 = Feed('feed1')
    f2 = Feed('feed2')
    f3 = Feed('feed3')
    f1.lastUpdated = 0
    f2.lastUpdated = time.time() - 5
    f3.lastUpdated = time.time()
    f2.updateInterval = 10
    f3.updateInterval = 6
    uq.add(f1)
    uq.add(f2)
    uq.add(f3)
    self.assertEqual(uq.next(), f1)
    self.assertEqual(uq.next(), f2)
    self.assertTrue(time.time() > (f2.lastUpdated + f2.updateInterval))
    self.assertEqual(uq.next(), f3)
    self.assertTrue(time.time() > (f3.lastUpdated + f3.updateInterval))
    self.assertTrue((time.time() - (f3.lastUpdated + f3.updateInterval)) < 1)
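UpdateQueue itself is not shown. A plausible implementation that satisfies
this test and testAdd below: a heap ordered by due time (lastUpdated +
updateInterval), with next() sleeping until the earliest feed is due and
duplicate feed names ignored. A sketch only; the attribute names are
assumptions.

import heapq
import time

class UpdateQueue:

    def __init__(self):
        self._heap = []
        self._names = set()
        self._count = 0  # insertion counter: tie-breaker so feeds never compare

    def add(self, feed):
        if feed.name in self._names:  # assumes Feed exposes its name as .name
            return
        self._names.add(feed.name)
        due = feed.lastUpdated + getattr(feed, 'updateInterval', 0)
        heapq.heappush(self._heap, (due, self._count, feed))
        self._count += 1

    def next(self):
        due, _, feed = heapq.heappop(self._heap)
        self._names.discard(feed.name)
        delay = due - time.time()
        if delay > 0:
            time.sleep(delay)  # block until this feed is due for an update
        return feed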
Example #8
def main():
    limit, concurrency = handle_commandline()
    Qtrac.report("starting...")
    filename = os.path.join(os.path.dirname(__file__), "whatsnew.dat")
    futures = set()
    with concurrent.futures.ProcessPoolExecutor(
            max_workers=concurrency) as executor:
        for feed in Feed.iter(filename):
            future = executor.submit(Feed.read, feed, limit)
            futures.add(future)
        done, filename, canceled = process(futures)
        if canceled:
            executor.shutdown()
    Qtrac.report("read {}/{} feeds using {} processes{}".format(
        done, len(futures), concurrency, " [canceled]" if canceled else ""))
    print()
    if not canceled:
        webbrowser.open(filename)
Example #9

def main():
    limit, concurrency = handle_commandline()
    Qtrac.report("starting...")
    filename = os.path.join(os.path.dirname(__file__), "whatsnew.dat")
    futures = set()
    with concurrent.futures.ThreadPoolExecutor(
            max_workers=concurrency) as executor:
        for feed in Feed.iter(filename):
            future = executor.submit(Feed.read, feed, limit)
            futures.add(future)
        done, filename, canceled = process(futures)
        if canceled:
            executor.shutdown()
    Qtrac.report("read {}/{} feeds using {} threads{}".format(done,
            len(futures), concurrency, " [canceled]" if canceled else ""))
    print()
    if not canceled:
        webbrowser.open(filename)
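Both versions of main() delegate to a process() helper that is not shown.
A plausible sketch using concurrent.futures.as_completed(), shaped to match
the call sites above (the body is an assumption, not the original code):

import concurrent.futures
import os
import tempfile

def process(futures):
    canceled = False
    done = 0
    filename = os.path.join(tempfile.gettempdir(), "whatsnew.html")
    with open(filename, "wt", encoding="utf-8") as file:
        try:
            for future in concurrent.futures.as_completed(futures):
                err = future.exception()
                if err is not None:
                    raise err  # a worker failed unexpectedly
                ok, result = future.result()
                if ok and result is not None:
                    for item in result:
                        file.write(item)
                    done += 1
        except KeyboardInterrupt:
            canceled = True
            for future in futures:
                future.cancel()  # best effort: cancel whatever hasn't started
    return done, filename, canceled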
Example #10
def main():
    limit, concurrency = handle_commandline()
    Qtrac.report("starting...")
    datafile = os.path.join(os.path.dirname(__file__), "whatsnew.dat")
    filename = os.path.join(tempfile.gettempdir(), "whatsnew.html")
    canceled = False
    with open(filename, "wt", encoding="utf-8") as file:
        write_header(file)
        pipeline = create_pipeline(limit, concurrency, file)
        try:
            for i, feed in enumerate(Feed.iter(datafile)):
                pipeline.send((feed, i % concurrency))
        except KeyboardInterrupt:
            Qtrac.report("canceling...")
            canceled = True
        write_footer(file, results.ok, results.todo, canceled, concurrency)
    if not canceled:
        webbrowser.open(filename)
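create_pipeline() is not shown either. A sketch of how the reader coroutines
from Example #4 might be chained, using the priming decorator sketched there;
results_sink is a hypothetical name, and the results counter consulted by
write_footer() is omitted for brevity:

@coroutine
def results_sink(file):
    while True:
        result = (yield)
        if result is not None:
            for item in result:
                file.write(item)

def create_pipeline(limit, concurrency, file):
    pipeline = None
    sink = results_sink(file)
    for who in range(concurrency):
        # each new reader handles feeds addressed to `who` and forwards
        # the rest down the chain built so far
        pipeline = coroutine(reader)(pipeline, sink, limit, who)
    return pipeline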
Example #12
def main():
    limit, concurrency = handle_commandline()
    Qtrac.report("starting...")
    filename = os.path.join(os.path.dirname(__file__), "whatsnew.dat")
    futures = set()  # the set of Future instances
    with concurrent.futures.ThreadPoolExecutor(
            max_workers=concurrency) as executor:
        for feed in Feed.iter(filename):  # a generator of Feed(title, url)
            # Feed.read is the callable; feed and limit are its arguments
            future = executor.submit(Feed.read, feed, limit)
            futures.add(future)  # collect each Future in the set
        done, filename, canceled = process(futures)
        if canceled:
            executor.shutdown()
    Qtrac.report("read {}/{} feeds using {} threads{}".format(
        done, len(futures), concurrency, " [canceled]" if canceled else ""))
    print()
    if not canceled:
        webbrowser.open(filename)
Example #13

# Python 2 code (httplib, M2Crypto); the statements below run when the class
# body is executed.
class Login(object):

    def print_json(j, prefix=''):
        for key, value in j.items():
            if isinstance(value, dict):
                print '%s%s' % (prefix, key)
            else:
                print '%s%s:%s' % (prefix, key, value)

    username = '******'
    password = '******'
    service = 'NEXTAPI'
    URL = 'api.test.nordnet.se'
    API_VERSION = '2'

    timestamp = str(int(round(time.time() * 1000)))  # milliseconds since the epoch
    buf = base64.b64encode(username) + ':' + base64.b64encode(password) + ':' + base64.b64encode(timestamp)
    rsa = RSA.load_pub_key('NEXTAPI_TEST_public.pem')
    encrypted_hash = rsa.public_encrypt(buf, RSA.pkcs1_padding)
    hash = base64.b64encode(encrypted_hash)

    headers = {"Accept": "application/json"}
    conn = httplib.HTTPSConnection(URL)



    # GET server status
    conn.request('GET', '/next/' + API_VERSION + '/', '', headers)
    r = conn.getresponse()
    response = r.read()
    j = json.loads(response)
    print_json(j)

    # POST login
    params = urllib.urlencode({'service': 'NEXTAPI', 'auth': hash})
    conn.request('POST', '/next/' + API_VERSION + '/login', params, headers)
    r = conn.getresponse()
    response = r.read()
    j = json.loads(response)
    print_json(j)
    session_key = j["session_key"]
    Feed.GetRequests().create_market_list(session_key, headers)
    Feed.GetRequests().instrument_name(session_key, headers, 'FINGerprint')


#################################################################################
    # Create SSL-wrapped socket
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    ssl_socket = ssl.wrap_socket(s)
    # Connect to socket
    ssl_socket.connect(("pub.api.test.nordnet.se", 443))
    print repr(ssl_socket.getpeername())  # call it: print the peer address, not the bound method
    print ssl_socket.cipher()
    # Send session key
    cmd = {"cmd": "login", "args": {"session_key": session_key, "service": "NEXTAPI"}}
    num_bytes = ssl_socket.write(json.dumps(cmd) + "\n")
    print "Session key sent (%d bytes)" % num_bytes
    # Get account information

    # Subscribe to a stock
    market = 11
    instruments = ["101", "4870"]  # ERIC B and FING B

    for instrument in instruments:
        cmd = {"cmd": "subscribe", "args": {"t": "price", "m": market, "i": instrument}}
        num_bytes = ssl_socket.send(json.dumps(cmd) + "\n")
        print("Subscription request sent for market = %d and instrument = %s (%d bytes)" % (market, instrument, num_bytes))
    # Read stream
    print "Reading stream"
    for i in range(10):
        output = ssl_socket.read(1024)
        print (output)

    time.sleep(1)
    print ("-")
    output = ssl_socket.recv()
    print (output)

    # print "Closing socket connection..."
    del ssl_socket
    s.close()
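The auth-hash construction above is tied to Python 2 and M2Crypto. A Python 3
sketch of the same steps using the cryptography package (the function name
and package choice are assumptions, not Nordnet's API):

import base64
import time

from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import padding

def build_auth_hash(username, password, pem_path="NEXTAPI_TEST_public.pem"):
    timestamp = str(int(round(time.time() * 1000))).encode()
    parts = (username.encode(), password.encode(), timestamp)
    buf = b":".join(base64.b64encode(part) for part in parts)
    with open(pem_path, "rb") as f:
        public_key = serialization.load_pem_public_key(f.read())
    encrypted = public_key.encrypt(buf, padding.PKCS1v15())
    return base64.b64encode(encrypted)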
Example #14
def get_feeds(self, load_entries):
    return Feed.get_user_feeds(self, load_entries)
Example #15
def add_jobs(filename, jobs):
    for todo, feed in enumerate(Feed.iter(filename), start=1):
        jobs.put(feed)
    return todo
Example #17
#!/usr/bin/python

import Feed

for f in Feed.getNewFeeds():
    f.reload()    
    Feed.printFeed(f)
    f.save()

for f in Feed.getActiveFeeds():
    f.reload()
    Feed.printFeed(f)
    f.save()

# !!! Once a day or so we should poll inactive feeds to make sure
# nothing has changed
for f in Feed.getInactiveFeeds():
    f.reload()
    Feed.printFeed(f)
    f.save()
Example #18
def testAdd(self):
    uq = UpdateQueue()
    f1 = Feed('feed2')
    f2 = Feed('feed4')
    f3 = Feed('feed5')
    f4 = Feed('feed1')
    f5 = Feed('feed3')
    f1.lastUpdated = 20
    f2.lastUpdated = 40
    f3.lastUpdated = 50
    f4.lastUpdated = 10
    f5.lastUpdated = 30
    f6 = Feed('feed1')  # same name as f4; never returned by next()
    f5.updateInterval = 30 * 60
    uq.add(f1)
    uq.add(f2)
    uq.add(f3)
    uq.add(f4)
    uq.add(f5)
    uq.add(f6)
    self.assertEqual(uq.next(), f4)
    self.assertEqual(uq.next(), f1)
    self.assertEqual(uq.next(), f2)
    self.assertEqual(uq.next(), f3)
    self.assertEqual(uq.next(), f5)
Example #19
#!/usr/bin/python

import Feed

feeds = Feed.get_all_feeds()
for feed in feeds:
    feed.reload()
    feed.save()
Example #20
def add_jobs(filename, jobs):
    # Feed.iter() yields a Feed instance (name, rss_url)
    for todo, feed in enumerate(Feed.iter(filename), start=1):
        jobs.put(feed)  # add the feed to the input queue
    return todo  # number of feeds added to jobs queue
Example #22
# The start of this excerpt is missing; a plausible reconstruction, assuming
# elasticsearch-py and a single 'feed-entry' document type:
#   from elasticsearch import Elasticsearch
#   es = Elasticsearch()
feed_entry_mapping = {
    'feed-entry': {
        'properties': {
            'entryID': {
                'type': 'string'
            },
            'feedID': {
                'type': 'string'
            },
            'link': {
                'type': 'string'
            },
            'mongoID': {
                'type': 'string'
            },
            'publishedDate': {
                'type': 'date'
            },
            'summary': {
                'type': 'string'
            },
            'title': {
                'type': 'string'
            }
        }
    }
}

es.indices.create(index='reader', ignore=400, body=feed_entry_mapping)

for f in Feed.get_all_feeds():
    f.loadEntries()
    f.save()
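Once the entries are indexed they can be queried back; a quick sketch using
the same elasticsearch-py client, with field names taken from the mapping
above (the query text is illustrative):

hits = es.search(index='reader',
                 body={'query': {'match': {'title': 'python'}}})
for hit in hits['hits']['hits']:
    print(hit['_source']['title'], hit['_source']['link'])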
Example #23
def SyncFeed():
    feedInstance = Feed()
    feedInstance.feedParser()
Example #25
def add_jobs(filename, jobs):
    for todo, feed in enumerate(Feed.iter(filename), start=1):  # todo counts 1, 2, 3, ...
        jobs.put(feed)  # put each Feed(title, url) on the jobs queue
    return todo  # the number of feeds queued