Example #1
0
def _validate_case(parsed_body):
    """Assert that a parsed case payload is a well-formed CENSUS case."""
    case = parsed_body['payload']['collectionCase']

    # Survey must always be CENSUS.
    test_helper.assertEqual('CENSUS', case['survey'])

    # Case references are fixed-width and Luhn check-digited.
    case_ref = case['caseRef']
    test_helper.assertEqual(10, len(case_ref))

    # Both timestamps must parse as RFC 3339 datetimes (raises otherwise).
    parse_datetime(case['createdDateTime'])
    parse_datetime(case['lastUpdated'])

    test_helper.assertTrue(luhn.verify(case_ref))
	def _get_date(self,typeofdate):
		"""Return the next occurrence datetime for this event.

		typeofdate: key to read from the event dict (e.g. 'start' or 'end').
		Recurring events are expanded via dateutil rrules and the first
		occurrence on or after "now" is returned; non-recurring events are
		parsed directly.  Returns None when no date can be determined.
		"""
		daybefore = datetime.datetime.now(LocalTZ())
		if 'recurrence' in self.keys():
			rset = dateutil.rrule.rruleset()
			for el in self['recurrence']:
				# Only handle rules that do not already embed a DTSTART.
				if el.find('DTSTART') == -1:
					# Date-only events get a midnight time plus the local UTC offset.
					if 'date' in self[typeofdate].keys():
						dtstart = self[typeofdate]['date']+'T00:00:00'+ get_utc_offset(daybefore)
						print(self[typeofdate]['date'])
					elif 'dateTime' in self[typeofdate].keys():
						dtstart = self[typeofdate]['dateTime']
						print(self[typeofdate]['dateTime'])
					print(1,dtstart)
					dtstart = rfc3339.parse_datetime(dtstart)
					print(2,dtstart)
					# Normalise a date-only (8 chars) or offset-less (17 chars)
					# UNTIL clause so dateutil can parse the rule.
					if el.find('UNTIL') != -1:
						elements = el.split(';')
						ans = ''
						for element in elements:
							if element.startswith('UNTIL='):
								s,e=element.split("=")
								if len(e) == 8:
									e += 'T000000'+ get_utc_offset(daybefore).replace(':','')
								elif len(e) == 17:
									e += get_utc_offset(daybefore)
								element = s+'='+e
							ans += element+';'
						if ans.endswith(';'):
							ans = ans[:-1]
						el = ans
						print(3,el)
					# Prepend a DTSTART line built from the parsed start time.
					el = 'DTSTART:%s%s\n'%(dtstart.strftime('%Y%m%dT%H%M'),get_utc_offset(daybefore))+el
					print(el)
					try:
						rrule = dateutil.rrule.rrulestr(el)
						rset.rrule(rrule)
					except Exception as e:
						print(e)
				# First occurrence at or after "now"; errors fall through to None.
				try:
					ans = rset.after(daybefore,inc=True)
				except Exception as e:
					print(e)
					ans = None
				if ans is not None:
					return ans
		# Non-recurring (or recurrence yielded nothing): parse the field directly.
		if typeofdate in self.keys():
			if 'date' in self[typeofdate].keys():
				dtstart = self[typeofdate]['date']+'T00:00:00'+ get_utc_offset(daybefore)
				return rfc3339.parse_datetime(dtstart)
			elif 'dateTime' in self[typeofdate].keys():
				dtstart = self[typeofdate]['dateTime']
				return rfc3339.parse_datetime(dtstart)
		return None
 def _get_date(self,typeofdate):
     """Return the next occurrence datetime for this event.

     typeofdate: key to read from the event dict (e.g. 'start' or 'end').
     Recurring events are expanded via dateutil rrules; otherwise the
     plain date/dateTime field is parsed.  Returns None on failure.
     """
     daybefore = datetime.datetime.now(LocalTZ())
     if 'recurrence' in self.keys():
         rset = dateutil.rrule.rruleset()
         for el in self['recurrence']:
             # Only handle rules that do not already embed a DTSTART.
             if el.find('DTSTART') == -1:
                 # Date-only events get midnight plus the local UTC offset.
                 if 'date' in self[typeofdate].keys():
                     dtstart = self[typeofdate]['date']+'T00:00:00'+ get_utc_offset(daybefore)
                     print(self[typeofdate]['date'])
                 elif 'dateTime' in self[typeofdate].keys():
                     dtstart = self[typeofdate]['dateTime']
                     print(self[typeofdate]['dateTime'])
                 print(1,dtstart)
                 dtstart = rfc3339.parse_datetime(dtstart)
                 print(2,dtstart)
                 # Normalise a date-only (8 chars) or offset-less (17 chars)
                 # UNTIL clause so dateutil can parse the rule.
                 if el.find('UNTIL') != -1:
                     elements = el.split(';')
                     ans = ''
                     for element in elements:
                         if element.startswith('UNTIL='):
                             s,e=element.split("=")
                             if len(e) == 8:
                                 e += 'T000000'+ get_utc_offset(daybefore).replace(':','')
                             elif len(e) == 17:
                                 e += get_utc_offset(daybefore)
                             element = s+'='+e
                         ans += element+';'
                     if ans.endswith(';'):
                         ans = ans[:-1]
                     el = ans
                     print(3,el)
                 # Prepend a DTSTART line built from the parsed start time.
                 el = 'DTSTART:%s%s\n'%(dtstart.strftime('%Y%m%dT%H%M'),get_utc_offset(daybefore))+el
                 print(el)
                 try:
                     rrule = dateutil.rrule.rrulestr(el)
                     rset.rrule(rrule)
                 except Exception as e:
                     print(e)
             # First occurrence at or after "now"; errors fall through to None.
             try:
                 ans = rset.after(daybefore,inc=True)
             except Exception as e:
                 print(e)
                 ans = None
             if ans is not None:
                 return ans
     # Non-recurring (or recurrence yielded nothing): parse the field directly.
     if typeofdate in self.keys():
         if 'date' in self[typeofdate].keys():
             dtstart = self[typeofdate]['date']+'T00:00:00'+ get_utc_offset(daybefore)
             return rfc3339.parse_datetime(dtstart)
         elif 'dateTime' in self[typeofdate].keys():
             dtstart = self[typeofdate]['dateTime']
             return rfc3339.parse_datetime(dtstart)
     return None
def process_comment(entry):
	"""
	Parses an <entry> element from a Blogger-exported XML file into a
	BlogComment named tuple.

	Returns the tuple or None if the comment's parent article cannot
	be identified.
	"""

	x_pubdate = rfc3339.parse_datetime(
		entry.xpath('atom:published', namespaces=namespaces)[0].text)

	x_author = entry.xpath('atom:author', namespaces=namespaces)[0]
	x_uri = x_author.xpath('atom:uri', namespaces=namespaces)[0].text
	x_author = x_author.xpath('atom:name', namespaces=namespaces)[0].text
	x_text = entry.xpath('atom:content', namespaces=namespaces)[0].text

	try:
		x_url_slug = entry.xpath(
			'thr:in-reply-to', namespaces=namespaces)[0].get('href')
	except IndexError:
		stat("no link for comment by %s" % x_author)
		return

	# Fix: raw string — '\.' is an invalid escape sequence in a plain
	# literal (SyntaxWarning on Python 3.12+, error in future versions).
	x_url_slug = re.search(r'[^/]+(?=\.html)', x_url_slug).group(0)

	return BlogComment(
		slug = x_url_slug,
		date = x_pubdate,
		author = x_author,
		uri = x_uri,
		text = x_text)
Example #5
0
def process_comment(entry):
    """
    Parses an <entry> element from a Blogger-exported XML file into a
    BlogComment named tuple.

    Returns the tuple or None if the comment's parent article cannot
    be identified.
    """

    x_pubdate = rfc3339.parse_datetime(
        entry.xpath('atom:published', namespaces=namespaces)[0].text)

    x_author = entry.xpath('atom:author', namespaces=namespaces)[0]
    x_uri = x_author.xpath('atom:uri', namespaces=namespaces)[0].text
    x_author = x_author.xpath('atom:name', namespaces=namespaces)[0].text
    x_text = entry.xpath('atom:content', namespaces=namespaces)[0].text

    try:
        x_url_slug = entry.xpath('thr:in-reply-to',
                                 namespaces=namespaces)[0].get('href')
    except IndexError:
        stat("no link for comment by %s" % x_author)
        return

    # Fix: raw string — '\.' is an invalid escape sequence in a plain
    # literal (SyntaxWarning on Python 3.12+, error in future versions).
    x_url_slug = re.search(r'[^/]+(?=\.html)', x_url_slug).group(0)

    return BlogComment(slug=x_url_slug,
                       date=x_pubdate,
                       author=x_author,
                       uri=x_uri,
                       text=x_text)
Example #6
0
def parse_item(item, is_rss):
    """Parse an RSS <item> or Atom <entry> element into a feed-entry dict.

    item: iterable of child nodes (e.g. an ElementTree element).
    is_rss: True for RSS 2.0 semantics, False for Atom.
    Returns a dict with keys title, link, description, updated_parsed
    (a time tuple or None) and id.
    """
    title = None
    link = None
    description = None
    pub_date = None
    guid = None
    content_tag = "description" if is_rss else "content"
    id_tag = "guid" if is_rss else "id"

    for node in item:
        # Strip the XML namespace prefix (dirty hack, FIXME).
        node.tag = node.tag.split("}")[-1]
        if node.tag == "title":
            title = node.text
        elif node.tag == "link":
            if is_rss:
                link = node.text
            # Fix: dict.has_key() is Python 2 only — AttributeError on 3.x.
            elif "href" in node.attrib:
                link = node.attrib["href"]
        elif node.tag == content_tag:
            description = node.text
        elif is_rss and node.tag == "pubDate":
            pub_date = email.utils.parsedate(node.text)
        elif not is_rss and node.tag == "updated":
            pub_date = rfc3339.parse_datetime(node.text).timetuple()
        elif node.tag == id_tag:
            guid = node.text
        else:
            print("%s element not handled" % node.tag)

    return dict(title=title, link=link, description=description,
                updated_parsed=pub_date, id=guid)
Example #7
0
def list_childs(parentFolderId, metadata):
    """List Drive children of parentFolderId changed since the last export.

    Returns all child folders plus child files modified after the stored
    last_export_time (which is pushed back ~10 years when FORCE_PUBLISH
    is 'FORCE').  Exits the process on any non-200 API response.
    NOTE(review): Python 2 code (print statements, urllib.urlencode).
    """
    # global debug
    url = "https://www.googleapis.com/drive/v3/files?"
    LAST_EXPORT_TIME = rfc3339.parse_datetime(metadata['last_export_time'])
    if FORCE_PUBLISH == 'FORCE':
        # force it to get everything at least 10 years old
        LAST_EXPORT_TIME = LAST_EXPORT_TIME - datetime.timedelta(weeks=52 * 10)
    # Folders are always returned; plain files only when recently modified.
    parameters = {
        'q':
        "('" + parentFolderId +
        "' in parents and mimeType = 'application/vnd.google-apps.folder') or ('"
        + parentFolderId + "' in parents and modifiedTime > '" +
        LAST_EXPORT_TIME.isoformat() + "')",
        'fields':
        'files(id,mimeType,name)'
    }
    headers = {'Authorization': 'Bearer ' + ACCESS_TOKEN}
    r = requests.get(url + urllib.urlencode(parameters), headers=headers)
    if r.status_code != 200:
        # Any non-200 is treated as an auth failure and aborts the program.
        print "url:", r.request.url
        print "headers:", r.request.headers
        print "body:", r.request.body
        print "response:", r.text
        print "auth failed. update access token"
        exit(1)
    else:
        results = json.loads(r.text)
        # process the files returned
        if results['files'] != None:
            # debug += r.text + ' '
            return results['files']
        else:
            return []
    def on_success(self, data):
        """Handle a streaming tick message: compute its epoch timestamp and
        the local-receipt latency, then write timestamp, latency, bid and
        ask to self.reportwriter.

        NOTE(review): uses dict.has_key(), i.e. Python 2 code; the block
        also ends with an unterminated triple-quote and appears truncated
        in this export — confirm against the original source.
        """
        now = datetime.now(pytz.utc)

        #print ("-------------------")
        #print (data)

        """
        if data.has_key("heartbeat"):
            tick_time = data.get("heartbeat").get("time")
        else:
            tick_time = data.get("time")
        """

        # Heartbeat messages carry no price data and are skipped.
        if not data.has_key("heartbeat"):
            #print (data)
            tick_time = data.get("time")
            tick_time = rfc3339.parse_datetime(tick_time)

            #print ( now )
            diff = now - tick_time

            #timestamp = float(time.mktime(tick_time.timetuple()) * 1000 + tick_time.microsecond / 1000)/1000.0
            # Epoch seconds with sub-second precision from the tick time.
            timestamp = int(calendar.timegm(tick_time.utctimetuple())) + tick_time.microsecond/1000 / 1000.0

            diff_timestamp = diff.total_seconds()*1000

            tick_bid = data.get("bid")
            tick_ask = data.get("ask")

            print ( "%0.3f" % (timestamp) )
            print ( "%.3f" % (diff_timestamp) )
            self.reportwriter.writerow(["%0.3f" % timestamp, "%0.3f" % diff_timestamp, tick_bid, tick_ask])

            # NOTE(review): dangling triple-quote below — truncated block.
            """
Example #9
0
    def check_site():
        """Handle the site-check form POST: validate the domain, trigger a
        rescan when the previous scan has expired, then redirect to the
        site's result page."""
        try:
            params = parse_params(
                request.forms.decode(),
                domain=string_param('domain', required=True, strip=True,
                                    min_length=1,
                                    max_length=DOMAIN_MAX_LENGTH),
                no_rescan=boolean_param('no_rescan', default=False,
                                        empty=True, strip=True))
        except ParamParseError:
            return template('gpc_invalid', domain=None)

        domain = normalise_domain(params['domain'])
        if not check_domain(domain):
            return template('gpc_invalid', domain=domain)

        result = es_dao.get(domain)
        if result is not None:
            if params['no_rescan'] or result['status'] == 'pending':
                redirect(f'/sites/{domain}')

            # Non-pending scans should have a scan datetime.
            last_scan = rfc3339.parse_datetime(result['last_scan_dt'])
            # Skip the rescan while the previous scan is still fresh.
            if rfc3339.now() < last_scan + SCAN_TTL:
                if testing_mode:
                    log.info('Would have redirected to existing scan for '
                             '%(domain)s if on prod.', {'domain': domain})
                else:
                    redirect(f'/sites/{domain}')

        r = requests.post(well_known_sites_endpoint,
                          data={'domain': domain, 'rescan': 'true'})
        r.raise_for_status()

        redirect(f'/sites/{domain}')
    def process_bind_param(self, value, dialect):
        """Coerce a bind value into an ISO-8601 string for storage.

        Strings are first parsed as RFC 3339 datetimes so the stored value
        is normalised; falsy values (e.g. None) are stored as None.
        """
        # Fix: isinstance() is the idiomatic type check and also accepts
        # str subclasses, unlike `type(value) is str`.
        if isinstance(value, str):
            value = rfc3339.parse_datetime(value)

        if value:
            return value.isoformat()
        return None
Example #11
0
    def populate(self, episode) -> None:
        """Fill the episode's title, summary and publication date from the
        cached metadata record matching its media URL."""
        meta = self._metadata_by_sound_url[episode.media.url]
        created = rfc3339.parse_datetime(meta['createdAt'])

        episode.title = meta['title']
        episode.summary = meta['lead']
        # Keep whichever publication date is later.
        episode.publication_date = max(created, episode.publication_date)
    def populate(self, episode) -> None:
        """Populate *episode* from the metadata cached for its sound URL."""
        record = self._metadata_by_sound_url[episode.media.url]

        episode.title = record['title']
        episode.summary = record['lead']
        # Never move the publication date backwards.
        episode.publication_date = max(
            rfc3339.parse_datetime(record['createdAt']),
            episode.publication_date)
def ia5_to_datetime(octets):
    """Decode an ASN.1 IA5String and parse it as an RFC 3339 datetime.

    Returns None when the decoded string is empty.
    """
    decoded, _ = decoder.decode(octets, asn1Spec=IA5String())
    text = str(decoded)

    if not text:
        return None
    return rfc3339.parse_datetime(text)
Example #14
0
def ia5_to_datetime(octets):
    """Convert IA5String-encoded octets to a datetime (None if empty)."""
    value = str(decoder.decode(octets, asn1Spec=IA5String())[0])
    return rfc3339.parse_datetime(value) if len(value) else None
Example #15
0
def datetime_validator(value, schema_name=None):
    """Validate that *value* is an RFC 3339 datetime string.

    Raises ValidationError when parsing fails or when the parsed result
    is not a datetime instance.
    """
    msg = make_validation_error(
        value,
        'JSON datetime value. Hint: use datetime.datetime.isoformat(), instead of datetime.datetime',
        schema_name)
    try:
        parsed = rfc3339.parse_datetime(value)
    except Exception as exc:
        raise ValidationError(str(exc), msg)
    if not isinstance(parsed, datetime.datetime):
        raise ValidationError(msg)
def validate_eq_receipt(message_data,
                        log,
                        expected_keys,
                        expected_metadata_keys,
                        date_time_key='timeCreated'):
    """Parse and validate an EQ receipt Pub/Sub message.

    Returns the parsed payload dict, or None (after logging the reason)
    when the message is not JSON, is missing a required key, or carries
    an invalid datetime string under *date_time_key*.
    """
    try:
        payload = json.loads(message_data)

        if 'metadata' not in payload:
            log.error('Pub/Sub Message missing required data',
                      missing_json_key='metadata')
            return None

        # Report only the first missing key, mirroring the loop-and-return
        # behaviour.
        missing = [key for key in expected_keys if key not in payload]
        if missing:
            log.error('Pub/Sub Message missing required data',
                      missing_json_key=missing[0])
            return None

        missing_meta = [key for key in expected_metadata_keys
                        if key not in payload['metadata']]
        if missing_meta:
            log.error('Pub/Sub Message missing required data',
                      missing_json_key=missing_meta[0])
            return None

        # Raises KeyError/ValueError for an absent or malformed datetime.
        parse_datetime(payload[date_time_key]).isoformat()

        return payload
    except (TypeError, json.JSONDecodeError):
        log.error('Pub/Sub Message data not JSON')
        return None
    except KeyError as e:
        log.error('Pub/Sub Message missing required data',
                  missing_json_key=e.args[0])
        return None
    except ValueError:
        log.error('Pub/Sub Message has invalid datetime string')
        return None
Example #17
0
    def parse(k, v):
        """Parse query value *v* (parameter name *k*) into a datetime.

        A bare date is first expanded to the start of that day — or to
        23:59:59 when range_end is set in the enclosing scope — in UTC.
        Raises InvalidParamError when *v* is not a valid datetime.
        """
        try:
            rfc3339.parse_date(v)
        except ValueError:
            # Not a bare date; try it as a full datetime below.
            pass
        else:
            v = v + ('T23:59:59Z' if range_end else 'T00:00:00Z')

        try:
            return rfc3339.parse_datetime(v)
        except ValueError:
            raise InvalidParamError(k, v, 'Must be a valid datetime')
Example #18
0
 def issue_lister_cb(self):
     """Post issues updated since the last check to the main channel,
     one line per issue (title, author, link), advancing
     self.last_issue_update to the newest issue seen."""
     if self.last_issue_update is None:
         self.last_issue_update = rfc3339.now()

     feed = IssueUpdateFeed().get_issues('dolphin-emu')
     if feed is None:
         log.msg("Unable to get feed")
         return

     for entry in reversed(feed.entry):
         updated = rfc3339.parse_datetime(entry.updated.text)
         if updated <= self.last_issue_update:
             continue
         self.last_issue_update = updated
         line = u"%s %s %s" % (entry.title.text,
                               color_f_green + entry.author[0].name.text +
                               color_normal,
                               entry.link[0].href)
         self.msg(self.factory.channel, line.encode("utf-8"))
Example #19
0
def _build_hist_stage_timings(hist, timings=None):
    """Walk a history chain and collect (service, end-datetime) pairs.

    Recursively follows the most recent referenced history entry and
    returns the accumulated stage timings ordered oldest-first.
    """
    if timings is None:
        timings = []

    entry = (hist['svc'], rfc3339.parse_datetime(hist['dt']))
    timings = [entry] + timings

    hist_refs = [ref for ref in hist['refs'] if ref.get('hist')]
    if not hist_refs:
        return timings

    # Most recent referenced entry (last after a stable sort by 'dt').
    newest = sorted(hist_refs, key=lambda r: r['hist']['dt'])[-1]['hist']
    return _build_hist_stage_timings(newest, timings=timings)
Example #20
0
 def issue_lister_cb(self):
     """Announce recently-updated issues in the main channel, without
     details, remembering the newest update time seen."""
     if self.last_issue_update is None:
         # First run: only report issues newer than "now".
         self.last_issue_update = rfc3339.now()
     feed = IssueUpdateFeed().get_issues('dolphin-emu')
     if feed is None:
         log.msg("Unable to get feed")
         return
     for issue in reversed(feed.entry):
         when = rfc3339.parse_datetime(issue.updated.text)
         if when > self.last_issue_update:
             self.last_issue_update = when
             author = (color_f_green + issue.author[0].name.text +
                       color_normal)
             text = u"%s %s %s" % (issue.title.text, author,
                                   issue.link[0].href)
             self.msg(self.factory.channel, text.encode("utf-8"))
Example #21
0
def process_post(entry):
    """
    Parses an <entry> element from a Blogger-exported XML file into a
    BlogPost named tuple.

    Returns a dictionary item with the url slug as the key, or
    None if this entry is marked as a draft, or has no title.
    """

    if entry.xpath('app:control', namespaces=namespaces):
        stat('draft found!')
        return

    x_title = entry.xpath('atom:title[@type="text"]',
                          namespaces=namespaces)[0].text
    if x_title is None:
        return

    x_pubdate = rfc3339.parse_datetime(
        entry.xpath('atom:published', namespaces=namespaces)[0].text)

    x_tags = entry.xpath(
        'atom:category[@scheme="http://www.blogger.com/atom/ns#"]',
        namespaces=namespaces)
    x_tags = ','.join([x.get('term') for x in x_tags])

    try:
        x_url_slug = entry.xpath(
            'atom:link[@rel="alternate" and @type="text/html"]',
            namespaces=namespaces)[0].get('href')
    except IndexError:
        stat('No link!')
        return

    # Fix: raw string — '\.' is an invalid escape sequence in a plain
    # literal (SyntaxWarning on Python 3.12+, error in future versions).
    x_url_slug = re.search(r'[^/]+(?=\.html)', x_url_slug).group(0)
    x_text = entry.xpath('atom:content', namespaces=namespaces)[0].text

    return {
        x_url_slug:
        BlogPost(slug=x_url_slug,
                 date=x_pubdate,
                 title=x_title,
                 tags=x_tags,
                 text=x_text)
    }
Example #22
0
    def calevents(self, calid):
        """Return upcoming events of calendar *calid* as a list of dicts
        with 'name', 'start' (datetime), 'eid' and 'description' keys."""
        # The Calendar API wants an RFC 3339 timestamp; 'Z' marks UTC.
        time_min = datetime.datetime.utcnow().isoformat() + 'Z'
        response = self.service.events().list(
            calendarId=calid, timeMin=time_min, singleEvents=True,
            orderBy='startTime').execute()

        return [{'name': event['summary'],
                 'start': rfc3339.parse_datetime(event['start']['dateTime']),
                 'eid': event['id'],
                 'description': event['description']}
                for event in response.get('items', [])]
def process_post(entry):
	"""
	Parses an <entry> element from a Blogger-exported XML file into a
	BlogPost named tuple.

	Returns a dictionary item with the url slug as the key, or
	None if this entry is marked as a draft, or has no title.
	"""

	if entry.xpath('app:control', namespaces=namespaces):
		stat('draft found!')
		return

	x_title = entry.xpath('atom:title[@type="text"]', namespaces=namespaces)[0].text
	if x_title is None: return

	x_pubdate = rfc3339.parse_datetime(
		entry.xpath('atom:published', namespaces=namespaces)[0].text)

	x_tags = entry.xpath('atom:category[@scheme="http://www.blogger.com/atom/ns#"]',
		namespaces=namespaces)

	x_tags = ','.join([ x.get('term') for x in x_tags ])

	try:
		x_url_slug = entry.xpath('atom:link[@rel="alternate" and @type="text/html"]',
			namespaces=namespaces)[0].get('href')
	except IndexError:
		stat('No link!')
		return

	# Fix: raw string — '\.' is an invalid escape sequence in a plain
	# literal (SyntaxWarning on Python 3.12+, error in future versions).
	x_url_slug = re.search(r'[^/]+(?=\.html)', x_url_slug).group(0)
	x_text = entry.xpath('atom:content', namespaces=namespaces)[0].text

	return {x_url_slug:BlogPost(
		slug = x_url_slug,
		date = x_pubdate,
		title= x_title,
		tags = x_tags,
		text = x_text)}
def run(client, num_trials=15):
    """Benchmark order open/close latency against a trading REST API.

    Opens num_trials market buy orders on EUR_USD, printing for each the
    server-side request-to-order latency and the local round-trip time,
    then closes every opened trade.
    NOTE(review): Python 2 code (print statements); account_id is
    hard-coded.
    """
    account_id = 3922748
    #time1 = time.time()

    trade_ids = []

    print ("OPEN TRADES")

    for i in range(0, num_trials):
        singletime = time.time()
        now = datetime.now(pytz.utc)
        response = client.create_order(account_id, 
                          instrument="EUR_USD", 
                          side="buy", 
                          units=10, 
                          type="market")
        trade_id = response.get("tradeOpened").get("id")
        trade_ids.append(trade_id)

        trade_time = response.get("time")
        trade_time = rfc3339.parse_datetime(trade_time)

        # Server-side order timestamp minus local request timestamp.
        diff = trade_time - now
        print diff.total_seconds()
        
        diff_timestamp = diff.total_seconds()*1000
        print 'Request to Order: %0.3f' % (diff_timestamp)

        print '%0.3f' % ((time.time()-singletime)*1000.0)
        
        #print response

    print ("\nCLOSE TRADES")

    for i in range(0, num_trials):
        singletime = time.time()
        trade_id = trade_ids[i]
        response = client.close_trade(account_id, trade_id)
        print '%0.3f' % ((time.time()-singletime)*1000.0)
def import_trades():
    """
    Import trades from Bitpanda API
    """
    global resp

    # Newest locally-stored trade — used to limit the fetch window.
    with Session() as session:
        result = session.execute(
            sa.select(Trade).order_by(sa.desc("timestamp")))
        latest = result.scalars().first()

    cursor = None
    s = requests.session()
    s.headers.update({'User-Agent': 'PyBitPandaFetcher'})
    s.headers.update({"Authorization": "Bearer " + APIKEY})

    alltrades = []
    ppppage = 1
    # Page through the trade history using the API's pagination cursor.
    while True:
        ppppage += 1
        print(f"Fetching page {ppppage}")
        url = 'https://api.exchange.bitpanda.com/public/v1/account/trades'
        p = {"max_page_size": 30}
        if latest:
            # Only fetch trades newer than the latest one we already hold.
            p["from"] = latest.timestamp.isoformat()
            p["to"] = rfc3339.now().isoformat()
        if cursor:
            p["cursor"] = cursor
        resp = s.get(url, params=p)
        if resp.status_code != 200:
            raise ValueError("Invalid status code")

        j = resp.json()

        trades = j["trade_history"]
        for trade in trades:
            t = trade["trade"]

            ormtrade = Trade(
                id=t["trade_id"],
                trade_pair=t["instrument_code"],
                transaction_type=t["side"],  #BUY, SELL
                amount=D(t["amount"]),
                price=D(t["price"]),
                timestamp=rfc3339.parse_datetime(t["time"]))

            (tradee, traded) = t["instrument_code"].split("_")
            # switch currencies
            if t["side"] == "SELL":
                (tradee, traded) = (traded, tradee)

            # Fees are either BEST-token fees or standard fees taken in
            # the traded currency.
            f = trade["fee"]
            if f["collection_type"] == "BEST":
                ormtrade.is_best_fee = True
                ormtrade.fee = D(f["fee_amount"])
                ormtrade.fee_currency = f["fee_currency"]
            elif f["collection_type"] == "STANDARD":
                ormtrade.is_best_fee = False
                #fee_amount, fee_currency
                if f["fee_currency"] != tradee:
                    raise ValueError(
                        "Something appears to be wrong with the fee")
                ormtrade.fee = D(f["fee_amount"])
                ormtrade.fee_currency = f["fee_currency"]
            else:
                raise ValueError("Unknown fee collection type")

            alltrades.append(ormtrade)

        # The response carries a cursor only while more pages remain.
        if not "cursor" in j:
            break
        else:
            cursor = j["cursor"]

    # Persist everything in a single transaction.
    with Session() as session:
        session.add_all(alltrades)
        session.commit()
        pass
 def process_result_value(self, value, dialect):
     """Convert a stored RFC 3339 string back into a datetime object."""
     parsed = rfc3339.parse_datetime(value)
     return parsed
	def sync(self,widget):
		"""Two-way sync between local tasklists and Google Tasks.

		First ensures a refreshable API authorization exists (prompting
		via the Preferences dialog when needed), then reconciles both
		tasklists and their tasks in both directions according to the
		configured 'local', 'external' and 'both' conflict options
		(0 = copy, 1 = delete), and finally backs up and refreshes the
		menu.  widget is the triggering GTK widget (unused).
		"""
		gta = googletasksapi.GTAService(token_file = comun.TOKEN_FILE)
		error = True
		# Loop until we hold a refreshable authorization (or the user cancels).
		while(error):
			if gta.do_refresh_authorization() is None:
				p = Preferences(self.tasks)
				if p.run() == Gtk.ResponseType.ACCEPT:
					p.save_preferences()
				p.destroy()
				gta = googletasksapi.GTAService(token_file = comun.TOKEN_FILE)
				if (not os.path.exists(comun.TOKEN_FILE)) or (gta.do_refresh_authorization() is None):
					md = Gtk.MessageDialog(	parent = None,
											flags = Gtk.DialogFlags.MODAL | Gtk.DialogFlags.DESTROY_WITH_PARENT,
											type = Gtk.MessageType.ERROR,
											buttons = Gtk.ButtonsType.OK_CANCEL,
											message_format = _('You have to authorize Google-Tasks-Indicator to manage your Google Calendar.\n Do you want to authorize?'))
					if md.run() == Gtk.ResponseType.CANCEL:
						md.destroy()
						return
					md.destroy()
				else:
					gta = googletasksapi.GTAService(token_file = comun.TOKEN_FILE)
					if gta.do_refresh_authorization() is None:
						error = False
			else:
				error = False
		## Options
		configuration = Configuration()
		option_local = configuration.get('local')
		option_external = configuration.get('external')
		option_both = configuration.get('both')
		##
		google_tasklists = gta.get_tasklists()

		## From Local to Google ##
		google_task_lists_ids = []
		for gtl in google_tasklists.values():
			google_task_lists_ids.append(gtl['id'])
		for tasklist in self.tasks.tasklists.values():
			if tasklist['id'] not in google_task_lists_ids: # Tasklist only local
				if option_local == 0: # Copy to external
					new_tasklist = gta.create_tasklist(tasklist['title'])
					if new_tasklist is not None:
						for task in tasklist['tasks'].values():
							if 'due' in task.keys() and task['due'] is not None:
								due = rfc3339.parse_datetime(str(task['due']))
							else:
								due = None
							new_task = gta.create_task( tasklist_id = new_tasklist['id'], title = task['title'], notes=task['notes'], iscompleted=task.get_completed(),due=due,data_completed=task['completed'])
							new_tasklist['tasks'][new_task['id']] = new_task
						self.tasks.remove_tasklist(tasklist)
						self.tasks.tasklists[new_tasklist['id']] = new_tasklist
				elif option_local == 1: # delete local
					self.tasks.remove_tasklist(tasklist)
			else: # Tasklist local and external;
				if option_both == 0: # Copy to local
					gtl = google_tasklists[tasklist['id']]
					tasklist['title'] = gtl['title']
				elif option_both == 1: # Copy to external
					if gtl['title'] != tasklist['title']:
						gta.edit_tasklist(tasklist['id'],tasklist['title'])
				########################################################
				## Working with tasks
				localtasks = tasklist['tasks']
				googletasks = gta.get_tasks(tasklist_id=tasklist['id'])
				### From Local to Google
				for task in localtasks.values():
					if task['id'] not in googletasks.keys():
						# Task only local
						if option_local == 0:
							# Copy to external
							new_task = gta.create_task( tasklist_id = task['tasklist_id'], title = task['title'], notes=task['notes'], iscompleted=task.get_completed(),due=task['due'],data_completed=task['completed'])
							if new_task is not None:
								self.tasks.remove_task(task)
								self.tasks.tasklists[task['tasklist_id']]['tasks'][new_task['id']] = new_task
						elif option_local == 1:
							# Delete local
							self.tasks.remove_task(task)
					else:
						#Task local and external
						if option_both == 0:
							# Copy to local
							self.tasks.tasklists[task['tasklist_id']]['tasks'][task['id']] = googletasks[task['id']]
						elif option_both ==1:
							# Copy to external
							task_id = task['id']
							tasklist_id = task['tasklist_id']
							title = task['title']
							notes = task['notes']
							iscompleted = (task['status']=='completed')
							due = task['due']
							gta.edit_task(task_id, tasklist_id , title = title, notes = notes, iscompleted = iscompleted, due = due)
				### From Google to Local
				for task in googletasks.values():
					if task['id'] not in localtasks.keys():
						#Task only external
						if option_external == 0:
							# Copy to local
							self.tasks.tasklists[task['tasklist_id']]['tasks'][task['id']] = googletasks[task['id']]
						elif option_external == 1:
							# Delete external
							gta.delete_task(task['id'], task['tasklist_id'])
				########################################################
		## From Google to Local ##
		alone_task_lists_ids = []
		for atl in self.tasks.tasklists.values():
			alone_task_lists_ids.append(atl['id'])
		for tasklist in google_tasklists.values():
			if tasklist['id'] not in alone_task_lists_ids: # Tasklist only Google
				if option_external == 0: # Copy to local
					new_tasklist = tasklist
					new_tasklist['tasks'] = gta.get_tasks(tasklist_id = tasklist['id'])
					self.tasks.tasklists[new_tasklist['id']] = new_tasklist
				elif option_external == 1: # Delete external
					gta.delete_tasklist(tasklist)
		# Persist the merged state and refresh the indicator menu.
		self.tasks.backup()
		self.menu_update()
Example #28
0
 def parse_datetime(self, rfc_string):
     """Parse an RFC 3339 string, tolerating stray spaces in the input."""
     return rfc3339.parse_datetime(rfc_string.replace(' ', ''))
Example #29
0
    def get_site(domain):
        """Render the scan-result page for *domain*.

        Redirects www subdomains to their base domain and unknown domains
        to the scan form; otherwise renders the pending/blocked/error
        template or the scan result according to the stored scan state.
        """
        domain = normalise_domain(domain)
        if not check_domain(domain):
            return template('gpc_invalid', domain=domain)

        # Well-Known doesn't scan www subdomains - redirect to the base domain instead.
        if domain_is_www_subdomain(domain):
            base_domain = extract_base_domain(domain)
            redirect(f'/sites/{base_domain}')

        result = es_dao.get(domain)
        if result is None:
            redirect(f'/?domain={domain}')

        status = result['status']
        scan_data = result.get('scan_data')
        if status == 'pending':
            return template('gpc_pending', domain=domain)
        elif status == 'blocked':
            return template('gpc_blocked', domain=domain)
        elif status == 'failed' and not scan_data:
            return template('gpc_error', domain=domain)

        # Status should be `ok`, or `failed` but with a previously successful scan.
        # In either case, `scan_data` should be present.
        assert scan_data

        scheme = scan_data['scheme']

        scan_dt = rfc3339.parse_datetime(scan_data['scan_dt'])

        # Priority 0 means a rescan is already queued.
        if result['scan_priority'] == 0:
            rescan_queued = True
            can_rescan = False
        else:
            rescan_queued = False
            last_scan_dt = rfc3339.parse_datetime(result['last_scan_dt'])
            can_rescan = (last_scan_dt + SCAN_TTL) < rfc3339.now()

        error = scan_data.get('error')
        if error:
            message = None
            if error == 'not-found':
                message = 'The GPC support resource was not found.'
            # Fix: 'unexpected-status' was duplicated in this tuple.
            elif error in ('unexpected-scheme-redirect', 'unexpected-status',
                           'client-error', 'server-error'):
                message = 'Server responded unexpectedly when fetching the GPC support resource.'
            elif error in ('parse-error', 'json-parse-error', 'unexpected-json-root-type',
                           'content-too-long', 'content-length-too-long', 'bad-content'):
                message = 'The GPC support resource is invalid.'
            elif error:
                log.error('Unsupported GPC scan error %(error)s', {'error': error})

            r = template('gpc_unknown', scheme=scheme, domain=domain,
                         message=message, scan_dt=scan_dt,
                         rescan_queued=rescan_queued, can_rescan=can_rescan)
            set_headers(r, SCAN_RESULT_HEADERS)
            return r

        else:
            assert scan_data['found'], 'gpc.json should have been found if no error.'
            gpc_data = scan_data['gpc']

            # Merge scan-level and parse-level warnings into one message.
            warnings = scan_data.get('warnings') or []
            warnings += gpc_data.get('warning_codes') or []
            message = None
            if warnings:
                message_parts = []
                for warning in warnings:
                    if warning == 'wrong-content-type':
                        message_parts.append('incorrect content type')
                    elif warning == 'invalid-update-field':
                        message_parts.append('invalid last update field')

                if message_parts:
                    message = ' and '.join(message_parts) + '.'

            last_update = gpc_data['parsed'].get('lastUpdate')
            template_name = 'gpc_supported' if gpc_data['parsed']['gpc'] else 'gpc_unsupported'
            r = template(template_name, scheme=scheme, domain=domain,
                         last_update=last_update, message=message, scan_dt=scan_dt,
                         rescan_queued=rescan_queued, can_rescan=can_rescan)
            set_headers(r, SCAN_RESULT_HEADERS)
            return r
Exemple #30
0
    def get(self, path):
        _usage = """
        USAGE:

        Support arbitrary REST query using following translated
        query (using stenographer API). All terms are AND'd together
        to refine the query. The API does not currently support OR semantics
        Time intervals may be expressed with any combination of: h, m, s, ms, us

        /host/1.2.3.4/ -> 'host 1.2.3.4'
        /host/1.2.3.4/host/4.5.6.7/ -> 'host 1.2.3.4 and host 4.5.6.7'
        /net/1.2.3.0/24/ -> 'net 1.2.3.0/24'
        /port/80/ -> 'port 80'
        /proto/6/ -> 'ip proto 6'
        /tcp/ -> 'tcp'
        /tcp/port/80/ -> 'tcp and port 80'
        /before/2017-04-30/ -> 'before 2017-04-30T00:00:00Z'
        /before/2017-04-30T13:26:43Z/ -> 'before 2017-04-30T13:26:43Z'
        /before/45m/ -> 'before 45m ago'
        /after/3h/ -> 'after 180m ago'
        /after/3h30m/ -> 'after 210m ago'
        /after/3.5h/ -> 'after 210m ago'

        Example query using curl
	```
 	$ curl -s localhost:8080/pcap/host/192.168.254.201/port/53/udp/after/3m/ | tcpdump -nr -
	reading from file -, link-type EN10MB (Ethernet)
	15:38:00.311222 IP 192.168.254.201.31176 > 205.251.197.49.domain: 52414% [1au] A? ping.example.net. (47)
	15:38:00.345042 IP 205.251.197.49.domain > 192.168.254.201.31176: 52414*- 8/4/1 A 198.18.249.85, A 198.18.163.178, ...
	```
        """
        from flask import current_app
        logger = current_app.logger
        from tasks import raw_query
        import re, rfc3339, os.path
        from datetime import timedelta

        logger.debug("Entering Pcap::get")
        logger.debug("arg: path => {}".format(path))
        
        TIME_WINDOW = current_app.config["TIME_WINDOW"]

        _path = path.strip('/')

        logger.debug("Query: {}".format(_path))
        state = None
        argv = _path.split('/')

        query = {
                'host': [],
                'net' : [],
                'port': [],
                'proto': None,
                'udp': None,
                'tcp': None,
                'icmp': None,
                'before': None,
                'after': None,
                }
        for arg in argv:
            if state == None: 
                if arg.upper() in ["HOST", "NET", "PORT", "PROTO", "BEFORE", "AFTER"]:
                    state = arg.upper()
                elif arg.upper() in ["UDP", "TCP", "ICMP"]:
                    query[arg.lower()] = True
                    state = None
                else:
                    raise HTTPException("I'm a teapot.",
                            payload="{}".format(_usage),
                            status_code=418 )
            elif state in ["HOST", "PORT", "PROTO"]:
                # Read an IP
                query[state.lower()].append(arg)
                state = None
            elif state == "NET":
                # Read a network, leave mask blank
                query['net'].append((arg, None))
                state = "NET1"
            elif state == "NET1":
                query['net'][-1] = (query['net'][-1][0], arg)
                state = None
            elif state in ["BEFORE", "AFTER"]:
                # Test if this is indicating a relative time
                # Match on h, m, s, ms, us 
                dur = _parseDuration(arg)
                if dur is not None:
                    logger.debug("Duration is: {}".format(dur.total_seconds()))
                    query[state.lower()] = "{:d}m ago".format(ceildiv(dur.total_seconds(),60))
                else:
                    try:
                        dt = rfc3339.parse_datetime(arg).replace(tzinfo=None)
                        if state == "BEFORE":
                            dt = dt + timedelta(seconds=TIME_WINDOW)
                        else: # This is obviously AFTER
                            dt = dt - timedelta(seconds=TIME_WINDOW)
                        query[state.lower()] = "{}Z".format(dt.isoformat())
                    except ValueError:
                        logger.debug("Failed to parse datetime: {}".format(arg))
                state = None

        # Arg parsing complete
        logger.debug("Query: {}".format(query))

        # Build the query string
        qry_str = []
        for host in query['host']:
            qry_str.append('host {}'.format(host))
        for net in query['net']:
            qry_str.append('net {}/{}'.format(net[0], net[1]))
        for port in query['port']:
            qry_str.append('port {}'.format(port))
        if query['proto']:
            qry_str.append('ip proto {}'.format(query['proto']))
        if query['udp']:
            qry_str.append('udp')
        if query['tcp']:
            qry_str.append('tcp')
        if query['icmp']:
            qry_str.append('icmp')
        if query['before']:
            qry_str.append('before {}'.format(query['before']))
        if query['after']:
            qry_str.append('after {}'.format(query['after']))

        _query = " and ".join(qry_str)
        logger.debug("Query String: {}".format(_query))

        result = raw_query.apply_async(kwargs={'query': _query})

        while not result.ready():
            pass

        if result.successful():
            rc, message = result.result

            # Everything is normal
            if rc == 0:
                if os.path.isfile(message):
                    fname = os.path.basename(message)
                    rv = send_file(
                        message, 
                        mimetype='application/vnd.tcpdump.pcap',
                        as_attachment=True,
                        attachment_filename=fname
                        )
                    return rv
                else:
                    HTTPException(message="Response file not found", status_code=404)
            else:
                # Something failed
                HTTPException(message="RC: {}  Message: {}".format(rc, message), status_code=500)
Exemple #31
0
 def sort(artifact):
     """Sort key: the artifact's `createdAt` timestamp as epoch seconds."""
     created_at = artifact.get('createdAt')
     return parse_datetime(created_at).timestamp()
Exemple #32
0
 def format_datetime(date_string):
     """Format an RFC 3339 datetime string like 'Friday, June 26, 2020'."""
     parsed = rfc3339.parse_datetime(date_string)
     return parsed.strftime("%A, %B %d, %Y")
TEST_CASES = [
    # starting_batch, expected_number_of_batches, max_cases, count_per_batch
    (0, 0, 1, 2),
    (0, 1, 10, 10),
    (0, 2, 25, 10),
    (1, 3, 30, 10),
    (10, 10, 100, 10),
    (1, 99, 1000, 10),
    (1, 24, 2500000, 100001),
    (1, 12, 2500000, 200000),
    (50, 12, 2500000, 200000),
    (1, 0, 2500000, 2500001),
    (50, 0, 2500000, 2500001),
]
# Fixed, timezone-aware reference datetime for the tests.
TEST_DATE_TIME = rfc3339.parse_datetime('2020-06-26T06:39:34+00:00')
# Arbitrary but stable action-plan UUID fixture.
TEST_ACTION_PLAN_ID = uuid.UUID('6597821B-4D6A-48C4-B249-45C010A57EB1')
# Batch numbers 1..98 joined SQL-IN style: "1','2','...','98".
TEST_BATCH_COUNT = "','".join([str(i) for i in range(1, 99)])


@pytest.mark.parametrize(
    'starting_batch, expected_number_of_batches, max_cases, count_per_batch',
    TEST_CASES)
@patch('toolbox.reminder_scheduler.reminder_batch.db_helper')
def test_main(patch_db_helper, starting_batch, expected_number_of_batches,
              max_cases, count_per_batch):
    # Given: every batch-count SQL query reports `count_per_batch` cases.
    patch_db_helper.execute_parametrized_sql_query.return_value = ((
        count_per_batch, ), )
    expected_number_of_database_counts = get_expected_number_of_database_counts(
        expected_number_of_batches)
    # NOTE(review): the When/Then portion of this test appears to be
    # truncated here - confirm against the original source.
Exemple #34
0
    def UpdateCalendar(self):
        """
        Establish an API OAuth connection to google calendar and retrieve the next 10 events by
        end date/time. Clear the screen and make it show a message to say what's happening.
        Mostly, this is taken from the google apps demo code :-)
        TODO: Handle HTTP/API errors more gracefully. For now, catch exceptions, return an
        empty list, and set a 5 minute retry interval.
        """

        credentials = get_credentials()
        http = credentials.authorize(httplib2.Http())
        service = discovery.build('calendar', 'v3', http=http)

        now = datetime.datetime.utcnow().isoformat() + 'Z' # 'Z' indicates UTC time

        # Just in case this throws a HTTP exception or something.
        # (Catch Exception rather than a bare except so KeyboardInterrupt /
        # SystemExit still propagate.)
        try:
            eventsResult = service.events().list(
                calendarId=self.calendarid, timeMin=now, maxResults=11, singleEvents=True,
                orderBy='startTime').execute()
            self.events = eventsResult.get('items', [])
        except Exception:
            self.events = []

        # if you don't want to refresh immediately after an event finishes uncomment this:
        # self.nextrefresh = self.nextrefresh + datetime.timedelta(minutes=5)
        if not self.events:
            self.maxevents = 0
            self.nextrefresh = datetime.datetime.now(self.localtz) + datetime.timedelta(minutes=5)
            return

        # Let's deal with the API returning fewer results than expected
        if len(self.events) > 9:
            self.maxevents = 8
        else:
            # Subtract one because the index is zero based
            self.maxevents = len(self.events) - 1

        # Initialise nextrefresh to the latest possible date allowed by Python.
        self.nextrefresh = datetime.datetime.max
        self.nextrefresh = self.nextrefresh.replace(tzinfo=self.localtz)

        # Calculate start / end times and store them in datetime objects in the event dictionary
        # If an event only has a date property, it's an all day event, so it runs from midnight
        # localtime to midnight localtime. All the RFC parsers will want a time offset if not
        # treating a time as UTC, so we cheat slightly, and parse it as UTC first.
        # Then we strip the tzinfo from the parsed times and localize them using pytz.
        # It's a little dirty, but easier than doing it ourselves. 00:00 should always localize
        # without issue as it's 01:00 - 02:00 during changeover that will be ambiguous.
        for event in self.events:
            if event['start'].get('date'):
                event['estart_dt'] = rfc3339.parse_datetime(event['start'].get('date') + "T00:00:00Z")
                event['estart_dt'] = event['estart_dt'].replace(tzinfo=None)
                event['estart_dt'] = self.localtz.localize(event['estart_dt'])
            else:
                event['estart_dt'] = rfc3339.parse_datetime(event['start'].get('dateTime'))
            if event['end'].get('date'):
                event['eend_dt'] = rfc3339.parse_datetime(event['end'].get('date') + "T00:00:00Z")
                event['eend_dt'] = event['eend_dt'].replace(tzinfo=None)
                event['eend_dt'] = self.localtz.localize(event['eend_dt'])
                event['allday_flag'] = True
            else:
                event['eend_dt'] = rfc3339.parse_datetime(event['end'].get('dateTime'))
                event['allday_flag'] = False

            # If this event finishes earlier than the current assigned date, update it
            # At the end of the loop, we'll have the closest event end date. We use this
            # to kick off a calendar refresh automatically after the event finishes.
            if event['eend_dt'] < self.nextrefresh:
                self.nextrefresh = event['eend_dt']

            # Event.reminders.overrides is a list of dictionaries. Loop this to look for popup
            # reminders, building start/end datetime pairs per reminder.
            # Start times are when we need to turn on the reminder lights, end times we turn off.
            if event['reminders'].get('overrides'):
                for items in event['reminders'].get('overrides'):
                    if items.get('method') == 'popup':
                        # Bug fix: build a fresh dict per override. Previously a
                        # single shared dict was appended repeatedly, so multiple
                        # popup reminders all held the last override's times.
                        tempevent = {
                            'start': event['estart_dt'] -
                                datetime.timedelta(minutes=items.get('minutes')),
                            'end': event['estart_dt'],
                        }
                        self.reminders.append(tempevent)

        # Whenever we've re-read the calendar, display from the first event.
        self.d_event = 0
        self.c_event = 99
 def parse_datetime(self, rfc_string):
     """Parse an RFC 3339 string, first stripping any stray spaces."""
     cleaned = rfc_string.replace(' ', '')
     return rfc3339.parse_datetime(cleaned)
	def __init__(self, task = None,tasks = None):
		"""Build the add/edit-task dialog.

		task -- existing task mapping to edit; None switches to add-new mode.
		tasks -- container whose .tasklists values populate the tasklist combo.
		"""
		Gtk.Dialog.__init__(self)
		if task == None:
			self.set_title(comun.APPNAME + ' | '+_('Add new task'))
		else:
			self.set_title(comun.APPNAME + ' | '+_('Edit task'))
		self.set_modal(True)
		self.add_buttons(Gtk.STOCK_OK, Gtk.ResponseType.ACCEPT,Gtk.STOCK_CANCEL,Gtk.ResponseType.CANCEL)
		self.set_size_request(250, 160)
		self.set_resizable(False)
		self.set_icon_from_file(comun.ICON)
		self.connect('destroy', self.close_application)
		#
		vbox0 = Gtk.VBox(spacing = 5)
		vbox0.set_border_width(5)
		self.get_content_area().add(vbox0)
		#
		table1 = Gtk.Table(n_rows = 5, n_columns = 2, homogeneous = False)
		table1.set_border_width(5)
		table1.set_col_spacings(5)
		table1.set_row_spacings(5)
		vbox0.add(table1)
		#
		label10 = Gtk.Label.new(_('Task List')+':')
		label10.set_alignment(0,.5)
		table1.attach(label10,0,1,0,1, xoptions = Gtk.AttachOptions.FILL, yoptions = Gtk.AttachOptions.FILL)
		#
		# entry0: combo of available tasklists, backed by (title, id) rows.
		self.liststore = Gtk.ListStore(str,str)
		self.entry0 = Gtk.ComboBox.new_with_model(model=self.liststore)
		renderer_text = Gtk.CellRendererText()
		self.entry0.pack_start(renderer_text, True)
		self.entry0.add_attribute(renderer_text, "text", 0)
		self.entry0.set_active(0)
		table1.attach(self.entry0,1,2,0,1, xoptions = Gtk.AttachOptions.EXPAND, yoptions = Gtk.AttachOptions.SHRINK)
		#
		label11 = Gtk.Label.new(_('Title')+':')
		label11.set_alignment(0,.5)
		table1.attach(label11,0,1,1,2, xoptions = Gtk.AttachOptions.FILL, yoptions = Gtk.AttachOptions.FILL)
		#
		label12 = Gtk.Label.new(_('Notes')+':')
		label12.set_alignment(0,0)
		table1.attach(label12,0,1,2,3, xoptions = Gtk.AttachOptions.FILL, yoptions = Gtk.AttachOptions.FILL)
		#
		label13 = Gtk.Label.new(_('Completed')+':')
		label13.set_alignment(0,.5)
		table1.attach(label13,0,1,3,4, xoptions = Gtk.AttachOptions.FILL, yoptions = Gtk.AttachOptions.SHRINK)
		#
		label14 = Gtk.Label.new(_('Date due')+':')
		label14.set_alignment(0,0)
		table1.attach(label14,0,1,4,5, xoptions = Gtk.AttachOptions.FILL, yoptions = Gtk.AttachOptions.SHRINK)
		#
		# entry1: task title.
		self.entry1 = Gtk.Entry()
		self.entry1.set_width_chars(60)
		table1.attach(self.entry1,1,2,1,2, xoptions = Gtk.AttachOptions.EXPAND, yoptions = Gtk.AttachOptions.SHRINK)
		#
		# entry2: multi-line notes inside a scrolled window.
		scrolledwindow2 = Gtk.ScrolledWindow()
		scrolledwindow2.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
		scrolledwindow2.set_shadow_type(Gtk.ShadowType.ETCHED_OUT)
		table1.attach(scrolledwindow2,1,2,2,3, xoptions = Gtk.AttachOptions.FILL, yoptions = Gtk.AttachOptions.FILL)
		self.entry2 = Gtk.TextView()
		self.entry2.set_wrap_mode(Gtk.WrapMode.WORD)
		scrolledwindow2.set_size_request(350,150)
		scrolledwindow2.add(self.entry2)
		#
		# entry3: completed on/off switch.
		self.entry3 = Gtk.Switch()
		table1.attach(self.entry3,1,2,3,4, xoptions = Gtk.AttachOptions.SHRINK, yoptions = Gtk.AttachOptions.SHRINK)
		#
		# entry4 (checkbox) enables entry5 (due-date calendar picker).
		hbox = Gtk.HBox()
		table1.attach(hbox,1,2,4,5, xoptions = Gtk.AttachOptions.SHRINK, yoptions = Gtk.AttachOptions.SHRINK)
		self.entry4 = Gtk.CheckButton()
		self.entry4.connect('toggled', self.toggle_clicked )
		hbox.pack_start(self.entry4,0,0,0)
		self.entry5 = ComboBoxCalendar(self)
		self.entry5.set_sensitive(False)
		hbox.pack_start(self.entry5,0,0,0)
		#table1.attach(self.entry4,1,2,3,4, xoptions = Gtk.AttachOptions.SHRINK, yoptions = Gtk.AttachOptions.SHRINK)
		#
		if tasks is not None:
			for tasklist in tasks.tasklists.values():
				self.liststore.append([tasklist['title'],tasklist['id']])
		if task is not None:
			for i,item in enumerate(self.liststore):
				if task['tasklist_id'] == item[1]:
					self.entry0.set_active(i)
					break
			# NOTE(review): False == 0, so this resets the combo to row 0 right
			# after the loop above selected the matching row - confirm intended.
			self.entry0.set_active(False)
			if 'title' in task.keys():
				self.entry1.set_text(task['title'])
			if 'notes' in task.keys() and task['notes'] is not None:
				self.entry2.get_buffer().set_text(task['notes'])
			if 'status' in task.keys():
				self.entry3.set_active(task['status'] == 'completed')
			if 'due' in task.keys() and task['due'] is not None:
				self.entry4.set_active(True)
				self.entry5.set_date(rfc3339.parse_datetime(task['due']))
			else:
				self.entry4.set_active(False)
		else:
			self.entry0.set_active(0)
		self.show_all()
	def sync(self,widget):
		"""Two-way sync between local tasklists and Google Tasks.

		Direction handling is driven by the 'local', 'external' and 'both'
		configuration options (0 = copy to the other side, 1 = delete).
		widget -- unused; present for the GTK signal-handler signature.
		"""
		gta = googletasksapi.GTAService(token_file = comun.TOKEN_FILE)
		error = True
		# Loop until we hold an authorized service, or the user cancels.
		while(error):
			if gta.do_refresh_authorization() is None:
				p = Preferences(self.tasks)
				if p.run() == Gtk.ResponseType.ACCEPT:
					p.save_preferences()
				p.destroy()
				gta = googletasksapi.GTAService(token_file = comun.TOKEN_FILE)
				if (not os.path.exists(comun.TOKEN_FILE)) or (gta.do_refresh_authorization() is None):
					md = Gtk.MessageDialog(	parent = None,
											flags = Gtk.DialogFlags.MODAL | Gtk.DialogFlags.DESTROY_WITH_PARENT,
											type = Gtk.MessageType.ERROR,
											buttons = Gtk.ButtonsType.OK_CANCEL,
											message_format = _('You have to authorize Google-Tasks-Indicator to manage your Google Calendar.\n Do you want to authorize?'))
					if md.run() == Gtk.ResponseType.CANCEL:
						md.destroy()
						return
					md.destroy()
				else:
					gta = googletasksapi.GTAService(token_file = comun.TOKEN_FILE)
					if gta.do_refresh_authorization() is None:
						error = False
			else:
				error = False
		## Options
		configuration = Configuration()
		option_local = configuration.get('local')
		option_external = configuration.get('external')
		option_both = configuration.get('both')
		##
		google_tasklists = gta.get_tasklists()

		## From Local to Google ##
		google_task_lists_ids = []
		for gtl in google_tasklists.values():
			google_task_lists_ids.append(gtl['id'])
		for tasklist in self.tasks.tasklists.values():
			if tasklist['id'] not in google_task_lists_ids: # Tasklist only local
				if option_local == 0: # Copy to external
					new_tasklist = gta.create_tasklist(tasklist['title'])
					if new_tasklist is not None:
						for task in tasklist['tasks'].values():
							if 'due' in task.keys() and task['due'] is not None:
								due = rfc3339.parse_datetime(str(task['due']))
							else:
								due = None
							new_task = gta.create_task( tasklist_id = new_tasklist['id'], title = task['title'], notes=task['notes'], iscompleted=task.get_completed(),due=due,data_completed=task['completed'])
							new_tasklist['tasks'][new_task['id']] = new_task
						self.tasks.remove_tasklist(tasklist)
						self.tasks.tasklists[new_tasklist['id']] = new_tasklist
				elif option_local == 1: # delete local
					self.tasks.remove_tasklist(tasklist)
			else: # Tasklist local and external;
				if option_both == 0: # Copy to local
					gtl = google_tasklists[tasklist['id']]
					tasklist['title'] = gtl['title']
				elif option_both == 1: # Copy to external
					# NOTE(review): in this branch `gtl` is the leftover loop
					# variable from the id-collection loop above, not this
					# tasklist's Google counterpart - likely a stale-variable
					# bug; confirm before relying on this comparison.
					if gtl['title'] != tasklist['title']:
						gta.edit_tasklist(tasklist['id'],tasklist['title'])
				########################################################
				## Working with tasks
				localtasks = tasklist['tasks']
				googletasks = gta.get_tasks(tasklist_id=tasklist['id'])
				### From Local to Google
				print(localtasks)
				for task in localtasks.values():
					if task['id'] not in googletasks.keys():
						# Task only local
						if option_local == 0:
							# Copy to external
							new_task = gta.create_task( tasklist_id = task['tasklist_id'], title = task['title'], notes=task['notes'], iscompleted=task.get_completed(),due=task['due'],data_completed=task['completed'])
							if new_task is not None:
								self.tasks.remove_task(task)
								self.tasks.tasklists[task['tasklist_id']]['tasks'][new_task['id']] = new_task
						elif option_local == 1:
							# Delete local
							self.tasks.remove_task(task)
					else:
						#Task local and external
						if option_both == 0:
							# Copy to local
							self.tasks.tasklists[task['tasklist_id']]['tasks'][task['id']] = googletasks[task['id']]
						elif option_both ==1:
							# Copy to external
							task_id = task['id']
							tasklist_id = task['tasklist_id']
							title = task['title']
							notes = task['notes']
							iscompleted = (task['status']=='completed')
							due = task['due']
							gta.edit_task(task_id, tasklist_id , title = title, notes = notes, iscompleted = iscompleted, due = due)
				### From Google to Local
				for task in googletasks.values():
					if task['id'] not in localtasks.keys():
						#Task only external
						if option_external == 0:
							# Copy to local
							self.tasks.tasklists[task['tasklist_id']]['tasks'][task['id']] = googletasks[task['id']]
						elif option_external == 1:
							# Delete external
							gta.delete_task(task['id'], task['tasklist_id'])
				########################################################
		## From Google to Local ##
		alone_task_lists_ids = []
		for atl in self.tasks.tasklists.values():
			alone_task_lists_ids.append(atl['id'])
		for tasklist in google_tasklists.values():
			if tasklist['id'] not in alone_task_lists_ids: # Tasklist only Google
				if option_external == 0: # Copy to local
					new_tasklist = tasklist
					new_tasklist['tasks'] = gta.get_tasks(tasklist_id = tasklist['id'])
					self.tasks.tasklists[new_tasklist['id']] = new_tasklist
				elif option_external == 1: # Delete external
					gta.delete_tasklist(tasklist)
		self.tasks.backup()
		self.menu_update()
def remove_milliseconds_from_firstseen(bad_message_summaries):
    """Truncate each summary's `firstSeen` to whole seconds, in place.

    Returns the same list for caller convenience.
    """
    for summary in bad_message_summaries:
        first_seen = rfc3339.parse_datetime(summary['firstSeen'])
        summary['firstSeen'] = first_seen.strftime("%Y-%m-%dT%H:%M:%S")

    return bad_message_summaries
def eq_receipt_to_case(message: Message):
    """
    Callback for handling new pubsub messages which attempts to publish a receipt to the events exchange

    NB: any exceptions raised by this callback should nack the message by the future manager
    :param message: a GCP pubsub subscriber Message
    """
    log = logger.bind(message_id=message.message_id,
                      subscription_name=SUBSCRIPTION_NAME,
                      subscription_project=SUBSCRIPTION_PROJECT_ID)
    try:
        event_type = message.attributes['eventType']
        if event_type != 'OBJECT_FINALIZE':  # only forward on object creation
            log.error('Unknown Pub/Sub Message eventType', eventType=event_type)
            return
        bucket_name = message.attributes['bucketId']
        object_name = message.attributes['objectId']
    except KeyError as e:
        log.error('Pub/Sub Message missing required attribute',
                  missing_attribute=e.args[0])
        return

    log = log.bind(bucket_name=bucket_name, object_name=object_name)
    log.info('Pub/Sub Message received for processing')

    payload = validate_eq_receipt(message.data, log, ['timeCreated'],
                                  ['tx_id', 'questionnaire_id'])
    if not payload:
        return  # Failed validation

    metadata = payload['metadata']
    tx_id = metadata['tx_id']
    questionnaire_id = metadata['questionnaire_id']
    case_id = metadata.get('case_id')
    time_obj_created = parse_datetime(payload['timeCreated']).isoformat()

    log = log.bind(questionnaire_id=questionnaire_id,
                   created=time_obj_created,
                   tx_id=tx_id,
                   case_id=case_id)

    # Assemble the RESPONSE_RECEIVED event envelope.
    event_block = {
        'type': 'RESPONSE_RECEIVED',
        'source': 'RECEIPT_SERVICE',
        'channel': 'EQ',
        'dateTime': time_obj_created,
        'transactionId': tx_id,
    }
    response_block = {
        'caseId': case_id,
        'questionnaireId': questionnaire_id,
        'unreceipt': False,
    }
    receipt_message = {'event': event_block, 'payload': {'response': response_block}}

    send_message_to_rabbitmq(json.dumps(receipt_message))
    message.ack()

    log.info('Message processing complete')
Exemple #40
0
def exportGoogleSpreadsheet(projectName, ProjectPath, ParamsPath, DataType,
                            ResourceDir, DataDir, DataFile, DataCreate):
    """Export each worksheet of the configured Google Spreadsheets to a cached
    .xlsx file, then run `DataCreate` on it to generate a Python data module.
    Worksheets whose cached file is newer than the remote revision are skipped.
    """
    xlsPath = ParamsPath
    xlsxFileName = DataFile
    resource_file = xlsPath + xlsxFileName

    if not os.path.isfile(resource_file):
        Error("project %s file %s not exist in path '%s'" %
              (projectName, xlsxFileName, xlsPath))
        return

    for row in getXlsxData(resource_file):
        if len(row) == 0:
            continue

        # A 4-column row targets a subproject; otherwise use the main project.
        if len(row) == 4:
            subproject_name = row[3]
            pyPath = ProjectPath + subproject_name + "/" + DataDir + "_" + subproject_name + "/"
        else:
            pyPath = ProjectPath + ResourceDir + "/" + DataDir + "/"

        directory = os.path.dirname(pyPath)
        if not os.path.exists(directory):
            os.makedirs(directory)

        # Make the output directory an importable package.
        if not os.path.exists(pyPath + "__init__.py"):
            f = open(pyPath + "__init__.py", "w")
            f.write("#database")
            f.close()

        docid = row[0]
        username = row[1]
        password = row[2]

        with open("Burritos.json") as json_burritos:
            json_key = json.load(json_burritos)

        scope = ['https://spreadsheets.google.com/feeds']
        credentials = SignedJwtAssertionCredentials(
            json_key['client_email'], bytes(json_key['private_key'], 'utf-8'),
            scope)
        client = gspread.authorize(credentials)

        #client = gspread.login(username, password)
        spreadsheet = client.open_by_key(docid)
        spreadsheet_title = spreadsheet.title

        cache_dir = "%sGoogleSpreadsheetsCache/" % (xlsPath)
        if not os.path.exists(cache_dir):
            os.makedirs(cache_dir)

        cache_title_dir = "%sGoogleSpreadsheetsCache/%s/" % (
            xlsPath, spreadsheet_title)
        if not os.path.exists(cache_title_dir):
            os.makedirs(cache_title_dir)

        for worksheet in spreadsheet.worksheets():
            filename = "%s%s.xlsx" % (cache_title_dir, worksheet.title)
            date_time = rfc3339.parse_datetime(worksheet.updated)
            if os.path.exists(filename):
                date_of_xls = datetime.datetime.fromtimestamp(
                    os.path.getmtime(filename), date_time.tzinfo)
                # Cached copy already up to date - skip the download.
                if date_of_xls > date_time:
                    print("file %s" % (filename), "Last Revision")
                    continue

            workbook = xlsxwriter.Workbook(filename)
            excel_sheet = workbook.add_worksheet()
            for r, row_values in enumerate(worksheet.get_all_values()):
                for c, cell in enumerate(row_values):
                    excel_sheet.write(r, c, cell)
            workbook.close()

            py_name = os.path.splitext(os.path.basename(filename))[0]
            py_file_name = "%s%s%s.py" % (pyPath, DataType, py_name)

            print(filename, " - %s export!" % (DataType))
            DataCreate(filename, py_file_name, py_name)
Exemple #41
0
 def __init__(self, task=None, tasks=None):
     """Build the add/edit-task dialog and prefill it from `task` if given.

     task -- existing task mapping to edit; None switches to add-new mode.
     tasks -- container whose .tasklists values populate the tasklist combo.
     """
     Gtk.Dialog.__init__(self)
     title_suffix = _('Add new task') if task is None else _('Edit task')
     self.set_title(comun.APPNAME + ' | ' + title_suffix)
     self.set_modal(True)
     self.add_buttons(Gtk.STOCK_OK, Gtk.ResponseType.ACCEPT,
                      Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL)
     self.set_size_request(250, 160)
     self.set_resizable(False)
     self.set_icon_from_file(comun.ICON)
     self.connect('destroy', self.close_application)

     fill = Gtk.AttachOptions.FILL
     expand = Gtk.AttachOptions.EXPAND
     shrink = Gtk.AttachOptions.SHRINK

     outer_box = Gtk.VBox(spacing=5)
     outer_box.set_border_width(5)
     self.get_content_area().add(outer_box)

     layout = Gtk.Table(n_rows=5, n_columns=2, homogeneous=False)
     layout.set_border_width(5)
     layout.set_col_spacings(5)
     layout.set_row_spacings(5)
     outer_box.add(layout)

     # Task-list selector row: label plus combo backed by (title, id) rows.
     list_label = Gtk.Label.new(_('Task List') + ':')
     list_label.set_alignment(0, .5)
     layout.attach(list_label, 0, 1, 0, 1, xoptions=fill, yoptions=fill)

     self.liststore = Gtk.ListStore(str, str)
     self.entry0 = Gtk.ComboBox.new_with_model(model=self.liststore)
     cell = Gtk.CellRendererText()
     self.entry0.pack_start(cell, True)
     self.entry0.add_attribute(cell, "text", 0)
     self.entry0.set_active(0)
     layout.attach(self.entry0, 1, 2, 0, 1, xoptions=expand, yoptions=shrink)

     # Remaining row labels down the left-hand column.
     title_label = Gtk.Label.new(_('Title') + ':')
     title_label.set_alignment(0, .5)
     layout.attach(title_label, 0, 1, 1, 2, xoptions=fill, yoptions=fill)

     notes_label = Gtk.Label.new(_('Notes') + ':')
     notes_label.set_alignment(0, 0)
     layout.attach(notes_label, 0, 1, 2, 3, xoptions=fill, yoptions=fill)

     completed_label = Gtk.Label.new(_('Completed') + ':')
     completed_label.set_alignment(0, .5)
     layout.attach(completed_label, 0, 1, 3, 4, xoptions=fill, yoptions=shrink)

     due_label = Gtk.Label.new(_('Date due') + ':')
     due_label.set_alignment(0, 0)
     layout.attach(due_label, 0, 1, 4, 5, xoptions=fill, yoptions=shrink)

     # entry1: task title.
     self.entry1 = Gtk.Entry()
     self.entry1.set_width_chars(60)
     layout.attach(self.entry1, 1, 2, 1, 2, xoptions=expand, yoptions=shrink)

     # entry2: multi-line notes inside a scrolled window.
     notes_scroller = Gtk.ScrolledWindow()
     notes_scroller.set_policy(Gtk.PolicyType.AUTOMATIC,
                               Gtk.PolicyType.AUTOMATIC)
     notes_scroller.set_shadow_type(Gtk.ShadowType.ETCHED_OUT)
     layout.attach(notes_scroller, 1, 2, 2, 3, xoptions=fill, yoptions=fill)
     self.entry2 = Gtk.TextView()
     self.entry2.set_wrap_mode(Gtk.WrapMode.WORD)
     notes_scroller.set_size_request(350, 150)
     notes_scroller.add(self.entry2)

     # entry3: completed on/off switch.
     self.entry3 = Gtk.Switch()
     layout.attach(self.entry3, 1, 2, 3, 4, xoptions=shrink, yoptions=shrink)

     # entry4 (checkbox) enables entry5 (due-date calendar picker).
     due_box = Gtk.HBox()
     layout.attach(due_box, 1, 2, 4, 5, xoptions=shrink, yoptions=shrink)
     self.entry4 = Gtk.CheckButton()
     self.entry4.connect('toggled', self.toggle_clicked)
     due_box.pack_start(self.entry4, 0, 0, 0)
     self.entry5 = ComboBoxCalendar(self)
     self.entry5.set_sensitive(False)
     due_box.pack_start(self.entry5, 0, 0, 0)

     if tasks is not None:
         for tasklist in tasks.tasklists.values():
             self.liststore.append([tasklist['title'], tasklist['id']])
     if task is not None:
         for i, item in enumerate(self.liststore):
             if task['tasklist_id'] == item[1]:
                 self.entry0.set_active(i)
                 break
         # NOTE(review): False == 0, so this resets the combo to row 0 right
         # after the loop selected the matching row - kept as-is; confirm.
         self.entry0.set_active(False)
         if 'title' in task.keys():
             self.entry1.set_text(task['title'])
         if 'notes' in task.keys() and task['notes'] is not None:
             self.entry2.get_buffer().set_text(task['notes'])
         if 'status' in task.keys():
             self.entry3.set_active(task['status'] == 'completed')
         if 'due' in task.keys() and task['due'] is not None:
             self.entry4.set_active(True)
             self.entry5.set_date(rfc3339.parse_datetime(task['due']))
         else:
             self.entry4.set_active(False)
     else:
         self.entry0.set_active(0)
     self.show_all()
Exemple #42
0
def to_time_interval(start_s, end_s):
    """
    Convert a pair of rfc3339 strings to TimeInterval.
    """
    start_dt = parse_datetime(start_s)
    end_dt = parse_datetime(end_s)
    return TimeInterval(start_dt, end_dt)
def parse_trigger_date_time(trigger_date_time):
    """Parse an RFC 3339 trigger datetime string.

    Returns the parsed datetime, or None when `trigger_date_time` is falsy.
    """
    if trigger_date_time:
        # Bug fix: previously parsed the global `args.trigger_date_time`
        # instead of the parameter, making the argument dead (and raising
        # NameError when no module-level `args` exists).
        return rfc3339.parse_datetime(trigger_date_time)
    return None
Exemple #44
0
 def format_datetime(date_string):
     """Return `date_string` (RFC 3339) formatted like 'Friday, June 26, 2020'."""
     dt_value = rfc3339.parse_datetime(date_string)
     return dt_value.strftime("%A, %B %d, %Y")