from typing import Tuple from fastai.core import PathOrStr from ..classification.transform import Vocab, ProteinTokenizer from enum import IntEnum import pickle import json import random import torch from tqdm import tqdm import warnings from .models.lstm_crf import LSTMCRF from .models.crf import ConditionalRandomField as CRF from pathlib import Path warnings.filterwarnings("ignore") ProtMtd = IntEnum('ProtMtd', 'JSON TOK SPLIT LMTRAIN CLASTRAIN') class DatasetGenerator: ''' Create the LM dataset, and the classifier dataset (train/validation can be shared) Maybe do some pre-processing to make sure train and test don't share too much sequence similarity? - This could be slow though Do all the tokenization / numericalization here so that there isn't # Do all the tokenization / numericalization here, and then just implement ''' def __init__( self, split_perc, json_path: PathOrStr,
class TimeSimulation:
    weekdays = IntEnum("weekdays", "mon tue wed thu fri sat sun", start=0)
    ten_min_frequency = datetime.timedelta(minutes=10)

    def __init__(self, sleep_seconds=5, time_step=None, schedule=None):
        """Initialize the simulated clock, the train schedule and the lines."""
        self.sleep_seconds = sleep_seconds
        # Default: one simulated minute per real sleep second.
        if time_step is None:
            time_step = datetime.timedelta(minutes=self.sleep_seconds)
        self.time_step = time_step

        # Station data from disk, ordered along the line.
        self.raw_df = pd.read_csv(
            f"{Path(__file__).parents[0]}/data/cta_stations.csv").sort_values(
                "order")

        # Default schedule: identical ten-minute frequency for every weekday.
        if schedule is None:
            schedule = {
                day: {0: TimeSimulation.ten_min_frequency}
                for day in TimeSimulation.weekdays
            }
        self.schedule = schedule

        self.train_lines = [
            Line(getattr(Line.colors, color), self.raw_df[self.raw_df[color]])
            for color in ("blue", "red", "green")
        ]

    def run(self):
        curr_time = datetime.datetime.utcnow().replace(hour=0,
                                                       minute=0,
                                                       second=0,
                                                       microsecond=0)
        logger.info("Beginning simulation, press Ctrl+C to exit at any time")
        logger.info("loading kafka connect jdbc source connector")
        configure_connector()
        logger.info("beginning cta train simulation")
        weather = Weather(curr_time.month)
        try:
            while True:
                logger.debug("simulation running: %s", curr_time.isoformat())
                # Weather events only fire at the top of each simulated hour.
                if curr_time.minute == 0:
                    weather.run(curr_time.month)
                for line in self.train_lines:
                    line.run(curr_time, self.time_step)
                curr_time += self.time_step
                time.sleep(self.sleep_seconds)
        except KeyboardInterrupt:
            logger.info("Shutting down")
            for line in self.train_lines:
                line.close()
class Weather(Producer):
    """Simulated weather model that publishes Avro events via the Kafka REST Proxy."""

    status = IntEnum("status",
                     "sunny partly_cloudy cloudy windy precipitation",
                     start=0)

    rest_proxy_url = "http://localhost:8082"

    key_schema = None
    value_schema = None

    # 0-based month indexes used to pick a seasonal baseline temperature.
    winter_months = set((0, 1, 2, 3, 10, 11))
    summer_months = set((6, 7, 8))

    def __init__(self, month):
        # Load the Avro schemas BEFORE calling super().__init__ — the original
        # passed Weather.key_schema / Weather.value_schema while they were
        # still None on the first instantiation.
        if Weather.key_schema is None:
            with open(f"{Path(__file__).parents[0]}/schemas/weather_key.json"
                      ) as f:
                Weather.key_schema = json.load(f)
        # Value schema is defined in `schemas/weather_value.json`.
        if Weather.value_schema is None:
            with open(f"{Path(__file__).parents[0]}/schemas/weather_value.json"
                      ) as f:
                Weather.value_schema = json.load(f)

        super().__init__(
            topic_name="org.chicago.cta.weather.v1",  # matches topic_name in consumer.py
            key_schema=Weather.key_schema,
            value_schema=Weather.value_schema,
            num_partitions=4,
            num_replicas=1,
        )

        self.status = Weather.status.sunny
        self.temp = 70.0
        if month in Weather.winter_months:
            self.temp = 40.0
        elif month in Weather.summer_months:
            self.temp = 85.0

    def _set_weather(self, month):
        """Advance the temperature/status with a season-biased random walk."""
        mode = 0.0
        if month in Weather.winter_months:
            mode = -1.0
        elif month in Weather.summer_months:
            mode = 1.0
        self.temp += min(max(-20.0, random.triangular(-10.0, 10.0, mode)),
                         100.0)
        self.status = random.choice(list(Weather.status))

    def run(self, month):
        """Update the weather and POST one Avro-encoded event to the REST Proxy."""
        self._set_weather(month)
        logger.info("building weather kafka proxy integration")
        resp = requests.post(
            f"{Weather.rest_proxy_url}/topics/{self.topic_name}",
            headers={"Content-Type": "application/vnd.kafka.avro.v2+json"},
            data=json.dumps({
                # REST Proxy expects the Avro schemas as JSON *strings*, not
                # nested objects; the original posted the parsed dicts plus an
                # invalid "format" field (other producers in this file agree).
                "key_schema": json.dumps(Weather.key_schema),
                "value_schema": json.dumps(Weather.value_schema),
                "records": [{
                    "value": {
                        "temperature": self.temp,
                        # send the symbolic name, not the raw IntEnum value
                        "status": self.status.name,
                    },
                }],
            }))
        try:
            resp.raise_for_status()
        except requests.exceptions.HTTPError:
            # Original used a bare `except:` and then printed the success
            # message unconditionally; report exactly one outcome instead.
            print(
                f"Failed to send data to REST Proxy {json.dumps(resp.json(), indent=2)}"
            )
        else:
            print(f"Sent data to REST Proxy {json.dumps(resp.json(), indent=2)}")
        logger.debug("sent weather data to kafka, temp: %s, status: %s",
                     self.temp, self.status.name)
class Weather(Producer):
    """Simulated weather model producing Avro events through the Kafka REST Proxy."""

    status = IntEnum("status",
                     "sunny partly_cloudy cloudy windy precipitation",
                     start=0)

    rest_proxy_url = "http://localhost:8082"

    key_schema = None
    value_schema = None

    # 0-based month indexes used to bias the seasonal temperature.
    winter_months = set((0, 1, 2, 3, 10, 11))
    summer_months = set((6, 7, 8))

    def __init__(self, month):
        super().__init__(
            "com.transitchicago.weather",  # TODO: Come up with a better topic name
            key_schema=Weather.key_schema,
            value_schema=Weather.value_schema,
            num_partitions=1,
            num_replicas=1)

        self.status = Weather.status.sunny
        if month in Weather.winter_months:
            self.temp = 40.0
        elif month in Weather.summer_months:
            self.temp = 85.0
        else:
            self.temp = 70.0

        # Lazily load each Avro schema once and cache it on the class.
        schema_dir = f"{Path(__file__).parents[0]}/schemas"
        for attr, filename in (("key_schema", "weather_key.json"),
                               ("value_schema", "weather_value.json")):
            if getattr(Weather, attr) is None:
                schema_path = f"{schema_dir}/{filename}"
                logger.info(f"loading {schema_path}")
                with open(schema_path) as schema_file:
                    setattr(Weather, attr, json.load(schema_file))

    def _set_weather(self, month):
        """Advance the season-biased random walk over temperature and status."""
        if month in Weather.winter_months:
            mode = -1.0
        elif month in Weather.summer_months:
            mode = 1.0
        else:
            mode = 0.0
        step = min(max(-20.0, random.triangular(-10.0, 10.0, mode)), 100.0)
        self.temp += step
        self.status = random.choice(list(Weather.status))
        logger.info(
            f"current temperature {self.temp} with {self.status} skies")

    def run(self, month):
        """POST one Avro-encoded weather record to the REST Proxy topic."""
        self._set_weather(month)
        logger.info(
            f"producing {self.topic_name} with current conditions {self.status.name} and {self.temp}"
        )
        record = {
            "key": {
                "timestamp": self.time_millis()
            },
            "value": {
                "temperature": self.temp,
                "status": self.status.name
            },
        }
        payload = {
            "key_schema": json.dumps(Weather.key_schema),
            "value_schema": json.dumps(Weather.value_schema),
            "records": [record],
        }
        resp = requests.post(
            f"{Weather.rest_proxy_url}/topics/{self.topic_name}",
            headers={"Content-Type": "application/vnd.kafka.avro.v2+json"},
            data=json.dumps(payload),
        )
        logger.info(f"returned with {resp.text}")
        resp.raise_for_status()
        logger.info(
            "sent weather data to kafka, temp: %s, status: %s",
            self.temp,
            self.status.name,
        )
class Weather(Producer):
    """Simulated weather model that posts Avro events through the Kafka REST Proxy."""

    status = IntEnum("status",
                     "sunny partly_cloudy cloudy windy precipitation",
                     start=0)

    key_schema = None
    value_schema = None

    # 0-based month indexes for the seasonal baseline temperature.
    winter_months = set((0, 1, 2, 3, 10, 11))
    summer_months = set((6, 7, 8))

    def __init__(self, month):
        # Load schemas BEFORE calling super().__init__ — the original handed
        # Weather.key_schema / Weather.value_schema to the base class while
        # they were still None on the first instantiation.
        if Weather.key_schema is None:
            with open(f"{Path(__file__).parents[0]}/schemas/weather_key.json"
                      ) as f:
                Weather.key_schema = json.load(f)
        if Weather.value_schema is None:
            with open(f"{Path(__file__).parents[0]}/schemas/weather_value.json"
                      ) as f:
                Weather.value_schema = json.load(f)

        super().__init__(
            "com.udacity.project.chicago_transportation.weather.update",
            key_schema=Weather.key_schema,
            value_schema=Weather.value_schema,
            num_partitions=5,
            num_replicas=1)

        self.status = Weather.status.sunny
        self.temp = 70.0
        if month in Weather.winter_months:
            self.temp = 40.0
        elif month in Weather.summer_months:
            self.temp = 85.0

    def _set_weather(self, month):
        """Advance the temperature/status via a season-biased random walk."""
        mode = 0.0
        if month in Weather.winter_months:
            mode = -1.0
        elif month in Weather.summer_months:
            mode = 1.0
        self.temp += min(max(-20.0, random.triangular(-10.0, 10.0, mode)),
                         100.0)
        self.status = random.choice(list(Weather.status))

    def run(self, month):
        """Update the weather and POST one event; failures are logged, not raised."""
        self._set_weather(month)
        event_key = {'timestamp': self.time_millis()}
        event_value = {
            'status': Weather.status(self.status).name,
            'temperature': self.temp
        }
        resp = requests.post(
            f"{os.getenv('REST_PROXY_URL')}/topics/{self.topic_name}",
            headers={"Content-Type": "application/vnd.kafka.avro.v2+json"},
            data=json.dumps({
                'key_schema': json.dumps(Weather.key_schema),
                'value_schema': json.dumps(Weather.value_schema),
                'records': [{
                    'key': event_key,
                    'value': event_value
                }]
            }),
        )
        try:
            resp.raise_for_status()
        # raise_for_status only raises HTTPError; narrowed from the original
        # broad `except Exception`. Deliberately best-effort: log and continue.
        except requests.exceptions.HTTPError as e:
            # Fixed `exception: ${e}` — shell-style interpolation typo that
            # logged a literal "$" instead of the exception text.
            logger.error(f"""
            Failed to send event to topic {self.topic_name} throughout Rest API.
            key: {json.dumps(event_key)}
            value: {json.dumps(event_value)}
            exception: {e}
            response: {resp.text}
            """)
        logger.debug(
            "sent weather data to kafka, temp: %s, status: %s",
            self.temp,
            self.status.name,
        )
import argparse
import csv
import logging
import re
from enum import IntEnum

from django.core.management.base import BaseCommand

from djenealog import models

# Import phases: each kind of CSV row is handled in its own pass.
STATES = IntEnum("States", "lieu individu couple famille")
# Per-phase lookup of CSV id -> already-created model instance.
mapping = {state: {} for state in STATES}
logger = logging.getLogger("djenealog.import_csv")

# Loose Y-M-D matcher: every group is optional so partial dates
# ("1897", "1897-04") still parse; hoisted out of the function so the
# pattern is compiled once.
_YMD_RE = re.compile(r"(\d+)?-?(\d+)?-?(\d+)?")


def get_or_create_event(cls, inst, ymd, lieu):
    """Get or create the `cls` event attached to `inst`, filling in missing
    place and date parts from the CSV values.

    Existing non-empty fields are never overwritten, so re-running the
    import preserves manual corrections.

    :param cls: event model class with an `inst` relation
    :param inst: the record (individual/couple) the event belongs to
    :param ymd: date string in loose "Y-M-D" form (parts may be missing)
    :param lieu: CSV place id, resolved through `mapping[STATES.lieu]`
    """
    instance, _ = cls.objects.get_or_create(inst=inst)
    if lieu and not instance.lieu:
        instance.lieu = mapping[STATES.lieu][lieu]
    # groups(default=0) turns absent parts into 0 so int() always succeeds.
    y, m, d = (int(part) for part in _YMD_RE.match(ymd).groups(default=0))
    if y and not instance.y:
        instance.y = y
    if m and not instance.m:
        instance.m = m
    if d and not instance.d:
        instance.d = d
    instance.save()
from enum import IntEnum, IntFlag

from bs4 import BeautifulSoup, SoupStrainer

HTML_PARSER = 'html.parser'
TZ = 'Australia/Sydney'
LOGIN_URL = 'https://mq-edu-web.t1cloud.com/T1SMDefault/WebApps/eStudent/login.aspx'
TIMETABLE_URL = 'https://mq-edu-web.t1cloud.com/T1SMDefault/WebApps/eStudent/SM/StudentTtable10.aspx?r=MQ.ESTU.UGSTUDNTB&f=MQ.EST.TIMETBL.WEB'

ClassFlags = IntFlag('ClassFlags', 'registered swappable stream')
# Day-of-week enum with Sunday == 0.
Day = IntEnum('DayOfWeek', 'sun mon tue wed thu fri sat', start=0)
DAY_NAMES = ('Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday',
             'Saturday')
# Bidirectional mappings between Day members and their full English names.
DAY_NAME_LOOKUP = {day: name for day, name in zip(Day, DAY_NAMES)}
DAY_ENUM_LOOKUP = {name: day for name, day in zip(DAY_NAMES, Day)}


def make_soup(markup, strain=None):
    """Parse `markup` with html.parser, optionally restricting the parse to
    the elements described by the SoupStrainer keyword arguments in `strain`."""
    strainer_kwargs = {} if strain is None else strain
    return BeautifulSoup(markup, HTML_PARSER,
                         parse_only=SoupStrainer(**strainer_kwargs))
class Weather(Producer):
    """Simulated weather model; the REST Proxy publishing step is not wired up yet."""

    status = IntEnum("status",
                     "sunny partly_cloudy cloudy windy precipitation",
                     start=0)

    rest_proxy_url = "http://localhost:8082"

    key_schema = None
    value_schema = None

    # 0-based month indexes for the seasonal baseline temperature.
    winter_months = set((0, 1, 2, 3, 10, 11))
    summer_months = set((6, 7, 8))

    def __init__(self, month):
        # TODO: pick a better topic name plus explicit partition/replica counts.
        super().__init__(
            "weather",  # TODO: Come up with a better topic name
            key_schema=Weather.key_schema,
            value_schema=Weather.value_schema,
        )

        self.status = Weather.status.sunny
        if month in Weather.winter_months:
            self.temp = 40.0
        elif month in Weather.summer_months:
            self.temp = 85.0
        else:
            self.temp = 70.0

        schema_dir = f"{Path(__file__).parents[0]}/schemas"
        if Weather.key_schema is None:
            with open(f"{schema_dir}/weather_key.json") as schema_file:
                Weather.key_schema = json.load(schema_file)
        # TODO: define this value schema in `schemas/weather_value.json`.
        if Weather.value_schema is None:
            with open(f"{schema_dir}/weather_value.json") as schema_file:
                Weather.value_schema = json.load(schema_file)

    def _set_weather(self, month):
        """Advance the season-biased random walk over temperature and status."""
        if month in Weather.winter_months:
            mode = -1.0
        elif month in Weather.summer_months:
            mode = 1.0
        else:
            mode = 0.0
        self.temp += min(max(-20.0, random.triangular(-10.0, 10.0, mode)),
                         100.0)
        self.status = random.choice(list(Weather.status))

    def run(self, month):
        """Update the simulated weather; posting to the REST Proxy is still a TODO."""
        self._set_weather(month)
        # TODO: POST to f"{Weather.rest_proxy_url}/topics/..." with the Avro
        # Content-Type header and a body carrying key/value schemas plus the
        # records, then call raise_for_status() on the response.
        logger.info("weather kafka proxy integration incomplete - skipping")
        logger.debug(
            "sent weather data to kafka, temp: %s, status: %s",
            self.temp,
            self.status.name,
        )
# Character classes used to mark each codepoint; values start at 1 and follow
# the declaration order below.
CharMarkers = IntEnum(
    "CharMarkers",
    # regular Tibetan
    "CONS SUB_CONS VOW TSEK "
    # punctuation
    "NORMAL_PUNCT SPECIAL_PUNCT "
    # others
    "NUMERAL SYMBOL IN_SYL_MARK NON_BO_NON_SKRT "
    # lexica_skrt
    "SKRT_CONS SKRT_SUB_CONS SKRT_VOW SKRT_LONG_VOW "
    # other languages
    "CJK LATIN "
    # misc
    "OTHER TRANSPARENT NFC",
    start=1,
)
class Weather(Producer):
    """Simulated weather model publishing Avro events via the Kafka REST Proxy."""

    status = IntEnum("status",
                     "sunny partly_cloudy cloudy windy precipitation",
                     start=0)

    rest_proxy_url = "http://localhost:8082"

    key_schema = None
    value_schema = None

    # 0-based month indexes for the seasonal baseline temperature.
    winter_months = set((0, 1, 2, 3, 10, 11))
    summer_months = set((6, 7, 8))

    def __init__(self, month):
        # Load schemas BEFORE super().__init__ so the Producer base does not
        # receive None schemas on the first instantiation (the original loaded
        # them only after the super() call).
        if Weather.key_schema is None:
            with open(f"{Path(__file__).parents[0]}/schemas/weather_key.json"
                      ) as f:
                Weather.key_schema = json_load(f)
        if Weather.value_schema is None:
            with open(f"{Path(__file__).parents[0]}/schemas/weather_value.json"
                      ) as f:
                Weather.value_schema = json_load(f)

        super().__init__("weather",
                         key_schema=Weather.key_schema,
                         value_schema=Weather.value_schema,
                         num_partitions=3,
                         num_replicas=1)
        self.status = Weather.status.sunny
        self.temp = 70.0
        if month in Weather.winter_months:
            self.temp = 40.0
        elif month in Weather.summer_months:
            self.temp = 85.0

    def _set_weather(self, month):
        """Advance the temperature/status with a season-biased random walk."""
        mode = 0.0
        if month in Weather.winter_months:
            mode = -1.0
        elif month in Weather.summer_months:
            mode = 1.0
        self.temp += min(max(-20.0, random.triangular(-10.0, 10.0, mode)),
                         100.0)
        self.status = random.choice(list(Weather.status))

    def run(self, month):
        """Update the weather and POST one event to the REST Proxy."""
        self._set_weather(month)
        # The payload embeds Avro schemas, so the Avro embedded-format
        # Content-Type is required; the original sent the JSON one
        # ("application/vnd.kafka.json.v2+json"), which the proxy rejects
        # for schema-bearing requests.
        headers = {"Content-Type": "application/vnd.kafka.avro.v2+json"}
        data = {
            "key_schema": json_dumps(Weather.key_schema),
            "value_schema": json_dumps(Weather.value_schema),
            "records": [{
                "value": {
                    "temperature": int(self.temp),
                    "status": self.status.name
                },
                "key": {
                    "timestamp": self.time_millis()
                }
            }]
        }
        # Use self.topic_name rather than re-hardcoding the topic string.
        resp = requests.post(f"{REST_PROXY_URL}/topics/{self.topic_name}",
                             data=json_dumps(data),
                             headers=headers)
        resp.raise_for_status()
        logger.debug(
            "sent weather data to kafka, temp: %s, status: %s",
            self.temp,
            self.status.name,
        )
class Weather(Producer):
    """Simulated weather model publishing Avro events through the REST Proxy."""

    status = IntEnum(
        "status", "sunny partly_cloudy cloudy windy precipitation", start=0
    )

    rest_proxy_url = "http://localhost:8082"

    key_schema = None
    value_schema = None

    # 0-based month indexes for the seasonal baseline temperature.
    winter_months = set((0, 1, 2, 3, 10, 11))
    summer_months = set((6, 7, 8))

    def __init__(self, month):
        # The topic itself is created by the Producer base class.
        super().__init__(
            WEATHER_MSG_TOPIC_NAME,
            key_schema=Weather.key_schema,
            value_schema=Weather.value_schema,
            num_partitions=3,
            num_replicas=2,
        )

        self.status = Weather.status.sunny
        if month in Weather.winter_months:
            self.temp = 40.0
        elif month in Weather.summer_months:
            self.temp = 85.0
        else:
            self.temp = 70.0

        # Load each Avro schema once and cache it on the class; the value
        # schema lives in `schemas/weather_value.json`.
        schema_dir = f"{Path(__file__).parents[0]}/schemas"
        if Weather.key_schema is None:
            with open(f"{schema_dir}/weather_key.json") as schema_file:
                Weather.key_schema = json.load(schema_file)
        if Weather.value_schema is None:
            with open(f"{schema_dir}/weather_value.json") as schema_file:
                Weather.value_schema = json.load(schema_file)

    def _set_weather(self, month):
        """Advance the season-biased random walk over temperature and status."""
        if month in Weather.winter_months:
            mode = -1.0
        elif month in Weather.summer_months:
            mode = 1.0
        else:
            mode = 0.0
        self.temp += min(max(-20.0, random.triangular(-10.0, 10.0, mode)), 100.0)
        self.status = random.choice(list(Weather.status))

    def run(self, month):
        """POST one weather event (Avro embedded format) to the REST Proxy."""
        self._set_weather(month)
        logger.info("weather kafka proxy integration")
        payload = {
            "key_schema": json.dumps(Weather.key_schema),
            "value_schema": json.dumps(Weather.value_schema),
            "records": [
                {
                    "key": {"timestamp": self.time_millis()},
                    "value": {
                        "temperature": self.temp,
                        "status": self.status.name,
                    },
                }
            ],
        }
        resp = requests.post(
            f"{Weather.rest_proxy_url}/topics/{self.topic_name}",
            headers={"Content-Type": "application/vnd.kafka.avro.v2+json"},
            data=json.dumps(payload),
        )
        resp.raise_for_status()
        logger.debug(
            "sent weather data to kafka, temp: %s, status: %s",
            self.temp,
            self.status.name,
        )
def make_enum(descriptor):
    """Build an IntEnum mirroring the (name, number) pairs of an enum
    descriptor's `values` entries, named after the descriptor."""
    members = [(member.name, member.number) for member in descriptor.values]
    return IntEnum(descriptor.name, members)
# This sample tests the type checker's handling of Enum. from enum import Enum, IntEnum TestEnum1 = Enum("TestEnum1", "A B C D") TestEnum2 = IntEnum("TestEnum2", "AA BB CC DD") class TestEnum3(Enum): A = 0 B = 1 C = 2 D = 3 a = TestEnum1["A"] aa = TestEnum1.A # This should generate an error because "Z" isn't # a valid member. z = TestEnum1.Z bb = TestEnum2.BB # This should generate an error because "A" isn't # a valid member. z = TestEnum2.A b = TestEnum3.B # This should generate an error because "Z" isn't
class Participation(PonyConfModel):
    """A user's participation in one conference (site): travel and
    accommodation needs, A/V requirements and recording preferences.

    There is exactly one row per (site, user) pair (see Meta).
    """

    # Video licences; stored as the member's integer value in `video_licence`.
    LICENCES = IntEnum(
        'Video licence',
        'CC-Zero CC-BY CC-BY-SA CC-BY-ND CC-BY-NC CC-BY-NC-SA CC-BY-NC-ND')

    # Accommodation choices for the `accommodation` field.
    ACCOMMODATION_NO = 0
    ACCOMMODATION_HOTEL = 1
    ACCOMMODATION_HOMESTAY = 2
    ACCOMMODATION_CHOICES = (
        (ACCOMMODATION_NO, _('No')),
        (ACCOMMODATION_HOTEL, _('Hotel')),
        (ACCOMMODATION_HOMESTAY, _('Homestay')),
    )

    site = models.ForeignKey(Site, on_delete=models.CASCADE)
    user = models.ForeignKey(User)

    # Transportation: whether the conference defrays it, dates and cities.
    # None on `need_transport` means "not answered yet".
    need_transport = models.NullBooleanField(
        verbose_name=_('Defray transportation?'), default=None)
    arrival = models.DateTimeField(blank=True, null=True)
    departure = models.DateTimeField(blank=True, null=True)
    transport = models.ManyToManyField(Transport,
                                       verbose_name=_("I want to travel by"),
                                       blank=True)
    transport_city_outward = models.CharField(blank=True,
                                              default='',
                                              max_length=256,
                                              verbose_name=_("Departure city"))
    transport_city_return = models.CharField(
        blank=True,
        default='',
        max_length=256,
        verbose_name=_("Return city"),
        help_text=_("If different from departure city"))
    transport_booked = models.BooleanField(default=False)

    # Accommodation: null means "not answered yet".
    accommodation = models.IntegerField(choices=ACCOMMODATION_CHOICES,
                                        verbose_name=_('Need accommodation?'),
                                        null=True,
                                        blank=True)
    accommodation_booked = models.BooleanField(default=False)

    constraints = models.TextField(blank=True, verbose_name=_("Constraints"))

    # A/V: video connectors the speaker can output, sound needs, recording
    # consent and the chosen video licence.
    connector = models.ManyToManyField(Connector,
                                       verbose_name=_("I can output"),
                                       blank=True)
    sound = models.BooleanField(_("I need sound"), default=False)
    videotaped = models.BooleanField(_("I'm ok to be recorded on video"),
                                     default=True)
    video_licence = models.IntegerField(choices=enum_to_choices(LICENCES),
                                        default=2,
                                        verbose_name=_("Video licence"))

    notes = models.TextField(
        default='',
        blank=True,
        verbose_name=_("Notes"),
        help_text=_('This field is only visible by organizers.'))
    orga = models.BooleanField(default=False)

    class Meta:
        # A User can participate only once in a Conference (= Site)
        unique_together = ('site', 'user')

    def __str__(self):
        return str(self.user.profile)

    def get_absolute_url(self):
        return reverse('show-speaker', kwargs={'username': self.user.username})

    def is_orga(self):
        # Organizer flag for this conference.
        return self.orga

    def is_staff(self):
        # Staff = organizer, or reviewer of at least one topic or track.
        return self.is_orga() or self.topic_set.exists(
        ) or self.track_set.exists()

    # The *_set properties scope the user's relations to this conference's site.
    @property
    def topic_set(self):
        return self.user.topic_set.filter(site=self.site)

    @property
    def track_set(self):
        return self.user.track_set.filter(site=self.site)

    @property
    def talk_set(self):
        return self.user.talk_set.filter(site=self.site)

    @property
    def accepted_talk_set(self):
        return self.talk_set.filter(accepted=True)

    @property
    def pending_talk_set(self):
        return self.talk_set.filter(accepted=None)

    @property
    def refused_talk_set(self):
        return self.talk_set.filter(accepted=False)

    @property
    def not_refused_talk_set(self):
        # accepted + pending
        return self.talk_set.exclude(accepted=False)

    # return True, False or None if availabilities have not been filled
    def is_available(self, start, end=None):
        if not self.availabilities.exists():
            return None
        for timeslot in self.availabilities.all():
            # Skip any slot that does not contain `start`.
            if start < timeslot.start:
                continue
            if start > timeslot.end:
                continue
            if end:
                # NOTE(review): assert used for input validation is stripped
                # under -O; consider raising ValueError instead.
                assert (start < end)
                # The whole [start, end] range must fit inside the slot.
                if end > timeslot.end:
                    continue
            return True
        return False
# Copyright (C) 2004-2020 CS GROUP - France. All Rights Reserved. # Author: Yoann Vandoorselaere <*****@*****.**> from __future__ import absolute_import, division, print_function, unicode_literals import operator import pkg_resources from enum import IntEnum from prewikka import auth, error, hookmanager, localization, log, resource, response, template, usergroup, view from prewikka.utils.viewhelpers import GridAjaxResponse, GridParameters from prewikka.database import use_transaction ReloadEnum = IntEnum("ReloadEnum", "none .commonlisting view window") class GenericListing(view.View): _template = template.PrewikkaTemplate(__name__, "templates/userlisting.mak") @hookmanager.register("HOOK_PLUGINS_LOAD") def _load(self): # Views are loaded before auth/session plugins if env.auth != env.session: # Do not declare routes with anonymous auth view.route("/settings/users", self.list_users, permissions=[N_("USER_MANAGEMENT")], menu=(N_("Access control"), N_("Users")), help="#users", parameters=GridParameters("users"))
class FtdiEeprom: """FTDI EEPROM management """ _PROPS = namedtuple('PROPS', 'size user dynoff') """Properties for each FTDI device release. * size is the size in bytes of the EEPROM storage area * user is the size in bytes of the user storage area, if any/supported * dynoff is the offset in EEPROM of the first bytes to store strings """ _PROPERTIES = { 0x0200: _PROPS(0, None, 0), # FT232AM 0x0400: _PROPS(256, 0x14, 0x94), # FT232BM 0x0500: _PROPS(256, 0x16, 0x96), # FT2232D 0x0600: _PROPS(128, None, 0x18), # FT232R 0x0700: _PROPS(256, 0x1A, 0x9A), # FT2232H 0x0800: _PROPS(256, 0x1A, 0x9A), # FT4232H 0x0900: _PROPS(256, 0x1A, 0xA0), # FT232H 0x1000: _PROPS(1024, 0x1A, 0xA0), # FT230X/FT231X/FT234X } """EEPROM properties.""" CBUS = IntEnum('CBus', 'TXDEN PWREN TXLED RXLED TXRXLED SLEEP CLK48 CLK24 CLK12 ' 'CLK6 GPIO BB_WR BB_RD', start=0) """Alternate features for legacy FT232R devices.""" CBUSH = IntEnum('CBusH', 'TRISTATE TXLED RXLED TXRXLED PWREN SLEEP DRIVE0 DRIVE1 ' 'GPIO TXDEN CLK30 CLK15 CLK7_5', start=0) """Alternate features for FT232H/FT2232H/FT4232H devices.""" CBUSX = IntEnum('CBusX', 'TRISTATE TXLED RXLED TXRXLED PWREN SLEEP DRIVE0 DRIVE1 ' 'GPIO TXDEN CLK24 CLK12 CLK6 BAT_DETECT BAT_NDETECT ' 'I2C_TXE I2C_RXF VBUS_SENSE BB_WR BB_RD TIMESTAMP AWAKE', start=0) """Alternate features for FT230X devices.""" UART_BITS = IntFlag('UartBits', 'TXD RXD RTS CTS DTR DSR DCD RI') """Inversion flags for FT232R and FT-X devices.""" CHANNEL = IntFlag('Channel', 'FIFO OPTO CPU FT128 RS485') """Alternate port mode.""" DRIVE = IntFlag('Drive', 'LOW HIGH SLOW_SLEW SCHMITT _10 _20 _40 PWRSAVE_DIS') """Driver options for I/O pins.""" CFG1 = IntFlag( 'Cfg1', 'CLK_IDLE_STATE DATA_LSB FLOW_CONTROL _08 ' 'HIGH_CURRENTDRIVE _20 _40 SUSPEND_DBUS7') """Configuration bits stored @ 0x01.""" VAR_STRINGS = ('manufacturer', 'product', 'serial') """EEPROM strings with variable length.""" def __init__(self): self.log = getLogger('pyftdi.eeprom') self._ftdi = Ftdi() self._eeprom = 
bytearray() self._dev_ver = 0 self._valid = False self._config = OrderedDict() self._dirty = set() self._modified = False def __getattr__(self, name): if name in self._config: return self._config[name] raise AttributeError('No such attribute: %s' % name) def open(self, device: Union[str, UsbDevice], ignore: bool = False) -> None: """Open a new connection to the FTDI USB device. :param device: the device URL or a USB device instance. :param ignore: whether to ignore existing content """ if self._ftdi.is_connected: raise FtdiError('Already open') if isinstance(device, str): self._ftdi.open_from_url(device) else: self._ftdi.open_from_device(device) if not ignore: self._eeprom = self._read_eeprom() if self._valid: self._decode_eeprom() def close(self) -> None: """Close the current connection to the FTDI USB device, """ if self._ftdi.is_connected: self._ftdi.close() self._eeprom = bytearray() self._dev_ver = 0 self._config.clear() def connect(self, ftdi: Ftdi, ignore: bool = False) -> None: """Connect a FTDI EEPROM to an existing Ftdi instance. :param ftdi: the Ftdi instance to use :param ignore: whether to ignore existing content """ self._ftdi = ftdi self._eeprom = bytearray() self._dev_ver = 0 self._valid = False self._config = OrderedDict() self._dirty = set() if not ignore: self._eeprom = self._read_eeprom() if self._valid: self._decode_eeprom() self._decode_eeprom() @property def device_version(self) -> int: """Report the version of the FTDI device. :return: the release """ if not self._dev_ver: if not self._ftdi.is_connected: raise FtdiError('Not connected') self._dev_ver = self._ftdi.device_version return self._dev_ver @property def size(self) -> int: """Report the EEPROM size. The physical EEPROM size may be greater. :return: the size in bytes """ try: eeprom_size = self._PROPERTIES[self.device_version].size except (AttributeError, KeyError): raise FtdiError('No EEPROM') return eeprom_size @property def data(self) -> bytes: """Returns the content of the EEPROM. 
:return: the content as bytes. """ self._sync_eeprom() return bytes(self._eeprom) @property def properties(self) -> Set[str]: """Returns the supported properties for the current device. :return: the supported properies. """ props = set(self._config.keys()) props -= set(self.VAR_STRINGS) return props @property def is_empty(self) -> bool: """Reports whether the EEPROM has been erased, or no EEPROM is connected to the FTDI EEPROM port. :return: True if no content is detected """ if len(self._eeprom) != self._PROPERTIES[self.device_version].size: return False for byte in self._eeprom: if byte != 0xFF: return False return True @property def cbus_pins(self) -> List[int]: """Return the list of CBUS pins configured as GPIO, if any :return: list of CBUS pins """ pins = [ pin for pin in range(0, 10) if self._config.get('cbus_func_%d' % pin, '') == 'GPIO' ] return pins @property def cbus_mask(self) -> int: """Return the bitmask of CBUS pins configured as GPIO. The bitmap contains four bits, ordered in natural order. :return: CBUS mask """ if self.device_version == 0x900: # FT232H cbus = [5, 6, 8, 9] else: cbus = list(range(4)) mask = 0 for bix, pin in enumerate(cbus): if self._config.get('cbus_func_%d' % pin, '') == 'GPIO': mask |= 1 << bix return mask def save_config(self, file: TextIO) -> None: """Save the EEPROM content as an INI stream. :param file: output stream """ self._sync_eeprom() cfg = ConfigParser() cfg.add_section('values') for name, value in self._config.items(): cfg.set('values', name, str(value)) cfg.add_section('raw') length = 16 for i in range(0, len(self._eeprom), length): chunk = self._eeprom[i:i + length] hexa = hexlify(chunk).decode() cfg.set('raw', '@%02x' % i, hexa) cfg.write(file) def load_config(self, file: TextIO, section: Optional[str] = None) -> None: """Load the EEPROM content from an INI stream. 
The ``section`` argument selects which section(s) to load: * ``raw`` only loads the raw data (hexabytes) from a previous dump * ``values`` only loads the values section, that is the human readable configuration. * ``all``, which is the default section selection, load the raw section, then overwrite part of it with any configuration value from the ``values`` section. This provides a handy way to use an existing dump from a valid EEPROM content, while customizing some parameters, such as the serial number. :param file: input stream :paran section: which section to load from the ini file """ self._sync_eeprom() cfg = ConfigParser() cfg.read_file(file) loaded = False sect = 'raw' if section in (None, 'all', sect, ''): if not cfg.has_section(sect): raise FtdiEepromError("No '%s' section in INI file" % sect) options = cfg.options(sect) try: for opt in options: if not opt.startswith('@'): raise ValueError() address = int(opt[1:], 16) hexval = cfg.get(sect, opt).strip() buf = unhexlify(hexval) self._eeprom[address:address + len(buf)] = buf except IndexError: raise ValueError("Invalid address in '%s'' section" % sect) except ValueError: raise ValueError("Invalid line in '%s'' section" % sect) self._compute_crc(self._eeprom, True) if not self._valid: raise ValueError('Loaded RAW section is invalid (CRC mismatch') loaded = True sect = 'values' vmap = { 'manufacturer': 'manufacturer_name', 'product': 'product_name', 'serial': 'serial_number' } if section in (None, 'all', sect, ''): if not cfg.has_section(sect): raise FtdiEepromError("No '%s' section in INI file" % sect) options = cfg.options(sect) for opt in options: value = cfg.get(sect, opt).strip() if opt in vmap: func = getattr(self, 'set_%s' % vmap[opt]) func(value) else: try: self.set_property(opt, value) except ValueError: self.log.warning( "Ignoring setting '%s': " "not implemented", opt) loaded = True if not loaded: raise ValueError('Invalid section: %s' % section) self._sync_eeprom() def set_serial_number(self, 
serial: str) -> None: """Define a new serial number.""" self._validate_string(serial) self._update_var_string('serial', serial) self.set_property('has_serial', True) def set_manufacturer_name(self, manufacturer: str) -> None: """Define a new manufacturer string.""" self._validate_string(manufacturer) self._update_var_string('manufacturer', manufacturer) def set_product_name(self, product: str) -> None: """Define a new product name.""" self._validate_string(product) self._update_var_string('product', product) def set_property(self, name: str, value: Union[str, int, bool], out: Optional[TextIO] = None) -> None: """Change the value of a stored property. :see: :py:meth:`properties` for a list of valid property names. Note that for now, only a small subset of properties can be changed. :param name: the property to change :param value: the new value (supported values depend on property) :param out: optional output stream to report hints """ mobj = match(r'cbus_func_(\d)', name) if mobj: if not isinstance(value, str): raise ValueError("'%s' should be specified as a string" % name) self._set_cbus_func(int(mobj.group(1)), value, out) self._dirty.add(name) return mobj = match(r'([abcd])bus_(drive|slow_slew|schmitt)', name) if mobj: self._set_bus_control(mobj.group(1), mobj.group(2), value, out) self._dirty.add(name) return hwords = { 'vendor_id': 0x02, 'product_id': 0x04, 'type': 0x06, 'usb_version': 0x0c } if name in hwords: val = to_int(value) if not 0 <= val <= 0xFFFF: raise ValueError('Invalid value for %s' % name) offset = hwords[name] self._eeprom[offset:offset + 2] = spack('<H', val) return confs = { 'remote_wakeup': (0, 5), 'self_powered': (0, 6), 'in_isochronous': (2, 0), 'out_isochronous': (2, 1), 'suspend_pull_down': (2, 2), 'has_serial': (2, 3), } if name in confs: val = to_bool(value, permissive=False, allow_int=True) offset, bit = confs[name] mask = 1 << bit if val: self._eeprom[0x08 + offset] |= mask else: self._eeprom[0x0a + offset] &= ~mask return if name == 
'power_max': val = to_int(value) >> 1 self._eeprom[0x09] = val return if name.startswith('invert_'): if not self.device_version in (0x600, 0x1000): raise ValueError('UART control line inversion not available ' 'with this device') self._set_invert(name[len('invert_'):], value, out) self._dirty.add(name) return if name in self.properties: raise NotImplementedError("Change to '%s' is not yet supported" % name) raise ValueError("Unknown property '%s'" % name) def erase(self) -> None: """Erase the whole EEPROM.""" self._eeprom = bytearray([0xFF] * self.size) self._config.clear() self._dirty.add('eeprom') def initialize(self) -> None: """Initialize the EEPROM with some default sensible values. """ dev_ver = self.device_version dev_name = Ftdi.DEVICE_NAMES[dev_ver] vid = Ftdi.FTDI_VENDOR pid = Ftdi.PRODUCT_IDS[vid][dev_name] self.set_manufacturer_name('FTDI') self.set_product_name(dev_name.upper()) sernum = ''.join([chr(randint(ord('A'), ord('Z'))) for _ in range(5)]) self.set_serial_number('FT%d%s' % (randint(0, 9), sernum)) self.set_property('vendor_id', vid) self.set_property('product_id', pid) self.set_property('type', dev_ver) self.set_property('usb_version', 0x200) self.set_property('power_max', 150) self._sync_eeprom() def sync(self) -> None: """Force re-evaluation of configuration after some changes. This API is not useful for regular usage, but might help for testing when the EEPROM does not go through a full save/load cycle """ self._sync_eeprom() def dump_config(self, file: Optional[BinaryIO] = None) -> None: """Dump the configuration to a file. :param file: the output file, default to stdout """ if self._dirty: self._decode_eeprom() for name, value in self._config.items(): print('%s: %s' % (name, value), file=file or sys.stdout) def commit(self, dry_run: bool = True) -> bool: """Commit any changes to the EEPROM. 
:param dry_run: log what should be written, do not actually change the EEPROM content :return: True if some changes have been committed to the EEPROM """ self._sync_eeprom() if not self._modified: self.log.warning('No change to commit, EEPROM not modified') return False self._ftdi.overwrite_eeprom(self._eeprom, dry_run=dry_run) if not dry_run: eeprom = self._read_eeprom() if eeprom != self._eeprom: pos = 0 for pos, (old, new) in enumerate(zip(self._eeprom, eeprom)): if old != new: break pos &= ~0x1 raise FtdiEepromError('Write to EEPROM failed @ 0x%02x' % pos) self._modified = False return dry_run def reset_device(self): """Execute a USB device reset.""" self._ftdi.reset(usb_reset=True) @classmethod def _validate_string(cls, string): for invchr in ':/': # do not accept characters which are interpreted as URL seperators if invchr in string: raise ValueError("Invalid character '%s' in string" % invchr) def _update_var_string(self, name: str, value: str) -> None: if name not in self.VAR_STRINGS: raise ValueError('%s is not a variable string' % name) try: if value == self._config[name]: return except KeyError: # not yet defined pass self._config[name] = value self._dirty.add(name) def _generate_var_strings(self, fill=True) -> None: stream = bytearray() dynpos = self._PROPERTIES[self.device_version].dynoff data_pos = dynpos tbl_pos = 0x0e for name in self.VAR_STRINGS: try: ustr = self._config[name].encode('utf-16le') except KeyError: ustr = '' length = len(ustr) + 2 stream.append(length) stream.append(0x03) # string descriptor stream.extend(ustr) self._eeprom[tbl_pos] = data_pos tbl_pos += 1 self._eeprom[tbl_pos] = length tbl_pos += 1 data_pos += length self._eeprom[dynpos:dynpos + len(stream)] = stream crc_size = scalc('<H') if fill: mtp = self._ftdi.device_version == 0x1000 crc_pos = 0x100 if mtp else len(self._eeprom) crc_pos -= crc_size rem = len(self._eeprom) - (dynpos + len(stream)) - crc_size self._eeprom[dynpos + len(stream):crc_pos] = bytes(rem) def 
_sync_eeprom(self): if not self._dirty: self.log.debug('No change detected for EEPROM content') return if any([x in self._dirty for x in self.VAR_STRINGS]): self._generate_var_strings() for varstr in self.VAR_STRINGS: self._dirty.discard(varstr) self._update_crc() self._decode_eeprom() self._dirty.clear() self._modified = True self.log.debug('EEPROM content regenerated (not yet committed)') def _compute_crc(self, eeprom: Union[bytes, bytearray], check=False): mtp = self._ftdi.device_version == 0x1000 crc_pos = 0x100 if mtp else len(eeprom) crc_size = scalc('<H') if not check: # check mode: add CRC itself, so that result should be zero crc_pos -= crc_size crc = self._ftdi.calc_eeprom_checksum(eeprom[:crc_pos]) if check: self._valid = not bool(crc) if not self._valid: self.log.debug('CRC is now 0x%04x', crc) else: self.log.debug('CRC OK') return crc, crc_pos, crc_size def _update_crc(self): crc, crc_pos, crc_size = self._compute_crc(self._eeprom, False) self._eeprom[crc_pos:crc_pos + crc_size] = spack('<H', crc) def _read_eeprom(self) -> bytes: buf = self._ftdi.read_eeprom(0, eeprom_size=self.size) eeprom = bytearray(buf) crc = self._compute_crc(eeprom, True)[0] if crc: if self.is_empty: self.log.info('No EEPROM or EEPROM erased') else: self.log.error('Invalid CRC or EEPROM content') return eeprom def _decode_eeprom(self): cfg = self._config cfg.clear() cfg['vendor_id'] = Hex4Int(sunpack('<H', self._eeprom[0x02:0x04])[0]) cfg['product_id'] = Hex4Int(sunpack('<H', self._eeprom[0x04:0x06])[0]) cfg['type'] = Hex4Int(sunpack('<H', self._eeprom[0x06:0x08])[0]) power_supply, power_max, conf = sunpack('<3B', self._eeprom[0x08:0x0b]) cfg['self_powered'] = bool(power_supply & (1 << 6)) cfg['remote_wakeup'] = bool(power_supply & (1 << 5)) cfg['power_max'] = power_max << 1 cfg['has_serial'] = bool(conf & (1 << 3)) cfg['suspend_pull_down'] = bool(conf & (1 << 2)) cfg['out_isochronous'] = bool(conf & (1 << 1)) cfg['in_isochronous'] = bool(conf & (1 << 0)) cfg['usb_version'] = 
Hex4Int(sunpack('<H', self._eeprom[0x0c:0x0e])[0]) cfg['manufacturer'] = self._decode_string(0x0e) cfg['product'] = self._decode_string(0x10) cfg['serial'] = self._decode_string(0x12) name = None try: name = Ftdi.DEVICE_NAMES[cfg['type']].replace('-', '') if name.startswith('ft'): name = name[2:] func = getattr(self, '_decode_%s' % name) except (KeyError, AttributeError): self.log.warning('No EEPROM decoder for device %s', name or '?') else: func() def _decode_string(self, offset): str_offset, str_size = sunpack('<BB', self._eeprom[offset:offset + 2]) if str_size: str_offset &= self.size - 1 str_size -= scalc('<H') str_offset += scalc('<H') manufacturer = self._eeprom[str_offset:str_offset + str_size] return manufacturer.decode('utf16', errors='ignore') return '' def _set_cbus_func(self, cpin: int, value: str, out: Optional[TextIO]) -> None: cmap = { 0x600: (self.CBUS, 5, 0x14, 4), # FT232R 0x900: (self.CBUSH, 10, 0x18, 4), # FT232H 0x1000: (self.CBUSX, 4, 0x1A, 8) } # FT230X/FT231X/FT234X try: cbus, count, offset, width = cmap[self.device_version] except KeyError: raise ValueError('This property is not supported on this device') pin_filter = getattr(self, '_filter_cbus_func_x%x' % self.device_version, None) if value == '?' 
and out: items = {item.name for item in cbus} if pin_filter: items = {val for val in items if pin_filter(cpin, val)} print(', '.join(sorted(items)) if items else '(none)', file=out) return if not 0 <= cpin < count: raise ValueError("Unsupported CBUS pin '%d'" % cpin) try: code = cbus[value.upper()].value except KeyError: raise ValueError("CBUS pin %d does not have function '%s'" % (cpin, value)) if pin_filter and not pin_filter(cpin, value.upper()): raise ValueError("Unsupported CBUS function '%s' for pin '%d'" % (value, cpin)) addr = offset + (cpin * width) // 8 if width == 4: bitoff = 4 if cpin & 0x1 else 0 mask = 0x0F << bitoff else: bitoff = 0 mask = 0xFF old = self._eeprom[addr] self._eeprom[addr] &= ~mask self._eeprom[addr] |= code << bitoff self.log.debug('Cpin %d, addr 0x%02x, value 0x%02x->0x%02x', cpin, addr, old, self._eeprom[addr]) @classmethod def _filter_cbus_func_x900(cls, cpin: int, value: str): if cpin == 7: # nothing can be assigned to ACBUS7 return False if value in 'TRISTATE TXLED RXLED TXRXLED PWREN SLEEP DRIVE0'.split(): # any pin can be assigned these functions return True if cpin in (5, 6, 8, 9): # any function can be assigned to ACBUS5, ACBUS6, ACBUS8, ACBUS9 return True if cpin == 0: return value != 'GPIO' return False @classmethod def _filter_cbus_func_x600(cls, cpin: int, value: str): if value == 'BB_WR': # this signal is only available on CBUS0, CBUS1 return cpin < 2 return True def _set_bus_control(self, bus: str, control: str, value: Union[str, int, bool], out: Optional[TextIO]) -> None: if self.device_version == 0x1000: self._set_bus_control_230x(bus, control, value, out) return # for now, only support FT-X devices raise ValueError('Bus control not implemented for this device') def _set_bus_control_230x(self, bus: str, control: str, value: Union[str, int, bool], out: Optional[TextIO]) -> None: if bus not in 'cd': raise ValueError('Invalid bus: %s' % bus) try: if control == 'drive': candidates = (4, 8, 12, 16) if value == '?' 
and out: print(', '.join([str(v) for v in candidates]), file=out) return value = int(value) if value not in candidates: raise ValueError('Invalid drive current: %d mA' % value) value //= 4 value -= 1 elif control in ('slow_slew', 'schmitt'): if value == '?' and out: print('off, on', file=out) return value = int(to_bool(value)) else: raise ValueError('Unsupported control: %s' % control) except (ValueError, TypeError): raise ValueError('Invalid %s value: %s' % (control, value)) config = self._eeprom[0x0c] if bus == 'd': conf = config & 0x0F config &= 0xF0 cshift = 0 else: conf = config >> 4 config &= 0x0F cshift = 4 if control == 'drive': conf &= 0b1100 conf |= value elif control == 'slow_slew': conf &= 0b1011 conf |= value << 2 elif control == 'schmitt': conf &= 0b0111 conf |= value << 3 else: raise RuntimeError('Internal error') config |= conf << cshift self._eeprom[0x0c] = config def _set_invert(self, name, value, out): if value == '?' and out: print('off, on', file=out) return if name.upper() not in self.UART_BITS.__members__: raise ValueError('Unknown property: %s' % name) value = to_bool(value, permissive=False) code = getattr(self.UART_BITS, name.upper()) invert = self._eeprom[0x0B] if value: invert |= code else: invert &= ~code self._eeprom[0x0B] = invert def _decode_x(self): # FT-X series cfg = self._config misc, = sunpack('<H', self._eeprom[0x00:0x02]) cfg['channel_a_driver'] = 'VCP' if misc & (1 << 7) else 'D2XX' for bit in self.UART_BITS: value = self._eeprom[0x0B] cfg['invert_%s' % self.UART_BITS(bit).name] = bool(value & bit) max_drive = self.DRIVE.LOW | self.DRIVE.HIGH value = self._eeprom[0x0c] for grp in range(2): conf = value & 0xF bus = 'c' if grp else 'd' cfg['%sbus_drive' % bus] = 4 * (1 + (conf & max_drive)) cfg['%sbus_schmitt' % bus] = bool(conf & self.DRIVE.SCHMITT) cfg['%sbus_slow_slew' % bus] = bool(conf & self.DRIVE.SLOW_SLEW) value >>= 4 for bix in range(4): value = self._eeprom[0x1A + bix] try: cfg['cbus_func_%d' % bix] = 
self.CBUSX(value).name except ValueError: pass cfg['chip'] = Hex2Int(self._eeprom[0x1E]) def _decode_232h(self): cfg = self._config cfg0, cfg1 = self._eeprom[0x00], self._eeprom[0x01] cfg['channel_a_type'] = cfg0 & 0x0F cfg['channel_a_driver'] = 'VCP' if (cfg0 & (1 << 4)) else 'D2XX' cfg['clock_polarity'] = 'high' if (cfg1 & self.CFG1.CLK_IDLE_STATE) \ else 'low' cfg['lsb_data'] = bool(cfg1 & self.CFG1.DATA_LSB) cfg['flow_control'] = 'on' if (cfg1 & self.CFG1.FLOW_CONTROL) \ else 'off' cfg['powersave'] = bool(cfg1 & self.DRIVE.PWRSAVE_DIS) max_drive = self.DRIVE.LOW | self.DRIVE.HIGH for grp in range(2): conf = self._eeprom[0x0c + grp] cfg['group_%d_drive' % grp] = bool((conf & max_drive) == max_drive) cfg['group_%d_schmitt' % grp] = conf & self.DRIVE.SCHMITT cfg['group_%d_slew' % grp] = conf & self.DRIVE.SLOW_SLEW for bix in range(5): value = self._eeprom[0x18 + bix] low, high = value & 0x0F, value >> 4 try: cfg['cbus_func_%d' % ((2 * bix) + 0)] = self.CBUSH(low).name except ValueError: pass try: cfg['cbus_func_%d' % ((2 * bix) + 1)] = self.CBUSH(high).name except ValueError: pass cfg['chip'] = Hex2Int(self._eeprom[0x1E]) def _decode_232r(self): cfg = self._config cfg0 = self._eeprom[0x00] cfg['channel_a_driver'] = 'VCP' if (~cfg0 & (1 << 3)) else '' cfg['high_current'] = bool(~cfg0 & (1 << 2)) cfg['external_oscillator'] = cfg0 & 0x02 for bit in self.UART_BITS: value = self._eeprom[0x0B] cfg['invert_%s' % self.UART_BITS(bit).name] = bool(value & bit) bix = 0 while True: value = self._eeprom[0x14 + bix] low, high = value & 0x0F, value >> 4 try: cfg['cbus_func_%d' % ((2 * bix) + 0)] = self.CBUS(low).name except ValueError: pass if bix == 2: break try: cfg['cbus_func_%d' % ((2 * bix) + 1)] = self.CBUS(high).name except ValueError: pass bix += 1 def _decode_2232h(self): cfg = self._config self._decode_x232h(cfg) cfg0, cfg1 = self._eeprom[0x00], self._eeprom[0x01] cfg['channel_a_type'] = self.CHANNEL(cfg0 & 0x7).name or 'UART' cfg['channel_b_type'] = self.CHANNEL(cfg1 
& 0x7).name or 'UART' cfg['suspend_dbus7'] = cfg1 & self.CFG1.SUSPEND_DBUS7 def _decode_4232h(self): cfg = self._config self._decode_x232h(cfg) cfg0, cfg1 = self._eeprom[0x00], self._eeprom[0x01] cfg['channel_c_driver'] = 'VCP' if ((cfg0 >> 4) & (1 << 3)) else 'D2XX' cfg['channel_d_driver'] = 'VCP' if ((cfg1 >> 4) & (1 << 3)) else 'D2XX' conf = self._eeprom[0x0B] rs485 = self.CHANNEL.RS485 for chix in range(4): cfg['channel_%x_rs485' % (0xa + chix)] = bool(conf & (rs485 << chix)) def _decode_x232h(self, cfg): # common code for2232h and 4232h cfg0, cfg1 = self._eeprom[0x00], self._eeprom[0x01] cfg['channel_a_driver'] = 'VCP' if (cfg0 & (1 << 3)) else 'D2XX' cfg['channel_b_driver'] = 'VCP' if (cfg1 & (1 << 3)) else 'D2XX' max_drive = self.DRIVE.LOW | self.DRIVE.HIGH for bix in range(4): if not bix & 1: val = self._eeprom[0x0c + bix // 2] else: val >>= 4 cfg['group_%d_drive' % bix] = bool(val & max_drive) cfg['group_%d_schmitt' % bix] = bool(val & self.DRIVE.SCHMITT) cfg['group_%d_slew' % bix] = bool(val & self.DRIVE.SLOW_SLEW) cfg['chip'] = Hex2Int(self._eeprom[0x18])
class Comparable(Protocol):
    """Structural type for anything that supports ordering comparisons.

    Implementors supply ``__eq__`` and ``__lt__``; the remaining rich
    comparisons are derived from those two.
    """

    def __eq__(self, other: Any) -> bool:
        ...

    def __lt__(self: C, other: C) -> bool:
        ...

    def __gt__(self: C, other: C) -> bool:
        # Strictly greater: not equal and not less-than.
        return self != other and not self < other

    def __le__(self: C, other: C) -> bool:
        # Less-or-equal derived from the two primitives.
        return self == other or self < other

    def __ge__(self: C, other: C) -> bool:
        # Greater-or-equal is simply "not strictly less".
        return not (self < other)
def str_to_gene(s: str) -> Gene:
    """Convert a nucleotide string into a gene of 3-character codons.

    Characters are grouped left to right into triplets; any trailing
    one or two characters that do not form a full codon are dropped.
    """
    return [Codon(*s[i:i + 3]) for i in range(0, len(s) - 2, 3)]
class TimeSimulation:
    """Drives the CTA train simulation on an accelerated clock.

    Each real-time sleep of ``sleep_seconds`` advances the simulated
    clock by ``time_step`` (by default, ``sleep_seconds`` *minutes* —
    i.e. the simulation runs 60x faster than wall time).
    """

    # Weekday indexes, mon == 0 .. sun == 6 (start=0 overrides the
    # enum functional API's default of 1).
    weekdays = IntEnum('weekdays', 'mon tue wed thu fri sat sun', start=0)
    # Default interval between trains, shared by every schedule entry.
    ten_min_frequency = datetime.timedelta(minutes=10)

    def __init__(self, sleep_seconds=5, time_step=None, schedule=None):
        """Initializes the time simulation.

        :param sleep_seconds: real seconds to sleep between iterations
        :param time_step: simulated time advanced per iteration
            (defaults to ``sleep_seconds`` minutes)
        :param schedule: per-weekday {start_hour: frequency} mapping;
            defaults to a ten-minute frequency all day, every day
        """
        self.sleep_seconds = sleep_seconds
        self.time_step = time_step
        if self.time_step is None:
            # One simulated minute per real second of sleeping.
            self.time_step = datetime.timedelta(minutes=self.sleep_seconds)
        # Read data from disk; stations are ordered along the line.
        data = utils.get_data_path('cta_stations.csv')
        self.raw_df = pd.read_csv(data).sort_values('order')
        # Define the train schedule (same for all trains)
        self.schedule = schedule
        if schedule is None:
            # Key 0 means "from hour 0 onward" — a flat all-day schedule.
            self.schedule = {
                TimeSimulation.weekdays.mon: {
                    0: TimeSimulation.ten_min_frequency
                },
                TimeSimulation.weekdays.tue: {
                    0: TimeSimulation.ten_min_frequency
                },
                TimeSimulation.weekdays.wed: {
                    0: TimeSimulation.ten_min_frequency
                },
                TimeSimulation.weekdays.thu: {
                    0: TimeSimulation.ten_min_frequency
                },
                TimeSimulation.weekdays.fri: {
                    0: TimeSimulation.ten_min_frequency
                },
                TimeSimulation.weekdays.sat: {
                    0: TimeSimulation.ten_min_frequency
                },
                TimeSimulation.weekdays.sun: {
                    0: TimeSimulation.ten_min_frequency
                },
            }
        # One Line per color; the boolean columns select the stations
        # served by that color (assumes 'blue'/'red'/'green' are boolean
        # columns in cta_stations.csv — TODO confirm).
        self.train_lines = [
            Line(Line.colors.blue, self.raw_df[self.raw_df['blue']]),
            Line(Line.colors.red, self.raw_df[self.raw_df['red']]),
            Line(Line.colors.green, self.raw_df[self.raw_df['green']]),
        ]

    def run(self):
        """Run the simulation loop until interrupted with Ctrl+C."""
        # Start at (naive UTC) midnight of the current day.
        curr_time = datetime.datetime.utcnow().replace(hour=0,
                                                       minute=0,
                                                       second=0,
                                                       microsecond=0)
        logger.info('Beginning simulation, press Ctrl+C to exit at any time')
        logger.info('loading kafka connect jdbc source connector')
        configure_connector()
        logger.info('beginning cta train simulation')
        weather = Weather(curr_time.month)
        try:
            while True:
                logger.debug('simulation running: %s', curr_time.isoformat())
                # Send weather on the top of the hour
                if curr_time.minute == 0:
                    weather.run(curr_time.month)
                # Advance every train line by one simulated step.
                _ = [
                    line.run(curr_time, self.time_step)
                    for line in self.train_lines
                ]
                curr_time = curr_time + self.time_step
                time.sleep(self.sleep_seconds)
        except KeyboardInterrupt:
            # Graceful shutdown: close every line's producers.
            logger.info('shutting down')
            _ = [line.close() for line in self.train_lines]
# See the License for the specific language governing permissions and # limitations under the License. from enum import IntEnum from abc import ABC, abstractmethod from .mycroft_skill import MycroftSkill class CQSMatchLevel(IntEnum): EXACT = 1 # Skill could find a specific answer for the question CATEGORY = 2 # Skill could find an answer from a category in the query GENERAL = 3 # The query could be processed as a general quer # Copy of CQSMatchLevel to use if the skill returns visual media CQSVisualMatchLevel = IntEnum('CQSVisualMatchLevel', [e.name for e in CQSMatchLevel]) def is_CQSVisualMatchLevel(match_level): return isinstance(match_level, type(CQSVisualMatchLevel.EXACT)) VISUAL_DEVICES = ['mycroft_mark_2'] def handles_visuals(platform): return platform in VISUAL_DEVICES class CommonQuerySkill(MycroftSkill, ABC): """Question answering skills should be based on this class.
# ------------------------------------------------------------------------
# Indexing enums

# Epidemiological compartments; the trailing 'NUM' sentinel equals the
# number of real compartments and is used as an array dimension.
compartments = ['S', 'E1', 'E2', 'E3', 'I', 'H', 'C', 'D', 'R', 'T', 'NUM']
Sub = IntEnum('Sub', compartments, start=0)  # Sub.S == 0 ... Sub.NUM == 10
# Age buckets (presumably decades: _0 = 0-9, ... _8 = 80+ — TODO confirm),
# again with a 'NUM' count sentinel.
groups = ['_0', '_1', '_2', '_3', '_4', '_5', '_6', '_7', '_8', 'NUM']
Age = IntEnum('Age', groups, start=0)  # Age._0 == 0 ... Age.NUM == 9

# ------------------------------------------------------------------------
# Default parameters
# NOTE(review): values look like inverse mean durations (per-day rates),
# e.g. 1/3.0 ~ 3-day latency — confirm units against the model equations.
DefaultRates = {
    "latency": 1 / 3.0,
    "logR0": 1.0,
    "infection": 1 / 3.0,
    "hospital": 1 / 7.0,
    "critical": 1 / 14,
    "imports": .1,
    "efficacy": 0.5
}
elapsed_time = timedelta(seconds=0) #count = 0 # for debug mem_usage_outfile = 'summary-top-mem-usage.csv' proc_status_outfile = 'summary-top-proc-status.csv' mem_usage = MemUsage(mem_usage_outfile) proc_status = ProcStatus(proc_status_outfile) mem_usage.write_headers() proc_status.write_headers() is_first = True is_matlab_running = False RunningState = IntEnum('RunningState', 'NONE START_RUNNING RUNNING JUST_FINISHED FINISHED') running_state = RunningState.NONE for line in open(topfile, 'r'): # print line, line = line.rstrip() if running_state == RunningState.FINISHED: break if re.search('^top -', line): m = comp_time_load_averages.search(line) if m == None: print('load_avr regex is wrong.') sys.exit(1)
'WeaponID', 'WeaponProficiency', 'WeaponSlot', 'WeaponSound', 'WeaponState', 'WeaponType', ) # ============================================================================= # >> GLOBAL VARIABLES # ============================================================================= # Get the base path... _path = SP_DATA_PATH / 'weapons' / 'constants' # Get the MuzzleFlashStyle enumerator... MuzzleFlashStyle = IntEnum('MuzzleFlashStyle', GameConfigObj(_path / 'MuzzleFlashStyle.ini')) # Get the WeaponID enumerator... WeaponID = IntEnum('WeaponID', GameConfigObj(_path / 'WeaponID.ini')) # Get the WeaponType enumerator... WeaponType = IntEnum('WeaponType', GameConfigObj(_path / 'WeaponType.ini')) # Get the WeaponSlot enumerator... WeaponSlot = IntEnum('WeaponSlot', GameConfigObj(_path / 'WeaponSlot.ini')) # ============================================================================= # >> ENUMERATORS # ============================================================================= class ItemFlags(IntFlag):
from enum import IntEnum
from typing import Tuple, List

# The four DNA nucleotides (enum functional API: A == 1 ... T == 4).
Nucleotide: IntEnum = IntEnum('Nucleotide', ('A', 'C', 'G', 'T'))
# NOTE: "Condon" is a long-standing misspelling of "codon"; kept because
# it is this module's public alias name.
Condon = Tuple[Nucleotide, Nucleotide, Nucleotide]
Gene = List[Condon]

gene_str: str = 'ACGTGGTC'


def string_to_gene(s: str) -> Gene:
    """Convert a nucleotide string into a gene (list of 3-nucleotide codons).

    Characters are consumed in triplets; a trailing partial triplet is
    discarded. An empty string yields an empty gene.

    :param s: string of nucleotide letters ('A', 'C', 'G', 'T')
    :return: list of (Nucleotide, Nucleotide, Nucleotide) tuples
    :raises KeyError: if *s* contains a character other than A/C/G/T
    """
    gene: Gene = []
    for i in range(0, len(s), 3):
        if (i + 2) >= len(s):
            # Incomplete trailing codon: stop and return what we have.
            return gene
        condon: Condon = (Nucleotide[s[i]], Nucleotide[s[i + 1]],
                          Nucleotide[s[i + 2]])
        gene.append(condon)
    # BUG FIX: previously `return condon`, which returned only the last
    # codon for lengths divisible by 3 (and raised NameError for "").
    # The annotated return type — and the sibling implementations of this
    # function elsewhere in the project — show the whole gene is intended.
    return gene


string_to_gene(gene_str)
# Resizing strategies. Members number from 1 (enum functional API default).
Resize = IntEnum(
    'Resize',
    (
        'NONE',  # Nothing!
        'CENTRAL_CROP',  # Crop (and pad if necessary).
        'PAD_AND_RESIZE',  # Pad, and resize to output shape.
        'WARP_RESIZE'))  # Warp resize.

# NOTE(review): mid-module import — config must be importable without side
# effects at this point; consider moving to the top-of-file import block.
import config

# VGG mean parameters (per-channel means subtracted during preprocessing;
# values come from the project config, not hard-coded ImageNet constants).
_R_MEAN = config.r_mean
_G_MEAN = config.g_mean
_B_MEAN = config.b_mean

# Some training pre-processing parameters.
MAX_EXPAND_SCALE = config.max_expand_scale
BBOX_CROP_OVERLAP = config.bbox_crop_overlap  # Minimum overlap to keep a bbox after cropping.
MIN_OBJECT_COVERED = config.min_object_covered
CROP_ASPECT_RATIO_RANGE = config.crop_aspect_ratio_range  # Distortion ratio during cropping.
class Weather(Producer):
    """Simulated weather model that emits readings to Kafka via REST Proxy."""

    # Closed set of weather conditions; start=0 so member values begin at 0.
    status = IntEnum("status",
                     "sunny partly_cloudy cloudy windy precipitation",
                     start=0)

    rest_proxy_url = "http://localhost:8082"

    # Avro schemas, loaded once on first instantiation and shared class-wide.
    key_schema = None
    value_schema = None

    # Month indices (0-based) used to pick a seasonal starting temperature.
    winter_months = set((0, 1, 2, 3, 10, 11))
    summer_months = set((6, 7, 8))

    def __init__(self, month):
        # BUG FIX: load the schemas BEFORE calling super().__init__; the
        # original passed Weather.key_schema/value_schema while they were
        # still None on the first instantiation.
        if Weather.key_schema is None:
            with open(f"{Path(__file__).parents[0]}/schemas/weather_key.json"
                      ) as f:
                Weather.key_schema = json.load(f)

        if Weather.value_schema is None:
            with open(f"{Path(__file__).parents[0]}/schemas/weather_value.json"
                      ) as f:
                Weather.value_schema = json.load(f)

        # TOPIC_BASE_NAME is assumed to come from Producer — TODO confirm.
        topic_name = f"{self.TOPIC_BASE_NAME}.weather.v1"
        super().__init__(
            topic_name,
            key_schema=Weather.key_schema,
            value_schema=Weather.value_schema,
            num_partitions=1,  # TODO: find out optimal partitions value
            num_replicas=1,  # TODO: find out optimal replicas value
        )

        # NOTE: this instance attribute shadows the class-level `status`
        # enum; the enum itself stays reachable as Weather.status.
        self.status = Weather.status.sunny
        self.temp = 70.0
        if month in Weather.winter_months:
            self.temp = 40.0
        elif month in Weather.summer_months:
            self.temp = 85.0

    def _set_weather(self, month):
        """Advance the simulated weather state for the given month."""
        # Seasonal bias for the triangular random step.
        mode = 0.0
        if month in Weather.winter_months:
            mode = -1.0
        elif month in Weather.summer_months:
            mode = 1.0

        # NOTE(review): min/max clamp the *step* to [-20.0, 100.0], not the
        # resulting temperature — confirm that is the intended behavior.
        self.temp += min(max(-20.0, random.triangular(-10.0, 10.0, mode)),
                         100.0)
        self.status = random.choice(list(Weather.status))

    def run(self, month):
        """Generate one weather reading and POST it to the Kafka REST Proxy.

        Terminates the process (SystemExit) if the proxy rejects the request.
        """
        self._set_weather(month)
        resp = requests.post(
            f"{Weather.rest_proxy_url}/topics/{self.topic_name}",
            headers={"Content-Type": "application/vnd.kafka.avro.v2+json"},
            data=json.dumps({
                "key_schema": json.dumps(Weather.key_schema),
                "value_schema": json.dumps(Weather.value_schema),
                "records": [{
                    "key": {
                        "timestamp": self.time_millis()
                    },
                    "value": {
                        "temperature": self.temp,
                        "status": self.status.name
                    }
                }]
            }))
        try:
            resp.raise_for_status()
        except Exception:
            # BUG FIX: "Proxi" -> "Proxy" in the message; raise SystemExit
            # directly instead of the site-module-dependent exit() helper.
            logger.error(
                f"Error getting response from Kafka REST Proxy: {json.dumps(resp.json(), indent=2)}"
            )
            raise SystemExit(1)
        logger.debug(
            "sent weather data to kafka, temp: %s, status: %s",
            self.temp,
            self.status.name,
        )
self.rank, len(data))) if self.args.echo: df = pd.DataFrame(data, columns=['SMILE']) else: mols = [Chem.MolFromSmiles(smi) for smi in data] df = self.calc.pandas(mols, quiet=True) df.fill_missing(inplace=True) df.insert(0, 'SMILE', data) if self.args.verbose: print('rank {} generated data: {}'.format(self.rank, len(df))) return df Tags = IntEnum('Tags', 'CONTINUE EXIT') def parse_arguments(): parser = argparse.ArgumentParser() parser.add_argument('--batch_size', type=int, default=10, help='Batch size') parser.add_argument('--smiles', type=str, default='smiles.txt', help='Input Smile path') parser.add_argument('--format', default='hdf5', choices=['csv', 'tsv', 'hdf5'],
class Line:
    '''
    Contains Chicago Transit Authority (CTA) Elevated Loop Train ("L") Station Data
    '''
    # Closed set of line colors; start=0 so member values begin at 0.
    colors = IntEnum('colors', 'blue green red', start=0)
    # Each line runs in two directions ("a" and "b").
    num_directions = 2

    def __init__(self, color, station_data, num_trains=10):
        # color: a Line.colors member; station_data: a DataFrame of station
        # rows for this line (presumably — confirm against caller).
        self.color = color
        self.num_trains = num_trains
        self.stations = self._build_line_data(station_data)
        # We must always discount the terminal station at the end of each direction
        self.num_stations = len(self.stations) - 1
        self.trains = self._build_trains()

    def _build_line_data(self, station_df):
        '''
        Constructs all stations on the line
        '''
        stations = station_df['station_name'].unique()
        station_data = station_df[station_df['station_name'] == stations[0]]
        line = [
            Station(station_data['station_id'].unique()[0], stations[0],
                    self.color)
        ]
        prev_station = line[0]
        # Link each new station to its predecessor, forming a doubly-ended
        # chain via the Station constructor and the dir_b back-pointer.
        for station in stations[1:]:
            station_data = station_df[station_df['station_name'] == station]
            new_station = Station(
                station_data['station_id'].unique()[0],
                station,
                self.color,
                prev_station,
            )
            prev_station.dir_b = new_station
            prev_station = new_station
            line.append(new_station)
        return line

    def _build_trains(self):
        '''
        Constructs and assigns train objects to stations
        '''
        trains = []
        curr_loc = 0
        b_dir = True
        # Spread the trains evenly along the line, alternating direction as
        # _get_next_idx wraps them around the terminal stations.
        for train_id in range(self.num_trains):
            tid = str(train_id).zfill(3)
            train = Train(f'{self.color.name[0].upper()}L{tid}',
                          Train.status.in_service)
            trains.append(train)
            if b_dir:
                self.stations[curr_loc].arrive_b(train, None, None)
            else:
                self.stations[curr_loc].arrive_a(train, None, None)
            curr_loc, b_dir = self._get_next_idx(curr_loc, b_dir)
        return trains

    def run(self, timestamp, time_step):
        '''
        Advances trains between stations in the simulation.
        Runs turnstiles
        '''
        self._advance_turnstiles(timestamp, time_step)
        self._advance_trains()

    def close(self):
        '''
        Called to stop the simulation
        '''
        _ = [station.close() for station in self.stations]

    def _advance_turnstiles(self, timestamp, time_step):
        '''
        Advances the turnstiles in the simulation
        '''
        _ = [
            station.turnstile.run(timestamp, time_step)
            for station in self.stations
        ]

    def _advance_trains(self):
        '''
        Advances trains between stations in the simulation
        '''
        # Find the first b train
        curr_train, curr_index, b_direction = self._next_train()
        self.stations[curr_index].b_train = None
        trains_advanced = 0
        while trains_advanced < self.num_trains - 1:
            # The train departs the current station
            if b_direction is True:
                self.stations[curr_index].b_train = None
            else:
                self.stations[curr_index].a_train = None
            prev_station = self.stations[curr_index].station_id
            prev_dir = 'b' if b_direction else 'a'
            # Advance this train to the next station
            curr_index, b_direction = self._get_next_idx(curr_index,
                                                         b_direction,
                                                         step_size=1)
            if b_direction is True:
                self.stations[curr_index].arrive_b(curr_train, prev_station,
                                                   prev_dir)
            else:
                self.stations[curr_index].arrive_a(curr_train, prev_station,
                                                   prev_dir)
            # Find the next train to advance
            move = 1 if b_direction else -1
            next_train, curr_index, b_direction = self._next_train(
                curr_index + move, b_direction)
            # NOTE(review): the two assignments below are immediately
            # overwritten by `curr_train = next_train`, so the station
            # lookups are dead code — confirm whether this was intentional.
            if b_direction is True:
                curr_train = self.stations[curr_index].b_train
            else:
                curr_train = self.stations[curr_index].a_train
            curr_train = next_train
            trains_advanced += 1
        # The last train departs the current station
        if b_direction is True:
            self.stations[curr_index].b_train = None
        else:
            self.stations[curr_index].a_train = None
        # Advance last train to the next station
        prev_station = self.stations[curr_index].station_id
        prev_dir = 'b' if b_direction else 'a'
        curr_index, b_direction = self._get_next_idx(curr_index,
                                                     b_direction,
                                                     step_size=1)
        if b_direction is True:
            self.stations[curr_index].arrive_b(curr_train, prev_station,
                                               prev_dir)
        else:
            self.stations[curr_index].arrive_a(curr_train, prev_station,
                                               prev_dir)

    def _next_train(self, start_index=0, b_direction=True, step_size=1):
        '''
        Given a starting index, finds the next train in either direction
        '''
        # Search the preferred direction first, then wrap to the opposite
        # direction from its terminal if nothing is found.
        if b_direction is True:
            curr_index = self._next_train_b(start_index, step_size)
            if curr_index == -1:
                curr_index = self._next_train_a(
                    len(self.stations) - 1, step_size)
                b_direction = False
        else:
            curr_index = self._next_train_a(start_index, step_size)
            if curr_index == -1:
                curr_index = self._next_train_b(0, step_size)
                b_direction = True

        if b_direction is True:
            return self.stations[curr_index].b_train, curr_index, True
        return self.stations[curr_index].a_train, curr_index, False

    def _next_train_b(self, start_index, step_size):
        '''
        Finds the next train in the b direction, if any
        '''
        # Returns the station index holding a b-direction train, or -1.
        for i in range(start_index, len(self.stations), step_size):
            if self.stations[i].b_train is not None:
                return i
        return -1

    def _next_train_a(self, start_index, step_size):
        '''
        Finds the next train in the a direction, if any
        '''
        # NOTE(review): range(start_index, 0, -step_size) stops before
        # index 0, so a train sitting at station 0 is never found — confirm
        # whether station 0 is excluded on purpose (terminal handling).
        for i in range(start_index, 0, -step_size):
            if self.stations[i].a_train is not None:
                return i
        return -1

    def _get_next_idx(self, curr_index, b_direction, step_size=None):
        '''
        Calculates the next station index. Returns next index and if it is b direction
        '''
        # Default step spreads trains evenly over the full out-and-back loop.
        if step_size is None:
            step_size = int(
                (self.num_stations * Line.num_directions) / self.num_trains)
        if b_direction is True:
            next_index = curr_index + step_size
            if next_index < self.num_stations:
                return next_index, True
            else:
                # Reflect off the b-terminal and reverse direction.
                return self.num_stations - (next_index % self.num_stations), False
        else:
            next_index = curr_index - step_size
            if next_index > 0:
                return next_index, False
            else:
                # Reflect off the a-terminal and reverse direction.
                return abs(next_index), True

    def __str__(self):
        return '\n'.join(str(station) for station in self.stations)

    def __repr__(self):
        return str(self)
class Weather(Producer):
    """Simulated weather model that emits readings to Kafka via REST Proxy."""

    # Closed set of weather conditions; start=0 so member values begin at 0.
    status = IntEnum("status",
                     "sunny partly_cloudy cloudy windy precipitation",
                     start=0)

    rest_proxy_url = "http://localhost:8082"

    # Avro schemas, loaded once on first instantiation and shared class-wide.
    key_schema = None
    value_schema = None

    # Month indices (0-based) used to pick a seasonal starting temperature.
    winter_months = set((0, 1, 2, 3, 10, 11))
    summer_months = set((6, 7, 8))

    def __init__(self, month):
        topic_name = "org.chicago.cta.weather"

        # NOTE: this instance attribute shadows the class-level `status`
        # enum; the enum itself stays reachable as Weather.status.
        self.status = Weather.status.sunny
        self.temp = 70.0
        if month in Weather.winter_months:
            self.temp = 40.0
        elif month in Weather.summer_months:
            self.temp = 85.0

        # Schemas are loaded before super().__init__ so the Producer always
        # receives real schema objects.
        if Weather.key_schema is None:
            with open(f"{Path(__file__).parents[0]}/schemas/weather_key.json"
                      ) as f:
                Weather.key_schema = json.load(f)

        if Weather.value_schema is None:
            with open(f"{Path(__file__).parents[0]}/schemas/weather_value.json"
                      ) as f:
                Weather.value_schema = json.load(f)

        super().__init__(
            topic_name,  # TODO: Come up with a better topic name
            key_schema=Weather.key_schema,
            value_schema=Weather.value_schema,
            num_partitions=1,
            num_replicas=1,
        )

    def _set_weather(self, month):
        """Advance the simulated weather state for the given month."""
        # Seasonal bias for the triangular random step.
        mode = 0.0
        if month in Weather.winter_months:
            mode = -1.0
        elif month in Weather.summer_months:
            mode = 1.0

        # NOTE(review): min/max clamp the *step* to [-20.0, 100.0], not the
        # resulting temperature — confirm that is the intended behavior.
        self.temp += min(max(-20.0, random.triangular(-10.0, 10.0, mode)),
                         100.0)
        self.status = random.choice(list(Weather.status))

    def run(self, month):
        """Generate one weather reading and POST it to the Kafka REST Proxy."""
        self._set_weather(month)

        logger.info(
            "weather kafka proxy integration complete - sending request")
        resp = requests.post(
            f"{Weather.rest_proxy_url}/topics/{self.topic_name}",
            headers={"Content-Type": "application/vnd.kafka.avro.v2+json"},
            data=json.dumps({
                "key_schema": json.dumps(Weather.key_schema),
                "value_schema": json.dumps(Weather.value_schema),
                "records": [{
                    "key": {
                        "timestamp": self.time_millis()
                    },
                    "value": {
                        "temperature": self.temp,
                        "status": self.status.name
                    }
                }]
            }),
        )
        try:
            resp.raise_for_status()
        # BUG FIX: the original used a bare `except:`, which also swallows
        # KeyboardInterrupt and SystemExit; catch Exception instead.
        except Exception:
            print(
                f"Failed to send data to REST Proxy {json.dumps(resp.json(), indent=2)}"
            )
            logger.error(
                f"Failed to send data to REST Proxy {json.dumps(resp.json(), indent=2)}"
            )

        # NOTE(review): this success log also fires after a failed POST,
        # since the except branch does not return — confirm intent.
        logger.debug(
            "sent weather data to kafka, temp: %s, status: %s",
            self.temp,
            self.status.name,
        )
#!/usr/bin/env python3.7 from enum import Enum, IntEnum enum = Enum('Menu', 'Pizza Lasagna Spaghetti') enum3 = IntEnum('Menu', 'Pizza Lasagna Spaghetti') choice = input(""" Co zjesz?: 1. Pizza 2. Lasagna 3. Spaghetti """) if int(choice) == enum.Pizza.value: print("Pizza time!") if int(choice) == enum3.Pizza: print("Pizza time!")