def __init__(self, dbname=os.path.join(base_path, 'data', 'autiagenda.db')):
    """Open the agenda database at *dbname*, creating the schema on first use.

    NOTE: sqlite3.connect() creates an empty file, so the existence check
    must happen *before* connecting to know whether the DDL must run.
    """
    first_run = not os.path.exists(dbname)
    self._conn = sqlite3.connect(dbname)
    self._cur = self._conn.cursor()
    # Closed vocabularies used by the rest of the class.
    self._activity_type = enum('lesson', 'homework')
    self._homework_type = enum('read', 'make', 'learn',
                               'test-small', 'test-large', 'exam')
    if first_run:
        self._ddl()
def parse_class(self, tr):
    """Parse one table row (<tr>) element into a DataItem.

    Scans the row's descendants in document order: the first non-empty
    <th> supplies the item name; every subsequent element's text is
    classified as a date (``YYYY-MM-DD`` or ``xxx-DD-YYYY``-style) or,
    failing that, as a location string.

    :param tr: DOM element for the table row.
    :returns: DataItem with name/date/location filled in where found.
    """
    states = utils.enum(SEARCHING_HEADER=1, SEARCHING_DATA=2, DONE=3)
    state = states.SEARCHING_HEADER
    item = DataItem()
    for elem in tr.getElementsByTagName("*"):
        if (state == states.SEARCHING_HEADER and
                elem.nodeName == "th" and
                elem.firstChild is not None and
                elem.firstChild.nodeValue is not None):
            state = states.SEARCHING_DATA
            item.name = elem.firstChild.nodeValue.encode('utf-8')
            continue
        if state == states.SEARCHING_DATA:
            res = self.recurseNode(elem)
            # Cells containing only stray punctuation carry no data.
            ignoreChars = ["(", "[", "]"]
            if res is not None and res != "" and res not in ignoreChars:
                data = res.encode('utf-8')
                # Raw strings: "\d" is an invalid string escape in modern
                # Python; r"\d" is the correct regex form in py2 and py3.
                if (re.search(r"\d\d\d\d\-\d\d\-\d\d", data) is not None or
                        re.search(r"...-\d\d\-\d\d\d\d", data) is not None):
                    item.date = data
                else:
                    item.location = data
    return item
class EXContact(Contact):
    """A Contact backed by an Exchange (EWS) store item."""

    # How property updates are applied; consumed by update helpers elsewhere.
    prop_update_t = utils.enum('PROP_REPLACE', 'PROP_APPEND')

    def __init__(self, folder, ews_con=None, con=None, con_itemid=None):
        """Constructor for EXContact.

        The starting properties of the contact can be initialized either
        from an existing Contact object, or from an pyews contact
        object. It is an error to provide both.
        """
        if (ews_con and con):
            raise EXContactError(
                'Both ews con and con cannot be specified in EXContact()')
        Contact.__init__(self, folder, con)
        # NOTE(review): conf appears unused below — possibly leftover from a
        # copied sibling class; confirm before removing.
        conf = self.get_config()
        if con:
            if con_itemid:
                self.set_itemid(con_itemid)
            else:
                # No itemid means this contact does not exist on the server yet.
                logging.debug('Potential new EXContact: %s',
                              con.get_disp_name())
        self.set_ews_con(ews_con)
        if ews_con is not None:
            self.init_props_from_ews_con(ews_con)

    ##
    ## First the inherited abstract methods from the base classes
    ##

    def save(self):
        """Saves the current contact on the server. For now we are only
        handling a new contact creation scenario. The protocol for updates
        is different."""
        logging.debug('Saving contact (%s) to server...', self.get_disp_name())
        ews_con = self.init_ews_con_from_props()
        resp = ews_con.save()
        logging.debug('Saving contact to server...done')
        # FIXME: Get the contact ID and do something meaningful with it

    ##
    ## Now onto the non-abstract methods.
    ##

    def get_parent_folder_id(self):
        """Fetch and return the itemid of the parent folder of this contact
        in the Exchange store. This will be None if this is a new contact
        that has not yet been written to the server."""
        try:
            return self._get_att('parentid')
        except Exception, e:
            # Attribute absent => contact never saved remotely.
            return None
def _mock_text(text): cases = utils.enum(UPPER=0, LOWER=1) dest = "" case = cases.LOWER for char in text: if char.isalpha(): if case == cases.LOWER: char = char.lower() case = cases.UPPER elif case == cases.UPPER: char = char.upper() case = cases.LOWER
class OLContact(Contact):
    """A Contact backed by an Outlook (MAPI) store item."""

    # How property updates are applied; consumed by update helpers elsewhere.
    prop_update_t = utils.enum('PROP_REPLACE', 'PROP_APPEND')

    def __init__(self, folder, olprops=None, eid=None, con=None):
        """Constructor for OLContact. The starting properties of the contact
        can be initialized either from an existing Contact object, or from
        an Outlook item property list. It is an error to provide both.

        It is redundant to provide both olprops (array of property tuples)
        and an entryid. The entryid will override the property list.
        """
        if ((olprops and con) or (eid and con)):
            raise OLContactError(
                'Both olprops/eid and con cannot be specified in OLContact()')
        if olprops and eid:
            logging.warning('olprops and eid are not null. Ignoring olprops')
            olprops = None
        Contact.__init__(self, folder, con)

        ## Sometimes we might be creating a contact object from GC or other
        ## entry which might have the Entry ID in its sync tags
        ## field. if that is present, we should use it to initialize the
        ## itemid field for the current object

        conf = self.get_config()
        if con:
            try:
                pname_re = conf.get_profile_name_re()
                label = conf.make_sync_label(pname_re, self.get_dbid())
                tag, itemid = con.get_sync_tags(label)[0]
                self.set_entryid(base64.b64decode(itemid))
            except Exception, e:
                # Any failure above (no tags, bad base64, ...) is treated as
                # "this contact is new on the Outlook side".
                logging.debug('Potential new OLContact: %s', con.get_name())

        ## Set up some of the basis object attributes and parent folder/db
        ## properties to make it easier to access them

        self.set_synchable_fields_list()
        self.set_proptags(folder.get_proptags())
        self.set_olprops(olprops)

        if olprops:
            self.init_props_from_olprops(olprops)
        elif eid:
            self.init_props_from_eid(eid)

        # Construction finished; re-enable normal attribute bookkeeping.
        self.in_init(False)
def __init__(self, db, auth, profile):
    """Wire up the service: store collaborators, build the handler
    registry, the subject-type enum, and one attribute per DB table."""
    self.db = db
    self.auth = auth
    self.profile = profile

    # Handler registry, keyed by handler class name.
    handler_map = {
        "SummationHandler": SummationHandler(),
        "AdditionHandler": AdditionHandler(),
        "ConversionHandler": ConversionHandler(),
    }
    self.handlers = Result(handler_map)

    self.type = enum(Science=1, Technology=2, Engineering=3, Art=4, Math=5)

    # One attribute per backing table, resolved through the shared db handle.
    for attr, table in (("problems", "problems"),
                        ("problemsets", "problemsets"),
                        ("links", "set_links"),
                        ("urls", "problem_urls"),
                        ("instances", "problem_instances")):
        setattr(self, attr, self.db.get_table(table))
class OLContact(Contact):
    """A Contact backed by an Outlook (MAPI) store item."""

    # How property updates are applied; consumed by update helpers elsewhere.
    prop_update_t = utils.enum('PROP_REPLACE', 'PROP_APPEND')

    def __init__(self, folder, olprops=None, eid=None, con=None,
                 con_itemid=None):
        """Constructor for OLContact. The starting properties of the contact
        can be initialized either from an existing Contact object, or from
        an Outlook item property list. It is an error to provide both.

        It is redundant to provide both olprops (array of property tuples)
        and an entryid. The entryid will override the property list.
        """
        if ((olprops and con) or (eid and con)):
            raise OLContactError(
                'Both olprops/eid and con cannot be specified in OLContact()')
        if olprops and eid:
            logging.warning('olprops and eid are not null. Ignoring olprops')
            olprops = None
        Contact.__init__(self, folder, con)
        # NOTE(review): conf appears unused below; confirm before removing.
        conf = self.get_config()
        if con:
            if con_itemid:
                # con_itemid is a base64-encoded Outlook EntryID.
                self.set_entryid(base64.b64decode(con_itemid))
            else:
                logging.debug('Potential new OLContact: %s', con.get_name())

        ## Set up some of the basis object attributes and parent folder/db
        ## properties to make it easier to access them

        self.set_synchable_fields_list()
        self.set_proptags(folder.get_proptags())

        # Map of logical address kind -> MAPI property tag per field.
        # 'work' tags come from the folder's named-property table; 'home'
        # and 'other' use well-known mapitags constants.
        pvalu = self.get_proptags().valu
        self.addr_map = {
            'work': {
                'street': pvalu('ASYNK_PR_WORK_ADDRESS_STREET'),
                'city': pvalu('ASYNK_PR_WORK_ADDRESS_CITY'),
                'state': pvalu('ASYNK_PR_WORK_ADDRESS_STATE_OR_PROVINCE'),
                'country': pvalu('ASYNK_PR_WORK_ADDRESS_COUNTRY'),
                'zip': pvalu('ASYNK_PR_WORK_ADDRESS_POSTAL_CODE')
            },
            'home': {
                'street': mt.PR_HOME_ADDRESS_STREET_W,
                'city': mt.PR_HOME_ADDRESS_CITY_W,
                'state': mt.PR_HOME_ADDRESS_STATE_OR_PROVINCE_W,
                'country': mt.PR_HOME_ADDRESS_COUNTRY_W,
                'zip': mt.PR_HOME_ADDRESS_POSTAL_CODE_W,
            },
            'other': {
                'street': mt.PR_OTHER_ADDRESS_STREET_W,
                'city': mt.PR_OTHER_ADDRESS_CITY_W,
                'state': mt.PR_OTHER_ADDRESS_STATE_OR_PROVINCE_W,
                'country': mt.PR_OTHER_ADDRESS_COUNTRY_W,
                'zip': mt.PR_OTHER_ADDRESS_POSTAL_CODE_W,
            },
        }

        self.set_olprops(olprops)

        if olprops:
            self.init_props_from_olprops(olprops)
        elif eid:
            self.init_props_from_eid(eid)
# Tail of the preceding constructor: construction finished, re-enable
# normal attribute bookkeeping.
self.in_init(False)

def set_synchable_fields_list(self):
    """Build the list of MAPI property tags this class keeps in sync,
    starting from the per-db config and adding the well-known extras."""
    fields = self.get_db().get_db_config()['sync_fields']
    fields = self._process_sync_fields(fields)
    ptags = self.get_folder().get_proptags()
    fields.append(ptags.valu('ASYNK_PR_FILE_AS'))
    fields.append(ptags.valu('ASYNK_PR_EMAIL_1'))
    fields.append(ptags.valu('ASYNK_PR_EMAIL_2'))
    fields.append(ptags.valu('ASYNK_PR_EMAIL_3'))
    fields.append(ptags.valu('ASYNK_PR_IM_1'))
    fields.append(ptags.valu('ASYNK_PR_WORK_ADDRESS_POST_OFFICE_BOX'))
    fields.append(ptags.valu('ASYNK_PR_WORK_ADDRESS_STREET'))
    fields.append(ptags.valu('ASYNK_PR_WORK_ADDRESS_CITY'))
    fields.append(ptags.valu('ASYNK_PR_WORK_ADDRESS_STATE_OR_PROVINCE'))
    fields.append(ptags.valu('ASYNK_PR_WORK_ADDRESS_COUNTRY'))
    fields.append(ptags.valu('ASYNK_PR_WORK_ADDRESS_POSTAL_CODE'))
    fields.append(ptags.valu('ASYNK_PR_TASK_DUE_DATE'))
    fields.append(ptags.valu('ASYNK_PR_TASK_STATE'))
    fields.append(ptags.valu('ASYNK_PR_TASK_RECUR'))
    fields.append(ptags.valu('ASYNK_PR_TASK_COMPLETE'))
    fields.append(ptags.valu('ASYNK_PR_TASK_DATE_COMPLETED'))
    fields.append(ptags.valu('ASYNK_PR_CUSTOM_PROPS'))
    self._append_sync_proptags(fields)
    self.set_sync_fields(fields)

def _append_sync_proptags(self, fields):
    """Append the property tag of every configured sync tag to *fields*."""
    olcf = self.get_folder()
    pts = olcf.get_proptags()
    sts = pts.sync_tags
    for tag, value in sts.iteritems():
        fields.append(olcf.get_proptags().valu(tag))

## This method is already defined in item.py, but we need to override it
## here to actually save just the property back to Outlook

def update_sync_tags(self, destid, val, save=False):
    """Update the specified sync tag with given value.

    If the tag does not already exist an entry is created.

    :param destid: sync tag identifier to update.
    :param val: new value for the tag.
    :param save: if True, write the tags back to Outlook immediately.
    :returns: True on success, or save_sync_tags()' result when saving.
    """
    self._update_prop('sync_tags', destid, val)
    if save:
        return self.save_sync_tags()
    return True

def save_sync_tags(self):
    """Write the current sync tags to the underlying Outlook item.

    :returns: True on success, False if SetProps/SaveChanges failed,
              None if there was nothing to save.
    """
    olitem = self.get_olitem()
    olprops = []
    self._add_sync_tags_to_olprops(olprops)
    if olprops == []:
        ## this is happening because the item could not be saved for
        ## whatever reason on remote, and a sync tag was not set as a result.
        return
    try:
        hr, res = olitem.SetProps(olprops)
        olitem.SaveChanges(0)
        return True
    except Exception, e:
        logging.critical('Could not save synctags(%s) for %s (reason: %s)',
                         olprops, self.get_name(), e)
        logging.critical('Will try to continue...')
        return False
import copy
import random
import string
import utils

VERBOSE_DEBUGGING = False

# Operation kinds supported by the (presumably ORAM-style) storage layer.
Operations = utils.enum(READ=1, WRITE=2)


class Block(object):
    """A storage block with an address, a leaf target, and 8 chars of
    payload; address < 0 marks the block as invalid/empty."""

    def __init__(self, address=-1, leaf_target=-1, contents=None):
        alphabet = string.ascii_uppercase + string.digits
        self.address = address
        self.leaf_target = leaf_target
        if contents is None:
            # initialize some junk data
            self.contents = ''.join(random.choice(alphabet)
                                    for _ in range(8))
        else:
            self.contents = contents

    def is_valid(self):
        # A block is valid iff it has a non-negative address.
        return self.address >= 0

    def invalidate(self):
        # Reset both identifiers; contents are intentionally left as-is.
        self.address = -1
        self.leaf_target = -1

    def __str__(self):
# REQUIRE lib getopt # # helper functions to deal with # handlers. # import config import re import utils Msg = utils.enum(RAW=0, OK=1, ERR=2) # TODO: documentation on how this whole file # works, so when I inevitably leave this project # to rot for a month I won't come back completely # confused. # # FIXME: so, when we check to make sure that # all the required arguments to a command are there, # we use the *very* naive method of checking # that the number of arguments is greater than # the amount of required arguments. # # however... some arguments actually take # *more than one* value, which makes this dumb # method fall to pieces. # # example: the keywords arguments of :rds # (see mod/reddit.py). it's type is "list", # which means that it takes a list of strings as
_component_stores = dict()
"""Dictionary of all components"""

_all_systems = set()
"""Set of all systems."""

# Closed set of component categories used to key _component_stores.
ComponentTypes = enum([
    # Macro-level properties
    "AI",
    "Graphics",
    "Music",
    "Physics",
    "SoundEffects",
    # Game object (primarily HexCell) properties
    "Damageable",
    "EnergyConsumer",
    "EnergyGenerator",
    "EnergyStorage",
    "Propulsion",
    "Weapon"
])
"""List of valid component types."""

#TODO: According to Adam, a better choice would be defining DB tables here.
# For the time being, the best bet is to read from the DB at startup and
# not write back until exiting. Each component holds one DB table. Each system
# operates on one or more DB tables.
#TODO: synchronize for multi-threaded support
from utils import enum #------------------------------------------------------------------------# #------------ READ ANNOTATIONS REGRADING [ORGANISM/ALIGNMENT] -----------# alignment_count_status = enum ( #0b0000000xx NO_ALIGNMENT =0b0000000001, ONE_ALIGNMENT =0b0000000010, MULTIPLE_ALIGNMENTS =0b0000000011, MASK =0b0000000011 ) organism_count_status = enum ( #0b000000xx00 NO_ORGANISMS =0b0000000000, ONE_ORGANISM =0b0000000100, MULTIPLE_ORGANISMS =0b0000001000, MASK =0b0000001100 ) organism_type_status = enum ( #0b0000xx0000 TARGET_ORGANISM =0b0000010000, NONTARGET_ORGANISM =0b0000100000, MIXED_ORGANISMS =0b0000110000, MASK =0b0000110000 ) coding_region_aln_count_status = enum ( #0b00xx000000 NO_CODING_ALIGNMENTS =0b0000000000, ONE_CODING_ALIGNMENT =0b0001000000, MULTIPLE_CODING_ALIGNMENTS =0b0010000000, MASK =0b0011000000 ) coding_region_alignment_status = enum ( #0bxx00000000 ONE_TARGET_ORGANISM =0b0000000000, MULTIPLE_TARGET_ORGANISMS =0b0100000000, NONTARGET_ORGANISMS =0b1000000000,
import sqlite3

import config
from utils import enum

# Direction of a message relative to this service.
MessageDirection = enum('INBOUND', 'OUTBOUND')

get_client_from_number_query = "SELECT * FROM clients WHERE phone_number = ?;"


# 0 = incoming
# 1 = outgoing
def log_message(phone_number, body, direction):
    """Persist one SMS message, creating the client row if needed.

    Bug fix: the original called cur.close() twice and never closed the
    connection, leaking it on every call (and on any exception).

    :param phone_number: sender/recipient number, looked up in `clients`.
    :param body: message text.
    :param direction: MessageDirection value (0 incoming / 1 outgoing).
    """
    con = sqlite3.connect(config.SQLITE_DATABASE)
    try:
        cur = con.cursor()
        cur.execute(get_client_from_number_query, (phone_number,))
        client = cur.fetchone()
        if client is None:
            # Unknown number: register it and use the freshly inserted id.
            q = "INSERT INTO clients VALUES (NULL, ?);"
            cur.execute(q, (phone_number,))
            client_id = cur.lastrowid
        else:
            client_id = client[0]
        q = "INSERT INTO messages VALUES (NULL, ?, ?, ?, CURRENT_TIMESTAMP);"
        cur.execute(q, (client_id, direction, body))
        con.commit()
        cur.close()
    finally:
        con.close()
from os.path import abspath, dirname, exists, join
from bifilter import BinaryFilter
from utils import enum, memoized_by_uid, add
import pickle

# Ternary game-state encoding: each cell is empty/X/O.
BASE = 3
LABELS = " XO"
E_LABEL, X_LABEL, O_LABEL = LABELS

# Evaluation result for a board position, from the current player's view.
STATUS = enum('DRAW', 'WINNING', 'LOSING', 'WINNING_FINAL', 'LOSING_FINAL',
              'IMPOSSIBLE', 'UNKNOWN')


class GameBoardException(Exception):
    """Raised for invalid game-board operations."""

    def __init__(self, message):
        super(GameBoardException, self).__init__(message)


class GameBoard(object):
    def __init__(self, height, width):
        """ Game board.

        Parameters
        ----------
        height (int): tic tac toe board height.
        width (int): tic tac toe board width.
        """
        self.height = height
class Organism(object):
    '''
    Placeholder for organism data from XML as described here:
    http://commondatastorage.googleapis.com/solverfiles/Organisms_XML_Schema_overview.pdf
    '''

    # Level of taxonomic assignment available for an organism.
    organismType = enum(GENUS='genus',
                        GENUS_SPECIES='genus_species',
                        GENUS_SPECIES_STRAIN='genus_species_strain',
                        ORGANISM_NAME='organism_name',
                        NEAREST_NEIGHBOR='nearest_neighbor',
                        MISSING_TAXID_ORG='missing_taxid_org')

    @autoassign
    def __init__(self, relativeAmount, count, taxon_id, taxonomy, type,
                 organismName=None, genus=None, species=None, strain=None,
                 nearestNeighbor=None, reads=None, genes=None):
        '''
        :param relativeAmount: (float)
        :param count: (int)
        :param taxon_id: (int)
        :param taxonomy: list of taxonomical node names (lineage)
        :param type: (str) level of taxonomic assignment (check organismType)
        :param reads: list of read IDs or None for Host
        :param genes: list of gene objects mapped to this organism or None
            if there are no reported genes
        '''
        # @autoassign binds every argument to an attribute of the same name.

    @classmethod
    def from_xml_organism_node(org, organism_node):
        '''
        Loads available organism data from organism XML node.

        :param organism_node: xml.etree.ElementTree.Element instance
        :rtype: Organism
        '''
        # determine organism count
        relative_amount_node = organism_node.find('relativeAmount')
        # NOTE(review): eval() on XML text is dangerous if the file is not
        # trusted; float()/int() would be safer. Left as-is to preserve
        # behavior for non-trivial numeric literals — confirm input source.
        relative_amount = eval(relative_amount_node.text)
        count = eval(relative_amount_node.attrib['count'])
        # determine taxonomy
        (taxon_id, taxonomy) = Organism.determine_taxonomy(organism_node)
        # determine organism type and available naming
        (organism_type, names) = Organism.determine_org_type(organism_node)
        if taxon_id == MISSING_TAXID:
            # BUG FIX: this was previously assigned to a misspelled local
            # ("organims_type"), silently discarding the override.
            organism_type = Organism.organismType.MISSING_TAXID_ORG
        nearest_neighbor = names['nearestNeighbor']
        org_name = names['organismName']
        strain = names['strain']
        species = names['species']
        genus = names['genus']
        # fetch genes and reads
        genes = Organism.get_genes(organism_node)
        reads = Organism.get_reads(organism_node)
        organism = Organism(relative_amount, count, taxon_id, taxonomy,
                            organism_type, org_name, genus, species, strain,
                            nearest_neighbor, reads, genes)
        return organism

    @classmethod
    def determine_taxonomy(org, organism_node):
        '''
        Determines tax_id and taxonomical lineage from organism XML node.

        :param organism_node: xml.etree.ElementTree.Element instance
        :rtype: tuple(tax_id(int), taxonomy(list))
        '''
        taxonomy_node = organism_node.find('taxonomy')
        try:
            taxon_id = eval(taxonomy_node.attrib['taxon_id'])
        except KeyError:
            # so it is a nearest neighbor node
            log.info('No taxon ID info for organism with taxonomy %s'
                     % taxonomy_node.text)
            return (MISSING_TAXID, [])
        taxonomy_str = taxonomy_node.text
        # Drop a trailing period before splitting the lineage.
        if taxonomy_str.endswith('.'):
            taxonomy_str = taxonomy_str[0:-1]
        taxonomy = taxonomy_str.split('; ')
        return (taxon_id, taxonomy)

    @classmethod
    def get_genes(org, organism_node):
        '''
        Creates a list of genes (:class:`Gene`) from organism XML node.

        :param organism_node: :class:`xml.etree.ElementTree.Element` instance
        :rtype: list of :class:`Gene` objects or empty list if no genes
            were reported
        '''
        genes_node = organism_node.find('genes')
        if genes_node is None:
            return []
        genes = []
        for gene_node in genes_node:
            genes.append(Gene.from_xml_gene_node(gene_node))
        return genes

    @classmethod
    def get_reads(org, organism_node):
        '''
        Creates a list of reads (str) from organism XML node.

        :param organism_node: xml.etree.ElementTree.Element instance
        :rtype: list of strings (read IDs)
        '''
        reads_node = organism_node.find('reads')
        if reads_node is None:
            return []
        reads = []
        for read_node in reads_node:
            reads.append(read_node.text)
        return reads

    @classmethod
    def determine_org_type(org, organism_node):
        '''
        Determines taxonomical assignment type. As described here:
        http://commondatastorage.googleapis.com/solverfiles/Organisms_XML_Schema_overview.pdf
        there are four types of tax assignment:

        * nearest neighbor
        * only organism name (viruses, plasmids)
        * genus
        * genus, species
        * genus, species, strain

        :param organism_node: xml.etree.ElementTree.Element instance
        :rtype: tuple(organism_type, names). Organism type is a value from
            the organismType enum, and names is a dictionary with string
            keys: 'nearestNeighbor', 'organismName', 'strain', 'species',
            'genus'
        '''
        neighbor_node = organism_node.find('nearestNeighbor')
        org_name_node = organism_node.find('organismName')
        strain_node = organism_node.find('strain')
        species_node = organism_node.find('species')
        genus_node = organism_node.find('genus')

        nearest_neighbor = None
        organism_name = None
        strain = None
        species = None
        genus = None

        # Pick the most specific naming level present (was: "ruzne sifrice").
        if neighbor_node is not None:
            organism_type = org.organismType.NEAREST_NEIGHBOR
            nearest_neighbor = neighbor_node.text
        else:
            organism_name = org_name_node.text
            if strain_node is not None:
                organism_type = org.organismType.GENUS_SPECIES_STRAIN
                strain = strain_node.text
            else:
                if species_node is not None:
                    species = species_node.text
                    organism_type = org.organismType.GENUS_SPECIES
                else:
                    if genus_node is not None:
                        genus = genus_node.text
                        organism_type = org.organismType.GENUS
                    else:
                        organism_type = org.organismType.ORGANISM_NAME
        names = {
            'nearestNeighbor': nearest_neighbor,
            'organismName': organism_name,
            'strain': strain,
            'species': species,
            'genus': genus
        }
        return (organism_type, names)
import utils

# Registry of map-tool identifiers; values are the tool class names
# (NONE means "no active tool").
types = utils.enum(NONE=None,
                   ZOOM_IN_TOOL="ZoomInTool",
                   ZOOM_OUT_TOOL="ZoomOutTool",
                   ZOOM_ENVELOPE_TOOL="ZoomEnvelopeTool",
                   ZOOM_EXTENT_TOOL="ZoomExtentTool",
                   PAN_TOOL="PanTool")


def validateToolType(type):
    """Raise ValueError if *type* is not one of the known tool types."""
    if type not in (types.NONE, types.ZOOM_IN_TOOL, types.ZOOM_OUT_TOOL,
                    types.ZOOM_ENVELOPE_TOOL, types.ZOOM_EXTENT_TOOL,
                    types.PAN_TOOL):
        raise ValueError('map tool not valid')


import mapnik


class PanTool:
    """Pans the wrapped map by integer pixel offsets."""

    def __init__(self, map):
        self.map = map

    def pan(self, point):
        # mapnik expects integer coordinates.
        self.map.pan(int(point.x), int(point.y))


import geometry

""" Zoom tool class
# coding:utf-8 import logging import threading from bitarray import bitarray from model.image import ChunksImage from scheduler.network_aware_scheduler import NetworkAwareScheduler from scheduler.random_scheduler import RandomScheduler from storage.backend_storage import BackendStorage from storage.stats import Statistics from storage.zmq_rpc import ZmqStorageNetworkingRpc from utils import enum MSG_TAGS = enum(HASH_AVIL=1, CHUNK_REC=2, NEW_IMG=3, REQ_CHUNK=4) class DedupBackendStorage(BackendStorage): """ Represent the proxy layer System logic is implemented here Central point and communicate with storage layer and network layer """ def __init__(self, dal, cfg): BackendStorage.__init__(self, 'deduplication', dal) self.cfg = cfg
def getMongoConfPath():
    """Return the first existing mongod config path under /etc, or None
    if none of the candidate file names exists."""
    l = ["db.conf", 'mongod.conf', 'mongodB.conf']
    l = map(lambda x: x if path.isfile("/etc/" + x) else False, l)
    for i in l:
        if i:
            return "/etc/" + i


MONGO_CONF_DEFAULTPATH = getMongoConfPath()

# MongoDB query/update operator shorthands; the leading underscore avoids
# clashing with Python keywords (and, or, in, ...).
mv = enum(_gt='$gt', _lt='$lt', _all='$all', _exists='$exists', _mod='$mod',
          _ne='$ne', _in='$in', nin='$nin', _nor='$nor', _or='$or',
          _and='$and', _size='$size', _type='$type', _set="$set",
          _atomic='$atomic', _id='_id')


def andVal(andVal=[]):
    """just a helper for producing and queries
    """
    # NOTE(review): body not implemented yet (returns None); the mutable
    # default argument is harmless while the body is empty but should be
    # replaced with None before implementing.


def getMongoConf(path=MONGO_CONF_DEFAULTPATH):
    ''' returns a dictionary like object with mongo configuration keys and values
class DepthFirstSearch(Tree):
    """Depth-first search tree over a directed graph, recording pre/post
    order numberings and DFS back edges."""

    Colors = utils.enum('WHITE', 'BLACK', 'GRAY')

    def __init__(self, directedg, rootID):
        Tree.__init__(self)
        assert rootID in directedg.the_vertices.keys(
        ), "Unable to find vertex %d from which to initiate depth-first search" % rootID
        self.rootID = rootID
        self.pre_order = []
        self.post_order = []
        self.vertex_post_order_numbering = {}
        self.vertex_pre_order_numbering = {}
        self.back_edges = []
        self.initialise(directedg, rootID)
        self.do_search(directedg, rootID)

    def initialise(self, directedg, rootID):
        """Create tree vertices and zero all numberings (0 == unvisited)."""
        for v in directedg:
            self.the_vertices[v.vertexID] = vertices.TreeVertex(v.vertexID)
            self.vertex_pre_order_numbering[v.vertexID] = 0
            self.vertex_post_order_numbering[v.vertexID] = 0
        # Numberings are 1-based; 0 is reserved for "not yet visited".
        self.pre_orderID = 1
        self.post_orderID = 1

    def do_search(self, directedg, vertexID):
        """Recursive DFS from vertexID, classifying edges as it goes."""
        self.vertex_pre_order_numbering[vertexID] = self.pre_orderID
        self.pre_order.append(vertexID)
        self.pre_orderID += 1
        v = directedg.get_vertex(vertexID)
        for succID in v.successors.keys():
            if self.vertex_pre_order_numbering[succID] == 0:
                # Unvisited successor: tree edge, recurse.
                self.add_edge(vertexID, succID)
                self.do_search(directedg, succID)
            elif self.vertex_pre_order_numbering[
                    vertexID] < self.vertex_pre_order_numbering[succID]:
                # Successor discovered after us: forward edge, ignore.
                pass
            elif self.vertex_post_order_numbering[succID] == 0:
                # Successor still on the stack: back edge.
                self.back_edges.append((vertexID, succID))
        self.vertex_post_order_numbering[vertexID] = self.post_orderID
        self.post_order.append(vertexID)
        self.post_orderID += 1

    def getPreorderVertexID(self, preID):
        """Return the vertex with 1-based pre-order number *preID*."""
        assert preID - 1 < len(
            self.pre_order), "Pre-order number %d too high" % preID
        return self.pre_order[preID - 1]

    def getPostorderVertexID(self, postID):
        """Return the vertex with 1-based post-order number *postID*."""
        assert postID - 1 < len(
            self.post_order), "Post-order number %d too high" % postID
        return self.post_order[postID - 1]

    def getPreID(self, vertexID):
        """Return the pre-order number assigned to *vertexID*."""
        assert vertexID in self.vertex_pre_order_numbering, "Unable to find pre-order numbering for vertex %d" % vertexID
        return self.vertex_pre_order_numbering[vertexID]

    def getPostID(self, vertexID):
        """Return the post-order number assigned to *vertexID*."""
        assert vertexID in self.vertex_post_order_numbering, "Unable to find post-order numbering for vertex %d" % vertexID
        return self.vertex_post_order_numbering[vertexID]

    def isDFSBackedge(self, sourceID, destinationID):
        """True iff (source, destination) was classified as a back edge."""
        return (sourceID, destinationID) in self.back_edges
from cryptography.x509.base import load_pem_x509_certificate, load_der_x509_certificate
from cryptography.hazmat.primitives.asymmetric.rsa import RSAPublicKey
from cryptography.x509.oid import NameOID
from cryptography.x509.oid import ExtensionOID
from cryptography import x509
import base64
import time
import input_obj
import lz4framed
import newline_reader

logger = logging.getLogger(__name__)
coloredlogs.install(level=logging.INFO)

# Worker coordination tags (presumably for an MPI/queue-style protocol —
# TODO confirm against the dispatcher that consumes them).
tags = utils.enum('READY', 'DONE', 'EXIT', 'START')


def get_backend(backend=None):
    """Return *backend*, or the default cryptography backend when None."""
    return default_backend() if backend is None else backend


class DecompressorCheckpoint(object):
    """
    Represents simple point in the data stream for random access read.
    """

    def __init__(self, pos, rec_pos=None, plain_pos=None, ctx=None,
import simplejson as json
from random import randint, shuffle
from struct import *
from crypto_factory import CryptoFactory
import zlib

# Constants, put in constants/config json file???
n_tuples = 3
n_seeds = 3
# TODO: seeds and tuples in Node/Setup Package -->
# These are for the dummy setup package creation and streaming key generation
# though we could do streaming keys in the second round, but make the setup
# packages the same size

# Role of a node in the path-building protocol.
NodeTypes = enum(X=1, Y=2)

crypto_factory = CryptoFactory()


class Node:
    """A network node and its path-building key material."""

    def __init__(self, name, ip_addr, port, key_hex, cert_hex):
        self.name = name
        self.type = None  # assigned later; one of NodeTypes
        self.ip_addr = ip_addr
        self.port = port  # 2 bytes
        self.terminating = 0
        # certs: keys are kept both as NaCl objects and raw hex strings.
        self.path_building_key = PrivateKey(key_hex,
                                            encoder=nacl.encoding.HexEncoder)
        self.path_building_key_hex = key_hex
        self.path_building_cert = PrivateKey(cert_hex,
                                             encoder=nacl.encoding.HexEncoder)
        self.path_building_cert_hex = cert_hex
        # transmitted to other nodes
"""Matches entity IDs with human-readable names""" _component_stores = dict() """Dictionary of all components""" _all_systems = set() """Dict of all systems.""" ComponentTypes = enum([ # Macro-level properties "AI", "Graphics", "Music", "Physics", "SoundEffects", # Game object (primarily HexCell) properties "Damageable", "EnergyConsumer", "EnergyGenerator", "EnergyStorage", "Propulsion", "Weapon" ]) """List of valid component types.""" #TODO: According to Adam, a better choice would be defining DB tables here. # For the time being, the best bet is to read from the DB at startup and # not write back until exiting. Each component holds one DB table. Each system # operates on one or more DB tables. #TODO: synchronize for multi-threaded support
import random
import collections
import operator
import uuid
import threading
import signal
import json

import email_report
from utils import daemonize, randpass, enum, LinodeCommand

# Rotation Policies: how the next proxy/region is chosen.
Policy = enum(
    'ROTATION_RANDOM',
    # Least recently used
    'ROTATION_LRU',
    # Switch to another region
    'ROTATION_NEW_REGION',
    # LRU + New region
    'ROTATION_LRU_NEW_REGION')

# Linode datacenter id -> human-readable region name.
region_dict = {
    2: 'Dallas',
    3: 'Fremont',
    4: 'Atlanta',
    6: 'Newark',
    7: 'London',
    8: 'Tokyo',
    9: 'Singapore',
    10: 'Frankfurt'
}
unicode('').encode('idna') #@note: fixes bug on gevent 1.0 see https://github.com/surfly/gevent/issues/349 #os.environ["GEVENT_RESOLVER"] = "ares" doesn't work #MongoClient.copy_database(self, from_name, to_name, from_host, username, password) from __init__ import _PATHROOT PATH_JS = _PATHROOT + "/js/mr_fun.js" # print 'root', os.listdir(_ROOT) def getMongoConfPath(): l=["db.conf",'mongod.conf','mongodB.conf'] l=map(lambda x: x if path.isfile("/etc/"+x) else False, l) for i in l: if i:return "/etc/"+i MONGO_CONF_DEFAULTPATH = getMongoConfPath() mv = enum(_gt='$gt', _lt='$lt', _all='$all', _exists='$exists', _mod='$mod', _ne='$ne', _in='$in', nin='$nin', _nor='$nor', _or='$or', _and='$and', _size='$size', _type='$type', _set="$set", _atomic='$atomic', _id='_id') def andVal(andVal=[]): """just a helper for producing and queries """ def getMongoConf(path=MONGO_CONF_DEFAULTPATH): ''' returns a dictionary like object with mongo configuration keys and values and/remove/change values and keys and save it back using obj.toFile() ''' return confFileDict(path) def mongoConfToPy(path=MONGO_CONF_DEFAULTPATH):
import requests
from requests_oauthlib import OAuth1

import exceptions
from models import AccessToken, LinkedInInvitation, LinkedInMessage
from utils import enum, to_utf8, raise_for_error, json, StringIO

__all__ = ['LinkedInAuthentication', 'LinkedInApplication', 'PERMISSIONS']

# OAuth scopes requestable from the LinkedIn API.
PERMISSIONS = enum('Permission',
                   COMPANY_ADMIN='rw_company_admin',
                   BASIC_PROFILE='r_basicprofile',
                   FULL_PROFILE='r_fullprofile',
                   EMAIL_ADDRESS='r_emailaddress',
                   NETWORK='r_network',
                   CONTACT_INFO='r_contactinfo',
                   NETWORK_UPDATES='rw_nus',
                   GROUPS='rw_groups',
                   MESSAGES='w_messages')

# Base URLs for the LinkedIn v1 REST endpoints.
ENDPOINTS = enum('LinkedInURL',
                 PEOPLE='https://api.linkedin.com/v1/people',
                 PEOPLE_SEARCH='https://api.linkedin.com/v1/people-search',
                 GROUPS='https://api.linkedin.com/v1/groups',
                 POSTS='https://api.linkedin.com/v1/posts',
                 COMPANIES='https://api.linkedin.com/v1/companies',
                 COMPANY_SEARCH='https://api.linkedin.com/v1/company-search',
                 JOBS='https://api.linkedin.com/v1/jobs',
                 JOB_SEARCH='https://api.linkedin.com/v1/job-search')
from utils import enum

# Virtual-machine opcodes. Some identifiers differ from the emitted
# mnemonic (see opcodeName) — presumably renamed to avoid clashing with
# Python keywords/builtins (and, or, str, div, ...) — TODO confirm.
opcode = enum("notop", "neg", "incop", "decop", "dup",
              "add", "sub", "mult", "divop", "modop", "swp",
              "andop", "orop", "gt", "lt", "ge", "le", "eq", "ne",
              "lod", "_str", "ldc", "lda",
              "ujp", "tjp", "fjp",
              "chkh", "chkl",
              "ldi", "sti",
              "call", "ret", "retv", "ldp", "proc", "endop",
              "nop", "bgn", "sym")

# Printable mnemonic for each opcode, indexed by the enum value above;
# MUST stay in the same order as `opcode`.
opcodeName = ["notop", "neg", "inc", "dec", "dup",
              "add", "sub", "mult", "div", "mod", "swp",
              "and", "or", "gt", "lt", "ge", "le", "eq", "ne",
              "lod", "str", "ldc", "lda",
              "ujp", "tjp", "fjp",
              "chkh", "chkl",
              "ldi", "sti",
              "call", "ret", "retv", "ldp", "proc", "end",
              "nop", "bgn", "sym"]

# Kinds of symbols tracked by the compiler's symbol table.
typeEnum = enum("INT_TYPE", "VOID_TYPE", "VAR_TYPE", "CONST_TYPE",
                "FUNC_TYPE")
import sqlite3

import config
from utils import enum

# Direction of a message relative to this service.
MessageDirection = enum('INBOUND', 'OUTBOUND')

get_client_from_number_query = "SELECT * FROM clients WHERE phone_number = ?;"


# 0 = incoming
# 1 = outgoing
def log_message(phone_number, body, direction):
    """Persist one SMS message, creating the client row if needed.

    :param phone_number: sender/recipient number, looked up in `clients`.
    :param body: message text.
    :param direction: MessageDirection value (0 incoming / 1 outgoing).
    """
    con = sqlite3.connect(config.SQLITE_DATABASE)
    cur = con.cursor()
    cur.execute(get_client_from_number_query, (phone_number, ))
    client = cur.fetchone()
    if client is None:
        # Unknown number: register it and use the freshly inserted id.
        q = "INSERT INTO clients VALUES (NULL, ?);"
        cur.execute(q, (phone_number, ))
        client_id = cur.lastrowid
    else:
        client_id = client[0]
    q = "INSERT INTO messages VALUES (NULL, ?, ?, ?, CURRENT_TIMESTAMP);"
    cur.execute(q, (
        client_id,
        direction,
        body,
    ))
class Stmt(object):
    """Statement object

    Base class for all Promela statements.  Tracks graph links (next/prev),
    labels, atomic/d_step context flags, and supports graph minimization
    via next_reduced()/minimize().
    """

    def __init__(self):
        self._labels = []            # Label objects attached to this statement
        self.ip = None               # instruction pointer assigned later
        self._next = []              # statements reachable right after this one
        self._prev = None            # statement this one is reachable from
        self._parent_proc = None     # owning proctype
        self._starts_atomic = False  # statement opens an atomic block
        self._ends_atomic = False    # statement closes an atomic block
        self._starts_dstep = False   # statement opens a d_step block
        self._ends_dstep = False     # statement closes a d_step block
        self._omittable = False      # may be dropped by graph reduction
        self._endstate = False       # counts as a valid endstate

    def __str__(self):
        return self.debug_repr()

    def debug_repr(self):
        """Returns human-readable representation of statement
        """
        # Default: the generated C code is the most faithful description.
        return self.execute()

    def executable(self):
        """Generates C expression which evaluates to 1 if statement is executable
        """
        # Base statements are always executable.
        return "1"

    def execute(self):
        """Generates C code which executes statement

        Needs not to end with semicolon
        """
        return ""

    def pre_exec(self):
        """Generates C code to execute before the statement

        Needs not to end with semicolon
        """
        lines = []
        # Only the statement that *opens* a context (and does not also close
        # it) emits the BEGIN marker.  d_step wraps atomic.
        if self.starts_dstep and not self.ends_dstep:
            lines.append("BEGIN_DSTEP()")
        if self.starts_atomic and not self.ends_atomic:
            lines.append("BEGIN_ATOMIC()")
        # Py2 idiom: returns the joined string, or None when empty.
        return len(lines) and "; ".join(lines) or None

    def post_exec(self):
        """Generates C code to execute after the statement

        Needs not to end with semicolon
        """
        lines = []
        # Mirror of pre_exec(): close atomic before closing d_step.
        if not self.starts_atomic and self.ends_atomic:
            lines.append("END_ATOMIC()")
        if not self.starts_dstep and self.ends_dstep:
            lines.append("END_DSTEP()")
        return len(lines) and "; ".join(lines) or None

    def set_atomic(self, starts, ends):
        """Sets atomicity context of statement

        Arguments:
        - `starts`: if True, starts atomic context
        - `ends`: if True, ends atomic context
        If any argument is None, corresponding flag is left unchanged
        """
        if starts is not None:
            self._starts_atomic = starts
        if ends is not None:
            self._ends_atomic = ends

    def set_dstep(self, starts, ends):
        """Sets d_step context of statement

        Arguments:
        - `starts`: if True, starts d_step context
        - `ends`: if True, ends d_step context
        If any argument is None, corresponding flag is left unchanged
        """
        if starts is not None:
            self._starts_dstep = starts
        if ends is not None:
            self._ends_dstep = ends

    @property
    def starts_atomic(self):
        return self._starts_atomic

    @property
    def ends_atomic(self):
        return self._ends_atomic

    @property
    def starts_dstep(self):
        return self._starts_dstep

    @property
    def ends_dstep(self):
        return self._ends_dstep

    @property
    def omittable(self):
        """If True, statement may be left out (by graph reduction) with no side effect
        """
        return self._omittable

    @property
    def endstate(self):
        """If True, this statement is considered a valid endstate
        """
        return self._endstate

    def set_endstate(self, endstate):
        """Sets statement endstate flag to given value, if not None
        """
        # NOTE(review): this tests truthiness, not `is not None`, so passing
        # False never clears the flag — flag only ever goes False -> True.
        # Verify against callers (minimize() relies on this behavior).
        if endstate:
            self._endstate = endstate

    def add_label(self, label):
        """Adds label to statement

        Arguments:
        - `label`: Label object
        """
        self._labels.append(label)
        label.parent_stmt = self

    def set_next(self, stmt):
        """Sets next statement for current statement

        MUST be called before complementary set_prev

        Arguments:
        - `stmt`: Stmt object
        """
        self._next = [stmt]

    @property
    def next(self):
        """List of next statements (reachable immeaditaly after current is executed)

        All statements in list are simple statements (no if/do blocks)
        """
        return self._next

    @property
    def prev(self):
        """Prev statement (from which this statement is reachable)

        Could be a compound statement (if/do)
        """
        return self._prev

    def set_prev(self, stmt):
        """Sets next statement for current statement

        Is used in compound statements to fixup links of preceding statements
        MUST be called after complementary set_next
        """
        self._prev = stmt

    def find_break_stmts(self):
        """Finds all BreakStmt instances in current scope (statement and sub-statements)

        For most statements, does nothing. TO be overloaded in block statements.
        Returns (possibly deep) list of BreakStmt objects
        """
        return []

    # Identifiers for the two settle passes, see settle() below.
    SettlePass = enum('PRE', 'POST_MINI')

    def settle(self, pass_no):
        """Settles Stmt object, must be called after all statements in proctype have been parsed

        Actually this is usable for statements that depend on other statements only
        There are currently 2 settle passes:
        1. Right after adding all statements
        2. After minimization (throwing away omittables)

        Arguments:
        - `pass_no`: settle pass number
        """
        pass

    def next_reduced(self):
        """Calculates next statements in reduced (minimized) graph

        Returns: tuple (
            (1) is valid endstate (True or None),
            (2) ends atomic (bool or None),
            (3) ends d_step (bool or None)
            (3) next statements (list)
        )
        Deduces whether current statement ends atomic/d_step context
        or is a valid endstate
        """
        ends_atomic = None
        ends_dstep = None
        endstate = None
        next_stmts = []

        # 1) if all next statements are omittable and end atomic context, True
        # 2) if each of next statements either does not end atomic context or is not omittable, False
        # Error otherwise (when there are some omittable statement that end atomic, but not all)
        def deduce_atomic(acc, e):
            # Accumulator folds per-successor answers; conflicting non-None
            # answers mean the graph cannot be reduced consistently.
            if acc is not None and acc != e:
                raise RuntimeError, "Cannot reduce statement atomicity context"
            return e or acc

        for stmt in self._next:
            if stmt.omittable:
                # Recurse through omittable successor: inherit its reduced
                # successors and fold its context/endstate answers.
                es, ea, ed, n = stmt.next_reduced()
                # Deduce atomic and d_step context for current statement
                ends_atomic = deduce_atomic(ends_atomic, ea)
                ends_dstep = deduce_atomic(ends_dstep, ed)
                next_stmts += n
                # If any of reachable omittable statements is valid endstate,
                # this statement should also be endstate
                endstate = es or endstate
            else:
                # Next non-omittable statements do not affect atomicity
                ends_atomic = deduce_atomic(ends_atomic, False)
                ends_dstep = deduce_atomic(ends_dstep, False)
                next_stmts.append(stmt)

        return ((self.endstate or endstate),
                (self.ends_atomic or ends_atomic),
                (self.ends_dstep or ends_dstep),
                next_stmts)

    def minimize(self):
        """Minimizes statement graph, fixing _next to point to non-omittables

        Is called for all statements, including omittables themselves
        Should be called after settle(PRE)
        Ignores _prev!
        Updates atomicity and endstate validness
        """
        endstate, ends_atomic, ends_dstep, self._next = self.next_reduced()
        # Omittables keep their own flags: they are about to be dropped and
        # must not absorb context transitions meant for surviving statements.
        if not self.omittable:
            self.set_atomic(None, ends_atomic)
            self.set_dstep(None, ends_dstep)
            self.set_endstate(endstate)
import struct import urllib2 from cryptography.hazmat.primitives.asymmetric import ec from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.serialization \ import Encoding, PrivateFormat, NoEncryption from keychain import Token import utils AUTHENTICATION_ENDPOINT = 'http://developer-portal.oneid.com/api/{project}/authenticate' ONEID_TYPES = utils.enum(DEVICE=0, SERVER=1, USER=2) def create_secret_key(output=None): """ Create a secret key and save it to a secure location :param output: Path to save the secret key :return: Secret key bytes. """ secret_key = ec.generate_private_key(ec.SECP256R1(), default_backend()) secret_key_bytes = secret_key.private_bytes(Encoding.PEM, PrivateFormat.PKCS8, NoEncryption()) # Save the secret key bytes to a secure file if output and os.path.exists(os.path.dirname(output)): with open(output, 'w') as f:
'''
Created on Jun 21, 2013

@author: nick
'''
from utils import enum

# Chess piece colors.  The +1/-1 values allow negating a color to obtain
# the opponent's color.
colors = enum(WHITE=1, BLACK=-1)

# Piece kinds, ordered by conventional material value (pawn lowest).
piece_types = enum(PAWN=0, KNIGHT=1, BISHOP=2, ROOK=3, QUEEN=4, KING=5)

# Kind of bound stored for a transposition/search entry:
# MAX = upper bound, EXACT = exact score.
move_types = enum(MAX=0, EXACT=1)
42: lambda _: f"(CGI error)", 43: lambda _: f"(Proxy error)", 44: lambda _: f"(Rate limited)", 50: lambda m: f"(Permanent failure: {m})", 51: lambda _: f"(Area 51: Not found)", 52: lambda _: f"(Resource gone)", 53: lambda _: f"(Proxy request refused)", 59: lambda _: f"(Malformed request)", 60: lambda m: f"(Need client certificate)", 61: lambda _: f"(Unauthorised client certificate)", 62: lambda _: f"(Invalid client certificate)", } sslctx = ssl.create_default_context() gemtext = utils.enum( TEXT=0, HEADER1=1, HEADER2=2, HEADER3=3, LINK=4, QUOTE=5, LIST=6, PREFORMAT=7 ) @dataclasses.dataclass class GeminiDoc: doctype: int # status code "family". e.g. 3, 2, 1 status: int # exact status code. e.g. 31, 20, 11 meta: str # text that comes after the status code. body: list # the document body. comment unneeded. # Enable TOFU, which Gemini needs. Stupid, right? sslctx.check_hostname = False sslctx.verify_mode = ssl.CERT_NONE
def __init__(self):
    # Log-level tags used when formatting messages.  WARNING maps to the
    # shorter literal "WARN" so all rendered tags stay 4-5 chars wide.
    self.log_type = utils.enum(INFO="INFO", DEBUG="DEBUG", WARNING="WARN",
                               ERROR="ERROR")
import materials
from utils import (
    Vec,
    enum,
    iterate_cube,
    get_tile_entity_tags,
    weighted_choice
)

# Unit step vector for each compass direction.  Indexed by the `dirs` enum
# below: dv[dirs.N] moves north, dv[dirs.E] east, etc.
dv = [
    Vec(0, -1, 0),
    Vec(1, 0, 0),
    Vec(0, 1, 0),
    Vec(-1, 0, 0)
]
# Compass directions; order MUST match dv above.
dirs = enum('N', 'E', 'S', 'W')


class Blank(object):
    # Base/no-op feature.  Subclasses override _name and the size limits to
    # describe their footprint (widths/lengths are in tiles).
    _name = 'blank'
    _min_width = 0
    _max_width = 32
    _min_length = 0
    _max_length = 32

    def __init__(self, parent, position, size, length, direction):
        # `parent`: owning dungeon/level object
        # `position`: Vec of the feature's origin
        # `size`, `length`: extent of the feature
        # `direction`: one of the `dirs` enum values
        self.parent = parent
        self.position = position
        self.size = size
        self.length = length
        self.direction = direction
import logging
from utils import enum, translate

# Role of a keychain entry within an item.
# NOTE(review): USER's value '******' looks like a redaction artifact rather
# than a meaningful designation string — verify against the data source.
Designation = enum(
    USER='******',
    PW='password',
    URL='url',
    SECTION='section',
    OTHER='other',
)

logger = logging.getLogger('PassPy.KeychainItemEntry')


class KeychainItemEntry(object):
    # One key/value field of a keychain item, with derived display flags.

    def __init__(self, key, value, isSecret=False, isVisible=True,
                 designation=Designation.OTHER, translate=None):
        # NOTE(review): the `translate` parameter shadows the `translate`
        # imported from utils above; inside this method only the argument is
        # visible.  Renaming would change the keyword interface, so left as-is.
        self.key = key
        self.value = value
        self.designation = designation
        # Convenience booleans derived from the designation.
        self.isUsername = designation == Designation.USER
        self.isPassword = designation == Designation.PW
        self.isSection = designation == Designation.SECTION
        self.isUrl = designation == Designation.URL
        # Heuristic secrecy: explicit flag, password designation, or a key
        # that looks sensitive (contains 'password', numeric 'pin', or CVV).
        self.isSecret = isSecret or self.isPassword \
            or 'password' in key.lower() \
            or ('pin' in key.lower() and value.isdigit()) \
            or key == 'cvv'
        self.isVisible = isVisible
        self.translate = translate
trips = get_possible_trips(tracker_id) print 'Trip count = %d' %(len(trips)) for t in trips: trip_stop_times = gtfs.models.StopTime.objects.filter(trip = t).order_by('arrival_time') print "trip id: %s" % (t) for x in trip_stop_times: print db_time_to_datetime(x.arrival_time), db_time_to_datetime(x.departure_time), x.stop print def print_tracked_stop_times(tracker_id): #for tracked_stop_time in self.stop_times: # print tracked_stop_time res = cl.zrange(get_train_tracker_tracked_stops_key(tracker_id), 0, -1, withscores=True) for cur in res: arrival = ot_utils.unix_time_to_localtime(int(cur[1])) cur_0_split = cur[0].split('_') name = stops.all_stops[cur_0_split[0]].name departure = ot_utils.unix_time_to_localtime(int(cur_0_split[1])) if cur_0_split[1] != '' else None print TrackedStopTime.get_str(arrival, departure, name) def add_report(report): bssid_tracker.tracker.add(report) add_report_to_tracker(report.device_id, report) hmm, hmm_non_stop_component_num = setup_hmm() tracker_states = enum(INITIAL='initial', NOSTOP='nostop', STOP='stop', UNKNOWN='unknown') nostop_id = stops.all_stops.id_list[hmm_non_stop_component_num] cl = get_redis_client() p = get_redis_pipeline()
import logging import math import os import tempfile import shutil import pdb import subprocess import sys import pickle LOG = logging.getLogger('storyTime.models') # TODO: cluster mappings that affect each other or create event pool presets Mappings = utils.enum( 'isRecording', 'isPlaying', 'fps', 'curTime', 'timeDisplay', 'isTimeDisplayFrames', 'audioEnabled', 'audioInputDeviceIndex', 'audioOutputDeviceIndex', 'imageCount', 'curImageIndex', 'curImageIndexLabel', 'curImagePath', 'curImage', 'prevImage', 'nextImage', 'recordingIndex', 'recordingName', 'recordingFps', 'recordingDuration', 'recordingDurationDisplay', 'recordingImageCount', 'end', ) if sys.platform in ('win32', 'win64'): FFMPEG = "bin\\windows\\ffmpeg.exe".replace("/","\\") print "Windows FFMPEG: {0}".format(FFMPEG) # TESTING elif sys.platform == 'darwin': FFMPEG = os.path.abspath("bin/mac/ffmpeg") print "Mac FFMPEG: {0}".format(FFMPEG) # TESTING elif sys.platform == 'linux': FFMPEG = os.path.abspath("bin/linux/ffmpeg") print "Linux FFMPEG: {0}".format(FFMPEG) # TESTING class PixmapCache(object): def __init__(self):
import utils
from datasource import DataSource
from shapefile import ShapeFileDataSource

# Supported data-source kinds.
# NOTE(review): the values "ZoomInTool"/"ZoomOutTool" look like a copy-paste
# from a tool enum rather than source-type names — verify before relying on
# the string values anywhere.
sourceTypes = utils.enum(SHAPE_FILE="ZoomInTool", XML_FILE="ZoomOutTool")


def validateSourceType(type):
    # Raises ValueError unless `type` is one of the sourceTypes members.
    # (Parameter name shadows the builtin `type`; kept for caller
    # compatibility.)
    if type not in (sourceTypes.SHAPE_FILE, sourceTypes.XML_FILE):
        raise ValueError('data source not valid')
'''
Created on Jun 20, 2013

@author: nick
'''
from utils import enum

# Who controls a player slot: a human at the keyboard or the engine.
player_types = enum(HUMAN=0, AI=1)
class Node(object):
    """A party in BGW MPC.

    Each party has an id and a messaging client, used to communicate with
    other Nodes and the Master. The Node uses a secret sharing scheme to
    share inputs and perform various calculation.
    Every Node has an internal state machine - for more info see automata
    diagram.
    """

    # Node possible states. See automata diagram.
    State = enum(
        "UNINITIALIZED",
        "INITIALIZED",
        "RECEIVING_INPUTS",
        "RUNNING",
        "GEN_BULK_MUL_MASKS",
        "GEN_BULK_REGEN_MASKS",
        "MUL_DIK_DEALER",
        "MUL_DIK_NON_DEALER",
        "MUL_RANDPOLY_SHARE",
        "MUL_RANDPOLY_DONE",
        "MUL_REDUCTION_SHARE",
        "MUL_REDUCTION_CALC",
        "MUL_RECONSTRUCTION",
        "RECEIVING_OUTPUTS",
        "DONE")

    def __init__(self, id, messenger_client, n_parties, master_id):
        """Initialize a Node.

        @id - the Node's unique id (for communication).
        @messenger_client - a NodeClient used for communication with Nodes
            and Master.
        @n_parties - number of parties (including self).
        @master_id - id of the Master.
        """
        self._id = id
        self._master_id = master_id
        self._io = messenger_client
        self._n_parties = n_parties
        self.reset()

    def reset(self):
        """Reinitialize Node.

        Deletes any state gained after __init__, except NodeClient's state"""
        self._input = None
        self._circuit = None
        self._truncinator = None
        self._resampler = None
        self._ss = None
        self._ss2 = None
        self._t = Config.THRESHOLD(self._n_parties)
        self._t2 = Config.THRESHOLD2(self._n_parties)
        # Random mask shares for DIK10 multiplication
        if Config.MULT_METHOD == Config.MULT_METHODS.DIK:
            self._dik_low = []
            self._dik_high = []
            self._dik_ptr = 0
        # BUG FIX: the GenBulkRegenMasks handler reads self._regen_masks
        # ("if self._regen_pos not in self._regen_masks") but it was never
        # initialized, raising AttributeError on first use.
        self._regen_masks = {}
        self._state = self.State.UNINITIALIZED

    def __repr__(self):
        """String representation of object"""
        ret = "** Node %d: %s **\n" % \
              (self._id, self.State.reverse_mapping[self._state])
        ret += repr(self._io)
        return ret

    def run(self, runs=1):
        """Make @runs steps of execution.

        A step includes the following sub-steps:
        1. Check and handle all of CTRL messages received from Master
        2. Perform additional operations according to the current inner
           state (evaluate a gate / send shares / collect shares / ...)
        """
        for i in xrange(runs):
            # Check for ctrl (high-priority) messages; drain the ctrl queue
            # completely before doing state work.
            ctrl_msgs, reg_msgs = self._io.queue_status()
            while ctrl_msgs > 0:
                m = self._io.get_ctrl_msg()
                assert m is not None, "No message received..."
                self._handle_ctrl_msg(m)
                ctrl_msgs, reg_msgs = self._io.queue_status()

            # Perform additional operations according to the current inner state
            self._run_step()
        return

    def _handle_ctrl_msg(self, m):
        """Handle all types of CTRL messages.

        May change inner state (see diagram).
        @m - a CTRL message
        """
        # switch case on msg type
        typ = m.get_type()

        # Reset Node
        if typ == Message.TYPE.Reset:
            self._io.reset()
            self.reset()
            self._state = self.State.UNINITIALIZED

        # Send debug data to Master
        elif typ == Message.TYPE.DebugData:
            self._send_to_master(Message.TYPE.DebugData, repr(self))

        # Read and set given serialized circuit
        elif typ == Message.TYPE.SetCircuit:
            self._assert_state("SetCircuit", self.State.UNINITIALIZED)
            if self._circuit is not None:
                log.warn("Setting new circuit without reset!")
            c = unserialize(m.get_msg(), Circuit)
            self._set_circuit(c)
            if self._fully_initialized():
                self._state = self.State.INITIALIZED

        # Read and set given parameters for secret sharing schemes
        elif typ == Message.TYPE.SetSecretSharing:
            self._assert_state("SetSecretSharing", self.State.UNINITIALIZED)
            ss_args, ss2_args = unserialize(m.get_msg())
            self._set_secret_sharing(ss_args, ss2_args)
            if self._fully_initialized():
                self._state = self.State.INITIALIZED

        # Read and set given input as party's input (FFE)
        elif typ == Message.TYPE.SetInput:
            self._assert_state("SetInput", self.State.UNINITIALIZED)
            secret_input = unserialize(m.get_msg(), FFE)
            self._set_input(secret_input)
            if self._fully_initialized():
                self._state = self.State.INITIALIZED

        # Set party's input to a random input in the given serialized field
        elif typ == Message.TYPE.SetRandInput:
            self._assert_state("SetRandInput", self.State.UNINITIALIZED)
            field = unserialize(m.get_msg())
            secret_input = field.rand()
            log.info("Node %d: Setting my input to %r (random)", self._id,
                     secret_input)
            self._set_input(secret_input)
            if self._fully_initialized():
                self._state = self.State.INITIALIZED

        # Share party's input. Node must INITIALIZED (input and circuit received)
        elif typ == Message.TYPE.ShareInput:
            self._assert_state("ShareInput", self.State.INITIALIZED)
            self._input_shares = [None] * self._n_parties
            self._share(self._ss, self._input, Message.TYPE.InputShare)
            self._state = self.State.RECEIVING_INPUTS

        # Read and set given Truncinator as party's truncinator (for multiplication)
        elif typ == Message.TYPE.SetTruncinator:
            self._assert_state("SetTruncinator", [
                self.State.UNINITIALIZED, self.State.INITIALIZED,
                self.State.RUNNING
            ])
            msg = m.get_msg()
            self._truncinator = unserialize(msg, Truncinator)

        # Read and set given Resampler as party's resampler (for bulk random masks)
        elif typ == Message.TYPE.SetResampler:
            self._assert_state("SetResampler", [
                self.State.UNINITIALIZED, self.State.INITIALIZED,
                self.State.RUNNING
            ])
            msg = m.get_msg()
            self._resampler = unserialize(msg, Resampler)

        # Generate a bulk of masks for DIK10 multiplication
        elif typ == Message.TYPE.GenBulkMulMasks:
            self._assert_state("GenBulkMulMasks",
                               [self.State.INITIALIZED, self.State.RUNNING])
            assert self._resampler is not None, \
                "Cannot generate bulk masks - Resampler not set!"
            # Share the same random value in both low and high degree so the
            # pair can later mask/unmask a DIK multiplication.
            rand_val = self._field.rand()
            self._share(self._ss, rand_val, Message.TYPE.DIKBulkShareLow)
            self._share(self._ss2, rand_val, Message.TYPE.DIKBulkShareHigh)
            self._dik_low_shares = [None] * self._n_parties
            self._dik_high_shares = [None] * self._n_parties
            self._old_state = self._state
            self._state = self.State.GEN_BULK_MUL_MASKS

        # Generation of a bulk of masks for node going down - Phase1
        elif typ == Message.TYPE.GenBulkRegenMasks:
            self._assert_state("GenBulkRegenMasks",
                               [self.State.INITIALIZED, self.State.RUNNING])
            assert self._resampler is not None, \
                "Cannot generate bulk masks - Resampler not set!"
            # Choose a random polynomial with zero in the position to be
            # regenerated & send shares
            self._regen_pos = unserialize(m.get_msg())
            zero = self._field.zero()
            self._share(self._ss, zero, Message.TYPE.RegenRandPolyShare,
                        pos=self._regen_pos)
            # Init structures for collecting shares and masks
            self._regen_shares = [None] * self._n_parties
            if self._regen_pos not in self._regen_masks:
                self._regen_masks[self._regen_pos] = []
            self._old_state = self._state
            self._state = self.State.GEN_BULK_REGEN_MASKS

        # Evaluate a linear gate. Gate's name received in message
        elif typ == Message.TYPE.EvalGate:
            self._assert_state("EvalGate", self.State.RUNNING)
            self._eval_linear_gate(m.get_msg())

        ### <DIK multiplication> ###
        # Initialize and start evaluation of multiplication gate.
        # Gate's name received in message
        elif typ == Message.TYPE.EvalMulGateDIK:
            self._assert_state("EvalMulGateDIK", self.State.RUNNING)
            assert len(self._dik_low) == len(self._dik_high), \
                "DIK sanity check fail - low & high masks out of sync!"
            assert self._dik_ptr < len(self._dik_low), \
                "Cannot multiply DIK - Not enough masks generated!"
            # Parse message for gate's name and the party performing the
            # calculation
            self._mul_gate, self._mul_party = unserialize(m.get_msg())
            # Calculate local multiplication, add current (high degree)
            # random mask & send to "dealer"
            naive_mult = self._circuit.evaluate_gate(self._mul_gate)
            masked_mult = self._dik_high[self._dik_ptr] + naive_mult
            processed_share = self._ss2.preprocess(
                masked_mult, self._id, pos=None)  # pos=None for secret pos
            self._send_to_party(self._mul_party, Message.TYPE.DIKMulHighShare,
                                processed_share)
            # Am I the dealer?
            if self._id == self._mul_party:
                self._state = self.State.MUL_DIK_DEALER
                self._mul_shares = [None] * self._n_parties
            else:
                self._state = self.State.MUL_DIK_NON_DEALER
        ### </DIK multiplication> ###

        ### <BGW multiplication> ###
        # Initialize and start evaluation of multiplication gate.
        # Gate's name received in message
        elif typ == Message.TYPE.EvalMulGateInit:
            self._assert_state("EvalMulGateInit", self.State.RUNNING)
            assert self._truncinator is not None, \
                "Cannot multiply - Truncinator not set!"
            # Init data structures for multiplication
            self._mul_gate = m.get_msg()
            self._mul_shares = [None] * self._n_parties
            # Generate a my part in the random mask & share
            zero = self._field.zero()
            self._share(self._ss2, zero, Message.TYPE.BGWMulRandPolyShare)
            self._state = self.State.MUL_RANDPOLY_SHARE

        # Perform local multiplication and mask result using the joint mask
        # by received shares
        elif typ == Message.TYPE.EvalMulGateReduce:
            self._assert_state("EvalMulGateReduce",
                               self.State.MUL_RANDPOLY_DONE)
            # Calculate regular multiplication and add joint random mask
            naive_mult = self._circuit.evaluate_gate(self._mul_gate)
            masked_mult = sum(self._mul_shares, naive_mult)
            # Share result (in high dimension) & reset data structure for
            # new shares
            self._share(self._ss2, masked_mult,
                        Message.TYPE.BGWMulReductionShare)
            self._mul_shares = [None] * self._n_parties
            self._state = self.State.MUL_REDUCTION_SHARE

        # Perform degree reduction on shares received, and send each party
        # back its appropriate part
        elif typ == Message.TYPE.EvalMulGateFinalize:
            self._assert_state("EvalMulGateFinalize",
                               self.State.MUL_REDUCTION_CALC)
            # Calculate linear degree reduction on the vector of evaluations
            reduced = self._truncinator.reduce(self._mul_shares)
            # Send back the reduced result & reset data structure for new shares
            processed_shares = [
                self._ss2.preprocess(s, self._id, pos=None) for s in reduced
            ]
            self._send_to_parties(Message.TYPE.BGWMulReductionResult,
                                  processed_shares)
            self._mul_shares = [None] * self._n_parties
            self._state = self.State.MUL_RECONSTRUCTION
        ### </BGW multiplication> ###

        # Send my final output to all of the parties
        elif typ == Message.TYPE.EvalOutput:
            self._assert_state("ShareOutput", self.State.RUNNING)
            self._output_shares = [None] * self._n_parties
            out_share = self._circuit.get_output()
            output_msg = self._ss.preprocess(
                out_share, self._id, pos=None)  # pos=None for secret pos
            msgs = [output_msg] * self._n_parties
            self._send_to_parties(Message.TYPE.OutputShare, msgs)
            self._state = self.State.RECEIVING_OUTPUTS

        else:
            log.error("Unknown ctrl msg received:\n%r", m)

    def _run_step(self):
        """Run additional operations according to inner state"""
        # switch case on states

        # UNINITIALIZED - Nothing to do (waiting for orders)
        if self._state is self.State.UNINITIALIZED:
            pass

        # RECEIVING_INPUTS - collect inputs received so far, initialize
        # circuit when all inputs received
        elif self._state is self.State.RECEIVING_INPUTS:
            is_done = self._parse_shares(Message.TYPE.InputShare,
                                         self._input_shares)
            # All input shares received?
            if is_done:
                # Set in right order and initialize circuit
                self._circuit.init_all_inputs(self._input_shares)
                self._state = self.State.RUNNING
                del self._input_shares

        # RUNNING - Nothing to do (waiting for orders)
        elif self._state is self.State.RUNNING:
            pass

        # GEN_BULK_MUL_MASKS - collect shares received so far, until all
        # shares received
        elif self._state is self.State.GEN_BULK_MUL_MASKS:
            is_done_low = self._parse_shares(Message.TYPE.DIKBulkShareLow,
                                             self._dik_low_shares)
            is_done_high = self._parse_shares(Message.TYPE.DIKBulkShareHigh,
                                              self._dik_high_shares)
            # All shares received?
            if is_done_low and is_done_high:
                # Create vector of the random evaluations and calculate
                # linear combinations
                self._dik_low += self._resampler(self._dik_low_shares)
                self._dik_high += self._resampler(self._dik_high_shares)
                self._state = self._old_state
                del self._old_state
                del self._dik_low_shares
                del self._dik_high_shares

        # MUL_DIK_DEALER - collect shares received so far, until all received
        # then reconstruct masked multiplication and reshare
        elif self._state is self.State.MUL_DIK_DEALER:
            is_done = self._parse_shares(Message.TYPE.DIKMulHighShare,
                                         self._mul_shares)
            # All shares received?
            if is_done:
                # Reconstruct masked mult (in high dimension) & reshare in
                # low dimension
                log.debug("Node %d Redealing", self._id)
                masked_mul = self._ss2.reconstruct(self._mul_shares, pos=None)
                self._share(self._ss, masked_mul, Message.TYPE.DIKMulLowShare)
                # Back to being like everybody else (wait for the share I
                # sent myself)
                self._state = self.State.MUL_DIK_NON_DEALER
                del self._mul_shares

        # MUL_DIK_NON_DEALER - receive masked multiplication, remove mask &
        # update the circuit
        elif self._state is self.State.MUL_DIK_NON_DEALER:
            result = {self._mul_party: None}
            is_done = self._parse_shares(Message.TYPE.DIKMulLowShare,
                                         result,
                                         expected_parties=[self._mul_party])
            # Result received?
            if is_done:
                # Remove the mask from the output -> finally update the circuit
                masked_output = result[self._mul_party]
                output = masked_output - self._dik_low[self._dik_ptr]
                self._circuit.set_gate_output(self._mul_gate, output)
                self._dik_ptr += 1  # Done with this random mask pair
                self._state = self.State.RUNNING
                del self._mul_gate
                del self._mul_party

        # GEN_BULK_REGEN_MASKS - collect shares received so far, until all
        # shares received
        elif self._state is self.State.GEN_BULK_REGEN_MASKS:
            # BUG FIX: the result was assigned to `is_done_low` while the
            # condition below tested an undefined `is_done`, raising
            # NameError (or reusing a stale value) the first time this
            # state was stepped.
            is_done = self._parse_shares(Message.TYPE.RegenRandPolyShare,
                                         self._regen_shares)
            # All shares received?
            if is_done:
                # Create vector of the random evaluations and calculate
                # linear combinations
                self._regen_masks[self._regen_pos] += self._resampler(
                    self._regen_shares)
                self._state = self._old_state
                del self._old_state
                del self._regen_shares
                del self._regen_pos

        # MUL_RANDPOLY_SHARE - collect shares received so far, until all
        # shares received
        elif self._state is self.State.MUL_RANDPOLY_SHARE:
            # Read all received shares to _mul_shares
            is_done = self._parse_shares(Message.TYPE.BGWMulRandPolyShare,
                                         self._mul_shares)
            # All shares received?
            if is_done:
                self._state = self.State.MUL_RANDPOLY_DONE

        # MUL_RANDPOLY_DONE - Nothing to do (waiting for orders)
        elif self._state is self.State.MUL_RANDPOLY_DONE:
            # Nothing to do (waiting for orders)
            pass

        # MUL_REDUCTION_SHARE - collect shares received so far, until all
        # shares received
        elif self._state is self.State.MUL_REDUCTION_SHARE:
            is_done = self._parse_shares(Message.TYPE.BGWMulReductionShare,
                                         self._mul_shares)
            # All shares received?
            if is_done:
                self._state = self.State.MUL_REDUCTION_CALC

        # MUL_REDUCTION_CALC - Nothing to do (waiting for orders)
        elif self._state is self.State.MUL_REDUCTION_CALC:
            pass

        # MUL_RECONSTRUCTION - collect shares received so far, until all
        # received, then reconstruct gate's output & update the circuit
        elif self._state is self.State.MUL_RECONSTRUCTION:
            is_done = self._parse_shares(Message.TYPE.BGWMulReductionResult,
                                         self._mul_shares)
            # All shares received?
            if is_done:
                # Reconstruct our output (now in low dimension) -> finally
                # update the circuit
                output = self._ss2.reconstruct(
                    self._mul_shares, pos=None)  # Reconstruct secret pos
                self._circuit.set_gate_output(self._mul_gate, output)
                self._state = self.State.RUNNING
                del self._mul_shares
                del self._mul_gate

        # RECEIVING_OUTPUTS - collect outputs received so far, until all
        # received, then reconstruct circuit's output
        elif self._state is self.State.RECEIVING_OUTPUTS:
            is_done = self._parse_shares(Message.TYPE.OutputShare,
                                         self._output_shares)
            # All outputs received?
            if is_done:
                self._output = self._ss.reconstruct(self._output_shares,
                                                    pos=None)
                self._circuit.set_gate_output(self._circuit.OUTPUT,
                                              self._output)
                self._state = self.State.DONE
                log.info("Node %d outputs %r, I'm done.", self._id,
                         self._output)
                del self._output_shares

        # DONE - Nothing to do (waiting for orders)
        elif self._state is self.State.DONE:
            pass

    def _assert_state(self, msg_type, state):
        """Make sure inner state is @state.

        If not - raises an AssertionError with @msg_type in error message"""
        if not hasattr(state, '__iter__'):
            # Single expected state
            valid_state = (self._state == state)
            state_name = self.State.reverse_mapping[state]
        else:
            # Any of several expected states
            valid_state = (self._state in state)
            state_name = map(self.State.reverse_mapping.get, state)
        assert valid_state, \
            "Invalid state for %s, expected state/s %s\n%s" % \
            (msg_type, state_name, repr(self))

    def _set_input(self, secret_input):
        """Sets the Node's input to the FFE @secret_input"""
        self._input = secret_input
        # The input also fixes the field all later operations work in.
        self._field = secret_input.get_field()

    def _set_secret_sharing(self, ss_args, ss2_args):
        """Sets the Node's secret sharing schemes from the given arguments.

        Each of @ss_args/@ss2_args may be None (no extra args), a dict
        (keyword args) or a sequence (positional args)."""
        if ss_args is None:
            ss_args = []
        if isinstance(ss_args, dict):
            self._ss = SSS(self._n_parties, self._t, **ss_args)
        else:
            self._ss = SSS(self._n_parties, self._t, *ss_args)

        if ss2_args is None:
            ss2_args = []
        if isinstance(ss2_args, dict):
            self._ss2 = SSS2(self._n_parties, self._t2, **ss2_args)
        else:
            self._ss2 = SSS2(self._n_parties, self._t2, *ss2_args)

    def _set_circuit(self, circuit):
        """Sets the Node's circuit according to the given @circuit"""
        assert circuit.get_num_inputs() == self._n_parties, \
            "Number of circuit inputs (%d) != (%d) Number of parties" % \
            (circuit.get_num_inputs(), self._n_parties)
        self._circuit = circuit
        self._cur_gate = None

    def _fully_initialized(self):
        """Check if Node is fully initialized (input & circuit set).

        Note that Truncinator is not a must (as some circuits have no
        multiplication gates)
        """
        return (self._circuit is not None and
                self._input is not None and
                self._ss is not None and
                self._ss2 is not None)

    def _parse_shares(self, msg_type, shares, expected_parties=None):
        """Reads shares received from other nodes.

        Expects that all messages are of @msg_type, containing serialized
        objects.
        @shares - a dict / list of length _n_parties holding current
            party->share/None mapping, that will be updated during the run.
        @expected_parties - a list of party ids of which inputs are to be
            received. If None - all parties needed.
        Returns True iff all expected shares received.
        """
        m = self._io.get_data_msg(msg_type)
        while m is not None:
            assert m.get_type() == msg_type, \
                "Unexpected message received, expecting %d:\n%r" % \
                (msg_type, m)
            data = m.get_msg()
            share = unserialize(data)
            src = m.get_src()
            if shares[src] is not None:
                log.warn("Src %d already sent a share...", src)
            shares[src] = share
            m = self._io.get_data_msg(msg_type)

        # Are we done?
        if expected_parties is None:
            expected_parties = xrange(self._n_parties)
        remaining = [p for p in expected_parties if (shares[p] is None)]
        return (remaining == [])

    def _eval_linear_gate(self, gate_name):
        """Evaluate a linear gate @gate_name"""
        self._circuit.evaluate_gate(gate_name)

    def _share(self, ss, secret, msg_type, *args, **kwargs):
        """Share @secret using the initialized SecretSharingScheme @ss.

        Then send each party a Message with their share, of type @msg_type.
        Additional positional/keyword arguments may be supplied for @ss.
        """
        log.debug("Sharing %r with msg_type %r", secret, msg_type)
        shares = ss.share(secret, *args, **kwargs)
        self._send_to_parties(msg_type, shares)

    def _send_to_parties(self, msg_type, msgs):
        """Sends a Message of type @msg_type to multiple parties.

        @msgs - n-long list of messages to send or a party->msg dictionary.
        """
        items = msgs.iteritems() if isinstance(msgs, dict) else enumerate(msgs)
        for party, data in items:
            self._send_to_party(party, msg_type, data)

    def _send_to_party(self, dst, msg_type, data):
        """Sends a Message of type @msg_type to party @dst with the payload @data"""
        assert 0 <= dst < self._n_parties, \
            "Invalid party id %d for msg sending" % dst
        if not isinstance(data, str):
            data = serialize(data)
        m = Message(self._id, dst, msg_type, data)
        self._io.send(m)

    def _send_to_master(self, msg_type, data):
        """Build & send the Master a Message of type @msg_type and data (str) @data"""
        if not isinstance(data, str):
            data = serialize(data)
        # BUG FIX: used self.MASTER_ID, which is never defined on this class
        # (AttributeError); the master's id is stored as self._master_id in
        # __init__.
        m = Message(self._id, self._master_id, msg_type, data)
        self._io.send(m)
from utils import enum nodeNumber = enum("ACTUAL_PARAM", "ADD", "ADD_ASSIGN", "ARRAY_VAR", "ASSIGN_OP", "CALL", "COMPOUND_ST", "CONST_NODE", "DCL", "DCL_ITEM", "DCL_LIST", "DCL_SPEC", "DIV", "DIV_ASSIGN", "EQ", "ERROR_NODE", "EXP_ST", "FORMAL_PARA", "FUNC_DEF", "FUNC_HEAD", "GE", "GT", "IDENT", "IF_ELSE_ST", "IF_ST", "INDEX", "INT_NODE", "LE", "LOGICAL_AND", "LOGICAL_NOT", "LOGICAL_OR", "LT", "MOD", "MOD_ASSIGN", "MUL", "MUL_ASSIGN", "NE", "NUMBER", "PARAM_DCL", "POST_DEC", "POST_INC", "PRE_DEC", "PRE_INC", "PROGRAM", "RETURN_ST", "SIMPLE_VAR", "STAT_LIST", "SUB", "SUB_ASSIGN", "UNARY_MINUS", "VOID_NODE", "WHILE_ST") nodeName = ["ACTUAL_PARAM", "ADD", "ADD_ASSIGN", "ARRAY_VAR", "ASSIGN_OP", "CALL", "COMPOUND_ST", "CONST_NODE", "DCL", "DCL_ITEM", "DCL_LIST", "DCL_SPEC", "DIV", "DIV_ASSIGN", "EQ", "ERROR_NODE", "EXP_ST", "FORMAL_PARA", "FUNC_DEF", "FUNC_HEAD", "GE", "GT", "IDENT", "IF_ELSE_ST", "IF_ST", "INDEX", "INT_NODE", "LE", "LOGICAL_AND", "LOGICAL_NOT", "LOGICAL_OR", "LT", "MOD", "MOD_ASSIGN", "MUL", "MUL_ASSIGN", "NE", "NUMBER", "PARAM_DCL", "POST_DEC", "POST_INC", "PRE_DEC", "PRE_INC", "PROGRAM", "RETURN_ST", "SIMPLE_VAR", "STAT_LIST", "SUB", "SUB_ASSIGN", "UNARY_MINUS", "VOID_NODE", "WHILE_ST"] ruleName = [0, nodeNumber.PROGRAM, 0, 0, 0, 0, nodeNumber.FUNC_DEF, nodeNumber.FUNC_HEAD, nodeNumber.DCL_SPEC, 0, 0, 0, 0, nodeNumber.CONST_NODE, nodeNumber.INT_NODE, nodeNumber.VOID_NODE, 0, nodeNumber.FORMAL_PARA, 0, 0, 0, 0, nodeNumber.PARAM_DCL, nodeNumber.COMPOUND_ST, nodeNumber.DCL_LIST,
import dataclasses
import re
import utils
from functools import partial
from dataclasses import field
from typing import List, Tuple

# Type of a command argument, used to parse/validate user input.
ArgType = utils.enum(INT=0, STR=1, LIST=2)


@dataclasses.dataclass
class Arg:
    """Declarative description of one command argument."""
    name: str
    desc: str = ""
    argtype: ArgType = ArgType.STR   # how the raw token is interpreted
    optional: bool = False           # may be omitted by the caller


# Where a config value applies: per-user or per-channel.
ConfigScope = utils.enum(USER=0, CHAN=1)
# Kind of hook a handler registers for; values are the registry tags.
HookType = utils.enum(COMMAND="cmd", RAW="raw", PATTERN="reg")
# Privilege required to run a command, from least to most channel-specific.
AccessType = utils.enum(ANY=0, IDENTIFIED=1, ADMIN=2, CHAN_OP=3, CHAN_HOP=4,
                        CHAN_VOP=5)

# Attribute name under which handler metadata is stashed on the function.
FNINFO_ATTR = "fninfo"
import uuid
import json
from collections import defaultdict

from ws4py.websocket import WebSocket

from utils import head, enum, tail, inverse

# WAMP v1 message type codes; see http://wamp.ws/spec
MESSAGES = enum({
    "WELCOME": 0,
    "PREFIX": 1,
    "SUBSCRIBE": 5,
    "UNSUBSCRIBE": 6,
    "PUBLISH": 7,
    "EVENT": 8,
})
PROTOCOL_VERSION = 1
SERVER_IDENT = "twister"


class Channel(list):
    """A list of subscribed clients that supports broadcasting."""

    def publish(self, message, exclude=None):
        """Send *message* to every subscriber, optionally skipping one.

        @exclude - a client to leave out (identity comparison), typically
            the original sender of the message.
        """
        for subscriber in self:
            if subscriber is not exclude:
                subscriber.send(message)
import uuid import json from collections import defaultdict from ws4py.websocket import WebSocket from utils import head, enum, tail, inverse MESSAGES = enum({ # http://wamp.ws/spec "WELCOME": 0, "PREFIX": 1, "SUBSCRIBE": 5, "UNSUBSCRIBE": 6, "PUBLISH": 7, "EVENT": 8, }) PROTOCOL_VERSION = 1 SERVER_IDENT = "twister" class Channel(list): def publish(self, message, exclude=None): for client in self: if exclude is client: continue client.send(message) class Twister(WebSocket):