Пример #1
0
def generateDictsFromShp(shapeFile, outputPath):
    """Read nodes and node weights from a point shapefile.

    Every point feature becomes a network.Node with unit weight, and each
    node starts out as its own single-node cluster (node FID doubles as the
    cluster ID).

    Args:
        shapeFile: path to the input point shapefile.
        outputPath: directory to create if it does not already exist.

    Returns:
        Tuple (nodesByClusterID, clusterByNode, nodes, centers, LVCostDict).
    """
    rootDir, fc = os.path.split(shapeFile)
    baseName, ext = os.path.splitext(fc)  # currently unused; kept for parity

    if not os.path.exists(outputPath):
        try:
            os.mkdir(outputPath)
        except OSError:  # narrow: only directory-creation failures
            print("ERROR: could not create new directory", outputPath)
    ds = ogr.Open(shapeFile)
    ptLayer = ds.GetLayer(0)

    nodesByClusterID = collections.defaultdict(list)
    clusterByNode = {}
    nodes = {}
    centers = {}
    LVCostDict = {}

    feat = ptLayer.GetNextFeature()
    while feat is not None:
        nodeWeight = 1
        geomRef = feat.GetGeometryRef()
        x = geomRef.GetX()
        y = geomRef.GetY()
        FID = feat.GetFID()
        nodes[FID] = network.Node(FID, x, y, nodeWeight)  # households
        # transformer candidate: center of mass of the (initial) cluster
        centers[FID] = network.Node(FID, x, y, nodeWeight)

        clusterByNode[nodes[FID]] = FID
        nodesByClusterID[FID].append(nodes[FID])
        LVCostDict[FID] = 0
        feat = ptLayer.GetNextFeature()
    ds.Destroy()
    return nodesByClusterID, clusterByNode, nodes, centers, LVCostDict
def generateDictsFromShp(shapeFile, outputPath):
    """Read nodes, weights and synthetic demands from a point shapefile.

    Every point feature becomes a network.Node with unit weight. A fixed
    random 30% of features (seeded for reproducibility) get a demand of
    100; the rest get 30. Each node starts out as its own single-node
    cluster (node FID doubles as the cluster ID).

    Args:
        shapeFile: path to the input point shapefile.
        outputPath: directory to create if it does not already exist.

    Returns:
        Tuple (nodesByClusterID, clusterByNode, nodes, centers, LVCostDict,
        nodes_demands_output) where nodes_demands_output is a DataFrame
        with columns ['x', 'y', 'demands'].
    """
    rootDir, fc = os.path.split(shapeFile)
    baseName, ext = os.path.splitext(fc)  # currently unused; kept for parity

    if not os.path.exists(outputPath):
        try:
            os.mkdir(outputPath)
        except OSError:  # narrow: only directory-creation failures
            print("ERROR: could not create new directory", outputPath)
    ds = ogr.Open(shapeFile)
    ptLayer = ds.GetLayer(0)

    nodesByClusterID = collections.defaultdict(list)
    clusterByNode = {}
    nodes = {}
    centers = {}
    LVCostDict = {}
    nodes_demands_output = []

    # Deterministically pick 30% of the FIDs as high-demand nodes.
    np.random.seed(7)
    featureCount = ptLayer.GetFeatureCount()
    indices = np.random.permutation(featureCount)
    high = indices[:int(0.3 * featureCount)]

    feat = ptLayer.GetNextFeature()
    while feat is not None:
        FID = feat.GetFID()
        nodeWeight = 1
        # 100 vs 30 kWh/month depending on the random high-demand split.
        nodeDemand = 100 if FID in high else 30
        geomRef = feat.GetGeometryRef()
        x = geomRef.GetX()
        y = geomRef.GetY()
        nodes[FID] = network.Node(FID, x, y, nodeWeight,
                                  nodeDemand)  # households
        # transformer candidate: center of mass of the (initial) cluster
        centers[FID] = network.Node(FID, x, y, nodeWeight, nodeDemand)
        clusterByNode[nodes[FID]] = FID
        nodesByClusterID[FID].append(nodes[FID])
        LVCostDict[FID] = 0
        nodes_demands_output.append([x, y, nodeDemand])
        feat = ptLayer.GetNextFeature()
    ds.Destroy()
    nodes_demands_output = pd.DataFrame(nodes_demands_output,
                                        columns=['x', 'y', 'demands'])
    return nodesByClusterID, clusterByNode, nodes, centers, LVCostDict, nodes_demands_output
Пример #3
0
def generateDictsFromShp(shapeFile, outputPath):
    """Read nodes, weights and synthetic demands from a point shapefile.

    Every point feature becomes a network.Node with unit weight. A fixed
    random 30% of features (seeded for reproducibility) get a demand of
    100; the rest get 30. Each node starts out as its own single-node
    cluster (node FID doubles as the cluster ID).

    Args:
        shapeFile: path to the input point shapefile.
        outputPath: directory to create if it does not already exist.

    Returns:
        Tuple (nodesByClusterID, clusterByNode, nodes, centers, LVCostDict,
        nodes_weights_output) where nodes_weights_output is a DataFrame
        with columns ['x', 'y', 'weights'].
    """
    rootDir, fc = os.path.split(shapeFile)
    baseName, ext = os.path.splitext(fc)  # currently unused; kept for parity

    if not os.path.exists(outputPath):
        try:
            os.mkdir(outputPath)
        except OSError:  # narrow: only directory-creation failures
            print("ERROR: could not create new directory", outputPath)

    ds = ogr.Open(shapeFile)
    ptLayer = ds.GetLayer(0)

    nodesByClusterID = collections.defaultdict(list)
    clusterByNode = {}
    nodes = {}
    centers = {}
    LVCostDict = {}
    nodes_weights_output = []

    # Deterministically pick 30% of the FIDs as high-demand nodes.
    np.random.seed(7)
    featureCount = ptLayer.GetFeatureCount()
    indices = np.random.permutation(featureCount)
    high = indices[:int(0.3 * featureCount)]

    feat = ptLayer.GetNextFeature()
    while feat is not None:
        nodeWeight = 1
        geomRef = feat.GetGeometryRef()
        FID = feat.GetFID()
        x = geomRef.GetX()
        y = geomRef.GetY()

        # 100 vs 30 kWh/month depending on the random high-demand split.
        nodeDemand = 100 if FID in high else 30
        nodes[FID] = network.Node(FID, x, y, nodeWeight, nodeDemand)
        centers[FID] = network.Node(FID, x, y, nodeWeight, nodeDemand)
        clusterByNode[nodes[FID]] = FID
        nodesByClusterID[FID].append(nodes[FID])
        LVCostDict[FID] = 0
        nodes_weights_output.append([x, y, nodeWeight])
        feat = ptLayer.GetNextFeature()
    ds.Destroy()
    nodes_weights_output = pd.DataFrame(nodes_weights_output,
                                        columns=['x', 'y', 'weights'])
    return nodesByClusterID, clusterByNode, nodes, centers, LVCostDict, nodes_weights_output
def readNetFromShp(shapefile):
    """Read segments and endpoint nodes from a line shapefile into a Network.

    Each feature must carry a "Length" field plus endpoint ID and weight
    fields ("pt1"/"pt2" and "pt1Weight"/"pt2Weight").

    Args:
        shapefile: path to the input line shapefile.

    Returns:
        network.Network populated with one Seg per feature.
    """
    ds = ogr.Open(shapefile)
    layer = ds.GetLayer(0)
    net = network.Network()
    feat = layer.GetNextFeature()
    lengthField = "Length"
    nodeWeightFields = ["pt1Weight", "pt2Weight"]
    nodeIDFields = ["pt1", "pt2"]
    while feat is not None:
        geomRef = feat.GetGeometryRef()
        length = feat.GetField(lengthField)

        endPts = []
        for n in range(2):  # the two endpoints of the segment
            x, y = geomRef.GetX(n), geomRef.GetY(n)
            try:
                nodeWeight = feat.GetField(nodeWeightFields[n])
            except ValueError as msg:
                print(msg)
                print("ERROR: field \"" + nodeWeightFields[n] +
                      "\" doesn't exist")
                # BUG FIX: fall back to a default so nodeWeight is never
                # unbound (or stale from the previous endpoint) below.
                nodeWeight = 1
            nodeID = feat.GetField(nodeIDFields[n])
            endPts.append(network.Node(nodeID, x, y, nodeWeight))
        newSeg = network.Seg(feat.GetFID(), endPts[0], endPts[1], length)
        net.addSeg(newSeg)
        feat = layer.GetNextFeature()
    # BUG FIX: the original built the network but fell off the end
    # without returning it.
    return net
Пример #5
0
def readNetFromShp(inputShapefile):
    """Read segments and endpoint nodes from a shapefile via a gp cursor.

    Each row must carry "Length", "FID", endpoint IDs ("pt1"/"pt2") and
    endpoint weights ("pt1Weight"/"pt2Weight"); the geometry's first part
    supplies the two endpoint coordinates in order.

    Args:
        inputShapefile: path to the input line shapefile.

    Returns:
        network.Network populated with one Seg per row.
    """
    rows = gp.searchCursor(inputShapefile)
    desc = gp.describe(inputShapefile)
    net = network.Network()
    row = rows.next()
    while row:
        feat = row.GetValue(desc.ShapeFieldName)
        ptIDs = [row.getValue("pt1"), row.getValue("pt2")]
        ptWeights = [row.getValue("pt1Weight"), row.getValue("pt2Weight")]
        length = row.getValue("Length")
        FID = row.getValue("FID")

        # Read the two endpoint nodes from the first geometry part.
        part = feat.getPart(0)
        part.reset()
        pt = part.next()
        nodes = []
        for n in range(2):  # BUG FIX: xrange is Python 2 only
            nodes.append(network.Node(ptIDs[n], pt.x, pt.y, ptWeights[n]))
            pt = part.next()
        # Add the segment before advancing the cursor (same behavior as
        # the original, just in the natural order).
        net.addSeg(network.Seg(FID, nodes[0], nodes[1], length))
        row = rows.next()
    del rows  # ensure cursor closes
    return net
Пример #6
0
def generateDictsFromShp(shapeFile, outputPath):
    """Read nodes from a point shapefile, one unit-weight Node per feature.

    Args:
        shapeFile: path to the input point shapefile.
        outputPath: directory to create if it does not already exist.

    Returns:
        Dict mapping feature FID -> network.Node.
    """
    rootDir, fc = os.path.split(shapeFile)
    baseName, ext = os.path.splitext(fc)  # currently unused; kept for parity

    if not os.path.exists(outputPath):
        try:
            os.mkdir(outputPath)
        except OSError:  # narrow: only directory-creation failures
            print("ERROR: could not create new directory", outputPath)
    ds = ogr.Open(shapeFile)
    ptLayer = ds.GetLayer(0)

    nodes = {}
    feat = ptLayer.GetNextFeature()
    while feat is not None:
        nodeWeight = 1
        geomRef = feat.GetGeometryRef()
        x = geomRef.GetX()
        y = geomRef.GetY()
        FID = feat.GetFID()
        nodes[FID] = network.Node(FID, x, y, nodeWeight)  # households
        feat = ptLayer.GetNextFeature()
    ds.Destroy()
    return nodes
Пример #7
0
def run_simulation(network_status):
    """Run a Solana-style consensus simulation with random network partitions.

    Builds a network from GENESIS, attaches one node per validator,
    assigns a random leader rotation, then ticks the network for
    POOL_SIZE * 2 steps. Each tick has a 5% chance of starting a
    long-lived partition whose membership is sampled per-node with
    probability NETWORK_PARTITION; every 25th tick also runs a unique
    chain analysis through network_status.

    Args:
        network_status: object with update_status(network, chain_analysis,
            print_snapshot) used to record per-tick snapshots.

    Returns:
        The simulated solana.Network.
    """
    ## Config network
    GENESIS = solana.Block(initial_validator_set=VALIDATOR_IDS)
    network = solana.Network(poisson_latency(AVG_LATENCY), GENESIS)

    ## Attach nodes to network
    nodes = [solana.Node(network, i) for i in VALIDATOR_IDS]

    ## Assign leader rotation
    leaders = np.random.choice(VALIDATOR_IDS, POOL_SIZE, replace=False)
    network.round_robin = leaders

    ## Partition state: cur_partition_time < 0 means no active partition.
    cur_partition_time = -1
    network.partition_nodes = []
    long_lived_partition = False

    for t in range(POOL_SIZE * 2):

        ## each tick, some % chance of long-lived partition
        if not long_lived_partition and cur_partition_time < 0:
            long_lived_partition = np.random.uniform() < 0.05

        ## generate partitions
        if long_lived_partition:
            network.partition_nodes = list(compress(VALIDATOR_IDS,\
                                                    [np.random.uniform() < NETWORK_PARTITION for _ in nodes]))
            # BUG FIX: randint requires integer bounds; POOL_SIZE / 5 is a
            # float in Python 3 and raises ValueError.
            cur_partition_time = randint(1, POOL_SIZE // 5)  ## next partition
            long_lived_partition = False

        print("Partition size: %s for: %s" %
              (len(network.partition_nodes), cur_partition_time))

        network.tick()

        # Run the (expensive) unique-chain analysis every 25th tick only.
        do_unique_chain_analysis = ((t + 1) % 25) == 0
        network_snapshot = network_status.update_status(
            network,
            chain_analysis=do_unique_chain_analysis,
            print_snapshot=False)

        ## if time is up, reset partition nodes
        if cur_partition_time <= 0:
            network.partition_nodes = []
            cur_partition_time = -1
        else:
            cur_partition_time -= 1

    return network
Пример #8
0
def CMST_Caller(households, capacity, root):
    """Build a capacitated-MST LV tree over households rooted at `root`.

    Packs the households and root into numpy arrays, runs
    CMST_only_rewrite, then converts the resulting (child, parent) index
    pairs into network.Seg objects keyed by their (child FID, parent FID).

    Args:
        households: sequence of node objects exposing getID/getX/getY/getWeight;
            indexable so that array row i corresponds to households[i].
        capacity: capacity limit passed through to CMST_only_rewrite.
        root: the tree root node (e.g. a transformer).

    Returns:
        Tuple (tree_segments, total_LV_length) where tree_segments maps
        (childFID, parentFID) -> network.Seg and total_LV_length is the
        summed Euclidean length of all segments.
    """
    # Rows: [ID, x, y, weight]; row index — not FID — is what
    # CMST_only_rewrite's (child, parent) pairs refer to.
    household_data = [(h.getID(), h.getX(), h.getY(), h.getWeight())
                      for h in households]
    household_data = np.array(household_data)
    root_data = [root.getID(), root.getX(), root.getY(), root.getWeight()]
    root_data = np.array(root_data)

    connections = CMST_only_rewrite(household_data, capacity, root_data)

    # Indices with no parent in `connections` attach directly to the root.
    top_parents = set(range(len(household_data))) - set(
        [child for (child, parent) in connections])
    segments = [(int(household_data[child, 0]), int(household_data[parent, 0]))
                for (child, parent) in connections]
    segments += [(int(household_data[top_parent, 0]), root.getID())
                 for top_parent in top_parents]

    # Encode root-attachments with a negative sentinel parent so the loop
    # below can distinguish them from household-to-household links.
    connections += [(child, -root.getID() - 100) for child in top_parents]
    tree_segments = {}
    total_LV_length = 0
    # Root is re-created under the sentinel ID (-root.getID() - 100) so the
    # Seg endpoints match the keys used in tree_segments.
    tmp_root = network.Node((-root.getID() - 100), root.getX(), root.getY(),
                            root.getWeight())

    for seg_id, (child, parent) in enumerate(connections):
        if parent < 0:
            # Child attaches directly to the root.
            length = np.linalg.norm(household_data[child, 1:3] -
                                    root_data[1:3])
            # seg_id + 1e7 keeps these Seg IDs out of the household FID range.
            tree_segments[(int(household_data[child, 0]),
                           parent)] = network.Seg(seg_id + int(1e7),
                                                  households[child], tmp_root,
                                                  length)
        else:
            # Household-to-household link.
            length = np.linalg.norm(household_data[child, 1:3] -
                                    household_data[parent, 1:3])
            tree_segments[(int(household_data[child, 0]),
                           int(household_data[parent, 0]))] = network.Seg(
                               seg_id + int(1e7), households[child],
                               households[parent], length)
        total_LV_length += length

    return tree_segments, total_LV_length
Пример #9
0
def generateDictsFromShp(shapeFile, outputPath):
    """Read nodes and node weights from a point shapefile.

    Every point feature becomes a network.Node with unit weight, and each
    node starts out as its own single-node cluster (node FID doubles as
    the cluster ID).

    Args:
        shapeFile: path to the input point shapefile.
        outputPath: directory to create if it does not already exist.

    Returns:
        Tuple (nodesByClusterID, clusterByNode, nodes, centers, LVCostDict,
        nodes_weights_output) where nodes_weights_output is a DataFrame
        with columns ['x', 'y', 'weights'].
    """
    rootDir, fc = os.path.split(shapeFile)
    baseName, ext = os.path.splitext(fc)  # currently unused; kept for parity

    if not os.path.exists(outputPath):
        try:
            os.mkdir(outputPath)
        except OSError:  # narrow: only directory-creation failures
            print("ERROR: could not create new directory", outputPath)

    ds = ogr.Open(shapeFile)
    ptLayer = ds.GetLayer(0)

    nodesByClusterID = collections.defaultdict(list)
    clusterByNode = {}
    nodes = {}
    centers = {}
    LVCostDict = {}
    nodes_weights_output = []

    feat = ptLayer.GetNextFeature()
    while feat is not None:
        nodeWeight = 1
        geomRef = feat.GetGeometryRef()
        x = geomRef.GetX()
        y = geomRef.GetY()
        FID = feat.GetFID()
        nodes[FID] = network.Node(FID, x, y, nodeWeight)
        centers[FID] = network.Node(FID, x, y, nodeWeight)
        clusterByNode[nodes[FID]] = FID
        nodesByClusterID[FID].append(nodes[FID])
        LVCostDict[FID] = 0
        nodes_weights_output.append([x, y, nodeWeight])
        feat = ptLayer.GetNextFeature()
    ds.Destroy()
    nodes_weights_output = pd.DataFrame(nodes_weights_output,
                                        columns=['x', 'y', 'weights'])
    return nodesByClusterID, clusterByNode, nodes, centers, LVCostDict, nodes_weights_output
Пример #10
0
 def __init__(self, pos, size=(50, 50)):
     """Build this component's nodes and connections, optionally shifted to *pos*.

     Args:
         pos: (x, y) offset applied to every node position, or None for
             no shift.
         size: (width, height) of the component, stored as-is.
     """
     # nodes_arguments / con_indices appear to be class-level templates
     # defined on the enclosing class -- TODO confirm (class not visible here).
     self.nodes = [
         network.Node(arg[0], self, arg[1]) for arg in self.nodes_arguments
     ]
     self.pos = pos
     self.size = size
     self.world = None  # set later when the component is placed in a world
     if pos is not None:
         # Translate each node by the component's position.
         for n in self.nodes:
             old_pos = n.pos
             n.pos = tuple(old_pos[i] + self.pos[i] for i in [0, 1])
     # Wire up connections; each c is (source_index, target_index, weight).
     # NOTE(review): the connection is only registered on the source node.
     for c in self.con_indices:
         con = network.Connection(self.nodes[c[0]], self.nodes[c[1]], c[2])
         self.nodes[c[0]].connections.add(con)
def readNodesFromShp(shapefile):
    """Read nodes and node weights from a point shapefile.

    Each feature's weight is taken from its "Weight" field when present,
    otherwise defaults to 1.

    Args:
        shapefile: path to the input point shapefile.

    Returns:
        Dict mapping feature FID -> network.Node.
    """
    ds = ogr.Open(shapefile)
    ptLayer = ds.GetLayer(0)
    nodes = {}
    feat = ptLayer.GetNextFeature()
    while feat is not None:
        # BUG FIX: GetFieldIndex returns -1 (it does not raise) when the
        # field is absent, so the original bare try/except never guarded
        # the missing-field case. Test the index explicitly instead.
        weightField = feat.GetFieldIndex("Weight")
        if weightField >= 0:
            nodeWeight = feat.GetField(weightField)
            if nodeWeight is None:  # NULL attribute value
                nodeWeight = 1
        else:
            nodeWeight = 1
        geomRef = feat.GetGeometryRef()
        x = geomRef.GetX()
        y = geomRef.GetY()
        FID = feat.GetFID()
        nodes[FID] = network.Node(FID, x, y, nodeWeight)
        feat = ptLayer.GetNextFeature()
    ds.Destroy()
    return nodes
Пример #12
0
def readNodesFromShp(shapefile):
    """Read nodes and node weights from a point shapefile via a gp cursor.

    Each row's weight is taken from its "Weight" field when readable,
    otherwise defaults to 1.

    Args:
        shapefile: path to the input point shapefile.

    Returns:
        Dict mapping row FID -> network.Node.
    """
    rows = gp.searchCursor(shapefile)
    desc = gp.describe(shapefile)
    nodes = {}
    row = rows.next()
    while row:
        feat = row.GetValue(desc.ShapeFieldName)
        try:
            weight = row.getValue("Weight")
        except Exception:  # narrowed from bare except: missing/unreadable field
            weight = 1
        FID = row.getValue("FID")

        # Point geometry: the first part is the point itself.
        pt = feat.getPart(0)
        nodes[FID] = network.Node(FID, pt.x, pt.y, weight)
        row = rows.next()
    del rows  # ensure cursor closes
    return nodes
Пример #13
0
###############################
# Parameters
T = 20  # total simulated time -- units presumably seconds; TODO confirm
tau = .001 # execution timestep for the rate model

tc_primary = 0.01 # thalamo-cortical weights
tc_secondary = 0.01 # thalamo-cortical weights
cc_ei = .03 # intra-cortical weights (excitatory -> inhibitory)
cc_ie = -0.9 # intra-cortical weights (inhibitory -> excitatory)


###############################
# Building and instructing

# Node signature (per nw module):
# update_func, tau, rand_mean=0.0, rand_std=0.00001, isSensory=False, isReadout=False ):
# Thalamic "load" input nodes.
L1 = nw.Node( nw._load, tau )
L2 = nw.Node( nw._load, tau )
L3 = nw.Node( nw._load, tau )

# Cortical excitatory (e) / inhibitory (i) sigmoid units, one pair per area.
W1e = nw.Node( nw._sigmoid, tau )
W1i = nw.Node( nw._sigmoid, tau )
W2e = nw.Node( nw._sigmoid, tau )
W2i = nw.Node( nw._sigmoid, tau )
W3e = nw.Node( nw._sigmoid, tau )
W3i = nw.Node( nw._sigmoid, tau )

# Arcs: thalamo-cortical projections from L1.
tc11 = nw.Arc( target=W1e, source=L1, weight=tc_primary )
tc12 = nw.Arc( target=W2e, source=L1, weight=tc_secondary )
tc13 = nw.Arc( target=W3e, source=L1, weight=tc_secondary )
# NOTE(review): this rebinds tc11, discarding the reference to the
# L1 -> W1e arc created above -- likely meant a distinct name (e.g. tc11i).
tc11 = nw.Arc( target=W1i, source=L1, weight=tc_primary )
Пример #14
0
import spotify_ops

import plotly

# NOTE(review): plotly.tools.set_credentials_file belongs to the legacy
# plotly API -- presumably chart-studio era; verify against the pinned
# plotly version. `config`, `Artist`, and `network` are used below but not
# imported here -- presumably imported earlier in the file; TODO confirm.
plotly.tools.set_credentials_file(username=config.plotly_user,
                                  api_key=config.plotly_api_key)

# Authenticated spotipy client exposed by the spotify_ops module.
sp = spotify_ops.sp

# Seed artist by Spotify artist ID.
artist = Artist("64KEffDW9EtZ1y2vBYgq8T")

print("test")
#print(artist)
# Build a graph seeded with the artist as its first node and one edge.
thisNetwork = network.Network()
thisArtist = network.Node(artist.id, thisNetwork)
thisBranch = network.Edge("first", "second", thisNetwork)

#spotify:artist:64KEffDW9EtZ1y2vBYgq8T
# playlists = sp.user_playlists('spotify')
# print(playlists)
# while playlists:
#     for i, playlist in enumerate(playlists['items']):
#         print("%4d %s %s" % (i + 1 + playlists['offset'], playlist['uri'],  playlist['name']))
#     if playlists['next']:
#         playlists = sp.next(playlists)
#     else:
#         playlists = None

# import plotly.plotly as py
# import plotly.graph_objs as go