def set_platform(self, **write):
    """ Sets fabric.api.env attributes from the selected platform.

    The setup follows the sequence:
    fabfile.env.platforms > tests_integration.platforms.common > tests_integration.platforms.<selected_platform>

    :param write: dictionary of attributes to inject into fabric.api.env
    """
    module = import_module('.platforms.' + self.platform.platform_name, ROOT)
    getattr(module, self.platform.platform_name)(**self.platform.get_hosts(True))
    self.platform.user = getattr(self.env, 'default_ssh_user', 'root')
    for k, v in write.iteritems():
        setattr(self.env, k, v)
    # wait until the sshd daemon is running on the host platform
    self.platform.wait_process('/usr/sbin/sshd')
    # save the originals and replace all references to fabric.api.env and env.instances
    self.org_api_env = pyjack.replace_all_refs(api.env, _AttributeDict(api.env))
    self.org_instances = pyjack.replace_all_refs(api.env.instances, dict(api.env.instances))
    return self
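Both set_platform and the matching unset_platform at the end of this page rely on the same pyjack idiom: replace_all_refs swaps every live reference to an object for a new one and returns the original, so the swap can be undone later. A minimal self-contained sketch of that round trip, using an illustrative dict rather than fabric's env:

# Illustrative sketch only; `settings` and `holder` are made-up names.
import pyjack

settings = {'user': 'root'}
holder = [settings]                        # a second live reference

patched = dict(settings, user='tester')
original = pyjack.replace_all_refs(settings, patched)
print holder[0]['user']                    # -> 'tester': every reference was rewired

pyjack.replace_all_refs(patched, original)
print holder[0]['user']                    # -> 'root': the swap is reversible
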
Example #2
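The excerpt below starts mid-script, so the module-level setup it relies on is not shown. A hypothetical setup consistent with the names it prints (iterable, SomeCls, myfun/innerfun) might look like this:

#>Hypothetical setup inferred from the excerpt; not part of the original example
import pyjack

iterable = ('org', 'data', 'set',)

class SomeCls(object):
    someiterable = iterable       # class-attribute reference to the tuple
    anotheriterable = iterable    # a second reference to the same object

def myfun(arg):
    def innerfun():
        # generator closing over `arg`; replace_all_refs also rewires
        # this closure cell, which is what the example demonstrates
        for datum in arg:
            yield datum
    return innerfun

innerfun = myfun(iterable)
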
print "SomeCls.someiterable:", SomeCls.someiterable
print "SomeCls.anotheriterable:", SomeCls.anotheriterable
print "Contents of innerfun:"

#>And inner fun:
innerfun_gen = innerfun()
print "First yield:", innerfun_gen.next()
print "Second yield:", innerfun_gen.next()

#>Now, let's replace iterable with some new data
new_iterable = (
    'new',
    'data',
    'set',
)
org_iterable = pyjack.replace_all_refs(iterable, new_iterable)

#>Then look at the new results
print "iterable:", iterable
print "SomeCls.someiterable:", SomeCls.someiterable
print "SomeCls.anotheriterable:", SomeCls.anotheriterable

#>And inner fun, notice the function closure was updated:
innerfun_gen = innerfun()
print "First yield:", innerfun_gen.next()
print "Second yield:", innerfun_gen.next()

#>Then, reverse:
new_iterable = pyjack.replace_all_refs(new_iterable, org_iterable)

#>Then look at the new results
Example #3
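As in the previous example, the definitions of item, data and Foobar are not part of this excerpt. A hypothetical setup consistent with the prints below could be:

#>Hypothetical setup inferred from the excerpt; not part of the original example
import pyjack

item = (100, 'one hundred')
data = {'the_item': item}          # a container holding another reference

class Foobar(object):
    the_item = item                # a class-attribute reference
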
def outer(datum):

    def inner():
        return ("Here is the datum:",
            datum,
        )

    return inner


inner = outer(item)

print item
print data
print Foobar.the_item
print inner()

#>Then replace them:
new = (101, 'one hundred and one')
org_item = pyjack.replace_all_refs(item, new)

print item
print data
print Foobar.the_item
print inner()

#>But you still have the org data:
print org_item

#>So the process is reversible:
new = pyjack.replace_all_refs(new, org_item)

print item
print data
print Foobar.the_item
Example #4
def prune_resnet101_conv_layer(model, layer_index, filter_index):
	# Remove the filter at `filter_index` from the Conv2d found at position
	# `layer_index`, then rebuild the layers whose input channels depend on it.
	layer_affected = []
	layer = 0
	activation_index = 0
	for block in model.features:
		if block == model.conv1 or block == model.bn1:
			# print "Adding initial conv1 or bn1 to graph"
			# print block
			if block == model.conv1:
				pass
				#print "conv1"
			else:
				pass
				#print "bn1"
			if isinstance(block, torch.nn.modules.conv.Conv2d):
				if layer_index == layer:
					layer_to_prune = block
					print model.layer1
					layer_affected.append(model.bn1)
					for (bottleneck_name, bottleneck) in model.layer1._modules.items():
						if bottleneck_name == '0':
							for (layer_name, module) in bottleneck._modules.items():
								if layer_name == 'conv1':
									layer_affected.append(module)
				activation_index += 1
				layer += 1
		elif block == model.layer1 or block == model.layer2 or block == model.layer3 or block == model.layer4:
			# print "Adding Residual Block to graph"
			for (block_name, bottleneck) in block._modules.items():
				# print block_name
				for (name, module) in bottleneck._modules.items():
					# print name,module
					if isinstance(module, torch.nn.modules.Sequential):
						pass
						#print "Sequential Block"
						#out += module(x)
						#x = F.relu(out)
					else:
						# print "Conv or BN Inside Bottleneck"
						# print name, module
						if name == 'conv1':
							pass
							#print "bottleneck conv1"
							#out = module(x)
						elif name == 'bn1' or name == 'bn2':
							pass
							#print "bottleneck bn1 or bn2"
							#out = F.relu(module(out))
						elif name == 'conv2' or name == 'conv3':
							pass
							#print "bottleneck conv2 or conv3"
							#out = module(out)
						elif name == 'bn3':
							pass
							#print "bottleneck bn3"
							#out = module(out)
						else:
							pass
							#out = module(out)
						if isinstance(module, torch.nn.modules.conv.Conv2d):
							if layer_index == layer:
								layer_to_prune = module
							activation_index += 1
							layer += 1

	conv = layer_to_prune
	print "Pruning This Conv Block"
	print conv
	print "Layers Affected"
	print layer_affected
	#next_conv = None
	#offset = 1

	# while layer_index + offset <  len(model.features._modules.items()):
	# 	res =  model.features._modules.items()[layer_index+offset]
	# 	if isinstance(res[1], torch.nn.modules.conv.Conv2d):
	# 		next_name, next_conv = res
	# 		break
	# 	offset = offset + 1
	#
	new_conv = \
		torch.nn.Conv2d(in_channels = conv.in_channels, \
			out_channels = conv.out_channels - 1,
			kernel_size = conv.kernel_size, \
			stride = conv.stride,
			padding = conv.padding,
			dilation = conv.dilation,
			groups = conv.groups,
			bias = conv.bias is not None)

	old_weights = conv.weight.data.cpu().numpy()
	new_weights = new_conv.weight.data.cpu().numpy()

	# Copy every filter except the pruned one (conv weights are [out_channels, in_channels, kH, kW]).
	new_weights[: filter_index, :, :, :] = old_weights[: filter_index, :, :, :]
	new_weights[filter_index : , :, :, :] = old_weights[filter_index + 1 :, :, :, :]
	new_conv.weight.data = torch.from_numpy(new_weights).cuda()
	if conv.bias is not None:
		bias_numpy = conv.bias.data.cpu().numpy()

		bias = np.zeros(shape = (bias_numpy.shape[0] - 1), dtype = np.float32)
		bias[:filter_index] = bias_numpy[:filter_index]
		bias[filter_index : ] = bias_numpy[filter_index + 1 :]
		new_conv.bias.data = torch.from_numpy(bias).cuda()

	print "Adding new Conv"
	print new_conv
	
	conv = pyjack.replace_all_refs(conv,new_conv)

	new_in_channels = new_conv.out_channels	

	for layers in layer_affected:
		if isinstance(layers, torch.nn.modules.conv.Conv2d):
			# An affected conv keeps its filter count but loses one input channel.
			new_layer = \
				torch.nn.Conv2d(in_channels = new_in_channels, \
				out_channels = layers.out_channels,
				kernel_size = layers.kernel_size, \
				stride = layers.stride,
				padding = layers.padding,
				dilation = layers.dilation,
				groups = layers.groups,
				bias = layers.bias is not None)

			old_weights = layers.weight.data.cpu().numpy()
			new_weights = new_layer.weight.data.cpu().numpy()

			# Drop the pruned channel along the input axis.
			new_weights[:, : filter_index, :, :] = old_weights[:, : filter_index, :, :]
			new_weights[:, filter_index :, :, :] = old_weights[:, filter_index + 1 :, :, :]
			new_layer.weight.data = torch.from_numpy(new_weights).cuda()
			if layers.bias is not None:
				# The bias is per output channel, so it is copied unchanged.
				bias_numpy = layers.bias.data.cpu().numpy()
				new_layer.bias.data = torch.from_numpy(bias_numpy).cuda()
		elif isinstance(layers, torch.nn.modules.batchnorm.BatchNorm2d):
			# An affected batch norm drops the pruned channel's weight and bias
			# (running statistics are left at their defaults here).
			new_layer = torch.nn.BatchNorm2d(new_in_channels)
			old_weights = layers.weight.data.cpu().numpy()
			old_bias = layers.bias.data.cpu().numpy()
			new_layer.weight.data = torch.from_numpy(np.delete(old_weights, filter_index)).cuda()
			new_layer.bias.data = torch.from_numpy(np.delete(old_bias, filter_index)).cuda()

		# Rewire every live reference from the old layer to its pruned replacement.
		layers = pyjack.replace_all_refs(layers, new_layer)


	#print "Conv1 New"
	#print model.conv1

	return model
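A hypothetical usage sketch, not taken from the original code: the function above only relies on a model that exposes features, conv1, bn1 and layer1 the way torchvision's ResNet does, so a minimal wrapper (the ResNet101Wrapper name is assumed here) could look like this. CUDA is required because the pruned weights are moved back with .cuda().

# Illustrative wrapper; the real model class is not part of the excerpt above.
import torch
from torchvision import models

class ResNet101Wrapper(torch.nn.Module):
	def __init__(self):
		super(ResNet101Wrapper, self).__init__()
		backbone = models.resnet101()
		self.conv1 = backbone.conv1
		self.bn1 = backbone.bn1
		self.layer1 = backbone.layer1
		self.layer2 = backbone.layer2
		self.layer3 = backbone.layer3
		self.layer4 = backbone.layer4
		# `features` holds the very same module objects, so the identity
		# checks (block == model.conv1, ...) in the pruning loop still hold.
		self.features = torch.nn.Sequential(
			self.conv1, self.bn1,
			self.layer1, self.layer2, self.layer3, self.layer4)

model = ResNet101Wrapper().cuda()
# Prune the 6th filter of the very first convolution (layer index 0).
model = prune_resnet101_conv_layer(model, layer_index = 0, filter_index = 5)
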
Example #5
innerfun = myfun(iterable)

#>So look at the org results
print "iterable:", iterable
print "SomeCls.someiterable:", SomeCls.someiterable
print "SomeCls.anotheriterable:", SomeCls.anotheriterable
print "Contents of innerfun:"

#>And inner fun: 
innerfun_gen = innerfun()
print "First yield:", innerfun_gen.next()
print "Second yield:", innerfun_gen.next()

#>Now, let's replace iterable with some new data
new_iterable = ('new', 'data', 'set',)
org_iterable = pyjack.replace_all_refs(iterable, new_iterable)

#>Then look at the new results
print "iterable:", iterable
print "SomeCls.someiterable:", SomeCls.someiterable
print "SomeCls.anotheriterable:", SomeCls.anotheriterable

#>And inner fun, notice the function closure was updated:
innerfun_gen = innerfun()
print "First yield:", innerfun_gen.next()
print "Second yield:", innerfun_gen.next()

#>Then, reverse:
new_iterable = pyjack.replace_all_refs(new_iterable, org_iterable)

#>Then look at the new results
Example #6
def outer(datum):
    def inner():
        return ("Here is the datum:", datum,)
    return inner
    
inner = outer(item)

print item
print data
print Foobar.the_item
print inner()

#>Then replace them:
new = (101, 'one hundred and one')
org_item = pyjack.replace_all_refs(item, new)

print item
print data
print Foobar.the_item
print inner()

#>But you still have the org data:
print org_item

#>So the process is reversible: 
new = pyjack.replace_all_refs(new, org_item)

print item
print data
print Foobar.the_item
def unset_platform(self):
    # restore fabric.api.env
    pyjack.replace_all_refs(api.env, self.org_api_env)
    pyjack.replace_all_refs(api.env.instances, self.org_instances)
    return self