def request():
    """Called when plugin data are requested.

    When this function is called, Location data are valid. In this
    function should be set data for provider. Data are set with
    Provider.setProperty(pluginName, key, value) method. This method
    can be called multiple times, all keys are added to this plugin
    datasource. At the end, when all data are set,
    Provider.done(pluginName) method has to be called indicating that
    all data are ready and set.

    Location methods:
        QString getCountryCode();
        QString getCountry();
        QString getCity();
        QString getAddress();   // this method is useful only when user has
                                // chosen manual configuration of location,
                                // otherwise its return is empty
        QString getLongitude();
        QString getLatitude();
        double getRange();      // range in KM around location for which
                                // user wants information

    Provider methods:
        void setProperty(QString source, QString key, QString value);
        void done(const QString source);
    """
    # Publish one greeting property keyed "Hello!" into this plugin's datasource.
    Provider.setProperty(PLUGIN_NAME, "Hello!", "You are in %s." % Location.getCity())
    # Tell the host that all properties for this plugin are now set.
    Provider.done(PLUGIN_NAME)
def push_image_to_providers(self, image_id, build_id, providers, credentials, job_cls = BuildJob, *args, **kwargs):
    """Start one asynchronous push job per provider for a built image.

    Falls back to the latest unpushed build of ``image_id`` when no
    ``build_id`` is given. A single PushWatcher is shared by all jobs so
    completion of the whole batch can be tracked. Extra positional and
    keyword arguments are forwarded to ``job_cls``.

    Returns the list of launched jobs (also recorded in the job registry).
    """
    if not build_id:
        # No explicit build requested: push the most recent unpushed one.
        build_id = self._latest_unpushed(image_id)

    push_watcher = PushWatcher(image_id, build_id, len(providers), self.warehouse)

    launched = []
    for prov in providers:
        tgt = Provider.map_provider_to_target(prov)
        tgt_image = self._target_image_for_build_and_target(build_id, tgt)
        tmpl = self._template_for_target_image_id(tgt_image)
        push_job = job_cls(tmpl, tgt, image_id, build_id, *args, **kwargs)
        push_job.push_image(tgt_image, prov, credentials, push_watcher)
        launched.append(push_job)

    self.job_registry.register(launched)
    return launched
from os import listdir
import numpy as np
import Provider
import tensorflow as tf

# Directories (hard-coded to a local Windows checkout of the LUNA16 dataset)
subsetDirBase = r'C:\Users\polat\Desktop\TEZ\LUNA\subset'
candidates = r'C:\Users\polat\Desktop\TEZ\LUNA\CSVFILES\candidates_V2.csv'

# Constants
NUM_SUBSET = 10   # LUNA16 ships as 10 cross-validation subsets
voxelWidth = 42   # candidate crop size in X/Y (voxels)
ZWidth = 2        # candidate crop half-depth in Z (voxels) — TODO confirm units

# Candidate list: presumably rows of (seriesuid, x, y, z, class) — verify
# against Provider.readCSV's output format.
candidatesList = Provider.readCSV(candidates)

# ***** TRAINING STARTS*****
# NOTE: tf.placeholder / tf.Session below are TensorFlow 1.x graph-mode APIs.
inputVolume = tf.placeholder(tf.float32)
output = tf.placeholder(tf.float32)

# First 3-D conv layer: 3x3x3 kernel, 1 input channel -> 32 feature maps.
W_conv1 = Provider.weight_variable([3, 3, 3, 1, 32])
B_conv1 = Provider.bias_variable([32])
h_conv1 = tf.nn.relu(
    tf.nn.conv3d(inputVolume, W_conv1, [1, 1, 1, 1, 1], padding='SAME') + B_conv1)
# NOTE(review): ksize is [1, 2, 3, 3, 1] while strides are [1, 2, 2, 2, 1];
# the asymmetric 2/3 window may be intentional for anisotropic CT voxels,
# but it reads like a typo for [1, 2, 2, 2, 1] — confirm.
layer1 = tf.nn.max_pool3d(h_conv1, ksize=[1, 2, 3, 3, 1],
                          strides=[1, 2, 2, 2, 1], padding='SAME')
from os import listdir

import Provider

# Ensemble script: averages the per-candidate probability (column 4) across
# every model-result CSV found in ModelDirectory, and accumulates the merged
# rows (seriesuid, x, y, z, mean probability) into outputFile.

# Directories
ModelDirectory = r'ModelResults'

ModelList = []
outputFile = []
outputFile.append('seriesuid,coordX,coordY,coordZ,probability')

# Load every model's CSV. Renamed from `list` so the builtin is not shadowed.
model_files = listdir(ModelDirectory)
for modelFile in model_files:
    path = ModelDirectory + '/' + modelFile
    ModelList.append(Provider.readCSV(path))

# All model CSVs are assumed to list the same candidates in the same order —
# TODO confirm; a mismatch would silently average unrelated rows.
NumberOfCandidates = len(ModelList[0])
NumberOfModel = len(ModelList)

# Row 0 is the CSV header, so iterate from 1 instead of skipping inside the loop.
for i in range(1, NumberOfCandidates):
    # Mean probability across models for candidate i.
    average = sum(float(model[i][4]) for model in ModelList) / NumberOfModel
    # Coordinates are taken from the first model's row (identical across models).
    outputFile.append(','.join(ModelList[0][i][0:4]) + ',' + str(average))
    print(str(i) + '\n')
import Provider
from datetime import datetime
import json
from aiohttp import web
import asyncio
import Orchestrator
import Authorization
import Consumer

# Arrowhead-style service wiring: a consumer identity and a "CurrentTime"
# provider bound to localhost. The trailing positional args are presumably
# (interfaces, authorization endpoint, metadata) — verify against
# Provider.Provider's signature.
consumer = Consumer.Consumer("emilsnyaste", "127.0.0.1", 8082)
provider = Provider.Provider("CurrentTimeSweden", "CurrentTime","/", 8081, "127.0.0.1", ["JSON"] ,"127.0.0.1:8442", {})

#Authorization.register_to_authorization(provider,"emilsnyaste", "127.0.0.1", 8082, "null")
# Ask the authorization system whether `consumer` may use this provider;
# result is only printed, not acted upon.
print(Authorization.authorize(consumer, provider.name, provider.address, provider.port, provider.definition))
#provider.start()
#provider.registerToOrch()
#Orchestrator.register_to_orchestrator(provider, "emilsnya", "127.0.0.1", 8082, "null")

async def handle_request(request):
    """Serve the current local time as JSON: {'time': '<timestamp>'}.

    Any failure is collapsed to a generic 400 response.
    """
    try:
        time = str(datetime.now())
        response = {'time':time}
        print (response)
        # NOTE(review): this stops the provider on the first successful
        # request — confirm the one-shot behaviour is intentional.
        provider.stop()
        return web.Response(text=json.dumps(response), status=200)
    except Exception as e:
        # NOTE(review): `e` is discarded; consider logging it before
        # returning the generic error.
        return web.Response(text=json.dumps({'message': 'Something went wrong'}), status=400)
# NOTE(review): the opening of this matrix literal is outside the visible
# chunk; the rows below complete a 4x4 matrix whose first row begins earlier.
0.65, 0, 0, ], [0, 0.8, 0.3, 0], [0, 0.6, 1, 0], [0, 0, 0, 1])
print(x)

# Per-row comparison of the argmax of x and y (TF1 graph-mode tensors,
# presumably defined earlier in the file — TODO confirm).
prediction = tf.equal(tf.argmax(x, 1), tf.argmax(y, 1))
# `accuracy` is built but never evaluated in this visible span.
accuracy = tf.reduce_mean(tf.cast(prediction, tf.float32))
sess = tf.Session()
print(sess.run(prediction))

anArray = np.array([1, 2, 3, 4]).reshape(1, 4)
# `dizi` (Turkish for "array"); Provider.numToArray's semantics are not
# visible here — presumably a one-hot/encoding helper, verify in Provider.
dizi = Provider.numToArray(anArray)

# Shape experiment: a is (6,), b is (6, 3); matmul(a, b) yields shape (3,).
a = np.array([1, 1, 1, 2, 2, 2])
print("dimension a: ")
print(a.shape)
b = np.array([[1, 1, 1], [1, 1, 1], [2, 2, 2], [1, 1, 1], [1, 1, 1], [1, 2, 3]])
print("dimension b: ")
print(b.shape)
c = np.matmul(a, b)
print(c)
print(Provider.numToArray(a[0]))
import numpy as np
from scipy import ndimage
import Provider

# LUNA16 preprocessing: resample/crop candidate volumes from the 10 subsets.
subsetDirb= 'subset'
resampledSubsetDir = 'resample'
cropSubsetDir = 'crop'
candidates = 'CSVFILES/candidates_V2.csv'
RESIZE_SPACING = [1, 1, 1]   # target voxel spacing in mm per axis
voxelWidthXY = 36            # crop width in X/Y (voxels)
voxelWidthZ = 24             # crop depth in Z (voxels)

if __name__ == '__main__':
    candidatesList = Provider.readCSV(candidates)
    for i in range(10):
        # Builds 'subset/subsetN' — the subset folders live under a parent
        # 'subset' directory. TODO confirm this nesting matches the dataset
        # layout on disk.
        subsetDirn=subsetDirb+str(i)
        subsetDir=subsetDirb + '/' + subsetDirn
        # NOTE(review): `listdir` is not imported in the visible import block
        # (no `from os import listdir`) — this raises NameError unless it is
        # imported elsewhere in the file; also `list` shadows the builtin.
        list = listdir(subsetDir)
        # Collect scan ids: .mhd filenames with the extension stripped.
        subsetOuterList = []
        for file in list:
            if file.endswith(".mhd"):
                file = file[:-4]
                subsetOuterList.append(file)
        count0 = 0
        count1=0
        label = []
        for cand in candidatesList:
            # NOTE(review): `file` here is whatever value the previous loop
            # left behind (the last directory entry), so only candidates of
            # that one scan can match — this looks like it should test
            # membership in subsetOuterList instead; confirm against the
            # continuation of this loop (truncated past this chunk).
            if (cand[0] in file):
                fileName = cand[0] + '.mhd'
# Classifier head: dropout -> flatten -> 300-unit dense -> dropout -> softmax.
# (`model` itself is constructed before this visible chunk.)
model.add(Dropout(0.20))
model.add(Flatten())
model.add(Dense(300, activation='relu'))
model.add(Dropout(0.20))
model.add(Dense(num_classes, activation='softmax'))
# NOTE(review): binary_crossentropy paired with a softmax over `num_classes`
# outputs is usually a mistake (categorical_crossentropy is the matching
# loss) — confirm num_classes/label encoding before changing.
model.compile(loss=keras.losses.binary_crossentropy,
              optimizer=keras.optimizers.Adadelta(),
              metrics=['accuracy'])

# Save the freshly-initialised weights so each cross-validation fold can
# restart training from the same starting point.
model.save_weights('model.h5')
writeToDebugFile('Keras Model Created')

candidatesList = Provider.readCSV(candidates)
writeToDebugFile('candidates Read')

# One iteration per held-out subset (leave-one-subset-out cross-validation).
for outerSet in range(NUM_SET):
    # Restart the Model from the saved initial weights.
    model.load_weights('model.h5')
    # Create test folder
    subsetDir = originalSubsetDirectoryBase + str(outerSet)
    # NOTE(review): `list` shadows the builtin; prefer a different name.
    list = listdir(subsetDir)
    subsetOuterList = []
    # Create Test Set: scan ids are .mhd filenames with the extension stripped.
    for file in list:
        if file.endswith(".mhd"):
            file = file[:-4]
# NOTE(review): the call these arguments belong to (presumably
# rsa.generate_private_key for one of the demo keys) begins before this
# visible chunk.
key_size=2048, backend=default_backend())

# NOTE(review): this rebinds the imported module name `blockchain` to an
# instance — the module is unreachable afterwards; consider a distinct name.
blockchain = blockchain.Blockchain()  # the blockchain used in this test

# create a test record for a patient
patient_test_vc = [
    "Administration Date: MAY-01-2021 10:00 AM",
    "Patient ID: 10132",
    "Patient Name: John Doe",
    "Patient Address: 10 Example St. NE",
    "Administered by: Dr. Jill Fakeington"
]
# additional info is non-personally identifying info stored with transaction
additional_data = ["Vaccine Type: Pfizer", "Vaccine ID: 1234"]

# create the provider
provider = Provider.Provider(provider_key)
# create the patient
patient = Patient.Patient(patient_key)
# create the third party verifier
verifier = Verifier.Verifier(verifier_key, blockchain)

# generate patient vaccine card
provider.generate_card(patient_test_vc)
# provider posts the transaction to the blockchain
provider.post_transaction(blockchain, patient_key.public_key(), additional_data)
# a new block is created
blockchain.new_block()
# provider sends encrypted vaccine care to the patient
provider.send_patient_info(patient)
# Patient sends encrypted record to the verifier to prove his vaccination
patient.send_records(verifier, verifier.get_pub_key())