Example #1
def transmit_sample(host, port):
    """
    Reads data samples from living_data.csv, secret-shares each sample,
    and transmits the JSON-encoded shares to the given MPC servers.

    host : list of server IP addresses (strings)
    port : list of server port numbers (ints)
    """

    # Read the samples and split them into the columns to be secret-shared
    living_data = np.loadtxt('living_data.csv', delimiter=",")
    temperature = living_data[:, 0].tolist()
    airco_status = living_data[:, 1].tolist()

    secnum = mpc.SecFxp()
    #secnum = mpc.SecInt()

    temperature_sec_ = [secnum(a) for a in temperature]
    airco_status_sec_ = [secnum(a) for a in airco_status]
    stype = type(temperature_sec_[0])
    field = stype.field

    temperature_sec = [a.df for a in temperature_sec_]
    airco_status_sec = [a.df for a in airco_status_sec_]

    m = len(host)
    t = 1
    N = len(temperature)
    for i in np.arange(N):
        # Generate shares for each data sample in temperature and airco_status
        temperature_shares = thresha.random_split([temperature_sec[i]], t, m)
        aircostatus_shares = thresha.random_split([airco_status_sec[i]], t, m)

        temperature_shares_str = []
        for other_pid, data in enumerate(temperature_shares):
            data = field.to_bytes(data)
            temperature_shares_str.append(base64.b64encode(data).decode())

        aircostatus_shares_str = []
        for other_pid, data in enumerate(aircostatus_shares):
            data = field.to_bytes(data)
            aircostatus_shares_str.append(base64.b64encode(data).decode())

        l.debug(f"Temperature shares: {temperature_shares_str}")

        #send shares to MPC servers
        for j in np.arange(m):
            sample = {
                "timestamp": datetime.datetime.now().isoformat(),
                "temperature": temperature_shares_str[j],
                "airco": aircostatus_shares_str[j]
            }
            sample_json = json.dumps(sample)
            l.debug(f"Sample: {sample_json}")
            r = requests.put(f"http://{host[j]}:{port[j]}/store",
                             json=sample_json)
            #verify=os.getenv("CERT_PATH"))
            l.debug(f"HTTP response code: {r.status_code}")
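These base64-encoded shares only become useful again once shares of the same sample are collected back from the servers. A minimal, hypothetical reconstruction sketch (not part of the original code; it reuses the field.from_bytes and thresha.recombine calls shown in the later examples and assumes the share strings are listed in party order):

import base64

from mpyc import thresha
from mpyc.runtime import mpc


def recombine_sample(share_strs):
    """Recombine one secret-shared sample from its base64-encoded shares.

    share_strs : list of base64 strings, where share_strs[j] is the share
                 produced for party j (all m shares, in party order).
    """
    secnum = mpc.SecFxp()
    field = secnum.field  # same field as stype.field in the sender above
    # Each entry decodes to a list of field elements; pair it with the
    # 1-based evaluation point j + 1 used by thresha.random_split.
    points = [(j + 1, field.from_bytes(base64.b64decode(s)))
              for j, s in enumerate(share_strs)]
    # Lagrange interpolation at 0 recovers the shared field element(s);
    # for SecFxp they still carry the 2**field.frac_length scaling.
    return thresha.recombine(field, points)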
Example #2
    def test_secretsharing(self):
        for field in (self.f2, self.f256):
            t = 0
            m = 1
            a = [field(0), field(1)]
            shares = thresha.random_split(a, t, m)
            b = thresha.recombine(field, [(j + 1, shares[j]) for j in range(len(shares))])
            self.assertEqual(a, b)
            b = thresha.recombine(field, [(j + 1, shares[j]) for j in range(len(shares))], [0])[0]
            self.assertEqual(a, b)

        for field in (self.f19, self.f27, self.f256):
            for t in range(8):
                m = 2 * t + 1
                for i in range(t):
                    a = [field(i), field(i+1), field(i**2), field((i+1)**2)]
                    shares = thresha.random_split(a, t, m)
                    b = thresha.recombine(field, [(j + 1, shares[j]) for j in range(len(shares))])
                    self.assertEqual(a, b)
            m = 17
            for t in range((m + 1) // 2):
                for i in range(t):
                    a = [field(i), field(i+1), field(i**2), field((i+1)**2)]
                    shares = thresha.random_split(a, t, m)
                    b = thresha.recombine(field, [(j + 1, shares[j]) for j in range(len(shares))])
                    self.assertEqual(a, b)
Example #3
    def test_secretsharing(self):
        field = self.f2
        t = 0
        n = 1
        a = [field(0), field(1)]
        shares = thresha.random_split(a, t, n)
        b = thresha.recombine(field,
                              [(j + 1, shares[j]) for j in range(len(shares))])
        self.assertEqual(a, b)

        field = self.f19
        for t in range(8):
            n = 2 * t + 1
            for i in range(t):
                a = [field(i), field(-i), field(i**2), field(-i**2)]
                shares = thresha.random_split(a, t, n)
                b = thresha.recombine(field, [(j + 1, shares[j])
                                              for j in range(len(shares))])
                self.assertEqual(a, b)
        n = 17
        for t in range((n + 1) // 2):
            for i in range(t):
                a = [field(i), field(-i), field(i**2), field(-i**2)]
                shares = thresha.random_split(a, t, n)
                b = thresha.recombine(field, [(j + 1, shares[j])
                                              for j in range(len(shares))])
                self.assertEqual(a, b)
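Both tests recombine from all m shares. As a small illustrative sketch (assuming the same three-argument thresha.random_split signature used in these tests, and an arbitrary prime field such as mpc.SecInt().field), any t + 1 of the (j + 1, share) points already suffice:

from mpyc import thresha
from mpyc.runtime import mpc

field = mpc.SecInt().field   # any finite field class works here
t = 2                        # threshold: t or fewer shares reveal nothing
m = 2 * t + 1                # number of parties / shares
a = [field(42), field(7)]
shares = thresha.random_split(a, t, m)

# Full recombination, exactly as in the tests above.
points = [(j + 1, shares[j]) for j in range(m)]
assert thresha.recombine(field, points) == a

# Any t + 1 points are enough; here only the last t + 1 shares are used.
assert thresha.recombine(field, points[-(t + 1):]) == a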
Example #4
    async def _reshare(self, x):
        x_is_list = isinstance(x, list)
        if not x_is_list:
            x = [x]
        sftype = type(x[0])  # all elts assumed of same type
        if issubclass(sftype, Share):
            field = sftype.field
            if not field.frac_length:
                await returnType(sftype, len(x))
            else:
                await returnType((sftype, x[0].integral), len(x))
            x = await gather_shares(x)
        else:
            field = sftype
            await returnType(Future)

        m = len(self.parties)
        t = self.threshold
        in_shares = thresha.random_split(x, t, m)
        in_shares = [field.to_bytes(elts) for elts in in_shares]
        # Recombine the first 2t+1 output_shares.
        out_shares = await gather_shares(self._exchange_shares(in_shares)[:2 * t + 1])
        points = [(j + 1, field.from_bytes(out_shares[j])) for j in range(len(out_shares))]
        y = thresha.recombine(field, points)

        if issubclass(sftype, Share):
            y = [sftype(s) for s in y]
        if not x_is_list:
            y = y[0]
        return y
Example #5
    async def _distribute(self, x, senders):
        """Distribute shares for each x provided by a sender."""
        stype = type(x[0])  # all elts assumed of same type
        field = stype.field
        await returnType(stype, len(senders), len(x))
        value = x[0].df if not isinstance(x[0].df, Future) else None
        assert value is None or self.pid in senders

        m = len(self.parties)
        t = self.threshold
        x = [a.df for a in x]  # Extract values from all elements of x.
        shares = [None] * len(senders)
        for i, peer_pid in enumerate(senders):
            if peer_pid == self.pid:
                in_shares = thresha.random_split(x, t, m)
                for other_pid, data in enumerate(in_shares):
                    data = field.to_bytes(data)
                    if other_pid == self.pid:
                        shares[i] = data
                    else:
                        self._send_share(other_pid, data)
            else:
                shares[i] = self._expect_share(peer_pid)
        shares = await gather_shares(shares)
        return [[field(a) for a in field.from_bytes(r)] for r in shares]
Example #6
    async def _reshare(self, a):
        if not isinstance(a, list):
            a = tuple([a])
        sftype = type(a[0])  # all elts assumed of same type
        if issubclass(sftype, Share):
            if sftype.field.frac_length == 0:
                await returnType(sftype, len(a))
            else:
                await returnType((sftype, a[0].integral), len(a))
            a = await mpc.gather(a)
            field = sftype.field
        else:
            await returnType(Share)
            field = sftype

        in_shares = thresha.random_split(a, self.threshold, len(self.parties))
        in_shares = [field.to_bytes(elts) for elts in in_shares]
        # Recombine the first 2t+1 output_shares.
        out_shares = await gather_shares(
            self._exchange_shares(in_shares)[:2 * self.threshold + 1])
        b = thresha.recombine(field, [(j + 1, field.from_bytes(out_shares[j]))
                                      for j in range(len(out_shares))])

        if issubclass(sftype, Share):
            b = [sftype(s) for s in b]
        if isinstance(a, tuple):
            return b[0]
        else:
            return b
Example #7
    async def _distribute(self, a, senders=None):
        """Distribute shares for each secret a provided by a sender."""
        value = a.df if not isinstance(a.df, Future) else None
        if senders is None:
            senders = list(range(len(self.parties)))
        assert value is None or self.id in senders
        stype = type(a)
        await returnType(stype, len(senders))

        field = stype.field
        shares = [None] * len(senders)
        for i, peer_id in enumerate(senders):
            if peer_id == self.id:
                in_shares = thresha.random_split([value], self.threshold,
                                                 len(self.parties))
                for other_id, data in enumerate(in_shares):
                    data = field.to_bytes(data)
                    if other_id == self.id:
                        shares[i] = data
                    else:
                        self._send_share(other_id, data)
            else:
                shares[i] = self._expect_share(peer_id)
        shares = await gather_shares(shares)
        shares = [field(field.from_bytes(r)[0]) for r in shares]
        return shares
Example #8
def send_shares_mpc_single(data, dataname, datapart, hosts, ports):
    #print(f'Sending each data sample...')
    if data.ndim == 1:
        data = data.reshape(1, -1)  # treat a 1-D array as a single row of samples
    rows, cols = data.shape
    name_cols = len(dataname)

    if cols != name_cols:
        raise ValueError('Data and dataname columns do not match: %d and %d' % (cols, name_cols))

    secnum = mpc.SecFxp()

    test_sample = secnum(10)
    stype = type(test_sample)
    field = stype.field

    data_sec = np.vectorize(secnum)(data)
    print(data_sec.shape)

    m = len(hosts)
    t = 1
    N = rows
    #N = 3
    for k in np.arange(cols):
        for i in np.arange(N):
            # Generate shares for each data sample
            data_shares = thresha.random_split([data_sec[i, k].df], t, m)

            data_shares_str = []
            for share in data_shares:
                share_bytes = field.to_bytes(share)
                data_shares_str.append(base64.b64encode(share_bytes).decode())

            #print(data_shares_str)
            timestamp = datetime.datetime.now().isoformat()
            for j in np.arange(m):
                sample = {
                    "timestamp" : timestamp,
                    dataname[k] : data_shares_str[j]
                }
                sample_json = json.dumps(sample)
                send_data(sample_json, datapart, hosts[j], ports[j])
Example #9
def send_shares_mpc_combined(data, dataname, datapart, hosts, ports):
    #print(f'Sending combined data...')

    if isinstance(data, np.ndarray):
        data = data.flatten().tolist()

    secnum = mpc.SecFxp()
    test_sample = secnum(10)
    stype = type(test_sample)
    field = stype.field

    data_sec_ = np.vectorize(secnum)(data)
    data_sec_ = data_sec_.tolist()
    data_sec = [a.df for a in data_sec_]

    m = len(hosts)
    t = 1

    start = timer()
    # Generate shares for each data sample
    #data_shares = [None]*m
    data_shares = thresha.random_split(data_sec, t, m)
    end = timer()
    global running_time_compute_share
    running_time_compute_share = end - start

    data_shares_str = []
    for share in data_shares:
        share_bytes = field.to_bytes(share)
        data_shares_str.append(base64.b64encode(share_bytes).decode())

    #print(data_shares_str)
    start = timer()

    parallel = True
    if parallel:
        #print('send parallel')
        timestamp = datetime.datetime.now().isoformat()
        sample_json = []
        for j in np.arange(m):
            sample = {
                "timestamp" : timestamp,
                dataname[0] : data_shares_str[j]
            }
            sample_json.append(json.dumps(sample))
            #send_data(sample_json, datapart, hosts[j], ports[j])
        Parallel(n_jobs=m)(
            delayed(send_data)(sample_json[i], datapart, hosts[i], ports[i])
            for i in np.arange(m))
    else:

        timestamp = datetime.datetime.now().isoformat()
        for j in np.arange(m):
            sample = {
                "timestamp" : timestamp,
                dataname[0] : data_shares_str[j]
            }
            sample_json = json.dumps(sample)
            send_data(sample_json, datapart, hosts[j], ports[j])

    global running_time_upload_share
    end = timer()
    running_time_upload_share = end - start
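Examples #8 and #9 delegate the actual HTTP transfer to a send_data helper that is not shown here. A minimal, hypothetical sketch of such a helper, modeled on the requests.put call in Example #1 (the /store/<datapart> route, the timeout value and the logger name are assumptions, not part of the original code):

import logging

import requests

l = logging.getLogger(__name__)


def send_data(sample_json, datapart, host, port):
    # Hypothetical helper: PUT one JSON-encoded share to an MPC server.
    # The /store/<datapart> route is an assumption; adjust it to whatever
    # endpoint the servers actually expose.
    r = requests.put(f"http://{host}:{port}/store/{datapart}",
                     json=sample_json, timeout=10)
    l.debug(f"HTTP response code: {r.status_code}")
    return r.status_code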