import pandas as pd


def custom_cc(conn, airports, maxHops):
    new_list = {}
    i = 0
    json = []

    # Some countries have hundreds of airports, which would trigger a
    # 414 (URI Too Long) error, so split the request into batches of 20.
    if len(airports) < 20:
        for i in range(len(airports)):
            new_list["source[{}]".format(i)] = airports.loc[i, 'Vertex_ID']
            new_list["source[{}].type".format(i)] = "Airport"
        new_list['display'] = False
        new_list['maxHops'] = maxHops
        json.extend(
            conn.runInstalledQuery('custom_cc', params=new_list,
                                   timeout=16000)[0]['@@topScores'])
    else:
        for index in range(len(airports)):
            new_list["source[{}]".format(i)] = airports.loc[index, 'Vertex_ID']
            new_list["source[{}].type".format(i)] = "Airport"
            i += 1
            if i == 20:  # batch is full: run the query, then start a new one
                new_list['display'] = False
                new_list['maxHops'] = maxHops
                json.extend(
                    conn.runInstalledQuery('custom_cc',
                                           params=new_list,
                                           timeout=16000)[0]['@@topScores'])
                new_list = {}
                i = 0
        if new_list:  # flush the final partial batch
            new_list['display'] = False
            new_list['maxHops'] = maxHops
            json.extend(
                conn.runInstalledQuery('custom_cc', params=new_list,
                                       timeout=16000)[0]['@@topScores'])

    res = pd.DataFrame(json)
    return res
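A minimal call sketch for the snippet above, assuming a pyTigerGraph connection and an airports DataFrame with a 'Vertex_ID' column; the host, graph name, and token below are hypothetical, and the 'custom_cc' query must already be installed on the graph.

import pandas as pd
import pyTigerGraph as tg

# Hypothetical connection details.
conn = tg.TigerGraphConnection(host="https://example.i.tgcloud.io",
                               graphname="flights",
                               apiToken="<token>")

airports = pd.DataFrame({'Vertex_ID': ['JFK', 'LGA', 'EWR']})
scores = custom_cc(conn, airports, maxHops=10)
print(scores.head())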
Example #2
    def get_rhsm_subs(self, host, user, pw):
        verify = getattr(settings, 'REDHAT_CANDLEPIN_VERIFY', True)
        json = []
        try:
            subs = requests.get('/'.join(
                [host, 'subscription/users/{}/owners'.format(user)]),
                                verify=verify,
                                auth=(user, pw))
        except requests.exceptions.ConnectionError as error:
            # Re-raise connection errors as-is; requests' ConnectionError
            # subclasses OSError, so the handler below would otherwise
            # mislabel them as a certificate-bundle problem.
            raise error
        except OSError as error:
            raise OSError(
                'Unable to open certificate bundle {}. Check that the service is running on Red Hat Enterprise Linux.'
                .format(verify)) from error  # noqa
        subs.raise_for_status()

        for sub in subs.json():
            resp = requests.get('/'.join([
                host, 'subscription/owners/{}/pools/?match=*tower*'.format(
                    sub['key'])
            ]),
                                verify=verify,
                                auth=(user, pw))
            resp.raise_for_status()
            json.extend(resp.json())
        return json
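A hedged usage sketch: 'licenser' stands in for whatever object this method is defined on, and the host and credentials are placeholders. 'productName' and 'quantity' are typical Candlepin pool fields, but treat the keys as assumptions.

pools = licenser.get_rhsm_subs('https://subscription.rhsm.redhat.com',
                               'rhsm-user', 'rhsm-password')
for pool in pools:
    # Assumed Candlepin pool fields; adjust to the actual payload.
    print(pool.get('productName'), pool.get('quantity'))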
Example #3
def request_posts(*tags):
    # RequestPosts, Downloader, max_posts and extension_map are
    # module-level names from the surrounding script.
    request = RequestPosts(*tags)
    json = []
    while len(json) < max_posts:
        new_json = request.execute()
        if not new_json:
            break

        json.extend(new_json[:max_posts - len(json)])
        request.nextpage()

    print('\nA total of {} posts found for downloading'.format(len(json)))
    print('----------------------------------------------------')

    downloader = Downloader(tags, len(json))
    for post in json:
        md5 = post.get('md5')
        file_ext = post.get('file_ext')
        if file_ext in extension_map:
            file_ext = extension_map[file_ext]

        if not md5 or not file_ext:
            continue

        downloader.image(md5, file_ext)
        yield downloader

    print('----------------------------------------------------')
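A sketch of a driver loop for the generator above; download() is a hypothetical Downloader method, since that class is not shown here.

for dl in request_posts('landscape', 'night_sky'):
    dl.download()  # hypothetical: fetch the image selected via downloader.image()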
Example #4
def request_posts(*tags):
    print('Searching with tags: {}'.format(tags))

    request = RequestPosts(*tags)
    json = []

    while len(json) < max_posts:
        print('\rGrabbing posts from page: {}'.format(request.current_page()), end='')
        new_json = request.execute()
        if not new_json:
            break

        json.extend(new_json[:max_posts - len(json)])
        request.next_page()

    clear_line()
    print('\rGrabbed posts from {} pages'.format(request.current_page()-1))

    num_posts = len(json)

    downloader = Downloader(tags, num_posts)
    for post in json:
        md5 = post.get('md5')
        file_ext = post.get('file_ext')
        if not md5 or not file_ext:
            downloader.debug_log('No md5 or file_ext in JSON')
            continue

        downloader.set_target_image(md5, file_ext)
        yield downloader

    print('\nCompleted')
Example #5
    def test_csv_json(self):
        with filetext('1,1\n2,2\n') as csv_fn:
            with filetext('') as json_fn:
                schema = '{a: int32, b: int32}'
                csv = CSV(csv_fn, schema=schema)
                json = JSON_Streaming(json_fn, mode='r+', schema=schema)

                json.extend(csv)

                self.assertEquals(tuple(map(tuple, json)), ((1, 1), (2, 2)))
Example #6
    def test_csv_json(self):
        with filetext('1,1\n2,2\n') as csv_fn:
            with filetext('') as json_fn:
                schema = '2 * int'
                csv = CSV(csv_fn, schema=schema)
                json = JSON_Streaming(json_fn, mode='r+', schema=schema)

                json.extend(csv)

                self.assertEquals(list(json), [[1, 1], [2, 2]])
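For comparison, a dependency-free sketch of the same round trip using only the standard library (a swapped-in technique, not blaze's API): parse the CSV text and emit one JSON list per line, which is the behavior JSON_Streaming.extend is being tested for above.

import csv
import io
import json as jsonlib

def csv_to_jsonlines(csv_text):
    # Parse CSV rows, coerce to int per the '2 * int' schema, emit JSON lines.
    rows = [[int(x) for x in row] for row in csv.reader(io.StringIO(csv_text))]
    return '\n'.join(jsonlib.dumps(row) for row in rows)

assert csv_to_jsonlines('1,1\n2,2\n') == '[1, 1]\n[2, 2]'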
Example #8
def custom_bc(conn, airports, maxHops):
    new_list = {}
    i = 0
    json = []

    # Some countries have hundreds of airports, which would trigger a
    # 414 (URI Too Long) error, so split the request into batches of 20.
    if len(airports) < 20:
        for i in range(len(airports)):
            new_list["source[{}]".format(i)] = airports.loc[i, 'Vertex_ID']
            new_list["source[{}].type".format(i)] = "Airport"
        new_list['maxHops'] = maxHops
        json.extend(
            conn.runInstalledQuery('custom_bc', params=new_list,
                                   timeout=16000)[0]['Start'])
    else:
        for index in range(len(airports)):
            new_list["source[{}]".format(i)] = airports.loc[index, 'Vertex_ID']
            new_list["source[{}].type".format(i)] = "Airport"
            i += 1
            if i == 20:  # batch is full: run the query, then start a new one
                new_list['maxHops'] = maxHops
                json.extend(
                    conn.runInstalledQuery('custom_bc',
                                           params=new_list,
                                           timeout=16000)[0]['Start'])
                new_list = {}
                i = 0
        if new_list:  # flush the final partial batch
            new_list['maxHops'] = maxHops
            json.extend(
                conn.runInstalledQuery('custom_bc', params=new_list,
                                       timeout=16000)[0]['Start'])

    res = pd.DataFrame(json)
    # Flatten the query result into one row per airport.
    data = pd.DataFrame([],
                        columns=['Vertex_ID', 'name', 'lat', 'lng', 'score'])
    for i in range(len(res)):
        id = res.loc[i, 'attributes']['Start.id']
        name = res.loc[i, 'attributes']['Start.name']
        lat = res.loc[i, 'attributes']['Start.latitude']
        lng = res.loc[i, 'attributes']['Start.longitude']
        score = res.loc[i, 'attributes']['Start.@cent']
        data.loc[i] = [id, name, lat, lng, score]
    return data
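Appending rows one at a time with data.loc[i] is slow for large results; a sketch of a flatter alternative using pandas.json_normalize (pandas >= 1.0), assuming the same 'attributes' payload shape as above.

flat = pd.json_normalize(json)  # columns like 'attributes.Start.id'
data = flat.rename(columns={
    'attributes.Start.id': 'Vertex_ID',
    'attributes.Start.name': 'name',
    'attributes.Start.latitude': 'lat',
    'attributes.Start.longitude': 'lng',
    'attributes.Start.@cent': 'score',
})[['Vertex_ID', 'name', 'lat', 'lng', 'score']]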
Example #9
    def layer_to_json(layer):
        json = {}
        layer_weights = []

        if isinstance(layer, keras.layers.InputLayer):
            json = {'name': 'Input', 'shape': layer.batch_input_shape}

        elif isinstance(layer,
                        Conv2D) and not isinstance(layer, DepthwiseConv2D):
            json = {
                'name': 'Conv2D',
                'kernel_size': layer.kernel.get_shape().as_list(),
                'strides': layer.strides,
                'padding': layer.padding,
                'activation': get_activation_name(layer),
                'bits_w': layer.bits_w,
                'bits_x': layer.bits_x
            }

            if isinstance(layer, Conv2DQ):
                kernel = sess.run(layer.kernel_q)
                bias = sess.run(layer.bias_q)

                print(layer, layer.input_shape, layer.output_shape,
                      kernel.shape)

                if verif_preproc:
                    # precompute W*r or r_left * W * r_right
                    k_w, k_h, ch_in, ch_out = kernel.shape
                    h, w = layer.input_shape[1], layer.input_shape[2]
                    h_out, w_out = layer.output_shape[1], layer.output_shape[2]

                    np.random.seed(0)
                    if k_w == 1 and k_h == 1:
                        # pointwise conv
                        r_left = np.array([]).astype(np.float32)
                        r_right = np.ones(shape=(reps,
                                                 ch_out)).astype(np.float32)
                        w_r = kernel.astype(np.float64).reshape(
                            (-1, ch_out)).dot(r_right.T.astype(np.float64))
                        w_r = w_r.T
                        w_r = np.fmod(w_r, p).astype(np.float32)
                        assert np.max(np.abs(w_r)) < 2**52
                        b_r = np.fmod(
                            np.dot(bias.astype(np.float64),
                                   r_right.T.astype(np.float64)),
                            p).astype(np.float32)
                    else:
                        r_left = np.random.randint(
                            low=-r_max,
                            high=r_max + 1,
                            size=(reps, h_out * w_out)).astype(np.float32)
                        r_right = np.random.randint(
                            low=-r_max, high=r_max + 1,
                            size=(reps, ch_out)).astype(np.float32)
                        w_r = np.zeros((reps, h * w, ch_in)).astype(np.float32)
                        b_r = np.zeros(reps).astype(np.float32)

                        X = np.zeros((1, h, w, ch_in)).astype(np.float64)
                        x_ph = tf.placeholder(tf.float64,
                                              shape=(None, h, w, ch_in))
                        w_ph = tf.placeholder(tf.float64,
                                              shape=(k_h, k_w, ch_in, 1))
                        y_ph = tf.placeholder(tf.float64,
                                              shape=(1, h_out, w_out, 1))
                        z = tf.nn.conv2d(x_ph, w_ph,
                                         (1, ) + layer.strides + (1, ),
                                         layer.padding.upper())
                        dz = tf.gradients(z, x_ph, grad_ys=y_ph)[0]

                        for i in range(reps):
                            curr_r_left = r_left[i].astype(np.float64)
                            curr_r_right = r_right[i].astype(np.float64)
                            #print("sum(curr_r_left) = {}".format(np.sum(curr_r_left)))
                            #print("sum(curr_r_right) = {}".format(np.sum(curr_r_right)))

                            w_right = kernel.astype(np.float64).reshape(
                                (-1, ch_out)).dot(curr_r_right)
                            #print("sum(w_right) = {}".format(np.sum(w_right)))
                            assert np.max(np.abs(w_right)) < 2**52

                            w_r_i = sess.run(
                                dz,
                                feed_dict={
                                    x_ph: X,
                                    w_ph: w_right.reshape(k_w, k_h, ch_in, 1),
                                    y_ph: curr_r_left.reshape(
                                        (1, h_out, w_out, 1)),
                                })
                            #print("sum(w_r) = {}".format(np.sum(w_r_i.astype(np.float64))))
                            w_r[i] = np.fmod(w_r_i,
                                             p).astype(np.float32).reshape(
                                                 (h * w, ch_in))
                            assert np.max(np.abs(w_r[i])) < 2**52
                            #print("sum(w_r) = {}".format(np.sum(w_r[i].astype(np.float64))))
                            b_r[i] = np.fmod(
                                np.sum(curr_r_left) * np.fmod(
                                    np.dot(bias.astype(np.float64),
                                           curr_r_right), p),
                                p).astype(np.float32)
                            #print("sum(b_r) = {}".format(np.sum(b_r[i].astype(np.float64))))

                    print("r_left: {}".format(r_left.astype(np.float64).sum()))
                    print("r_right: {}".format(
                        r_right.astype(np.float64).sum()))
                    print("w_r: {}".format(w_r.astype(np.float64).sum()))
                    print("b_r: {}".format(b_r.astype(np.float64).sum()))
                    layer_weights.append(r_left.reshape(-1))
                    layer_weights.append(r_right.reshape(-1))
                    layer_weights.append(w_r.reshape(-1))
                    layer_weights.append(b_r.reshape(-1))
            else:
                kernel = layer.kernel.eval(sess)
                bias = layer.bias.eval(sess)
                print("sum(abs(conv_w)): {}".format(np.abs(kernel).sum()))

            if not verif_preproc:
                layer_weights.append(kernel.reshape(-1).astype(dtype))
                layer_weights.append(bias.reshape(-1).astype(dtype))

        elif isinstance(layer, MaxPooling2D):
            json = {
                'name': 'MaxPooling2D',
                'pool_size': layer.pool_size,
                'strides': layer.strides,
                'padding': layer.padding
            }

        elif isinstance(layer, AveragePooling2D):
            json = {
                'name': 'AveragePooling2D',
                'pool_size': layer.pool_size,
                'strides': layer.strides,
                'padding': layer.padding
            }

        elif isinstance(layer, Flatten):
            json = {'name': 'Flatten'}

        elif isinstance(layer, Dense):
            assert not (slalom_privacy and verif_preproc)
            json = {
                'name': 'Dense',
                'kernel_size': layer.kernel.get_shape().as_list(),
                'pointwise_conv': False,
                'activation': get_activation_name(layer),
                'bits_w': layer.bits_w,
                'bits_x': layer.bits_x
            }

            if isinstance(layer, DenseQ):
                kernel = sess.run(layer.kernel_q).reshape(-1).astype(dtype)
                bias = sess.run(layer.bias_q).reshape(-1).astype(dtype)

            else:
                kernel = layer.kernel.eval(sess).reshape(-1).astype(dtype)
                bias = layer.bias.eval(sess).reshape(-1).astype(dtype)
            print("sum(abs(dense_w)): {}".format(np.abs(kernel).sum()))
            layer_weights.append(kernel)
            layer_weights.append(bias)

        elif isinstance(layer, DepthwiseConv2D):
            json = {
                'name': 'DepthwiseConv2D',
                'kernel_size': layer.depthwise_kernel.get_shape().as_list(),
                'strides': layer.strides,
                'padding': layer.padding,
                'activation': get_activation_name(layer)
            }

            if isinstance(layer, DepthwiseConv2DQ):
                kernel = sess.run(layer.kernel_q)
                bias = sess.run(layer.bias_q)

                if verif_preproc:
                    # precompute W*r
                    k_w, k_h, ch_in, _ = kernel.shape
                    h, w = layer.input_shape[1], layer.input_shape[2]
                    h_out, w_out = layer.output_shape[1], layer.output_shape[2]

                    np.random.seed(0)
                    r_left = np.random.randint(low=-r_max,
                                               high=r_max + 1,
                                               size=(reps, h_out *
                                                     w_out)).astype(np.float32)
                    w_r = np.zeros((reps, h * w, ch_in)).astype(np.float32)
                    b_r = np.zeros((reps, ch_in)).astype(np.float32)

                    X = np.zeros((1, h, w, ch_in)).astype(np.float64)
                    x_ph = tf.placeholder(tf.float64,
                                          shape=(None, h, w, ch_in))
                    w_ph = tf.placeholder(tf.float64,
                                          shape=(k_h, k_w, ch_in, 1))
                    y_ph = tf.placeholder(tf.float64,
                                          shape=(1, h_out, w_out, ch_in))
                    z = tf.nn.depthwise_conv2d_native(x_ph, w_ph, (1, ) +
                                                      layer.strides + (1, ),
                                                      layer.padding.upper())
                    dz = tf.gradients(z, x_ph, grad_ys=y_ph)[0]

                    for i in range(reps):
                        curr_r_left = r_left[i].astype(np.float64)
                        #print("r_left: {}".format(curr_r_left.astype(np.float64).sum()))
                        w_r_i = sess.run(
                            dz,
                            feed_dict={
                                x_ph: X,
                                w_ph: kernel.astype(np.float64),
                                y_ph: curr_r_left.reshape(
                                    (1, h_out, w_out, 1)).repeat(ch_in,
                                                                 axis=-1),
                            })
                        w_r[i] = np.fmod(w_r_i, p).astype(np.float32).reshape(
                            (h * w, ch_in))
                        assert np.max(np.abs(w_r[i])) < 2**52
                        #print("sum(w_r) = {}".format(np.sum(w_r[i].astype(np.float64))))

                        b_r[i] = np.fmod(
                            np.sum(curr_r_left) * bias.astype(np.float64), p)
                        #print("sum(b_r) = {}".format(np.sum(b_r[i].astype(np.float64))))

                    print("r_left: {}".format(r_left.astype(np.float64).sum()))
                    print("w_r: {}".format(w_r.astype(np.float64).sum()))
                    print("b_r: {}".format(b_r.astype(np.float64).sum()))
                    layer_weights.append(r_left.reshape(-1))
                    layer_weights.append(w_r.reshape(-1))
                    layer_weights.append(b_r.reshape(-1))

            else:
                kernel = layer.depthwise_kernel.eval(sess)
                bias = layer.bias.eval(sess)
            print("sum(abs(depthwise_w)): {}".format(np.abs(kernel).sum()))

            if not verif_preproc:
                layer_weights.append(kernel.reshape(-1).astype(dtype))
                layer_weights.append(bias.reshape(-1).astype(dtype))

        elif isinstance(layer, GlobalAveragePooling2D):
            json = {'name': 'GlobalAveragePooling2D'}

        elif isinstance(layer, Dropout):
            pass

        elif isinstance(layer, Lambda):
            pass

        elif isinstance(layer, Reshape):
            json = {'name': 'Reshape', 'shape': layer.target_shape}

        elif isinstance(layer, ZeroPadding2D):
            # layer.padding may be a scalar or a tuple; keep only the scalar.
            padding = (layer.padding[0]
                       if hasattr(layer.padding, '__len__') else layer.padding)
            json = {'name': 'ZeroPadding2D', 'padding': padding}

        elif isinstance(layer, ActivationQ):
            json = {
                'name': 'Activation',
                'type': layer.activation_name(),
                'bits_w': layer.bits_w
            }

            if hasattr(layer,
                       'maxpool_params') and layer.maxpool_params is not None:
                json2 = {
                    'name': 'MaxPooling2D',
                    'pool_size': layer.maxpool_params['pool_size'],
                    'strides': layer.maxpool_params['strides'],
                    'padding': layer.maxpool_params['padding']
                }

                json = [json, json2]

        elif isinstance(layer, ResNetBlock):
            path1 = []
            path2 = []
            for l in layer.path1:
                if isinstance(l, Conv2D) or isinstance(l, ActivationQ):
                    js, w = layer_to_json(l)
                    path1.append(js)
                    layer_weights.extend(w)

            for l in layer.path2:
                if isinstance(l, Conv2D) or isinstance(l, ActivationQ):
                    js, w = layer_to_json(l)
                    path2.append(js)
                    layer_weights.extend(w)

            json = {
                'name': 'ResNetBlock',
                'identity': layer.identity,
                'bits_w': layer.bits_w,
                'bits_x': layer.bits_x,
                'path1': path1,
                'path2': path2
            }

            if slalom_privacy:
                json = [json]
                js2, _ = layer_to_json(layer.merge_act)
                if isinstance(js2, dict):
                    json.append(js2)
                else:
                    json.extend(js2)

        else:
            raise NameError("Unknown layer {}".format(layer))

        return json, layer_weights
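A hypothetical outer loop showing how the (json, weights) pairs from layer_to_json would be accumulated, assuming model is the enclosing Keras model and the closure variables (sess, verif_preproc, slalom_privacy, reps, p, r_max, dtype) are in scope as in the original module.

model_json = []
all_weights = []
for layer in model.layers:
    js, weights = layer_to_json(layer)
    if js:  # Dropout/Lambda layers leave json as an empty dict; skip them
        model_json.extend(js if isinstance(js, list) else [js])
    all_weights.extend(weights)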