Example 1

import crc32c

def calculateSuperblockChecksum(pathToDevice):
    # Read the raw superblock: it starts at offset 0x400 and is 1024 bytes long
    with open(pathToDevice, "rb") as device:
        device.seek(0x400)
        superblock = device.read(1024)

    print("ORIGINAL SUPERBLOCK:")
    print(superblock.hex())
    print("")

    # 0x3FC - offset of the 4-byte checksum field
    # sb_nocs - superblock without the checksum field (1020 bytes)
    sb_nocs = superblock[0:0x3FC]

    checksum_raw = crc32c.crc32(sb_nocs)
    print("ORIGINAL CHECKSUM (stored little endian, printed byte-reversed): 0x" +
          superblock[0x3FC:0x400][::-1].hex())
    print("RAW SUPERBLOCK CRC32C CHECKSUM WITHOUT CHECKSUM FIELD (1020 bytes): "
          + hex(checksum_raw))

    # The stored value is the bit-inverted CRC32C
    # (0xFFFFFFFF - x is equivalent to x ^ 0xFFFFFFFF for a 32-bit x)
    checksum_final = 0xFFFFFFFF - checksum_raw
    print("INVERTED (0xFFFFFFFF - previous value): " + hex(checksum_final))
Example 2
    def __init__(self, index, header, records):
        self.index = index
        self.header = header

        header_crc_bytes = struct.pack(
            "<" + HDR_FMT_RP_PREFIX_NO_CRC + HDR_FMT_CRC, *self.header[1:])
        header_crc = crc32c.crc32(header_crc_bytes)
        if self.header.header_crc != header_crc:
            raise CorruptBatchError(self)

        crc = crc32c.crc32(self._crc_header_be_bytes())
        crc = crc32c.crc32(records, crc)
        if self.header.crc != crc:
            raise CorruptBatchError(self)
Example 3
 def masked_crc(data):
     mask = 0xa282ead8
     crc = crc32c.crc32(data)
     masked = ((crc >> 15) | (crc << 17)) + mask
     masked = np.uint32(masked)
     masked_bytes = struct.pack("<I", masked)
     return masked_bytes
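This is the record-masking scheme used by TFRecord-style files: rotate the CRC left by 17 bits, then add the constant 0xa282ead8. The inverse, useful for validating a stored masked CRC, could look like this sketch (unmasked_crc is an illustrative name, not from the original):

import struct

def unmasked_crc(masked_bytes):
    # Undo the masking: subtract the mask modulo 2**32, then rotate right
    # by 17 bits (the inverse of the left rotation in masked_crc)
    (masked,) = struct.unpack("<I", masked_bytes)
    rotated = (masked - 0xa282ead8) & 0xffffffff
    return ((rotated >> 17) | (rotated << 15)) & 0xffffffff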
Example 4
 def init(cls, request, metadata, media, bucket, is_destination, context):
     timestamp = datetime.datetime.now(datetime.timezone.utc)
     metadata.generation = random.getrandbits(63)
     metadata.metageneration = 1
     metadata.id = "%s/o/%s#%d" % (
         metadata.bucket,
         metadata.name,
         metadata.generation,
     )
     metadata.size = len(media)
     actual_md5Hash = base64.b64encode(
         hashlib.md5(media).digest()).decode("utf-8")
     if metadata.md5_hash != "" and actual_md5Hash != metadata.md5_hash:
         utils.error.mismatch("md5Hash", metadata.md5_hash, actual_md5Hash,
                              context)
     actual_crc32c = crc32c.crc32(media)
     if metadata.HasField(
             "crc32c") and actual_crc32c != metadata.crc32c.value:
         utils.error.mismatch("crc32c", metadata.crc32c.value,
                              actual_crc32c, context)
     metadata.md5_hash = actual_md5Hash
     metadata.crc32c.value = actual_crc32c
     metadata.time_created.FromDatetime(timestamp)
     metadata.updated.FromDatetime(timestamp)
     metadata.owner.entity = utils.acl.get_object_entity("OWNER", context)
     metadata.owner.entity_id = hashlib.md5(
         metadata.owner.entity.encode("utf-8")).hexdigest()
     predefined_acl = utils.acl.extract_predefined_acl(
         request, is_destination, context)
     cls.__insert_predefined_acl(metadata, bucket, predefined_acl, context)
     return cls(metadata, media, bucket)
Example 5
 def _test_runGraphOneDOPerDOM(self, repeats=1):
     g1 = [{"oid": "A", "type": "plain", "storage": Categories.MEMORY}]
     g2 = [
         {
             "oid": "B",
             "type": "app",
             "app": "dlg.apps.crc.CRCApp"
         },
         {
             "oid": "C",
             "type": "plain",
             "storage": Categories.MEMORY,
             "producers": ["B"]
         },
     ]
     rels = [DROPRel("B", DROPLinkType.CONSUMER, "A")]
     a_data = os.urandom(32)
     c_data = six.b(str(crc32(a_data, 0)))
     node_managers = [self._start_dm() for _ in range(2)]
     for n in range(repeats):
         sessionId = 's%d' % n
         self._test_runGraphInTwoNMs(copy.deepcopy(g1),
                                     copy.deepcopy(g2),
                                     rels,
                                     a_data,
                                     c_data,
                                     sessionId=sessionId,
                                     node_managers=node_managers)
Example 6

 def _validate_crc32c(self):
     """Validate the crc32c field against the stored media."""
     actual = self.metadata.get('crc32c', '')
     expected = base64.b64encode(struct.pack('>I', crc32c.crc32(self.media)))
     if actual != expected:
         raise error_response.ErrorResponse(
             'Mismatched CRC32C checksum expected=%s, actual=%s' % (expected,
                                                                    actual))
Example 7
 def masked_crc(data: bytes) -> bytes:
     """CRC checksum."""
     mask = 0xa282ead8
     crc = crc32c.crc32(data)
     masked = (((crc >> 15) | (crc << 17)) + mask) & 0xffffffff
     masked = np.uint32(masked)
     masked_bytes = struct.pack("<I", masked)
     return masked_bytes
Example 8
    def _checksum(self, data, ipv4_src, ipv4_dst):
        """
        Calculate and return the checksum (in raw bytes) of data.

        :param data Data on which to calculate checksum.
        :type data bytes

        :rtype:  bytes
        :return: Checksum.
        """
        if isinstance(self._algorithm, six.string_types):
            if self._algorithm == "crc32":
                check = struct.pack(self._endian + "L", (zlib.crc32(data) & 0xFFFFFFFF))

            elif self._algorithm == "crc32c":
                check = struct.pack(self._endian + "L", crc32c.crc32(data))

            elif self._algorithm == "adler32":
                check = struct.pack(self._endian + "L", (zlib.adler32(data) & 0xFFFFFFFF))

            elif self._algorithm == "ipv4":
                check = struct.pack(self._endian + "H", helpers.ipv4_checksum(data))

            elif self._algorithm == "udp":
                return struct.pack(
                    self._endian + "H", helpers.udp_checksum(msg=data, src_addr=ipv4_src, dst_addr=ipv4_dst)
                )

            elif self._algorithm == "md5":
                digest = hashlib.md5(data).digest()

                # TODO: is this right?
                if self._endian == ">":
                    (a, b, c, d) = struct.unpack("<LLLL", digest)
                    digest = struct.pack(">LLLL", a, b, c, d)

                check = digest

            elif self._algorithm == "sha1":
                digest = hashlib.sha1(data).digest()

                # TODO: is this right?
                if self._endian == ">":
                    (a, b, c, d, e) = struct.unpack("<LLLLL", digest)
                    digest = struct.pack(">LLLLL", a, b, c, d, e)

                check = digest

            else:
                raise exception.SullyRuntimeError("INVALID CHECKSUM ALGORITHM SPECIFIED: %s" % self._algorithm)
        else:
            check = self._algorithm(data)

        if self._length:
            return check[: self._length]
        else:
            return check
Example 9
 def _validate_crc32c(self):
     """Validate the crc32c field against the stored media."""
     actual = self.metadata.get("crc32c", "")
     expected = base64.b64encode(struct.pack(">I", crc32c.crc32(
         self.media))).decode("utf-8")
     if actual != expected:
         raise error_response.ErrorResponse(
             "Mismatched CRC32C checksum expected={}, actual={}".format(
                 expected, actual))
Example 10
def blob_already_exists(blob, dest_file):
    if isfile(dest_file):
        with open(dest_file, 'rb') as file:
            raw_crc = crc32(file.read())

        encoded = standard_b64encode(struct.pack('>I',
                                                 raw_crc)).decode('ascii')

        return encoded == blob.crc32c
    return False
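Several snippets here serialize a CRC32C the same way: pack the 32-bit value big-endian and base64-encode it, the textual form GCS uses for the crc32c field. As a standalone sketch:

import base64
import struct
import crc32c

def crc32c_b64(data):
    # Pack the checksum as 4 big-endian bytes, then base64-encode
    return base64.b64encode(struct.pack('>I', crc32c.crc32(data))).decode('ascii')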
Example 11
 def __init__(self, data=None):
     try:
         from crc32c import crc32
     except ImportError as e:
         logger.warning(
             "crc32c: %s. Switching to software mode, which may be slow." %
             e)
         os.environ["CRC32C_SW_MODE"] = "auto"
         from crc32c import crc32
     self._checksum_value = crc32(data if data is not None else b"")
     self._crc32c = crc32
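The class stores a reference to crc32 precisely so the value can be extended later; a hypothetical update method (not part of the original snippet) would pass the running checksum as the second argument:

 def update(self, chunk):
     # crc32c.crc32 takes the previous checksum as its second argument,
     # so the running value can be extended chunk by chunk
     self._checksum_value = self._crc32c(chunk, self._checksum_value)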
Example 12
 def GetObjectMedia(self, request, context):
     blob = db.get_object(request, request.bucket, request.object, False,
                          context)
     yield storage_pb2.GetObjectMediaResponse(
         checksummed_data={
             "content": blob.media,
             "crc32c": {
                 "value": crc32c.crc32(blob.media)
             },
         },
         metadata=blob.metadata,
     )
Example 13

 def InsertObject(self, request_iterator, context):
     db.insert_test_bucket(context)
     upload, is_resumable = None, False
     for request in request_iterator:
         first_message = request.WhichOneof("first_message")
         if first_message == "upload_id":
             upload = db.get_upload(request.upload_id, context)
             if upload.complete:
                 utils.error.invalid(
                     "Uploading to a completed upload %s" %
                     upload.upload_id, context)
             is_resumable = True
         elif first_message == "insert_object_spec":
             bucket = db.get_bucket_without_generation(
                 request.insert_object_spec.resource.bucket,
                 context).metadata
             upload = gcs_type.holder.DataHolder.init_resumable_grpc(
                 request, bucket, context)
         data = request.WhichOneof("data")
         checksummed_data = None
         if data == "checksummed_data":
             checksummed_data = request.checksummed_data
         elif data == "reference":
             checksummed_data = self.GetObjectMedia(
                 request.reference, context).checksummed_data
         else:
             continue
         content = checksummed_data.content
         crc32c_hash = (checksummed_data.crc32c.value
                        if checksummed_data.HasField("crc32c") else None)
         if crc32c_hash is not None:
             actual_crc32c = crc32c.crc32(content)
             if actual_crc32c != crc32c_hash:
                 utils.error.mismatch(
                     "crc32c in checksummed data",
                     crc32c_hash,
                     actual_crc32c,
                     context,
                 )
         upload.media += checksummed_data.content
         if request.finish_write:
             upload.complete = True
             break
     if not upload.complete:
         if not is_resumable:
             utils.error.missing("finish_write in request", context)
         else:
             return
     blob, _ = gcs_type.object.Object.init(upload.request, upload.metadata,
                                           upload.media, upload.bucket,
                                           False, context)
     db.insert_object(upload.request, upload.bucket.name, blob, context)
     return blob.metadata
Example 14
    def test_run_streaming_consumer_remotely(self):
        """
        A test that checks that a streaming consumer works correctly across
        node managers when its input is in a different node, like this:

        DM #1                 DM #2
        ==================    ==============
        | A --> B --> C -|----|--> D --> E |
        ==================    ==============

        Here B is a normal application and D is a streaming consumer of C.
        We use A and E to verify that all the data flows through correctly.
        """

        g1 = [
            {
                "oid": "A",
                "type": "plain",
                "storage": Categories.MEMORY
            },
            {
                "oid": "B",
                "type": "app",
                "app": "dlg.apps.simple.CopyApp",
                "inputs": ["A"],
                "outputs": ["C"],
            },
            {
                "oid": "C",
                "type": "plain",
                "storage": Categories.MEMORY
            },
        ]
        g2 = [
            {
                "oid": "D",
                "type": "app",
                "app": "dlg.apps.crc.CRCStreamApp",
                "outputs": ["E"],
            },
            {
                "oid": "E",
                "type": "plain",
                "storage": Categories.MEMORY
            },
        ]
        add_test_reprodata(g1)
        add_test_reprodata(g2)
        rels = [DROPRel("C", DROPLinkType.STREAMING_INPUT, "D")]
        a_data = os.urandom(32)
        e_data = six.b(str(crc32(a_data, 0)))
        self._test_runGraphInTwoNMs(g1, g2, rels, a_data, e_data, leaf_oid="E")
Example 15
    def __init__(self, gcs_url, bucket_name, name, generation, request, media):
        """Initialize a new object revision.

        :param gcs_url:str the base URL for the GCS service.
        :param bucket_name:str the name of the bucket that contains the object.
        :param name:str the name of the object.
        :param generation:int the generation number for this object.
        :param request:flask.Request the contents of the HTTP request.
        :param media:str the contents of the object.
        """
        self.gcs_url = gcs_url
        self.bucket_name = bucket_name
        self.name = name
        self.generation = str(generation)
        self.object_id = bucket_name + "/o/" + name + "/" + str(generation)
        now = time.gmtime(time.time())
        timestamp = time.strftime("%Y-%m-%dT%H:%M:%SZ", now)
        self.media = media
        instructions = request.headers.get("x-goog-testbench-instructions")
        if instructions == "inject-upload-data-error":
            self.media = testbench_utils.corrupt_media(media)

        self.metadata = {
            "timeCreated": timestamp,
            "updated": timestamp,
            "metageneration": "0",
            "generation": str(generation),
            "location": "US",
            "storageClass": "STANDARD",
            "size": str(len(self.media)),
            "etag": "XYZ=",
            "owner": {
                "entity": "project-owners-123456789",
                "entityId": ""
            },
            "md5Hash":
                base64.b64encode(hashlib.md5(self.media).digest()).decode("utf-8"),
            "crc32c":
                base64.b64encode(struct.pack(">I", crc32c.crc32(self.media))).decode("utf-8"),
        }
        if request.headers.get("content-type") is not None:
            self.metadata["contentType"] = request.headers.get("content-type")
Example 16
def hashfunc_crc32c(input_file):
    """Get a crc32c hash digest from a large input file

    Arguments:
        input_file (str): path to input file

    Returns:
        (str): crc32c digest of file contents as hex string
    """

    # Read in binary mode and close the handle deterministically
    with open(input_file, "rb") as f:
        content = f.read()
    digest = crc32c.crc32(content)
    hex_string = hex(digest).split("x")[-1]
    hex_string_length = 8
    return pad_hexdigest(hex_string, hex_string_length)
Example 17
def get_hashes(what):
    blocksize = 128 * 256
    try:
        with Path(what).open("rb") as file:
            crcvalue = 0
            while True:
                buf = file.read(blocksize)
                if not buf:
                    break
                crcvalue = (crc32(buf, crcvalue) & 0xffffffff)
            hashstring = f'{crcvalue:x}'
    except Exception:
        print(Style.BRIGHT + Fore.MAGENTA + "    Cannot calculate CRC32 of " +
              str(what),
              file=f)
        hashstring = "XYZ"

    return hashstring
Example 18
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("-d",
                        "--drive",
                        help="Drive to analyze",
                        action="store",
                        dest='drive')
    args = parser.parse_args()
    if args.drive is None:
        l.error("Error, no arguments passed")
        exit(1)
    dev = os.open(args.drive, os.O_RDONLY)
    os.lseek(dev, 0x10000, os.SEEK_SET)
    bl = os.read(dev, 0x1000)
    decode_superblock(bl)
    chksum = hex(crc32c.crc32(bl) ^ 0xffffffff)
    l.info(f"Calculated crc32c sum for superblock {str(chksum).rstrip('L')}")
    exit(0)
Example 19
 def handle_insert_object_streaming_rpc(self, request_iterator, context):
     """Process an InsertObject streaming RPC, returning the upload object associated with it."""
     upload, is_resumable = None, False
     for request in request_iterator:
         first_message = request.WhichOneof("first_message")
         if first_message == "upload_id":
             upload = db.get_upload(request.upload_id, context)
             if upload.complete:
                 utils.error.invalid(
                     "Uploading to a completed upload %s" %
                     upload.upload_id, context)
             is_resumable = True
         elif first_message == "insert_object_spec":
             bucket = db.get_bucket_without_generation(
                 request.insert_object_spec.resource.bucket,
                 context).metadata
             upload = gcs_type.holder.DataHolder.init_resumable_grpc(
                 request, bucket, context)
         data = request.WhichOneof("data")
         checksummed_data = None
         if data == "checksummed_data":
             checksummed_data = request.checksummed_data
         elif data == "reference":
             checksummed_data = self.GetObjectMedia(
                 request.reference, context).checksummed_data
         else:
             continue
         content = checksummed_data.content
         crc32c_hash = (checksummed_data.crc32c.value
                        if checksummed_data.HasField("crc32c") else None)
         if crc32c_hash is not None:
             actual_crc32c = crc32c.crc32(content)
             if actual_crc32c != crc32c_hash:
                 utils.error.mismatch(
                     "crc32c in checksummed data",
                     crc32c_hash,
                     actual_crc32c,
                     context,
                 )
         upload.media += checksummed_data.content
         if request.finish_write:
             upload.complete = True
             break
     return upload, is_resumable
Example 20
    def test_run_streaming_consumer_remotely2(self):
        """
        Like above, but C is hosted by DM #2.
        """

        g1 = [
            {
                "oid": "A",
                "type": "plain",
                "storage": Categories.MEMORY
            },
            {
                "oid": "B",
                "type": "app",
                "app": "dlg.apps.simple.CopyApp",
                "inputs": ["A"],
            },
        ]
        g2 = [
            {
                "oid": "C",
                "type": "plain",
                "storage": Categories.MEMORY
            },
            {
                "oid": "D",
                "type": "app",
                "app": "dlg.apps.crc.CRCStreamApp",
                "streamingInputs": ["C"],
                "outputs": ["E"],
            },
            {
                "oid": "E",
                "type": "plain",
                "storage": Categories.MEMORY
            },
        ]
        add_test_reprodata(g1)
        add_test_reprodata(g2)
        rels = [DROPRel("C", DROPLinkType.OUTPUT, "B")]
        a_data = os.urandom(32)
        e_data = six.b(str(crc32(a_data, 0)))
        self._test_runGraphInTwoNMs(g1, g2, rels, a_data, e_data, leaf_oid="E")
Example 21
    def test_runGraphOneDOPerDOM(self):
        """
        A test that creates three DROPs in two different DMs and runs the graph.
        For this the graphs that are fed into the DMs must *not* express the
        inter-DM relationships, although they are still passed down
        separately. The graph looks like:

        DM #1      DM #2
        =======    =============
        | A --|----|-> B --> C |
        =======    =============
        """

        g1 = [{"oid":"A", "type":"plain", "storage": "memory"}]
        g2 = [{"oid":"B", "type":"app", "app":"dlg.apps.crc.CRCApp"},
              {"oid":"C", "type":"plain", "storage": "memory", "producers":["B"]}]
        rels = [DROPRel('B', DROPLinkType.CONSUMER, 'A')]
        a_data = os.urandom(32)
        c_data = six.b(str(crc32(a_data, 0)))
        self._test_runGraphInTwoNMs(g1, g2, rels, a_data, c_data)
Example 22
def calc_checksum(update: Message) -> int:
    # BOLT #7: The checksum of a `channel_update` is the CRC32C checksum as
    # specified in [RFC3720](https://tools.ietf.org/html/rfc3720#appendix-B.4)
    # of this `channel_update` without its `signature` and `timestamp` fields.
    bufio = io.BytesIO()
    update.write(bufio)
    buf = bufio.getvalue()

    # BOLT #7:
    # 1. type: 258 (`channel_update`)
    # 2. data:
    #     * [`signature`:`signature`]
    #     * [`chain_hash`:`chain_hash`]
    #     * [`short_channel_id`:`short_channel_id`]
    #     * [`u32`:`timestamp`]
    #     * [`byte`:`message_flags`]

    # Note: 2 bytes for `type` field
    return crc32c.crc32(buf[2 + 64:2 + 64 + 32 + 8] +
                        buf[2 + 64 + 32 + 8 + 4:])
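The slice arithmetic above follows the BOLT #7 layout quoted in the comments: skip the 2-byte type and 64-byte signature, keep chain_hash (32 bytes) and short_channel_id (8 bytes), then skip the 4-byte timestamp. Restated with named sizes (the constant and function names are illustrative, not from the original):

TYPE_LEN = 2         # message type (258)
SIG_LEN = 64         # signature, excluded from the checksum
CHAIN_HASH_LEN = 32  # chain_hash
SCID_LEN = 8         # short_channel_id
TIMESTAMP_LEN = 4    # timestamp (u32), excluded from the checksum

def calc_checksum_explicit(buf: bytes) -> int:
    # Keep everything after the signature, except the timestamp field
    start = TYPE_LEN + SIG_LEN
    keep = buf[start:start + CHAIN_HASH_LEN + SCID_LEN]
    rest = buf[start + CHAIN_HASH_LEN + SCID_LEN + TIMESTAMP_LEN:]
    return crc32c.crc32(keep + rest)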
Example 23
    def run(self):
        if len(self.inputs) != 1:
            raise Exception("This application reads from only one DROP")
        if len(self.outputs) != 1:
            raise Exception("This application writes to only one DROP")

        inputDrop = self.inputs[0]
        outputDrop = self.outputs[0]

        bufsize = 4 * 1024**2
        desc = inputDrop.open()
        buf = inputDrop.read(desc, bufsize)
        crc = 0
        while buf:
            crc = crc32(buf, crc)
            buf = inputDrop.read(desc, bufsize)
        inputDrop.close(desc)

        # Rely on whatever implementation we decide to use
        # for storing our data
        outputDrop.write(six.b(str(crc)))
Example 24
    def run(self):
        if len(self.inputs) != 1:
            raise Exception("This application reads from only one DROP")
        if len(self.outputs) != 1:
            raise Exception("This application writes to only one DROP")

        inputDrop = self.inputs[0]
        outputDrop = self.outputs[0]

        bufsize = 4 * 1024 ** 2
        desc = inputDrop.open()
        buf = inputDrop.read(desc, bufsize)
        crc = 0
        while buf:
            crc = crc32(buf, crc)
            buf = inputDrop.read(desc, bufsize)
        inputDrop.close(desc)

        # Rely on whatever implementation we decide to use
        # for storing our data
        outputDrop.write(str(crc))
Example 25
    def _test_write_withDropType(self, dropType):
        """
        Test an AbstractDROP and a simple AppDROP (for checksum calculation)
        """
        a = dropType("oid:A", "uid:A", expectedSize=self._test_drop_sz * ONE_MB)
        b = SumupContainerChecksum("oid:B", "uid:B")
        c = InMemoryDROP("oid:C", "uid:C")
        b.addInput(a)
        b.addOutput(c)

        test_crc = 0
        with DROPWaiterCtx(self, c):
            for _ in range(self._test_num_blocks):
                a.write(self._test_block)
                test_crc = crc32(self._test_block, test_crc)

        # Read the checksum from c
        cChecksum = int(droputils.allDropContents(c))

        self.assertNotEquals(a.checksum, 0)
        self.assertEquals(a.checksum, test_crc)
        self.assertEquals(cChecksum, test_crc)
Example 26
def test():
    ascii2bin(7, END + 2)
    with open('out.bin', 'rb') as f:
        content = f.read()
        # print(type(content))
        # print(len(content)/4)
        # print(content[0:4])
        # print(content[4:8])
        # print(content[(END-7)*4:])
        # print(content[-8])
        # for item in content[:-4:-1]:
        # 	print("0x%x" % item)
        for start in [7, 8, 9]:
            for end in [END, END + 1, END + 2]:

                crc = crc32(
                    content[(start - 7) * 4:(end - 7) * 4]) & 0xffffffff
                print('start = %d, end = %d:' % (start, end))
                if crc == 0xfe45c2b8:
                    print('Match!')
                else:
                    print('Mismatch...crc = 0x%x' % crc)
Example 27
    def _test_write_withDropType(self, dropType):
        """
        Test an AbstractDROP and a simple AppDROP (for checksum calculation)
        """
        a = dropType('oid:A', 'uid:A', expectedSize=self._test_drop_sz * ONE_MB)
        b = SumupContainerChecksum('oid:B', 'uid:B')
        c = InMemoryDROP('oid:C', 'uid:C')
        b.addInput(a)
        b.addOutput(c)

        test_crc = 0
        with DROPWaiterCtx(self, c):
            for _ in range(self._test_num_blocks):
                a.write(self._test_block)
                test_crc = crc32(self._test_block, test_crc)

        # Read the checksum from c
        cChecksum = int(droputils.allDropContents(c))

        self.assertNotEqual(a.checksum, 0)
        self.assertEqual(a.checksum, test_crc)
        self.assertEqual(cChecksum, test_crc)
Example 28
    def test_socket_listener(self):
        '''
        A simple test to check that SocketListenerApps are indeed working as
        expected; that is, they write the data they receive into their output,
        and finish when the connection is closed from the client side

        The data flow diagram looks like this:

        A --> B --> C --> D
        '''

        host = 'localhost'
        port = 9933
        data = 'shine on you crazy diamond'

        a = SocketListenerApp('oid:A', 'uid:A', host=host, port=port)
        b = InMemoryDROP('oid:B', 'uid:B')
        c = SumupContainerChecksum('oid:C', 'uid:C')
        d = InMemoryDROP('oid:D', 'uid:D')
        a.addOutput(b)
        b.addConsumer(c)
        c.addOutput(d)

        # Create the socket, write, and close the connection, allowing
        # A to move to COMPLETED
        with DROPWaiterCtx(self, d, 3): # That's plenty of time
            threading.Thread(target=lambda a: a.execute(), args=(a,)).start()
            utils.writeToRemotePort(host, port, data, 1)

        for drop in [a,b,c,d]:
            self.assertEquals(DROPStates.COMPLETED, drop.status)

        # Our expectations are fulfilled!
        bContents = droputils.allDropContents(b)
        dContents = int(droputils.allDropContents(d))
        self.assertEquals(data, bContents)
        self.assertEquals(crc32(data, 0), dContents)
Example 29
    def _test_socket_listener(self, **kwargs):
        '''
        A simple test to check that SocketListenerApps are indeed working as
        expected; that is, they write the data they receive into their output,
        and finish when the connection is closed from the client side

        The data flow diagram looks like this:

        A --> B --> C --> D
        '''

        host = 'localhost'
        port = 9933
        data = os.urandom(1025)

        a = SocketListenerApp('oid:A', 'uid:A', host=host, port=port, **kwargs)
        b = InMemoryDROP('oid:B', 'uid:B')
        c = SumupContainerChecksum('oid:C', 'uid:C')
        d = InMemoryDROP('oid:D', 'uid:D')
        a.addOutput(b)
        b.addConsumer(c)
        c.addOutput(d)

        # Create the socket, write, and close the connection, allowing
        # A to move to COMPLETED
        with DROPWaiterCtx(self, d, 3):  # That's plenty of time
            a.async_execute()
            utils.write_to(host, port, data, 1)

        for drop in [a, b, c, d]:
            self.assertEqual(DROPStates.COMPLETED, drop.status)

        # Our expectations are fulfilled!
        bContents = droputils.allDropContents(b)
        dContents = int(droputils.allDropContents(d))
        self.assertEqual(data, bContents)
        self.assertEqual(crc32(data, 0), dContents)
Example 30
 def _update(self, chunk):
     """Given a chunk from the read in file, update the hexdigest
     """
     self.digest = crc32(chunk, self.digest)
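To go with _update, a hypothetical hexdigest accessor (not in the original snippet) could render the running 32-bit value as the hex string the docstring suggests:

 def hexdigest(self):
     # Format the 32-bit running CRC as a zero-padded lowercase hex string
     return "%08x" % (self.digest & 0xffffffff)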
Example 31

    def __init__(self, gcs_url, bucket_name, name, generation, request, media):
        """Initialize a new object revision.

        :param gcs_url:str the base URL for the GCS service.
        :param bucket_name:str the name of the bucket that contains the object.
        :param name:str the name of the object.
        :param generation:int the generation number for this object.
        :param request:flask.Request the contents of the HTTP request.
        :param media:str the contents of the object.
        """
        self.gcs_url = gcs_url
        self.bucket_name = bucket_name
        self.name = name
        self.generation = generation
        self.object_id = bucket_name + '/o/' + name + '/' + str(generation)
        now = time.gmtime(time.time())
        timestamp = time.strftime('%Y-%m-%dT%H:%M:%SZ', now)
        self.media = media
        instructions = request.headers.get('x-goog-testbench-instructions')
        if instructions == 'inject-upload-data-error':
            self.media = testbench_utils.corrupt_media(media)

        self.metadata = {
            'timeCreated': timestamp,
            'updated': timestamp,
            'metageneration': 0,
            'generation': generation,
            'location': 'US',
            'storageClass': 'STANDARD',
            'size': len(self.media),
            'etag': 'XYZ=',
            'owner': {
                'entity': 'project-owners-123456789',
                'entityId': '',
            },
            'md5Hash': base64.b64encode(hashlib.md5(self.media).digest()),
            'crc32c': base64.b64encode(struct.pack('>I', crc32c.crc32(self.media)))
        }
        if request.headers.get('content-type') is not None:
            self.metadata['contentType'] = request.headers.get('content-type')
        # Update the derived metadata attributes (e.g.: id, kind, selfLink)
        self.update_from_metadata({})
        # Capture any encryption key headers.
        self._capture_customer_encryption(request)
        self._update_predefined_acl(request.args.get('predefinedAcl'))
        acl2json_mapping = {
            'authenticated-read': 'authenticatedRead',
            'bucket-owner-full-control': 'bucketOwnerFullControl',
            'bucket-owner-read': 'bucketOwnerRead',
            'private': 'private',
            'project-private': 'projectPrivate',
            'public-read': 'publicRead',
        }
        if request.headers.get('x-goog-acl') is not None:
            acl = request.headers.get('x-goog-acl')
            predefined = acl2json_mapping.get(acl)
            if predefined is not None:
                self._update_predefined_acl(predefined)
            else:
                raise error_response.ErrorResponse(
                    'Invalid predefinedAcl value %s' % acl, status_code=400)
Example 32
    def __init__(self, gcs_url, bucket_name, name, generation, request, media):
        """Initialize a new object revision.

        :param gcs_url:str the base URL for the GCS service.
        :param bucket_name:str the name of the bucket that contains the object.
        :param name:str the name of the object.
        :param generation:int the generation number for this object.
        :param request:flask.Request the contents of the HTTP request.
        :param media:str the contents of the object.
        """
        self.gcs_url = gcs_url
        self.bucket_name = bucket_name
        self.name = name
        self.generation = str(generation)
        self.object_id = bucket_name + "/o/" + name + "/" + str(generation)
        now = time.gmtime(time.time())
        timestamp = time.strftime("%Y-%m-%dT%H:%M:%SZ", now)
        self.media = media
        instructions = request.headers.get("x-goog-testbench-instructions")
        if instructions == "inject-upload-data-error":
            self.media = testbench_utils.corrupt_media(media)

        self.metadata = {
            "timeCreated": timestamp,
            "updated": timestamp,
            "metageneration": "0",
            "generation": str(generation),
            "location": "US",
            "storageClass": "STANDARD",
            "size": str(len(self.media)),
            "etag": "XYZ=",
            "owner": {"entity": "project-owners-123456789", "entityId": ""},
            "md5Hash": base64.b64encode(hashlib.md5(self.media).digest()).decode(
                "utf-8"
            ),
            "crc32c": base64.b64encode(
                struct.pack(">I", crc32c.crc32(self.media))
            ).decode("utf-8"),
        }
        if request.headers.get("content-type") is not None:
            self.metadata["contentType"] = request.headers.get("content-type")
        # Update the derived metadata attributes (e.g.: id, kind, selfLink)
        self.update_from_metadata({})
        # Capture any encryption key headers.
        self._capture_customer_encryption(request)
        self._update_predefined_acl(request.args.get("predefinedAcl"))
        acl2json_mapping = {
            "authenticated-read": "authenticatedRead",
            "bucket-owner-full-control": "bucketOwnerFullControl",
            "bucket-owner-read": "bucketOwnerRead",
            "private": "private",
            "project-private": "projectPrivate",
            "public-read": "publicRead",
        }
        if request.headers.get("x-goog-acl") is not None:
            acl = request.headers.get("x-goog-acl")
            predefined = acl2json_mapping.get(acl)
            if predefined is not None:
                self._update_predefined_acl(predefined)
            else:
                raise error_response.ErrorResponse(
                    "Invalid predefinedAcl value %s" % acl, status_code=400
                )
Example 33
def masked_crc32c(data: bytes):
    checksum = crc32c.crc32(data)
    return ((((checksum >> 15) & 0xffffffff) |
             ((checksum << 17) & 0xffffffff)) + 0xa282ead8) & 0xffffffff