Example #1
def load_button_pixbufs(color):
    global BUTTONS_SVG

    if BUTTONS_SVG is None:
        image_path = os.path.join(MODULE_DIR, 'images', 'mouse.svg')
        with open(image_path) as svg_file:
            BUTTONS_SVG = svg_file.readlines()

    if not isinstance(color, str):
        # Gdk.Color
        color = 'rgb({}, {}, {})'.format(round(color.red_float * 255),
                                         round(color.green_float * 255),
                                         round(color.blue_float * 255))
    button_pixbufs = []
    svg = NamedTemporaryFile(mode='w', suffix='.svg')
    for line in BUTTONS_SVG[1:-1]:
        svg.seek(0)
        svg.truncate()
        svg.writelines((
            BUTTONS_SVG[0],
            line.replace('#fff', color),
            BUTTONS_SVG[-1],
        ))
        svg.flush()
        os.fsync(svg.fileno())  # ensure the bytes are on disk before GdkPixbuf opens the file by name
        button_pixbufs.append(GdkPixbuf.Pixbuf.new_from_file(svg.name))
    svg.close()
    return button_pixbufs
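
A note on the idiom above: the loop reuses one NamedTemporaryFile by rewinding and truncating it before each variant, then flushing so an external reader can open it by name. A minimal self-contained sketch (the payload strings are invented for illustration; opening tmp.name while the file is held open may fail on Windows):

from tempfile import NamedTemporaryFile

def render_variants(payloads):
    # Reuse one temp file: rewind, clear, rewrite, then flush so another
    # reader opening tmp.name (like GdkPixbuf above) sees the new content.
    results = []
    with NamedTemporaryFile(mode='w', suffix='.txt') as tmp:
        for payload in payloads:
            tmp.seek(0)
            tmp.truncate()
            tmp.write(payload)
            tmp.flush()
            with open(tmp.name) as reader:
                results.append(reader.read())
    return results

assert render_variants(['first', 'second']) == ['first', 'second']
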
Example #2
    def __init__(self, video_generators, video_format):
        from textwrap import dedent
        from tempfile import NamedTemporaryFile
        from subprocess import CalledProcessError, check_output, STDOUT
        from random import randint
        self.lighttpd_pid = None
        self.video_generators = dict(video_generators)
        video_cache_dir = _gen_video_cache_dir()
        mkdir_p(video_cache_dir)
        lighttpd_config_file = NamedTemporaryFile(
            prefix='stbt-camera-lighttpd-', suffix='.conf', delete=False)
        pidfile = NamedTemporaryFile(prefix="stbt-camera-lighttpd-",
                                     suffix=".pidfile")
        # This is an awful way to start listening on a random port and not a
        # great way of tracking the sub-process.
        port = None
        while port is None:
            try:
                lighttpd_config_file.seek(0)
                lighttpd_config_file.truncate(0)
                try_port = randint(10000, 30000)
                lighttpd_config_file.write(
                    dedent("""\
                    # This file is generated automatically by stb-tester.
                    # DO NOT EDIT.
                    server.document-root = "%s"

                    server.port = %i

                    server.pid-file            = "%s"

                    mimetype.assign = (
                      ".png" => "image/png",
                      ".mp4" => "video/mp4",
                      ".ts" => "video/MP2T"
                    )""") % (video_cache_dir, try_port, pidfile.name))
                lighttpd_config_file.flush()
                check_output(['lighttpd', '-f', lighttpd_config_file.name],
                             close_fds=True,
                             stderr=STDOUT)
                port = try_port
            except CalledProcessError as e:
                if e.output.find('Address already in use') != -1:
                    pass
                else:
                    sys.stderr.write("lighttpd failed to start: %s\n" %
                                     e.output)
                    raise
        # lighttpd writes its pidfile out after forking rather than before
        # causing a race.  The real fix is to patch lighttpd to support socket
        # passing and then open the listening socket ourselves.
        while os.fstat(pidfile.fileno()).st_size == 0:
            sleep(0.1)
        self.lighttpd_pid = int(pidfile.read())
        self.base_url = "http://%s:%i/" % (_get_external_ip(), port)
        self.video_format = video_format
Example #3
    def __init__(self, video_generators, video_format):
        from textwrap import dedent
        from tempfile import NamedTemporaryFile
        from subprocess import CalledProcessError, check_output, STDOUT
        from random import randint
        self.lighttpd_pid = None
        self.video_generators = dict(video_generators)
        video_cache_dir = _gen_video_cache_dir()
        mkdir_p(video_cache_dir)
        lighttpd_config_file = NamedTemporaryFile(
            prefix='stbt-camera-lighttpd-', suffix='.conf', delete=False)
        pidfile = NamedTemporaryFile(
            prefix="stbt-camera-lighttpd-", suffix=".pidfile")
        # This is an awful way to start listening on a random port and not a
        # great way of tracking the sub-process.
        port = None
        while port is None:
            try:
                lighttpd_config_file.seek(0)
                lighttpd_config_file.truncate(0)
                try_port = randint(10000, 30000)
                lighttpd_config_file.write(dedent("""\
                    # This file is generated automatically by stb-tester.
                    # DO NOT EDIT.
                    server.document-root = "%s"

                    server.port = %i

                    server.pid-file            = "%s"

                    mimetype.assign = (
                      ".png" => "image/png",
                      ".mp4" => "video/mp4",
                      ".ts" => "video/MP2T"
                    )""") % (video_cache_dir, try_port, pidfile.name))
                lighttpd_config_file.flush()
                check_output(['lighttpd', '-f', lighttpd_config_file.name],
                             close_fds=True, stderr=STDOUT)
                port = try_port
            except CalledProcessError as e:
                if e.output.find('Address already in use') != -1:
                    pass
                else:
                    sys.stderr.write("lighttpd failed to start: %s\n" %
                                     e.output)
                    raise
        # lighttpd writes its pidfile out after forking rather than before
        # causing a race.  The real fix is to patch lighttpd to support socket
        # passing and then open the listening socket ourselves.
        while os.fstat(pidfile.fileno()).st_size == 0:
            sleep(0.1)
        self.lighttpd_pid = int(pidfile.read())
        self.base_url = "http://%s:%i/" % (_get_external_ip(), port)
        self.video_format = video_format
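
Worth noting in the example above: the config file uses delete=False because lighttpd must re-read it from disk, while the pidfile keeps the default delete-on-close since it only needs to exist while this process polls it. A minimal sketch of the difference (prefixes and contents are illustrative):

import os
from tempfile import NamedTemporaryFile

# Default (delete=True): the file vanishes as soon as it is closed.
scratch = NamedTemporaryFile(prefix='demo-', suffix='.pidfile')
scratch_name = scratch.name
scratch.close()
assert not os.path.exists(scratch_name)

# delete=False: the file outlives close(); the caller must unlink it.
keeper = NamedTemporaryFile(prefix='demo-', suffix='.conf', delete=False)
keeper.write(b'server.port = 8080\n')
keeper.close()
assert os.path.exists(keeper.name)
os.unlink(keeper.name)
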
Example #4
def convert_to_pdf(file: NamedTemporaryFile, mime: str) -> bool:
    '''Convert a file to PDF if necessary.
       Return whether the conversion took place.'''
    if mime == 'application/pdf':
        return False

    unoconv = subprocess.run(['unoconv', '--stdout', '-f', 'pdf', file.name],
                             text=False,
                             capture_output=True,
                             timeout=60,
                             check=True)
    file.seek(0)
    file.write(unoconv.stdout)
    file.truncate()

    return True
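
The seek(0) / write() / truncate() ordering above matters whenever the PDF output can be shorter than the original input: truncate() clips the stale tail left over from the old content. A minimal sketch:

from tempfile import NamedTemporaryFile

with NamedTemporaryFile() as f:
    f.write(b'0123456789')
    f.seek(0)
    f.write(b'abc')
    f.truncate()  # without this, the file would still end with b'3456789'
    f.seek(0)
    assert f.read() == b'abc'
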
Example #5
def process_askue():
    e = Exporter()
    try:
        logging.debug('Trying to connect to FTP server...')
        with FTP(S.FTP_SERVER, S.FTP_USER, S.FTP_PASSWORD, timeout=5) as fc:
            logging.debug('Looking for files in FTP directory')
            # Find files and retrieve it
            inbox_files = fc.mlsd(S.REMS_PATH)
            # 'entry' avoids shadowing the Exporter bound to 'e' above
            filenames = [entry[0] for entry in inbox_files
                         if askue_filename(entry[0])]
            if not filenames:
                logging.info('Inbox directory is empty...')
                return
            if len(filenames) > 1:
                logging.debug(
                    'More than one file was found: {}'.format(
                        '\n'.join(filenames)))
            rfile = max(filenames, key=date_from_filename)
            logging.info('Retrieving {}...'.format(rfile))
            tf = NamedTemporaryFile()
            fc.retrbinary('RETR {}'.format(j(S.REMS_PATH, rfile)), tf.write)
            ftp_pos = tf.tell()
            try:
                if S.APPEND_ON:
                    lines = (record_to_csv(rec) for rec in
                             e.get_routes(datetime.now()))
                    append_lines(tf, lines)
                else:
                    logging.debug(
                        'Will not append lines (switched off in settings)')
            except Exception:
                logging.exception(
                    'Error appending lines to file! Sending as is')
                tf.seek(ftp_pos)
                tf.truncate()
            tf.seek(0)
            dest_path = j(S.IOMM_PATH, rfile)
            # Send file back to FTP
            logging.info('Sending file... {}'.format(dest_path))
            fc.storbinary('STOR {}'.format(dest_path), tf)
            logging.info('Cleaning up directory...')
            for fname in filenames:
                filepath = j(S.REMS_PATH, fname)
                fc.delete(filepath)
    finally:
        e.close_connection()
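
The tf.tell() / seek() / truncate() dance above is a checkpoint-and-rollback idiom: remember the size before appending, and restore it if the append fails. A self-contained sketch:

from tempfile import NamedTemporaryFile

with NamedTemporaryFile() as tf:
    tf.write(b'retrieved FTP payload\n')
    checkpoint = tf.tell()
    try:
        tf.write(b'partially appended rec')
        raise RuntimeError('simulated failure mid-append')
    except RuntimeError:
        tf.seek(checkpoint)
        tf.truncate()  # discard the partial append, keep the original payload
    tf.seek(0)
    assert tf.read() == b'retrieved FTP payload\n'
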
Example #6
def write(data, path):

    with ZipFile(path, 'w', zipfile.ZIP_DEFLATED) as zip:
        content = io.StringIO()
        content.write('Manifest-Version: 1.0\n')
        content.write('Created-By: jamovi\n')
        content.write('Data-Archive-Version: 1.0.2\n')
        content.write('jamovi-Archive-Version: 1.0\n')
        zip.writestr('META-INF/MANIFEST.MF', bytes(content.getvalue(), 'utf-8'), zipfile.ZIP_DEFLATED)

        content = None

        fields = [ ]
        for column in data.dataset:
            field = { }
            field['name'] = column.name
            field['measureType'] = MeasureType.stringify(column.measure_type)
            if column.measure_type == MeasureType.CONTINUOUS:
                field['type'] = 'number'
            else:
                field['type'] = 'integer'
            field['importName'] = column.import_name
            fields.append(field)

        metadata = { }

        metadataset = { }
        metadataset['rowCount'] = data.dataset.row_count
        metadataset['columnCount'] = data.dataset.column_count
        metadataset['fields'] = fields

        metadata['dataSet'] = metadataset

        zip.writestr('metadata.json', json.dumps(metadata), zipfile.ZIP_DEFLATED)

        metadata = None

        xdata = { }
        for column in data.dataset:
            if column.has_levels:
                xdata[column.name] = { 'labels': column.levels }
        zip.writestr('xdata.json', json.dumps(xdata), zipfile.ZIP_DEFLATED)
        xdata = None

        row_count = data.dataset.row_count
        required_bytes = 0
        for column in data.dataset:
            if column.measure_type == MeasureType.CONTINUOUS:
                required_bytes += (8 * row_count)
            else:
                required_bytes += (4 * row_count)

        temp_file = NamedTemporaryFile(delete=False)
        temp_file.truncate(required_bytes)

        for column in data.dataset:
            if column.measure_type == MeasureType.CONTINUOUS:
                for i in range(0, row_count):
                    value = column.raw(i)
                    byts = struct.pack('<d', value)
                    temp_file.write(byts)
            else:
                for i in range(0, row_count):
                    value = column.raw(i)
                    byts = struct.pack('<i', value)
                    temp_file.write(byts)

        temp_file.close()

        zip.write(temp_file.name, 'data.bin')
        os.remove(temp_file.name)

        resources = [ ]

        for analysis in data.analyses:
            if analysis.has_results is False:
                continue
            analysis_dir = '{:02} {}/analysis'.format(analysis.id, analysis.name)
            zip.writestr(analysis_dir, analysis.serialize(), zipfile.ZIP_DEFLATED)
            resources += analysis.resources

        for rel_path in resources:
            abs_path = os.path.join(data.instance_path, rel_path)
            zip.write(abs_path, rel_path)
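
The data.bin step above preallocates with truncate(required_bytes) and then fills the file with fixed-width records from offset 0. A minimal sketch of that layout (three float64 cells):

import os
import struct
from tempfile import NamedTemporaryFile

values = [1.5, 2.5, 3.5]
tmp = NamedTemporaryFile(delete=False)
tmp.truncate(8 * len(values))  # reserve 8 bytes per little-endian double
for value in values:
    tmp.write(struct.pack('<d', value))
tmp.close()

with open(tmp.name, 'rb') as f:
    assert struct.unpack('<3d', f.read()) == (1.5, 2.5, 3.5)
os.remove(tmp.name)
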
Example #7
def generate_blank_syslinux():
    # Generate syslinux.qcow2 in working directory if it isn't already there
    if os.path.isfile("./syslinux.qcow2"):
        print "Found a syslinux.qcow2 image in the working directory - using it"
        return

    print "Generating an empty bootable syslinux image as ./syslinux.qcow2"
    raw_fs_image = NamedTemporaryFile(delete=False)
    raw_image_name = raw_fs_image.name
    try:
        output_image_name = "./syslinux.qcow2"

        # 200 MB sparse file
        outsize = 1024 * 1024 * 200
        raw_fs_image.truncate(outsize)
        raw_fs_image.close()

        # Partition, format and add DOS MBR
        g = guestfs.GuestFS()
        g.add_drive(raw_image_name)
        g.launch()
        g.part_disk("/dev/sda","msdos")
        g.part_set_mbr_id("/dev/sda",1,0xb)
        g.mkfs("vfat", "/dev/sda1")
        g.part_set_bootable("/dev/sda", 1, 1)
        dosmbr = open("/usr/share/syslinux/mbr.bin").read()
        ws = g.pwrite_device("/dev/sda", dosmbr, 0)
        if ws != len(dosmbr):
            raise Exception("Failed to write entire MBR")
        g.sync()
        g.close()

        # Install syslinux - this is the ugly root-requiring part
        gotloop = False
        for n in range(4):
            # If this has a nonzero return code we will take the exception
            (stdout, stderr, retcode) = subprocess_check_output(["losetup","-f"])
            loopdev = stdout.rstrip()
            # Race - Try it a few times and then give up
            try:
                subprocess_check_output(["losetup",loopdev,raw_image_name])
            except:
                sleep(1)
                continue
            gotloop = True
            break

        if not gotloop:
            raise Exception("Failed to setup loopback")

        loopbase = os.path.basename(loopdev)

        try:
            subprocess_check_output(["kpartx","-a",loopdev])
            # On RHEL6 there seems to be a short delay before the mappings actually show up
            sleep(5)
            subprocess_check_output(["syslinux", "/dev/mapper/%sp1" % (loopbase)])
            subprocess_check_output(["kpartx", "-d", loopdev])
            subprocess_check_output(["losetup", "-d", loopdev])
        except:
            print "Exception while executing syslinux install commands."
            raise

        try:
            subprocess_check_output(["qemu-img","convert","-c","-O","qcow2",raw_image_name,output_image_name])
        except:
            print "Exception while converting image to qcow2"

    finally:
        pass
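
The truncate(outsize) call above creates a sparse file: the 200 MB is apparent size only, and blocks are allocated lazily as guestfs writes them. A sketch of the trick (st_blocks is POSIX-specific):

import os
from tempfile import NamedTemporaryFile

raw = NamedTemporaryFile(delete=False)
raw.truncate(1024 * 1024 * 200)  # 200 MB apparent size, no data written
raw.close()

assert os.path.getsize(raw.name) == 1024 * 1024 * 200
# On most filesystems very little is actually allocated:
print(os.stat(raw.name).st_blocks * 512, 'bytes allocated')
os.remove(raw.name)
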
Example #8
def main(directory, consumer_key, consumer_secret, access_token_key, access_token_secret):
    subprocess.call(["xnview", directory])
    
    images = [x for x in os.listdir(directory) if x.endswith(b".jpg")]
    if not images:
        return

    collages = os.path.join(directory, b"collages")
    if not os.path.exists(collages):
        os.mkdir(collages)

    for i in itertools.count():
        collage_dir = os.path.join(collages, b"collage%04d" % i)
        if not os.path.exists(collage_dir):
            os.mkdir(collage_dir)
            break

    collage_path = os.path.join(collage_dir, b"collage.jpg")
    if len(images) == 1:
        shutil.copy(os.path.join(directory, images[0]), collage_path)
    else:
        bigw, bigh = Image.open(os.path.join(directory, images[0])).size
        w, h = map(lambda x: int(x / 2), (bigw, bigh))

        hborder = int(w * 0.05)
        vborder = int(h * 0.05)

        w -= int(hborder / 2)
        h -= int(vborder / 2)

        cw = (w + hborder) * 2 + hborder
        ch = (h + vborder) * (int(len(images) / 2)) + vborder
        if len(images) % 2:
            ch += bigh + vborder

        im = Image.new("RGB", (cw, ch), "black")
        for i, image in enumerate(images):
            im2 = Image.open(os.path.join(directory, image))
            if len(images) % 2 and i == len(images) - 1:
                im.paste(im2, (hborder, vborder + int(len(images) / 2) * (h + vborder)))
            else:
                im2.thumbnail((w, h), Image.ANTIALIAS)
                im.paste(im2, (hborder + (w + hborder) * (i % 2), vborder + int(i / 2) * (h + vborder)))

        im.save(collage_path)

    subprocess.call(["xnview", collage_path])

    f = NamedTemporaryFile()
    f.write(os.path.basename(directory))
    f.flush()

    while True:
        try:
            tweet = subprocess.check_output(["zenity",
                                             "--title", "Tweet",
                                             "--text-info", "--editable",
                                             "--filename", f.name,
                                             "--width", "1280",
                                             "--height", "320",
                                             "--font", "Segoe UI 35"])
        except:
            return

        decoded = ""
        try:
            decoded = tweet.decode("utf-8")
            twitter.Api(consumer_key=consumer_key,
                        consumer_secret=consumer_secret,
                        access_token_key=access_token_key,
                        access_token_secret=access_token_secret).PostMedia(decoded, collage_path)
        except Exception as e:
            # repr() returns str, so build the suffix as str too
            # (b"..." here would raise TypeError on Python 3)
            subprocess.check_call(["zenity", "--error", "--no-markup", "--text",
                                   repr(e) + " (len=%d)" % len(decoded)])
            f.truncate(0)
            f.seek(0)
            f.write(tweet)
            f.flush()
        else:
            break

    for image in images:
        shutil.move(os.path.join(directory, image), collage_dir)
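
The zenity call above works because the temp file is flushed before the child process opens it by name. A minimal sketch of that hand-off (cat stands in for zenity; POSIX only):

import subprocess
from tempfile import NamedTemporaryFile

with NamedTemporaryFile(mode='w', suffix='.txt') as f:
    f.write('seed text for the editor\n')
    f.flush()  # without this the child may see an empty or partial file
    out = subprocess.check_output(['cat', f.name])
assert out == b'seed text for the editor\n'
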
Example #9
    def _resize_partition_on_disk_image(self, additional_bytes):
        # This function expects the partitions to be in disk order: it will
        # fail if there are primary partitions after an extended one containing
        # logical partitions.
        # Resizing logical partitions that are not the last on the disk is not
        # implemented
        assert self.parent is None
        partition_table = self.partition_table
        image = self.image
        # Create a new temporary file of the correct size
        tmp = NamedTemporaryFile(dir=os.path.dirname(image), delete=False)
        tmp.truncate(file_size(image) + additional_bytes)
        tmp.close()
        # Update the partition table
        additional_sectors = additional_bytes // SECTOR_SIZE
        # resize the partition
        self.size += additional_sectors
        # move the partitions after
        for part in partition_table.partitions[self.number:]:
            part.start += additional_sectors
        # update last lba
        if partition_table.lastlba is not None:
            partition_table.lastlba += additional_sectors
        sfdisk(tmp.name, _in=partition_table.get_sfdisk_script(), **SH_OPTS)
        # Now we copy the data from the image to the temporary file
        copy = partial(
            ddd,
            _if=image,
            of=tmp.name,
            bs=1024**2,  # one MiB
            conv="notrunc",
            iflag="count_bytes,skip_bytes",  # count and skip in bytes
            oflag="seek_bytes",  # seek in bytes
        )
        # Preserve GRUB
        copy(count=MBR_BOOTSTRAP_CODE_SIZE)
        # Copy across any data that's located between the MBR and the first
        # partition (some devices rely on the bootloader being there, like the
        # Variscite DART-6UL)
        if self.partition_table.label == "dos":
            copy(
                skip=MBR_SIZE,
                seek=MBR_SIZE,
                count=partition_table.partitions[0].start_bytes - MBR_SIZE,
            )
        elif self.partition_table.label == "gpt":
            copy(
                skip=GPT_SIZE,
                seek=GPT_SIZE,
                count=partition_table.partitions[0].start_bytes - GPT_SIZE,
            )
        # TODO: if we copy an extended partition, there is no need to copy its
        # logical partitions.
        # Copy partitions before and the partition itself
        for part in partition_table.partitions[:self.number]:
            # No need to copy extended partitions, we'll copy their logical
            # partitions
            if not part.is_extended():
                copy(
                    skip=part.start_bytes,
                    seek=part.start_bytes,
                    count=part.size_bytes,
                )
        # Copy partitions after.
        for part in partition_table.partitions[self.number:]:
            if not part.is_extended():
                copy(
                    skip=part.start_bytes,
                    seek=part.start_bytes + additional_bytes,
                    count=part.size_bytes,
                )
        # Replace the original image contents.
        ddd(_if=tmp.name, of=image, bs=1024**2)
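
One detail worth calling out above: dir=os.path.dirname(image) places the temp file on the same filesystem as the image, so copying or renaming back over the original cannot fail with a cross-device error. A sketch of the sizing step (paths are illustrative):

import os
from tempfile import NamedTemporaryFile

image = 'disk.img'  # hypothetical image path
with open(image, 'wb') as f:
    f.truncate(1024)  # pretend 1 KiB image
additional_bytes = 512

tmp = NamedTemporaryFile(dir=os.path.dirname(os.path.abspath(image)),
                         delete=False)
tmp.truncate(os.path.getsize(image) + additional_bytes)
tmp.close()
assert os.path.getsize(tmp.name) == 1536

os.remove(tmp.name)
os.remove(image)
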
Example #10
def build_summary(doc, rouge_settings):

    params = build_rouge_params(rouge_settings)
    match_pattern = "X ROUGE-{} Eval".format(rouge_settings["order"])

    ref_file = get_reference_file(doc)

    input_sentences = get_input_sentences(doc, 25)
    n_inputs = len(input_sentences)
    input_ids = [i for i in xrange(n_inputs)]

    candidate_files = [NamedTemporaryFile("w", delete=False) 
                       for i in xrange(n_inputs)]
    config_lines = ["{} {}".format(cf.name, ref_file.name)
                    for cf in candidate_files]

    config_file = NamedTemporaryFile("w", delete=False)
    
    greedy_summary = ""
    greedy_score = 0

    for z in range(n_inputs):

        cfg_text = "\n".join(config_lines)
        config_file.truncate(len(cfg_text))
        config_file.seek(0)
        config_file.write(cfg_text)
        config_file.flush()

        for i in xrange(len(config_lines)):
            input_id = input_ids[i]
            # 'summary' avoids shadowing the built-in sum()
            summary = "{}{}\n".format(greedy_summary, input_sentences[input_id])
            cf = candidate_files[i]
            cf.truncate(len(summary))
            cf.seek(0)
            cf.write(summary)
            cf.flush()

        output = subprocess.check_output(params + [config_file.name])
        i = 0
        max_score = greedy_score
        max_id = None
        for line in output.split("\n"):
            if line.startswith(match_pattern):

                score = float(line.split()[4][2:])
                if score > max_score:
                    max_score = score
                    max_id = i
                i += 1
        if max_id is not None:
            greedy_score = max_score
            # max_id indexes the candidate list, so map it through input_ids
            # to recover the sentence it refers to before popping
            greedy_summary = "{}{}\n".format(
                greedy_summary, input_sentences[input_ids[max_id]])
            input_ids.pop(max_id)
            config_lines = config_lines[:-1]
        else:
            break
    
    for cf in candidate_files:
        cf.close()
        os.remove(cf.name)
    os.remove(ref_file.name)
    config_file.close()
    os.remove(config_file.name)

    return greedy_summary.strip()
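
Note the ordering in the rewrite loops above: truncate(len(...)) before seek(0) works here only because the new text always overwrites the full truncated length. The more defensive idiom is seek(0) then truncate(), which clears the old content regardless of what gets written next. A sketch:

from tempfile import NamedTemporaryFile

with NamedTemporaryFile(mode='w+') as f:
    for text in ('a longer first payload', 'short'):
        f.seek(0)
        f.truncate()  # clear the old content before writing the new
        f.write(text)
        f.flush()
    f.seek(0)
    assert f.read() == 'short'
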
Example #11
    def download_rules(self):
        # Rules can be downloaded a couple of ways:
        # - From a GraphQL supplying the rule body in rawRule
        # - A file containing either compiled or plain text rule(s)

        from tempfile import NamedTemporaryFile
        from json import dumps as json_dumps
        import shutil
        urllib3.disable_warnings()

        # File into which the rules are saved
        self.temp_rules_file = NamedTemporaryFile(mode='wb')

        if self.rules_location.endswith('.yar'):
            # Assume a rules file and download it
            response = requests.get(self.rules_location,
                                    verify=self.verify,
                                    cookies=self.cookie)
            if response.status_code != 200:
                self.logger.error("%s: %s", response.status_code,
                                  response.text)
                sys.exit(1)
            self.temp_rules_file.write(response.content)
        else:
            # get rules from GraphQL backend, ignoring any disabled ones
            rules_query = """
            query {
                rules(condition: {isDisabled: false}) {
                    nodes {
                        id
                        name
                        rawRule
                    }
                }
            }"""
            response = requests.post(self.rules_location,
                                     json={'query': rules_query},
                                     verify=self.verify,
                                     cookies=self.cookie)
            if response.status_code != 200:
                self.logger.error("%s: %s", response.status_code,
                                  response.text)
                sys.exit(1)

            data = response.json()
            self.logger.debug(json_dumps(data, indent=2))

            # Do a quick test run of each rule and ignore any that cause errors or warnings
            rule_testing_file = NamedTemporaryFile(mode='wb')
            exclude_rules = ['url', 'with_sqlite']  # too spammy
            good_rules, problem_rules = {}, {}
            for rule in data['data']['rules']['nodes']:
                if any([er == rule['name'] for er in exclude_rules]):
                    problem_rules[rule['name']] = rule['id']
                    continue
                if not rule['rawRule']:
                    problem_rules[rule['name']] = rule['id']
                    continue

                # Write rule to a temporary file and run it to make sure it runs without error
                rule_testing_file.seek(0)
                rule_testing_file.write(rule['rawRule'].encode('utf-8'))
                rule_testing_file.truncate()
                rule_testing_file.flush()
                cmd = "nice -n %d %s --fail-on-warnings -p 1 -f %s /dev/null" % \
                      (self.nice, self.yara_binary, rule_testing_file.name)
                try:
                    output = subprocess.check_output(cmd,
                                                     shell=True,
                                                     stderr=subprocess.STDOUT)
                    self.logger.debug("Rule '%s' ok, adding", rule['name'])
                    good_rules[rule['name']] = rule['id']
                    self.temp_rules_file.write(rule['rawRule'].encode('utf-8'))
                except subprocess.CalledProcessError as e:
                    self.logger.warning("Rule '%s' failed: '%s', skipping ...",
                                        rule['name'], e.output)
                    problem_rules[rule['name']] = rule['id']
                    continue

            self.logger.debug("Rules to use ...")
            self.logger.debug(good_rules)
            self.logger.debug("Rules that won't be used ...")
            self.logger.debug(problem_rules)

        self.temp_rules_file.flush()
        # if -S/--save option used, copy the temp file to the user specified file and use that file
        if self.rules_file:
            self.logger.info("Saving rules to %s", self.rules_file)
            shutil.copy(self.temp_rules_file.name, self.rules_file)
            self.temp_rules_file.close()
        else:
            self.rules_file = self.temp_rules_file.name
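
The -S/--save branch above shows a useful pattern: keep working data in a default (delete-on-close) NamedTemporaryFile and let shutil.copy snapshot it only when persistence was requested. A sketch (the rule text and save path are illustrative):

import os
import shutil
from tempfile import NamedTemporaryFile

save_path = 'rules.yar'  # hypothetical -S/--save target
tmp = NamedTemporaryFile(mode='wb')
tmp.write(b'rule demo { condition: true }\n')
tmp.flush()

shutil.copy(tmp.name, save_path)  # the snapshot survives...
tmp.close()                       # ...while the temp file is reclaimed
assert os.path.exists(save_path) and not os.path.exists(tmp.name)
os.remove(save_path)
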
Example #12
def write(data, path):

    with ZipFile(path, "w", zipfile.ZIP_DEFLATED) as zip:
        content = io.StringIO()
        content.write("Manifest-Version: 1.0\n")
        content.write("Created-By: JASP 0.7.5 Beta 2\n")
        content.write("Data-Archive-Version: 1.0.2\n")
        content.write("JASP-Archive-Version: 2.0\n")
        zip.writestr("META-INF/MANIFEST.MF", bytes(content.getvalue(), "utf-8"), zipfile.ZIP_DEFLATED)

        content = None

        fields = []
        for column in data.dataset:
            field = {}
            field["name"] = column.name
            field["measureType"] = MeasureType.stringify(column.measure_type)
            if column.measure_type == MeasureType.CONTINUOUS:
                field["type"] = "number"
            else:
                field["type"] = "integer"
            fields.append(field)

        metadata = {}

        metadataset = {}
        metadataset["rowCount"] = data.dataset.row_count
        metadataset["columnCount"] = data.dataset.column_count
        metadataset["fields"] = fields

        metadata["dataSet"] = metadataset

        zip.writestr("metadata.json", json.dumps(metadata), zipfile.ZIP_DEFLATED)

        metadata = None

        xdata = {}
        for column in data.dataset:
            if column.has_levels:
                xdata[column.name] = {"labels": column.levels}
        zip.writestr("xdata.json", json.dumps(xdata), zipfile.ZIP_DEFLATED)
        xdata = None

        row_count = data.dataset.row_count
        required_bytes = 0
        for column in data.dataset:
            if column.measure_type == MeasureType.CONTINUOUS:
                required_bytes += 8 * row_count
            else:
                required_bytes += 4 * row_count

        temp_file = NamedTemporaryFile(delete=False)
        temp_file.truncate(required_bytes)

        for column in data.dataset:
            if column.measure_type == MeasureType.CONTINUOUS:
                for i in range(0, row_count):
                    value = column.raw(i)
                    byts = struct.pack("<d", value)
                    temp_file.write(byts)
            else:
                for i in range(0, row_count):
                    value = column.raw(i)
                    byts = struct.pack("<i", value)
                    temp_file.write(byts)

        temp_file.close()

        zip.write(temp_file.name, "data.bin")
        os.remove(temp_file.name)
Example #13
def write(data, path, prog_cb, html=None, is_template=False):

    with ZipFile(path, 'w', zipfile.ZIP_DEFLATED) as zip:

        content = io.StringIO()
        content.write('Manifest-Version: 1.0\n')
        content.write('Data-Archive-Version: 1.0.2\n')
        content.write('jamovi-Archive-Version: 9.0\n')
        content.write('Created-By: ' + str(app_info) + '\n')
        zip.writestr('META-INF/MANIFEST.MF',
                     bytes(content.getvalue(), 'utf-8'), zipfile.ZIP_DEFLATED)

        if html is not None:
            zip.writestr('index.html', html)

        content = None
        string_table_required = False

        transforms = []
        for transform in data.transforms:
            transform_field = {}
            transform_field['name'] = transform.name
            transform_field['id'] = transform.id
            transform_field['suffix'] = transform.suffix

            transform_field['formula'] = transform.formula
            transform_field['formulaMessage'] = transform.formula_message
            transform_field['measureType'] = MeasureType.stringify(
                transform.measure_type)

            transform_field['description'] = transform.description

            transforms.append(transform_field)

        fields = []
        for column in data:
            if column.is_virtual is True:
                continue

            field = {}
            field['name'] = column.name
            field['id'] = column.id
            field['columnType'] = ColumnType.stringify(column.column_type)
            field['dataType'] = DataType.stringify(column.data_type)
            field['measureType'] = MeasureType.stringify(column.measure_type)
            field['formula'] = column.formula
            field['formulaMessage'] = column.formula_message
            field['parentId'] = column.parent_id
            field['width'] = column.width

            if column.data_type == DataType.DECIMAL:
                field['type'] = 'number'
            elif column.data_type == DataType.TEXT and column.measure_type == MeasureType.ID:
                field['type'] = 'string'
                string_table_required = True
            else:
                field['type'] = 'integer'

            if column.column_type is ColumnType.OUTPUT:
                field['outputAnalysisId'] = column.output_analysis_id
                field['outputOptionName'] = column.output_option_name
                field['outputName'] = column.output_name
                field['outputDesiredColumnName'] = \
                    column.output_desired_column_name
                field['outputAssignedColumnName'] = \
                    column.output_assigned_column_name

            field['importName'] = column.import_name
            field['description'] = column.description
            field['transform'] = column.transform
            field['edits'] = column.cell_tracker.edited_cell_ranges

            field['missingValues'] = column.missing_values

            if column.is_filter:
                field['filterNo'] = column.filter_no
                # field['hidden'] = column.hidden
                field['active'] = column.active
            else:
                if column.has_levels:
                    field['trimLevels'] = column.trim_levels

            fields.append(field)

        if is_template:
            row_count = 0
        else:
            row_count = data.row_count

        metadata = {}

        metadataset = {}
        metadataset['rowCount'] = row_count
        metadataset['columnCount'] = data.column_count
        metadataset['removedRows'] = data.row_tracker.removed_row_ranges
        metadataset['addedRows'] = data.row_tracker.added_row_ranges
        metadataset['fields'] = fields
        metadataset['transforms'] = transforms

        # if data.import_path is not '':
        #     metadataset['importPath'] = data.import_path
        # if data.embedded_path is not '':
        #     metadataset['embeddedPath'] = data.embedded_path
        # if data.embedded_name is not '':
        #     metadataset['embeddedName'] = data.embedded_name

        metadata['dataSet'] = metadataset

        zip.writestr('metadata.json', json.dumps(metadata),
                     zipfile.ZIP_DEFLATED)

        metadata = None

        xdata = {}
        for column in data:
            if column.is_virtual is True:
                continue
            if column.has_levels:
                xdata[column.name] = {'labels': column.levels}
        zip.writestr('xdata.json', json.dumps(xdata), zipfile.ZIP_DEFLATED)
        xdata = None

        required_bytes = 0
        for column in data:
            if column.is_virtual is True:
                continue
            if column.data_type == DataType.DECIMAL:
                required_bytes += (8 * row_count)
            else:
                required_bytes += (4 * row_count)

        if string_table_required:
            cursor = 0
            string_file = NamedTemporaryFile(delete=False)

        temp_file = NamedTemporaryFile(delete=False)
        temp_file.truncate(required_bytes)

        for col_no in range(data.column_count):
            column = data[col_no]
            if column.is_virtual is True:
                continue
            if column.data_type == DataType.DECIMAL:
                for i in range(0, row_count):
                    value = column.raw(i)
                    byts = struct.pack('<d', value)
                    temp_file.write(byts)
                    if i % 100000 == 0:
                        prog_cb((col_no + i / row_count) / data.column_count)
            elif column.data_type == DataType.TEXT and column.measure_type == MeasureType.ID:
                for i in range(0, row_count):
                    value = column[i]
                    if value != '':
                        byts = value.encode('utf-8')
                        string_file.write(byts)
                        string_file.write(bytes(1))  # bytes(1) == b'\x00', the NUL terminator
                        n = len(byts) + 1
                        byts = struct.pack('<i', cursor)
                        temp_file.write(byts)
                        cursor += n
                    else:
                        byts = struct.pack('<i', -2147483648)
                        temp_file.write(byts)
                    if i % 100000 == 0:
                        prog_cb((col_no + i / row_count) / data.column_count)
            else:
                for i in range(0, row_count):
                    value = column.raw(i)
                    byts = struct.pack('<i', value)
                    temp_file.write(byts)
                    if i % 100000 == 0:
                        prog_cb((col_no + i / row_count) / data.column_count)

        temp_file.close()
        zip.write(temp_file.name, 'data.bin')
        os.remove(temp_file.name)

        if string_table_required:
            string_file.close()
            zip.write(string_file.name, 'strings.bin')
            os.remove(string_file.name)

        resources = []

        for analysis in data.analyses:
            if analysis.has_results is False:
                continue
            analysis_dir = '{:02} {}/analysis'.format(analysis.id,
                                                      analysis.name)
            zip.writestr(analysis_dir,
                         analysis.serialize(strip_content=is_template),
                         zipfile.ZIP_DEFLATED)
            resources += analysis.resources

        for rel_path in resources:
            abs_path = os.path.join(data.instance_path, rel_path)
            zip.write(abs_path, rel_path)
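
The strings.bin/data.bin split above is a classic string table: strings.bin holds NUL-terminated UTF-8 strings, and each string cell in data.bin stores the int32 offset of its entry (INT32_MIN marks a missing value). A sketch of the layout using in-memory buffers:

import struct
from io import BytesIO

strings, offsets, cursor = BytesIO(), BytesIO(), 0
for value in ('alpha', '', 'beta'):
    if value != '':
        byts = value.encode('utf-8')
        strings.write(byts + bytes(1))  # bytes(1) == b'\x00' terminator
        offsets.write(struct.pack('<i', cursor))
        cursor += len(byts) + 1
    else:
        offsets.write(struct.pack('<i', -2147483648))  # missing-value sentinel

assert strings.getvalue() == b'alpha\x00beta\x00'
assert struct.unpack('<3i', offsets.getvalue()) == (0, -2147483648, 6)
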
Example #14
def o4_pyforce(debug, no_revision, args: list, quiet=False):
    """
    Encapsulates Pyforce, does book keeping to ensure that all files
    that should be operated on are in fact dealt with by p4. Handles
    retry and strips out asks for files that are caseful mismatches on
    the current file system (macOS).
    """

    from tempfile import NamedTemporaryFile
    from collections import defaultdict

    class LogAndAbort(Exception):
        'Dumps debug information on errors.'

    o4_log('pyforce', no_revision=no_revision, quiet=quiet, *args)

    tmpf = NamedTemporaryFile(dir='.o4')
    fstats = []
    for line in sys.stdin.read().splitlines():
        if line.startswith('#o4pass'):
            print(line)
            continue
        f = fstat_split(line)
        if f and caseful_accurate(f[F_PATH]):
            fstats.append(f)
        elif f:
            print(
                f"*** WARNING: Pyforce is skipping {f[F_PATH]} because it is casefully",
                "mismatching a local file.",
                file=sys.stderr)
    retries = 3
    head = _depot_path().replace('/...', '')
    while fstats:
        if no_revision:
            p4paths = [Pyforce.escape(f[F_PATH]) for f in fstats]
        else:
            p4paths = [
                f"{Pyforce.escape(f[F_PATH])}#{f[F_REVISION]}" for f in fstats
            ]
        tmpf.seek(0)
        tmpf.truncate()
        not_yet = []
        pargs = []
        xargs = []
        # This is a really bad idea, files are output to stdout before the actual
        # sync happens, causing checksum tests to start too early:
        #        if len(p4paths) > 30 and 'sync' in args:
        #            xargs.append('--parallel=threads=5')
        if sum(len(s) for s in p4paths) > 30000:
            pargs.append('-x')
            pargs.append(tmpf.name)
            for f in p4paths:
                tmpf.write(f.encode('utf8'))
                tmpf.write(b'\n')
            tmpf.flush()
        else:
            xargs.extend(p4paths)
        try:
            # TODO: Verbose
            #print('# PYFORCE({}, {}{})'.format(','.join(repr(a) for a in args), ','.join(
            #    repr(a) for a in paths[:3]), ', ...' if len(paths) > 3 else ''))
            errs = []
            repeats = defaultdict(list)
            infos = []
            for res in Pyforce(*pargs, *args, *xargs):
                if debug:
                    err_print("*** DEBUG: Received", repr(res))
                # FIXME: Delete this if-statement:
                if res.get('code', '') == 'info':
                    infos.append(res)
                    if res.get('data', '').startswith('Diff chunks: '):
                        continue
                if res.get('code', '') == 'error':
                    errs.append(res)
                    continue
                if 'resolveFlag' in res:
                    # TODO: resolveFlag can be ...?
                    #         m: merge
                    #         c: copy from  (not conflict!)
                    # We skip this entry as it is the second returned from p4
                    # for one input file
                    continue
                res_str = res.get('depotFile') or res.get('fromFile')
                if not res_str and res.get('data'):
                    res_str = head + '/' + res['data']
                if not res_str:
                    errs.append(res)
                    continue
                res_str = Pyforce.unescape(res_str)
                for i, f in enumerate(fstats):
                    if f"{head}/{f[F_PATH]}" in res_str:
                        repeats[f"{head}/{f[F_PATH]}"].append(res)
                        not_yet.append(fstats.pop(i))
                        break
                else:
                    for f in repeats.keys():
                        if f in res_str:
                            if debug:
                                err_print(
                                    f"*** DEBUG: REPEAT: {res_str}\n {res}\n {repeats[f]}"
                                )
                            break
                    else:
                        if debug:
                            err_print("*** DEBUG: ERRS APPEND", res)
                        errs.append(res)
            if errs:
                raise LogAndAbort('Unexpected reply from p4')

            if len(p4paths) == len(fstats):
                raise LogAndAbort('Nothing recognized from p4')
        except P4Error as e:
            non_recoverable = False
            for a in e.args:
                if 'clobber writable file' in a['data']:
                    fname = a['data'].split('clobber writable file')[1].strip()
                    print("*** WARNING: Saving writable file as .bak:",
                          fname,
                          file=sys.stderr)
                    if os.path.exists(fname + '.bak'):
                        now = time.time()
                        print(
                            f"*** WARNING: Moved previous .bak to {fname}.{now}",
                            file=sys.stderr)
                        os.rename(fname + '.bak', f'{fname}.bak.{now}')
                    shutil.copy(fname, fname + '.bak')
                    os.chmod(fname, 0o400)
                else:
                    non_recoverable = True
            if non_recoverable:
                raise
        except P4TimeoutError as e:
            e = str(e).replace('\n', ' ')
            print(f"# P4 TIMEOUT, RETRIES {retries}: {e}", file=sys.stderr)
            retries -= 1
            if not retries:
                sys.exit(
                    f"{CLR}*** ERROR: Perforce timed out too many times:\n{e}")
        except LogAndAbort as e:
            import json
            fname = f'debug-pyforce.{os.getpid()}.{int(time.time())}'
            d = {
                'args': args,
                'fstats': fstats,
                'errs': errs,
                'repeats': repeats,
                'infos': infos,
            }
            with open(f'.o4/{fname}', 'wt') as debug_out:
                json.dump(d, debug_out)
            sys.exit(f'{CLR}*** ERROR: {e}; detail in {fname}')
        finally:
            if not quiet:
                for fstat in not_yet:
                    # Printing the fstats after the p4 process has ended, because p4 marshals
                    # its objects before operation, as in "And for my next act... !"
                    # This premature printing leads to false checksum errors during sync.
                    print(fstat_join(fstat))
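
The tmpf handling above uses Perforce's argument-file mechanism: when the path list would exceed a safe command-line length, the paths go one per line into the temp file and are handed to p4 via -x. A simplified sketch of the decision (the limit mirrors the 30000 above; the helper is hypothetical):

from tempfile import NamedTemporaryFile

def build_p4_argv(paths, args=('sync',), limit=30000):
    # Returns the argv plus the temp file, which must stay open (and thus
    # alive) until the p4 subprocess has read it.
    tmpf = NamedTemporaryFile(suffix='.args')
    if sum(len(p) for p in paths) > limit:
        for p in paths:
            tmpf.write(p.encode('utf8') + b'\n')
        tmpf.flush()
        return ['p4', '-x', tmpf.name, *args], tmpf
    return ['p4', *args, *paths], tmpf

argv, holder = build_p4_argv(['//depot/a#3', '//depot/b#7'])
assert argv == ['p4', 'sync', '//depot/a#3', '//depot/b#7']
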
Example #15
class File(object):
    """
    A file wrapper that smooths over some platform-specific
    operations.
    """
    def __init__(self, name=None, readonly=False, **kwargs):
        if name is None:
            self.file = NamedTemporaryFile(**kwargs)
        else:
            if exists(name):
                if readonly:
                    self.file = open(name, 'rb')
                else:
                    self.file = open(name, 'r+b')
            else:
                if readonly:
                    raise OSError('No "%s" found.' % name)
                self.file = open(name, 'w+b')
        if readonly:
            assert self.is_readonly()
        self.has_lock = False

    def get_name(self):
        return self.file.name

    def is_temporary(self):
        return isinstance(self.file, _TemporaryFileWrapper)

    def is_readonly(self):
        return self.file.mode == 'rb'

    def seek(self, n, whence=0):
        self.file.seek(n, whence)
        if whence == 0:
            assert self.file.tell() == n

    def seek_end(self):
        self.file.seek(0, 2)

    def read(self, n=None):
        if n is None:
            return self.file.read()
        else:
            return self.file.read(n)

    def tell(self):
        return self.file.tell()

    def stat(self):
        return os.stat(self.get_name())

    def __len__(self):
        return self.stat().st_size

    def rename(self, name):
        old_name = self.get_name()
        if name == old_name:
            return
        assert not self.is_temporary()
        self.obtain_lock()
        self.close()
        if exists(name):
            os.unlink(name)
        os.rename(old_name, name)
        self.file = open(name, 'r+b')
        self.obtain_lock()

    def obtain_lock(self):
        """
        Make sure that we have an exclusive lock on self.file before
        doing a write.
        If the lock is not available, raise an exception.
        """
        assert not self.is_readonly()
        if not self.has_lock:
            if os.name == 'nt':
                try:
                    win32file.LockFileEx(
                        win32file._get_osfhandle(self.file.fileno()),
                        (win32con.LOCKFILE_EXCLUSIVE_LOCK
                         | win32con.LOCKFILE_FAIL_IMMEDIATELY), 0, -65536,
                        pywintypes.OVERLAPPED())
                except pywintypes.error:
                    raise IOError("Unable to obtain lock")
            else:
                fcntl.flock(self.file, fcntl.LOCK_EX | fcntl.LOCK_NB)
            self.has_lock = True

    def release_lock(self):
        """
        Make sure that we do not retain an exclusive lock on self.file.
        """
        if self.has_lock:
            if os.name == 'nt':
                win32file.UnlockFileEx(
                    win32file._get_osfhandle(self.file.fileno()), 0, -65536,
                    pywintypes.OVERLAPPED())
            else:
                fcntl.flock(self.file, fcntl.LOCK_UN)
            self.has_lock = False

    def write(self, s):
        self.obtain_lock()
        self.file.write(s)
        if os.name == 'nt':
            # This flush helps the file know where it ends.
            self.file.flush()

    def truncate(self):
        self.obtain_lock()
        self.file.truncate()

    def close(self):
        self.release_lock()
        self.file.close()

    def flush(self):
        self.file.flush()

    def fsync(self):
        if hasattr(os, 'fsync'):
            os.fsync(self.file)
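
is_temporary() above leans on a private detail: NamedTemporaryFile returns a tempfile._TemporaryFileWrapper, so an isinstance check distinguishes temp-backed files from ones opened on a real path. A sketch (note _TemporaryFileWrapper is not public API, and opening tmp.name while it is held open is POSIX-only):

from tempfile import NamedTemporaryFile, _TemporaryFileWrapper

tmp = NamedTemporaryFile()
assert isinstance(tmp, _TemporaryFileWrapper)

with open(tmp.name, 'rb') as regular:
    assert not isinstance(regular, _TemporaryFileWrapper)
tmp.close()
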
Example #16
def generate_blank_syslinux():
    # Generate syslinux.qcow2 in working directory if it isn't already there
    if os.path.isfile("./syslinux.qcow2"):
        print "Found a syslinux.qcow2 image in the working directory - using it"
        return

    print "Generating an empty bootable syslinux image as ./syslinux.qcow2"
    raw_fs_image = NamedTemporaryFile(delete=False)
    raw_image_name = raw_fs_image.name
    try:
        output_image_name = "./syslinux.qcow2"

        # 200 MB sparse file
        outsize = 1024 * 1024 * 200
        raw_fs_image.truncate(outsize)
        raw_fs_image.close()

        # Partition, format and add DOS MBR
        g = guestfs.GuestFS()
        g.add_drive(raw_image_name)
        g.launch()
        g.part_disk("/dev/sda", "msdos")
        g.part_set_mbr_id("/dev/sda", 1, 0xb)
        g.mkfs("vfat", "/dev/sda1")
        g.part_set_bootable("/dev/sda", 1, 1)
        dosmbr = open("/usr/share/syslinux/mbr.bin").read()
        ws = g.pwrite_device("/dev/sda", dosmbr, 0)
        if ws != len(dosmbr):
            raise Exception("Failed to write entire MBR")
        g.sync()
        g.close()

        # Install syslinux - this is the ugly root-requiring part
        gotloop = False
        for n in range(4):
            # If this has a nonzero return code we will take the exception
            (stdout, stderr,
             retcode) = subprocess_check_output(["losetup", "-f"])
            loopdev = stdout.rstrip()
            # Race - Try it a few times and then give up
            try:
                subprocess_check_output(["losetup", loopdev, raw_image_name])
            except:
                sleep(1)
                continue
            gotloop = True
            break

        if not gotloop:
            raise Exception("Failed to setup loopback")

        loopbase = os.path.basename(loopdev)

        try:
            subprocess_check_output(["kpartx", "-a", loopdev])
            # On RHEL6 there seems to be a short delay before the mappings actually show up
            sleep(5)
            subprocess_check_output(
                ["syslinux", "/dev/mapper/%sp1" % (loopbase)])
            subprocess_check_output(["kpartx", "-d", loopdev])
            subprocess_check_output(["losetup", "-d", loopdev])
        except:
            print "Exception while executing syslinux install commands."
            raise

        try:
            subprocess_check_output([
                "qemu-img", "convert", "-c", "-O", "qcow2", raw_image_name,
                output_image_name
            ])
        except:
            print "Exception while converting image to qcow2"

    finally:
        pass
Example #17
def write(data, path):

    with ZipFile(path, 'w', zipfile.ZIP_DEFLATED) as zip:
        content = io.StringIO()
        content.write('Manifest-Version: 1.0\n')
        content.write('Created-By: jamovi\n')
        content.write('Data-Archive-Version: 1.0.2\n')
        content.write('jamovi-Archive-Version: 1.0\n')
        zip.writestr('META-INF/MANIFEST.MF',
                     bytes(content.getvalue(), 'utf-8'), zipfile.ZIP_DEFLATED)

        content = None

        fields = []
        for column in data.dataset:
            field = {}
            field['name'] = column.name
            field['measureType'] = MeasureType.stringify(column.measure_type)
            if column.measure_type == MeasureType.CONTINUOUS:
                field['type'] = 'number'
            else:
                field['type'] = 'integer'
            field['importName'] = column.import_name
            fields.append(field)

        metadata = {}

        metadataset = {}
        metadataset['rowCount'] = data.dataset.row_count
        metadataset['columnCount'] = data.dataset.column_count
        metadataset['fields'] = fields

        metadata['dataSet'] = metadataset

        zip.writestr('metadata.json', json.dumps(metadata),
                     zipfile.ZIP_DEFLATED)

        metadata = None

        xdata = {}
        for column in data.dataset:
            if column.has_levels:
                xdata[column.name] = {'labels': column.levels}
        zip.writestr('xdata.json', json.dumps(xdata), zipfile.ZIP_DEFLATED)
        xdata = None

        row_count = data.dataset.row_count
        required_bytes = 0
        for column in data.dataset:
            if column.measure_type == MeasureType.CONTINUOUS:
                required_bytes += (8 * row_count)
            else:
                required_bytes += (4 * row_count)

        temp_file = NamedTemporaryFile(delete=False)
        temp_file.truncate(required_bytes)

        for column in data.dataset:
            if column.measure_type == MeasureType.CONTINUOUS:
                for i in range(0, row_count):
                    value = column.raw(i)
                    byts = struct.pack('<d', value)
                    temp_file.write(byts)
            else:
                for i in range(0, row_count):
                    value = column.raw(i)
                    byts = struct.pack('<i', value)
                    temp_file.write(byts)

        temp_file.close()

        zip.write(temp_file.name, 'data.bin')
        os.remove(temp_file.name)

        resources = []

        for analysis in data.analyses:
            if analysis.has_results is False:
                continue
            analysis_dir = '{:02} {}/analysis'.format(analysis.id,
                                                      analysis.name)
            zip.writestr(analysis_dir, analysis.serialize(),
                         zipfile.ZIP_DEFLATED)
            resources += analysis.resources

        for rel_path in resources:
            abs_path = os.path.join(data.instance_path, rel_path)
            zip.write(abs_path, rel_path)

    data.title = os.path.splitext(os.path.basename(path))[0]
    data.path = path
Example #18
def write(data, path, prog_cb):

    with ZipFile(path, 'w', zipfile.ZIP_DEFLATED) as zip:
        content = io.StringIO()
        content.write('Manifest-Version: 1.0\n')
        content.write('Created-By: JASP 0.7.5 Beta 2\n')
        content.write('Data-Archive-Version: 1.0.2\n')
        content.write('JASP-Archive-Version: 2.0\n')
        zip.writestr('META-INF/MANIFEST.MF', bytes(content.getvalue(), 'utf-8'), zipfile.ZIP_DEFLATED)

        content = None

        fields = [ ]
        for column in data.dataset:
            field = { }
            field['name'] = column.name
            field['measureType'] = MeasureType.stringify(column.measure_type)
            if column.measure_type == MeasureType.CONTINUOUS:
                field['type'] = 'number'
            else:
                field['type'] = 'integer'
            fields.append(field)

        metadata = { }

        metadataset = { }
        metadataset['rowCount'] = data.dataset.row_count
        metadataset['columnCount'] = data.dataset.column_count
        metadataset['fields'] = fields

        metadata['dataSet'] = metadataset

        zip.writestr('metadata.json', json.dumps(metadata), zipfile.ZIP_DEFLATED)

        metadata = None

        xdata = { }
        for column in data.dataset:
            if column.has_levels:
                xdata[column.name] = { 'labels': column.levels }
        zip.writestr('xdata.json', json.dumps(xdata), zipfile.ZIP_DEFLATED)
        xdata = None

        row_count = data.dataset.row_count
        required_bytes = 0
        for column in data.dataset:
            if column.measure_type == MeasureType.CONTINUOUS:
                required_bytes += (8 * row_count)
            else:
                required_bytes += (4 * row_count)

        temp_file = NamedTemporaryFile(delete=False)
        temp_file.truncate(required_bytes)

        for column in data.dataset:
            if column.measure_type == MeasureType.CONTINUOUS:
                for i in range(0, row_count):
                    value = column.raw(i)
                    byts = struct.pack('<d', value)
                    temp_file.write(byts)
            else:
                for i in range(0, row_count):
                    value = column.raw(i)
                    byts = struct.pack('<i', value)
                    temp_file.write(byts)

        temp_file.close()

        zip.write(temp_file.name, 'data.bin')
        os.remove(temp_file.name)
Example #19
class File (object):
    """
    A file wrapper that smooths over some platform-specific
    operations.
    """
    def __init__(self, name=None, readonly=False, **kwargs):
        if name is None:
            self.file = NamedTemporaryFile(**kwargs)
        else:
            if exists(name):
                if readonly:
                    self.file = open(name, 'rb')
                else:
                    self.file = open(name, 'r+b')
            else:
                if readonly:
                    raise OSError('No "%s" found.' % name)
                self.file = open(name, 'w+b')
        if readonly:
            assert self.is_readonly()
        self.has_lock = False

    def get_name(self):
        return self.file.name

    def is_temporary(self):
        return isinstance(self.file, _TemporaryFileWrapper)

    def is_readonly(self):
        return self.file.mode == 'rb'

    def seek(self, n, whence=0):
        self.file.seek(n, whence)
        if whence == 0:
            assert self.file.tell() == n

    def seek_end(self):
        self.file.seek(0, 2)

    def read(self, n=None):
        if n is None:
            return self.file.read()
        else:
            return self.file.read(n)

    def tell(self):
        return self.file.tell()

    def stat(self):
        return os.stat(self.get_name())

    def __len__(self):
        return self.stat().st_size

    def rename(self, name):
        old_name = self.get_name()
        if name == old_name:
            return
        assert not self.is_temporary()
        self.obtain_lock()
        self.close()
        if exists(name):
            os.unlink(name)
        os.rename(old_name, name)
        self.file = open(name, 'r+b')
        self.obtain_lock()

    def obtain_lock(self):
        """
        Make sure that we have an exclusive lock on self.file before
        doing a write.
        If the lock is not available, raise an exception.
        """
        assert not self.is_readonly()
        if not self.has_lock:
            if os.name == 'nt':
                try:
                    win32file.LockFileEx(
                        win32file._get_osfhandle(self.file.fileno()),
                        (win32con.LOCKFILE_EXCLUSIVE_LOCK |
                         win32con.LOCKFILE_FAIL_IMMEDIATELY),
                        0, -65536, pywintypes.OVERLAPPED())
                except pywintypes.error:
                    raise IOError("Unable to obtain lock")
            else:
                fcntl.flock(self.file, fcntl.LOCK_EX | fcntl.LOCK_NB)
            self.has_lock = True

    def release_lock(self):
        """
        Make sure that we do not retain an exclusive lock on self.file.
        """
        if self.has_lock:
            if os.name == 'nt':
                win32file.UnlockFileEx(
                    win32file._get_osfhandle(self.file.fileno()),
                    0, -65536, pywintypes.OVERLAPPED())
            else:
                fcntl.flock(self.file, fcntl.LOCK_UN)
            self.has_lock = False

    def write(self, s):
        self.obtain_lock()
        self.file.write(s)
        # This flush helps the file know where it ends.
        self.file.flush()

    def truncate(self):
        self.obtain_lock()
        self.file.truncate()

    def close(self):
        self.release_lock()
        self.file.close()

    def flush(self):
        self.file.flush()

    def fsync(self):
        if hasattr(os, 'fsync'):
            os.fsync(self.file.fileno())
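Assuming the File class above is importable, typical use looks like the following; the exclusive lock is taken implicitly before the first write (via fcntl.flock on POSIX, win32file.LockFileEx on Windows) and released on close():

f = File()                  # no name given: backed by a NamedTemporaryFile
f.write(b'hello')           # obtain_lock() runs before the write
f.seek(0)
assert f.read() == b'hello'
assert f.is_temporary()     # True: the underlying file is a _TemporaryFileWrapper
f.close()                   # release_lock() runs before the file is closed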
Example #20
0
def write(dataset, path):

    with ZipFile(path, 'w', zipfile.ZIP_DEFLATED) as zip:
        content = io.StringIO()
        content.write('Manifest-Version: 1.0\n')
        content.write('Created-By: JASP 0.7.5 Beta 2\n')
        content.write('Data-Archive-Version: 1.0.2\n')
        content.write('JASP-Archive-Version: 2.0\n')
        zip.writestr('META-INF/MANIFEST.MF', bytes(content.getvalue(), 'utf-8'), zipfile.ZIP_DEFLATED)

        content = None

        fields = [ ]
        for column in dataset:
            field = { }
            field['name'] = column.name
            field['measureType'] = MeasureType.stringify(column.type)
            if column.type == MeasureType.CONTINUOUS:
                field['type'] = 'number'
            else:
                field['type'] = 'integer'
            fields.append(field)

        metadata = { }

        metadataset = { }
        metadataset['rowCount'] = dataset.row_count
        metadataset['columnCount'] = dataset.column_count
        metadataset['fields'] = fields

        metadata['dataSet'] = metadataset

        zip.writestr('metadata.json', json.dumps(metadata), zipfile.ZIP_DEFLATED)

        metadata = None

        xdata = { }
        for column in dataset:
            if column.has_labels:
                xdata[column.name] = column.labels
        zip.writestr('xdata.json', json.dumps(xdata), zipfile.ZIP_DEFLATED)
        xdata = None

        row_count = dataset.row_count
        required_bytes = 0
        for column in dataset:
            if column.type == MeasureType.CONTINUOUS:
                required_bytes += (8 * row_count)
            else:
                required_bytes += (4 * row_count)

        temp_file = NamedTemporaryFile(delete=False)
        temp_file.truncate(required_bytes)

        for column in dataset:
            if column.type == MeasureType.CONTINUOUS:
                for i in range(0, row_count):
                    value = column.raw(i)
                    byts = struct.pack('<d', value)
                    temp_file.write(byts)
            else:
                for i in range(0, row_count):
                    value = column.raw(i)
                    byts = struct.pack('<i', value)
                    temp_file.write(byts)

        temp_file.close()

        zip.write(temp_file.name, 'data.bin')
        os.remove(temp_file.name)
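Because data.bin is packed column by column with fixed-width cells (8-byte '<d' doubles for continuous columns, 4-byte '<i' integers otherwise), a reader needs the fields list from metadata.json to walk it. A hedged sketch of the reverse operation, assuming fields and row_count have already been parsed from that JSON:

import struct

def read_columns(blob, fields, row_count):
    # Split the packed column-major blob back into per-column lists.
    columns, offset = {}, 0
    for field in fields:
        fmt = '<d' if field['type'] == 'number' else '<i'
        width = struct.calcsize(fmt)
        cells = [struct.unpack_from(fmt, blob, offset + width * i)[0]
                 for i in range(row_count)]
        offset += width * row_count
        columns[field['name']] = cells
    return columns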
Example #21
0
def write(data, path, html=None):

    with ZipFile(path, 'w', zipfile.ZIP_DEFLATED) as zip:

        content = io.StringIO()
        content.write('Manifest-Version: 1.0\n')
        content.write('Data-Archive-Version: 1.0.2\n')
        content.write('jamovi-Archive-Version: 5.0\n')
        content.write('Created-By: ' + str(app_info) + '\n')
        zip.writestr('META-INF/MANIFEST.MF',
                     bytes(content.getvalue(), 'utf-8'), zipfile.ZIP_DEFLATED)

        if html is not None:
            zip.writestr('index.html', html)

        content = None
        string_table_required = False

        fields = []
        for column in data:
            if column.is_virtual is True:
                continue

            field = {}
            field['name'] = column.name
            field['columnType'] = ColumnType.stringify(column.column_type)
            field['dataType'] = DataType.stringify(column.data_type)
            field['measureType'] = MeasureType.stringify(column.measure_type)
            field['formula'] = column.formula
            field['formulaMessage'] = column.formula_message
            if column.data_type == DataType.DECIMAL:
                field['type'] = 'number'
            elif column.data_type == DataType.TEXT and column.measure_type == MeasureType.ID:
                field['type'] = 'string'
                string_table_required = True
            else:
                field['type'] = 'integer'
            field['importName'] = column.import_name
            field['description'] = column.description

            if column.is_filter:
                field['filterNo'] = column.filter_no
                field['hidden'] = column.hidden
                field['active'] = column.active
            else:
                if column.has_levels:
                    field['trimLevels'] = column.trim_levels

            fields.append(field)

        metadata = {}

        metadataset = {}
        metadataset['rowCount'] = data.row_count
        metadataset['columnCount'] = data.column_count
        metadataset['fields'] = fields

        # if data.import_path is not '':
        #     metadataset['importPath'] = data.import_path
        # if data.embedded_path is not '':
        #     metadataset['embeddedPath'] = data.embedded_path
        # if data.embedded_name is not '':
        #     metadataset['embeddedName'] = data.embedded_name

        metadata['dataSet'] = metadataset

        zip.writestr('metadata.json', json.dumps(metadata),
                     zipfile.ZIP_DEFLATED)

        metadata = None

        xdata = {}
        for column in data:
            if column.is_virtual is True:
                continue
            if column.has_levels:
                xdata[column.name] = {'labels': column.levels}
        zip.writestr('xdata.json', json.dumps(xdata), zipfile.ZIP_DEFLATED)
        xdata = None

        row_count = data.row_count
        required_bytes = 0
        for column in data:
            if column.is_virtual is True:
                continue
            if column.data_type == DataType.DECIMAL:
                required_bytes += (8 * row_count)
            else:
                required_bytes += (4 * row_count)

        if string_table_required:
            cursor = 0
            string_file = NamedTemporaryFile(delete=False)

        temp_file = NamedTemporaryFile(delete=False)
        temp_file.truncate(required_bytes)

        for column in data:
            if column.is_virtual is True:
                continue
            if column.data_type == DataType.DECIMAL:
                for i in range(0, row_count):
                    value = column.raw(i)
                    byts = struct.pack('<d', value)
                    temp_file.write(byts)
            elif column.data_type == DataType.TEXT and column.measure_type == MeasureType.ID:
                for i in range(0, row_count):
                    value = column[i]
                    if value != '':
                        string_file.write(value.encode('utf-8'))
                        string_file.write(bytes(1))  # bytes(1) == b'\x00': NUL terminator
                        n = len(value) + 1
                        byts = struct.pack('<i', cursor)
                        temp_file.write(byts)
                        cursor += n
                    else:
                        byts = struct.pack('<i', -2147483648)
                        temp_file.write(byts)
            else:
                for i in range(0, row_count):
                    value = column.raw(i)
                    byts = struct.pack('<i', value)
                    temp_file.write(byts)

        temp_file.close()
        zip.write(temp_file.name, 'data.bin')
        os.remove(temp_file.name)

        if string_table_required:
            string_file.close()
            zip.write(string_file.name, 'strings.bin')
            os.remove(string_file.name)

        resources = []

        for analysis in data.analyses:
            if analysis.has_results is False:
                continue
            analysis_dir = '{:02} {}/analysis'.format(analysis.id,
                                                      analysis.name)
            zip.writestr(analysis_dir, analysis.serialize(),
                         zipfile.ZIP_DEFLATED)
            resources += analysis.resources

        for rel_path in resources:
            abs_path = os.path.join(data.instance_path, rel_path)
            zip.write(abs_path, rel_path)
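For the text/ID columns above, each data.bin cell holds a little-endian int32 offset into strings.bin (or -2147483648 as the missing-value sentinel), and each string in strings.bin is UTF-8 followed by a single zero byte. A sketch of resolving one cell under those assumptions:

import struct

MISSING = -2147483648  # sentinel written for empty cells above

def resolve_cell(cell_bytes, string_table):
    # cell_bytes: 4 bytes from data.bin; string_table: full contents of strings.bin
    offset = struct.unpack('<i', cell_bytes)[0]
    if offset == MISSING:
        return ''
    end = string_table.index(b'\x00', offset)  # strings are NUL-terminated
    return string_table[offset:end].decode('utf-8')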
Example #22
0
    def create_syslinux_stub(self, image_name, cmdline, kernel_filename, ramdisk_filename):
        """
        Build a bootable syslinux stub image and upload it to glance.

        @param image_name: name for the resulting glance image
        @param cmdline: kernel command line
        @param kernel_filename: path to kernel file
        @param ramdisk_filename: path to ramdisk file
        @return glance image id
        """

        raw_fs_image = NamedTemporaryFile(delete=False)
        raw_image_name = raw_fs_image.name
        tmp_content_dir = None
        glance_image_id = None
        try:
            qcow2_image_name = "%s.qcow2" % raw_image_name

            # 200 MB sparse file
            self.log.debug("Creating sparse 200 MB file")
            outsize = 1024 * 1024 * 200
            raw_fs_image.truncate(outsize)
            raw_fs_image.close()

            # Partition, format and add DOS MBR
            g = guestfs.GuestFS()
            g.add_drive(raw_image_name)
            g.launch()
            g.part_disk("/dev/sda", "msdos")
            g.part_set_mbr_id("/dev/sda", 1, 0xb)
            g.mkfs("vfat", "/dev/sda1")
            g.part_set_bootable("/dev/sda", 1, 1)
            dosmbr = open("/usr/share/syslinux/mbr.bin").read()
            ws = g.pwrite_device("/dev/sda", dosmbr, 0)
            if ws != len(dosmbr):
                raise Exception("Failed to write entire MBR")
            # Install syslinux
            g.syslinux("/dev/sda1")

            # Insert kernel, ramdisk and syslinux.cfg file
            tmp_content_dir = mkdtemp()

            kernel_dest = os.path.join(tmp_content_dir, "vmlinuz")
            shutil.copy(kernel_filename, kernel_dest)

            initrd_dest = os.path.join(tmp_content_dir, "initrd.img")
            shutil.copy(ramdisk_filename, initrd_dest)

            syslinux_conf="""default customhd
        timeout 30
        prompt 1
        label customhd
          kernel vmlinuz
          append initrd=initrd.img %s
        """ % (cmdline)
            
            f = open(os.path.join(tmp_content_dir, "syslinux.cfg"), "w")
            f.write(syslinux_conf)
            f.close()

            # copy the tmp content to the image
            g.mount_options("", "/dev/sda1", "/")
            for filename in os.listdir(tmp_content_dir):
                g.upload(os.path.join(tmp_content_dir, filename), "/" + filename)
            g.sync()
            g.close()
            try:
                self.log.debug("Converting syslinux stub image from raw to qcow2")
                self._subprocess_check_output(["qemu-img","convert","-c","-O","qcow2",raw_image_name, qcow2_image_name])
                self.log.debug("Uploading syslinux qcow2 image to glance")
                glance_image_id = self.env.upload_image_to_glance(image_name, local_path=qcow2_image_name, format='qcow2')
            except Exception as e:
                self.log.debug("Exception while converting syslinux image to qcow2: %s" % e)
                self.log.debug("Uploading syslinux raw image to glance.")
                glance_image_id = self.env.upload_image_to_glance(image_name, local_path=raw_image_name, format='raw')

        finally:
            self.log.debug("Removing temporary file.")
            if os.path.exists(raw_image_name):
                os.remove(raw_image_name)
            if os.path.exists(qcow2_image_name):
                os.remove(qcow2_image_name)
            if tmp_content_dir:
                shutil.rmtree(tmp_content_dir)

        return glance_image_id
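The "sparse 200 MB file" above costs almost no disk: truncate() only sets the logical size, and filesystems that support holes allocate blocks lazily. A standalone illustration (st_blocks is POSIX-only):

import os
from tempfile import NamedTemporaryFile

img = NamedTemporaryFile(delete=False)
img.truncate(1024 * 1024 * 200)   # logical size 200 MB; no data actually written
img.close()
st = os.stat(img.name)
print(st.st_size)                 # 209715200
print(st.st_blocks * 512)         # usually far smaller: the file is all holes
os.remove(img.name)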
Example #23
0
class LazyZipOverHTTP:
    """File-like object mapped to a ZIP file over HTTP.

    This uses HTTP range requests to lazily fetch the file's content,
    which is supposed to be fed to ZipFile.  If such requests are not
    supported by the server, raise HTTPRangeRequestUnsupported
    during initialization.
    """
    def __init__(self,
                 url: str,
                 session: PipSession,
                 chunk_size: int = CONTENT_CHUNK_SIZE) -> None:
        head = session.head(url, headers=HEADERS)
        raise_for_status(head)
        assert head.status_code == 200
        self._session, self._url, self._chunk_size = session, url, chunk_size
        self._length = int(head.headers["Content-Length"])
        self._file = NamedTemporaryFile()
        self.truncate(self._length)
        self._left: List[int] = []
        self._right: List[int] = []
        if "bytes" not in head.headers.get("Accept-Ranges", "none"):
            raise HTTPRangeRequestUnsupported("range request is not supported")
        self._check_zip()

    @property
    def mode(self) -> str:
        """Opening mode, which is always rb."""
        return "rb"

    @property
    def name(self) -> str:
        """Path to the underlying file."""
        return self._file.name

    def seekable(self) -> bool:
        """Return whether random access is supported, which is True."""
        return True

    def close(self) -> None:
        """Close the file."""
        self._file.close()

    @property
    def closed(self) -> bool:
        """Whether the file is closed."""
        return self._file.closed

    def read(self, size: int = -1) -> bytes:
        """Read up to size bytes from the object and return them.

        As a convenience, if size is unspecified or -1,
        all bytes until EOF are returned.  Fewer than
        size bytes may be returned if EOF is reached.
        """
        download_size = max(size, self._chunk_size)
        start, length = self.tell(), self._length
        stop = length if size < 0 else min(start + download_size, length)
        start = max(0, stop - download_size)
        self._download(start, stop - 1)
        return self._file.read(size)

    def readable(self) -> bool:
        """Return whether the file is readable, which is True."""
        return True

    def seek(self, offset: int, whence: int = 0) -> int:
        """Change stream position and return the new absolute position.

        Seek to offset, relative to the position indicated by whence:
        * 0: Start of stream (the default); offset should be >= 0.
        * 1: Current position; offset may be negative.
        * 2: End of stream; offset is usually negative.
        """
        return self._file.seek(offset, whence)

    def tell(self) -> int:
        """Return the current position."""
        return self._file.tell()

    def truncate(self, size: Optional[int] = None) -> int:
        """Resize the stream to the given size in bytes.

        If size is unspecified resize to the current position.
        The current stream position isn't changed.

        Return the new file size.
        """
        return self._file.truncate(size)

    def writable(self) -> bool:
        """Return False."""
        return False

    def __enter__(self) -> "LazyZipOverHTTP":
        self._file.__enter__()
        return self

    def __exit__(self, *exc: Any) -> Optional[bool]:
        return self._file.__exit__(*exc)

    @contextmanager
    def _stay(self) -> Iterator[None]:
        """Return a context manager keeping the position.

        At the end of the block, seek back to original position.
        """
        pos = self.tell()
        try:
            yield
        finally:
            self.seek(pos)

    def _check_zip(self) -> None:
        """Check and download until the file is a valid ZIP."""
        end = self._length - 1
        for start in reversed(range(0, end, self._chunk_size)):
            self._download(start, end)
            with self._stay():
                try:
                    # For read-only ZIP files, ZipFile only needs
                    # methods read, seek, seekable and tell.
                    ZipFile(self)  # type: ignore
                except BadZipfile:
                    pass
                else:
                    break

    def _stream_response(self,
                         start: int,
                         end: int,
                         base_headers: Dict[str, str] = HEADERS) -> Response:
        """Return HTTP response to a range request from start to end."""
        headers = base_headers.copy()
        headers["Range"] = f"bytes={start}-{end}"
        # TODO: Get range requests to be correctly cached
        headers["Cache-Control"] = "no-cache"
        return self._session.get(self._url, headers=headers, stream=True)

    def _merge(self, start: int, end: int, left: int,
               right: int) -> Iterator[Tuple[int, int]]:
        """Return an iterator of intervals to be fetched.

        Args:
            start (int): Start of needed interval
            end (int): End of needed interval
            left (int): Index of first overlapping downloaded data
            right (int): Index after last overlapping downloaded data
        """
        lslice, rslice = self._left[left:right], self._right[left:right]
        i = start = min([start] + lslice[:1])
        end = max([end] + rslice[-1:])
        for j, k in zip(lslice, rslice):
            if j > i:
                yield i, j - 1
            i = k + 1
        if i <= end:
            yield i, end
        self._left[left:right], self._right[left:right] = [start], [end]

    def _download(self, start: int, end: int) -> None:
        """Download bytes from start to end inclusively."""
        with self._stay():
            left = bisect_left(self._right, start)
            right = bisect_right(self._left, end)
            for start, end in self._merge(start, end, left, right):
                response = self._stream_response(start, end)
                response.raise_for_status()
                self.seek(start)
                for chunk in response_chunks(response, self._chunk_size):
                    self._file.write(chunk)
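In use, the object is handed straight to ZipFile, which then issues range requests only for the bytes it actually touches (the central directory, plus whichever members are read). A hedged sketch, where the URL is hypothetical and session is an assumed PipSession instance:

from zipfile import ZipFile

url = "https://example.com/pkg-1.0-py3-none-any.whl"   # hypothetical wheel URL
# session: any PipSession whose server supports HTTP range requests
with LazyZipOverHTTP(url, session) as lazy:
    with ZipFile(lazy) as wheel:                       # reads only the end of the archive
        print(wheel.namelist())                        # member list, no full download
        info = wheel.infolist()[0]                     # pick some member
        print(wheel.read(info).decode('utf-8', 'replace'))  # fetches just that member's ranges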
Example #24
0
class LazyZipOverHTTP(object):
    """File-like object mapped to a ZIP file over HTTP.

    This uses HTTP range requests to lazily fetch the file's content,
    which is supposed to be fed to ZipFile.  If such requests are not
    supported by the server, raise HTTPRangeRequestUnsupported
    during initialization.
    """

    def __init__(self, url, session, chunk_size=CONTENT_CHUNK_SIZE):
        # type: (str, PipSession, int) -> None
        head = session.head(url, headers=HEADERS)
        raise_for_status(head)
        assert head.status_code == 200
        self._session, self._url, self._chunk_size = session, url, chunk_size
        self._length = int(head.headers['Content-Length'])
        self._file = NamedTemporaryFile()
        self.truncate(self._length)
        self._left = []  # type: List[int]
        self._right = []  # type: List[int]
        if 'bytes' not in head.headers.get('Accept-Ranges', 'none'):
            raise HTTPRangeRequestUnsupported('range request is not supported')
        self._check_zip()

    @property
    def mode(self):
        # type: () -> str
        """Opening mode, which is always rb."""
        return 'rb'

    @property
    def name(self):
        # type: () -> str
        """Path to the underlying file."""
        return self._file.name

    def seekable(self):
        # type: () -> bool
        """Return whether random access is supported, which is True."""
        return True

    def close(self):
        # type: () -> None
        """Close the file."""
        self._file.close()

    @property
    def closed(self):
        # type: () -> bool
        """Whether the file is closed."""
        return self._file.closed

    def read(self, size=-1):
        # type: (int) -> bytes
        """Read up to size bytes from the object and return them.

        As a convenience, if size is unspecified or -1,
        all bytes until EOF are returned.  Fewer than
        size bytes may be returned if EOF is reached.
        """
        download_size = max(size, self._chunk_size)
        start, length = self.tell(), self._length
        stop = length if size < 0 else min(start+download_size, length)
        start = max(0, stop-download_size)
        self._download(start, stop-1)
        return self._file.read(size)

    def readable(self):
        # type: () -> bool
        """Return whether the file is readable, which is True."""
        return True

    def seek(self, offset, whence=0):
        # type: (int, int) -> int
        """Change stream position and return the new absolute position.

        Seek to offset, relative to the position indicated by whence:
        * 0: Start of stream (the default); offset should be >= 0.
        * 1: Current position; offset may be negative.
        * 2: End of stream; offset is usually negative.
        """
        return self._file.seek(offset, whence)

    def tell(self):
        # type: () -> int
        """Return the current possition."""
        return self._file.tell()

    def truncate(self, size=None):
        # type: (Optional[int]) -> int
        """Resize the stream to the given size in bytes.

        If size is unspecified resize to the current position.
        The current stream position isn't changed.

        Return the new file size.
        """
        return self._file.truncate(size)

    def writable(self):
        # type: () -> bool
        """Return False."""
        return False

    def __enter__(self):
        # type: () -> LazyZipOverHTTP
        self._file.__enter__()
        return self

    def __exit__(self, *exc):
        # type: (*Any) -> Optional[bool]
        return self._file.__exit__(*exc)

    @contextmanager
    def _stay(self):
        # type: () -> Iterator[None]
        """Return a context manager keeping the position.

        At the end of the block, seek back to original position.
        """
        pos = self.tell()
        try:
            yield
        finally:
            self.seek(pos)

    def _check_zip(self):
        # type: () -> None
        """Check and download until the file is a valid ZIP."""
        end = self._length - 1
        for start in reversed(range(0, end, self._chunk_size)):
            self._download(start, end)
            with self._stay():
                try:
                    # For read-only ZIP files, ZipFile only needs
                    # methods read, seek, seekable and tell.
                    ZipFile(self)  # type: ignore
                except BadZipfile:
                    pass
                else:
                    break

    def _stream_response(self, start, end, base_headers=HEADERS):
        # type: (int, int, Dict[str, str]) -> Response
        """Return HTTP response to a range request from start to end."""